diff --git a/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx b/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
index a1827f2f27765897d54d3485191b0f78d44f3fbf..8f98e9d7c280022551901c287a578eb223cc7501 100644
--- a/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
+++ b/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
@@ -3,6 +3,7 @@ import React, { Component, PropTypes } from "react";
 import SettingHeader from "./SettingHeader.jsx";
 
 import SettingInput from "./widgets/SettingInput.jsx";
+import SettingNumber from "./widgets/SettingNumber.jsx";
 import SettingPassword from "./widgets/SettingPassword.jsx";
 import SettingRadio from "./widgets/SettingRadio.jsx";
 import SettingToggle from "./widgets/SettingToggle.jsx";
@@ -10,6 +11,7 @@ import SettingSelect from "./widgets/SettingSelect.jsx";
 
 const SETTING_WIDGET_MAP = {
     "string":   SettingInput,
+    "number":   SettingNumber,
     "password": SettingPassword,
     "select":   SettingSelect,
     "radio":    SettingRadio,
diff --git a/frontend/src/metabase/admin/settings/components/widgets/SettingNumber.jsx b/frontend/src/metabase/admin/settings/components/widgets/SettingNumber.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..05fd4da0445808f4c7febc128deb46da6df01bd0
--- /dev/null
+++ b/frontend/src/metabase/admin/settings/components/widgets/SettingNumber.jsx
@@ -0,0 +1,8 @@
+import React from "react";
+
+import SettingInput from "./SettingInput";
+
+const SettingNumber = ({ type = "number", ...props }) =>
+    <SettingInput {...props} type={type} />
+
+export default SettingNumber;
diff --git a/frontend/src/metabase/admin/settings/selectors.js b/frontend/src/metabase/admin/settings/selectors.js
index 1f620e2ba88a90be895262ebfe397f88c5861871..ae501e697ee409d3393db53d75673754fa40a973 100644
--- a/frontend/src/metabase/admin/settings/selectors.js
+++ b/frontend/src/metabase/admin/settings/selectors.js
@@ -86,7 +86,7 @@ const SECTIONS = [
                 key: "email-smtp-port",
                 display_name: "SMTP Port",
                 placeholder: "587",
-                type: "string",
+                type: "number",
                 required: true,
                 validations: [["integer", "That's not a valid port number"]]
             },
@@ -236,6 +236,34 @@ const SECTIONS = [
                 getHidden: (settings) => !settings["enable-embedding"]
             }
         ]
+    },
+    {
+        name: "Caching",
+        settings: [
+            {
+                key: "enable-query-caching",
+                display_name: "Enable Caching",
+                type: "boolean"
+            },
+            {
+                key: "query-caching-min-ttl",
+                display_name: "Minimum Query Duration",
+                type: "number",
+                getHidden: (settings) => !settings["enable-query-caching"]
+            },
+            {
+                key: "query-caching-ttl-ratio",
+                display_name: "Cache Time-To-Live (TTL)",
+                type: "number",
+                getHidden: (settings) => !settings["enable-query-caching"]
+            },
+            {
+                key: "query-caching-max-kb",
+                display_name: "Max Cache Entry Size",
+                type: "number",
+                getHidden: (settings) => !settings["enable-query-caching"]
+            }
+        ]
     }
 ];
 for (const section of SECTIONS) {
diff --git a/frontend/src/metabase/components/ShrinkableList.jsx b/frontend/src/metabase/components/ShrinkableList.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..15f9a89a19fdd06548923f97eda749b3be7c95f9
--- /dev/null
+++ b/frontend/src/metabase/components/ShrinkableList.jsx
@@ -0,0 +1,59 @@
+/* @flow */
+
+import React, { Component, PropTypes } from "react";
+import ReactDOM from "react-dom";
+
+import ExplicitSize from "metabase/components/ExplicitSize";
+
+type Props = {
+    className?: string,
+    items: any[],
+    renderItem: (item: any) => any,
+    renderItemSmall: (item: any) => any
+};
+
+type State = {
+    isShrunk: ?boolean
+};
+
+@ExplicitSize
+export default class ShrinkableList extends Component<*, Props, State> {
+    state: State = {
+        isShrunk: null
+    }
+
+    componentWillReceiveProps() {
+        this.setState({
+            isShrunk: null
+        })
+    }
+
+    componentDidMount() {
+        this.componentDidUpdate();
+    }
+
+    componentDidUpdate() {
+        const container = ReactDOM.findDOMNode(this)
+        const { isShrunk } = this.state;
+        if (container && isShrunk === null) {
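+            // the full-size items overflow the container when scrollWidth exceeds offsetWidth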
+            this.setState({
+                isShrunk: container.scrollWidth !== container.offsetWidth
+            })
+        }
+    }
+
+    render() {
+        const { items, className, renderItemSmall, renderItem } = this.props;
+        const { isShrunk } = this.state;
+        return (
+            <div className={className}>
+                { items.map(item =>
+                    isShrunk ?
+                        renderItemSmall(item)
+                    :
+                        renderItem(item)
+                )}
+            </div>
+        );
+    }
+}
diff --git a/frontend/src/metabase/components/Tooltip.jsx b/frontend/src/metabase/components/Tooltip.jsx
index 68a84a2c6a3343882c046e3ee9bfab27ffbc4395..b892ba3e0fdfc840742c4de580e78be3b9a9350f 100644
--- a/frontend/src/metabase/components/Tooltip.jsx
+++ b/frontend/src/metabase/components/Tooltip.jsx
@@ -14,7 +14,7 @@ export default class Tooltip extends Component {
     }
 
     static propTypes = {
-        tooltip: PropTypes.node.isRequired,
+        tooltip: PropTypes.node,
         children: PropTypes.element.isRequired,
         isEnabled: PropTypes.bool,
         verticalAttachments: PropTypes.array,
diff --git a/frontend/src/metabase/css/core/flex.css b/frontend/src/metabase/css/core/flex.css
index 9ad2d5853c9fea87147af5128d3264e0e8de72e1..f88fcf8efb97e6ba990655bf4b5468b492994915 100644
--- a/frontend/src/metabase/css/core/flex.css
+++ b/frontend/src/metabase/css/core/flex.css
@@ -32,6 +32,10 @@
     justify-content: space-between;
 }
 
+.justify-end {
+    justify-content: flex-end;
+}
+
 .align-start {
     align-items: flex-start;
 }
diff --git a/frontend/src/metabase/css/query_builder.css b/frontend/src/metabase/css/query_builder.css
index a8cb318deca637f3a5104b37fdbd098911dc7753..f298eff4caf824a7d90164dc5ec9cfb288b2c88e 100644
--- a/frontend/src/metabase/css/query_builder.css
+++ b/frontend/src/metabase/css/query_builder.css
@@ -518,12 +518,13 @@
     z-index: 1;
     opacity: 1;
     box-shadow: 0 1px 2px rgba(0, 0, 0, .22);
-    transition: margin-top 0.5s, opacity 0.5s;
+    transition: transform 0.5s, opacity 0.5s;
     min-width: 8em;
+    position: relative;
 }
 
 .RunButton.RunButton--hidden {
-    margin-top: -110px;
+    transform: translateY(-65px);
     opacity: 0;
 }
 
diff --git a/frontend/src/metabase/qb/components/TimeseriesFilterWidget.jsx b/frontend/src/metabase/qb/components/TimeseriesFilterWidget.jsx
index bafa6394ae0c9402c97694f6ffd4e60f7c25a0f9..b8c6348ff9f1e3daa44f1a999de69d9b5eac16d2 100644
--- a/frontend/src/metabase/qb/components/TimeseriesFilterWidget.jsx
+++ b/frontend/src/metabase/qb/components/TimeseriesFilterWidget.jsx
@@ -34,7 +34,7 @@ type Props = {
     card: CardObject,
     tableMetadata: TableMetadata,
     setDatasetQuery: (datasetQuery: DatasetQuery) => void,
-    runQueryFn: () => void
+    runQuery: () => void
 };
 
 type State = {
@@ -90,7 +90,7 @@ export default class TimeseriesFilterWidget extends Component<*, Props, State> {
             card,
             tableMetadata,
             setDatasetQuery,
-            runQueryFn
+            runQuery
         } = this.props;
         const { filter, filterIndex, currentFilter } = this.state;
         let currentDescription;
@@ -148,7 +148,7 @@ export default class TimeseriesFilterWidget extends Component<*, Props, State> {
                                     ...card.dataset_query,
                                     query
                                 });
-                                runQueryFn();
+                                runQuery();
                             }
                             if (this._popover) {
                                 this._popover.close();
diff --git a/frontend/src/metabase/qb/components/TimeseriesGroupingWidget.jsx b/frontend/src/metabase/qb/components/TimeseriesGroupingWidget.jsx
index 4681fdbe7a139b647831b85cdeb18b9ec02bcb36..b07282f3b549e225b8b44c0d193e008b69fd5f5e 100644
--- a/frontend/src/metabase/qb/components/TimeseriesGroupingWidget.jsx
+++ b/frontend/src/metabase/qb/components/TimeseriesGroupingWidget.jsx
@@ -20,14 +20,14 @@ import type {
 type Props = {
     card: CardObject,
     setDatasetQuery: (datasetQuery: DatasetQuery) => void,
-    runQueryFn: () => void
+    runQuery: () => void
 };
 
 export default class TimeseriesGroupingWidget extends Component<*, Props, *> {
     _popover: ?any;
 
     render() {
-        const { card, setDatasetQuery, runQueryFn } = this.props;
+        const { card, setDatasetQuery, runQuery } = this.props;
         if (Card.isStructured(card)) {
             const query = Card.getQuery(card);
             const breakouts = query && Query.getBreakouts(query);
@@ -62,7 +62,7 @@ export default class TimeseriesGroupingWidget extends Component<*, Props, *> {
                                     ...card.dataset_query,
                                     query
                                 });
-                                runQueryFn();
+                                runQuery();
                                 if (this._popover) {
                                     this._popover.close();
                                 }
diff --git a/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx b/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx
index e60cbf8232ba836cec1afc73637919db6915aa07..2842d620b655da38b7c16bf7e41c1a82bc8dfab8 100644
--- a/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx
+++ b/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx
@@ -29,7 +29,7 @@ type Props = {
     lastRunCard: CardObject,
     tableMetadata: TableMetadata,
     setDatasetQuery: (datasetQuery: DatasetQuery) => void,
-    runQueryFn: () => void
+    runQuery: () => void
 };
 
 export const TimeseriesModeFooter = (props: Props) => {
diff --git a/frontend/src/metabase/query_builder/actions.js b/frontend/src/metabase/query_builder/actions.js
index 3916b647c42d54ae30d8c43ceadef1f7f42897b8..4baf7bc67caf8d82235f288e7c414acf955ba8cb 100644
--- a/frontend/src/metabase/query_builder/actions.js
+++ b/frontend/src/metabase/query_builder/actions.js
@@ -223,7 +223,7 @@ export const initializeQB = createThunkAction(INITIALIZE_QB, (location, params)
         if (card && card.dataset_query && (Query.canRun(card.dataset_query.query) || card.dataset_query.type === "native")) {
             // NOTE: timeout to allow Parameters widget to set parameterValues
             setTimeout(() =>
-                dispatch(runQuery(card, false))
+                dispatch(runQuery(card, { shouldUpdateUrl: false }))
             , 0);
         }
 
@@ -285,7 +285,7 @@ export const cancelEditing = createThunkAction(CANCEL_EDITING, () => {
         dispatch(loadMetadataForCard(card));
 
         // we do this to force the indication of the fact that the card should not be considered dirty when the url is updated
-        dispatch(runQuery(card, false));
+        dispatch(runQuery(card, { shouldUpdateUrl: false }));
         dispatch(updateUrl(card, { dirty: false }));
 
         MetabaseAnalytics.trackEvent("QueryBuilder", "Edit Cancel");
@@ -466,7 +466,7 @@ export const reloadCard = createThunkAction(RELOAD_CARD, () => {
         dispatch(loadMetadataForCard(card));
 
         // we do this to force the indication of the fact that the card should not be considered dirty when the url is updated
-        dispatch(runQuery(card, false));
+        dispatch(runQuery(card, { shouldUpdateUrl: false }));
         dispatch(updateUrl(card, { dirty: false }));
 
         return card;
@@ -482,7 +482,7 @@ export const setCardAndRun = createThunkAction(SET_CARD_AND_RUN, (runCard, shoul
 
         dispatch(loadMetadataForCard(card));
 
-        dispatch(runQuery(card, shouldUpdateUrl));
+        dispatch(runQuery(card, { shouldUpdateUrl: shouldUpdateUrl }));
 
         return card;
     };
@@ -850,7 +850,11 @@ export const removeQueryExpression = createQueryAction(
 
 // runQuery
 export const RUN_QUERY = "metabase/qb/RUN_QUERY";
-export const runQuery = createThunkAction(RUN_QUERY, (card, shouldUpdateUrl = true, parameterValues, dirty) => {
+export const runQuery = createThunkAction(RUN_QUERY, (card, {
+    shouldUpdateUrl = true,
+    ignoreCache = false, // currently only implemented for saved cards
+    parameterValues
+} = {}) => {
     return async (dispatch, getState) => {
         const state = getState();
         const parameters = getParameters(state);
@@ -880,8 +884,12 @@ export const runQuery = createThunkAction(RUN_QUERY, (card, shouldUpdateUrl = tr
         const datasetQuery = applyParameters(card, parameters, parameterValues);
 
         // use the CardApi.query if the query is saved and not dirty so users with view but not create permissions can see it.
-        if (card.id && !cardIsDirty) {
-            CardApi.query({ cardId: card.id, parameters: datasetQuery.parameters }, { cancelled: cancelQueryDeferred.promise }).then(onQuerySuccess, onQueryError);
+        if (card.id != null && !cardIsDirty) {
+            CardApi.query({
+                cardId: card.id,
+                parameters: datasetQuery.parameters,
+                ignore_cache: ignoreCache
+            }, { cancelled: cancelQueryDeferred.promise }).then(onQuerySuccess, onQueryError);
         } else {
             MetabaseApi.dataset(datasetQuery, { cancelled: cancelQueryDeferred.promise }).then(onQuerySuccess, onQueryError);
         }
@@ -1108,8 +1116,6 @@ export const toggleDataReferenceFn = toggleDataReference;
 export const onBeginEditing = beginEditing;
 export const onCancelEditing = cancelEditing;
 export const setQueryModeFn = setQueryMode;
-export const runQueryFn = runQuery;
-export const cancelQueryFn = cancelQuery;
 export const setDatabaseFn = setQueryDatabase;
 export const setSourceTableFn = setQuerySourceTable;
 export const setDisplayFn = setCardVisualization;
diff --git a/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx b/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx
index a68ac9cb3ec9b4be627ff43cf2fda37e950c0809..9c53a89e8aa66c29e8dde0c99f1a3f0f74e1e128 100644
--- a/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx
+++ b/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx
@@ -90,7 +90,7 @@ export default class NativeQueryEditor extends Component {
         nativeDatabases: PropTypes.array.isRequired,
         datasetQuery: PropTypes.object.isRequired,
         setDatasetQuery: PropTypes.func.isRequired,
-        runQueryFn: PropTypes.func.isRequired,
+        runQuery: PropTypes.func.isRequired,
         setDatabaseFn: PropTypes.func.isRequired,
         autocompleteResultsFn: PropTypes.func.isRequired,
         isOpen: PropTypes.bool,
@@ -163,10 +163,10 @@ export default class NativeQueryEditor extends Component {
                 const selectedText = this._editor.getSelectedText();
                 if (selectedText) {
                     const temporaryCard = assocIn(card, ["dataset_query", "native", "query"], selectedText);
-                    this.props.runQueryFn(temporaryCard, false, null, true);
+                    this.props.runQuery(temporaryCard, { shouldUpdateUrl: false });
                 }
             } else {
-                this.props.runQueryFn();
+                this.props.runQuery();
             }
         }
     }
diff --git a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
index b83c10c334110d563ed19da32b8065fe63ad9520..bfa9b9c33acfe617c15b2c9567611c87779b784f 100644
--- a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
+++ b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
@@ -2,6 +2,10 @@ import React, { Component, PropTypes } from "react";
 import { Link } from "react-router";
 
 import LoadingSpinner from 'metabase/components/LoadingSpinner.jsx';
+import Tooltip from "metabase/components/Tooltip";
+import Icon from "metabase/components/Icon";
+import ShrinkableList from "metabase/components/ShrinkableList";
+
 import RunButton from './RunButton.jsx';
 import VisualizationSettings from './VisualizationSettings.jsx';
 
@@ -12,13 +16,16 @@ import Warnings from "./Warnings.jsx";
 import QueryDownloadWidget from "./QueryDownloadWidget.jsx";
 import QuestionEmbedWidget from "../containers/QuestionEmbedWidget";
 
-import { formatNumber, inflect } from "metabase/lib/formatting";
+import { formatNumber, inflect, duration } from "metabase/lib/formatting";
 import Utils from "metabase/lib/utils";
 import MetabaseSettings from "metabase/lib/settings";
 import * as Urls from "metabase/lib/urls";
 
 import cx from "classnames";
 import _ from "underscore";
+import moment from "moment";
+
+const REFRESH_TOOLTIP_THRESHOLD = 30 * 1000; // 30 seconds
 
 export default class QueryVisualization extends Component {
     constructor(props, context) {
@@ -40,8 +47,8 @@ export default class QueryVisualization extends Component {
         cellClickedFn: PropTypes.func,
         isRunning: PropTypes.bool.isRequired,
         isRunnable: PropTypes.bool.isRequired,
-        runQueryFn: PropTypes.func.isRequired,
-        cancelQueryFn: PropTypes.func
+        runQuery: PropTypes.func.isRequired,
+        cancelQuery: PropTypes.func
     };
 
     static defaultProps = {
@@ -63,44 +70,85 @@ export default class QueryVisualization extends Component {
         }
     }
 
-    queryIsDirty() {
-        // a query is considered dirty if ANY part of it has been changed
-        return (
-            !Utils.equals(this.props.card.dataset_query, this.state.lastRunDatasetQuery) ||
-            !Utils.equals(this.props.parameterValues, this.state.lastRunParameterValues)
-        );
-    }
-
     isChartDisplay(display) {
         return (display !== "table" && display !== "scalar");
     }
 
+    runQuery = () => {
+        this.props.runQuery(null, { ignoreCache: true });
+    }
+
     renderHeader() {
-        const { isObjectDetail, isRunnable, isRunning, isAdmin, card, result, runQueryFn, cancelQueryFn } = this.props;
-        const isDirty = this.queryIsDirty();
+        const { isObjectDetail, isRunnable, isRunning, isResultDirty, isAdmin, card, result, cancelQuery } = this.props;
         const isSaved = card.id != null;
+
+        let runButtonTooltip;
+        if (!isResultDirty && result && result.cached && result.average_execution_time > REFRESH_TOOLTIP_THRESHOLD) {
+            runButtonTooltip = `This question will take approximately ${duration(result.average_execution_time)} to refresh`;
+        }
+
+        const messages = [];
+        if (result && result.cached) {
+            messages.push({
+                icon: "clock",
+                message: (
+                    <div>
+                        Updated {moment(result.updated_at).fromNow()}
+                    </div>
+                )
+            })
+        }
+        if (result && result.data && !isObjectDetail && card.display === "table") {
+            messages.push({
+                icon: "table2",
+                message: (
+                    <div>
+                        { result.data.rows_truncated != null ? ("Showing first ") : ("Showing ")}
+                        <strong>{formatNumber(result.row_count)}</strong>
+                        { " " + inflect("row", result.data.rows.length) }
+                    </div>
+                )
+            })
+        }
+
         const isPublicLinksEnabled = MetabaseSettings.get("public_sharing");
         const isEmbeddingEnabled = MetabaseSettings.get("embedding");
         return (
-            <div className="relative flex flex-no-shrink mt3 mb1" style={{ minHeight: "2em" }}>
-                <span className="relative z4">
+            <div className="relative flex align-center flex-no-shrink mt2 mb1" style={{ minHeight: "2em" }}>
+                <div className="z4 flex-full">
                   { !isObjectDetail && <VisualizationSettings ref="settings" {...this.props} /> }
-                </span>
-                <div className="absolute flex layout-centered left right z3">
-                    <RunButton
-                        isRunnable={isRunnable}
-                        isDirty={isDirty}
-                        isRunning={isRunning}
-                        onRun={runQueryFn}
-                        onCancel={cancelQueryFn}
-                    />
                 </div>
-                <div className="absolute right z4 flex align-center" style={{ lineHeight: 0 /* needed to align icons :-/ */ }}>
-                    { !isDirty && this.renderCount() }
+                <div className="z3">
+                    <Tooltip tooltip={runButtonTooltip}>
+                        <RunButton
+                            isRunnable={isRunnable}
+                            isDirty={isResultDirty}
+                            isRunning={isRunning}
+                            onRun={this.runQuery}
+                            onCancel={cancelQuery}
+                        />
+                    </Tooltip>
+                </div>
+                <div className="z4 flex-full flex align-center justify-end" style={{ lineHeight: 0 /* needed to align icons :-/ */ }}>
+                    <ShrinkableList
+                        className="flex"
+                        items={messages}
+                        renderItem={(item) =>
+                            <div className="flex-no-shrink flex align-center mx2 h5 text-grey-4">
+                                <Icon className="mr1" name={item.icon} size={12} />
+                                {item.message}
+                            </div>
+                        }
+                        renderItemSmall={(item) =>
+                            <Tooltip tooltip={<div className="p1">{item.message}</div>}>
+                                <Icon className="mx1" name={item.icon} size={16} />
+                            </Tooltip>
+                        }
+                    />
                     { !isObjectDetail &&
-                        <Warnings warnings={this.state.warnings} className="mx2" size={18} />
+                        <Warnings warnings={this.state.warnings} className="mx1" size={18} />
                     }
-                    { !isDirty && result && !result.error ?
+                    { !isResultDirty && result && !result.error ?
                         <QueryDownloadWidget
                             className="mx1"
                             card={card}
@@ -121,19 +169,6 @@ export default class QueryVisualization extends Component {
         );
     }
 
-    renderCount() {
-        let { result, isObjectDetail, card } = this.props;
-        if (result && result.data && !isObjectDetail && card.display === "table") {
-            return (
-                <div>
-                    { result.data.rows_truncated != null ? ("Showing first ") : ("Showing ")}
-                    <b>{formatNumber(result.row_count)}</b>
-                    { " " + inflect("row", result.data.rows.length) }.
-                </div>
-            );
-        }
-    }
-
     render() {
         const { className, card, databases, isObjectDetail, isRunning, result } = this.props
         let viz;
diff --git a/frontend/src/metabase/query_builder/components/dataref/DataReference.jsx b/frontend/src/metabase/query_builder/components/dataref/DataReference.jsx
index d706aa202c86dff9be9f120485a698f850a5cc94..31c4a8b5638d6a4e228ad55a2f24ad5761b5fd3c 100644
--- a/frontend/src/metabase/query_builder/components/dataref/DataReference.jsx
+++ b/frontend/src/metabase/query_builder/components/dataref/DataReference.jsx
@@ -26,7 +26,7 @@ export default class DataReference extends Component {
     static propTypes = {
         query: PropTypes.object.isRequired,
         onClose: PropTypes.func.isRequired,
-        runQueryFn: PropTypes.func.isRequired,
+        runQuery: PropTypes.func.isRequired,
         setDatasetQuery: PropTypes.func.isRequired,
         setDatabaseFn: PropTypes.func.isRequired,
         setSourceTableFn: PropTypes.func.isRequired,
diff --git a/frontend/src/metabase/query_builder/components/dataref/FieldPane.jsx b/frontend/src/metabase/query_builder/components/dataref/FieldPane.jsx
index 4cdde08fdb9cbf36e103bc8b713dd75058dea42d..3e301ac5e2676b8620043091f2409bcfa13ca8d8 100644
--- a/frontend/src/metabase/query_builder/components/dataref/FieldPane.jsx
+++ b/frontend/src/metabase/query_builder/components/dataref/FieldPane.jsx
@@ -28,7 +28,7 @@ export default class FieldPane extends Component {
         field: PropTypes.object.isRequired,
         datasetQuery: PropTypes.object,
         loadTableAndForeignKeysFn: PropTypes.func.isRequired,
-        runQueryFn: PropTypes.func.isRequired,
+        runQuery: PropTypes.func.isRequired,
         setDatasetQuery: PropTypes.func.isRequired,
         setCardAndRun: PropTypes.func.isRequired
     };
@@ -63,7 +63,7 @@ export default class FieldPane extends Component {
         }
         Query.addBreakout(datasetQuery.query, this.props.field.id);
         this.props.setDatasetQuery(datasetQuery);
-        this.props.runQueryFn();
+        this.props.runQuery();
     }
 
     newCard() {
diff --git a/frontend/src/metabase/query_builder/components/dataref/MetricPane.jsx b/frontend/src/metabase/query_builder/components/dataref/MetricPane.jsx
index 9c64ed132d6340e1da8a613f2c39be54e49a4a33..fc2d964519f992d7a86744ac048827468efe2fd7 100644
--- a/frontend/src/metabase/query_builder/components/dataref/MetricPane.jsx
+++ b/frontend/src/metabase/query_builder/components/dataref/MetricPane.jsx
@@ -26,7 +26,7 @@ export default class MetricPane extends Component {
         metric: PropTypes.object.isRequired,
         query: PropTypes.object,
         loadTableAndForeignKeysFn: PropTypes.func.isRequired,
-        runQueryFn: PropTypes.func.isRequired,
+        runQuery: PropTypes.func.isRequired,
         setDatasetQuery: PropTypes.func.isRequired,
         setCardAndRun: PropTypes.func.isRequired
     };
diff --git a/frontend/src/metabase/query_builder/components/dataref/SegmentPane.jsx b/frontend/src/metabase/query_builder/components/dataref/SegmentPane.jsx
index 665f1a04008872e4f2ffa428bf4d7de22d686ba6..0e3a9f4d32a23619ce3fbffa6781c33db8013ed9 100644
--- a/frontend/src/metabase/query_builder/components/dataref/SegmentPane.jsx
+++ b/frontend/src/metabase/query_builder/components/dataref/SegmentPane.jsx
@@ -27,7 +27,7 @@ export default class SegmentPane extends Component {
         segment: PropTypes.object.isRequired,
         datasetQuery: PropTypes.object,
         loadTableAndForeignKeysFn: PropTypes.func.isRequired,
-        runQueryFn: PropTypes.func.isRequired,
+        runQuery: PropTypes.func.isRequired,
         setDatasetQuery: PropTypes.func.isRequired,
         setCardAndRun: PropTypes.func.isRequired
     };
@@ -53,7 +53,7 @@ export default class SegmentPane extends Component {
         }
         Query.addFilter(datasetQuery.query, ["SEGMENT", this.props.segment.id]);
         this.props.setDatasetQuery(datasetQuery);
-        this.props.runQueryFn();
+        this.props.runQuery();
     }
 
     newCard() {
diff --git a/project.clj b/project.clj
index 0ad6cf5a9a459c654ec20ed1b66bfd8120f580e2..f972daf60dc0175ebe9ae6ca40f91f24c996eab1 100644
--- a/project.clj
+++ b/project.clj
@@ -50,6 +50,7 @@
                  [com.mattbertolini/liquibase-slf4j "2.0.0"]          ; Java Migrations lib
                  [com.mchange/c3p0 "0.9.5.2"]                         ; connection pooling library
                  [com.novemberain/monger "3.1.0"]                     ; MongoDB Driver
+                 [com.taoensso/nippy "2.13.0"]                        ; Fast serialization and compression library for Clojure
                  [compojure "1.5.2"]                                  ; HTTP Routing library built on Ring
                  [crypto-random "1.2.0"]                              ; library for generating cryptographically secure random bytes and strings
                  [environ "1.1.0"]                                    ; easy environment management
diff --git a/reset_password/metabase/reset_password/core.clj b/reset_password/metabase/reset_password/core.clj
index f963a49e8dc88550ed3788842ea355119e7d5a75..510adf2a331102750cd1a5a8ccc3f54782d3249a 100644
--- a/reset_password/metabase/reset_password/core.clj
+++ b/reset_password/metabase/reset_password/core.clj
@@ -14,10 +14,10 @@
 (defn -main
   [email-address]
   (mdb/setup-db!)
-  (println (format "Resetting password for %s..." email-address))
+  (printf "Resetting password for %s...\n" email-address)
   (try
-    (println (format "OK [[[%s]]]" (set-reset-token! email-address)))
+    (printf "OK [[[%s]]]\n" (set-reset-token! email-address))
     (System/exit 0)
     (catch Throwable e
-      (println (format "FAIL [[[%s]]]" (.getMessage e)))
+      (printf "FAIL [[[%s]]]\n" (.getMessage e))
       (System/exit -1))))
diff --git a/resources/migrations/052_add_query_cache_table.yaml b/resources/migrations/052_add_query_cache_table.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e21e19f5047f4a544217a8faf4d822d624cae3b0
--- /dev/null
+++ b/resources/migrations/052_add_query_cache_table.yaml
@@ -0,0 +1,49 @@
+databaseChangeLog:
+  - property:
+      name: blob.type
+      value: blob
+      dbms: mysql,h2
+  - property:
+      name: blob.type
+      value: bytea
+      dbms: postgresql
+  - changeSet:
+      id: 52
+      author: camsaul
+      changes:
+        - createTable:
+            tableName: query_cache
+            remarks: 'Cached results of queries are stored here when using the DB-based query cache.'
+            columns:
+              - column:
+                  name: query_hash
+                  type: binary(32)
+                  remarks: 'The hash of the query dictionary. (This is a 256-bit SHA3 hash of the query dict).'
+                  constraints:
+                    primaryKey: true
+                    nullable: false
+              - column:
+                  name: updated_at
+                  type: datetime
+                  remarks: 'The timestamp of when these query results were last refreshed.'
+                  constraints:
+                    nullable: false
+              - column:
+                  name: results
+                  type: ${blob.type}
+                  remarks: 'Cached, compressed results of running the query with the given hash.'
+                  constraints:
+                    nullable: false
+        - createIndex:
+            tableName: query_cache
+            indexName: idx_query_cache_updated_at
+            columns:
+              column:
+                name: updated_at
+        - addColumn:
+            tableName: report_card
+            columns:
+              - column:
+                  name: cache_ttl
+                  type: int
+                  remarks: 'The maximum time, in seconds, to return cached results for this Card rather than running a new query.'
diff --git a/resources/migrations/053_add_query_table.yaml b/resources/migrations/053_add_query_table.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..772de999cb50a8a43200f4f043ff3034ed93d425
--- /dev/null
+++ b/resources/migrations/053_add_query_table.yaml
@@ -0,0 +1,22 @@
+databaseChangeLog:
+  - changeSet:
+      id: 53
+      author: camsaul
+      changes:
+        - createTable:
+            tableName: query
+            remarks: 'Information (such as average execution time) for different queries that have previously been run.'
+            columns:
+              - column:
+                  name: query_hash
+                  type: binary(32)
+                  remarks: 'The hash of the query dictionary. (This is a 256-bit SHA3 hash of the query dict.)'
+                  constraints:
+                    primaryKey: true
+                    nullable: false
+              - column:
+                  name: average_execution_time
+                  type: int
+                  remarks: 'Average execution time for the query, rounded to the nearest millisecond. This is updated as a rolling average.'
+                  constraints:
+                    nullable: false
diff --git a/sample_dataset/metabase/sample_dataset/generate.clj b/sample_dataset/metabase/sample_dataset/generate.clj
index 3807dbcc1427aa73d7334a964379adae8c0a4a86..50bd431ce21ea4db945264236063c992fae29c91 100644
--- a/sample_dataset/metabase/sample_dataset/generate.clj
+++ b/sample_dataset/metabase/sample_dataset/generate.clj
@@ -253,7 +253,7 @@
           (= (count (:products %)) products)
           (every? keyword? (keys %))
           (every? sequential? (vals %))]}
-  (println (format "Generating random data: %d people, %d products..." people products))
+  (printf "Generating random data: %d people, %d products...\n" people products)
   (let [products (mapv product-add-reviews (create-randoms products random-product))
         people   (mapv (partial person-add-orders products) (create-randoms people random-person))]
     {:people   (mapv #(dissoc % :orders) people)
@@ -416,6 +416,6 @@
 
 (defn -main [& [filename]]
   (let [filename (or filename sample-dataset-filename)]
-    (println (format "Writing sample dataset to %s..." filename))
+    (printf "Writing sample dataset to %s...\n" filename)
     (create-h2-db filename)
     (System/exit 0)))
diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj
index 3c3b09c511463c7a0e5d6d112de9f85b850a723f..0abd7ad37015666f4dc86d6e107fc87ea6d5a5eb 100644
--- a/src/metabase/api/card.clj
+++ b/src/metabase/api/card.clj
@@ -1,5 +1,6 @@
 (ns metabase.api.card
   (:require [clojure.data :as data]
+            [clojure.tools.logging :as log]
             [cheshire.core :as json]
             [compojure.core :refer [GET POST DELETE PUT]]
             [schema.core :as s]
@@ -18,10 +19,14 @@
                              [interface :as mi]
                              [label :refer [Label]]
                              [permissions :as perms]
+                             [query :as query]
                              [table :refer [Table]]
                              [view-log :refer [ViewLog]])
-            (metabase [query-processor :as qp]
-                      [util :as u])
+            [metabase.public-settings :as public-settings]
+            [metabase.query-processor :as qp]
+            [metabase.query-processor.middleware.cache :as cache]
+            [metabase.query-processor.util :as qputil]
+            [metabase.util :as u]
             [metabase.util.schema :as su])
   (:import java.util.UUID))
 
@@ -354,6 +359,26 @@
 
 ;;; ------------------------------------------------------------ Running a Query ------------------------------------------------------------
 
+(defn- query-magic-ttl
+  "Compute a 'magic' cache TTL time (in seconds) for QUERY by multipling its historic average execution times by the `query-caching-ttl-ratio`.
+   If the TTL is less than a second, this returns `nil` (i.e., the cache should not be utilized.)"
+  [query]
+  (when-let [average-duration (query/average-execution-time-ms (qputil/query-hash query))]
+    (let [ttl-seconds (Math/round (float (/ (* average-duration (public-settings/query-caching-ttl-ratio))
+                                            1000.0)))]
+      (when-not (zero? ttl-seconds)
+        (log/info (format "Question's average execution duration is %d ms; using 'magic' TTL of %d seconds" average-duration ttl-seconds) (u/emoji "💾"))
+        ttl-seconds))))
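+;; Example (illustrative): with an average execution time of 5,000 ms and the default `query-caching-ttl-ratio`
+;; of 10, the TTL computed above works out to (5000 * 10) / 1000 = 50 seconds.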
+
+(defn- query-for-card [card parameters constraints]
+  (let [query (assoc (:dataset_query card)
+                :constraints constraints
+                :parameters  parameters)
+        ttl   (when (public-settings/enable-query-caching)
+                (or (:cache_ttl card)
+                    (query-magic-ttl query)))]
+    (assoc query :cache_ttl ttl)))
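+;; Note: in `query-for-card` above, a Card's explicit `:cache_ttl` takes precedence over the computed 'magic' TTL.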
+
 (defn run-query-for-card
   "Run the query for Card with PARAMETERS and CONSTRAINTS, and return results in the usual format."
   {:style/indent 1}
@@ -362,9 +387,7 @@
                      context     :question}}]
   {:pre [(u/maybe? sequential? parameters)]}
   (let [card    (read-check Card card-id)
-        query   (assoc (:dataset_query card)
-                  :parameters  parameters
-                  :constraints constraints)
+        query   (query-for-card card parameters constraints)
         options {:executed-by  *current-user-id*
                  :context      context
                  :card-id      card-id
@@ -374,26 +397,30 @@
 
 (defendpoint POST "/:card-id/query"
   "Run the query associated with a Card."
-  [card-id, :as {{:keys [parameters]} :body}]
-  (run-query-for-card card-id, :parameters parameters))
+  [card-id :as {{:keys [parameters ignore_cache], :or {ignore_cache false}} :body}]
+  {ignore_cache (s/maybe s/Bool)}
+  (binding [cache/*ignore-cached-results* ignore_cache]
+    (run-query-for-card card-id, :parameters parameters)))
 
 (defendpoint POST "/:card-id/query/csv"
   "Run the query associated with a Card, and return its results as CSV. Note that this expects the parameters as serialized JSON in the 'parameters' parameter"
   [card-id parameters]
   {parameters (s/maybe su/JSONString)}
-  (dataset-api/as-csv (run-query-for-card card-id
-                        :parameters  (json/parse-string parameters keyword)
-                        :constraints nil
-                        :context     :csv-download)))
+  (binding [cache/*ignore-cached-results* true]
+    (dataset-api/as-csv (run-query-for-card card-id
+                          :parameters  (json/parse-string parameters keyword)
+                          :constraints nil
+                          :context     :csv-download))))
 
 (defendpoint POST "/:card-id/query/json"
   "Run the query associated with a Card, and return its results as JSON. Note that this expects the parameters as serialized JSON in the 'parameters' parameter"
   [card-id parameters]
   {parameters (s/maybe su/JSONString)}
-  (dataset-api/as-json (run-query-for-card card-id
-                         :parameters  (json/parse-string parameters keyword)
-                         :constraints nil
-                         :context     :json-download)))
+  (binding [cache/*ignore-cached-results* true]
+    (dataset-api/as-json (run-query-for-card card-id
+                           :parameters  (json/parse-string parameters keyword)
+                           :constraints nil
+                           :context     :json-download))))
 
 
 ;;; ------------------------------------------------------------ Sharing is Caring ------------------------------------------------------------
diff --git a/src/metabase/api/dataset.clj b/src/metabase/api/dataset.clj
index ddeaf0e370cf7040ede72a001eb480c11d3e61ca..0dae851c67f8b593f82f55715825106e9029bb05 100644
--- a/src/metabase/api/dataset.clj
+++ b/src/metabase/api/dataset.clj
@@ -8,6 +8,7 @@
                     [hydrate :refer [hydrate]])
             (metabase.models [card :refer [Card]]
                              [database :refer [Database]]
+                             [query :as query]
                              [query-execution :refer [QueryExecution]])
             [metabase.query-processor :as qp]
             [metabase.query-processor.util :as qputil]
@@ -41,8 +42,8 @@
   (read-check Database database)
   ;; try calculating the average for the query as it was given to us, otherwise with the default constraints if there's no data there.
   ;; if we still can't find relevant info, just default to 0
-  {:average (or (qputil/query-average-duration query)
-                (qputil/query-average-duration (assoc query :constraints default-query-constraints))
+  {:average (or (query/average-execution-time-ms (qputil/query-hash query))
+                (query/average-execution-time-ms (qputil/query-hash (assoc query :constraints default-query-constraints)))
                 0)})
 
 (defn as-csv
diff --git a/src/metabase/cmd/load_from_h2.clj b/src/metabase/cmd/load_from_h2.clj
index cd73db016935c026d12edb1c7664b014656df981..879b4e35ac043a33068fcadc2387b8b9c38370ec 100644
--- a/src/metabase/cmd/load_from_h2.clj
+++ b/src/metabase/cmd/load_from_h2.clj
@@ -46,7 +46,6 @@
                              [pulse-card :refer [PulseCard]]
                              [pulse-channel :refer [PulseChannel]]
                              [pulse-channel-recipient :refer [PulseChannelRecipient]]
-                             [query-execution :refer [QueryExecution]]
                              [raw-column :refer [RawColumn]]
                              [raw-table :refer [RawTable]]
                              [revision :refer [Revision]]
@@ -89,7 +88,6 @@
    DashboardCard
    DashboardCardSeries
    Activity
-   QueryExecution
    Pulse
    PulseCard
    PulseChannel
diff --git a/src/metabase/config.clj b/src/metabase/config.clj
index e5d45b1b6c7dab6b361c24b6554237862ea7bc29..ac7ff3fd221372021a4a8776af0136b0b9ad5fa1 100644
--- a/src/metabase/config.clj
+++ b/src/metabase/config.clj
@@ -29,7 +29,8 @@
    :mb-version-info-url "http://static.metabase.com/version-info.json"
    :max-session-age "20160"                     ; session length in minutes (14 days)
    :mb-colorize-logs "true"
-   :mb-emoji-in-logs "true"})
+   :mb-emoji-in-logs "true"
+   :mb-qp-cache-backend "db"})
 
 
 (defn config-str
diff --git a/src/metabase/email.clj b/src/metabase/email.clj
index 0bbc09ffc3fb8b3a32d263a82ad25fdbf0ec3499..9731b50acdbdae802f760dda254ccf090546ef74 100644
--- a/src/metabase/email.clj
+++ b/src/metabase/email.clj
@@ -6,7 +6,8 @@
             [metabase.util :as u])
   (:import javax.mail.Session))
 
-;; ## CONFIG
+;;; CONFIG
+;; TODO - smtp-port should be switched to type :integer
 
 (defsetting email-from-address  "Email address you want to use as the sender of Metabase." :default "notifications@metabase.com")
 (defsetting email-smtp-host     "The address of the SMTP server that handles your emails.")
diff --git a/src/metabase/middleware.clj b/src/metabase/middleware.clj
index f460dac615cd142fa07cc597e4c9ec6ac8160221..88f19c572b8683105bff19f4bfbbbbf5b27c4800 100644
--- a/src/metabase/middleware.clj
+++ b/src/metabase/middleware.clj
@@ -92,10 +92,10 @@
   [session-id]
   (when (and session-id (init-status/complete?))
     (when-let [session (or (session-with-id session-id)
-                           (println "no matching session with ID") ; NOCOMMIT
+                           (println "no matching session with ID") ; DEBUG
                            )]
       (if (session-expired? session)
-        (println (format "session-is-expired! %d min / %d min" (session-age-minutes session) (config/config-int :max-session-age))) ; NOCOMMIT
+        (printf "session-is-expired! %d min / %d min\n" (session-age-minutes session) (config/config-int :max-session-age)) ; DEBUG
         {:metabase-user-id (:user_id session)
          :is-superuser?    (:is_superuser session)}))))
 
diff --git a/src/metabase/models/interface.clj b/src/metabase/models/interface.clj
index 804877b9dd93f5c4cba623b15d2d48aa53bb7016..308210288029306ae3d095a390eb1a00a84bfffd 100644
--- a/src/metabase/models/interface.clj
+++ b/src/metabase/models/interface.clj
@@ -1,15 +1,20 @@
 (ns metabase.models.interface
   (:require [clojure.core.memoize :as memoize]
             [cheshire.core :as json]
+            [taoensso.nippy :as nippy]
             [toucan.models :as models]
             [metabase.config :as config]
             [metabase.util :as u]
-            [metabase.util.encryption :as encryption]))
+            [metabase.util.encryption :as encryption])
+  (:import java.sql.Blob))
 
 ;;; ------------------------------------------------------------ Toucan Extensions ------------------------------------------------------------
 
 (models/set-root-namespace! 'metabase.models)
 
+
+;;; types
+
 (defn- json-in [obj]
   (if (string? obj)
     obj
@@ -39,6 +44,24 @@
   :in  encrypted-json-in
   :out (comp cached-encrypted-json-out u/jdbc-clob->str))
 
+(defn compress
+  "Compress OBJ, returning a byte array."
+  [obj]
+  (nippy/freeze obj {:compressor nippy/snappy-compressor}))
+
+(defn decompress
+  "Decompress COMPRESSED-BYTES."
+  [compressed-bytes]
+  (if (instance? Blob compressed-bytes)
+    (recur (.getBytes ^Blob compressed-bytes 0 (.length ^Blob compressed-bytes)))
+    (nippy/thaw compressed-bytes {:compressor nippy/snappy-compressor})))
+
+(models/add-type! :compressed
+  :in  compress
+  :out decompress)
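+;; Round-trip sanity check (illustrative): `(decompress (compress {:rows [[1 2 3]]}))` should return the original map.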
+
+
+;;; properties
 
 (defn- add-created-at-timestamp [obj & _]
   (assoc obj :created_at (u/new-sql-timestamp)))
@@ -50,6 +73,11 @@
   :insert (comp add-created-at-timestamp add-updated-at-timestamp)
   :update add-updated-at-timestamp)
 
+;; like `timestamped?`, but for models that only have an `:updated_at` column
+(models/add-property! :updated-at-timestamped?
+  :insert add-updated-at-timestamp
+  :update add-updated-at-timestamp)
+
 
 ;;; ------------------------------------------------------------ New Permissions Stuff ------------------------------------------------------------
 
diff --git a/src/metabase/models/query.clj b/src/metabase/models/query.clj
new file mode 100644
index 0000000000000000000000000000000000000000..74c80cba809607118cb40df8ac24b4ee3354e5be
--- /dev/null
+++ b/src/metabase/models/query.clj
@@ -0,0 +1,41 @@
+(ns metabase.models.query
+  (:require (toucan [db :as db]
+                    [models :as models])
+            [metabase.db :as mdb]
+            [metabase.util :as u]
+            [metabase.util.honeysql-extensions :as hx]))
+
+(models/defmodel Query :query)
+
+;;; Helper Fns
+
+(defn average-execution-time-ms
+  "Fetch the average execution time (in milliseconds) for query with QUERY-HASH if available.
+   Returns `nil` if no information is available."
+  ^Integer [^bytes query-hash]
+  {:pre [(instance? (Class/forName "[B") query-hash)]}
+  (db/select-one-field :average_execution_time Query :query_hash query-hash))
+
+(defn- int-casting-type
+  "Return appropriate type for use in SQL `CAST(x AS type)` statement.
+   MySQL doesn't accept `integer`, so we have to use `unsigned`; Postgres doesn't accept `unsigned`,
+   so we have to use `integer`. Yay SQL dialect differences :D"
+  []
+  (if (= (mdb/db-type) :mysql)
+    :unsigned
+    :integer))
+
+(defn update-average-execution-time!
+  "Update the recorded average execution time for query with QUERY-HASH."
+  ^Integer [^bytes query-hash, ^Integer execution-time-ms]
+  {:pre [(instance? (Class/forName "[B") query-hash)]}
+  (or
+   ;; if there's already a matching Query update the rolling average
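+   ;; e.g. (illustrative) an existing average of 1,000 ms and a new run of 2,000 ms give 0.9 * 1000 + 0.1 * 2000 = 1,100 ms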
+   (db/update-where! Query {:query_hash query-hash}
+     :average_execution_time (hx/cast (int-casting-type) (hx/round (hx/+ (hx/* 0.9 :average_execution_time)
+                                                                         (*    0.1 execution-time-ms))
+                                                                   0)))
+   ;; otherwise add a new entry, using the value of EXECUTION-TIME-MS as a starting point
+   (db/insert! Query
+     :query_hash             query-hash
+     :average_execution_time execution-time-ms)))
diff --git a/src/metabase/models/query_cache.clj b/src/metabase/models/query_cache.clj
new file mode 100644
index 0000000000000000000000000000000000000000..e4277919d641cb58629f75311f2da3093c13794a
--- /dev/null
+++ b/src/metabase/models/query_cache.clj
@@ -0,0 +1,12 @@
+(ns metabase.models.query-cache
+  "A model used to cache query results in the database."
+  (:require [toucan.models :as models]
+            [metabase.util :as u]))
+
+(models/defmodel QueryCache :query_cache)
+
+(u/strict-extend (class QueryCache)
+  models/IModel
+  (merge models/IModelDefaults
+         {:types      (constantly {:results :compressed})
+          :properties (constantly {:updated-at-timestamped? true})}))
diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj
index 970f490a27efab049bce36401c38a49df17a718a..713a2b92f49c6ae73399e1fd6b0d55835e7aaaca 100644
--- a/src/metabase/models/setting.clj
+++ b/src/metabase/models/setting.clj
@@ -53,7 +53,7 @@
 
 
 (def ^:private Type
-  (s/enum :string :boolean :json))
+  (s/enum :string :boolean :json :integer))
 
 (def ^:private SettingDefinition
   {:name        s/Keyword
@@ -152,6 +152,12 @@
   ^Boolean [setting-or-name]
   (string->boolean (get-string setting-or-name)))
 
+(defn get-integer
+  "Get integer value of (presumably `:integer`) SETTING-OR-NAME. This is the default getter for `:integer` settings."
+  ^Integer [setting-or-name]
+  (when-let [s (get-string setting-or-name)]
+    (Integer/parseInt s)))
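+;; e.g. (illustrative): `(get-integer :query-caching-max-kb)` returns 1000 when the stored string value is "1000"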
+
 (defn get-json
   "Get the string value of SETTING-OR-NAME and parse it as JSON."
   [setting-or-name]
@@ -160,6 +166,7 @@
 (def ^:private default-getter-for-type
   {:string  get-string
    :boolean get-boolean
+   :integer get-integer
    :json    get-json})
 
 (defn get
@@ -220,6 +227,15 @@
                                    false "false"
                                    nil   nil))))
 
+(defn set-integer!
+  "Set the value of integer SETTING-OR-NAME."
+  [setting-or-name new-value]
+  (set-string! setting-or-name (when new-value
+                                 (assert (or (integer? new-value)
+                                             (and (string? new-value)
+                                                  (re-matches #"^\d+$" new-value))))
+                                 (str new-value))))
+
 (defn set-json!
   "Serialize NEW-VALUE for SETTING-OR-NAME as a JSON string and save it."
   [setting-or-name new-value]
@@ -229,6 +245,7 @@
 (def ^:private default-setter-for-type
   {:string  set-string!
    :boolean set-boolean!
+   :integer set-integer!
    :json    set-json!})
 
 (defn set!
@@ -316,7 +333,7 @@
    You may optionally pass any of the OPTIONS below:
 
    *  `:default` - The default value of the setting. (default: `nil`)
-   *  `:type` - `:string` (default), `:boolean`, or `:json`. Non-`:string` settings have special default getters and setters that automatically coerce values to the correct types.
+   *  `:type` - `:string` (default), `:boolean`, `:integer`, or `:json`. Non-`:string` settings have special default getters and setters that automatically coerce values to the correct types.
    *  `:internal?` - This `Setting` is for internal use and shouldn't be exposed in the UI (i.e., not
                      returned by the corresponding endpoints). Default: `false`
    *  `:getter` - A custom getter fn, which takes no arguments. Overrides the default implementation.
diff --git a/src/metabase/public_settings.clj b/src/metabase/public_settings.clj
index 107170404c3abdb8e97b213e3541995cc9523398..4d9900614fa575d603f9838e7abeefbc22074568 100644
--- a/src/metabase/public_settings.clj
+++ b/src/metabase/public_settings.clj
@@ -53,6 +53,40 @@
   :type    :boolean
   :default false)
 
+
+(defsetting enable-query-caching
+  "Enabling caching will save the results of queries that take a long time to run."
+  :type    :boolean
+  :default false)
+
+(defsetting query-caching-max-kb
+  "The maximum size of the cache per card, in kilobytes:"
+  ;; (This size is a measurement of the length of *uncompressed* serialized result *rows*. The actual size of
+  ;; the results as stored will vary somewhat, since this measurement doesn't include metadata returned with the
+  ;; results, and doesn't consider whether the results are compressed, as the `:db` backend does.)
+  :type    :integer
+  :default 1000)
+
+(defsetting query-caching-max-ttl
+  "The absoulte maximum time to keep any cached query results, in seconds."
+  :type    :integer
+  :default (* 60 60 24 100)) ; 100 days
+
+(defsetting query-caching-min-ttl
+  "Metabase will cache all saved questions with an average query execution time longer than
+   this many seconds:"
+  :type    :integer
+  :default 60)
+
+(defsetting query-caching-ttl-ratio
+  "To determine how long each saved question's cached result should stick around, we take the
+   query's average execution time and multiply that by whatever you input here. So if a query
+   takes on average 2 minutes to run, and you input 10 for your multiplier, its cache entry
+   will persist for 20 minutes."
+  :type    :integer
+  :default 10)
+
+
 (defn remove-public-uuid-if-public-sharing-is-disabled
   "If public sharing is *disabled* and OBJECT has a `:public_uuid`, remove it so people don't try to use it (since it won't work).
    Intended for use as part of a `post-select` implementation for Cards and Dashboards."
@@ -81,6 +115,7 @@
    :anon_tracking_enabled (anon-tracking-enabled)
    :custom_geojson        (setting/get :custom-geojson)
    :email_configured      ((resolve 'metabase.email/email-configured?))
+   :enable_query_caching  (enable-query-caching)
    :engines               ((resolve 'metabase.driver/available-drivers))
    :ga_code               "UA-60817802-1"
    :google_auth_client_id (setting/get :google-auth-client-id)
diff --git a/src/metabase/query_processor.clj b/src/metabase/query_processor.clj
index f08dfa5b4bee3857c06b897dce41b4a81b8fcf7b..834d312848664a35d1e38417a565cc2ed27cb2cf 100644
--- a/src/metabase/query_processor.clj
+++ b/src/metabase/query_processor.clj
@@ -4,13 +4,15 @@
             [schema.core :as s]
             [toucan.db :as db]
             [metabase.driver :as driver]
-            [metabase.models.query-execution :refer [QueryExecution], :as query-execution]
+            (metabase.models [query :as query]
+                             [query-execution :refer [QueryExecution], :as query-execution])
             [metabase.query-processor.util :as qputil]
             (metabase.query-processor.middleware [add-implicit-clauses :as implicit-clauses]
                                                  [add-row-count-and-status :as row-count-and-status]
                                                  [add-settings :as add-settings]
                                                  [annotate-and-sort :as annotate-and-sort]
                                                  [catch-exceptions :as catch-exceptions]
+                                                 [cache :as cache]
                                                  [cumulative-aggregations :as cumulative-ags]
                                                  [dev :as dev]
                                                  [driver-specific :as driver-specific]
@@ -79,8 +81,9 @@
        driver-specific/process-query-in-context      ; (drivers can inject custom middleware if they implement IDriver's `process-query-in-context`)
        add-settings/add-settings
        resolve-driver/resolve-driver                 ; ▲▲▲ DRIVER RESOLUTION POINT ▲▲▲ All functions *above* will have access to the driver during PRE- *and* POST-PROCESSING
-       catch-exceptions/catch-exceptions
-       log-query/log-initial-query)
+       log-query/log-initial-query
+       cache/maybe-return-cached-results
+       catch-exceptions/catch-exceptions)
    query))
 ;; ▲▲▲ PRE-PROCESSING ▲▲▲ happens from BOTTOM-TO-TOP, e.g. the results of `expand-macros` are (eventually) passed to `expand-resolve`
 
@@ -100,9 +103,10 @@
 ;;; +----------------------------------------------------------------------------------------------------+
 
 (defn- save-query-execution!
-  "Save (or update) a `QueryExecution`."
+  "Save a `QueryExecution` and update the average execution time for the corresponding `Query`."
   [query-execution]
   (u/prog1 query-execution
+    (query/update-average-execution-time! (:hash query-execution) (:running_time query-execution))
     (db/insert! QueryExecution (dissoc query-execution :json_query))))
 
 (defn- save-and-return-failed-query!
@@ -126,17 +130,20 @@
 (defn- save-and-return-successful-query!
   "Save QueryExecution state and construct a completed (successful) query response"
   [query-execution query-result]
-  ;; record our query execution and format response
-  (-> (assoc query-execution
-        :running_time (- (System/currentTimeMillis)
-                         (:start_time_millis query-execution))
-        :result_rows  (get query-result :row_count 0))
-      (dissoc :start_time_millis)
-      save-query-execution!
-      ;; at this point we've saved and we just need to massage things into our final response format
-      (dissoc :error :result_rows :hash :executor_id :native :card_id :dashboard_id :pulse_id)
-      (merge query-result)
-      (assoc :status :completed)))
+  (let [query-execution (-> (assoc query-execution
+                              :running_time (- (System/currentTimeMillis)
+                                               (:start_time_millis query-execution))
+                              :result_rows  (get query-result :row_count 0))
+                            (dissoc :start_time_millis))]
+    ;; only insert a new record into QueryExecution if the results *were not* cached (i.e., only if a Query was actually run)
+    (when-not (:cached query-result)
+      (save-query-execution! query-execution))
+    ;; ok, now return the results in the normal response format
+    (merge (dissoc query-execution :error :result_rows :hash :executor_id :native :card_id :dashboard_id :pulse_id)
+           query-result
+           {:status                 :completed
+            :average_execution_time (when (:cached query-result)
+                                      (query/average-execution-time-ms (:hash query-execution)))})))
 
 
 (defn- assert-query-status-successful
diff --git a/src/metabase/query_processor/middleware/cache.clj b/src/metabase/query_processor/middleware/cache.clj
new file mode 100644
index 0000000000000000000000000000000000000000..06bea49173d50e81db33bda3737b1238e842219a
--- /dev/null
+++ b/src/metabase/query_processor/middleware/cache.clj
@@ -0,0 +1,146 @@
+(ns metabase.query-processor.middleware.cache
+  "Middleware that returns cached results for queries when applicable.
+
+   If caching is enabled (`enable-query-caching` is `true`), cached results will be returned for Cards when possible. There's
+   a global default TTL defined by the setting `query-caching-default-ttl`, but individual Cards can override this value
+   with a custom `:cache_ttl` of their own.
+
+   For all other queries, caching is skipped.
+
+   Various caching backends are defined in `metabase.query-processor.middleware.cache-backend` namespaces.
+   The default backend is `db`, which uses the application database; this value can be changed by setting the env var
+   `MB_QP_CACHE_BACKEND`.
+
+   Refer to `metabase.query-processor.middleware.cache-backend.interface` for more details about how the cache backends themselves are implemented."
+  (:require [clojure.tools.logging :as log]
+            [metabase.config :as config]
+            [metabase.public-settings :as public-settings]
+            [metabase.query-processor.middleware.cache-backend.interface :as i]
+            [metabase.query-processor.util :as qputil]
+            [metabase.util :as u]))
+
+(def ^:dynamic ^Boolean *ignore-cached-results*
+  "Should we force the query to run, ignoring cached results even if they're available?
+   Setting this to `true` will run the query again and will still save the updated results."
+  false)
+
+
+;;; ------------------------------------------------------------ Backend ------------------------------------------------------------
+
+(def ^:private backend-instance
+  (atom nil))
+
+(defn- valid-backend? [instance] (extends? i/IQueryProcessorCacheBackend (class instance)))
+
+(defn- get-backend-instance-in-namespace
+  "Return a valid query cache backend `instance` in BACKEND-NS-SYMB, or throw an Exception if none exists."
+  ;; if for some reason the resolved var doesn't satisfy `IQueryProcessorCacheBackend` we'll reload the namespace
+  ;; it belongs to and try one more time.
+  ;; This fixes an issue in dev sessions where the interface namespace gets reloaded, causing the cache implementation
+  ;; to no longer satisfy the protocol.
+  ([backend-ns-symb]
+   (get-backend-instance-in-namespace backend-ns-symb :allow-reload))
+  ([backend-ns-symb allow-reload?]
+   (let [varr (ns-resolve backend-ns-symb 'instance)]
+     (cond
+       (not varr)             (throw (Exception. (str "No var named 'instance' found in namespace " backend-ns-symb)))
+       (valid-backend? @varr) @varr
+       allow-reload?          (do (require backend-ns-symb :reload)
+                                  (get-backend-instance-in-namespace backend-ns-symb false))
+       :else                  (throw (Exception. (format "%s/instance doesn't satisfy IQueryProcessorCacheBackend" backend-ns-symb)))))))
+
+(defn- set-backend!
+  "Set the cache backend to the cache defined by the keyword BACKEND.
+
+   (This should be something like `:db`, `:redis`, or `:memcached`. See the
+   documentation in `metabase.query-processor.middleware.cache-backend.interface` for details on how this works.)"
+  ([]
+   (set-backend! (config/config-kw :mb-qp-cache-backend)))
+  ([backend]
+   (let [backend-ns (symbol (str "metabase.query-processor.middleware.cache-backend." (munge (name backend))))]
+     (require backend-ns)
+     (log/info "Using query processor cache backend:" (u/format-color 'blue backend) (u/emoji "💾"))
+     (reset! backend-instance (get-backend-instance-in-namespace backend-ns)))))
+
+
+
+;;; ------------------------------------------------------------ Cache Operations ------------------------------------------------------------
+
+(defn- cached-results [query-hash max-age-seconds]
+  (when-not *ignore-cached-results*
+    (when-let [results (i/cached-results @backend-instance query-hash max-age-seconds)]
+      (assert (u/is-temporal? (:updated_at results))
+        "cached-results should include an `:updated_at` field containing the date when the query was last ran.")
+      (log/info "Returning cached results for query" (u/emoji "💾"))
+      (assoc results :cached true))))
+
+(defn- save-results! [query-hash results]
+  (log/info "Caching results for next time for query" (u/emoji "💾"))
+  (i/save-results! @backend-instance query-hash results))
+
+
+;;; ------------------------------------------------------------ Middleware ------------------------------------------------------------
+
+(defn- is-cacheable? ^Boolean [{cache-ttl :cache_ttl}]
+  (boolean (and (public-settings/enable-query-caching)
+                cache-ttl)))
+
+(defn- results-are-below-max-byte-threshold?
+  "Measure the size of the `:rows` in QUERY-RESULTS and see whether they're smaller than `query-caching-max-kb`
+   *before* compression."
+  ^Boolean [{{rows :rows} :data}]
+  (let [max-bytes (* (public-settings/query-caching-max-kb) 1024)]
+    ;; We don't want to serialize the entire result set since that could explode if the query is one that returns a
+    ;; huge number of rows. (We also want to keep `:rows` lazy.)
+    ;; So we'll serialize one row at a time, and keep a running total of bytes; if we pass the `query-caching-max-kb`
+    ;; threshold, we'll fail right away.
+    (loop [total-bytes 0, [row & more] rows]
+      (cond
+        (> total-bytes max-bytes) false
+        (not row)                 true
+        :else                     (recur (+ total-bytes (count (str row)))
+                                         more)))))
+
+(defn- save-results-if-successful! [query-hash results]
+  (when (and (= (:status results) :completed)
+             (or (results-are-below-max-byte-threshold? results)
+                 (log/info "Results are too large to cache." (u/emoji "😫"))))
+    (save-results! query-hash results)))
+
+(defn- run-query-and-save-results-if-successful! [query-hash qp query]
+  (let [start-time-ms (System/currentTimeMillis)
+        results       (qp query)
+        total-time-ms (- (System/currentTimeMillis) start-time-ms)
+        min-ttl-ms    (* (public-settings/query-caching-min-ttl) 1000)]
+    (log/info (format "Query took %d ms to run; miminum for cache eligibility is %d ms" total-time-ms min-ttl-ms))
+    (when (>= total-time-ms min-ttl-ms)
+      (save-results-if-successful! query-hash results))
+    results))
+
+(defn- run-query-with-cache [qp {cache-ttl :cache_ttl, :as query}]
+  (let [query-hash (qputil/query-hash query)]
+    (or (cached-results query-hash cache-ttl)
+        (run-query-and-save-results-if-successful! query-hash qp query))))
+
+
+(defn maybe-return-cached-results
+  "Middleware for caching results of a query if applicable.
+   In order for a query to be eligible for caching:
+
+     *  Caching (the `enable-query-caching` Setting) must be enabled
+     *  The query must include a `:cache_ttl` value. For Cards, this is the Card's own `:cache_ttl` if set,
+        otherwise the value of the `query-caching-default-ttl` Setting.
+     *  The query must already be permissions-checked. Since the cache bypasses the normal
+        query processor pipeline, the ad-hoc permissions-checking middleware isn't applied for cached results.
+        (The various `/api/card/` endpoints that make use of caching do `can-read?` checks for the Card *before*
+        running the query, satisfying this requirement.)
+     *  The result *rows* of the query must total less than `query-caching-max-kb` kilobytes when serialized (before compression)."
+  [qp]
+  ;; choose the caching backend if needed
+  (when-not @backend-instance
+    (set-backend!))
+  ;; ok, now do the normal middleware thing
+  (fn [query]
+    (if-not (is-cacheable? query)
+      (qp query)
+      (run-query-with-cache qp query))))
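To make the flow above concrete, here is a rough usage sketch (not part of this diff; the mock qp function, query map, and setting values are assumptions for illustration only):

    (require '[metabase.query-processor.middleware.cache :as cache])

    ;; wrap a mock query-processor function with the caching middleware
    (def qp (cache/maybe-return-cached-results
              (fn [_query] {:status :completed, :row_count 1, :data {:rows [[100]]}})))

    ;; first run: nothing is cached yet, so the wrapped qp runs and -- assuming `enable-query-caching`
    ;; is true and the run took at least `query-caching-min-ttl` seconds -- the results are saved
    (qp {:database 1, :type :native, :native {:query "SELECT 100"}, :cache_ttl 60})
    ;; => {:status :completed, :row_count 1, :data {:rows [[100]]}}

    ;; second run within 60 seconds: the results come straight from the cache backend,
    ;; with :cached true and the :updated_at timestamp added by `cached-results`
    (qp {:database 1, :type :native, :native {:query "SELECT 100"}, :cache_ttl 60})
    ;; => {:status :completed, :row_count 1, :data {:rows [[100]]}, :cached true, :updated_at #inst "..."}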
diff --git a/src/metabase/query_processor/middleware/cache_backend/db.clj b/src/metabase/query_processor/middleware/cache_backend/db.clj
new file mode 100644
index 0000000000000000000000000000000000000000..4964d8a88a83ad027b33f3781bbc0e965e568585
--- /dev/null
+++ b/src/metabase/query_processor/middleware/cache_backend/db.clj
@@ -0,0 +1,42 @@
+(ns metabase.query-processor.middleware.cache-backend.db
+  (:require [toucan.db :as db]
+            (metabase.models [interface :as models]
+                             [query-cache :refer [QueryCache]])
+            [metabase.public-settings :as public-settings]
+            [metabase.query-processor.middleware.cache-backend.interface :as i]
+            [metabase.util :as u]))
+
+(defn- cached-results
+  "Return cached results for QUERY-HASH if they exist and are newer than MAX-AGE-SECONDS."
+  [query-hash max-age-seconds]
+  (when-let [{:keys [results updated_at]} (db/select-one [QueryCache :results :updated_at]
+                                            :query_hash query-hash
+                                            :updated_at [:>= (u/->Timestamp (- (System/currentTimeMillis)
+                                                                               (* 1000 max-age-seconds)))])]
+    (assoc results :updated_at updated_at)))
+
+(defn- purge-old-cache-entries!
+  "Delete any cache entries that are older than the global max age `max-cache-entry-age-seconds` (currently 3 months)."
+  []
+  (db/simple-delete! QueryCache
+    :updated_at [:<= (u/->Timestamp (- (System/currentTimeMillis)
+                                       (* 1000 (public-settings/query-caching-max-ttl))))]))
+
+(defn- save-results!
+  "Save the RESULTS of query with QUERY-HASH, updating an existing QueryCache entry
+  if one already exists, otherwise creating a new entry."
+  [query-hash results]
+  (purge-old-cache-entries!)
+  (or (db/update-where! QueryCache {:query_hash query-hash}
+        :updated_at (u/new-sql-timestamp)
+        :results    (models/compress results)) ; have to manually call these here since Toucan doesn't call type conversion fns for update-where! (yet)
+      (db/insert! QueryCache
+        :query_hash query-hash
+        :results    results))
+  :ok)
+
+(def instance
+  "Implementation of `IQueryProcessorCacheBackend` that uses the database for caching results."
+  (reify i/IQueryProcessorCacheBackend
+    (cached-results [_ query-hash max-age-seconds] (cached-results query-hash max-age-seconds))
+    (save-results!  [_ query-hash results]         (save-results! query-hash results))))
diff --git a/src/metabase/query_processor/middleware/cache_backend/interface.clj b/src/metabase/query_processor/middleware/cache_backend/interface.clj
new file mode 100644
index 0000000000000000000000000000000000000000..4dbf762ca981c6f32ee4cc28fa0d0995f5be2c46
--- /dev/null
+++ b/src/metabase/query_processor/middleware/cache_backend/interface.clj
@@ -0,0 +1,37 @@
+(ns metabase.query-processor.middleware.cache-backend.interface
+  "Interface used to define different Query Processor cache backends.
+   Defining a backend is straightforward: define a new namespace with the pattern
+
+     metabase.query-processor.middleware.cache-backend.<backend>
+
+   Where backend is a key representing the backend, e.g. `db`, `redis`, or `memcached`.
+
+   In that namespace, create an object that reifies (or otherwise implements) `IQueryProcessorCacheBackend`.
+   This object *must* be stored in a var called `instance`.
+
+   That's it. See `metabase.query-processor.middleware.cache-backend.db` for a complete example of how this is done.")
+
+
+(defprotocol IQueryProcessorCacheBackend
+  "Protocol that different Metabase cache backends must implement.
+
+   QUERY-HASH as passed below is a byte-array representing a 256-bit SHA3 hash; encode this as needed for use as a
+   cache entry key. RESULTS are passed (and should be returned) as a Clojure object, and individual backends are free
+   to encode this as appropriate when storing the results. (It's probably not a bad idea to compress the results; this
+   is what the `:db` backend does.)"
+
+  (cached-results [this, query-hash, ^Integer max-age-seconds]
+    "Return cached results for the query with byte array QUERY-HASH if those results are present in the cache and are less
+     than MAX-AGE-SECONDS old. Otherwise, return `nil`.
+
+     This method must also return a Timestamp of when the query was last run. This must be `assoc`ed with the query results
+     under the key `:updated_at`.
+
+    (cached-results [_ query-hash max-age-seconds]
+      (when-let [[results updated-at] (maybe-fetch-results query-hash max-age-seconds)]
+        (assoc results :updated_at updated-at)))")
+
+  (save-results! [this query-hash results]
+    "Add a cache entry with the RESULTS of running query with byte array QUERY-HASH.
+     This should replace any prior entries for QUERY-HASH and update the cache timestamp to the current system time.
+     (This is also an appropriate point to purge any entries older than the value of the `query-caching-max-ttl` Setting.)"))
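As a point of reference, a minimal custom backend satisfying this protocol could look like the sketch below (hypothetical namespace, purely in-memory, not part of this diff; the atom-based store and hex encoding of the hash are illustration-only choices):

    (ns metabase.query-processor.middleware.cache-backend.memory
      (:require [buddy.core.codecs :as codecs]
                [metabase.query-processor.middleware.cache-backend.interface :as i]))

    ;; cache entries keyed by the hex-encoded query hash
    (defonce ^:private cache (atom {}))

    (def instance
      "The var the cache middleware resolves when this backend is selected via MB_QP_CACHE_BACKEND."
      (reify i/IQueryProcessorCacheBackend
        (cached-results [_ query-hash max-age-seconds]
          (let [k (codecs/bytes->hex query-hash)]
            (when-let [{:keys [results updated-at-ms]} (get @cache k)]
              (when (<= (- (System/currentTimeMillis) updated-at-ms)
                        (* 1000 max-age-seconds))
                ;; the middleware expects a temporal :updated_at on cached results
                (assoc results :updated_at (java.sql.Timestamp. updated-at-ms))))))
        (save-results! [_ query-hash results]
          (swap! cache assoc (codecs/bytes->hex query-hash)
                 {:results results, :updated-at-ms (System/currentTimeMillis)})
          :ok)))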
diff --git a/src/metabase/query_processor/util.clj b/src/metabase/query_processor/util.clj
index 3846fb546662a6f6e15dd7d0f4ac8dcb01127db0..dec320e4dc655fec44f52390c155f7996624686f 100644
--- a/src/metabase/query_processor/util.clj
+++ b/src/metabase/query_processor/util.clj
@@ -3,8 +3,7 @@
   (:require (buddy.core [codecs :as codecs]
                         [hash :as hash])
             [cheshire.core :as json]
-            [toucan.db :as db]
-            [metabase.models.query-execution :refer [QueryExecution]]))
+            [toucan.db :as db]))
 
 (defn mbql-query?
   "Is the given query an MBQL query?"
@@ -41,7 +40,7 @@
    (This is done so irrelevant info or options that don't affect query results doesn't result in the same query producing different hashes.)"
   [query]
   {:pre [(map? query)]}
-  (let [{:keys [constraints parameters], :as query} (select-keys query [:database :type :query :parameters :constraints])]
+  (let [{:keys [constraints parameters], :as query} (select-keys query [:database :type :query :native :parameters :constraints])]
     (cond-> query
       (empty? constraints) (dissoc :constraints)
       (empty? parameters)  (dissoc :parameters))))
@@ -50,17 +49,3 @@
   "Return a 256-bit SHA3 hash of QUERY as a key for the cache. (This is returned as a byte array.)"
   [query]
   (hash/sha3-256 (json/generate-string (select-keys-for-hashing query))))
-
-
-;;; ------------------------------------------------------------ Historic Duration Info ------------------------------------------------------------
-
-(defn query-average-duration
-  "Return the average running time of QUERY over the last 10 executions in milliseconds.
-   Returns `nil` if there's not available data."
-  ^Float [query]
-  (when-let [running-times (db/select-field :running_time QueryExecution
-                             :hash (query-hash query)
-                             {:order-by [[:started_at :desc]]
-                              :limit    10})]
-    (float (/ (reduce + running-times)
-              (count running-times)))))
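Because `:native` is now part of the hash input, two native queries with different SQL hash differently; a quick REPL-style check (mirrored by a new test further down in this diff):

    (require '[metabase.query-processor.util :as qputil])

    (java.util.Arrays/equals
      (qputil/query-hash {:database 1, :type "native", :native {:query "SELECT 1"}})
      (qputil/query-hash {:database 1, :type "native", :native {:query "SELECT 2"}}))
    ;; => false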
diff --git a/src/metabase/util/honeysql_extensions.clj b/src/metabase/util/honeysql_extensions.clj
index 66a5ccaef18b7da1b29dd32bd7fce661c6dd6aae..5b21d722754cf15f64f30df2d3a4e9a6e56e6331 100644
--- a/src/metabase/util/honeysql_extensions.clj
+++ b/src/metabase/util/honeysql_extensions.clj
@@ -92,10 +92,15 @@
   (hsql/call :cast x (hsql/raw (name c))))
 
 (defn format
-  "SQL `FORMAT` function."
+  "SQL `format` function."
   [format-str expr]
   (hsql/call :format expr (literal format-str)))
 
+(defn round
+  "SQL `round` function."
+  [x decimal-places]
+  (hsql/call :round x decimal-places))
+
 (defn ->date                     "CAST X to a `date`."                     [x] (cast :date x))
 (defn ->datetime                 "CAST X to a `datetime`."                 [x] (cast :datetime x))
 (defn ->timestamp                "CAST X to a `timestamp`."                [x] (cast :timestamp x))
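For reference, the new `round` helper just emits a standard two-argument SQL ROUND call; a small sketch of the generated SQL (column and table names here are hypothetical, and the exact string depends on HoneySQL's quoting settings):

    (require '[honeysql.core :as hsql]
             '[metabase.util.honeysql-extensions :as hx])

    (hsql/format {:select [(hx/round :running_time 0)]
                  :from   [:query_execution]})
    ;; => ["SELECT round(running_time, 0) FROM query_execution"]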
diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj
index 34a759795d2831efa8e133687e49cebdbeaf7780..1a3bb3e25ce76b6cc7980a4b3f2899d09d88a7a5 100644
--- a/test/metabase/api/card_test.clj
+++ b/test/metabase/api/card_test.clj
@@ -36,7 +36,8 @@
    :embedding_params  nil
    :made_public_by_id nil
    :public_uuid       nil
-   :query_type        "query"})
+   :query_type        "query"
+   :cache_ttl         nil})
 
 ;; ## GET /api/card
 ;; Filter cards by database
diff --git a/test/metabase/api/dataset_test.clj b/test/metabase/api/dataset_test.clj
index 20a86830f56f070d9df5a9b72ec7eb20930ce132..637acfad10827deecd13941e0ba37f3ff514f5b3 100644
--- a/test/metabase/api/dataset_test.clj
+++ b/test/metabase/api/dataset_test.clj
@@ -52,22 +52,23 @@
 ;; Just a basic sanity check to make sure Query Processor endpoint is still working correctly.
 (expect
   [;; API call response
-   {:data         {:rows    [[1000]]
-                   :columns ["count"]
-                   :cols    [{:base_type "type/Integer", :special_type "type/Number", :name "count", :display_name "count", :id nil, :table_id nil,
-                              :description nil, :target nil, :extra_info {}, :source "aggregation"}]
-                   :native_form true}
-    :row_count    1
-    :status       "completed"
-    :context      "ad-hoc"
-    :json_query   (-> (wrap-inner-query
-                        (query checkins
-                          (ql/aggregation (ql/count))))
-                      (assoc :type "query")
-                      (assoc-in [:query :aggregation] [{:aggregation-type "count", :custom-name nil}])
-                      (assoc :constraints default-query-constraints))
-    :started_at   true
-    :running_time true}
+   {:data                   {:rows    [[1000]]
+                             :columns ["count"]
+                             :cols    [{:base_type "type/Integer", :special_type "type/Number", :name "count", :display_name "count", :id nil, :table_id nil,
+                                        :description nil, :target nil, :extra_info {}, :source "aggregation"}]
+                             :native_form true}
+    :row_count              1
+    :status                 "completed"
+    :context                "ad-hoc"
+    :json_query             (-> (wrap-inner-query
+                                  (query checkins
+                                    (ql/aggregation (ql/count))))
+                                (assoc :type "query")
+                                (assoc-in [:query :aggregation] [{:aggregation-type "count", :custom-name nil}])
+                                (assoc :constraints default-query-constraints))
+    :started_at             true
+    :running_time           true
+    :average_execution_time nil}
    ;; QueryExecution record in the DB
    {:hash         true
     :row_count    1
diff --git a/test/metabase/cmd/load_from_h2_test.clj b/test/metabase/cmd/load_from_h2_test.clj
index c9a6c119457d11dc399285b187257b64bdddf860..3d3942f4773bce5f0f0b16c0e2fcd3145abd288e 100644
--- a/test/metabase/cmd/load_from_h2_test.clj
+++ b/test/metabase/cmd/load_from_h2_test.clj
@@ -13,7 +13,10 @@
 
 (def ^:private models-to-exclude
   "Models that should *not* be migrated in `load-from-h2`."
-  #{"LegacyQueryExecution"})
+  #{"LegacyQueryExecution"
+    "Query"
+    "QueryCache"
+    "QueryExecution"})
 
 (defn- all-model-names []
   (set (for [ns       (ns-find/find-namespaces (classpath/classpath))
diff --git a/test/metabase/events/revision_test.clj b/test/metabase/events/revision_test.clj
index 72c08795905261c829981144aba90e986bdc4abc..ef56ae4507f773109b1879a3d40dd5d45ef6f5bc 100644
--- a/test/metabase/events/revision_test.clj
+++ b/test/metabase/events/revision_test.clj
@@ -41,6 +41,7 @@
    :made_public_by_id      nil
    :name                   (:name card)
    :public_uuid            nil
+   :cache_ttl              nil
    :query_type             "query"
    :table_id               (id :categories)
    :visualization_settings {}})
diff --git a/test/metabase/permissions_collection_test.clj b/test/metabase/permissions_collection_test.clj
index a4d86a18d98bf5b22125afd3990a91eb20302c4a..575f572aff5b0f7cc72f275214c2455874c91293 100644
--- a/test/metabase/permissions_collection_test.clj
+++ b/test/metabase/permissions_collection_test.clj
@@ -19,9 +19,9 @@
 (defn- api-call-was-successful? {:style/indent 0} [response]
   (when (and (string? response)
              (not= response "You don't have permissions to do that."))
-    (println "users in db:" (db/select-field :email 'User)) ; NOCOMMIT
     (println "RESPONSE:" response)) ; DEBUG
-  (not= response "You don't have permissions to do that."))
+  (and (not= response "You don't have permissions to do that.")
+       (not= response "Unauthenticated")))
 
 (defn- can-run-query? [username]
   (api-call-was-successful? ((test-users/user->client username) :post (format "card/%d/query" (u/get-id *card:db2-count-of-venues*)))))
@@ -52,6 +52,7 @@
 (perms-test/expect-with-test-data
   true
   (tt/with-temp Collection [collection]
+    (println "[In the occasionally failing test]") ; DEBUG
     (set-card-collection! collection)
     (permissions/grant-collection-read-permissions! (group/all-users) collection)
     (can-run-query? :rasta)))
diff --git a/test/metabase/query_processor/middleware/cache_test.clj b/test/metabase/query_processor/middleware/cache_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..9a2f3afe91e5fcb54beedccd69ec417579ac5ef2
--- /dev/null
+++ b/test/metabase/query_processor/middleware/cache_test.clj
@@ -0,0 +1,187 @@
+(ns metabase.query-processor.middleware.cache-test
+  "Tests for the Query Processor cache."
+  (:require [expectations :refer :all]
+            [toucan.db :as db]
+            [metabase.models.query-cache :refer [QueryCache]]
+            [metabase.query-processor.middleware.cache :as cache]
+            [metabase.test.util :as tu]))
+
+(tu/resolve-private-vars metabase.query-processor.middleware.cache
+  is-cacheable?
+  results-are-below-max-byte-threshold?)
+
+(def ^:private mock-results
+  {:row_count 8
+   :status    :completed
+   :data      {:rows [[:toucan      71]
+                      [:bald-eagle  92]
+                      [:hummingbird 11]
+                      [:owl         10]
+                      [:chicken     69]
+                      [:robin       96]
+                      [:osprey      72]
+                      [:flamingo    70]]}})
+
+(def ^:private ^:dynamic ^Integer *query-execution-delay-ms* 0)
+
+(defn- mock-qp [& _]
+  (Thread/sleep *query-execution-delay-ms*)
+  mock-results)
+
+(def ^:private maybe-return-cached-results (cache/maybe-return-cached-results mock-qp))
+
+(defn- clear-cache! [] (db/simple-delete! QueryCache))
+
+(defn- cached? [results]
+  (if (:cached results)
+    :cached
+    :not-cached))
+
+(defn- run-query [& {:as query-kvs}]
+  (cached? (maybe-return-cached-results (merge {:cache_ttl 60, :query :abc} query-kvs))))
+
+
+;;; ------------------------------------------------------------ tests for is-cacheable? ------------------------------------------------------------
+
+;; something is cacheable if it includes a cache_ttl and the caching setting is enabled
+(expect
+  (tu/with-temporary-setting-values [enable-query-caching true]
+    (is-cacheable? {:cache_ttl 100})))
+
+(expect
+  false
+  (tu/with-temporary-setting-values [enable-query-caching false]
+    (is-cacheable? {:cache_ttl 100})))
+
+(expect
+  false
+  (tu/with-temporary-setting-values [enable-query-caching true]
+    (is-cacheable? {:cache_ttl nil})))
+
+
+;;; ------------------------------------------------------------ results-are-below-max-byte-threshold? ------------------------------------------------------------
+
+(expect
+  (tu/with-temporary-setting-values [query-caching-max-kb 128]
+    (results-are-below-max-byte-threshold? {:data {:rows [[1 "ABCDEF"]
+                                                          [3 "GHIJKL"]]}})))
+
+(expect
+  false
+  (tu/with-temporary-setting-values [query-caching-max-kb 1]
+    (results-are-below-max-byte-threshold? {:data {:rows (repeat 500 [1 "ABCDEF"])}})))
+
+;; check that `results-are-below-max-byte-threshold?` is lazy and fails fast if the query is over the threshold rather than serializing the entire thing
+(expect
+  false
+  (let [lazy-seq-realized? (atom false)]
+    (tu/with-temporary-setting-values [query-caching-max-kb 1]
+      (results-are-below-max-byte-threshold? {:data {:rows (lazy-cat (repeat 500 [1 "ABCDEF"])
+                                                                     (do (reset! lazy-seq-realized? true)
+                                                                         [2 "GHIJKL"]))}})
+      @lazy-seq-realized?)))
+
+
+;;; ------------------------------------------------------------ End-to-end middleware tests ------------------------------------------------------------
+
+;; if there's nothing in the cache, cached results should *not* be returned
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query)))
+
+;; if we run the query twice, the second run should return cached results
+(expect
+  :cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query)
+    (run-query)))
+
+;; ...but if the cache entry is past its TTL, the cached results shouldn't be returned
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query :cache_ttl 1)
+    (Thread/sleep 2000)
+    (run-query :cache_ttl 1)))
+
+;; if caching is disabled then cache shouldn't be used even if there's something valid in there
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query)
+    (tu/with-temporary-setting-values [enable-query-caching  false
+                                       query-caching-min-ttl 0]
+      (run-query))))
+
+
+;; check that `query-caching-max-kb` is respected and queries aren't cached if they're past the threshold
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-max-kb  0
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query)
+    (run-query)))
+
+;; check that `query-caching-max-ttl` is respected. Whenever a new query is cached, the cache should evict any entries older than `query-caching-max-ttl`.
+;; Set the max TTL to one second, run query `:abc`, then wait two seconds, and run `:def`. This should trigger a cache flush for entries past the max TTL,
+;; and the cached entry for `:abc` should be deleted. Running `:abc` again should not return cached results.
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-max-ttl 1
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query)
+    (Thread/sleep 2000)
+    (run-query, :query :def)
+    (run-query)))
+
+;; check that *ignore-cached-results* is respected when returning results...
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (run-query)
+    (binding [cache/*ignore-cached-results* true]
+      (run-query))))
+
+;; ...but if it's set those results should still be cached for next time.
+(expect
+  :cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 0]
+    (clear-cache!)
+    (binding [cache/*ignore-cached-results* true]
+      (run-query))
+    (run-query)))
+
+;; if the query takes less time than the min TTL to execute, the results shouldn't be cached
+(expect
+  :not-cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 60]
+    (clear-cache!)
+    (run-query)
+    (run-query)))
+
+;; ...but if it takes *longer* than the min TTL, the results should be cached
+(expect
+  :cached
+  (tu/with-temporary-setting-values [enable-query-caching  true
+                                     query-caching-min-ttl 1]
+    (binding [*query-execution-delay-ms* 1200]
+      (clear-cache!)
+      (run-query)
+      (run-query))))
diff --git a/test/metabase/query_processor/util_test.clj b/test/metabase/query_processor/util_test.clj
index 160278cdc2f1852da20ac0c35c5eda8b99d69a4e..357583558a259029c48d0101d354c459d7c69542 100644
--- a/test/metabase/query_processor/util_test.clj
+++ b/test/metabase/query_processor/util_test.clj
@@ -94,3 +94,14 @@
     (qputil/query-hash {:query :abc})
     (qputil/query-hash {:query :abc, :constraints nil})
     (qputil/query-hash {:query :abc, :constraints {}})))
+
+;; make sure two different native queries have different hashes!
+(expect
+  false
+  (array=
+    (qputil/query-hash {:database    2
+                        :type        "native"
+                        :native      {:query "SELECT pg_sleep(15), 1 AS one"}})
+    (qputil/query-hash {:database    2
+                        :type        "native"
+                        :native      {:query "SELECT pg_sleep(15), 2 AS two"}})))
diff --git a/test/metabase/query_processor_test.clj b/test/metabase/query_processor_test.clj
index 04989644a6e345eb4e400ba9936cf784657e10bd..3e0fb4f0391f77122d15acba55ae9645e5121b12 100644
--- a/test/metabase/query_processor_test.clj
+++ b/test/metabase/query_processor_test.clj
@@ -294,7 +294,7 @@
   {:style/indent 0}
   [results]
   (vec (or (get-in results [:data :rows])
-           (println (u/pprint-to-str 'red results))
+           (println (u/pprint-to-str 'red results)) ; DEBUG
            (throw (Exception. "Error!")))))
 
 (defn rows+column-names
diff --git a/test/metabase/sync_database_test.clj b/test/metabase/sync_database_test.clj
index e3f963012af804337e025d6502f7d67f0ff1ecf7..d44d2967019187c16c3f7692855d908b6abc442e 100644
--- a/test/metabase/sync_database_test.clj
+++ b/test/metabase/sync_database_test.clj
@@ -326,21 +326,23 @@
 
 
 ;;; -------------------- Make sure that if a Field's cardinality passes `metabase.sync-database.analyze/low-cardinality-threshold` (currently 300) (#3215) --------------------
+(defn- insert-range-sql [rang]
+  (str "INSERT INTO blueberries_consumed (num) VALUES "
+       (str/join ", " (for [n rang]
+                        (str "(" n ")")))))
+
 (expect
   false
   (let [details {:db (str "mem:" (tu/random-name) ";DB_CLOSE_DELAY=10")}]
     (binding [mdb/*allow-potentailly-unsafe-connections* true]
       (tt/with-temp Database [db {:engine :h2, :details details}]
-        (let [driver       (driver/engine->driver :h2)
-              spec         (sql/connection-details->spec driver details)
-              exec!        #(doseq [statement %]
-                              (println (jdbc/execute! spec [statement])))
-              insert-range #(str "INSERT INTO blueberries_consumed (num) VALUES "
-                                 (str/join ", " (for [n %]
-                                                  (str "(" n ")"))))]
+        (let [driver (driver/engine->driver :h2)
+              spec   (sql/connection-details->spec driver details)
+              exec!  #(doseq [statement %]
+                        (jdbc/execute! spec [statement]))]
           ;; create the `blueberries_consumed` table and insert a 100 values
           (exec! ["CREATE TABLE blueberries_consumed (num INTEGER NOT NULL);"
-                  (insert-range (range 100))])
+                  (insert-range-sql (range 100))])
           (sync-database! db, :full-sync? true)
           (let [table-id (db/select-one-id Table :db_id (u/get-id db))
                 field-id (db/select-one-id Field :table_id table-id)]
@@ -348,6 +350,6 @@
             (assert (= (count (db/select-one-field :values FieldValues :field_id field-id))
                        100))
             ;; ok, now insert enough rows to push the field past the `low-cardinality-threshold` and sync again, there should be no more field values
-            (exec! [(insert-range (range 100 (+ 100 @(resolve 'metabase.sync-database.analyze/low-cardinality-threshold))))])
+            (exec! [(insert-range-sql (range 100 (+ 100 @(resolve 'metabase.sync-database.analyze/low-cardinality-threshold))))])
             (sync-database! db, :full-sync? true)
             (db/exists? FieldValues :field_id field-id)))))))
diff --git a/test/metabase/test/data/druid.clj b/test/metabase/test/data/druid.clj
index 59d063b122a1466449b2f4e63ff5fba7feb91a44..44e68061b36aa592a8f8518f4084488c25fe052e 100644
--- a/test/metabase/test/data/druid.clj
+++ b/test/metabase/test/data/druid.clj
@@ -139,7 +139,7 @@
     (println "STATUS URL: " (str indexer-endpoint "/" task "/status"))
     (loop [remaining-seconds indexer-timeout-seconds]
       (let [status (get-in (GET status-url) [:status :status])]
-        (println (format "%s (%d seconds elapsed)" status (- indexer-timeout-seconds remaining-seconds)))
+        (printf "%s (%d seconds elapsed)\n" status (- indexer-timeout-seconds remaining-seconds))
         (when (not= status "SUCCESS")
           (when (<= remaining-seconds 0)
             (throw (Exception. (str "Failed to finish indexing druid data after " indexer-timeout-seconds " seconds!"))))
diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj
index 5a93ba82eb14fe52b3ceb367b61a30b36f82f0cb..49a3b4fe11ae5f064e3f9f7ccb691e58422c2611 100644
--- a/test/metabase/test/data/generic_sql.clj
+++ b/test/metabase/test/data/generic_sql.clj
@@ -13,7 +13,8 @@
                                 [interface :as i])
             [metabase.util :as u]
             [metabase.util.honeysql-extensions :as hx])
-  (:import clojure.lang.Keyword
+  (:import java.sql.SQLException
+           clojure.lang.Keyword
            (metabase.test.data.interface DatabaseDefinition
                                          FieldDefinition
                                          TableDefinition)))
@@ -218,7 +219,7 @@
                                           :quoting             (sql/quote-style driver)
                                           :allow-dashed-names? true)))]
     (try (jdbc/execute! spec sql+args)
-         (catch java.sql.SQLException e
+         (catch SQLException e
            (println (u/format-color 'red "INSERT FAILED: \n%s\n" sql+args))
            (jdbc/print-sql-exception-chain e)))))
 
@@ -249,15 +250,15 @@
       (let [sql (s/replace sql #";+" ";")]
         (try
           (jdbc/execute! (database->spec driver context dbdef) [sql] {:transaction? false, :multi? true})
-          (catch java.sql.SQLException e
+          (catch SQLException e
             (println "Error executing SQL:" sql)
-            (println (format "Caught SQLException:\n%s"
-                             (with-out-str (jdbc/print-sql-exception-chain e))))
+            (printf "Caught SQLException:\n%s\n"
+                    (with-out-str (jdbc/print-sql-exception-chain e)))
             (throw e))
           (catch Throwable e
             (println "Error executing SQL:" sql)
-            (println (format "Caught Exception: %s %s\n%s" (class e) (.getMessage e)
-                             (with-out-str (.printStackTrace e))))
+            (printf "Caught Exception: %s %s\n%s\n" (class e) (.getMessage e)
+                    (with-out-str (.printStackTrace e)))
             (throw e)))))))
 
 
diff --git a/test/metabase/test/data/sqlserver.clj b/test/metabase/test/data/sqlserver.clj
index 858ebdac8104c515b51a290010c551413c59999b..d4bb90ec570292308bc1cf8edddfa39f86105052 100644
--- a/test/metabase/test/data/sqlserver.clj
+++ b/test/metabase/test/data/sqlserver.clj
@@ -102,7 +102,7 @@
       (with-redefs [+suffix identity]
         (doseq [db leftover-dbs]
           (u/ignore-exceptions
-            (println (format "Deleting leftover SQL Server DB '%s'..." db))
+            (printf "Deleting leftover SQL Server DB '%s'...\n" db)
             ;; Don't try to kill other connections to this DB with SET SINGLE_USER -- some other instance (eg CI) might be using it
             (jdbc/execute! connection-spec [(format "DROP DATABASE \"%s\";" db)])
             (println "[ok]")))))))
diff --git a/test/metabase/test/data/users.clj b/test/metabase/test/data/users.clj
index d08a28c4017d6e02b3bd783927be181eb1144f30..5698ebae330aae5620c9422b05d3653aaa14eb40 100644
--- a/test/metabase/test/data/users.clj
+++ b/test/metabase/test/data/users.clj
@@ -9,7 +9,8 @@
             (metabase.models [permissions-group :as perms-group]
                              [user :refer [User]])
             [metabase.util :as u]
-            [metabase.test.util :refer [random-name]]))
+            [metabase.test.util :refer [random-name]])
+  (:import clojure.lang.ExceptionInfo))
 
 ;;; ------------------------------------------------------------ User Definitions ------------------------------------------------------------
 
@@ -59,7 +60,7 @@
      (when-not (init-status/complete?)
        (when (<= max-wait-seconds 0)
          (throw (Exception. "Metabase still hasn't finished initializing.")))
-       (println (format "Metabase is not yet initialized, waiting 1 second (max wait remaining: %d seconds)..." max-wait-seconds))
+       (printf "Metabase is not yet initialized, waiting 1 second (max wait remaining: %d seconds)...\n" max-wait-seconds)
        (Thread/sleep 1000)
        (recur (dec max-wait-seconds))))))
 
@@ -71,6 +72,7 @@
   {:pre [(string? email) (string? first) (string? last) (string? password) (m/boolean? superuser) (m/boolean? active)]}
   (wait-for-initiailization)
   (or (User :email email)
+      (println "Creating test user:" email) ; DEBUG
       (db/insert! User
         :email        email
         :first_name   first
@@ -123,7 +125,7 @@
     (fn [id]
       (@m id))))
 
-(def ^:private tokens (atom {}))
+(defonce ^:private tokens (atom {}))
 
 (defn- username->token [username]
   (or (@tokens username)
@@ -134,18 +136,15 @@
 (defn- client-fn [username & args]
   (try
     (apply http/client (username->token username) args)
-    (catch Throwable e
+    (catch ExceptionInfo e
       (let [{:keys [status-code]} (ex-data e)]
         (when-not (= status-code 401)
           (throw e))
         ;; If we got a 401 unauthenticated clear the tokens cache + recur
+        (printf "Got 401 (Unauthenticated) for %s. Clearing cached auth tokens and retrying request.\n" username) ; DEBUG
         (reset! tokens {})
         (apply client-fn username args)))))
 
-;; TODO - does it make sense just to make this a non-higher-order function? Or a group of functions, e.g.
-;; (GET :rasta 200 "field/10/values")
-;; vs.
-;; ((user->client :rasta) :get 200 "field/10/values")
 (defn user->client
   "Returns a `metabase.http-client/client` partially bound with the credentials for User with USERNAME.
    In addition, it forces lazy creation of the User if needed.
@@ -160,4 +159,4 @@
   "Delete all users besides the 4 persistent test users.
    This is a HACK to work around tests that don't properly clean up after themselves; one day we should be able to remove this. (TODO)"
   []
-  (db/delete! 'User :id [:not-in (map user->id [:crowberto :lucky :rasta :trashbird])]))
+  (db/delete! User :id [:not-in (map user->id [:crowberto :lucky :rasta :trashbird])]))
diff --git a/test/metabase/timeseries_query_processor_test.clj b/test/metabase/timeseries_query_processor_test.clj
index 5d05675e8e36c968b57db2d0015acc32577c06ec..81f48c2ed9db80654346434781a68d95975b2529 100644
--- a/test/metabase/timeseries_query_processor_test.clj
+++ b/test/metabase/timeseries_query_processor_test.clj
@@ -45,7 +45,7 @@
 
 (defn- data [results]
   (when-let [data (or (:data results)
-                      (println (u/pprint-to-str results)))]
+                      (println (u/pprint-to-str results)))] ; DEBUG
     (-> data
         (select-keys [:columns :rows])
         (update :rows vec))))