diff --git a/.clj-kondo/config.edn b/.clj-kondo/config.edn
index 40147f7f96c40ee48e6d8c18276ed6bff2240fa7..975078f718ca652136b70dacd56527e3f720b40c 100644
--- a/.clj-kondo/config.edn
+++ b/.clj-kondo/config.edn
@@ -586,6 +586,7 @@
     metabase.driver.sql-jdbc.test-util                            sql-jdbc.tu
     metabase.driver.sql.parameters.substitute                     sql.params.substitute
     metabase.driver.sql.parameters.substitution                   sql.params.substitution
+    metabase.eid-translation                                      eid-translation
     metabase.email-test                                           et
     metabase.email.messages                                       messages
     metabase.formatter                                            formatter
diff --git a/.github/workflows/loki.yml b/.github/workflows/loki.yml
index 2d983e244ea3bb54e486a5c5e982622eae954843..2204419b1eb2e8b584e3a9dae6611596fec69bcd 100644
--- a/.github/workflows/loki.yml
+++ b/.github/workflows/loki.yml
@@ -83,5 +83,6 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: loki-report
+          include-hidden-files: true
           path: .loki/
           if-no-files-found: ignore
diff --git a/.github/workflows/release-embedding-sdk.yml b/.github/workflows/release-embedding-sdk.yml
index 044c50f207322d3951408f6797d0b68fe9824cf1..db3978bc8ff5223743822df42aa2f52e9ac6fed5 100644
--- a/.github/workflows/release-embedding-sdk.yml
+++ b/.github/workflows/release-embedding-sdk.yml
@@ -5,11 +5,11 @@ on:
     inputs:
       # TODO: Add a version validation, so the workflow won't fail when publishing to npm
       sdk_version:
-        description: 'SDK version (e.g. 0.1.3)'
+        description: "SDK version (e.g. 0.1.3)"
         type: string
         required: true
       git_ref:
-        description: 'A git commit hash that will be used to build the SDK'
+        description: "A git commit hash that will be used to build the SDK"
         type: string
         required: true
 
@@ -115,42 +115,8 @@ jobs:
           name: sdk-changelog-diff
           path: ./changelog-diff
 
-  build-jar:
-    needs: test
-    uses: ./.github/workflows/uberjar-sdk.yml
-    with:
-      git_ref: ${{ inputs.git_ref }}
-      image_name: embedding-sdk-${{ inputs.sdk_version }}
-    secrets: inherit
-
-  upload-jar:
-    needs: build-jar
-    runs-on: ubuntu-22.04
-    timeout-minutes: 20
-    steps:
-      - uses: actions/download-artifact@v4
-        name: Retrieve uberjar artifact
-        with:
-          name: metabase-ee-uberjar
-
-      - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_S3_RELEASE_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_S3_RELEASE_SECRET_ACCESS_KEY }}
-          aws-region: ${{ vars.AWS_REGION }}
-
-      - name: Upload uberjar to S3
-        env:
-          BUCKET: ${{ vars.AWS_S3_DOWNLOADS_BUCKET }}
-          BUCKET_PATH: sdk/v${{ inputs.sdk_version }}/metabase.jar
-          FILE: ./target/uberjar/metabase.jar
-        shell: bash
-        run: |
-          aws s3 cp $FILE s3://$BUCKET/$BUCKET_PATH
-
   publish-npm:
-    needs: [build-sdk, upload-jar]
+    needs: build-sdk
     runs-on: ubuntu-22.04
     timeout-minutes: 20
     steps:
@@ -249,6 +215,6 @@ jobs:
           git tag ${{ env.tag }}
 
       - name: Push the new tag
-        id : push-tag
+        id: push-tag
         run: |
           git push origin ${{ env.tag }}
diff --git a/.github/workflows/uberjar-sdk.yml b/.github/workflows/uberjar-sdk.yml
deleted file mode 100644
index ff8837fe657780e63490102e02648ea3e3d0f5c1..0000000000000000000000000000000000000000
--- a/.github/workflows/uberjar-sdk.yml
+++ /dev/null
@@ -1,157 +0,0 @@
-name: Build + Docker Uberjar for SDK
-
-on:
-  # This workflow will be called from "release-embedding-sdk.yml"
-  workflow_call:
-    inputs:
-      git_ref:
-        description: 'Valid git reference e.g. branches, tags, hashes'
-        type: string
-        required: true
-      image_name:
-        description: Docker image used to upload to Dockerhub
-        type: string
-        required: true
-
-jobs:
-  build:
-    name: Build MB ${{ matrix.edition }}
-    runs-on: ubuntu-22.04
-    timeout-minutes: 40
-    strategy:
-      matrix:
-        edition: [ee]
-    env:
-      MB_EDITION: ${{ matrix.edition }}
-      INTERACTIVE: false
-    steps:
-    - name: Check out the code
-      uses: actions/checkout@v4
-      with:
-        ref: ${{ inputs.git_ref }}
-    - name: Prepare front-end environment
-      uses: ./.github/actions/prepare-frontend
-    - name: Prepare back-end environment
-      uses: ./.github/actions/prepare-backend
-      with:
-        m2-cache-key: uberjar
-    - name: Build
-      run: ./bin/build.sh
-    - name: Prepare uberjar artifact
-      uses: ./.github/actions/prepare-uberjar-artifact
-      with:
-        name: metabase-ee-uberjar
-
-  check_jar_health:
-    runs-on: ubuntu-22.04
-    name: Is ${{ matrix.edition }} (java ${{ matrix.java-version }}) healthy?
-    needs: build
-    timeout-minutes: 10
-    strategy:
-      matrix:
-        edition: [ee]
-        java-version: [11, 17]
-    steps:
-    - name: Prepare JRE (Java Run-time Environment)
-      uses: actions/setup-java@v4
-      with:
-        java-package: jre
-        java-version: ${{ matrix.java-version }}
-        distribution: 'temurin'
-    - run: java -version
-    - uses: actions/download-artifact@v4
-      name: Retrieve uberjar artifact
-      with:
-        name: metabase-ee-uberjar
-    - name: Launch uberjar
-      run: java -jar ./target/uberjar/metabase.jar &
-    - name: Wait for Metabase to start
-      run: while ! curl 'http://localhost:3000/api/health' | grep '{"status":"ok"}'; do sleep 1; done
-
-  containerize_test_and_push_container:
-    runs-on: ubuntu-22.04
-    name: Containerize ${{ matrix.edition }}
-    needs: check_jar_health
-    strategy:
-      matrix:
-        edition: [ee]
-    services:
-      registry:
-        image: registry:2
-        ports:
-          - 5000:5000
-    steps:
-      - name: Extract and clean branch name
-        shell: bash
-        run: echo "branch=$(echo $GITHUB_REF_NAME | sed 's/[^-._a-zA-Z0-9]/-/g')" >> $GITHUB_OUTPUT
-        id: extract_branch
-
-      - name: Set image name to env
-        run: echo "image_name=${{ inputs.image_name || steps.extract_branch.outputs.branch }}" >> "$GITHUB_ENV"
-
-      - name: Verify the intended tag of the container image
-        run: echo "Container image will be tagged as ${{ env.image_name }}-${{ matrix.edition }}"
-
-      - name: Check out the code (Dockerfile needed)
-        uses: actions/checkout@v4
-        with:
-          ref: ${{ inputs.git_ref }}
-
-      - name: Download uploaded artifacts to insert into container
-        uses: actions/download-artifact@v4
-        with:
-          name: metabase-ee-uberjar
-          path: bin/docker/
-
-      - name: Move the ${{ matrix.edition }} uberjar to the context dir
-        run: mv bin/docker/target/uberjar/metabase.jar bin/docker/.
-
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v2.5.0
-        with:
-          driver-opts: network=host
-
-      - name: Build ${{ matrix.edition }} container
-        uses: docker/build-push-action@v3
-        with:
-          context: bin/docker/.
-          platforms: linux/amd64
-          network: host
-          tags: localhost:5000/metabase-dev:${{ env.image_name }}-${{ matrix.edition }}
-          no-cache: true
-          push: true
-
-      - name: Launch ${{ matrix.edition }} container
-        run: docker run --rm -dp 3000:3000 localhost:5000/metabase-dev:${{ env.image_name }}-${{ matrix.edition }}
-        timeout-minutes: 5
-
-      - name: Is Docker running?
-        run: docker ps
-
-      - name: Wait for Metabase to start and reach 100% health
-        run: while ! curl -s 'http://localhost:3000/api/health' | grep '{"status":"ok"}'; do sleep 1; done
-        timeout-minutes: 3
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Retag and push images
-        run: docker tag localhost:5000/metabase-dev:${{ env.image_name }}-ee ${{ github.repository_owner }}/metabase-dev:${{ env.image_name }} && docker push ${{ github.repository_owner }}/metabase-dev:${{ env.image_name }}
-
-      - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@master
-        env:
-          TRIVY_OFFLINE_SCAN: true
-        with:
-          image-ref: docker.io/${{ github.repository_owner }}/metabase-dev:${{ env.image_name }}
-          format: sarif
-          output: trivy-results.sarif
-
-      - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@v2
-        with:
-          sarif_file: 'trivy-results.sarif'
diff --git a/.loki/reference/chrome_laptop_embed_PublicOrEmbeddedDashboardView_Transparent_Theme_No_Background_Scroll.png b/.loki/reference/chrome_laptop_embed_PublicOrEmbeddedDashboardView_Transparent_Theme_No_Background_Scroll.png
index 89f009957507a138d568ebe5f5a9da1dfa5a5e3b..66a8f9a41e7b900920b3aa820d2935e03750e449 100644
Binary files a/.loki/reference/chrome_laptop_embed_PublicOrEmbeddedDashboardView_Transparent_Theme_No_Background_Scroll.png and b/.loki/reference/chrome_laptop_embed_PublicOrEmbeddedDashboardView_Transparent_Theme_No_Background_Scroll.png differ
diff --git a/docs/releases.md b/docs/releases.md
index 0f1cf2f9fa08b2184205e491156fc1e08ad5e400..5d86b3bb2a02306bf8a42b9e46b0f75660c6f27f 100644
--- a/docs/releases.md
+++ b/docs/releases.md
@@ -19,6 +19,7 @@ To see what's new, check out all the [major release announcements](https://www.m
 
 ## Metabase Enterprise Edition releases
 
+- [v1.50.30](https://github.com/metabase/metabase/releases/tag/v1.50.30)
 - [v1.50.29](https://github.com/metabase/metabase/releases/tag/v1.50.29)
 - [v1.50.28](https://github.com/metabase/metabase/releases/tag/v1.50.28)
 - [v1.50.27](https://github.com/metabase/metabase/releases/tag/v1.50.27)
@@ -208,6 +209,7 @@ To see what's new, check out all the [major release announcements](https://www.m
 
 ## Metabase Open Source Edition releases
 
+- [v0.50.30](https://github.com/metabase/metabase/releases/tag/v0.50.30)
 - [v0.50.29](https://github.com/metabase/metabase/releases/tag/v0.50.29)
 - [v0.50.28](https://github.com/metabase/metabase/releases/tag/v0.50.28)
 - [v0.50.27](https://github.com/metabase/metabase/releases/tag/v0.50.27)
diff --git a/e2e/support/helpers/api/createQuestion.ts b/e2e/support/helpers/api/createQuestion.ts
index 8775cd77a11b43409d2de1679d4bffca73baadae..a7a6751890612fe9cd2ec3dbe58fe1e1a195a784 100644
--- a/e2e/support/helpers/api/createQuestion.ts
+++ b/e2e/support/helpers/api/createQuestion.ts
@@ -6,6 +6,8 @@ import type {
   StructuredQuery,
 } from "metabase-types/api";
 
+import { visitMetric, visitModel, visitQuestion } from "../e2e-misc-helpers";
+
 export type QuestionDetails = {
   dataset_query: DatasetQuery;
   /**
@@ -123,7 +125,7 @@ export const question = (
   }: QuestionDetails,
   {
     loadMetadata = false,
-    visitQuestion = false,
+    visitQuestion: shouldVisitQuestion = false,
     wrapId = false,
     idAlias = "questionId",
     interceptAlias = "cardQuery",
@@ -161,21 +163,17 @@ export const question = (
         });
       }
 
-      if (loadMetadata || visitQuestion) {
+      if (loadMetadata || shouldVisitQuestion) {
         if (type === "model") {
-          cy.intercept("POST", "/api/dataset").as("dataset");
-          cy.visit(`/model/${body.id}`);
-          cy.wait("@dataset"); // Wait for `result_metadata` to load
+          visitModel(body.id);
         } else if (type === "metric") {
-          cy.intercept("POST", "/api/dataset").as("dataset");
-          cy.visit(`/metric/${body.id}`);
-          cy.wait("@dataset"); // Wait for `result_metadata` to load
+          visitMetric(body.id);
         } else {
           // We need to use the wildcard because endpoint for pivot tables has the following format: `/api/card/pivot/${id}/query`
           cy.intercept("POST", `/api/card/**/${body.id}/query`).as(
             interceptAlias,
           );
-          cy.visit(`/question/${body.id}`);
+          visitQuestion(body.id);
           cy.wait("@" + interceptAlias); // Wait for `result_metadata` to load
         }
       }
diff --git a/e2e/support/helpers/e2e-downloads-helpers.ts b/e2e/support/helpers/e2e-downloads-helpers.ts
index 4743d5b4dffa0b9548899aa7d336656907ec7903..364750819476711fcee24e2dbce958103ec9a646 100644
--- a/e2e/support/helpers/e2e-downloads-helpers.ts
+++ b/e2e/support/helpers/e2e-downloads-helpers.ts
@@ -23,8 +23,17 @@ interface DownloadAndAssertParams {
   publicUuid?: string;
   dashboardId?: number;
   enableFormatting?: boolean;
+  enablePivoting?: boolean;
 }
 
+export const exportFromDashcard = (format: string) => {
+  popover().within(() => {
+    cy.findByText("Download results").click();
+    cy.findByText(format).click();
+    cy.findByTestId("download-results-button").click();
+  });
+};
+
 /**
  * Trigger the download of CSV or XLSX files and assert on the results in the related sheet.
  * It applies to both unsaved questions (queries) and the saved ones.
@@ -42,6 +51,7 @@ export function downloadAndAssert(
     downloadMethod = "POST",
     isDashboard,
     enableFormatting = true,
+    enablePivoting = false,
   }: DownloadAndAssertParams,
   callback: (data: unknown) => void,
 ) {
@@ -84,11 +94,22 @@ export function downloadAndAssert(
   } else {
     cy.findByTestId("download-button").click();
   }
-  // Initiate the file download
-  if (!enableFormatting) {
-    cy.window().trigger("keydown", { key: "Alt" });
-  }
-  popover().findByText(`.${fileType}`).click();
+
+  popover().within(() => {
+    cy.findByText(`.${fileType}`).click();
+
+    const formattingButtonLabel = enableFormatting
+      ? "Formatted"
+      : "Unformatted";
+
+    cy.findByText(formattingButtonLabel).click();
+
+    if (enablePivoting) {
+      cy.findByText("Keep data pivoted").click();
+    }
+
+    cy.findByTestId("download-results-button").click();
+  });
 
   cy.wait("@fileDownload")
     .its("request")
diff --git a/e2e/test/scenarios/dashboard/dashboard.cy.spec.js b/e2e/test/scenarios/dashboard/dashboard.cy.spec.js
index 89408a98f6094a6954ff07467c587e50538da479..98fbe640a6ef344b3a82c4839ca0bf24b272e410 100644
--- a/e2e/test/scenarios/dashboard/dashboard.cy.spec.js
+++ b/e2e/test/scenarios/dashboard/dashboard.cy.spec.js
@@ -1156,8 +1156,8 @@ describeWithSnowplow("scenarios > dashboard", () => {
 
       expectGoodSnowplowEvent({
         event: "new_iframe_card_created",
-        dashboard_id: id,
-        domain_name: "example.com",
+        target_id: id,
+        event_detail: "example.com",
       });
     });
   });
diff --git a/e2e/test/scenarios/dashboard/x-rays.cy.spec.js b/e2e/test/scenarios/dashboard/x-rays.cy.spec.js
index f9d4ebf564df1c3e7ab767e5253d73d5cca1d24c..9b7bd6dfb90e4cd2489b32be825d47070b4c87b0 100644
--- a/e2e/test/scenarios/dashboard/x-rays.cy.spec.js
+++ b/e2e/test/scenarios/dashboard/x-rays.cy.spec.js
@@ -6,6 +6,7 @@ import {
   assertEChartsTooltip,
   cartesianChartCircle,
   chartPathWithFillColor,
+  dashboardGrid,
   getDashboardCards,
   main,
   popover,
@@ -364,6 +365,46 @@ describe("scenarios > x-rays", { tags: "@slow" }, () => {
     );
     cy.findByTestId("dashboard-grid").should("have.css", "max-width", "1048px");
   });
+
+  it("should render all cards without errors (metabase#48519)", () => {
+    cy.intercept("POST", "/api/dataset").as("dataset");
+
+    cy.visit(`/auto/dashboard/table/${ORDERS_ID}`);
+    // There're 8 questions on the Orders x-ray dashboard
+    cy.wait(Array(8).fill("@dataset"), { timeout: 60 * 1000 });
+
+    getDashcardByTitle("Total transactions")
+      .findByText("18,760")
+      .should("exist");
+    getDashcardByTitle("Transactions in the last 30 days")
+      .findByTestId("scalar-value")
+      .should("exist"); // not asserting a value as it's dynamic
+    getDashcardByTitle("Average quantity per month").within(() => {
+      cy.findByText("Average of Quantity").should("exist");
+      cy.findByText("Created At").should("exist");
+    });
+    getDashcardByTitle("Sales per source").within(() => {
+      cy.findByText("Organic").should("exist");
+      cy.findByText("Affiliate").should("exist");
+      cy.findByText("Count").should("exist");
+      cy.findByText("Created At").should("exist");
+    });
+    getDashcardByTitle("Sales per product").within(() => {
+      cy.findByText("Product → Title").should("exist");
+      cy.findByText("Aerodynamic Bronze Hat").should("exist");
+    });
+    getDashcardByTitle("Sales for each product category").within(() => {
+      cy.findByText("Product → Category").should("exist");
+      cy.findByText("Doohickey").should("exist");
+      cy.findByText("Count").should("exist");
+    });
+    getDashcardByTitle("Sales per state")
+      .findAllByTestId("choropleth-feature")
+      .should("have.length", 50); // 50 states
+    getDashcardByTitle("Sales by coordinates")
+      .findByText("Leaflet")
+      .should("exist");
+  });
 });
 
 function waitForSatisfyingResponse(
@@ -383,3 +424,7 @@ function waitForSatisfyingResponse(
     }
   });
 }
+
+function getDashcardByTitle(title) {
+  return dashboardGrid().findByText(title).closest("[data-testid='dashcard']");
+}
diff --git a/e2e/test/scenarios/embedding/embed-resource-downloads.cy.spec.ts b/e2e/test/scenarios/embedding/embed-resource-downloads.cy.spec.ts
index 5d004cdafe2c8f3536c8802348a83264c027bb55..534b746e04f423b244e5c2d47b7d2014e16e53d1 100644
--- a/e2e/test/scenarios/embedding/embed-resource-downloads.cy.spec.ts
+++ b/e2e/test/scenarios/embedding/embed-resource-downloads.cy.spec.ts
@@ -6,6 +6,7 @@ import {
   describeWithSnowplowEE,
   expectGoodSnowplowEvent,
   expectNoBadSnowplowEvents,
+  exportFromDashcard,
   getDashboardCardMenu,
   main,
   popover,
@@ -109,9 +110,7 @@ describeWithSnowplowEE(
 
         showDashboardCardActions();
         getDashboardCardMenu().click();
-        popover().findByText("Download results").click();
-        popover().findByText(".csv").click();
-
+        exportFromDashcard(".csv");
         cy.verifyDownload(".csv", { contains: true });
 
         expectGoodSnowplowEvent({
@@ -171,7 +170,10 @@ describeWithSnowplowEE(
         waitLoading();
 
         cy.findByTestId("download-button").click();
-        popover().findByText(".png").click();
+        popover().within(() => {
+          cy.findByText(".png").click();
+          cy.findByTestId("download-results-button").click();
+        });
 
         cy.verifyDownload(".png", { contains: true });
 
@@ -200,7 +202,10 @@ describeWithSnowplowEE(
 
         cy.findByTestId("download-button").click();
 
-        popover().findByText(".csv").click();
+        popover().within(() => {
+          cy.findByText(".csv").click();
+          cy.findByTestId("download-results-button").click();
+        });
 
         cy.verifyDownload(".csv", { contains: true });
 
diff --git a/e2e/test/scenarios/embedding/embedding-questions.cy.spec.js b/e2e/test/scenarios/embedding/embedding-questions.cy.spec.js
index 8570be2da341ba8d40f36fece8d656a57883f635..136d359df9257bd5da96257640ba279c8cc0d632 100644
--- a/e2e/test/scenarios/embedding/embedding-questions.cy.spec.js
+++ b/e2e/test/scenarios/embedding/embedding-questions.cy.spec.js
@@ -299,7 +299,7 @@ describeEE("scenarios > embedding > questions > downloads", () => {
         cy.findByRole("contentinfo").icon("download").click();
 
         popover().within(() => {
-          cy.findByText("Download full results");
+          cy.findAllByText("Download").should("have.length", 2);
           cy.findByText(".csv");
           cy.findByText(".xlsx");
           cy.findByText(".json");
diff --git a/e2e/test/scenarios/embedding/interactive-embedding.cy.spec.js b/e2e/test/scenarios/embedding/interactive-embedding.cy.spec.js
index 3c37c277ff8fb032a1e0808a179e5b6d37937f63..a21567857d5af56ce7c30361dad13a4601e25209 100644
--- a/e2e/test/scenarios/embedding/interactive-embedding.cy.spec.js
+++ b/e2e/test/scenarios/embedding/interactive-embedding.cy.spec.js
@@ -11,6 +11,7 @@ import {
   describeEE,
   entityPickerModal,
   entityPickerModalTab,
+  exportFromDashcard,
   getDashboardCard,
   getDashboardCardMenu,
   getNextUnsavedDashboardCardId,
@@ -553,18 +554,17 @@ describeEE("scenarios > embedding > full app", () => {
           res.headers["X-Metabase-Anti-CSRF-Token"] = CSRF_TOKEN;
         });
       });
-      cy.intercept(
-        "POST",
-        "/api/dashboard/*/dashcard/*/card/*/query/csv?format_rows=true",
-      ).as("CsvDownload");
+      cy.intercept("POST", "/api/dashboard/*/dashcard/*/card/*/query/csv").as(
+        "CsvDownload",
+      );
       visitDashboardUrl({
         url: `/dashboard/${ORDERS_DASHBOARD_ID}`,
       });
 
       getDashboardCard().realHover();
       getDashboardCardMenu().click();
-      popover().findByText("Download results").click();
-      popover().findByText(".csv").click();
+
+      exportFromDashcard(".csv");
 
       cy.wait("@CsvDownload").then(interception => {
         expect(
diff --git a/e2e/test/scenarios/metrics/browse.cy.spec.ts b/e2e/test/scenarios/metrics/browse.cy.spec.ts
index 55834279e7a95e1f2dfb218df72a320993a98e29..caf13fa72991557d2e4c6fed085a9da438c07457 100644
--- a/e2e/test/scenarios/metrics/browse.cy.spec.ts
+++ b/e2e/test/scenarios/metrics/browse.cy.spec.ts
@@ -111,11 +111,24 @@ describe("scenarios > browse > metrics", () => {
 
     it("should show the empty metrics page", () => {
       cy.visit("/browse/metrics");
-      main()
-        .findByText(
+      main().within(() => {
+        cy.findByText(
           "Create Metrics to define the official way to calculate important numbers for your team",
-        )
-        .should("be.visible");
+        ).should("be.visible");
+        cy.findByText("Create metric").should("be.visible").click();
+      });
+      cy.location("pathname").should("eq", "/metric/query");
+    });
+
+    it("should not show the create metric button if the user does not have data access", () => {
+      cy.signInAsSandboxedUser();
+      cy.visit("/browse/metrics");
+      main().within(() => {
+        cy.findByText(
+          "Create Metrics to define the official way to calculate important numbers for your team",
+        ).should("be.visible");
+        cy.findByText("Create metric").should("not.exist");
+      });
     });
   });
 
diff --git a/e2e/test/scenarios/metrics/metrics-question.cy.spec.js b/e2e/test/scenarios/metrics/metrics-question.cy.spec.js
index a86e8deea8136bd319a6a6ec22b79d23a8fffb3f..0ec2507ea38f660831a5cca13391da926a160bf4 100644
--- a/e2e/test/scenarios/metrics/metrics-question.cy.spec.js
+++ b/e2e/test/scenarios/metrics/metrics-question.cy.spec.js
@@ -15,6 +15,7 @@ import {
   popover,
   queryBuilderHeader,
   restore,
+  summarize,
   undoToast,
   visitMetric,
 } from "e2e/support/helpers";
@@ -66,9 +67,7 @@ describe("scenarios > metrics > question", () => {
   });
 
   it("should be able to move a metric to a different collection", () => {
-    createQuestion(ORDERS_SCALAR_METRIC).then(({ body: card }) =>
-      visitMetric(card.id),
-    );
+    createQuestion(ORDERS_SCALAR_METRIC, { visitQuestion: true });
     openQuestionActions();
     popover().findByText("Move").click();
     modal().within(() => {
@@ -83,9 +82,7 @@ describe("scenarios > metrics > question", () => {
   });
 
   it("should be able to add a filter with an ad-hoc question", () => {
-    createQuestion(ORDERS_SCALAR_METRIC).then(({ body: card }) =>
-      visitMetric(card.id),
-    );
+    createQuestion(ORDERS_SCALAR_METRIC, { visitQuestion: true });
     cy.findByTestId("qb-header-action-panel").button("Filter").click();
     modal().within(() => {
       cy.findByText("Product").click();
@@ -98,9 +95,7 @@ describe("scenarios > metrics > question", () => {
   });
 
   it("should be able to add a custom aggregation expression based on a metric", () => {
-    createQuestion(ORDERS_TIMESERIES_METRIC).then(({ body: card }) =>
-      visitMetric(card.id),
-    );
+    createQuestion(ORDERS_TIMESERIES_METRIC, { visitQuestion: true });
     cy.findByTestId("qb-header-action-panel").button("Summarize").click();
     cy.findByTestId("sidebar-content")
       .button(ORDERS_TIMESERIES_METRIC.name)
@@ -114,18 +109,14 @@ describe("scenarios > metrics > question", () => {
   });
 
   it("should be able to add a breakout with an ad-hoc question", () => {
-    createQuestion(ORDERS_TIMESERIES_METRIC).then(({ body: card }) =>
-      visitMetric(card.id),
-    );
+    createQuestion(ORDERS_TIMESERIES_METRIC, { visitQuestion: true });
     cy.findByTestId("qb-header-action-panel").button("Summarize").click();
     cy.findByTestId("sidebar-content").findByText("Category").click();
     echartsContainer().findByText("Product → Category").should("be.visible");
   });
 
   it("should be able to change the temporal unit when consuming a timeseries metric", () => {
-    createQuestion(ORDERS_TIMESERIES_METRIC).then(({ body: card }) =>
-      visitMetric(card.id),
-    );
+    createQuestion(ORDERS_TIMESERIES_METRIC, { visitQuestion: true });
     assertQueryBuilderRowCount(49);
     cy.findByTestId("qb-header-action-panel").button("Summarize").click();
     cy.findByTestId("sidebar-content")
@@ -138,10 +129,7 @@ describe("scenarios > metrics > question", () => {
   });
 
   it("should be able to drill-thru with a metric", () => {
-    createQuestion(ORDERS_TIMESERIES_METRIC).then(({ body: card }) => {
-      visitMetric(card.id);
-      cy.wait("@dataset");
-    });
+    createQuestion(ORDERS_TIMESERIES_METRIC, { visitQuestion: true });
     cartesianChartCircle()
       .eq(23) // random dot
       .click({ force: true });
@@ -155,10 +143,7 @@ describe("scenarios > metrics > question", () => {
   });
 
   it("should be able to drill-thru with a metric without the aggregation clause", () => {
-    createQuestion(ORDERS_TIMESERIES_METRIC).then(({ body: card }) => {
-      visitMetric(card.id);
-      cy.wait("@dataset");
-    });
+    createQuestion(ORDERS_TIMESERIES_METRIC, { visitQuestion: true });
     cartesianChartCircle()
       .eq(23) // random dot
       .click({ force: true });
@@ -221,4 +206,15 @@ describe("scenarios > metrics > question", () => {
       cy.button("Summarize").should("not.exist");
     });
   });
+
+  it("should not show 'Replace existing question' option when saving an edited ad-hoc question from a metric (metabase#48555)", () => {
+    cy.signInAsNormalUser();
+    createQuestion(ORDERS_SCALAR_METRIC, { visitQuestion: true });
+
+    summarize();
+    cy.button("Done").click();
+
+    queryBuilderHeader().button("Save").click();
+    modal().findByText("Replace or save as new?").should("not.exist");
+  });
 });
diff --git a/e2e/test/scenarios/models/reproductions.cy.spec.js b/e2e/test/scenarios/models/reproductions.cy.spec.js
index dbdb5d00cc488e852e424d4f76914f14980e6e4c..3e366a7bf392e2b7ada6d2cde96869da61321be9 100644
--- a/e2e/test/scenarios/models/reproductions.cy.spec.js
+++ b/e2e/test/scenarios/models/reproductions.cy.spec.js
@@ -1574,6 +1574,7 @@ describe("issue 32963", () => {
 
   it("should pick sensible display for model based questions (metabase#32963)", () => {
     cy.findByTestId("qb-header").button("Summarize").click();
+    cy.intercept("POST", "/api/dataset").as("dataset");
 
     rightSidebar().within(() => {
       cy.findAllByText("Created At").eq(0).click();
diff --git a/e2e/test/scenarios/sharing/downloads/downloads.cy.spec.js b/e2e/test/scenarios/sharing/downloads/downloads.cy.spec.js
index 3b36b2ae2eda8aa90ebd8e1fd217de69814995c7..655274b3b2df0654cd16a48a48ebb8ba7f64b08c 100644
--- a/e2e/test/scenarios/sharing/downloads/downloads.cy.spec.js
+++ b/e2e/test/scenarios/sharing/downloads/downloads.cy.spec.js
@@ -16,6 +16,7 @@ import {
   entityPickerModalTab,
   expectGoodSnowplowEvent,
   expectNoBadSnowplowEvents,
+  exportFromDashcard,
   filterWidget,
   getDashboardCard,
   getDashboardCardMenu,
@@ -94,57 +95,58 @@ describe("scenarios > question > download", () => {
     });
   });
 
-  it("should allow downloading unformatted CSV data", () => {
-    const fieldRef = ["field", ORDERS.TOTAL, null];
-    const columnKey = `["ref",${JSON.stringify(fieldRef)}]`;
+  testCases.forEach(fileType => {
+    it(`should allow downloading unformatted ${fileType} data`, () => {
+      const fieldRef = ["field", ORDERS.TOTAL, null];
+      const columnKey = `["ref",${JSON.stringify(fieldRef)}]`;
 
-    createQuestion(
-      {
-        query: {
-          "source-table": ORDERS_ID,
-          fields: [fieldRef],
-        },
-        visualization_settings: {
-          column_settings: {
-            [columnKey]: {
-              currency: "USD",
-              currency_in_header: false,
-              currency_style: "code",
-              number_style: "currency",
+      createQuestion(
+        {
+          query: {
+            "source-table": ORDERS_ID,
+            fields: [fieldRef],
+          },
+          visualization_settings: {
+            column_settings: {
+              [columnKey]: {
+                currency: "USD",
+                currency_in_header: false,
+                currency_style: "code",
+                number_style: "currency",
+              },
             },
           },
         },
-      },
-      { visitQuestion: true, wrapId: true },
-    );
+        { visitQuestion: true, wrapId: true },
+      );
 
-    queryBuilderMain().findByText("USD 39.72").should("exist");
+      queryBuilderMain().findByText("USD 39.72").should("exist");
 
-    cy.get("@questionId").then(questionId => {
-      const opts = { questionId, fileType: "csv" };
+      cy.get("@questionId").then(questionId => {
+        const opts = { questionId, fileType };
 
-      downloadAndAssert(
-        {
-          ...opts,
-          enableFormatting: true,
-        },
-        sheet => {
-          expect(sheet["A1"].v).to.eq("Total");
-          expect(sheet["A2"].v).to.eq("USD 39.72");
-        },
-      );
+        downloadAndAssert(
+          {
+            ...opts,
+            enableFormatting: true,
+          },
+          sheet => {
+            expect(sheet["A1"].v).to.eq("Total");
+            expect(sheet["A2"].w).to.eq("USD 39.72");
+          },
+        );
 
-      downloadAndAssert(
-        {
-          ...opts,
-          enableFormatting: false,
-        },
-        sheet => {
-          expect(sheet["A1"].v).to.eq("Total");
-          expect(sheet["A2"].v).to.eq(39.718145389078366);
-          expect(sheet["A2"].w).to.eq("39.718145389078366");
-        },
-      );
+        downloadAndAssert(
+          {
+            ...opts,
+            enableFormatting: false,
+          },
+          sheet => {
+            expect(sheet["A1"].v).to.eq("Total");
+            expect(sheet["A2"].v).to.eq(39.718145389078366);
+          },
+        );
+      });
     });
   });
 
@@ -364,10 +366,7 @@ describe("scenarios > question > download", () => {
       getDashboardCard(0).findByText("Created At").should("be.visible");
       getDashboardCardMenu(0).click();
 
-      popover().within(() => {
-        cy.findByText("Download results").click();
-        cy.findByText(".png").click();
-      });
+      exportFromDashcard(".png");
 
       showDashboardCardActions(1);
       getDashboardCard(1).findByText("User ID").should("be.visible");
@@ -388,6 +387,7 @@ describe("scenarios > question > download", () => {
 
       popover().within(() => {
         cy.findByText(".png").click();
+        cy.findByTestId("download-results-button").click();
       });
 
       cy.verifyDownload(".png", { contains: true });
@@ -463,10 +463,7 @@ describeWithSnowplow("[snowplow] scenarios > dashboard", () => {
     getDashboardCard(0).findByText("Created At").should("be.visible");
     getDashboardCardMenu(0).click();
 
-    popover().within(() => {
-      cy.findByText("Download results").click();
-      cy.findByText(".png").click();
-    });
+    exportFromDashcard(".png");
 
     expectGoodSnowplowEvent({
       event: "download_results_clicked",
diff --git a/e2e/test/scenarios/sharing/public-resource-downloads.cy.spec.ts b/e2e/test/scenarios/sharing/public-resource-downloads.cy.spec.ts
index c20823db1a2082eb6b9a7f358e8f6c89da8ea511..16db73a13940b509d529ef03414768434ae60f72 100644
--- a/e2e/test/scenarios/sharing/public-resource-downloads.cy.spec.ts
+++ b/e2e/test/scenarios/sharing/public-resource-downloads.cy.spec.ts
@@ -147,7 +147,10 @@ describeWithSnowplowEE(
         waitLoading();
 
         cy.findByTestId("download-button").click();
-        popover().findByText(".png").click();
+        popover().within(() => {
+          cy.findByText(".png").click();
+          cy.findByTestId("download-results-button").click();
+        });
 
         cy.verifyDownload(".png", { contains: true });
 
diff --git a/e2e/test/scenarios/sharing/subscriptions.cy.spec.js b/e2e/test/scenarios/sharing/subscriptions.cy.spec.js
index 0a1d18e4b8e2f11e2dd5f3d1ba8f162a0534217f..2fa5113b736d4ea7c0427be065a768a4c94e280a 100644
--- a/e2e/test/scenarios/sharing/subscriptions.cy.spec.js
+++ b/e2e/test/scenarios/sharing/subscriptions.cy.spec.js
@@ -311,13 +311,7 @@ describe("scenarios > dashboard > subscriptions", () => {
       assignRecipient();
       // This is extremely fragile
       // TODO: update test once changes from `https://github.com/metabase/metabase/pull/14121` are merged into `master`
-      // eslint-disable-next-line no-unscoped-text-selectors -- deprecated usage
-      cy.findByText("Attach results")
-        .parent()
-        .parent()
-        .next()
-        .find("input") // Toggle
-        .click();
+      cy.findByLabelText("Attach results").click();
       // eslint-disable-next-line no-unscoped-text-selectors -- deprecated usage
       cy.findByText("Questions to attach").click();
       clickButton("Done");
diff --git a/e2e/test/scenarios/visualizations-tabular/pivot_tables.cy.spec.js b/e2e/test/scenarios/visualizations-tabular/pivot_tables.cy.spec.js
index a845665586cc09faf2386176951455f41db594dc..c38722153b6eedee4fdcbfec7734be1599ffad16 100644
--- a/e2e/test/scenarios/visualizations-tabular/pivot_tables.cy.spec.js
+++ b/e2e/test/scenarios/visualizations-tabular/pivot_tables.cy.spec.js
@@ -735,8 +735,9 @@ describe("scenarios > visualizations > pivot tables", { tags: "@slow" }, () => {
   it("should open the download popover (metabase#14750)", () => {
     createTestQuestion();
     cy.icon("download").click();
-    // eslint-disable-next-line no-unscoped-text-selectors -- deprecated usage
-    popover().within(() => cy.findByText("Download full results"));
+    popover().within(() =>
+      cy.findAllByText("Download").should("have.length", 2),
+    );
   });
 
   it.skip("should work for user without data permissions (metabase#14989)", () => {
diff --git a/enterprise/frontend/src/embedding-sdk/lib/interactive-question/run-question-on-navigate.ts b/enterprise/frontend/src/embedding-sdk/lib/interactive-question/run-question-on-navigate.ts
index 69a1b64ea3be52171b8320466d03a07951a52b84..fc4e4cd35a6f3e3657544f82ffba8ba7d99ed723 100644
--- a/enterprise/frontend/src/embedding-sdk/lib/interactive-question/run-question-on-navigate.ts
+++ b/enterprise/frontend/src/embedding-sdk/lib/interactive-question/run-question-on-navigate.ts
@@ -3,7 +3,7 @@ import type {
   NavigateToNewCardParams,
   SdkQuestionState,
 } from "embedding-sdk/types/question";
-import { loadCard } from "metabase/lib/card";
+import { loadCard } from "metabase/query_builder/actions/core/card";
 import { loadMetadataForCard } from "metabase/questions/actions";
 import { getMetadata } from "metabase/selectors/metadata";
 import { getCardAfterVisualizationClick } from "metabase/visualizations/lib/utils";
diff --git a/frontend/src/metabase-types/analytics/dashboard.ts b/frontend/src/metabase-types/analytics/dashboard.ts
index ad0550f424250843f636d27a8be687a47a5e1281..b30e81cdaa71cdcb7d97c4bb2cab1f9673f0ffe7 100644
--- a/frontend/src/metabase-types/analytics/dashboard.ts
+++ b/frontend/src/metabase-types/analytics/dashboard.ts
@@ -73,12 +73,6 @@ export type NewActionCardCreatedEvent = ValidateEvent<{
   dashboard_id: number;
 }>;
 
-export type NewIFrameCardCreatedEvent = ValidateEvent<{
-  event: "new_iframe_card_created";
-  dashboard_id: number;
-  domain_name: string | null;
-}>;
-
 export type CardSetToHideWhenNoResultsEvent = ValidateEvent<{
   event: "card_set_to_hide_when_no_results";
   dashboard_id: number;
@@ -139,7 +133,6 @@ export type DashboardEvent =
   | NewHeadingCardCreatedEvent
   | NewLinkCardCreatedEvent
   | NewActionCardCreatedEvent
-  | NewIFrameCardCreatedEvent
   | CardSetToHideWhenNoResultsEvent
   | DashboardPdfExportedEvent
   | CardMovedToTabEvent
diff --git a/frontend/src/metabase-types/analytics/event.ts b/frontend/src/metabase-types/analytics/event.ts
index f740cc68b2ee50e1c910fc726a3bf6a1384e364d..a1710f806354bbcec9d3f3503ec21ad201524f0c 100644
--- a/frontend/src/metabase-types/analytics/event.ts
+++ b/frontend/src/metabase-types/analytics/event.ts
@@ -26,7 +26,14 @@ export type NewsletterToggleClickedEvent = ValidateEvent<{
   event_detail: "opted-in" | "opted-out";
 }>;
 
+export type NewIFrameCardCreatedEvent = ValidateEvent<{
+  event: "new_iframe_card_created";
+  event_detail: string | null;
+  target_id: number | null;
+}>;
+
 export type SimpleEvent =
+  | NewIFrameCardCreatedEvent
   | NewsletterToggleClickedEvent
   | OnboardingCSVUploadClickedEvent
   | OnboardingDatabaseUploadClickedEvent;
diff --git a/frontend/src/metabase-types/api/mocks/revision.ts b/frontend/src/metabase-types/api/mocks/revision.ts
index 12833b5c1c73fca620cd961d2a9cbcd25506fcfb..f9f3783786a222869d64ba5e05279f7dbeb0373d 100644
--- a/frontend/src/metabase-types/api/mocks/revision.ts
+++ b/frontend/src/metabase-types/api/mocks/revision.ts
@@ -1,21 +1,20 @@
 import type { Revision } from "metabase-types/api";
 
-export const createMockRevision = (opts?: Partial<Revision>): Revision => {
-  return {
+export const createMockRevision = (opts?: Partial<Revision>): Revision => ({
+  model_id: 1,
+  id: 1,
+  description: "created this",
+  message: null,
+  timestamp: "2023-05-16T13:33:30.198622-07:00",
+  is_creation: true,
+  is_reversion: false,
+  has_multiple_changes: false,
+  user: {
     id: 1,
-    description: "created this",
-    message: null,
-    timestamp: "2023-05-16T13:33:30.198622-07:00",
-    is_creation: true,
-    is_reversion: false,
-    has_multiple_changes: false,
-    user: {
-      id: 1,
-      first_name: "Admin",
-      last_name: "Test",
-      common_name: "Admin Test",
-    },
-    diff: null,
-    ...opts,
-  };
-};
+    first_name: "Admin",
+    last_name: "Test",
+    common_name: "Admin Test",
+  },
+  diff: null,
+  ...opts,
+});
diff --git a/frontend/src/metabase-types/api/revision.ts b/frontend/src/metabase-types/api/revision.ts
index 10e6b9620686a2779ee53b45cb69d6bfd0731102..f0536bc65a6565ed40fa8ba79c20a8d3348b6c76 100644
--- a/frontend/src/metabase-types/api/revision.ts
+++ b/frontend/src/metabase-types/api/revision.ts
@@ -1,3 +1,5 @@
+import type { CardId } from "metabase-types/api";
+
 export interface Revision {
   id: number;
   description: string;
@@ -13,6 +15,7 @@ export interface Revision {
     last_name: string;
     common_name: string;
   };
+  model_id: CardId;
 }
 
 export interface RevisionListQuery {
diff --git a/frontend/src/metabase/browse/components/BrowseContainer.styled.tsx b/frontend/src/metabase/browse/components/BrowseContainer.styled.tsx
index 02715838ed5f71f344c532fc50590718b3e9885e..d04bab8328b4448f47ba1fe26757ddad8839974f 100644
--- a/frontend/src/metabase/browse/components/BrowseContainer.styled.tsx
+++ b/frontend/src/metabase/browse/components/BrowseContainer.styled.tsx
@@ -29,7 +29,7 @@ export const BrowseHeader = styled.div`
   color: ${({ theme }) => theme.fn.themeColor("dark")};
 `;
 
-export const BrowseMain = styled.main`
+export const BrowseMain = styled.div`
   display: flex;
   flex-flow: column nowrap;
   flex: 1;
diff --git a/frontend/src/metabase/browse/metrics/BrowseMetrics.tsx b/frontend/src/metabase/browse/metrics/BrowseMetrics.tsx
index b5af1d99dbf95b7951b9118cb4fa21d59f64a4e7..980c143d0b80bb08ec05b1ea1b25d8fe41a25d56 100644
--- a/frontend/src/metabase/browse/metrics/BrowseMetrics.tsx
+++ b/frontend/src/metabase/browse/metrics/BrowseMetrics.tsx
@@ -2,13 +2,27 @@ import { useState } from "react";
 import { t } from "ttag";
 
 import NoResults from "assets/img/metrics_bot.svg";
+import { getCurrentUser } from "metabase/admin/datamodel/selectors";
 import { skipToken } from "metabase/api";
+import { useDatabaseListQuery, useDocsUrl } from "metabase/common/hooks";
 import { useFetchMetrics } from "metabase/common/hooks/use-fetch-metrics";
 import EmptyState from "metabase/components/EmptyState";
 import { DelayedLoadingAndErrorWrapper } from "metabase/components/LoadingAndErrorWrapper/DelayedLoadingAndErrorWrapper";
+import Link from "metabase/core/components/Link";
 import { useSelector } from "metabase/lib/redux";
+import * as Urls from "metabase/lib/urls";
 import { PLUGIN_CONTENT_VERIFICATION } from "metabase/plugins";
-import { Box, Flex, Group, Icon, Stack, Text, Title } from "metabase/ui";
+import { getHasDataAccess } from "metabase/selectors/data";
+import {
+  Box,
+  Button,
+  Flex,
+  Group,
+  Icon,
+  Stack,
+  Text,
+  Title,
+} from "metabase/ui";
 
 import {
   BrowseContainer,
@@ -86,15 +100,48 @@ export function BrowseMetrics() {
 }
 
 function MetricsEmptyState() {
+  const isLoggedIn = Boolean(useSelector(getCurrentUser));
+  const { data: databases = [] } = useDatabaseListQuery({
+    enabled: isLoggedIn,
+  });
+  const hasDataAccess = getHasDataAccess(databases);
+
+  const newMetricLink = Urls.newQuestion({
+    mode: "query",
+    cardType: "metric",
+  });
+
+  const { url: metricsDocsLink, showMetabaseLinks } = useDocsUrl(
+    "data-modeling/metrics",
+  );
+
   return (
     <Flex align="center" justify="center" mih="70vh">
       <Box maw="30rem">
         <EmptyState
           title={t`Create Metrics to define the official way to calculate important numbers for your team`}
           message={
-            <Text mt="sm" maw="25rem">
-              {t`Metrics are like pre-defined calculations: create your aggregations once, save them as metrics, and use them whenever you need to analyze your data.`}
-            </Text>
+            <Box>
+              <Text mt="sm" maw="25rem">
+                {t`Metrics are like pre-defined calculations: create your aggregations once, save them as metrics, and use them whenever you need to analyze your data.`}
+              </Text>
+              <Flex pt="md" align="center" justify="center" gap="md">
+                {showMetabaseLinks && (
+                  <Link
+                    target="_blank"
+                    to={metricsDocsLink}
+                    variant="brandBold"
+                  >{t`Read the docs`}</Link>
+                )}
+                {hasDataAccess && (
+                  <Button
+                    component={Link}
+                    to={newMetricLink}
+                    variant="filled"
+                  >{t`Create metric`}</Button>
+                )}
+              </Flex>
+            </Box>
           }
           illustrationElement={<img src={NoResults} />}
         />
diff --git a/frontend/src/metabase/browse/metrics/tests/common.unit.spec.tsx b/frontend/src/metabase/browse/metrics/tests/common.unit.spec.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..c8fdbd40d4a34d4d7aaf87d879c14c69c9efab3d
--- /dev/null
+++ b/frontend/src/metabase/browse/metrics/tests/common.unit.spec.tsx
@@ -0,0 +1,58 @@
+import { screen, within } from "__support__/ui";
+
+import { setup } from "./setup";
+
+describe("BrowseMetrics (OSS)", () => {
+  it("displays an empty message when no metrics are found", async () => {
+    setup({ metricCount: 0 });
+    expect(
+      await screen.findByText(
+        "Create Metrics to define the official way to calculate important numbers for your team",
+      ),
+    ).toBeInTheDocument();
+    expect(await screen.findByText("Create metric")).toBeInTheDocument();
+  });
+
+  it("should not show the Create metric button if the user does not have data access", async () => {
+    setup({ metricCount: 0, databases: [] });
+    expect(
+      await screen.findByText(
+        "Create Metrics to define the official way to calculate important numbers for your team",
+      ),
+    ).toBeInTheDocument();
+    expect(screen.queryByText("Create metric")).not.toBeInTheDocument();
+  });
+
+  it("displays a link to the metrics docs", async () => {
+    setup({ metricCount: 0 });
+    await screen.findByText(
+      "Create Metrics to define the official way to calculate important numbers for your team",
+    );
+    expect(screen.getByText("Read the docs")).toBeInTheDocument();
+  });
+
+  it("displays the Our Analytics collection if it has a metric", async () => {
+    setup({ metricCount: 25 });
+    const table = await screen.findByRole("table", {
+      name: /Table of metrics/,
+    });
+    expect(table).toBeInTheDocument();
+    expect(
+      within(table).getAllByTestId("path-for-collection: Our analytics"),
+    ).toHaveLength(2);
+    expect(within(table).getByText("Metric 20")).toBeInTheDocument();
+    expect(within(table).getByText("Metric 21")).toBeInTheDocument();
+    expect(within(table).getByText("Metric 22")).toBeInTheDocument();
+  });
+
+  it("displays collection breadcrumbs", async () => {
+    setup({ metricCount: 5 });
+    const table = await screen.findByRole("table", {
+      name: /Table of metrics/,
+    });
+    expect(within(table).getByText("Metric 1")).toBeInTheDocument();
+    expect(
+      within(table).getAllByTestId("path-for-collection: Alpha"),
+    ).toHaveLength(3);
+  });
+});
diff --git a/frontend/src/metabase/browse/metrics/tests/enterprise.unit.spec.tsx b/frontend/src/metabase/browse/metrics/tests/enterprise.unit.spec.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..3ae1ec77d96d10a77129861d2eb90de5c5dc1ac9
--- /dev/null
+++ b/frontend/src/metabase/browse/metrics/tests/enterprise.unit.spec.tsx
@@ -0,0 +1,29 @@
+import { screen } from "__support__/ui";
+
+import { type SetupOpts, setup as baseSetup } from "./setup";
+
+function setup(opts: SetupOpts) {
+  baseSetup({
+    hasEnterprisePlugins: true,
+    ...opts,
+  });
+}
+
+describe("BrowseMetrics (EE without token)", () => {
+  it("displays a link to the metrics docs when show-metabase-links = true", async () => {
+    setup({ metricCount: 0, showMetabaseLinks: true });
+    await screen.findByText(
+      "Create Metrics to define the official way to calculate important numbers for your team",
+    );
+    expect(screen.getByText("Read the docs")).toBeInTheDocument();
+  });
+
+  it("displays a link to the metrics docs when show-metabase-links = false", async () => {
+    // Still show the link on enterprise, because disabling metabase links is not supported
+    setup({ metricCount: 0, showMetabaseLinks: false });
+    await screen.findByText(
+      "Create Metrics to define the official way to calculate important numbers for your team",
+    );
+    expect(screen.getByText("Read the docs")).toBeInTheDocument();
+  });
+});
diff --git a/frontend/src/metabase/browse/metrics/tests/premium.unit.spec.tsx b/frontend/src/metabase/browse/metrics/tests/premium.unit.spec.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..cf139c9dd5b0b4a2cdcc4df0224e0ed0f11d003c
--- /dev/null
+++ b/frontend/src/metabase/browse/metrics/tests/premium.unit.spec.tsx
@@ -0,0 +1,29 @@
+import { screen } from "__support__/ui";
+
+import { type SetupOpts, setup as baseSetup } from "./setup";
+
+function setup(opts: SetupOpts) {
+  baseSetup({
+    hasEnterprisePlugins: true,
+    tokenFeatures: { whitelabel: true },
+    ...opts,
+  });
+}
+
+describe("BrowseMetrics (EE with whitelabel token)", () => {
+  it("displays a link to the metrics docs when show-metabase-links = true", async () => {
+    setup({ metricCount: 0, showMetabaseLinks: true });
+    await screen.findByText(
+      "Create Metrics to define the official way to calculate important numbers for your team",
+    );
+    expect(screen.getByText("Read the docs")).toBeInTheDocument();
+  });
+
+  it("does not display a link to the metrics docs when show-metabase-links = false", async () => {
+    setup({ metricCount: 0, showMetabaseLinks: false });
+    await screen.findByText(
+      "Create Metrics to define the official way to calculate important numbers for your team",
+    );
+    expect(screen.queryByText("Read the docs")).not.toBeInTheDocument();
+  });
+});
diff --git a/frontend/src/metabase/browse/metrics/BrowseMetrics.unit.spec.tsx b/frontend/src/metabase/browse/metrics/tests/setup.tsx
similarity index 71%
rename from frontend/src/metabase/browse/metrics/BrowseMetrics.unit.spec.tsx
rename to frontend/src/metabase/browse/metrics/tests/setup.tsx
index ae42d5608eb2d3b7edd1d5f5d0a12f5a48656ad2..6bd334d4757fed43cbed7552f797f4e8c0ceeeaa 100644
--- a/frontend/src/metabase/browse/metrics/BrowseMetrics.unit.spec.tsx
+++ b/frontend/src/metabase/browse/metrics/tests/setup.tsx
@@ -1,64 +1,35 @@
+import { setupEnterprisePlugins } from "__support__/enterprise";
 import {
   setupCardEndpoints,
   setupCardQueryEndpoints,
+  setupDatabasesEndpoints,
   setupRecentViewsEndpoints,
   setupSearchEndpoints,
   setupSettingsEndpoints,
 } from "__support__/server-mocks";
-import { renderWithProviders, screen, within } from "__support__/ui";
+import { mockSettings } from "__support__/settings";
+import { renderWithProviders } from "__support__/ui";
+import type { Database, TokenFeatures } from "metabase-types/api";
 import {
   createMockCard,
   createMockCollection,
+  createMockDatabase,
   createMockDataset,
   createMockSearchResult,
+  createMockTokenFeatures,
+  createMockUser,
 } from "metabase-types/api/mocks";
-import { createMockSetupState } from "metabase-types/store/mocks";
-
-import { BrowseMetrics } from "./BrowseMetrics";
-import { createMockMetricResult, createMockRecentMetric } from "./test-utils";
-import type { MetricResult, RecentMetric } from "./types";
+import {
+  createMockSetupState,
+  createMockState,
+} from "metabase-types/store/mocks";
 
-type SetupOpts = {
-  metricCount?: number;
-  recentMetricCount?: number;
-};
+import { BrowseMetrics } from "../BrowseMetrics";
+import { createMockMetricResult, createMockRecentMetric } from "../test-utils";
+import type { MetricResult, RecentMetric } from "../types";
 
 const TEST_DATASET = createMockDataset();
 
-function setup({
-  metricCount = Infinity,
-  recentMetricCount = 5,
-}: SetupOpts = {}) {
-  const mockMetricResults = mockMetrics.map(createMockMetricResult);
-  const mockRecentMetrics = mockMetrics.map(metric =>
-    createMockRecentMetric(metric as RecentMetric),
-  );
-
-  const metrics = mockMetricResults.slice(0, metricCount);
-  const recentMetrics = mockRecentMetrics.slice(0, recentMetricCount);
-
-  setupSettingsEndpoints([]);
-  setupSearchEndpoints(metrics.map(createMockSearchResult));
-  setupRecentViewsEndpoints(recentMetrics);
-
-  for (const metric of metrics) {
-    const card = createMockCard({
-      id: metric.id,
-    });
-
-    setupCardEndpoints(card);
-    setupCardQueryEndpoints(card, TEST_DATASET);
-  }
-
-  return renderWithProviders(<BrowseMetrics />, {
-    storeInitialState: {
-      setup: createMockSetupState({
-        locale: { name: "English", code: "en" },
-      }),
-    },
-  });
-}
-
 const defaultRootCollection = createMockCollection({
   id: "root",
   name: "Our analytics",
@@ -232,38 +203,61 @@ const mockMetrics: Partial<MetricResult>[] = [
   ...partialMetric,
 }));
 
-describe("BrowseMetrics", () => {
-  it("displays an empty message when no metrics are found", async () => {
-    setup({ metricCount: 0 });
-    expect(
-      await screen.findByText(
-        "Create Metrics to define the official way to calculate important numbers for your team",
-      ),
-    ).toBeInTheDocument();
-  });
+export type SetupOpts = {
+  metricCount?: number;
+  recentMetricCount?: number;
+  showMetabaseLinks?: boolean;
+  hasEnterprisePlugins?: boolean;
+  tokenFeatures?: Partial<TokenFeatures>;
+  databases?: Database[];
+};
 
-  it("displays the Our Analytics collection if it has a metric", async () => {
-    setup({ metricCount: 25 });
-    const table = await screen.findByRole("table", {
-      name: /Table of metrics/,
-    });
-    expect(table).toBeInTheDocument();
-    expect(
-      within(table).getAllByTestId("path-for-collection: Our analytics"),
-    ).toHaveLength(2);
-    expect(within(table).getByText("Metric 20")).toBeInTheDocument();
-    expect(within(table).getByText("Metric 21")).toBeInTheDocument();
-    expect(within(table).getByText("Metric 22")).toBeInTheDocument();
+const MOCK_DATABASE = createMockDatabase({ id: 1, name: "Database Name" });
+
+export function setup({
+  metricCount = Infinity,
+  recentMetricCount = 5,
+  showMetabaseLinks = true,
+  hasEnterprisePlugins,
+  tokenFeatures = {},
+  databases = [MOCK_DATABASE],
+}: SetupOpts = {}) {
+  const state = createMockState({
+    setup: createMockSetupState({
+      locale: { name: "English", code: "en" },
+    }),
+    settings: mockSettings({
+      "show-metabase-links": showMetabaseLinks,
+      "token-features": createMockTokenFeatures(tokenFeatures),
+    }),
+    currentUser: createMockUser({ id: 1 }),
   });
 
-  it("displays collection breadcrumbs", async () => {
-    setup({ metricCount: 5 });
-    const table = await screen.findByRole("table", {
-      name: /Table of metrics/,
+  if (hasEnterprisePlugins) {
+    setupEnterprisePlugins();
+  }
+
+  const mockMetricResults = mockMetrics.map(createMockMetricResult);
+  const mockRecentMetrics = mockMetrics.map(metric =>
+    createMockRecentMetric(metric as RecentMetric),
+  );
+
+  const metrics = mockMetricResults.slice(0, metricCount);
+  const recentMetrics = mockRecentMetrics.slice(0, recentMetricCount);
+
+  setupDatabasesEndpoints(databases);
+  setupSettingsEndpoints([]);
+  setupSearchEndpoints(metrics.map(createMockSearchResult));
+  setupRecentViewsEndpoints(recentMetrics);
+
+  for (const metric of metrics) {
+    const card = createMockCard({
+      id: metric.id,
     });
-    expect(within(table).getByText("Metric 1")).toBeInTheDocument();
-    expect(
-      within(table).getAllByTestId("path-for-collection: Alpha"),
-    ).toHaveLength(3);
-  });
-});
+
+    setupCardEndpoints(card);
+    setupCardQueryEndpoints(card, TEST_DATASET);
+  }
+
+  return renderWithProviders(<BrowseMetrics />, { storeInitialState: state });
+}
diff --git a/frontend/src/metabase/common/components/ExportSettingsWidget/ExportSettingsWidget.tsx b/frontend/src/metabase/common/components/ExportSettingsWidget/ExportSettingsWidget.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..712a3323a8d93b65af04b7fea119ab0ee14da100
--- /dev/null
+++ b/frontend/src/metabase/common/components/ExportSettingsWidget/ExportSettingsWidget.tsx
@@ -0,0 +1,71 @@
+import { t } from "ttag";
+
+import type { ExportFormat } from "metabase/common/types/export";
+import { useSelector } from "metabase/lib/redux";
+import { getApplicationName } from "metabase/selectors/whitelabel";
+import { Checkbox, Chip, Group, Radio, Stack, Text } from "metabase/ui";
+
+interface ExportSettingsWidgetProps {
+  formats: ExportFormat[];
+  selectedFormat: ExportFormat;
+  isFormattingEnabled: boolean;
+  isPivotingEnabled: boolean;
+  canConfigurePivoting?: boolean;
+  canConfigureFormatting?: boolean;
+  onChangeFormat: (format: ExportFormat) => void;
+  onTogglePivoting: () => void;
+  onToggleFormatting: () => void;
+}
+
+export const ExportSettingsWidget = ({
+  formats,
+  selectedFormat,
+  isFormattingEnabled,
+  isPivotingEnabled,
+  canConfigureFormatting,
+  canConfigurePivoting,
+  onChangeFormat,
+  onToggleFormatting,
+  onTogglePivoting,
+}: ExportSettingsWidgetProps) => {
+  const applicationName = useSelector(getApplicationName);
+  return (
+    <Stack>
+      <Chip.Group value={selectedFormat} onChange={onChangeFormat}>
+        <Group spacing="xs" noWrap>
+          {formats.map(format => (
+            <Chip
+              key={format}
+              value={format}
+              variant="brand"
+            >{`.${format}`}</Chip>
+          ))}
+        </Group>
+      </Chip.Group>
+      {canConfigureFormatting ? (
+        <Stack spacing="xs">
+          <Radio.Group
+            value={isFormattingEnabled ? "true" : "false"}
+            onChange={() => onToggleFormatting()}
+          >
+            <Group>
+              <Radio value="true" label={t`Formatted`} />
+              <Radio value="false" label={t`Unformatted`} />
+            </Group>
+          </Radio.Group>
+          <Text
+            size="sm"
+            color="text-medium"
+          >{t`E.g. September 6, 2024 or $187.50, like in ${applicationName}`}</Text>
+        </Stack>
+      ) : null}
+      {canConfigurePivoting ? (
+        <Checkbox
+          label={t`Keep data pivoted`}
+          checked={isPivotingEnabled}
+          onChange={() => onTogglePivoting()}
+        />
+      ) : null}
+    </Stack>
+  );
+};
diff --git a/frontend/src/metabase/common/components/ExportSettingsWidget/index.ts b/frontend/src/metabase/common/components/ExportSettingsWidget/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..abef489d8fde5ae5f134d6181a1dc7673663a166
--- /dev/null
+++ b/frontend/src/metabase/common/components/ExportSettingsWidget/index.ts
@@ -0,0 +1 @@
+export * from "./ExportSettingsWidget";
diff --git a/frontend/src/metabase/common/types/export.ts b/frontend/src/metabase/common/types/export.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c1f851a3e660a6fe1be22373703317c4b0dc60c8
--- /dev/null
+++ b/frontend/src/metabase/common/types/export.ts
@@ -0,0 +1 @@
+export type ExportFormat = "csv" | "xlsx" | "json" | "png";
diff --git a/frontend/src/metabase/components/SaveQuestionForm/context.tsx b/frontend/src/metabase/components/SaveQuestionForm/context.tsx
index 803e2bb898af84485158b813a8d3a6b0419c10d9..77d6ad0f0e05d2c9d587299cc13fc98c86a60fdc 100644
--- a/frontend/src/metabase/components/SaveQuestionForm/context.tsx
+++ b/frontend/src/metabase/components/SaveQuestionForm/context.tsx
@@ -75,14 +75,14 @@ export const SaveQuestionProvider = ({
   // we care only about the very first result as question can be changed before
   // the modal is closed
   const [isSavedQuestionInitiallyChanged] = useState(
-    isNotNull(originalQuestion) &&
-      originalQuestion.type() !== "model" &&
-      question.isDirtyComparedTo(originalQuestion),
+    isNotNull(originalQuestion) && question.isDirtyComparedTo(originalQuestion),
   );
 
   const showSaveType =
     isSavedQuestionInitiallyChanged &&
     originalQuestion != null &&
+    originalQuestion.type() !== "model" &&
+    originalQuestion.type() !== "metric" &&
     originalQuestion.canWrite();
 
   return (
diff --git a/frontend/src/metabase/dashboard/analytics.ts b/frontend/src/metabase/dashboard/analytics.ts
index 8b4b7132c2f0a4cd16f252f7c43211de1a4300e9..406dd87482b066d51bc843ec72343f08ddf8201f 100644
--- a/frontend/src/metabase/dashboard/analytics.ts
+++ b/frontend/src/metabase/dashboard/analytics.ts
@@ -1,4 +1,4 @@
-import { trackSchemaEvent } from "metabase/lib/analytics";
+import { trackSchemaEvent, trackSimpleEvent } from "metabase/lib/analytics";
 import type { DashboardId, DashboardWidth } from "metabase-types/api";
 
 import type { SectionId } from "./sections";
@@ -84,10 +84,10 @@ export const trackIFrameDashcardsSaved = (
   dashboardId: DashboardId,
   domainName: string | null,
 ) => {
-  trackSchemaEvent("dashboard", {
+  trackSimpleEvent({
     event: "new_iframe_card_created",
-    dashboard_id: getDashboardId(dashboardId),
-    domain_name: domainName,
+    target_id: getDashboardId(dashboardId),
+    event_detail: domainName,
   });
 };
 
diff --git a/frontend/src/metabase/dashboard/components/DashCard/DashCardMenu/DashCardMenu.unit.spec.tsx b/frontend/src/metabase/dashboard/components/DashCard/DashCardMenu/DashCardMenu.unit.spec.tsx
index 5942273f0f8d32dcac3a01faf5ddb52e2d11f763..f29e74b12bb8dcc2480ab79a092ceeb99a041953 100644
--- a/frontend/src/metabase/dashboard/components/DashCard/DashCardMenu/DashCardMenu.unit.spec.tsx
+++ b/frontend/src/metabase/dashboard/components/DashCard/DashCardMenu/DashCardMenu.unit.spec.tsx
@@ -195,7 +195,9 @@ describe("DashCardMenu", () => {
     await userEvent.click(getIcon("ellipsis"));
     await userEvent.click(await screen.findByText("Download results"));
 
-    expect(screen.getByText("Download full results")).toBeInTheDocument();
+    expect(
+      await screen.findByRole("heading", { name: /download/i }),
+    ).toBeInTheDocument();
   });
 
   it("should not display query export options when query is running", async () => {
diff --git a/frontend/src/metabase/dashboard/reducers-typed.ts b/frontend/src/metabase/dashboard/reducers-typed.ts
index 22d8513c70d7d2b0de979b8267dd747379a9de0d..d48f831835dc33d75562ec225e1f2cfb71d72d3a 100644
--- a/frontend/src/metabase/dashboard/reducers-typed.ts
+++ b/frontend/src/metabase/dashboard/reducers-typed.ts
@@ -1,5 +1,5 @@
 import { createReducer } from "@reduxjs/toolkit";
-import { assocIn } from "icepick";
+import { assocIn, dissocIn } from "icepick";
 import { omit } from "underscore";
 
 import {
@@ -9,14 +9,18 @@ import {
   updateDashboardEnableEmbedding,
 } from "metabase/api";
 import Dashboards from "metabase/entities/dashboards";
+import Questions from "metabase/entities/questions";
+import Revisions from "metabase/entities/revisions";
 import { handleActions } from "metabase/lib/redux";
 import { NAVIGATE_BACK_TO_DASHBOARD } from "metabase/query_builder/actions";
 import type { UiParameter } from "metabase-lib/v1/parameters/types";
 import type {
+  Card,
   DashCardId,
   Dashboard,
   ParameterId,
   ParameterValueOrArray,
+  Revision,
 } from "metabase-types/api";
 import type {
   DashboardSidebarName,
@@ -40,6 +44,7 @@ import {
   addDashcardIdsToLoadingQueue,
   addManyCardsToDash,
   cancelFetchCardData,
+  clearCardData,
   fetchCardDataAction,
   fetchDashboard,
   fetchDashboardCardDataAction,
@@ -204,17 +209,12 @@ export const sidebar = createReducer(
 export const parameterValues = createReducer(
   INITIAL_DASHBOARD_STATE.parameterValues,
   builder => {
-    builder.addCase<
-      string,
-      {
-        type: string;
-        payload: {
-          clearCache?: boolean;
-        };
-      }
-    >(INITIALIZE, (state, { payload: { clearCache = true } = {} }) => {
-      return clearCache ? {} : state;
-    });
+    builder.addCase(
+      initialize,
+      (state, { payload: { clearCache = true } = {} }) => {
+        return clearCache ? {} : state;
+      },
+    );
 
     builder.addCase(fetchDashboard.fulfilled, (_state, { payload }) => {
       return payload.parameterValues;
@@ -259,17 +259,12 @@ export const parameterValues = createReducer(
       }
     });
 
-    builder.addCase<
-      string,
-      {
-        type: string;
-        payload: {
-          id: ParameterId;
-        };
-      }
-    >(REMOVE_PARAMETER, (state, { payload: { id } }) => {
-      delete state[id];
-    });
+    builder.addCase<string, { type: string; payload: { id: ParameterId } }>(
+      REMOVE_PARAMETER,
+      (state, { payload: { id } }) => {
+        delete state[id];
+      },
+    );
   },
 );
 
@@ -413,3 +408,44 @@ export const loadingDashCards = createReducer(
       }));
   },
 );
+
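+// Note: this slice is also updated by `tabsReducer` in `frontend/src/metabase/dashboard/actions/tabs.ts`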
+export const dashcardData = createReducer(
+  INITIAL_DASHBOARD_STATE.dashcardData,
+  builder => {
+    builder
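+      // clear existing dashboard data when loading a dashboard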
+      .addCase(initialize, (state, action) => {
+        const { clearCache = true } = action.payload ?? {};
+        return clearCache ? {} : state;
+      })
+      .addCase(fetchCardDataAction.fulfilled, (state, action) => {
+        const { dashcard_id, card_id, result } = action.payload ?? {};
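+        // if the payload is missing ids, returning nothing keeps the existing draft state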
+        if (dashcard_id && card_id) {
+          return assocIn(state, [dashcard_id, card_id], result);
+        }
+      })
+      .addCase(clearCardData, (state, action) => {
+        const { cardId, dashcardId } = action.payload;
+        return dissocIn(state, [dashcardId, cardId]);
+      })
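+      // evict cached results for a card whenever its question is updated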
+      .addCase<string, { type: string; payload: { object: Card } }>(
+        Questions.actionTypes.UPDATE,
+        (state, action) => {
+          const id = action.payload.object.id;
+          for (const dashcardId in state) {
+            delete state[dashcardId][id];
+          }
+        },
+      )
+      .addCase<string, { type: string; payload: Revision }>(
+        Revisions.actionTypes.REVERT,
+        (state, action) => {
+          const { model_id } = action.payload;
+          if (model_id) {
+            for (const dashcardId in state) {
+              delete state[dashcardId][model_id];
+            }
+          }
+        },
+      );
+  },
+);
diff --git a/frontend/src/metabase/dashboard/reducers.js b/frontend/src/metabase/dashboard/reducers.js
index fe5bf641438464503e81a15d02cf8d86086f0984..3b3f35cdf3ea09afda1cdaafb5e67cf4062252fa 100644
--- a/frontend/src/metabase/dashboard/reducers.js
+++ b/frontend/src/metabase/dashboard/reducers.js
@@ -4,13 +4,11 @@ import _ from "underscore";
 
 import Actions from "metabase/entities/actions";
 import Questions from "metabase/entities/questions";
-import Revisions from "metabase/entities/revisions";
 import { combineReducers, handleActions } from "metabase/lib/redux";
 
 import {
   ADD_CARD_TO_DASH,
   ADD_MANY_CARDS_TO_DASH,
-  CLEAR_CARD_DATA,
   INITIALIZE,
   MARK_NEW_CARD_SEEN,
   REMOVE_CARD_FROM_DASH,
@@ -24,7 +22,6 @@ import {
   UNDO_REMOVE_CARD_FROM_DASH,
   UPDATE_DASHCARD_VISUALIZATION_SETTINGS,
   UPDATE_DASHCARD_VISUALIZATION_SETTINGS_FOR_COLUMN,
-  fetchCardDataAction,
   fetchDashboard,
   tabsReducer,
 } from "./actions";
@@ -33,6 +30,7 @@ import {
   autoApplyFilters,
   dashboardId,
   dashboards,
+  dashcardData,
   editingDashboard,
   isAddParameterPopoverOpen,
   isNavigatingBackToDashboard,
@@ -162,32 +160,6 @@ const dashcards = handleActions(
   INITIAL_DASHBOARD_STATE.dashcards,
 );
 
-// Many of these slices are also updated by `tabsReducer` in `frontend/src/metabase/dashboard/actions/tabs.ts`
-const dashcardData = handleActions(
-  {
-    // clear existing dashboard data when loading a dashboard
-    [INITIALIZE]: {
-      next: (state, { payload: { clearCache = true } = {} }) =>
-        clearCache ? {} : state,
-    },
-    [fetchCardDataAction.fulfilled]: {
-      next: (state, { payload: { dashcard_id, card_id, result } }) =>
-        assocIn(state, [dashcard_id, card_id], result),
-    },
-    [CLEAR_CARD_DATA]: {
-      next: (state, { payload: { cardId, dashcardId } }) =>
-        assocIn(state, [dashcardId, cardId]),
-    },
-    [Questions.actionTypes.UPDATE]: (state, { payload: { object: card } }) =>
-      _.mapObject(state, dashboardData => dissoc(dashboardData, card.id)),
-    [Revisions.actionTypes.REVERT]: (state, { payload: revision }) =>
-      _.mapObject(state, dashboardData =>
-        dissoc(dashboardData, revision.model_id),
-      ),
-  },
-  INITIAL_DASHBOARD_STATE.dashcardData,
-);
-
 const draftParameterValues = handleActions(
   {
     [INITIALIZE]: {
diff --git a/frontend/src/metabase/entities/questions.js b/frontend/src/metabase/entities/questions.js
index 7bb73fddaab82de7078f3ee35e4d5ca8a69d7bc5..9f8c6043b18353da216fe9f6cc12c5da3d6112c2 100644
--- a/frontend/src/metabase/entities/questions.js
+++ b/frontend/src/metabase/entities/questions.js
@@ -17,7 +17,7 @@ import {
   undo,
 } from "metabase/lib/entities";
 import { compose, withAction, withNormalize } from "metabase/lib/redux";
-import * as Urls from "metabase/lib/urls";
+import * as Urls from "metabase/lib/urls/questions";
 import { PLUGIN_MODERATION } from "metabase/plugins";
 import {
   API_UPDATE_QUESTION,
diff --git a/frontend/src/metabase/lib/analytics.ts b/frontend/src/metabase/lib/analytics.ts
index a8541fefe874035d3679dfc54715739543d567fa..f6cebcce21d0c88a345ac1dd55c5597855095518 100644
--- a/frontend/src/metabase/lib/analytics.ts
+++ b/frontend/src/metabase/lib/analytics.ts
@@ -18,7 +18,7 @@ const VERSIONS: Record<SchemaType, SchemaVersion> = {
   browse_data: "1-0-0",
   cleanup: "1-0-1",
   csvupload: "1-0-3",
-  dashboard: "1-1-6",
+  dashboard: "1-1-5",
   database: "1-0-1",
   downloads: "1-0-0",
   embed_flow: "1-0-2",
diff --git a/frontend/src/metabase/lib/card.js b/frontend/src/metabase/lib/card.js
index 7dcd280498e694248266cc6f50e715fc5c17c18f..1a2deaf46eb3fdb0f4cbc19dfd52b6225a127505 100644
--- a/frontend/src/metabase/lib/card.js
+++ b/frontend/src/metabase/lib/card.js
@@ -1,4 +1,3 @@
-import Questions from "metabase/entities/questions";
 import { b64hash_to_utf8, utf8_to_b64url } from "metabase/lib/encoding";
 import { equals } from "metabase/lib/utils";
 
@@ -11,35 +10,6 @@ export function createCard(name = null) {
   };
 }
 
-// load a card either by ID or from a base64 serialization.  if both are present then they are merged, which the serialized version taking precedence
-// TODO: move to redux
-export async function loadCard(cardId, { dispatch, getState }) {
-  try {
-    await dispatch(
-      Questions.actions.fetch(
-        { id: cardId },
-        {
-          properties: [
-            "id",
-            "dataset_query",
-            "display",
-            "visualization_settings",
-          ], // complies with Card interface
-        },
-      ),
-    );
-
-    const question = Questions.selectors.getObject(getState(), {
-      entityId: cardId,
-    });
-
-    return question?.card();
-  } catch (error) {
-    console.error("error loading card", error);
-    throw error;
-  }
-}
-
 function getCleanCard(card) {
   return {
     name: card.name,
diff --git a/frontend/src/metabase/lib/urls/misc.js b/frontend/src/metabase/lib/urls/misc.js
deleted file mode 100644
index 34d94d50a0217ff6d42a60e35e6f1e1fcbc10c1e..0000000000000000000000000000000000000000
--- a/frontend/src/metabase/lib/urls/misc.js
+++ /dev/null
@@ -1,6 +0,0 @@
-export const exportFormats = ["csv", "xlsx", "json"];
-export const exportFormatPng = "png";
-
-export function accountSettings() {
-  return "/account/profile";
-}
diff --git a/frontend/src/metabase/lib/urls/misc.ts b/frontend/src/metabase/lib/urls/misc.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e5816a9a682b9c51471c036c6281160a5271af7b
--- /dev/null
+++ b/frontend/src/metabase/lib/urls/misc.ts
@@ -0,0 +1,8 @@
+import type { ExportFormat } from "metabase/common/types/export";
+
+export const exportFormats: ExportFormat[] = ["csv", "xlsx", "json"];
+export const exportFormatPng: ExportFormat = "png";
+
+export function accountSettings() {
+  return "/account/profile";
+}
diff --git a/frontend/src/metabase/parameters/components/ParameterValuePicker/ParameterValuePicker.tsx b/frontend/src/metabase/parameters/components/ParameterValuePicker/ParameterValuePicker.tsx
index 5ac1b98e94297fa83d3b7da4d1765832d858d50d..8f58f3a6a842bc784242cb5b0ffc9c793891afc7 100644
--- a/frontend/src/metabase/parameters/components/ParameterValuePicker/ParameterValuePicker.tsx
+++ b/frontend/src/metabase/parameters/components/ParameterValuePicker/ParameterValuePicker.tsx
@@ -4,7 +4,7 @@ import _ from "underscore";
 
 import { useDispatch } from "metabase/lib/redux";
 import { fetchParameterValues } from "metabase/parameters/actions";
-import { DefaultParameterValueWidget } from "metabase/query_builder/components/template_tags/TagEditorParamParts";
+import { DefaultParameterValueWidget } from "metabase/query_builder/components/template_tags/TagEditorParamParts/TagEditorParam.styled";
 import type { UiParameter } from "metabase-lib/v1/parameters/types";
 import { isDateParameter } from "metabase-lib/v1/parameters/utils/parameter-type";
 import type { Parameter, TemplateTag } from "metabase-types/api";
diff --git a/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestion.unit.spec.tsx b/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestion.unit.spec.tsx
index 698292aac092f157ce08e9238d0bdb655f3a48a0..8bb869a2341aadc282dfd44cb7cced397621cd47 100644
--- a/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestion.unit.spec.tsx
+++ b/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestion.unit.spec.tsx
@@ -133,7 +133,9 @@ describe("PublicOrEmbeddedQuestion", () => {
       await userEvent.click(getIcon("download"));
 
       expect(
-        within(screen.getByRole("dialog")).getByText("Download full results"),
+        within(screen.getByRole("dialog")).getByRole("heading", {
+          name: /download/i,
+        }),
       ).toBeInTheDocument();
     });
 
diff --git a/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestionView.stories.tsx b/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestionView.stories.tsx
index 64f5e170f7e601da99724b0c7efc7f6e96f68e07..116f56bc16ee4054fed66557b404166df31262dd 100644
--- a/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestionView.stories.tsx
+++ b/frontend/src/metabase/public/containers/PublicOrEmbeddedQuestion/PublicOrEmbeddedQuestionView.stories.tsx
@@ -374,6 +374,7 @@ const downloadQuestionAsPng = async (
   const documentElement = within(document.documentElement);
   const pngButton = await documentElement.findByText(".png");
   await userEvent.click(pngButton);
+  await userEvent.click(documentElement.getByTestId("download-results-button"));
   await canvas.findByTestId("image-downloaded");
   asyncCallback();
 };
diff --git a/frontend/src/metabase/public/lib/analytics.ts b/frontend/src/metabase/public/lib/analytics.ts
index e8ebc7abb34719eb6e8b358e0bfc80b02478c0a3..c0272990deb17550ddb08f5f29c3ad443d3760c6 100644
--- a/frontend/src/metabase/public/lib/analytics.ts
+++ b/frontend/src/metabase/public/lib/analytics.ts
@@ -124,7 +124,7 @@ export const trackPublicLinkCopied = ({
   format = null,
 }: {
   artifact: EmbedResourceType;
-  format?: ExportFormatType | null;
+  format?: ExportFormatType | "html";
 }): void => {
   trackSchemaEvent(SCHEMA_NAME, {
     event: "public_link_copied",
diff --git a/frontend/src/metabase/query_builder/actions/core/card.ts b/frontend/src/metabase/query_builder/actions/core/card.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6f40bb53852b45f35f24423e7709a43fb65f2d55
--- /dev/null
+++ b/frontend/src/metabase/query_builder/actions/core/card.ts
@@ -0,0 +1,34 @@
+import Questions from "metabase/entities/questions";
+import type { CardId } from "metabase-types/api";
+import type { Dispatch, GetState } from "metabase-types/store";
+
+// load a card either by ID or from a base64 serialization. If both are present then they are merged, with the serialized version taking precedence
+export async function loadCard(
+  cardId: CardId,
+  { dispatch, getState }: { dispatch: Dispatch; getState: GetState },
+) {
+  try {
+    await dispatch(
+      Questions.actions.fetch(
+        { id: cardId },
+        {
+          properties: [
+            "id",
+            "dataset_query",
+            "display",
+            "visualization_settings",
+          ], // complies with Card interface
+        },
+      ),
+    );
+
+    const question = Questions.selectors.getObject(getState(), {
+      entityId: cardId,
+    });
+
+    return question?.card();
+  } catch (error) {
+    console.error("error loading card", error);
+    throw error;
+  }
+}
diff --git a/frontend/src/metabase/query_builder/actions/core/core.js b/frontend/src/metabase/query_builder/actions/core/core.js
index 1fc2852f6c15ce05cd9e57b853db927a5767ac31..946b1b53bf3a2ae2fc63a3fe2c191c5a3f456b42 100644
--- a/frontend/src/metabase/query_builder/actions/core/core.js
+++ b/frontend/src/metabase/query_builder/actions/core/core.js
@@ -6,7 +6,6 @@ import Databases from "metabase/entities/databases";
 import { updateModelIndexes } from "metabase/entities/model-indexes/actions";
 import Questions from "metabase/entities/questions";
 import Revision from "metabase/entities/revisions";
-import { loadCard } from "metabase/lib/card";
 import { shouldOpenInBlankWindow } from "metabase/lib/dom";
 import { createThunkAction } from "metabase/lib/redux";
 import * as Urls from "metabase/lib/urls";
@@ -39,6 +38,7 @@ import { zoomInRow } from "../object-detail";
 import { clearQueryResult, runQuestionQuery } from "../querying";
 import { onCloseSidebars } from "../ui";
 
+import { loadCard } from "./card";
 import { API_UPDATE_QUESTION, SOFT_RELOAD_CARD } from "./types";
 import { updateQuestion } from "./updateQuestion";
 
diff --git a/frontend/src/metabase/query_builder/actions/core/initializeQB.ts b/frontend/src/metabase/query_builder/actions/core/initializeQB.ts
index 99482b906cb4551ce8751f1568660397ed165f82..678c67df453822fbb1c4d46246114018a04b9806 100644
--- a/frontend/src/metabase/query_builder/actions/core/initializeQB.ts
+++ b/frontend/src/metabase/query_builder/actions/core/initializeQB.ts
@@ -4,7 +4,7 @@ import querystring from "querystring";
 import { fetchAlertsForQuestion } from "metabase/alert/alert";
 import Questions from "metabase/entities/questions";
 import Snippets from "metabase/entities/snippets";
-import { deserializeCardFromUrl, loadCard } from "metabase/lib/card";
+import { deserializeCardFromUrl } from "metabase/lib/card";
 import { isNotNull } from "metabase/lib/types";
 import * as Urls from "metabase/lib/urls";
 import {
@@ -35,6 +35,7 @@ import { getQueryBuilderModeFromLocation } from "../../typed-utils";
 import { updateUrl } from "../navigation";
 import { cancelQuery, runQuestionQuery } from "../querying";
 
+import { loadCard } from "./card";
 import { resetQB } from "./core";
 import {
   getParameterValuesForQuestion,
diff --git a/frontend/src/metabase/query_builder/actions/core/initializeQB.unit.spec.ts b/frontend/src/metabase/query_builder/actions/core/initializeQB.unit.spec.ts
index 403dcae6bb91adb8a1a7039d344ab643c43a55a9..6c2798e61cef06c9ce8d384ab378d9c9c777e8db 100644
--- a/frontend/src/metabase/query_builder/actions/core/initializeQB.unit.spec.ts
+++ b/frontend/src/metabase/query_builder/actions/core/initializeQB.unit.spec.ts
@@ -39,6 +39,7 @@ import { createMockState } from "metabase-types/store/mocks";
 
 import * as querying from "../querying";
 
+import * as cardActions from "./card";
 import * as core from "./core";
 import { initializeQB } from "./initializeQB";
 
@@ -131,7 +132,9 @@ async function setup({
     fetchMock.get(`path:/api/card/${card.id}`, card);
   }
 
-  jest.spyOn(CardLib, "loadCard").mockReturnValue(Promise.resolve({ ...card }));
+  jest
+    .spyOn(cardActions, "loadCard")
+    .mockReturnValue(Promise.resolve({ ...card }));
 
   return baseSetup({ location, params, ...opts });
 }
@@ -440,7 +443,7 @@ describe("QB Actions > initializeQB", () => {
         fetchMock.get(`path:/api/card/${originalCard.id}`, originalCard);
 
         jest
-          .spyOn(CardLib, "loadCard")
+          .spyOn(cardActions, "loadCard")
           .mockReturnValueOnce(Promise.resolve({ ...originalCard }));
 
         return setup({ card: q, ...opts });
diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.tsx b/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.tsx
index bfc52e98f2a9639e2a64c59feaef09144c4a598d..07e46efb2e846cd386924c8080910cdb03b0bb14 100644
--- a/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.tsx
+++ b/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.tsx
@@ -1,29 +1,28 @@
-import { useState } from "react";
-import { useKeyPressEvent } from "react-use";
+import { useCallback, useState } from "react";
 import { t } from "ttag";
 
-import { isMac } from "metabase/lib/browser";
+import { ExportSettingsWidget } from "metabase/common/components/ExportSettingsWidget";
+import type { ExportFormat } from "metabase/common/types/export";
 import { exportFormatPng, exportFormats } from "metabase/lib/urls";
 import { PLUGIN_FEATURE_LEVEL_PERMISSIONS } from "metabase/plugins";
-import { Group, Icon, Stack, Text, Title, Tooltip } from "metabase/ui";
+import { Box, Button, Icon, Stack, Text, Title } from "metabase/ui";
 import { canSavePng } from "metabase/visualizations";
 import type Question from "metabase-lib/v1/Question";
 import type { Dataset } from "metabase-types/api";
 
-import { DownloadButton } from "./DownloadButton";
-import { checkCanManageFormatting } from "./utils";
-
 type QueryDownloadPopoverProps = {
   question: Question;
   result: Dataset;
-  onDownload: (opts: { type: string; enableFormatting: boolean }) => void;
+  onDownload: (opts: {
+    type: string;
+    enableFormatting: boolean;
+    enablePivot: boolean;
+  }) => void;
 };
 
-const getFormattingInfoTooltipLabel = () => {
-  return isMac()
-    ? t`Hold the Option key to download unformatted results`
-    : t`Hold the Alt key to download unformatted results`;
-};
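+// "Keep data pivoted" only applies to pivot tables and is not offered for json
+// exports; the formatted/unformatted toggle is hidden for png downloads.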
+const canConfigurePivoting = (format: string, display: string) =>
+  display === "pivot" && format !== "json";
+const canConfigureFormatting = (format: string) => format !== "png";
 
 export const QueryDownloadPopover = ({
   question,
@@ -31,60 +30,54 @@ export const QueryDownloadPopover = ({
   onDownload,
 }: QueryDownloadPopoverProps) => {
   const canDownloadPng = canSavePng(question.display());
+  const formats = canDownloadPng
+    ? [...exportFormats, exportFormatPng]
+    : exportFormats;
+
+  const [isPivoted, setIsPivoted] = useState(false);
+  const [isFormatted, setIsFormatted] = useState(true);
+  const [format, setFormat] = useState<ExportFormat>(formats[0]);
+
   const hasTruncatedResults =
     result.data != null && result.data.rows_truncated != null;
   const limitedDownloadSizeText =
     PLUGIN_FEATURE_LEVEL_PERMISSIONS.getDownloadWidgetMessageOverride(result) ??
     t`The maximum download size is 1 million rows.`;
 
-  const formats = canDownloadPng
-    ? [...exportFormats, exportFormatPng]
-    : exportFormats;
-
-  const [isAltPressed, toggleAltPressed] = useState(false);
-  useKeyPressEvent(
-    e => e.key === "Alt" || e.key === "Option",
-    () => {
-      toggleAltPressed(true);
-    },
-    () => {
-      toggleAltPressed(false);
-    },
-  );
+  const handleDownload = useCallback(() => {
+    onDownload({
+      type: format,
+      enableFormatting: isFormatted,
+      enablePivot: isPivoted,
+    });
+  }, [format, isFormatted, isPivoted, onDownload]);
 
   return (
-    <Stack w={hasTruncatedResults ? "18.75rem" : "16.25rem"}>
-      <Group align="center" position="apart" px="sm">
-        <Title order={4}>{t`Download full results`}</Title>
-        <Tooltip label={getFormattingInfoTooltipLabel()}>
-          <Icon name="info_filled" />
-        </Tooltip>
-      </Group>
-
+    <Stack w={hasTruncatedResults ? "18.75rem" : "16.25rem"} p={8}>
+      <Title order={4}>{t`Download`}</Title>
+      <ExportSettingsWidget
+        selectedFormat={format}
+        formats={formats}
+        isFormattingEnabled={isFormatted}
+        isPivotingEnabled={isPivoted}
+        canConfigureFormatting={canConfigureFormatting(format)}
+        canConfigurePivoting={canConfigurePivoting(format, question.display())}
+        onChangeFormat={setFormat}
+        onToggleFormatting={() => setIsFormatted(prev => !prev)}
+        onTogglePivoting={() => setIsPivoted(prev => !prev)}
+      />
       {hasTruncatedResults && (
-        <Text px="sm">
-          <div>{t`Your answer has a large number of rows so it could take a while to download.`}</div>
-          <div>{limitedDownloadSizeText}</div>
+        <Text size="sm" color="text-medium">
+          <Box mb="1rem">{t`Your answer has a large number of rows so it could take a while to download.`}</Box>
+          <Box>{limitedDownloadSizeText}</Box>
         </Text>
       )}
-
-      <Stack spacing="sm">
-        {formats.map(format => (
-          <DownloadButton
-            key={format}
-            format={format}
-            onClick={() => {
-              onDownload({
-                type: format,
-                enableFormatting: !(
-                  checkCanManageFormatting(format) && isAltPressed
-                ),
-              });
-            }}
-            isAltPressed={isAltPressed}
-          />
-        ))}
-      </Stack>
+      <Button
+        data-testid="download-results-button"
+        leftIcon={<Icon name="download" />}
+        variant="filled"
+        onClick={handleDownload}
+      >{t`Download`}</Button>
     </Stack>
   );
 };
diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.unit.spec.tsx b/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.unit.spec.tsx
index 00ad38b7ffdfb8b840618243285e6ab0e2d5d974..686ba82ca803196a088a962877259da858e9355b 100644
--- a/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.unit.spec.tsx
+++ b/frontend/src/metabase/query_builder/components/QueryDownloadPopover/QueryDownloadPopover.unit.spec.tsx
@@ -1,9 +1,8 @@
-import { within } from "@testing-library/react";
 import userEvent from "@testing-library/user-event";
 
 import { setupCardQueryDownloadEndpoint } from "__support__/server-mocks";
 import { createMockEntitiesState } from "__support__/store";
-import { act, fireEvent, renderWithProviders, screen } from "__support__/ui";
+import { act, renderWithProviders, screen } from "__support__/ui";
 import { checkNotNull } from "metabase/lib/types";
 import { getMetadata } from "metabase/selectors/metadata";
 import registerVisualizations from "metabase/visualizations/register";
@@ -83,66 +82,75 @@ describe("QueryDownloadPopover", () => {
   it("should trigger download on click", async () => {
     const { onDownload } = setup();
     await act(async () => await userEvent.click(screen.getByText(/csv/)));
+    await userEvent.click(await screen.findByTestId("download-results-button"));
     expect(onDownload).toHaveBeenCalledWith({
       type: "csv",
       enableFormatting: true,
+      enablePivot: false,
     });
   });
 
-  it.each(["csv", "json"])(
+  it.each(["csv", "json", "xlsx"])(
     "should trigger unformatted download for %s format",
     async format => {
       const { onDownload } = setup();
-      const _userEvent = userEvent.setup();
-
-      expect(screen.queryByText(/Unformatted/i)).not.toBeInTheDocument();
-      const downloadButton = () => {
-        return screen.getByRole("button", { name: new RegExp(format) });
-      };
-
-      await fireEvent.keyDown(downloadButton(), {
-        key: "Alt",
-      });
-
-      await act(async () => await _userEvent.hover(downloadButton()));
 
+      await userEvent.click(screen.getByLabelText(`.${format}`));
+      await userEvent.click(screen.getByLabelText(`Unformatted`));
       expect(
-        await within(downloadButton()).findByText(/Unformatted/i),
-      ).toBeVisible();
-
-      await act(async () => {
-        await _userEvent.click(downloadButton());
-      });
+        screen.queryByLabelText("Keep data pivoted"),
+      ).not.toBeInTheDocument();
+      await userEvent.click(
+        await screen.findByTestId("download-results-button"),
+      );
 
       expect(onDownload).toHaveBeenCalledWith({
         type: format,
         enableFormatting: false,
+        enablePivot: false,
       });
     },
   );
 
-  it.each(["xlsx", "png"])(
-    "should not trigger unformatted download for %s format",
-    async format => {
-      const { onDownload } = setup({ card: { ...TEST_CARD, display: "line" } });
+  it("should not trigger unformatted download for png format", async () => {
+    const format = "png";
+    const { onDownload } = setup({ card: { ...TEST_CARD, display: "line" } });
 
-      const downloadButton = () => {
-        return screen.getByRole("button", { name: new RegExp(format) });
-      };
+    await userEvent.click(screen.getByLabelText(`.${format}`));
+    expect(screen.queryByLabelText(`Formatted`)).not.toBeInTheDocument();
+    expect(
+      screen.queryByLabelText("Keep data pivoted"),
+    ).not.toBeInTheDocument();
+    await userEvent.click(await screen.findByTestId("download-results-button"));
 
-      expect(screen.queryByText(/Unformatted/i)).not.toBeInTheDocument();
-      await fireEvent.keyDown(downloadButton(), {
-        key: "Alt",
+    expect(onDownload).toHaveBeenCalledWith({
+      type: format,
+      enableFormatting: true,
+      enablePivot: false,
+    });
+  });
+
+  it.each(["csv", "xlsx"])(
+    "allows configure pivoting for %s format",
+    async format => {
+      const { onDownload } = setup({
+        card: {
+          ...TEST_CARD,
+          display: "pivot",
+        },
       });
-      expect(
-        within(downloadButton()).queryByText(/Unformatted/i),
-      ).not.toBeInTheDocument();
 
-      await act(async () => await userEvent.click(downloadButton()));
+      await userEvent.click(screen.getByLabelText(`.${format}`));
+      await userEvent.click(screen.getByLabelText(`Unformatted`));
+      await userEvent.click(screen.getByLabelText("Keep data pivoted"));
+      await userEvent.click(
+        await screen.findByTestId("download-results-button"),
+      );
 
       expect(onDownload).toHaveBeenCalledWith({
         type: format,
-        enableFormatting: true,
+        enableFormatting: false,
+        enablePivot: true,
       });
     },
   );
diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadPopover/use-download-data.ts b/frontend/src/metabase/query_builder/components/QueryDownloadPopover/use-download-data.ts
index 277234f89e948ca872288975a87a5924fc15a797..7297352e88304040ae3daac270b2f4ba1af97e78 100644
--- a/frontend/src/metabase/query_builder/components/QueryDownloadPopover/use-download-data.ts
+++ b/frontend/src/metabase/query_builder/components/QueryDownloadPopover/use-download-data.ts
@@ -26,6 +26,7 @@ export type UseDownloadDataParams = {
 type HandleDataDownloadParams = {
   type: string;
   enableFormatting: boolean;
+  enablePivot: boolean;
 };
 
 export const useDownloadData = ({
@@ -43,11 +44,16 @@ export const useDownloadData = ({
   const dispatch = useDispatch();
 
   return useAsyncFn(
-    async ({ type, enableFormatting }: HandleDataDownloadParams) => {
+    async ({
+      type,
+      enableFormatting,
+      enablePivot,
+    }: HandleDataDownloadParams) => {
       await dispatch(
         downloadQueryResults({
           type,
           enableFormatting,
+          enablePivot,
           question,
           result,
           dashboardId,
diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadWidget/QueryDownloadWidget.unit.spec.tsx b/frontend/src/metabase/query_builder/components/QueryDownloadWidget/QueryDownloadWidget.unit.spec.tsx
index 8af529f8e877bc658d4a096e4b567df5f6166644..806777e25976b7840172622023c44d25100edfc3 100644
--- a/frontend/src/metabase/query_builder/components/QueryDownloadWidget/QueryDownloadWidget.unit.spec.tsx
+++ b/frontend/src/metabase/query_builder/components/QueryDownloadWidget/QueryDownloadWidget.unit.spec.tsx
@@ -54,10 +54,9 @@ describe("QueryDownloadWidget", () => {
     setup();
 
     await userEvent.click(getIcon("download"));
-    await userEvent.unhover(getIcon("download"));
 
     expect(
-      await screen.findByText("Download full results"),
+      await screen.findByRole("heading", { name: /download/i }),
     ).toBeInTheDocument();
   });
 });
diff --git a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/AdHocQuestionLeftSide/AdHocQuestionLeftSide.tsx b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/AdHocQuestionLeftSide/AdHocQuestionLeftSide.tsx
index d9ae8a950ec71cede4c0de8efa26d119c6b90d26..982c1958dc7abfa9bb6124306c8b065383f0be73 100644
--- a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/AdHocQuestionLeftSide/AdHocQuestionLeftSide.tsx
+++ b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/AdHocQuestionLeftSide/AdHocQuestionLeftSide.tsx
@@ -7,15 +7,14 @@ import {
   ViewHeaderLeftSubHeading,
   ViewHeaderMainLeftContentContainer,
 } from "metabase/query_builder/components/view/ViewHeader/ViewTitleHeader.styled";
-import {
-  QuestionDataSource,
-  QuestionDescription,
-} from "metabase/query_builder/components/view/ViewHeader/components";
 import type { QueryModalType } from "metabase/query_builder/constants";
 import { MODAL_TYPES } from "metabase/query_builder/constants";
 import * as Lib from "metabase-lib";
 import type Question from "metabase-lib/v1/Question";
 
+import { QuestionDataSource } from "../QuestionDataSource";
+import { QuestionDescription } from "../QuestionDescription";
+
 interface AdHocQuestionLeftSideProps {
   question: Question;
   originalQuestion?: Question;
diff --git a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/HeaderCollectionBadge/HeaderCollectionBadge.jsx b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/HeaderCollectionBadge/HeaderCollectionBadge.jsx
index 4f3871b004a1fcb19bfb7591baa615e551f6ab71..e03bd1bce8968c504a74058c204920b885a1a724 100644
--- a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/HeaderCollectionBadge/HeaderCollectionBadge.jsx
+++ b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/HeaderCollectionBadge/HeaderCollectionBadge.jsx
@@ -2,7 +2,8 @@ import PropTypes from "prop-types";
 import { t } from "ttag";
 
 import * as Urls from "metabase/lib/urls";
-import { HeadBreadcrumbs } from "metabase/query_builder/components/view/ViewHeader/components";
+
+import { HeadBreadcrumbs } from "../HeaderBreadcrumbs";
 
 HeaderCollectionBadge.propTypes = {
   question: PropTypes.object.isRequired,
diff --git a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/SavedQuestionLeftSide/SavedQuestionLeftSide.tsx b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/SavedQuestionLeftSide/SavedQuestionLeftSide.tsx
index 549bb1ecd78d321d8312e1e24e08834395565fca..eab493ce947d142c6be7d45c20c5282b207df5f0 100644
--- a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/SavedQuestionLeftSide/SavedQuestionLeftSide.tsx
+++ b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/SavedQuestionLeftSide/SavedQuestionLeftSide.tsx
@@ -11,13 +11,12 @@ import {
   ViewHeaderLeftSubHeading,
   ViewHeaderMainLeftContentContainer,
 } from "metabase/query_builder/components/view/ViewHeader/ViewTitleHeader.styled";
-import {
-  HeadBreadcrumbs,
-  QuestionDataSource,
-} from "metabase/query_builder/components/view/ViewHeader/components";
-import { HeaderCollectionBadge } from "metabase/query_builder/components/view/ViewHeader/components/HeaderCollectionBadge/HeaderCollectionBadge";
 import type Question from "metabase-lib/v1/Question";
 
+import { HeadBreadcrumbs } from "../HeaderBreadcrumbs";
+import { HeaderCollectionBadge } from "../HeaderCollectionBadge";
+import { QuestionDataSource } from "../QuestionDataSource";
+
 interface SavedQuestionLeftSideProps {
   question: Question;
   isObjectDetail?: boolean;
diff --git a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/ViewTitleHeaderRightSide/ViewTitleHeaderRightSide.tsx b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/ViewTitleHeaderRightSide/ViewTitleHeaderRightSide.tsx
index 3a67ecfcfd253430459757de257e8758e05fbde5..8ace53965851875a2167849740401daf5bf6fcba 100644
--- a/frontend/src/metabase/query_builder/components/view/ViewHeader/components/ViewTitleHeaderRightSide/ViewTitleHeaderRightSide.tsx
+++ b/frontend/src/metabase/query_builder/components/view/ViewHeader/components/ViewTitleHeaderRightSide/ViewTitleHeaderRightSide.tsx
@@ -12,14 +12,6 @@ import {
   ViewHeaderIconButtonContainer,
   ViewRunButtonWithTooltip,
 } from "metabase/query_builder/components/view/ViewHeader/ViewTitleHeader.styled";
-import {
-  ExploreResultsLink,
-  FilterHeaderButton,
-  QuestionActions,
-  QuestionNotebookButton,
-  QuestionSummarizeWidget,
-  ToggleNativeQueryPreview,
-} from "metabase/query_builder/components/view/ViewHeader/components";
 import { canExploreResults } from "metabase/query_builder/components/view/ViewHeader/utils";
 import type { QueryModalType } from "metabase/query_builder/constants";
 import { MODAL_TYPES } from "metabase/query_builder/constants";
@@ -30,6 +22,13 @@ import type Question from "metabase-lib/v1/Question";
 import type { Dataset } from "metabase-types/api";
 import type { DatasetEditorTab, QueryBuilderMode } from "metabase-types/store";
 
+import { ExploreResultsLink } from "../ExploreResultsLink";
+import { FilterHeaderButton } from "../FilterHeaderButton";
+import { QuestionActions } from "../QuestionActions";
+import { QuestionNotebookButton } from "../QuestionNotebookButton";
+import { QuestionSummarizeWidget } from "../QuestionSummarizeWidget";
+import { ToggleNativeQueryPreview } from "../ToggleNativeQueryPreview";
+
 interface ViewTitleHeaderRightSideProps {
   question: Question;
   result: Dataset;
diff --git a/frontend/src/metabase/query_builder/containers/QueryBuilder.unit.spec.tsx b/frontend/src/metabase/query_builder/containers/QueryBuilder.unit.spec.tsx
index e84fb59d391d049f411dd8984517650fa64806a0..2107c2ced31b6aba6114827f9f1e931b0ee06a88 100644
--- a/frontend/src/metabase/query_builder/containers/QueryBuilder.unit.spec.tsx
+++ b/frontend/src/metabase/query_builder/containers/QueryBuilder.unit.spec.tsx
@@ -166,8 +166,9 @@ describe("QueryBuilder", () => {
       expect(inputArea).toHaveValue("SELECT 1");
 
       await userEvent.click(screen.getByTestId("download-button"));
+      await userEvent.click(await screen.findByLabelText(".csv"));
       await userEvent.click(
-        await screen.findByRole("button", { name: ".csv" }),
+        await screen.findByTestId("download-results-button"),
       );
 
       expect(mockDownloadEndpoint.called()).toBe(true);
@@ -194,8 +195,9 @@ describe("QueryBuilder", () => {
       expect(inputArea).toHaveValue("SELECT 1 union SELECT 2");
 
       await userEvent.click(screen.getByTestId("download-button"));
+      await userEvent.click(await screen.findByLabelText(".csv"));
       await userEvent.click(
-        await screen.findByRole("button", { name: ".csv" }),
+        await screen.findByTestId("download-results-button"),
       );
 
       const [url, options] = mockDownloadEndpoint.lastCall() as MockCall;
diff --git a/frontend/src/metabase/redux/downloads.ts b/frontend/src/metabase/redux/downloads.ts
index 535abea59b88ac2609b56d2fbf3122381634b859..76046612aafd8e34a88928b6cb183fd186790ebe 100644
--- a/frontend/src/metabase/redux/downloads.ts
+++ b/frontend/src/metabase/redux/downloads.ts
@@ -24,6 +24,7 @@ export interface DownloadQueryResultsOpts {
   question: Question;
   result: Dataset;
   enableFormatting?: boolean;
+  enablePivot?: boolean;
   dashboardId?: DashboardId;
   dashcardId?: DashCardId;
   uuid?: string;
@@ -153,7 +154,8 @@ const getDatasetParams = ({
   question,
   dashboardId,
   dashcardId,
-  enableFormatting,
+  enableFormatting = false,
+  enablePivot = false,
   uuid,
   token,
   params = {},
@@ -162,8 +164,10 @@ const getDatasetParams = ({
 }: DownloadQueryResultsOpts): DownloadQueryResultsParams => {
   const cardId = question.id();
 
-  // Formatting is always enabled for Excel
-  const format_rows = enableFormatting && type !== "xlsx" ? "true" : "false";
+  const exportParams = {
+    format_rows: enableFormatting,
+    pivot_results: enablePivot,
+  };
 
   const { accessedVia, resourceType: resource } = getDownloadedResourceType({
     dashboardId,
@@ -179,9 +183,9 @@ const getDatasetParams = ({
       return {
         method: "POST",
         url: `/api/public/dashboard/${dashboardId}/dashcard/${dashcardId}/card/${cardId}/${type}`,
-        params: new URLSearchParams({ format_rows }),
         body: {
           parameters: result?.json_query?.parameters ?? [],
+          ...exportParams,
         },
       };
     }
@@ -191,7 +195,6 @@ const getDatasetParams = ({
         url: Urls.publicQuestion({ uuid, type, includeSiteUrl: false }),
         params: new URLSearchParams({
           parameters: JSON.stringify(result?.json_query?.parameters ?? []),
-          format_rows,
         }),
       };
     }
@@ -203,7 +206,7 @@ const getDatasetParams = ({
       return {
         method: "GET",
         url: `/api/embed/dashboard/${token}/dashcard/${dashcardId}/card/${cardId}/${type}`,
-        params: Urls.getEncodedUrlSearchParams({ ...params, format_rows }),
+        params: Urls.getEncodedUrlSearchParams({ ...params, ...exportParams }),
       };
     }
 
@@ -211,7 +214,8 @@ const getDatasetParams = ({
       // For whatever wacky reason the /api/embed endpoint expect params like ?key=value instead
       // of like ?params=<json-encoded-params-array> like the other endpoints do.
       const params = new URLSearchParams(window.location.search);
-      params.set("format_rows", format_rows);
+      params.set("format_rows", String(enableFormatting));
+      params.set("pivot_results", String(enablePivot));
       return {
         method: "GET",
         url: Urls.embedCard(token, type),
@@ -226,9 +230,9 @@ const getDatasetParams = ({
     return {
       method: "POST",
       url: `/api/dashboard/${dashboardId}/dashcard/${dashcardId}/card/${cardId}/query/${type}`,
-      params: new URLSearchParams({ format_rows }),
       body: {
         parameters: result?.json_query?.parameters ?? [],
+        ...exportParams,
       },
     };
   }
@@ -237,9 +241,9 @@ const getDatasetParams = ({
     return {
       method: "POST",
       url: `/api/card/${cardId}/query/${type}`,
-      params: new URLSearchParams({ format_rows }),
       body: {
         parameters: result?.json_query?.parameters ?? [],
+        ...exportParams,
       },
     };
   }
@@ -247,10 +251,10 @@ const getDatasetParams = ({
     return {
       method: "POST",
       url: `/api/dataset/${type}`,
-      params: new URLSearchParams({ format_rows }),
       body: {
         query: _.omit(result?.json_query ?? {}, "constraints"),
         visualization_settings: visualizationSettings ?? {},
+        ...exportParams,
       },
     };
   }
diff --git a/frontend/src/metabase/sharing/components/AddEditSidebar/AddEditEmailSidebar.jsx b/frontend/src/metabase/sharing/components/AddEditSidebar/AddEditEmailSidebar.jsx
index 239d378a3f5ceaa05b1bef886a3a7f0d1ee25085..96ab7d1e52b08c55c293d4d1ad658165eee1dc5a 100644
--- a/frontend/src/metabase/sharing/components/AddEditSidebar/AddEditEmailSidebar.jsx
+++ b/frontend/src/metabase/sharing/components/AddEditSidebar/AddEditEmailSidebar.jsx
@@ -129,26 +129,6 @@ function _AddEditEmailSidebar({
             onChange={toggleSkipIfEmpty}
           />
         </div>
-        <div
-          className={cx(
-            CS.textBold,
-            CS.py2,
-            CS.flex,
-            CS.justifyBetween,
-            CS.alignCenter,
-            CS.borderTop,
-          )}
-        >
-          <div className={cx(CS.flex, CS.alignCenter)}>
-            <Heading>{t`Attach results`}</Heading>
-            <Icon
-              name="info"
-              className={cx(CS.textMedium, CS.ml1)}
-              size={12}
-              tooltip={t`Attachments can contain up to 2,000 rows of data.`}
-            />
-          </div>
-        </div>
         <EmailAttachmentPicker
           cards={pulse.cards}
           pulse={pulse}
diff --git a/frontend/src/metabase/sharing/components/EmailAttachmentPicker.jsx b/frontend/src/metabase/sharing/components/EmailAttachmentPicker.jsx
index 4ad01aa047d16c036dd4669581d6affc30c1fb9a..cfe124c4f46cb6b35c5362e6a957589105241065 100644
--- a/frontend/src/metabase/sharing/components/EmailAttachmentPicker.jsx
+++ b/frontend/src/metabase/sharing/components/EmailAttachmentPicker.jsx
@@ -4,12 +4,12 @@ import { Component } from "react";
 import { t } from "ttag";
 import _ from "underscore";
 
-import { SegmentedControl } from "metabase/components/SegmentedControl";
+import { ExportSettingsWidget } from "metabase/common/components/ExportSettingsWidget";
 import { StackedCheckBox } from "metabase/components/StackedCheckBox";
-import Label from "metabase/components/type/Label";
 import CheckBox from "metabase/core/components/CheckBox";
 import Toggle from "metabase/core/components/Toggle";
 import CS from "metabase/css/core/index.css";
+import { Box, Group, Icon, Text } from "metabase/ui";
 
 export default class EmailAttachmentPicker extends Component {
   DEFAULT_ATTACHMENT_TYPE = "csv";
@@ -17,6 +17,7 @@ export default class EmailAttachmentPicker extends Component {
   state = {
     isEnabled: false,
     isFormattingEnabled: true,
+    isPivotingEnabled: false,
     selectedAttachmentType: this.DEFAULT_ATTACHMENT_TYPE,
     selectedCardIds: new Set(),
   };
@@ -44,11 +45,14 @@ export default class EmailAttachmentPicker extends Component {
     }
   }
 
-  calculateStateFromCards() {
-    const { cards } = this.props;
-    const selectedCards = cards.filter(card => {
+  _getCardsWithAttachments() {
+    return this.props.cards.filter(card => {
       return card.include_csv || card.include_xls;
     });
+  }
+
+  calculateStateFromCards() {
+    const selectedCards = this._getCardsWithAttachments();
 
     return {
       isEnabled: selectedCards.length > 0,
@@ -56,9 +60,14 @@ export default class EmailAttachmentPicker extends Component {
         this.attachmentTypeFor(selectedCards) || this.DEFAULT_ATTACHMENT_TYPE,
       selectedCardIds: new Set(selectedCards.map(card => card.id)),
       isFormattingEnabled: getInitialFormattingState(selectedCards),
+      isPivotingEnabled: getInitialPivotingState(selectedCards),
     };
   }
 
+  canConfigurePivoting() {
+    return this.props.cards.some(card => card.display === "pivot");
+  }
+
   shouldUpdateState(newState, currentState) {
     return (
       (currentState.isEnabled || !newState.isEnabled) &&
@@ -73,9 +82,9 @@ export default class EmailAttachmentPicker extends Component {
    */
   updatePulseCards(attachmentType, selectedCardIds) {
     const { pulse, setPulse } = this.props;
-    const { isFormattingEnabled } = this.state;
+    const { isFormattingEnabled, isPivotingEnabled } = this.state;
 
-    const isXls = attachmentType === "xls",
+    const isXls = attachmentType === "xlsx",
       isCsv = attachmentType === "csv";
 
     this.setState({ selectedAttachmentType: attachmentType });
@@ -86,6 +95,7 @@ export default class EmailAttachmentPicker extends Component {
         card.include_csv = selectedCardIds.has(card.id) && isCsv;
         card.include_xls = selectedCardIds.has(card.id) && isXls;
         card.format_rows = isCsv && isFormattingEnabled; // Excel always uses formatting
+        card.pivot_results = card.display === "pivot" && isPivotingEnabled;
         return card;
       }),
     });
@@ -103,7 +113,7 @@ export default class EmailAttachmentPicker extends Component {
 
   attachmentTypeFor(cards) {
     if (cards.some(c => c.include_xls)) {
-      return "xls";
+      return "xlsx";
     } else if (cards.some(c => c.include_csv)) {
       return "csv";
     } else {
@@ -185,6 +195,21 @@ export default class EmailAttachmentPicker extends Component {
     );
   };
 
+  onTogglePivoting = () => {
+    this.setState(
+      prevState => ({
+        ...prevState,
+        isPivotingEnabled: !prevState.isPivotingEnabled,
+      }),
+      () => {
+        this.updatePulseCards(
+          this.state.selectedAttachmentType,
+          this.state.selectedCardIds,
+        );
+      },
+    );
+  };
+
   disableAllCards() {
     const selectedCardIds = new Set();
     this.updatePulseCards(this.state.selectedAttachmentType, selectedCardIds);
@@ -204,41 +229,44 @@ export default class EmailAttachmentPicker extends Component {
     const {
       isEnabled,
       isFormattingEnabled,
+      isPivotingEnabled,
       selectedAttachmentType,
       selectedCardIds,
     } = this.state;
 
     return (
       <div>
-        <Toggle
-          aria-label={t`Attach results`}
-          value={isEnabled}
-          onChange={this.toggleAttach}
-        />
-
+        <Group className={CS.borderTop} position="apart" pt="1.5rem">
+          <Group position="left" spacing="0">
+            <Text fw="bold">{t`Attach results as files`}</Text>
+            <Icon
+              name="info"
+              className={cx(CS.textMedium, CS.ml1)}
+              size={12}
+              tooltip={t`Attachments can contain up to 2,000 rows of data.`}
+            />
+          </Group>
+          <Toggle
+            aria-label={t`Attach results`}
+            value={isEnabled}
+            onChange={this.toggleAttach}
+          />
+        </Group>
         {isEnabled && (
           <div>
-            <div className={cx(CS.my1, CS.flex, CS.justifyBetween)}>
-              <Label className={CS.pt1}>{t`File format`}</Label>
-              <SegmentedControl
-                options={[
-                  { name: ".csv", value: "csv" },
-                  { name: ".xlsx", value: "xls" },
-                ]}
-                onChange={this.setAttachmentType}
-                value={selectedAttachmentType}
-                fullWidth
+            <Box py="1rem">
+              <ExportSettingsWidget
+                selectedFormat={selectedAttachmentType}
+                formats={["csv", "xlsx"]}
+                isFormattingEnabled={isFormattingEnabled}
+                isPivotingEnabled={isPivotingEnabled}
+                canConfigureFormatting={selectedAttachmentType === "csv"}
+                canConfigurePivoting={this.canConfigurePivoting()}
+                onChangeFormat={this.setAttachmentType}
+                onToggleFormatting={this.onToggleFormatting}
+                onTogglePivoting={this.onTogglePivoting}
               />
-            </div>
-            {selectedAttachmentType === "csv" && (
-              <div className={cx(CS.mt2, CS.mb3)}>
-                <CheckBox
-                  checked={!isFormattingEnabled}
-                  label={t`Use unformatted values in attachments`}
-                  onChange={this.onToggleFormatting}
-                />
-              </div>
-            )}
+            </Box>
             <div
               className={cx(
                 CS.textBold,
@@ -305,3 +333,10 @@ function getInitialFormattingState(cards) {
   }
   return true;
 }
+
+function getInitialPivotingState(cards) {
+  if (cards.length > 0) {
+    return cards.some(card => !!card.pivot_results);
+  }
+  return false;
+}
diff --git a/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js b/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js
index 14672da0be49e42e56182b8fb8128f9d0ac30f3c..5373c53b09dee631332fb65181ff8c01237789fa 100644
--- a/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js
+++ b/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js
@@ -1,4 +1,8 @@
-import { fireEvent, render, screen } from "@testing-library/react";
+import { fireEvent, screen } from "@testing-library/react";
+
+import { mockSettings } from "__support__/settings";
+import { renderWithProviders } from "__support__/ui";
+import { createMockState } from "metabase-types/store/mocks";
 
 import EmailAttachmentPicker from "./EmailAttachmentPicker";
 
@@ -9,12 +13,19 @@ function setup({ pulse = createPulse(), hasAttachments = false } = {}) {
     pulse.cards[0]["include_xls"] = true;
   }
 
-  render(
+  const state = createMockState({
+    settings: mockSettings({
+      "application-name": "Metabase",
+    }),
+  });
+
+  renderWithProviders(
     <EmailAttachmentPicker
       cards={pulse.cards}
       pulse={pulse}
       setPulse={setPulse}
     />,
+    { storeInitialState: state },
   );
 
   return { setPulse };
diff --git a/frontend/src/metabase/status/components/DownloadsStatus/DownloadsStatus.unit.spec.tsx b/frontend/src/metabase/status/components/DownloadsStatus/DownloadsStatus.unit.spec.tsx
index 5d57d8d72d94589eaa24e6884dfa0fae80ed22a7..7fb38adfbffb033e773daeae9c23832aec4ec477 100644
--- a/frontend/src/metabase/status/components/DownloadsStatus/DownloadsStatus.unit.spec.tsx
+++ b/frontend/src/metabase/status/components/DownloadsStatus/DownloadsStatus.unit.spec.tsx
@@ -56,7 +56,7 @@ describe("DownloadsStatus", () => {
     const dispatch = store.dispatch as Dispatch;
 
     fetchMock.post(
-      "http://localhost/api/card/1/query/csv?format_rows=false",
+      "http://localhost/api/card/1/query/csv",
       {
         headers: { "Content-Disposition": 'filename="test.csv"' },
       },
@@ -94,7 +94,7 @@ describe("DownloadsStatus", () => {
     const dispatch = store.dispatch as Dispatch;
 
     fetchMock.post(
-      "http://localhost/api/card/1/query/csv?format_rows=false",
+      "http://localhost/api/card/1/query/csv",
       {
         throws: new Error("Network error"),
       },
@@ -139,7 +139,7 @@ describe("DownloadsStatus", () => {
     const dispatch = store.dispatch as Dispatch;
 
     fetchMock.post(
-      "http://localhost/api/card/1/query/csv?format_rows=false",
+      "http://localhost/api/card/1/query/csv",
       {
         headers: { "Content-Disposition": 'filename="test.csv"' },
       },
diff --git a/frontend/src/metabase/ui/components/inputs/Radio/Radio.stories.mdx b/frontend/src/metabase/ui/components/inputs/Radio/Radio.stories.mdx
new file mode 100644
index 0000000000000000000000000000000000000000..87c38f0672996fccdc03e241fe88b488e66a44a2
--- /dev/null
+++ b/frontend/src/metabase/ui/components/inputs/Radio/Radio.stories.mdx
@@ -0,0 +1,135 @@
+import { Canvas, Story, Meta } from "@storybook/addon-docs";
+import { Radio, Stack, Group } from "metabase/ui";
+
+export const args = {
+  label: "Label",
+  description: "",
+  disabled: false,
+  labelPosition: "right",
+};
+
+export const argTypes = {
+  label: {
+    control: { type: "text" },
+  },
+  description: {
+    control: { type: "text" },
+  },
+  disabled: {
+    control: { type: "boolean" },
+  },
+  labelPosition: {
+    options: ["left", "right"],
+    control: { type: "inline-radio" },
+  },
+};
+
+<Meta title="Inputs/Radio" component={Radio} args={args} argTypes={argTypes} />
+
+# Radio
+
+Our themed wrapper around [Mantine Radio](https://v6.mantine.dev/core/radio/).
+
+## When to use Radio
+
+Radio buttons allow users to select a single option from a list of mutually exclusive options. All possible options are exposed up front for users to compare.
+
+## Docs
+
+- [Figma File](https://www.figma.com/file/7LCGPhkbJdrhdIaeiU1O9c/Input-%2F-Radio?type=design&node-id=1-96&mode=design&t=yaNljw178EFJeU7k-0)
+- [Mantine Radio Docs](https://v6.mantine.dev/core/radio/)
+
+## Usage guidelines
+
+- **Use this component if there are fewer than 5 options**. If there are more options, feel free to check out Select.
+- For option ordering, try to use your best judgement on a sensible order. For example, Yes should come before No. Alphabetical ordering is usually a good fallback if there's no inherent order in your set of choices.
+- In almost all circumstances you'll want to use `<Radio.Group>` to provide a set of options and help with defaultValues and state management between them.
+
+## Examples
+
+export const DefaultTemplate = args => <Radio {...args} />;
+
+export const RadioGroupTemplate = args => (
+  <Radio.Group
+    defaultValue={"react"}
+    label="An array of good frameworks"
+    description="But which one to use?"
+  >
+    <Stack mt="md">
+      <Radio value="react" label="React" />
+      <Radio value="svelte" label="Svelte" />
+      <Radio value="ng" label="Angular" />
+      <Radio value="vue" label="Vue" />
+    </Stack>
+  </Radio.Group>
+);
+
+export const StateTemplate = args => (
+  <Stack>
+    <Radio {...args} label="Default radio" />
+    <Radio {...args} label="Checked radio" defaultChecked />
+    <Radio {...args} label="Disabled radio" disabled />
+    <Radio {...args} label="Disabled checked radio" disabled defaultChecked />
+  </Stack>
+);
+
+export const Default = DefaultTemplate.bind({});
+
+<Canvas>
+  <Story name="Default">{Default}</Story>
+</Canvas>
+
+### Radio.Group
+
+export const RadioGroup = RadioGroupTemplate.bind({});
+
+<Canvas>
+  <Story name="Radio group">{RadioGroup}</Story>
+</Canvas>
+
+### Label
+
+export const Label = StateTemplate.bind({});
+
+<Canvas>
+  <Story name="Label">{Label}</Story>
+</Canvas>
+
+#### Left label position
+
+export const LabelLeft = StateTemplate.bind({});
+LabelLeft.args = {
+  labelPosition: "left",
+};
+
+<Canvas>
+  <Story name="Label, left position">{LabelLeft}</Story>
+</Canvas>
+
+### Description
+
+export const Description = StateTemplate.bind({});
+Description.args = {
+  description: "Description",
+};
+
+<Canvas>
+  <Story name="Description">{Description}</Story>
+</Canvas>
+
+#### Left label position
+
+export const DescriptionLeft = StateTemplate.bind({});
+DescriptionLeft.args = {
+  description: "Description",
+  labelPosition: "left",
+};
+
+<Canvas>
+  <Story name="Description, left position">{DescriptionLeft}</Story>
+</Canvas>
+
+## Related components
+
+- Checkbox
+- Select
diff --git a/frontend/src/metabase/ui/components/inputs/Radio/Radio.styled.tsx b/frontend/src/metabase/ui/components/inputs/Radio/Radio.styled.tsx
index 503f925277b2df306bf9904dcd43ce90b4c9905a..33d9cbad5878419777df0de33a818a5027720fba 100644
--- a/frontend/src/metabase/ui/components/inputs/Radio/Radio.styled.tsx
+++ b/frontend/src/metabase/ui/components/inputs/Radio/Radio.styled.tsx
@@ -1,8 +1,4 @@
-import type {
-  MantineTheme,
-  MantineThemeOverride,
-  RadioStylesParams,
-} from "@mantine/core";
+import type { MantineTheme, MantineThemeOverride } from "@mantine/core";
 import { getSize, getStylesRef, rem } from "@mantine/core";
 
 const SIZES = {
@@ -14,11 +10,7 @@ export const getRadioOverrides = (): MantineThemeOverride["components"] => ({
     defaultProps: {
       size: "md",
     },
-    styles: (
-      theme: MantineTheme,
-      { labelPosition: _labelPosition }: RadioStylesParams,
-      { size = "md" },
-    ) => ({
+    styles: (theme: MantineTheme, { size = "md" }) => ({
       root: {
         [`&:has(.${getStylesRef("input")}:disabled)`]: {
           [`.${getStylesRef("label")}`]: {
diff --git a/frontend/src/metabase/visualizations/components/LegendHorizontal.jsx b/frontend/src/metabase/visualizations/components/LegendHorizontal.jsx
index d63fcd82994978849674b2fa686e509c5072c9e6..61fbb429593b47272e91ffc15d49779b21f2a0c8 100644
--- a/frontend/src/metabase/visualizations/components/LegendHorizontal.jsx
+++ b/frontend/src/metabase/visualizations/components/LegendHorizontal.jsx
@@ -13,7 +13,7 @@ export default class LegendHorizontal extends Component {
       className,
       titles,
       colors,
-      hiddenIndices,
+      hiddenIndices = [],
       hovered,
       onHoverChange,
       onToggleSeriesVisibility,
diff --git a/modules/drivers/databricks/src/metabase/driver/databricks.clj b/modules/drivers/databricks/src/metabase/driver/databricks.clj
index 7772c1eea8033b0061647a469c5991e2147953be..c10cd6fe8f579b8d9950ddefdc15fc2af41c9614 100644
--- a/modules/drivers/databricks/src/metabase/driver/databricks.clj
+++ b/modules/drivers/databricks/src/metabase/driver/databricks.clj
@@ -10,6 +10,7 @@
    [metabase.driver.sql-jdbc.execute.legacy-impl :as sql-jdbc.legacy]
    [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
    [metabase.driver.sql.query-processor :as sql.qp]
+   [metabase.driver.sync :as driver.s]
    [metabase.query-processor.timezone :as qp.timezone]
    [metabase.util :as u]
    [metabase.util.honey-sql-2 :as h2x]
@@ -44,6 +45,39 @@
     ((get-method sql-jdbc.sync/database-type->base-type :hive-like)
      driver database-type)))
 
+(defn- get-tables-sql
+  [catalog]
+  (assert (string? (not-empty catalog)))
+  [(str/join
+    "\n"
+    ["select"
+     "  TABLE_NAME as name,"
+     "  TABLE_SCHEMA as schema,"
+     "  COMMENT description"
+     "  from information_schema.tables"
+     "  where TABLE_CATALOG = ?"
+     "    AND TABLE_SCHEMA <> 'information_schema'"])
+   catalog])
+
+(defn- describe-database-tables
+  [database]
+  (let [[inclusion-patterns
+         exclusion-patterns] (driver.s/db-details->schema-filter-patterns database)
+        syncable? (fn [schema]
+                    (driver.s/include-schema? inclusion-patterns exclusion-patterns schema))]
+    (eduction
+     (filter (comp syncable? :schema))
+     (sql-jdbc.execute/reducible-query database (get-tables-sql (-> database :details :catalog))))))
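+
+;; Rough usage sketch (hypothetical catalog name; the row shape mirrors the driver tests below):
+;;   (get-tables-sql "my_catalog")
+;;   ;; => a parameterized [sql "my_catalog"] vector
+;;   (into #{} (describe-database-tables database))
+;;   ;; => #{{:name "venues", :schema "test-data", :description nil} ...}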
+
+(defmethod driver/describe-database :databricks
+  [driver database]
+  (try
+    {:tables (into #{} (describe-database-tables database))}
+    (catch Throwable e
+      (throw (ex-info (format "Error in %s describe-database: %s" driver (ex-message e))
+                      {}
+                      e)))))
+
 (defmethod sql-jdbc.sync/describe-fields-sql :databricks
   [driver & {:keys [schema-names table-names]}]
   (sql/format {:select [[:c.column_name :name]
diff --git a/modules/drivers/databricks/test/metabase/driver/databricks_test.clj b/modules/drivers/databricks/test/metabase/driver/databricks_test.clj
index 22daf1967a1116c0f71da6221c1fab68bbf261fc..0424983f288213aae360d6af298875f65e1397c3 100644
--- a/modules/drivers/databricks/test/metabase/driver/databricks_test.clj
+++ b/modules/drivers/databricks/test/metabase/driver/databricks_test.clj
@@ -8,10 +8,11 @@
    [metabase.test :as mt]
    [toucan2.core :as t2]))
 
+;; Because the test datasets are preloaded, it is fine to just modify the database details to sync other schemas.
 (deftest ^:parallel sync-test
-  (testing "`driver/describe-database` implementation returns expected resutls."
-    (mt/test-driver
-      :databricks
+  (mt/test-driver
+    :databricks
+    (testing "`driver/describe-database` implementation returns expected results for inclusion of test-data schema."
       (is (= {:tables
               #{{:name "venues", :schema "test-data", :description nil}
                 {:name "checkins", :schema "test-data", :description nil}
@@ -21,7 +22,27 @@
                 {:name "reviews", :schema "test-data", :description nil}
                 {:name "orders", :schema "test-data", :description nil}
                 {:name "products", :schema "test-data", :description nil}}}
-             (driver/describe-database :databricks (mt/db)))))))
+             (driver/describe-database :databricks (mt/db)))))
+    (testing "`driver/describe-database` returns expected results for `all` schema filters."
+      (let [actual-tables (driver/describe-database :databricks (-> (mt/db)
+                                                                    (update :details dissoc :schema-filters-patterns)
+                                                                    (update :details assoc  :schema-filters-type "all")))]
+        (testing "tables from multiple schemas were found"
+          (is (contains? (:tables actual-tables) {:name "venues", :schema "test-data", :description nil}))
+          (is (contains? (:tables actual-tables) {:name "checkins", :schema "test-data", :description nil}))
+          (is (contains? (:tables actual-tables) {:name "airport", :schema "airports", :description nil}))
+          (is (contains? (:tables actual-tables) {:name "bird", :schema "bird-flocks", :description nil})))
+        (testing "information_schema is excluded"
+          (is (empty? (filter #(= "information_schema" (:schema %)) (:tables actual-tables)))))))
+    (testing "`driver/describe-database` returns expected results for `exclusion` schema filters."
+      (let [actual-tables (driver/describe-database :databricks (update (mt/db) :details assoc
+                                                                        :schema-filters-patterns "test-data"
+                                                                        :schema-filters-type "exclusion"))]
+        (testing "tables from multiple schemas were found"
+          (is (not (contains? (:tables actual-tables) {:name "venues", :schema "test-data", :description nil})))
+          (is (not (contains? (:tables actual-tables) {:name "checkins", :schema "test-data", :description nil})))
+          (is (contains? (:tables actual-tables) {:name "airport", :schema "airports", :description nil}))
+          (is (contains? (:tables actual-tables) {:name "bird", :schema "bird-flocks", :description nil})))))))
 
 (deftest ^:parallel describe-fields-test
   (testing "`describe-fields` returns expected values"
diff --git a/modules/drivers/databricks/test/metabase/test/data/databricks.clj b/modules/drivers/databricks/test/metabase/test/data/databricks.clj
index e5df3cf627fe4b1ec01d9a55ffe806ca7cf2dcf0..60c64c84a3c33e97c2ae56bbba055c3f4066a271 100644
--- a/modules/drivers/databricks/test/metabase/test/data/databricks.clj
+++ b/modules/drivers/databricks/test/metabase/test/data/databricks.clj
@@ -126,7 +126,7 @@
   [driver dbdef]
   (if *allow-database-deletion*
     ((get-method tx/destroy-db! :sql-jdbc/test-extensions) driver dbdef)
-    (log/warn "`*allow-database-creation*` is `false`. Database removal is suppressed.")))
+    (log/warn "`*allow-database-deletion*` is `false`. Database removal is suppressed.")))
 
 ;; Differences to the :sql-jdbc/test-extensions original: false transactions, not using `jdbc/execute!` for
 ;; timezone setting, not overriding database timezone.
diff --git a/modules/drivers/mongo/deps.edn b/modules/drivers/mongo/deps.edn
index 4c06c954495722a8ebfe23664613e2c1bb36b0c1..79d781ec752f7e88e0d868a4b9c9dbf218f0c2f7 100644
--- a/modules/drivers/mongo/deps.edn
+++ b/modules/drivers/mongo/deps.edn
@@ -3,4 +3,4 @@
 
  :deps
  {com.google.guava/guava {:mvn/version "33.1.0-jre"}
-  org.mongodb/mongodb-driver-sync {:mvn/version "5.0.1"}}}
+  org.mongodb/mongodb-driver-sync {:mvn/version "5.2.0"}}}
diff --git a/modules/drivers/mongo/src/metabase/driver/mongo.clj b/modules/drivers/mongo/src/metabase/driver/mongo.clj
index 319b4d127f57061647ad5f446684dbf97bbd47ee..ad38b59b87f35158e38b995b4cd0f294660d7bd9 100644
--- a/modules/drivers/mongo/src/metabase/driver/mongo.clj
+++ b/modules/drivers/mongo/src/metabase/driver/mongo.clj
@@ -4,10 +4,10 @@
    [cheshire.core :as json]
    [cheshire.generate :as json.generate]
    [clojure.string :as str]
-   [flatland.ordered.map :as ordered-map]
+   [clojure.walk :as walk]
+   [medley.core :as m]
    [metabase.db.metadata-queries :as metadata-queries]
    [metabase.driver :as driver]
-   [metabase.driver.common :as driver.common]
    [metabase.driver.mongo.connection :as mongo.connection]
    [metabase.driver.mongo.database :as mongo.db]
    [metabase.driver.mongo.execute :as mongo.execute]
@@ -18,9 +18,12 @@
    [metabase.driver.util :as driver.u]
    [metabase.lib.metadata :as lib.metadata]
    [metabase.lib.metadata.protocols :as lib.metadata.protocols]
+   [metabase.lib.schema.common :as lib.schema.common]
+   [metabase.query-processor :as qp]
    [metabase.query-processor.store :as qp.store]
    [metabase.util :as u]
    [metabase.util.log :as log]
+   [metabase.util.malli :as mu]
    [taoensso.nippy :as nippy])
   (:import
    (com.mongodb.client MongoClient MongoDatabase)
@@ -95,90 +98,11 @@
 
 ;;; ### Syncing
 
-(declare update-field-attrs)
-
 (defmethod driver/sync-in-context :mongo
   [_ database do-sync-fn]
   (mongo.connection/with-mongo-client [_ database]
     (do-sync-fn)))
 
-(defn- val->semantic-type [field-value]
-  (cond
-    ;; 1. url?
-    (and (string? field-value)
-         (u/url? field-value))
-    :type/URL
-
-    ;; 2. json?
-    (and (string? field-value)
-         (or (str/starts-with? field-value "{")
-             (str/starts-with? field-value "[")))
-    (when-let [j (u/ignore-exceptions (json/parse-string field-value))]
-      (when (or (map? j)
-                (sequential? j))
-        :type/SerializedJSON))))
-
-(defn- find-nested-fields [field-value nested-fields]
-  (loop [[k & more-keys] (keys field-value)
-         fields nested-fields]
-    (if-not k
-      fields
-      (recur more-keys (update fields k (partial update-field-attrs (k field-value)))))))
-
-(defn- update-field-attrs [field-value field-def]
-  (-> field-def
-      (update :count u/safe-inc)
-      (update :len #(if (string? field-value)
-                      (+ (or % 0) (count field-value))
-                      %))
-      (update :types (fn [types]
-                       (update types (type field-value) u/safe-inc)))
-      (update :semantic-types (fn [semantic-types]
-                                (if-let [st (val->semantic-type field-value)]
-                                  (update semantic-types st u/safe-inc)
-                                  semantic-types)))
-      (update :nested-fields (fn [nested-fields]
-                               (if (map? field-value)
-                                 (find-nested-fields field-value nested-fields)
-                                 nested-fields)))))
-
-(defn- most-common-object-type
-  "Given a sequence of tuples like [Class <number-of-occurances>] return the Class with the highest number of
-  occurances. The basic idea here is to take a sample of values for a Field and then determine the most common type
-  for its values, and use that as the Metabase base type. For example if we have a Field called `zip_code` and it's a
-  number 90% of the time and a string the other 10%, we'll just call it a `:type/Number`."
-  ^Class [field-types]
-  (when (seq field-types)
-    (first (apply max-key second field-types))))
-
-(defn- class->base-type [^Class klass]
-  (if (isa? klass org.bson.types.ObjectId)
-    :type/MongoBSONID
-    (driver.common/class->base-type klass)))
-
-(defn- describe-table-field [field-kw field-info idx]
-  (let [most-common-object-type  (most-common-object-type (:types field-info))
-        [nested-fields idx-next]
-        (reduce
-         (fn [[nested-fields idx] nested-field]
-           (let [[nested-field idx-next] (describe-table-field nested-field
-                                                               (nested-field (:nested-fields field-info))
-                                                               idx)]
-             [(conj nested-fields nested-field) idx-next]))
-         [#{} (inc idx)]
-         (keys (:nested-fields field-info)))]
-    [(cond-> {:name              (name field-kw)
-              :database-type     (some-> most-common-object-type .getName)
-              :base-type         (class->base-type most-common-object-type)
-              :database-position idx}
-       (= :_id field-kw)           (assoc :pk? true)
-       (:semantic-types field-info) (assoc :semantic-type (->> (:semantic-types field-info)
-                                                               (filterv #(some? (first %)))
-                                                               (sort-by second)
-                                                               last
-                                                               first))
-       (:nested-fields field-info) (assoc :nested-fields nested-fields)) idx-next]))
-
 (defmethod driver/dbms-version :mongo
   [_driver database]
   (mongo.connection/with-mongo-database [db database]
@@ -218,45 +142,255 @@
                     :value %}))
            set))))
 
-(defn- sample-documents [^MongoDatabase db table sort-direction]
-  (let [coll (mongo.util/collection db (:name table))]
-    (mongo.util/do-find coll {:keywordize true
-                              :limit metadata-queries/nested-field-sample-limit
-                              :skip 0
-                              :sort-criteria [[:_id sort-direction]]
-                              :batch-size 256})))
-
-(defn- table-sample-column-info
-  "Sample the rows (i.e., documents) in `table` and return a map of information about the column keys we found in that
-   sample. The results will look something like:
-
-      {:_id      {:count 200, :len nil, :types {java.lang.Long 200}, :semantic-types nil, :nested-fields nil},
-       :severity {:count 200, :len nil, :types {java.lang.Long 200}, :semantic-types nil, :nested-fields nil}}"
-  [^MongoDatabase db table]
-  (try
-    (reduce
-     (fn [field-defs row]
-       (loop [[k & more-keys] (keys row), fields field-defs]
-         (if-not k
-           fields
-           (recur more-keys (update fields k (partial update-field-attrs (k row)))))))
-     (ordered-map/ordered-map)
-     (concat (sample-documents db table 1) (sample-documents db table -1)))
-    (catch Throwable t
-      (log/error (format "Error introspecting collection: %s" (:name table)) t))))
+(defn- describe-table-query-step
+  "A single reduction step in the [[describe-table-query]] pipeline.
+  At the end of each step the output is a combination of 'result' and 'item' objects. There is one 'result' for each
+  path which has the most common type for that path. 'item' objects have yet to be aggregated into 'result' objects.
+  Each object has the following keys:
+  - result: true means the object represents a 'result', false means it represents an 'item' to be further processed.
+  - path:   The path to the field in the document.
+  - type:   If 'item', the type of the field's value. If 'result', the most common type for the field.
+  - index:  If 'item', the index of the field in the parent object. If 'result', it is the minimum of such indices.
+  - object: If 'item', the value of the field if it's an object. If 'result', it is null."
+  [max-depth depth]
+  [{"$facet"
+    (cond-> {"results"    [{"$match" {"result" true}}]
+             "newResults" [{"$match" {"result" false}}
+                           {"$group" {"_id"   {"type" "$type"
+                                               "path" "$path"}
+                                      ;; count is zero if type is "null" so we only select "null" as the type if there
+                                      ;; is no other type for the path
+                                      "count" {"$sum" {"$cond" {"if"   {"$eq" ["$type" "null"]}
+                                                                "then" 0
+                                                                "else" 1}}}
+                                      "index" {"$min" "$index"}}}
+                           {"$sort" {"count" -1}}
+                           {"$group" {"_id"      "$_id.path"
+                                      "type"     {"$first" "$_id.type"}
+                                      "index"    {"$min" "$index"}}}
+                           {"$project" {"path"   "$_id"
+                                        "type"   1
+                                        "result" {"$literal" true}
+                                        "object" nil
+                                        "index"  1}}]}
+      (not= depth max-depth)
+      (assoc "nextItems" [{"$match" {"result" false, "object" {"$ne" nil}}}
+                          {"$project" {"path" 1
+                                       "kvs"  {"$map" {"input" {"$objectToArray" "$object"}
+                                                       "as"    "item"
+                                                       "in"    {"k"      "$$item.k"
+                                                                ;; we only need v in the next step if it's an object
+                                                                "object" {"$cond" {"if"   {"$eq" [{"$type" "$$item.v"} "object"]}
+                                                                                   "then" "$$item.v"
+                                                                                   "else" nil}}
+                                                                "type"   {"$type" "$$item.v"}}}}}}
+                          {"$unwind" {"path" "$kvs", "includeArrayIndex" "index"}}
+                          {"$project" {"path"   {"$concat" ["$path" "." "$kvs.k"]}
+                                       "type"   "$kvs.type"
+                                       "result" {"$literal" false}
+                                       "index"  1
+                                       "object" "$kvs.object"}}]))}
+   {"$project" {"acc" {"$concatArrays" (cond-> ["$results" "$newResults"]
+                                         (not= depth max-depth)
+                                         (conj "$nextItems"))}}}
+   {"$unwind" "$acc"}
+   {"$replaceRoot" {"newRoot" "$acc"}}])
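+
+;; For orientation, a sketch of the objects flowing between steps (illustrative values only):
+;;   'item':   {"result" false, "path" "b.c", "type" "string", "index" 0, "object" nil}
+;;   'result': {"result" true,  "path" "a",   "type" "long",   "index" 0, "object" nil}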
+
+(defn- describe-table-query
+  "To understand how this works, see the comment block below for a rough translation of this query into Clojure."
+  [& {:keys [collection-name sample-size max-depth]}]
+  (let [start-n       (quot sample-size 2)
+        end-n         (- sample-size start-n)
+        sample        [{"$sort" {"_id" 1}}
+                       {"$limit" start-n}
+                       {"$unionWith"
+                        {"coll" collection-name
+                         "pipeline" [{"$sort" {"_id" -1}}
+                                     {"$limit" end-n}]}}]
+        initial-items [{"$project" {"path" "$ROOT"
+                                    "kvs" {"$map" {"input" {"$objectToArray" "$$ROOT"}
+                                                   "as"    "item"
+                                                   "in"    {"k"      "$$item.k"
+                                                            "object" {"$cond" {"if"   {"$eq" [{"$type" "$$item.v"} "object"]}
+                                                                               "then" "$$item.v"
+                                                                               "else" nil}}
+                                                            "type"   {"$type" "$$item.v"}}}}}}
+                       {"$unwind" {"path" "$kvs", "includeArrayIndex" "index"}}
+                       {"$project" {"path"   "$kvs.k"
+                                    "result" {"$literal" false}
+                                    "type"   "$kvs.type"
+                                    "index"  1
+                                    "object" "$kvs.object"}}]]
+    (concat sample
+            initial-items
+            (mapcat #(describe-table-query-step max-depth %) (range (inc max-depth)))
+            [{"$project" {"_id" 0, "path" "$path", "type" "$type", "index" "$index"}}])))
+
+(comment
+  ;; `describe-table-clojure` is a reference implementation for [[describe-table-query]] in Clojure.
+  ;; It is almost logically equivalent, excluding minor details like how the sample is taken, and dealing with null
+  ;; values. It is arguably easier to understand the Clojure version and translate it into MongoDB query language
+  ;; than to understand the MongoDB query version directly.
+  (defn describe-table-clojure [sample-data max-depth]
+    (let [;; initial items is a list of maps, each map a field in a document in the sample
+          initial-items (mapcat (fn [x]
+                                  (for [[i [k v]] (map vector (range) x)]
+                                    {:path   (name k)
+                                     :object (if (map? v) v nil)
+                                     :index  i
+                                     :type   (type v)}))
+                                sample-data)
+          most-common (fn [xs]
+                        (key (apply max-key val (frequencies xs))))]
+      (:results (reduce
+                 (fn [{:keys [results next-items]} depth]
+                   {:results    (concat results
+                                        (for [[path group] (group-by :path next-items)]
+                                          {:path  path
+                                           :type  (most-common (map :type group))
+                                           :index (apply min (map :index group))}))
+                    :next-items (when (not= depth max-depth)
+                                  (->> (keep :object next-items)
+                                       (mapcat (fn [x]
+                                                 (for [[i [k v]] (map vector (range) x)]
+                                                   {:path   (str (:path x) "." (name k))
+                                                    :object (if (map? v) v nil)
+                                                    :index  i
+                                                    :type   (type v)})))))})
+                 {:results [], :next-items initial-items}
+                 (range (inc max-depth))))))
+  ;; Example:
+  (def sample-data
+    [{:a 1 :b {:c "hello" :d [1 2 3]}}
+     {:a 2 :b {:c "world"}}])
+  (describe-table-clojure sample-data 0)
+  ;; => ({:path "a", :type java.lang.Long, :index 0}
+  ;;     {:path "b", :type clojure.lang.PersistentArrayMap, :index 1})
+  (describe-table-clojure sample-data 1)
+  ;; => ({:path "a", :type java.lang.Long, :index 0}
+  ;;     {:path "b", :type clojure.lang.PersistentArrayMap, :index 1}
+  ;;     {:path ".c", :type java.lang.String, :index 0}
+  ;;     {:path ".d", :type clojure.lang.PersistentVector, :index 1})
+  )
+
+(def describe-table-query-depth
+  "The depth of nested objects that [[describe-table-query]] will execute to. If set to 0, the query will only return the
+  fields at the root level of the document. If set to K, the query will return fields at K levels of nesting beyond that.
+  Setting its value involves a trade-off: the lower it is, the faster describe-table-query executes, but the more queries we might
+  have to execute."
+  ;; Cal 2024-09-15: I chose 100 as the limit because it's a pretty safe bet it won't be exceeded (the documents we've
+  ;; seen on cloud are all <20 levels deep)
+  ;; Case 2024-10-04: Sharded clusters seem to run into exponentially more work the bigger this is. Over 20 and this
+  ;; risks never finishing.
+  ;; From arakaki:
+  ;;  > I think we can pick a max-depth that works well. I know that some other related tools set limits of 7 nested levels.
+  ;;  > And that would be definitely ok for most.
+  ;;  > If people have problems with that, I think we can make it configurable.
+  7)
+
+(mu/defn- describe-table :- [:sequential
+                             [:map {:closed true}
+                              [:path  ::lib.schema.common/non-blank-string]
+                              [:type  ::lib.schema.common/non-blank-string]
+                              [:index :int]]]
+  "Queries the database, returning a list of maps with metadata for each field in the table (aka collection).
+  Like `driver/describe-table` but the data is directly from the [[describe-table-query]] and needs further processing."
+  [db table]
+  (let [query (describe-table-query {:collection-name (:name table)
+                                     :sample-size     (* metadata-queries/nested-field-sample-limit 2)
+                                     :max-depth       describe-table-query-depth})
+        data  (:data (qp/process-query {:database (:id db)
+                                        :type     "native"
+                                        :native   {:collection (:name table)
+                                                   :query      (json/generate-string query)}}))
+        cols  (map (comp keyword :name) (:cols data))]
+    (map #(zipmap cols %) (:rows data))))
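+
+;; Sketch of the output for the `venues` test collection (see `describe-table-test`; actual
+;; values depend on the sampled documents):
+;;   ({:path "_id", :type "long", :index 0}
+;;    {:path "name", :type "string", :index 1}
+;;    ...)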
+
+(defn- type-alias->base-type [type-alias]
+  ;; Mongo types from $type aggregation operation
+  ;; https://www.mongodb.com/docs/manual/reference/operator/aggregation/type/#available-types
+  (get {"double"     :type/Float
+        "string"     :type/Text
+        "object"     :type/Dictionary
+        "array"      :type/Array
+        "binData"    :type/*
+        "objectId"   :type/MongoBSONID
+        "bool"       :type/Boolean
+        "date"       :type/Instant
+        "null"       :type/*
+        "regex"      :type/*
+        "dbPointer"  :type/*
+        "javascript" :type/*
+        "symbol"     :type/Text
+        "int"        :type/Integer
+        "timestamp"  :type/Instant
+        "long"       :type/Integer
+        "decimal"    :type/Decimal}
+       type-alias :type/*))
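+
+;; Usage sketch:
+;;   (type-alias->base-type "objectId") ;=> :type/MongoBSONID
+;;   (type-alias->base-type "unknown")  ;=> :type/* (unrecognized aliases fall back to :type/*)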
+
+(defn- add-database-position
+  "Adds :database-position to all fields. It starts at 0 and is ordered by a depth-first traversal of nested fields."
+  [fields i]
+  (->> fields
+       ;; Previously database-position was set with Clojure according to the logic in this imperative pseudocode:
+       ;; i = 0
+       ;; for each row in sample:
+       ;;   for each k,v in row:
+       ;;     field.database-position = i
+       ;;     i = i + 1
+       ;;     for each k,v in field.nested-fields:
+       ;;       field.database-position = i
+       ;;       i = i + 1
+       ;;       etc.
+       ;; We can't match this logic exactly with a MongoDB query. We can get close though: index is the minimum index
+       ;; of the key in the object over all documents in the sample. However, there can be more than one key that has
+       ;; the same index, so the name is used to keep the order stable.
+       (sort-by (juxt :index :name))
+       (reduce (fn [[fields i] field]
+                 (let [field             (assoc field :database-position i)
+                       i                 (inc i)
+                       nested-fields     (:nested-fields field)
+                       [nested-fields i] (if nested-fields
+                                           (add-database-position nested-fields i)
+                                           [nested-fields i])
+                       field             (-> field
+                                             (m/assoc-some :nested-fields nested-fields)
+                                             (dissoc :index))]
+                   [(conj fields field) i]))
+               [#{} i])))
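+
+;; A minimal worked example (hypothetical field maps; only the relevant keys shown):
+;;   (add-database-position
+;;    #{{:name "a" :index 0}
+;;      {:name "b" :index 1 :nested-fields #{{:name "c" :index 0}}}}
+;;    0)
+;;   ;=> [#{{:name "a" :database-position 0}
+;;          {:name "b" :database-position 1
+;;           :nested-fields #{{:name "c" :database-position 2}}}}
+;;        3]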
 
 (defmethod driver/describe-table :mongo
-  [_ database table]
-  (mongo.connection/with-mongo-database [^MongoDatabase db database]
-    (let [column-info (table-sample-column-info db table)]
-      {:schema nil
-       :name   (:name table)
-       :fields (first
-                (reduce (fn [[fields idx] [field info]]
-                          (let [[described-field new-idx] (describe-table-field field info idx)]
-                            [(conj fields described-field) new-idx]))
-                        [#{} 0]
-                        column-info))})))
+  [_driver database table]
+  (let [fields (->> (describe-table database table)
+                    (map (fn [x]
+                           (let [path (str/split (:path x) #"\.")
+                                 name (last path)]
+                             (cond-> {:name              name
+                                      :database-type     (:type x)
+                                      :base-type         (type-alias->base-type (:type x))
+                                      ; index is used by `add-database-position`, and not present in final result
+                                      :index             (:index x)
+                                      ; path is used to nest fields, and not present in final result
+                                      :path              path}
+                               (= name "_id")
+                               (assoc :pk? true))))))
+        ;; convert the flat list of fields into a deeply-nested map.
+        ;; `fields` and `:nested-fields` values are maps from name to field
+        fields (reduce
+                (fn [acc field]
+                  (assoc-in acc (interpose :nested-fields (:path field)) (dissoc field :path)))
+                {}
+                fields)
+        ;; replace maps from name to field with sets of fields
+        fields (walk/postwalk (fn [x]
+                                (cond-> x
+                                  (map? x)
+                                  (m/update-existing :nested-fields #(set (vals %)))))
+                              (set (vals fields)))
+        [fields _] (add-database-position fields 0)]
+    {:schema nil
+     :name   (:name table)
+     :fields fields}))
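+
+;; Sketch of the nesting step above (hypothetical fields, extra keys omitted): paths ["b"] and
+;; ["b" "b_c"] first build {"b" {:name "b" :nested-fields {"b_c" {:name "b_c"}}}}, and the
+;; postwalk then replaces every :nested-fields map with the set of its field maps.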
 
 (doseq [[feature supported?] {:basic-aggregations              true
                               :expression-aggregations         true
diff --git a/modules/drivers/mongo/src/metabase/driver/mongo/util.clj b/modules/drivers/mongo/src/metabase/driver/mongo/util.clj
index c5e2a5313157118148ea99485793f4bcef8469f3..f3f762476b479dce5f53725703fc2fde8686ddec 100644
--- a/modules/drivers/mongo/src/metabase/driver/mongo/util.clj
+++ b/modules/drivers/mongo/src/metabase/driver/mongo/util.clj
@@ -82,6 +82,11 @@
   [^MongoCollection coll document-map]
   (.insertOne coll (mongo.conversion/to-document document-map)))
 
+(defn insert-many
+  "Insert document into mongo collection."
+  [^MongoCollection coll document-maps]
+  (.insertMany coll (map mongo.conversion/to-document document-maps)))
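+
+;; Usage sketch (hypothetical collection and documents):
+;;   (insert-many (collection db "people") [{:name "Ann"} {:name "Bo"}])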
+
 (defn start-session!
   "Start session on client `c`."
   ^ClientSession [^MongoClient c]
diff --git a/modules/drivers/mongo/test/metabase/driver/missing-fields.json b/modules/drivers/mongo/test/metabase/driver/missing-fields.json
new file mode 100644
index 0000000000000000000000000000000000000000..ae4569fcd7c0689f38ebd952449fb4b6c68c3f9a
--- /dev/null
+++ b/modules/drivers/mongo/test/metabase/driver/missing-fields.json
@@ -0,0 +1,31 @@
+[
+  {
+    "a": "a string",
+    "b": {
+      "b_c": "a string",
+      "b_d": 42,
+      "b_e": {
+        "b_e_f": "a string"
+      }
+    },
+    "c": null
+  },
+  {
+    "a": "a string",
+    "b": {
+      "b_d": null,
+      "b_e": null,
+      "b_c": null
+    }
+  },
+  {
+    "a": "a string",
+    "b": {
+      "b_e": {}
+    }
+  },
+  {
+    "a": "a string",
+    "b": null
+  }
+]
diff --git a/modules/drivers/mongo/test/metabase/driver/mongo/sharded_cluster_test.clj b/modules/drivers/mongo/test/metabase/driver/mongo/sharded_cluster_test.clj
index dbe3c3414549d759faf9c74b1f20f4fb8ce95d8d..012ce3e24dd5da0ee0032617283e5c45b4bec1de 100644
--- a/modules/drivers/mongo/test/metabase/driver/mongo/sharded_cluster_test.clj
+++ b/modules/drivers/mongo/test/metabase/driver/mongo/sharded_cluster_test.clj
@@ -2,6 +2,7 @@
   (:require
    [clojure.test :refer :all]
    [metabase.driver :as driver]
+   [metabase.sync :as sync]
    [metabase.test :as mt]))
 
 (set! *warn-on-reflection* true)
@@ -10,4 +11,6 @@
   (mt/test-driver
     :mongo
     (testing "Mongo driver can connect to a sharded cluster"
-      (is (true? (driver/can-connect? :mongo (mt/db)))))))
+      (is (true? (driver/can-connect? :mongo (mt/db)))))
+    (testing "Mongo can sync"
+      (sync/sync-database! (mt/db)))))
diff --git a/modules/drivers/mongo/test/metabase/driver/mongo_test.clj b/modules/drivers/mongo/test/metabase/driver/mongo_test.clj
index ba9a75a07737887d61ccb27e0bd144dd0f04a37b..fc1cf8c75785ee88632f60655bbd2431c8a1bfde 100644
--- a/modules/drivers/mongo/test/metabase/driver/mongo_test.clj
+++ b/modules/drivers/mongo/test/metabase/driver/mongo_test.clj
@@ -27,8 +27,7 @@
    [metabase.util.log :as log]
    [metabase.xrays.automagic-dashboards.core :as magic]
    [taoensso.nippy :as nippy]
-   [toucan2.core :as t2]
-   [toucan2.tools.with-temp :as t2.with-temp])
+   [toucan2.core :as t2])
   (:import
    (org.bson.types ObjectId)))
 
@@ -96,7 +95,7 @@
              {:dbms_version  {:semantic-version [2 2134234]}
               :expected false}]]
       (testing (str "supports with " dbms_version)
-        (t2.with-temp/with-temp [Database db {:name "dummy", :engine "mongo", :dbms_version dbms_version}]
+        (mt/with-temp [Database db {:name "dummy", :engine "mongo", :dbms_version dbms_version}]
           (is (= expected
                  (driver/database-supports? :mongo :expressions db))))))
     (is (= #{:collection}
@@ -133,10 +132,10 @@
 (deftest ^:parallel nested-native-query-test
   (mt/test-driver :mongo
     (testing "Mbql query with nested native source query _returns correct results_ (#30112)"
-      (t2.with-temp/with-temp [Card {:keys [id]} {:dataset_query {:type     :native
-                                                                  :native   {:collection    "venues"
-                                                                             :query         native-query}
-                                                                  :database (mt/id)}}]
+      (mt/with-temp [Card {:keys [id]} {:dataset_query {:type     :native
+                                                        :native   {:collection    "venues"
+                                                                   :query         native-query}
+                                                        :database (mt/id)}}]
         (let [query (mt/mbql-query nil
                       {:source-table (str "card__" id)
                        :limit        1})]
@@ -159,10 +158,10 @@
                            "    \"longitude\": \"$longitude\",\n"
                            "    \"price\": \"$price\"}\n"
                            "}]")]
-        (t2.with-temp/with-temp [Card {:keys [id]} {:dataset_query {:type     :native
-                                                                    :native   {:collection    "venues"
-                                                                               :query         query-str}
-                                                                    :database (mt/id)}}]
+        (mt/with-temp [Card {:keys [id]} {:dataset_query {:type     :native
+                                                          :native   {:collection    "venues"
+                                                                     :query         query-str}
+                                                          :database (mt/id)}}]
           (let [query (mt/mbql-query venues
                         {:source-table (str "card__" id)
                          :aggregation [:count]
@@ -196,32 +195,105 @@
              {:schema nil, :name "reviews"}}
            (:tables (driver/describe-database :mongo (mt/db)))))))
 
-(deftest ^:parallel describe-table-test
+(deftest ^:parallel describe-table-query-test
+  (is (= [{"$sort" {"_id" 1}}
+          {"$limit" 500}
+          {"$unionWith" {"coll" "collection-name", "pipeline" [{"$sort" {"_id" -1}} {"$limit" 500}]}}
+          {"$project"
+           {"path" "$ROOT",
+            "kvs"
+            {"$map"
+             {"input" {"$objectToArray" "$$ROOT"},
+              "as" "item",
+              "in"
+              {"k" "$$item.k",
+               "object"
+               {"$cond" {"if" {"$eq" [{"$type" "$$item.v"} "object"]}, "then" "$$item.v", "else" nil}},
+               "type" {"$type" "$$item.v"}}}}}}
+          {"$unwind" {"path" "$kvs", "includeArrayIndex" "index"}}
+          {"$project"
+           {"path" "$kvs.k",
+            "result" {"$literal" false},
+            "type" "$kvs.type",
+            "index" 1,
+            "object" "$kvs.object"}}
+          {"$facet"
+           {"results" [{"$match" {"result" true}}],
+            "newResults"
+            [{"$match" {"result" false}}
+             {"$group"
+              {"_id" {"type" "$type", "path" "$path"},
+               "count" {"$sum" {"$cond" {"if" {"$eq" ["$type" "null"]}, "then" 0, "else" 1}}},
+               "index" {"$min" "$index"}}}
+             {"$sort" {"count" -1}}
+             {"$group" {"_id" "$_id.path", "type" {"$first" "$_id.type"}, "index" {"$min" "$index"}}}
+             {"$project" {"path" "$_id", "type" 1, "result" {"$literal" true}, "object" nil, "index" 1}}],
+            "nextItems"
+            [{"$match" {"result" false, "object" {"$ne" nil}}}
+             {"$project"
+              {"path" 1,
+               "kvs"
+               {"$map"
+                {"input" {"$objectToArray" "$object"},
+                 "as" "item",
+                 "in"
+                 {"k" "$$item.k",
+                  "object"
+                  {"$cond" {"if" {"$eq" [{"$type" "$$item.v"} "object"]}, "then" "$$item.v", "else" nil}},
+                  "type" {"$type" "$$item.v"}}}}}}
+             {"$unwind" {"path" "$kvs", "includeArrayIndex" "index"}}
+             {"$project"
+              {"path" {"$concat" ["$path" "." "$kvs.k"]},
+               "type" "$kvs.type",
+               "result" {"$literal" false},
+               "index" 1,
+               "object" "$kvs.object"}}]}}
+          {"$project" {"acc" {"$concatArrays" ["$results" "$newResults" "$nextItems"]}}}
+          {"$unwind" "$acc"}
+          {"$replaceRoot" {"newRoot" "$acc"}}
+          {"$facet"
+           {"results" [{"$match" {"result" true}}],
+            "newResults"
+            [{"$match" {"result" false}}
+             {"$group"
+              {"_id" {"type" "$type", "path" "$path"},
+               "count" {"$sum" {"$cond" {"if" {"$eq" ["$type" "null"]}, "then" 0, "else" 1}}},
+               "index" {"$min" "$index"}}}
+             {"$sort" {"count" -1}}
+             {"$group" {"_id" "$_id.path", "type" {"$first" "$_id.type"}, "index" {"$min" "$index"}}}
+             {"$project" {"path" "$_id", "type" 1, "result" {"$literal" true}, "object" nil, "index" 1}}]}}
+          {"$project" {"acc" {"$concatArrays" ["$results" "$newResults"]}}}
+          {"$unwind" "$acc"}
+          {"$replaceRoot" {"newRoot" "$acc"}}
+          {"$project" {"_id" 0, "index" "$index", "path" "$path", "type" "$type"}}]
+         (#'mongo/describe-table-query :collection-name "collection-name" :sample-size 1000 :max-depth 1))))
+
+(deftest describe-table-test
   (mt/test-driver :mongo
     (is (= {:schema nil
             :name   "venues"
             :fields #{{:name              "name"
-                       :database-type     "java.lang.String"
+                       :database-type     "string"
                        :base-type         :type/Text
                        :database-position 1}
                       {:name              "latitude"
-                       :database-type     "java.lang.Double"
+                       :database-type     "double"
                        :base-type         :type/Float
                        :database-position 3}
                       {:name              "longitude"
-                       :database-type     "java.lang.Double"
+                       :database-type     "double"
                        :base-type         :type/Float
                        :database-position 4}
                       {:name              "price"
-                       :database-type     "java.lang.Long"
+                       :database-type     "long"
                        :base-type         :type/Integer
                        :database-position 5}
                       {:name              "category_id"
-                       :database-type     "java.lang.Long"
+                       :database-type     "long"
                        :base-type         :type/Integer
                        :database-position 2}
                       {:name              "_id"
-                       :database-type     "java.lang.Long"
+                       :database-type     "long"
                        :base-type         :type/Integer
                        :pk?               true
                        :database-position 0}}}
@@ -285,7 +357,6 @@
                                        :table_id (mt/id :top-level-indexed) :name "name")]
             (testing "sanity check that we have 2 `name` fields"
               (is (= 2 (count name-fields))))
-
             (testing "only the top level field is indexed"
               (is (=? [{:name             "name"
                         :parent_id        nil
@@ -411,9 +482,9 @@
     (mt/dataset all-null-columns
       ;; do a full sync on the DB to get the correct semantic type info
       (sync/sync-database! (mt/db))
-      (is (= [{:name "_id",            :database_type "java.lang.Long",   :base_type :type/Integer, :semantic_type :type/PK}
-              {:name "favorite_snack", :database_type "NULL",             :base_type :type/*,       :semantic_type nil}
-              {:name "name",           :database_type "java.lang.String", :base_type :type/Text,    :semantic_type :type/Name}]
+      (is (= [{:name "_id",            :database_type "long",   :base_type :type/Integer, :semantic_type :type/PK}
+              {:name "favorite_snack", :database_type "null",   :base_type :type/*,       :semantic_type nil}
+              {:name "name",           :database_type "string", :base_type :type/Text,    :semantic_type :type/Name}]
              (map
               (partial into {})
               (t2/select [Field :name :database_type :base_type :semantic_type]
@@ -437,10 +508,10 @@
              ["Silvereye" "cherries" nil]]]]
           ;; do a full sync on the DB to get the correct semantic type info
           (sync/sync-database! (mt/db))
-          (is (= #{{:name "_id", :database_type "java.lang.Long", :base_type :type/Integer, :semantic_type :type/PK}
-                   {:name "favorite_snack", :database_type "java.lang.String", :base_type :type/Text, :semantic_type :type/Category}
-                   {:name "name", :database_type "java.lang.String", :base_type :type/Text, :semantic_type :type/Name}
-                   {:name "max_wingspan", :database_type "java.lang.Long", :base_type :type/Integer, :semantic_type nil}}
+          (is (= #{{:name "_id",            :database_type "long",   :base_type :type/Integer, :semantic_type :type/PK}
+                   {:name "favorite_snack", :database_type "string", :base_type :type/Text,    :semantic_type :type/Category}
+                   {:name "name",           :database_type "string", :base_type :type/Text,    :semantic_type :type/Name}
+                   {:name "max_wingspan",   :database_type "long",   :base_type :type/Integer, :semantic_type nil}}
                  (into #{}
                        (map (partial into {}))
                        (t2/select [Field :name :database_type :base_type :semantic_type]
@@ -616,13 +687,6 @@
                (rows-count {:query      "[{$match: {date: {$gte: ISODate(\"2015-12-20\")}}}]"
                             :collection "checkins"})))))))
 
-(deftest ^:parallel most-common-object-type-test
-  (is (= String
-         (#'mongo/most-common-object-type [[Float 20] [Integer 10] [String 30]])))
-  (testing "make sure it handles `nil` types correctly as well (#6880)"
-    (is (= nil
-           (#'mongo/most-common-object-type [[Float 20] [nil 40] [Integer 10] [String 30]])))))
-
 (deftest xrays-test
   (mt/test-driver :mongo
     (testing "make sure x-rays don't use features that the driver doesn't support"
@@ -637,7 +701,7 @@
     (testing (str "if we query a something an there are no values for the Field, the query should still return "
                   "successfully! (#8929 and #8894)")
       ;; add a temporary Field that doesn't actually exist to test data categories
-      (t2.with-temp/with-temp [Field _ {:name "parent_id", :table_id (mt/id :categories)}]
+      (mt/with-temp [Field _ {:name "parent_id", :table_id (mt/id :categories)}]
         ;; ok, now run a basic MBQL query against categories Table. When implicit Field IDs get added the `parent_id`
         ;; Field will be included
         (testing (str "if the column does not come back in the results for a given document we should fill in the "
@@ -701,6 +765,45 @@
   (with-open [rdr (java.io.FileReader. (java.io.File. filename))]
     (json/parse-stream rdr true)))
 
+(defn- missing-fields-db []
+  (create-database-from-row-maps!
+   "test-missing-fields"
+   "coll"
+   (json-from-file "modules/drivers/mongo/test/metabase/driver/missing-fields.json")))
+
+(deftest sync-missing-fields-test
+  (mt/test-driver :mongo
+    (mt/with-db (missing-fields-db)
+      (sync/sync-database! (missing-fields-db))
+      (testing "Test that fields with missing or null values get synced correctly"
+        (let [results (map #(into {} %)
+                           (t2/select [Field :id :name :database_type :base_type :semantic_type :parent_id]
+                                      :active   true
+                                      :table_id (mt/id :coll)
+                                      {:order-by [:database_position]}))]
+          (is (=? [{:name "_id",    :database_type "long",   :base_type :type/Integer,   :semantic_type :type/PK}
+                   {:name "a",     :database_type "string", :base_type :type/Text,       :semantic_type :type/Category}
+                   {:name "b",     :database_type "object", :base_type :type/Dictionary, :semantic_type nil}
+                   {:name "b_c",   :database_type "string", :base_type :type/Text,       :semantic_type :type/Category}
+                   {:name "b_d",   :database_type "int",    :base_type :type/Integer,    :semantic_type :type/Category}
+                   {:name "b_e",   :database_type "object", :base_type :type/Dictionary, :semantic_type nil}
+                   {:name "b_e_f", :database_type "string", :base_type :type/Text,       :semantic_type :type/Category}
+                   {:name "c",     :database_type "null",   :base_type :type/*,          :semantic_type nil}]
+                  results))
+          (testing "parent_ids are correct"
+            (let [parent (fn [field-name]
+                           (let [field (first (filter #(= (:name %) field-name) results))]
+                             (:name (first (filter #(= (:id %) (:parent_id field)) results)))))]
+              (is (= {"_id"   nil
+                      "a"     nil
+                      "b"     nil
+                      "c"     nil
+                      "b_c"   "b"
+                      "b_d"   "b"
+                      "b_e"   "b"
+                      "b_e_f" "b_e"}
+                     (into {} (map (juxt :name #(parent (:name %))) results)))))))))))
+
 (defn- array-fields-db []
   (create-database-from-row-maps!
    "test-16299"
diff --git a/modules/drivers/mongo/test/metabase/test/data/mongo.clj b/modules/drivers/mongo/test/metabase/test/data/mongo.clj
index 97b873c740e9261bfda9992c9461154cea6e0a2c..4887484b7f131439701a9dacab95a239967cd597 100644
--- a/modules/drivers/mongo/test/metabase/test/data/mongo.clj
+++ b/modules/drivers/mongo/test/metabase/test/data/mongo.clj
@@ -76,17 +76,20 @@
         (when indexed?
           (mongo.util/create-index (mongo.util/collection db table-name) {field-name 1})))
       (let [field-names (for [field-definition field-definitions]
-                          (keyword (:field-name field-definition)))]
-        ;; Use map-indexed so we can get an ID for each row (index + 1)
-        (doseq [[i row] (map-indexed vector rows)]
-          (try
-            ;; Insert each row
-            (mongo.util/insert-one (mongo.util/collection db (name table-name))
-                                   (into (ordered-map/ordered-map :_id (inc i))
-                                         (cond->> (zipmap field-names row)
-                                           *remove-nil?* (m/remove-vals nil?))))
-            ;; If row already exists then nothing to do
-            (catch com.mongodb.MongoException _)))))))
+                          (keyword (:field-name field-definition)))
+            rows (map (fn [[i row]]
+                        (into (ordered-map/ordered-map :_id (inc i))
+                              (cond->> (zipmap field-names row)
+                                *remove-nil?* (m/remove-vals nil?))))
+                      ;; Use map-indexed so we can get an ID for each row (index + 1)
+                      (map-indexed vector rows))]
+        (try
+          ;; Insert all rows in one batch
+          (mongo.util/insert-many
+           (mongo.util/collection db (name table-name))
+           rows)
+          ;; If rows already exist then nothing to do
+          (catch com.mongodb.MongoException _))))))
 
 (defmethod tx/destroy-db! :mongo
   [driver dbdef]
diff --git a/package.json b/package.json
index 4a6e8ba14fa740345b607facb00fe00aa46c7c04..601ca4f5b802b108c026016f045aa84012fd17d9 100644
--- a/package.json
+++ b/package.json
@@ -323,6 +323,7 @@
   "resolutions": {
     "ansi-html": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz",
     "ansi-regex": "5.0.1",
+    "cookie": "^0.7.0",
     "d3-color": "^3.1.0",
     "debug": "^4.3.4",
     "follow-redirects": "^1.15.5",
diff --git a/resources/migrations/001_update_migrations.yaml b/resources/migrations/001_update_migrations.yaml
index ae7e1f6f0d0e68dc1060ca96afefb6ddc20e9625..a818c18ba144839a7e19c7e40a5adc3f5ae8abf5 100644
--- a/resources/migrations/001_update_migrations.yaml
+++ b/resources/migrations/001_update_migrations.yaml
@@ -9270,6 +9270,25 @@ databaseChangeLog:
                   type: ${text.type}
                   remarks: running, failed, or completed
 
+  - changeSet:
+      id: v51.2024-09-03T16:54:18
+      author: adam-james
+      comment: Add pivot_results to pulse_card
+      preConditions:
+        - not:
+            - columnExists:
+                tableName: pulse_card
+                columnName: pivot_results
+      changes:
+        - addColumn:
+            tableName: pulse_card
+            columns:
+              - column:
+                  name: pivot_results
+                  type: ${boolean.type}
+                  defaultValue: false
+                  remarks: Whether or not to apply pivot processing to the rows of the export
+
   - changeSet:
       id: v51.2024-09-09T15:11:16
       author: johnswanson
diff --git a/snowplow/iglu-client-embedded/schemas/com.metabase/dashboard/jsonschema/1-1-6 b/snowplow/iglu-client-embedded/schemas/com.metabase/dashboard/jsonschema/1-1-6
deleted file mode 100644
index 56b2b16a05dc56b14f40f1a184ddcb64ffcc22ef..0000000000000000000000000000000000000000
--- a/snowplow/iglu-client-embedded/schemas/com.metabase/dashboard/jsonschema/1-1-6
+++ /dev/null
@@ -1,124 +0,0 @@
-{
-  "$schema": "http://iglucentral.com/schemas/com.snowplowanalytics.self-desc/schema/jsonschema/1-0-0#",
-  "description": "Dashboard events",
-  "self": {
-    "vendor": "com.metabase",
-    "name": "dashboard",
-    "format": "jsonschema",
-    "version": "1-1-6"
-  },
-  "type": "object",
-  "properties": {
-    "event": {
-      "description": "Event name",
-      "type": "string",
-      "enum": [
-        "dashboard_created",
-        "dashboard_saved",
-        "question_added_to_dashboard",
-        "auto_apply_filters_disabled",
-        "dashboard_tab_created",
-        "dashboard_tab_deleted",
-        "dashboard_tab_duplicated",
-        "new_text_card_created",
-        "new_heading_card_created",
-        "new_link_card_created",
-        "new_action_card_created",
-        "new_iframe_card_created",
-        "card_set_to_hide_when_no_results",
-        "dashboard_pdf_exported",
-        "card_moved_to_tab",
-        "dashboard_card_duplicated",
-        "dashboard_card_replaced",
-        "dashboard_section_added",
-        "dashboard_width_toggled",
-        "dashboard_filter_required"
-      ],
-      "maxLength": 1024
-    },
-    "dashboard_id": {
-      "description": "Unique identifier for a dashboard within the Metabase instance",
-      "type": "integer",
-      "minimum": 0,
-      "maximum": 2147483647
-    },
-    "question_id": {
-      "description": "Unique identifier for a question added to a dashboard",
-      "type": [
-        "integer",
-        "null"
-      ],
-      "minimum": 0,
-      "maximum": 2147483647
-    },
-    "num_tabs": {
-      "description": "Number of tabs affected after the event",
-      "type": [
-        "integer",
-        "null"
-      ],
-      "minimum": 0,
-      "maximum": 2147483647
-    },
-    "total_num_tabs": {
-      "description": "Total number of active tabs after the events",
-      "type": [
-        "integer",
-        "null"
-      ],
-      "minimum": 0,
-      "maximum": 2147483647
-    },
-    "duration_milliseconds": {
-      "description": "Duration the action took to complete in milliseconds",
-      "type": [
-        "integer",
-        "null"
-      ],
-      "minimum": 0,
-      "maximum": 2147483647
-    },
-    "section_layout": {
-      "description": "String describing the layout that was selected from the pre-built options",
-      "type": [
-        "string",
-        "null"
-      ],
-      "maxLength": 1024
-    },
-    "full_width": {
-      "description": "Boolean set to True if the dashboard was toggled to full width and False if full width was disabled.",
-      "type": [
-        "boolean",
-        "null"
-      ]
-    },
-    "dashboard_accessed_via": {
-      "description": "Indicate if the dashboard was accessed via metabase ('internal'), public link, static embed etc",
-      "type": [
-        "string",
-        "null"
-      ],
-      "enum": [
-        "internal",
-        "public-link",
-        "static-embed",
-        "interactive-iframe-embed",
-        "sdk-embed"
-      ]
-    },
-    "domain_name": {
-      "description": "Domain name of the iframe",
-      "type": [
-        "string",
-        "null"
-      ],
-      "maxLength": 1024
-    }
-  },
-  "required": [
-    "event",
-    "dashboard_id"
-  ],
-  "additionalProperties": true
-}
diff --git a/snowplow/iglu-client-embedded/schemas/com.metabase/instance_stats/jsonschema/2-0-0 b/snowplow/iglu-client-embedded/schemas/com.metabase/instance_stats/jsonschema/2-0-0
new file mode 100644
index 0000000000000000000000000000000000000000..6ff9e821d5586357b10898d38176712e7281737e
--- /dev/null
+++ b/snowplow/iglu-client-embedded/schemas/com.metabase/instance_stats/jsonschema/2-0-0
@@ -0,0 +1,211 @@
+{
+  "$schema": "http://iglucentral.com/schemas/com.snowplowanalytics.self-desc/schema/jsonschema/1-0-0#",
+  "description": "Schema for daily stats ping, tracking instance metrics and settings",
+  "self": {
+    "vendor": "com.metabase",
+    "name": "instance_stats",
+    "format": "jsonschema",
+    "version": "2-0-0"
+  },
+  "type": "object",
+  "properties": {
+    "analytics_uuid": {
+      "description": "The UUID for the instance",
+      "type": "string",
+      "maxLength": 255
+    },
+    "features": {
+      "description": "Features",
+      "type": "array",
+      "items": {
+        "type": "object",
+        "description": "A single instance feature",
+        "properties": {
+          "name": {
+            "description": "The unique name of the feature",
+            "type": "string",
+            "maxLength": 255
+          },
+          "available": {
+            "description": "Whether the feature is available, i.e. can it be enabled/disabled or is it always on",
+            "type": "boolean"
+          },
+          "enabled": {
+            "description": "Whether the feature is enabled, i.e. can it be used by the users/instance",
+            "type": "boolean"
+          }
+        },
+        "required": ["name", "available", "enabled"],
+        "additionalProperties": true
+      }
+    },
+    "grouped_metrics": {
+      "description": "Key-value pairs of grouped metrics, with tags.",
+      "type": "array",
+      "items": {
+        "description": "a Grouped Metric, which has a key a value and tags",
+        "type": "object",
+        "properties": {
+          "name": {
+            "description": "The unique name of the grouped metric",
+            "type": "string",
+            "maxLength": 255
+          },
+          "values": {
+            "description": "Values for the grouped metric",
+            "type": "array",
+            "items": {
+              "type": "object",
+              "description": "Items in a groped metric value",
+              "properties": {
+                "group": {
+                  "type": "string",
+                  "description": "The group name",
+                  "maxLength": 255
+                },
+                "value": {
+                  "type": "number",
+                  "description": "The value for the group",
+                  "minimum": 0,
+                  "maximum": 9007199254740991
+                }
+              },
+              "required": ["group", "value"],
+              "additionalProperties": false
+            }
+          },
+          "tags": {
+            "description": "Tags that can be used flagging teams / features the grouped_metric belongs to",
+            "type": "array",
+            "items": {
+              "description": "a single tag",
+              "type": "string",
+              "maxLength": 255
+            }
+          }
+        },
+        "required": ["name", "values", "tags"],
+        "additionalProperties": false
+      }
+    },
+    "instance_attributes": {
+      "description": "Key-value pairs of instance attributes",
+      "type": "array",
+      "items": {
+        "type": "object",
+        "description": "A single instance attribute",
+        "properties": {
+          "key": {
+            "description": "The key for this attribute",
+            "type": "string",
+            "maxLength": 255
+          },
+          "value": {
+            "description": "The value of this attribute",
+            "type": ["string", "boolean", "integer", "null"],
+            "maxLength": 255,
+            "minimum": 0,
+            "maximum": 2147483647
+          }
+        },
+        "required": ["key", "value"]
+      }
+    },
+    "metadata": {
+      "description": "Metadata about the anonymous stats collection",
+      "type": "array",
+      "items": {
+        "type": "object",
+        "description": "A single metadata key/value",
+        "properties": {
+          "key": {
+            "description": "The key for this metadata",
+            "type": "string",
+            "maxLength": 255
+          },
+          "value": {
+            "description": "The value of this metadata",
+            "type": ["string", "boolean", "integer", "null"],
+            "maxLength": 255,
+            "minimum": 0,
+            "maximum": 2147483647
+          }
+        },
+        "required": ["key", "value"]
+      }
+    },
+    "metrics": {
+      "description": "Key-value pairs of metrics, with tags.",
+      "type": "array",
+      "items": {
+        "type": "object",
+        "description": "A single metric attribute",
+        "properties": {
+          "name": {
+            "description": "The unique name of the metric",
+            "type": "string",
+            "maxLength": 255
+          },
+          "value": {
+            "type": "integer",
+            "description": "The value of the metric",
+            "minimum": 0,
+            "maximum": 2147483647
+          },
+          "tags": {
+            "type": "array",
+            "description": "Tags that can be used for flagging teams / features the metric belongs to",
+            "items": {
+              "description": "a tag",
+              "type": "string",
+              "maxLength": 255
+            }
+          }
+        },
+        "required": ["name", "value", "tags"]
+      }
+    },
+    "settings": {
+      "type": "array",
+      "description": "Key-value pairs of settings, with tags.",
+      "items": {
+        "type": "object",
+        "description": "A single setting attribute",
+        "properties": {
+          "key": {
+            "type": "string",
+            "maxLength": 255,
+            "description": "The unique name of the setting"
+          },
+          "value": {
+            "type": ["string", "boolean", "integer", "null"],
+            "description": "The value of this setting",
+            "maxLength": 255,
+            "minimum": 0,
+            "maximum": 2147483647
+          },
+          "tags": {
+            "type": "array",
+            "description": "Tags that can be used for flagging teams / features the setting belongs to",
+            "items": {
+              "description": "a tag for the setting to help categorize it",
+              "type": "string",
+              "maxLength": 255
+            }
+          }
+        },
+        "required": ["key", "value", "tags"]
+      }
+    }
+  },
+  "additionalProperties": false,
+  "required": [
+    "analytics_uuid",
+    "features",
+    "grouped_metrics",
+    "instance_attributes",
+    "metadata",
+    "metrics",
+    "settings"
+  ]
+}
diff --git a/src/metabase/analytics/snowplow.clj b/src/metabase/analytics/snowplow.clj
index 74e2e6178bf1f8856d1e4386b33f252e829439de..1cb1e4e0362157d34217de1a04602135572bfec2 100644
--- a/src/metabase/analytics/snowplow.clj
+++ b/src/metabase/analytics/snowplow.clj
@@ -30,7 +30,13 @@
 
 (set! *warn-on-reflection* true)
 
-;; Adding or updating a Snowplow schema? Make sure that the map below is updated accordingly.
+;; Adding or updating a Snowplow schema? Here are some things to keep in mind:
+;; - Snowplow schemata are versioned and immutable, so if you need to make changes to a schema, you should create a new
+;;   version of it. The version number should be updated in the `schema->version` map below.
+;; - Schemas live inside the `/snowplow/iglu-client-embedded/schemas` directory.
+;; - The new schema should be added to the Metabase repo via the normal pull request workflow before it is uploaded to
+;;   SnowcatCloud in the last step. Make sure to sanity check your schema with SnowcatCloud in the
+;;   #external-snowcat-cloud channel since there might be some back and forth on the format.
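+;; - For example, this changeset adds the instance_stats 2-0-0 schema under
+;;   snowplow/iglu-client-embedded/schemas/com.metabase/instance_stats/jsonschema/ and bumps ::instance_stats to "2-0-0" below.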
 
 (def ^:private schema->version
   "The most recent version for each event schema. This should be updated whenever a new version of a schema is added
@@ -38,7 +44,7 @@
   {::account        "1-0-1"
    ::browse_data    "1-0-0"
    ::invite         "1-0-1"
-   ::instance_stats "1-0-0"
+   ::instance_stats "2-0-0"
    ::csvupload      "1-0-3"
    ::dashboard      "1-1-4"
    ::database       "1-0-1"
diff --git a/src/metabase/analytics/stats.clj b/src/metabase/analytics/stats.clj
index 80188cacb70f6745b91206f9382482dc0de39132..c912e7f9675ca02beb60f35827169f5b4eb26092 100644
--- a/src/metabase/analytics/stats.clj
+++ b/src/metabase/analytics/stats.clj
@@ -14,6 +14,7 @@
    [metabase.db :as db]
    [metabase.db.query :as mdb.query]
    [metabase.driver :as driver]
+   [metabase.eid-translation :as eid-translation]
    [metabase.email :as email]
    [metabase.embed.settings :as embed.settings]
    [metabase.integrations.google :as google]
@@ -30,6 +31,7 @@
    [metabase.util :as u]
    [metabase.util.honey-sql-2 :as h2x]
    [metabase.util.log :as log]
+   [metabase.util.malli :as mu]
    [toucan2.core :as t2]))
 
 (set! *warn-on-reflection* true)
@@ -125,21 +127,23 @@
    :application_database                 (config/config-str :mb-db-type)
    :check_for_updates                    (public-settings/check-for-updates)
    :report_timezone                      (driver/report-timezone)
-   ; We deprecated advanced humanization but have this here anyways
+   ;; We deprecated advanced humanization but have this here anyways
    :friendly_names                       (= (humanization/humanization-strategy) "advanced")
    :email_configured                     (email/email-configured?)
    :slack_configured                     (slack/slack-configured?)
    :sso_configured                       (google/google-auth-enabled)
    :instance_started                     (snowplow/instance-creation)
    :has_sample_data                      (t2/exists? Database, :is_sample true)
-   :enable_embedding                     #_:clj-kondo/ignore (embed.settings/enable-embedding)
+   :enable_embedding                     #_{:clj-kondo/ignore [:deprecated-var]} (embed.settings/enable-embedding)
    :enable_embedding_sdk                 (embed.settings/enable-embedding-sdk)
    :enable_embedding_interactive         (embed.settings/enable-embedding-interactive)
-   :embedding_app_origin_set             (boolean  (or
-                                                    #_:clj-kondo/ignore (embed.settings/embedding-app-origin)
-                                                    (embed.settings/embedding-app-origins-interactive)
-                                                    (let [sdk-origins (embed.settings/embedding-app-origins-sdk)]
-                                                      (and sdk-origins (not= "localhost:*" sdk-origins)))))
+   :enable_embedding_static              (embed.settings/enable-embedding-static)
+   :embedding_app_origin_set             (boolean
+                                          #_{:clj-kondo/ignore [:deprecated-var]}
+                                          (embed.settings/embedding-app-origin))
+   :embedding_app_origin_sdk_set         (boolean (let [sdk-origins (embed.settings/embedding-app-origins-sdk)]
+                                                    (and sdk-origins (not= "localhost:*" sdk-origins))))
+   :embedding_app_origin_interactive_set (boolean (embed.settings/embedding-app-origins-interactive))
    :appearance_site_name                 (not= (public-settings/site-name) "Metabase")
    :appearance_help_link                 (public-settings/help-link)
    :appearance_logo                      (not= (public-settings/application-logo-url) "app/assets/img/logo.svg")
@@ -445,7 +449,9 @@
   []
   (let [{:keys [length count]} (t2/select-one [QueryCache [[:avg [:length :results]] :length] [:%count.* :count]])]
     {:average_entry_size (int (or length 0))
-     :num_queries_cached (bin-small-number count)}))
+     :num_queries_cached (bin-small-number count)
+     ;; this value gets used in the snowplow ping 'metrics' section.
+     :num_queries_cached_unbinned count}))
 
 ;;; System Metrics
 
@@ -561,6 +567,11 @@
       :else
       nil)))
 
+(defn m->kv-vec
+  "Convert a map to a vector of key-value maps with keys 'key' and 'value' for each key-value pair in the map."
+  [m]
+  (mapv (fn [[k v]] {"key" (name k) "value" v}) m))
+
 (defn- snowplow-instance-attributes
   [stats]
   (let [system-stats (-> stats :stats :system)
@@ -574,11 +585,113 @@
           :deployment_model                 (deployment-model)
           :startup_time_millis              (-> stats :startup_time_millis)
           :has_activation_signals_completed (completed-activation-signals?)})]
-    (mapv
-     (fn [[k v]]
-       {"key"   (name k)
-        "value" v})
-     instance-attributes)))
+    (m->kv-vec instance-attributes)))
+
+(mu/defn- get-translation-count
+  :- [:map [:ok :int] [:not-found :int] [:invalid-format :int] [:total :int]]
+  "Get and clear the entity-id translation counter. This is meant to be called during the daily stats collection process."
+  []
+  (let [counter (setting/get-value-of-type :json :entity-id-translation-counter)]
+    (merge counter {:total (apply + (vals counter))})))
+
+(mu/defn- clear-translation-count!
+  "We want to reset the eid translation count on every stat ping, so we do it here."
+  []
+  (u/prog1 eid-translation/default-counter
+    (setting/set-value-of-type! :json :entity-id-translation-counter <>)))
+
+(defn- categorize-query-execution [{client :embedding_client executor :executor_id}]
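+  ;; Bucket a query execution by how it was run; this feeds the query_executions_by_source grouped metric.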
+  (cond
+    (= "embedding-sdk-react" client)                     "sdk_embed"
+    (and (= "embedding-iframe" client) (some? executor)) "interactive_embed"
+    (and (= "embedding-iframe" client) (nil? executor))  "static_embed"
+    (and (#{"" nil} client) (nil? executor))             "public_link"
+    :else                                                "internal"))
+
+(defn- ->one-day-ago []
+  (t/minus (t/offset-date-time) (t/days 1)))
+
+(defn- ->snowplow-grouped-metric-info []
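+  ;; Seed every source with 0 so each group appears in the ping even when it had no executions.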
+  {:query_executions (merge
+                      {"sdk_embed" 0 "interactive_embed" 0 "static_embed" 0 "public_link" 0 "internal" 0}
+                      (-> categorize-query-execution
+                          (group-by
+                           (t2/select [:model/QueryExecution :embedding_client :executor_id]))
+                          (update-vals count)))})
+
+(defn- snowplow-grouped-metrics [{query-executions :query_executions :as _snowplow-grouped-metric-info}]
+  [{:name :query_executions_by_source
+    :values (mapv (fn [qe-group]
+                    {:group qe-group
+                     :value (get query-executions qe-group)})
+                  ["interactive_embed" "internal" "public_link" "sdk_embed" "static_embed"])
+    :tags ["embedding"]}])
+
+(defn- ->snowplow-metric-info
+  "Collects Snowplow metrics data that is not in the legacy stats format. Also clears entity id translation count."
+  []
+  (let [one-day-ago (->one-day-ago)
+        total-translation-count (:total (get-translation-count))
+        _ (clear-translation-count!)]
+    {:models                  (t2/count :model/Card :type :model :archived false)
+     :new_embedded_dashboards (t2/count :model/Dashboard
+                                        :enable_embedding true
+                                        :archived false
+                                        :created_at [:>= one-day-ago])
+     :new_users_last_24h        (t2/count :model/User
+                                          :is_active true
+                                          :date_joined [:>= one-day-ago])
+     :pivot_tables              (t2/count :model/Card :display :pivot :archived false)
+     :query_executions_last_24h (t2/count :model/QueryExecution :started_at [:>= one-day-ago])
+     :entity_id_translations_last_24h total-translation-count}))
+
+(mu/defn- snowplow-metrics
+  [stats metric-info :- [:map
+                         [:models :int]
+                         [:new_embedded_dashboards :int]
+                         [:new_users_last_24h :int]
+                         [:pivot_tables :int]
+                         [:query_executions_last_24h :int]
+                         [:entity_id_translations_last_24h :int]]]
+  (mapv
+   (fn [[k v tags]]
+     (assert (every? string? tags) "Tags must be strings in snowplow metrics.")
+     (assert (some? v) "Cannot have a nil value in snowplow metrics.")
+     {"name" (name k) "value" v "tags" (-> tags sort vec)})
+   [[:above_goal_alerts               (get-in stats [:stats :alert :above_goal] 0)                    #{"alerts"}]
+    [:alerts                          (get-in stats [:stats :alert :alerts] 0)                        #{"alerts"}]
+    [:all_time_query_executions       (get-in stats [:stats :execution :executions] 0)                #{"query_executions"}]
+    [:analyzed_databases              (get-in stats [:stats :database :databases :analyzed] 0)        #{}]
+    [:cache_average_entry_size        (get-in stats [:stats :cache :average_entry_size] 0)            #{"cache"}]
+    [:cache_num_queries_cached        (get-in stats [:stats :cache :num_queries_cached_unbinned] 0)   #{"cache"}]
+    [:cards_in_collections            (get-in stats [:stats :collection :cards_in_collections] 0)     #{"collections"}]
+    [:cards_not_in_collections        (get-in stats [:stats :collection :cards_not_in_collections] 0) #{"collections"}]
+    [:collections                     (get-in stats [:stats :collection :collections] 0)              #{"collections"}]
+    [:connected_databases             (get-in stats [:stats :database :databases :total] 0)           #{"databases"}]
+    [:dashboards_with_params          (get-in stats [:stats :dashboard :with_params] 0)               #{"dashboards"}]
+    [:embedded_dashboards             (get-in stats [:stats :dashboard :embedded :total] 0)           #{"dashboards" "embedding"}]
+    [:embedded_questions              (get-in stats [:stats :question :embedded :total] 0)            #{"questions" "embedding"}]
+    [:entity_id_translations_last_24h (:entity_id_translations_last_24h metric-info 0)                #{"embedding"}]
+    [:first_time_only_alerts          (get-in stats [:stats :alert :first_time_only] 0)               #{"alerts"}]
+    [:metabase_fields                 (get-in stats [:stats :field :fields] 0)                        #{"fields"}]
+    [:metrics                         (get-in stats [:stats :metric :metrics] 0)                      #{"metrics"}]
+    [:models                          (:models metric-info 0)                                         #{}]
+    [:native_questions                (get-in stats [:stats :question :questions :native] 0)          #{"questions"}]
+    [:new_embedded_dashboards         (:new_embedded_dashboards metric-info 0)                        #{}]
+    [:new_users_last_24h              (:new_users_last_24h metric-info 0)                             #{"users"}]
+    [:permission_groups               (get-in stats [:stats :group :groups] 0)                        #{"permissions"}]
+    [:pivot_tables                    (:pivot_tables metric-info 0)                                   #{}]
+    [:public_dashboards               (get-in stats [:stats :dashboard :public :total] 0)             #{"dashboards"}]
+    [:public_dashboards_with_params   (get-in stats [:stats :dashboard :public :with_params] 0)       #{"dashboards"}]
+    [:public_questions                (get-in stats [:stats :question :public :total] 0)              #{"questions"}]
+    [:public_questions_with_params    (get-in stats [:stats :question :public :with_params] 0)        #{"questions"}]
+    [:query_builder_questions         (get-in stats [:stats :question :questions :total] 0)           #{"questions"}]
+    [:query_executions_last_24h       (:query_executions_last_24h metric-info 0)                      #{"query_executions"}]
+    [:questions                       (get-in stats [:stats :question :questions :total] 0)           #{"questions"}]
+    [:questions_with_params           (get-in stats [:stats :question :questions :with_params] 0)     #{"questions"}]
+    [:segments                        (get-in stats [:stats :segment :segments] 0)                    #{"segments"}]
+    [:tables                          (get-in stats [:stats :table :tables] 0)                        #{"tables"}]
+    [:users                           (get-in stats [:stats :user :users :total] 0)                   #{"users"}]]))
 
 (defn- whitelabeling-in-use?
   "Are any whitelabeling settings set to values other than their default?"
@@ -735,23 +848,42 @@
   "Send stats to Metabase's snowplow collector. Transforms stats into the format required by the Snowplow schema."
   [stats]
   (let [instance-attributes (snowplow-instance-attributes stats)
+        metrics             (snowplow-metrics stats (->snowplow-metric-info))
+        grouped-metrics     (snowplow-grouped-metrics (->snowplow-grouped-metric-info))
         features            (snowplow-features)]
-    {:instance-attributes instance-attributes
-     :features            features}))
+    ;; settings is required in the json schema, but it is still empty; its data will be included in a later milestone:
+    {:analytics_uuid      (snowplow/analytics-uuid)
+     :features            features
+     :grouped_metrics     grouped-metrics
+     :instance_attributes instance-attributes
+     :metrics             metrics
+     :settings            []}))
+
+(defn- generate-instance-stats!
+  "Generate stats for this instance as data"
+  []
+  (let [stats (legacy-anonymous-usage-stats)]
+    {:stats (-> stats
+                ;; `:num_queries_cached_unbinned` is added to [[legacy-anonymous-usage-stats]]'s return value to make
+                ;; computing [[snowplow-anonymous-usage-stats]] more efficient. It shouldn't be sent by
+                ;; [[send-stats-deprecated!]].
+                (update-in [:stats :cache] dissoc :num_queries_cached_unbinned))
+     :snowplow-stats (snowplow-anonymous-usage-stats stats)}))
 
 (defn phone-home-stats!
   "Collect usage stats and phone them home"
   []
   (when (public-settings/anon-tracking-enabled)
-    (let [start-time-ms  (System/currentTimeMillis)
-          stats          (legacy-anonymous-usage-stats)
-          snowplow-stats (snowplow-anonymous-usage-stats stats)
-          end-time-ms    (System/currentTimeMillis)
-          elapsed-secs   (quot (- end-time-ms start-time-ms) 1000)]
+    (let [start-time-ms                  (System/currentTimeMillis)
+          {:keys [stats snowplow-stats]} (generate-instance-stats!)
+          end-time-ms                    (System/currentTimeMillis)
+          elapsed-secs                   (quot (- end-time-ms start-time-ms) 1000)
+          snowplow-data                  (assoc snowplow-stats
+                                                :metadata [{"key"   "stats_export_time_seconds"
+                                                            "value" elapsed-secs}])]
+      (assert (= #{:analytics_uuid :features :grouped_metrics :instance_attributes :metadata :metrics :settings}
+                 (set (keys snowplow-data)))
+              (str "Missing required keys in snowplow-data. got:" (sort (keys snowplow-data))))
       #_{:clj-kondo/ignore [:deprecated-var]}
       (send-stats-deprecated! stats)
-      (snowplow/track-event! ::snowplow/instance_stats
-                             (assoc snowplow-stats
-                                    :metadata
-                                    [{"key"   "stats_export_time_seconds"
-                                      "value" elapsed-secs}])))))
+      (snowplow/track-event! ::snowplow/instance_stats snowplow-data))))
diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj
index b5cbb6cde208404940786253d78328c06d49aa7a..bc7e0210d48b23015e120508a77b552ff9a360a2 100644
--- a/src/metabase/api/card.clj
+++ b/src/metabase/api/card.clj
@@ -702,10 +702,11 @@
 
   `parameters` should be passed as query parameter encoded as a serialized JSON string (this is because this endpoint
   is normally used to power 'Download Results' buttons that use HTML `form` actions)."
-  [card-id export-format :as {{:keys [parameters format_rows]} :params}]
+  [card-id export-format :as {{:keys [parameters pivot_results format_rows]} :params}]
   {card-id       ms/PositiveInt
    parameters    [:maybe ms/JSONString]
-   format_rows   [:maybe :boolean]
+   format_rows   [:maybe ms/BooleanValue]
+   pivot_results [:maybe ms/BooleanValue]
    export-format (into [:enum] api.dataset/export-formats)}
   (qp.card/process-query-for-card
    card-id export-format
@@ -715,7 +716,8 @@
    :middleware  {:process-viz-settings?  true
                  :skip-results-metadata? true
                  :ignore-cached-results? true
-                 :format-rows?           format_rows
+                 :format-rows?           (or format_rows false)
+                 :pivot?                 (or pivot_results false)
                  :js-int-to-string?      false}))
 
 ;;; ----------------------------------------------- Sharing is Caring ------------------------------------------------
diff --git a/src/metabase/api/dashboard.clj b/src/metabase/api/dashboard.clj
index b380de653c0e8431c6df7ffc24c1394a2bb030ef..7e6d4a5152c15823461e59f906401ac4f36c21b4 100644
--- a/src/metabase/api/dashboard.clj
+++ b/src/metabase/api/dashboard.clj
@@ -1263,13 +1263,14 @@
 
   `parameters` should be passed as query parameter encoded as a serialized JSON string (this is because this endpoint
   is normally used to power 'Download Results' buttons that use HTML `form` actions)."
-  [dashboard-id dashcard-id card-id export-format :as {{:keys [parameters format_rows], :as request-parameters} :params}]
+  [dashboard-id dashcard-id card-id export-format :as {{:keys [parameters format_rows pivot_results] :as request-parameters} :params}]
   {dashboard-id  ms/PositiveInt
    dashcard-id   ms/PositiveInt
    card-id       ms/PositiveInt
    parameters    [:maybe ms/JSONString]
    export-format api.dataset/ExportFormat
-   format_rows   [:maybe :boolean]}
+   format_rows   [:maybe ms/BooleanValue]
+   pivot_results [:maybe ms/BooleanValue]}
   (m/mapply qp.dashboard/process-query-for-dashcard
             (merge
              request-parameters
@@ -1286,7 +1287,8 @@
               :middleware    {:process-viz-settings?  true
                               :skip-results-metadata? true
                               :ignore-cached-results? true
-                              :format-rows?           format_rows
+                              :format-rows?           (or format_rows false)
+                              :pivot?                 (or pivot_results false)
                               :js-int-to-string?      false}})))
 
 (api/defendpoint POST "/pivot/:dashboard-id/dashcard/:dashcard-id/card/:card-id/query"
diff --git a/src/metabase/api/dataset.clj b/src/metabase/api/dataset.clj
index e82cfd30c81d2a1ad9f1f6e99868a904912b2f57..1db18b317eff1f4d6f5f8fe202e01b6513099bbb 100644
--- a/src/metabase/api/dataset.clj
+++ b/src/metabase/api/dataset.clj
@@ -129,12 +129,11 @@
 
 (api/defendpoint POST ["/:export-format", :export-format export-format-regex]
   "Execute a query and download the result data as a file in the specified format."
-  [export-format :as {{:keys [query visualization_settings format_rows]
+  [export-format :as {{:keys [query visualization_settings]
                        :or   {visualization_settings "{}"}} :params}]
   {query                  ms/JSONString
    visualization_settings ms/JSONString
-   format_rows            [:maybe :boolean]
-   export-format          (into [:enum] export-formats)}
+   export-format          ExportFormat}
   (let [{:keys [was-pivot] :as query} (json/parse-string query keyword)
         query                         (dissoc query :was-pivot)
         viz-settings                  (-> (json/parse-string visualization_settings viz-setting-key-fn)
@@ -146,8 +145,7 @@
                                           (update :middleware #(-> %
                                                                    (dissoc :add-default-userland-constraints? :js-int-to-string?)
                                                                    (assoc :process-viz-settings? true
-                                                                          :skip-results-metadata? true
-                                                                          :format-rows? format_rows))))]
+                                                                          :skip-results-metadata? true))))]
     (run-streaming-query
      (qp/userland-query query)
      :export-format export-format
diff --git a/src/metabase/api/embed.clj b/src/metabase/api/embed.clj
index 005cd43581b445d258d4ddb67eaccba8ec4facf5..55981980ca84d9bf7aedad3cf17a20f26d51b4b5 100644
--- a/src/metabase/api/embed.clj
+++ b/src/metabase/api/embed.clj
@@ -90,7 +90,7 @@
      :card-id           card-id
      :token-params      (embed/get-in-unsigned-token-or-throw unsigned-token [:params])
      :embedding-params  (t2/select-one-fn :embedding_params Card :id card-id)
-     :query-params      (api.embed.common/parse-query-params query-params)
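+     ;; format_rows and pivot_results are export options rather than card parameters, so strip them before parsing.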
+     :query-params      (api.embed.common/parse-query-params (dissoc query-params :format_rows :pivot_results))
      :qp                qp
      :constraints       constraints
      :options           options)))
@@ -107,17 +107,19 @@
 
 (api/defendpoint GET ["/card/:token/query/:export-format", :export-format api.dataset/export-format-regex]
   "Like `GET /api/embed/card/query`, but returns the results as a file in the specified format."
-  [token export-format format_rows :as {:keys [query-params]}]
+  [token export-format format_rows pivot_results :as {:keys [query-params]}]
   {export-format (into [:enum] api.dataset/export-formats)
-   format_rows   [:maybe :boolean]}
+   format_rows   [:maybe :boolean]
+   pivot_results [:maybe :boolean]}
   (run-query-for-unsigned-token-async
    (unsign-and-translate-ids token)
    export-format
-   (api.embed.common/parse-query-params (dissoc (m/map-keys keyword query-params) :format_rows))
+   (api.embed.common/parse-query-params (dissoc (m/map-keys keyword query-params) :format_rows :pivot_results))
    :constraints nil
    :middleware {:process-viz-settings? true
                 :js-int-to-string?     false
-                :format-rows?          format_rows}))
+                :format-rows?          (or format_rows false)
+                :pivot?                (or pivot_results false)}))
 
 ;;; ----------------------------------------- /api/embed/dashboard endpoints -----------------------------------------
 
@@ -159,7 +161,7 @@
      :card-id          card-id
      :embedding-params (t2/select-one-fn :embedding_params Dashboard :id dashboard-id)
      :token-params     (embed/get-in-unsigned-token-or-throw unsigned-token [:params])
-     :query-params     (api.embed.common/parse-query-params (dissoc query-params :format_rows))
+     :query-params     (api.embed.common/parse-query-params (dissoc query-params :format_rows :pivot_results))
      :constraints      constraints
      :qp               qp
      :middleware       middleware)))
@@ -255,10 +257,11 @@
                       :export-format api.dataset/export-format-regex]
   "Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the
   `embedding-secret-key`, and return the data in one of the export formats."
-  [token export-format dashcard-id card-id format_rows :as {:keys [query-params]}]
+  [token export-format dashcard-id card-id format_rows pivot_results :as {:keys [query-params]}]
   {dashcard-id   ms/PositiveInt
    card-id       ms/PositiveInt
    format_rows   [:maybe :boolean]
+   pivot_results [:maybe :boolean]
    export-format (into [:enum] api.dataset/export-formats)}
   (process-query-for-dashcard-with-signed-token token
                                                 dashcard-id
@@ -268,7 +271,8 @@
                                                 :constraints nil
                                                 :middleware {:process-viz-settings? true
                                                              :js-int-to-string?     false
-                                                             :format-rows?          format_rows}))
+                                                             :format-rows?          (or format_rows false)
+                                                             :pivot?                (or pivot_results false)}))
 
 ;;; ----------------------------------------------- Param values -------------------------------------------------
 
diff --git a/src/metabase/api/embed/common.clj b/src/metabase/api/embed/common.clj
index f36b82c1c263cf624cdb3a2ca7bd4e96a49c9a6a..71c837eb67327de32cd5bb32c74426824934129b 100644
--- a/src/metabase/api/embed/common.clj
+++ b/src/metabase/api/embed/common.clj
@@ -12,6 +12,7 @@
    [metabase.api.dashboard :as api.dashboard]
    [metabase.api.public :as api.public]
    [metabase.driver.common.parameters.operators :as params.ops]
+   [metabase.eid-translation :as eid-translation]
    [metabase.models.card :as card]
    [metabase.models.params :as params]
    [metabase.models.setting :as setting :refer [defsetting]]
@@ -325,15 +326,8 @@
   "A Malli schema for a map of model names to a sequence of entity ids."
   (mc/schema [:map-of ApiName [:sequential :string]]))
 
-(def ^:private EidTranslationStatus
-  [:enum :ok :not-found :invalid-format])
-
 ;; -------------------- Entity Id Translation Analytics --------------------
 
-(def ^:private
-  default-eid-translation-counter
-  (zipmap (rest EidTranslationStatus) (repeat 0)))
-
 (defsetting entity-id-translation-counter
   (deferred-tru "A counter for tracking the number of entity_id -> id translations. Whenever we call [[model->entity-ids->ids]], we increment this counter by the number of translations.")
   :encryption :no
@@ -341,33 +335,21 @@
   :export?    false
   :audit      :never
   :type       :json
-  :default    default-eid-translation-counter
+  :default    eid-translation/default-counter
   :doc false)
 
-(defn- compute-result [processed-result current-count]
-  (merge-with + processed-result current-count))
-
 (mu/defn update-translation-count!
   "Update the entity-id translation counter with the results of a batch of entity-id translations."
-  [results :- [:sequential EidTranslationStatus]]
+  [results :- [:sequential eid-translation/Status]]
   (let [processed-result (frequencies results)]
     (entity-id-translation-counter!
-     (compute-result processed-result (entity-id-translation-counter)))))
-
-(defn- add-total [counter]
-  (merge counter {:total (apply + (vals counter))}))
-
-(defn get-and-clear-translation-count!
-  "Get and clear the entity-id translation counter. This is meant to be called during the daily stats collection process."
-  []
-  (u/prog1 (add-total (entity-id-translation-counter))
-    (entity-id-translation-counter! default-eid-translation-counter)))
+     (merge-with + processed-result (entity-id-translation-counter)))))
 
 (mu/defn- entity-ids->id-for-model :- [:sequential [:tuple
                                                     ;; We want to pass incorrectly formatted entity-ids through here,
                                                     ;; but this is assumed to be an entity-id:
                                                     :string
-                                                    [:map [:status EidTranslationStatus]]]]
+                                                    [:map [:status eid-translation/Status]]]]
   "Given a model and a sequence of entity ids on that model, return a pairs of entity-id, id."
   [api-name eids]
   (let [model (->model api-name) ;; This lookup is safe because we've already validated the api-names
diff --git a/src/metabase/api/public.clj b/src/metabase/api/public.clj
index dcd1b0b4dff7a38fe43680d21e914eccba49a10f..8c885f7bf3bf9fd6d18350e19205792868b1c333 100644
--- a/src/metabase/api/public.clj
+++ b/src/metabase/api/public.clj
@@ -188,10 +188,11 @@
 (api/defendpoint GET "/card/:uuid/query/:export-format"
   "Fetch a publicly-accessible Card and return query results in the specified format. Does not require auth
   credentials. Public sharing must be enabled."
-  [uuid export-format :as {{:keys [parameters format_rows]} :params}]
+  [uuid export-format :as {{:keys [parameters format_rows pivot_results]} :params}]
   {uuid          ms/UUIDString
    export-format api.dataset/ExportFormat
    format_rows   [:maybe :boolean]
+   pivot_results [:maybe :boolean]
    parameters    [:maybe ms/JSONString]}
   (process-query-for-card-with-public-uuid
    uuid
@@ -200,7 +201,8 @@
    :constraints nil
    :middleware {:process-viz-settings? true
                 :js-int-to-string?     false
-                :format-rows?          format_rows}))
+                :format-rows?          (or format_rows false)
+                :pivot?                (or pivot_results false)}))
 
 ;;; ----------------------------------------------- Public Dashboards ------------------------------------------------
 
@@ -309,11 +311,13 @@
 (api/defendpoint POST ["/dashboard/:uuid/dashcard/:dashcard-id/card/:card-id/:export-format"
                        :export-format api.dataset/export-format-regex]
   "Fetch the results of running a publicly-accessible Card belonging to a Dashboard and return the data in one of the export formats. Does not require auth credentials. Public sharing must be enabled."
-  [uuid card-id dashcard-id parameters export-format]
+  [uuid card-id dashcard-id parameters export-format :as {{:keys [format_rows pivot_results]} :params}]
   {uuid          ms/UUIDString
    dashcard-id   ms/PositiveInt
    card-id       ms/PositiveInt
    parameters    [:maybe ms/JSONString]
+   format_rows   [:maybe ms/BooleanValue]
+   pivot_results [:maybe ms/BooleanValue]
    export-format (into [:enum] api.dataset/export-formats)}
   (validation/check-public-sharing-enabled)
   (api/check-404 (t2/select-one-pk :model/Card :id card-id :archived false))
@@ -323,7 +327,11 @@
               :card-id       card-id
               :dashcard-id   dashcard-id
               :export-format export-format
-              :parameters    parameters))))
+              :parameters    parameters
+              :constraints   nil
+              :middleware    {:process-viz-settings? true
+                              :format-rows?          (or format_rows false)
+                              :pivot?                (or pivot_results false)}))))
 
 (api/defendpoint GET "/dashboard/:uuid/dashcard/:dashcard-id/execute"
   "Fetches the values for filling in execution parameters. Pass PK parameters and values to select."
diff --git a/src/metabase/db/custom_migrations.clj b/src/metabase/db/custom_migrations.clj
index cee80df09a34d383d84f4b98814b6bd85aab463e..ce85d2b2d5e069f891b648fea06250e0ddcb0ba4 100644
--- a/src/metabase/db/custom_migrations.clj
+++ b/src/metabase/db/custom_migrations.clj
@@ -1260,8 +1260,11 @@
                   (t2/query {:insert-into table-name :values values})))
               (let [group-id (:id (t2/query-one {:select :id :from :permissions_group :where [:= :name "All Users"]}))]
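+                ;; Grant the All Users group read-and-write access to the example collection.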
                 (t2/query {:insert-into :permissions
-                           :values      [{:object   (format "/collection/%s/" example-collection-id)
-                                          :group_id group-id}]}))
+                           :values      [{:object        (format "/collection/%s/" example-collection-id)
+                                          :group_id      group-id
+                                          :perm_type     "perms/collection-access"
+                                          :perm_value    "read-and-write"
+                                          :collection_id example-collection-id}]}))
               (t2/query {:insert-into :setting
                          :values      [{:key   "example-dashboard-id"
                                         :value (str example-dashboard-id)}]})))))))
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index e6eaf52ab5101e9e8f05c28ee01d4ce9ba7cb689..1a225a99ac9d82af3a83a2c91425af9300e4a1c0 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -735,7 +735,8 @@
                               :schemas                                true
                               :test/jvm-timezone-setting              true
                               :fingerprint                            true
-                              :upload-with-auto-pk                    true}]
+                              :upload-with-auto-pk                    true
+                              :test/dynamic-dataset-loading           true}]
   (defmethod database-supports? [::driver feature] [_driver _feature _db] supported?))
 
 ;;; By default a driver supports `:native-parameter-card-reference` if it supports `:native-parameters` AND
diff --git a/src/metabase/eid_translation.clj b/src/metabase/eid_translation.clj
new file mode 100644
index 0000000000000000000000000000000000000000..de8444819d1cd6f7042459b9483f9b45bef0eca2
--- /dev/null
+++ b/src/metabase/eid_translation.clj
@@ -0,0 +1,9 @@
+(ns metabase.eid-translation)
+
+(def Status
+  "Malli enum for possible statuses for entity_id -> id translations."
+  [:enum :ok :not-found :invalid-format])
+
+(def default-counter
+  "The empty counter for tracking the number of entity_id -> id translations."
+  (zipmap (rest Status) (repeat 0)))
diff --git a/src/metabase/email/messages.clj b/src/metabase/email/messages.clj
index 3902ca0d00cdf8374bcb248a5f71dd15a52e07fb..4f00f9217e8d253dcfa93fb648a193ede6167bb7 100644
--- a/src/metabase/email/messages.clj
+++ b/src/metabase/email/messages.clj
@@ -392,7 +392,7 @@
   point in the future; for now, this function is a stopgap.
 
   Results are streamed synchronously. Caller is responsible for closing `os` when this call is complete."
-  [export-format format-rows? ^OutputStream os {{:keys [rows]} :data, database-id :database_id, :as results}]
+  [^OutputStream os {:keys [export-format format-rows? pivot?]} {{:keys [rows]} :data, database-id :database_id, :as results}]
   ;; make sure Database/driver info is available for the streaming results writers -- they might need this in order to
   ;; get timezone information when writing results
   (driver/with-driver (driver.u/database->driver database-id)
@@ -405,6 +405,7 @@
         (qp.si/begin! w
                       (-> results
                           (assoc-in [:data :format-rows?] format-rows?)
+                          (assoc-in [:data :pivot?] pivot?)
                           (assoc-in [:data :ordered-cols] ordered-cols))
                       viz-settings')
         (dorun
@@ -415,18 +416,18 @@
         (qp.si/finish! w results)))))
 
 (defn- result-attachment
-  [{{card-name :name format-rows :format_rows :as card} :card
+  [{{card-name :name format-rows :format_rows pivot-results :pivot_results :as card} :card
     {{:keys [rows]} :data :as result}                   :result}]
   (when (seq rows)
     [(when-let [temp-file (and (:include_csv card)
                                (create-temp-file-or-throw "csv"))]
        (with-open [os (io/output-stream temp-file)]
-         (stream-api-results-to-export-format :csv format-rows os result))
+         (stream-api-results-to-export-format os {:export-format :csv :format-rows? format-rows :pivot? pivot-results} result))
        (create-result-attachment-map "csv" card-name temp-file))
      (when-let [temp-file (and (:include_xls card)
                                (create-temp-file-or-throw "xlsx"))]
        (with-open [os (io/output-stream temp-file)]
-         (stream-api-results-to-export-format :xlsx format-rows os result))
+         (stream-api-results-to-export-format os {:export-format :xlsx :format-rows? format-rows :pivot? pivot-results} result))
        (create-result-attachment-map "xlsx" card-name temp-file))]))
 
 (defn- part-attachments [parts]
@@ -509,7 +510,7 @@
   (for [{{result-card-id :id} :card :as result} results
         :let [pulse-card (m/find-first #(= (:id %) result-card-id) (:cards pulse))]]
     (if result-card-id
-      (update result :card merge (select-keys pulse-card [:include_csv :include_xls :format_rows]))
+      (update result :card merge (select-keys pulse-card [:include_csv :include_xls :format_rows :pivot_results]))
       result)))
 
 (defn render-pulse-email
diff --git a/src/metabase/lib/convert.cljc b/src/metabase/lib/convert.cljc
index 39487ed2518ae6fd50f2c498cf1c4c7966adf617..387321211b00a580292196dc0d87e6a8c7c4e3d7 100644
--- a/src/metabase/lib/convert.cljc
+++ b/src/metabase/lib/convert.cljc
@@ -84,16 +84,23 @@
       clean-stage-schema-errors
       clean-stage-ref-errors))
 
+(def ^:dynamic *clean-query*
+  "If true (this is the default), the query is cleaned.
+  When converting queries at later stages of the preprocessing pipeline, this cleaning might not be desirable."
+  true)
+
 (defn- clean [almost-query]
-  (loop [almost-query almost-query
-         stage-index 0]
-    (let [current-stage (nth (:stages almost-query) stage-index)
-          new-stage (clean-stage current-stage)]
-      (if (= current-stage new-stage)
-        (if (= stage-index (dec (count (:stages almost-query))))
-          almost-query
-          (recur almost-query (inc stage-index)))
-        (recur (update almost-query :stages assoc stage-index new-stage) stage-index)))))
+  (if-not *clean-query*
+    almost-query
+    (loop [almost-query almost-query
+           stage-index 0]
+      (let [current-stage (nth (:stages almost-query) stage-index)
+            new-stage (clean-stage current-stage)]
+        (if (= current-stage new-stage)
+          (if (= stage-index (dec (count (:stages almost-query))))
+            almost-query
+            (recur almost-query (inc stage-index)))
+          (recur (update almost-query :stages assoc stage-index new-stage) stage-index))))))
 
 (defmulti ->pMBQL
   "Coerce something to pMBQL (the version of MBQL manipulated by Metabase Lib v2) if it's not already pMBQL."
@@ -523,7 +530,7 @@
 
 (defmethod ->legacy-MBQL :mbql/join [join]
   (let [base (cond-> (disqualify join)
-               (str/starts-with? (:alias join) legacy-default-join-alias) (dissoc :alias))]
+               (and *clean-query* (str/starts-with? (:alias join) legacy-default-join-alias)) (dissoc :alias))]
     (merge (-> base
                (dissoc :stages :conditions)
                (update-vals ->legacy-MBQL))
diff --git a/src/metabase/lib/metadata/jvm.clj b/src/metabase/lib/metadata/jvm.clj
index 5d72fb866c321b229f4d5c164abba23a647e8bb9..147d324550b1f0d40d63d9866c1cf73aeda39e9f 100644
--- a/src/metabase/lib/metadata/jvm.clj
+++ b/src/metabase/lib/metadata/jvm.clj
@@ -139,7 +139,8 @@
   [query-type model parsed-args honeysql]
   (merge
    (next-method query-type model parsed-args honeysql)
-   {:select    [:field/base_type
+   {:select    [:field/active
+                :field/base_type
                 :field/coercion_strategy
                 :field/database_type
                 :field/description
diff --git a/src/metabase/lib/remove_replace.cljc b/src/metabase/lib/remove_replace.cljc
index 8853d0e60207aabbc2b6867ceb637e2660b59e36..1d25673996ff1e80efe2dec7edcbeea423bbb2a6 100644
--- a/src/metabase/lib/remove_replace.cljc
+++ b/src/metabase/lib/remove_replace.cljc
@@ -205,10 +205,10 @@
                              (some (fn [{:keys [lib/source lib/source-uuid] :as column}]
                                      (when (and (= :source/previous-stage source) (= target-uuid source-uuid))
                                        (:lib/desired-column-alias column)))))]
-      (if target-ref-id
+      (cond-> query
         ;; We are moving to the next stage, so pass the current query as the unmodified-query-for-stage
-        (remove-local-references query stage-number query :field {} target-ref-id)
-        query))
+        target-ref-id
+        (remove-local-references stage-number query :field {} target-ref-id)))
     query))
 
 (defn- find-location
diff --git a/src/metabase/lib/schema/metadata.cljc b/src/metabase/lib/schema/metadata.cljc
index 5be304ff1cd20e70b1f479cd0839084572a10ed0..718b350f5064bdf815e1accf28617d696ca42d8f 100644
--- a/src/metabase/lib/schema/metadata.cljc
+++ b/src/metabase/lib/schema/metadata.cljc
@@ -150,6 +150,7 @@
    [:effective-type {:optional true} [:maybe ::lib.schema.common/base-type]]
    ;; type of this column in the data warehouse, e.g. `TEXT` or `INTEGER`
    [:database-type  {:optional true} [:maybe :string]]
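+   ;; whether this field is active; sync marks fields as inactive when they no longer exist in the source database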
+   [:active         {:optional true} :boolean]
    ;; if this is a field from another table (implicit join), this is the field in the current table that should be
    ;; used to perform the implicit join. e.g. if current table is `VENUES` and this field is `CATEGORIES.ID`, then the
    ;; `fk_field_id` would be `VENUES.CATEGORY_ID`. In a `:field` reference this is saved in the options map as
diff --git a/src/metabase/models/pulse.clj b/src/metabase/models/pulse.clj
index b94d120c8db45dd4467da7e9ea04766013197d40..79dfc546145a5ed6697cd5d75b4ca3bbf62fea46 100644
--- a/src/metabase/models/pulse.clj
+++ b/src/metabase/models/pulse.clj
@@ -238,7 +238,7 @@
   [pulse-ids]
   (t2/select
    :model/Card
-   {:select    [:c.id :c.name :c.description :c.collection_id :c.display :pc.include_csv :pc.include_xls :pc.format_rows
+   {:select    [:c.id :c.name :c.description :c.collection_id :c.display :pc.include_csv :pc.include_xls :pc.format_rows :pc.pivot_results
                 :pc.dashboard_card_id :dc.dashboard_id [nil :parameter_mappings] [:p.id :pulse_id]] ;; :dc.parameter_mappings - how do you select this?
     :from      [[:pulse :p]]
     :join      [[:pulse_card :pc] [:= :p.id :pc.pulse_id]
@@ -423,6 +423,7 @@
    :include_csv       (get card :include_csv false)
    :include_xls       (get card :include_xls false)
    :format_rows       (get card :format_rows true)
+   :pivot_results     (get card :pivot_results false)
    :dashboard_card_id (get card :dashboard_card_id nil)})
 
 ;;; ------------------------------------------ Other Persistence Functions -------------------------------------------
@@ -439,13 +440,14 @@
   (t2/delete! PulseCard :pulse_id (u/the-id notification-or-id))
   ;; now just insert all of the cards that were given to us
   (when (seq card-refs)
-    (let [cards (map-indexed (fn [i {card-id :id :keys [include_csv include_xls format_rows dashboard_card_id]}]
+    (let [cards (map-indexed (fn [i {card-id :id :keys [include_csv include_xls format_rows pivot_results dashboard_card_id]}]
                                {:pulse_id          (u/the-id notification-or-id)
                                 :card_id           card-id
                                 :position          i
                                 :include_csv       include_csv
                                 :include_xls       include_xls
                                 :format_rows       format_rows
+                                :pivot_results     pivot_results
                                 :dashboard_card_id dashboard_card_id})
                              card-refs)]
       (t2/insert! PulseCard cards))))
diff --git a/src/metabase/models/pulse_card.clj b/src/metabase/models/pulse_card.clj
index b42d03124f1544186cb229175595e01be093740c..18e80d1c7f4c66ca5aa0f27dffe5acd3f76402c4 100644
--- a/src/metabase/models/pulse_card.clj
+++ b/src/metabase/models/pulse_card.clj
@@ -34,18 +34,20 @@
    [:position          {:optional true} [:maybe ms/IntGreaterThanOrEqualToZero]]
    [:include_csv       {:optional true} [:maybe :boolean]]
    [:include_xls       {:optional true} [:maybe :boolean]]
-   [:format_rows       {:optional true} [:maybe :boolean]]])
+   [:format_rows       {:optional true} [:maybe :boolean]]
+   [:pivot_results     {:optional true} [:maybe :boolean]]])
 
 (mu/defn bulk-create!
   "Creates new PulseCards, joining the given card, pulse, and dashboard card and setting appropriate defaults for other
   values if they're not provided."
   [new-pulse-cards :- [:sequential NewPulseCard]]
   (t2/insert! PulseCard
-              (for [{:keys [card_id pulse_id dashboard_card_id position include_csv include_xls format_rows]} new-pulse-cards]
+              (for [{:keys [card_id pulse_id dashboard_card_id position include_csv include_xls format_rows pivot_results]} new-pulse-cards]
                 {:card_id           card_id
                  :pulse_id          pulse_id
                  :dashboard_card_id dashboard_card_id
                  :position          (u/or-with some? position (next-position-for pulse_id))
                  :include_csv       (boolean include_csv)
                  :include_xls       (boolean include_xls)
-                 :format_rows       (boolean format_rows)})))
+                 :format_rows       (boolean format_rows)
+                 :pivot_results     (boolean pivot_results)})))
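
A hedged usage sketch of `bulk-create!` with the new `:pivot_results` flag; the ids and the `pulse-card` alias below are hypothetical, not taken from this change:

(require '[metabase.models.pulse-card :as pulse-card])

;; hypothetical ids; per the (boolean ...) coercion above, a missing :pivot_results is stored as false
(pulse-card/bulk-create!
 [{:card_id           101
   :pulse_id          7
   :dashboard_card_id 55
   :include_csv       true
   :include_xls       false
   :format_rows       true
   :pivot_results     true}])
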
diff --git a/src/metabase/pulse/util.clj b/src/metabase/pulse/util.clj
index a18b68c555a692b54ef29f2b30d6159583d6f151..04f71e9bcc3db1898372cbc02b605b8a3eadc02b 100644
--- a/src/metabase/pulse/util.clj
+++ b/src/metabase/pulse/util.clj
@@ -5,6 +5,7 @@
    [metabase.query-processor :as qp]
    [metabase.query-processor.dashboard :as qp.dashboard]
    [metabase.query-processor.middleware.permissions :as qp.perms]
+   [metabase.query-processor.pivot :as qp.pivot]
    [metabase.server.middleware.session :as mw.session]
    [metabase.util :as u]
    [metabase.util.log :as log]
@@ -24,19 +25,24 @@
                   card-type :type
                   :as       card} (t2/select-one :model/Card :id card-id, :archived false)]
         (let [query         (assoc query :async? false)
+              process-fn (if (= :pivot (:display card))
+                           qp.pivot/run-pivot-query
+                           qp/process-query)
               process-query (fn []
                               (binding [qp.perms/*card-id* card-id]
-                                (qp/process-query
+                                (process-fn
                                  (qp/userland-query
-                                  (assoc query :middleware {:skip-results-metadata?            true
-                                                            :process-viz-settings?             true
-                                                            :js-int-to-string?                 false
-                                                            :add-default-userland-constraints? false})
+                                  (assoc query
+                                         :middleware {:skip-results-metadata?            true
+                                                      :process-viz-settings?             true
+                                                      :js-int-to-string?                 false
+                                                      :add-default-userland-constraints? false})
                                   (merge (cond-> {:executed-by pulse-creator-id
                                                   :context     :pulse
                                                   :card-id     card-id}
                                            (= card-type :model)
                                            (assoc :metadata/model-metadata metadata))
+                                         {:visualization-settings (:visualization_settings card)}
                                          options)))))
               result        (if pulse-creator-id
                               (mw.session/with-current-user pulse-creator-id
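
A minimal sketch of the dispatch this hunk introduces (the `card` map is invented): pivot cards are routed through the pivot QP, and the card's visualization settings are merged into the query info so downstream export code can read the totals settings.

(let [card {:display                :pivot
            :visualization_settings {:pivot.show_row_totals false}}]
  ;; picks qp.pivot/run-pivot-query here; otherwise qp/process-query
  (if (= :pivot (:display card))
    qp.pivot/run-pivot-query
    qp/process-query))
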
diff --git a/src/metabase/query_processor/middleware/pivot_export.clj b/src/metabase/query_processor/middleware/pivot_export.clj
index 25903f4feee91fec83c2e861c33152e1dfe95c73..fc35b62827321590968a5febdefeb12c2188771d 100644
--- a/src/metabase/query_processor/middleware/pivot_export.clj
+++ b/src/metabase/query_processor/middleware/pivot_export.clj
@@ -6,6 +6,8 @@
   (fn add-query-for-pivot-rff* [metadata]
     ;; the `qp.si/streaming-results-writer` implmementations can apply/not-apply formatting based on the key's value
     (let [opts     (get-in query [:middleware :pivot-options])
+          pivot    (get-in query [:middleware :pivot?])
           metadata (cond-> metadata
-                     opts (assoc :pivot-export-options opts))]
+                     opts  (assoc :pivot-export-options opts)
+                     pivot (assoc :pivot? pivot))]
       (rff metadata))))
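
A sketch of the metadata this middleware hands to the `rff` when both keys are present; the query and metadata maps are invented:

(let [query    {:middleware {:pivot?        true
                             :pivot-options {:pivot-rows [0] :pivot-cols [1]}}}
      metadata {:cols []}
      opts     (get-in query [:middleware :pivot-options])
      pivot    (get-in query [:middleware :pivot?])]
  (cond-> metadata
    opts  (assoc :pivot-export-options opts)
    pivot (assoc :pivot? pivot)))
;; => {:cols [], :pivot-export-options {:pivot-rows [0], :pivot-cols [1]}, :pivot? true}
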
diff --git a/src/metabase/query_processor/middleware/remove_inactive_field_refs.clj b/src/metabase/query_processor/middleware/remove_inactive_field_refs.clj
new file mode 100644
index 0000000000000000000000000000000000000000..b85fc08d2eef2174dda463a998241697f89f14ca
--- /dev/null
+++ b/src/metabase/query_processor/middleware/remove_inactive_field_refs.clj
@@ -0,0 +1,132 @@
+(ns metabase.query-processor.middleware.remove-inactive-field-refs
+  "This middleware exists to let queries run even if some database columns have been removed in the data warehouse.
+
+  Queries that don't depend on removed columns (other than showing them) should run and show the available data.
+  Queries that use removed fields in other ways, e.g., for filtering or summarizing, will continue to fail.
+
+  We only try to fix queries if we know a column has been removed. We recognize this during the next sync: deleted
+  columns are marked active = false."
+  (:require
+   [metabase.lib.equality :as lib.equality]
+   [metabase.lib.metadata :as lib.metadata]
+   [metabase.lib.schema :as lib.schema]
+   [metabase.lib.util :as lib.util]
+   [metabase.lib.walk :as lib.walk]
+   [metabase.util :as u]
+   [metabase.util.malli :as mu]))
+
+(defn- collect-fields-clauses
+  [query]
+  (let [clauses (volatile! (transient {}))
+        visitor (fn [_query _path-type path stage-or-join]
+                  (let [fields (:fields stage-or-join)]
+                    (when (and (seqable? fields) (seq fields))
+                      (vswap! clauses assoc! path fields))
+                    nil))]
+    (lib.walk/walk query visitor)
+    (persistent! @clauses)))
+
+(defn- next-path
+  [query stage-path]
+  (let [type-index (- (count stage-path) 2)
+        parent-stage-path (subvec stage-path 0 type-index)
+        next-stage-path   (update stage-path (dec (count stage-path)) inc)]
+    (cond
+      (= (get stage-path type-index) :joins)
+      ;; the stage this join is in
+      parent-stage-path
+
+      (some? (get-in query next-stage-path))
+      next-stage-path
+
+      (pos? type-index)
+      ;; the join this stage is in
+      parent-stage-path)))
+
+(defn- source-metadata->stage-metadata
+  [source-metadata-column]
+  (-> source-metadata-column
+      (update-keys u/->kebab-case-en)
+      (assoc :lib/type :metadata/column)))
+
+(defn- column-metadata
+  [query stage-path]
+  (or (not-empty (get-in query (into stage-path [:lib/stage-metadata :columns])))
+      (not-empty (into [] (map source-metadata->stage-metadata) (get-in query (conj stage-path :source-metadata))))
+      (when (> (count stage-path) 2)
+        (column-metadata query (subvec stage-path 0 (- (count stage-path) 2))))))
+
+(defn- resolve-refs
+  [columns removed-field-refs default-alias]
+  (let [columns-with-default-alias (delay (into [] (map #(assoc % :source-alias default-alias)) columns))]
+    (mapv #(or (lib.equality/find-matching-column % columns)
+               (when default-alias
+                 (lib.equality/find-matching-column % @columns-with-default-alias)))
+          removed-field-refs)))
+
+(defn- propagate-removal
+  [query stage-path removed-field-refs]
+  (if-let [next-stage-path (next-path query stage-path)]
+    (if-not (-> query (get-in next-stage-path) :fields)
+      (recur query next-stage-path removed-field-refs)
+      (let [columns (column-metadata query stage-path)
+            removed-columns (when (seq columns)
+                              (resolve-refs columns removed-field-refs (:alias (get-in query stage-path))))
+            next-fields-path (conj next-stage-path :fields)
+            next-stage-fields (get-in query next-fields-path)
+            removed-field-refs (when (seq next-stage-fields)
+                                 (into #{}
+                                       (keep #(lib.equality/find-matching-ref % next-stage-fields))
+                                       removed-columns))]
+        (if-not (seq removed-field-refs)
+          query
+          (-> query
+              (assoc-in next-fields-path (into [] (remove removed-field-refs) next-stage-fields))
+              (recur next-stage-path removed-field-refs)))))
+    query))
+
+(defn- filter-fields-clause
+  [query stage-path fields active-field-ids]
+  (let [removed-field-refs (into #{}
+                                 (filter (fn [field]
+                                           (and (lib.util/field-clause? field)
+                                                (let [id (get field 2)]
+                                                  (and (integer? id)
+                                                       (not (active-field-ids id)))))))
+                                 fields)]
+    (if-not (seq removed-field-refs)
+      query
+      (-> query
+          (assoc-in (conj stage-path :fields) (into [] (remove removed-field-refs) fields))
+          (propagate-removal stage-path removed-field-refs)))))
+
+(defn- keep-active-fields
+  [query fields-clauses active-field-ids]
+  (reduce-kv #(filter-fields-clause %1 %2 %3 active-field-ids) query fields-clauses))
+
+(mu/defn remove-inactive-field-refs :- ::lib.schema/query
+  "Remove any references to fields that are not active.
+  This might result in a broken query, but the original query would break at run time too because of the
+  references to columns that do not exist in the database.
+  This middleware can fix queries that contain references that are not used other than being returned.
+
+  This function should be called after the point where the implicit :fields clauses are added to the query.
+  We determine which direct database field references are referencing active fields and remove the others.
+  Then we recursively remove references to the removed columns."
+  [query :- ::lib.schema/query]
+  (let [fields-clauses (collect-fields-clauses query)
+        field-ids (into #{}
+                        (comp cat
+                              (filter lib.util/field-clause?)
+                              (map #(get % 2))
+                              (filter integer?))
+                        (vals fields-clauses))
+        active-field-ids (if (seq field-ids)
+                           (into #{}
+                                 (comp (filter :active)
+                                       (map :id))
+                                 (lib.metadata/bulk-metadata query :metadata/column field-ids))
+                           #{})]
+    (cond-> query
+      (not= field-ids active-field-ids)
+      (keep-active-fields fields-clauses active-field-ids))))
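
A hedged before/after sketch of the intended effect on a pMBQL stage; the field ids, the simplified ref options maps, and the claim that field 42 is inactive are illustrative assumptions:

;; given a metadata provider where field 42 has :active false,
;; a stage like
;;   {:fields [[:field {} 10] [:field {} 42]]}
;; comes out of
(remove-inactive-field-refs query)
;; with
;;   {:fields [[:field {} 10]]}
;; and any later stage or join :fields clause that referenced the removed column is pruned recursively.
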
diff --git a/src/metabase/query_processor/pivot.clj b/src/metabase/query_processor/pivot.clj
index 3e0d6bbafd830cf3df896b870ec7085e217b032a..ecbca94d08165e47b158348344797add0bde2711 100644
--- a/src/metabase/query_processor/pivot.clj
+++ b/src/metabase/query_processor/pivot.clj
@@ -96,7 +96,8 @@
                        :pivot-cols    pivot-cols}))))
   (sort-by
    (partial group-bitmask num-breakouts)
-   (distinct
+   (m/distinct-by
+    (partial group-bitmask num-breakouts)
     (map
      (comp vec sort)
      ;; this can happen for the public/embed endpoints, where we aren't given a pivot-rows / pivot-cols parameter, so
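
For reference, medley's `m/distinct-by` keeps the first element for each distinct key, so breakout groups that reduce to the same bitmask are now deduplicated rather than only exact duplicates; a toy example:

(require '[medley.core :as m])

(m/distinct-by even? [1 2 3 4])
;; => (1 2)
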
diff --git a/src/metabase/query_processor/pivot/postprocess.clj b/src/metabase/query_processor/pivot/postprocess.clj
index 7f2225c9838a4d2b3856648025cb993f9eff4341..7b277a2700147dfd05ee577a4d80133c059c2c13 100644
--- a/src/metabase/query_processor/pivot/postprocess.clj
+++ b/src/metabase/query_processor/pivot/postprocess.clj
@@ -7,6 +7,7 @@
   (:require
    [clojure.math.combinatorics :as math.combo]
    [clojure.set :as set]
+   [metabase.query-processor.streaming.common :as common]
    [metabase.util.malli :as mu]
    [metabase.util.malli.registry :as mr]))
 
@@ -40,141 +41,6 @@
    [:pivot-measures {:optional true}
     [:sequential [:int {:min 0}]]]])
 
-(defn- all-values-for
-  "Get all possible values for pivot-col/row 'k'.
-  `rows` are the raw pivot rows, `idx` is the column index to get values from.
-  `include-nil?` controls whether an extra `nil` value is added to the returned values.
-  This extra nil is needed for some combinations later on, but not others, hence the switch."
-  [rows idx include-nil?]
-  (let [all-vals (distinct (mapv #(get % idx) rows))]
-    (concat (vec (remove nil? all-vals)) (when include-nil? [nil]))))
-
-(mu/defn- pivot-row-titles
-  [{:keys [column-titles pivot-rows pivot-cols]} :- ::pivot-spec]
-  (if (seq pivot-rows)
-    (mapv #(get column-titles %) pivot-rows)
-    [(get column-titles (first pivot-cols) "")]))
-
-(mu/defn- pivot-measure-titles
-  [{:keys [column-titles pivot-measures]} :- ::pivot-spec]
-  (mapv #(get column-titles %) pivot-measures))
-
-(mu/defn- header-builder
-  "Construct the export-style pivot headers from the raw pivot rows, according to the indices specified in `pivot-spec`."
-  [rows {:keys [pivot-cols pivot-measures] :as pivot-spec} :- ::pivot-spec]
-  (let [row-titles                 (pivot-row-titles pivot-spec)
-        measure-titles             (pivot-measure-titles pivot-spec)
-        n-measures                 (count pivot-measures)
-        multiple-measures?         (< 1 n-measures)
-        include-row-totals-header? (seq pivot-cols)
-        ;; For each pivot column, get the possible values for that column
-        ;; Then, get the cartesian product of each for all of the value groups
-        ;; Each group will have (count pivot-cols) entries and the values
-        ;; will be from the columns in the same order as presented in pivot-cols.
-        ;; So, if pivot-cols is [0 1], the first col-value-group will have [first-value-from-first-col first-value-from-second-col]
-        col-value-groups           (apply math.combo/cartesian-product (concat
-                                                                        (map (fn [col-k]
-                                                                               (all-values-for rows col-k false))
-                                                                             pivot-cols)
-                                                                        (when (seq measure-titles)
-                                                                          [measure-titles])))
-        header-indices             (if (or multiple-measures? (not (seq pivot-cols)))
-                                     ;; when there are more than 1 pivot-measures, we need to
-                                     ;; add one more header row that holds the titles of the measure columns
-                                     ;; and we know it's always just one more row, so we can inc the count.
-                                     (range (inc (count pivot-cols)))
-                                     (range (count pivot-cols)))]
-    ;; Each Header (1 header row per pivot-col) will first start with the Pivot Row Titles. There will be (count pivot-rows) entries.
-    ;; Then, Get all of the nth entries in the col-value-gropus for the nth header, and then append "Row Totals" label.
-    (mapv
-     (fn [col-idx]
-       (vec (concat
-             row-titles
-             (map #(nth % col-idx) col-value-groups)
-             (if (and
-                  multiple-measures?
-                  (seq pivot-cols)
-                  (= col-idx (last header-indices)))
-               measure-titles
-               (when include-row-totals-header?
-                 (repeat (max 1 n-measures) "Row totals"))))))
-     header-indices)))
-
-(mu/defn- col-grouper
-  "Map of raw pivot rows keyed by [pivot-cols]. Use it per row-group.
-  This constructs a map where you can use a pivot-cols-value tuple to find the row-builder
-  That is, suppose we have 2 pivot-cols, and valid values might be 'AA' and 'BA'. To get the
-  raw pivot row where each of those values matches, you can run this function to get `m` and then
-  use `(get m ['AA' 'BA'])` to get the row.
-
-  This is used inside `row-grouper` on a subset of the total list of raw pivot rows."
-  [rows {:keys [pivot-cols]} :- ::pivot-spec]
-  (when (seq pivot-cols)
-    (let [cols-groups (group-by (apply juxt (map (fn [k] #(get % k)) pivot-cols)) rows)]
-      cols-groups)))
-
-(mu/defn- row-grouper
-  "Map of raw pivot rows keyed by [pivot-rows]. The logic for how the map is initially constructed is the same
-  as in `col-grouper`. Then, each map entry value (which is a subset of rows) is updated by running the `sub-rows-fn` on the subset.
-  This sub-rows-fn does the following:
-   - group the sub rows with `col-grouper`
-   - create the list of all possible pivot column value combinations and manually concat the 'nils group' too
-   - using the `cols-groups` map, get the `padded-sub-rows`
-   - Now, since we only need the values in the `pivot-measures` indices, transform the sub-rows by getting just those indices
-  Thus, the output of the `row-grouper` is a nested map allowing you to get a raw pivot row corresponding to any combination
-  of pivot-row values and pivot-col values by doing something like this:
-
-  `(get-in m [[row-idx1 row-idx2] [col-idx1 col-idx2]])`"
-  [rows {:keys [pivot-rows pivot-cols pivot-measures] :as pivot-spec} :- ::pivot-spec]
-  (let [rows-groups (if (seq pivot-rows)
-                      (group-by (apply juxt (map (fn [k] #(get % k)) pivot-rows)) rows)
-                      {[nil] rows})
-        sub-rows-fn (fn [sub-rows]
-                      (let [cols-groups     (col-grouper sub-rows pivot-spec)
-                            padded-sub-rows (vec
-                                             (mapcat
-                                              ;; if the particular values combination is not found, we want to pad that with nil
-                                              ;; so that subsequent cells are rendered in the correct place
-                                              ;; in other words, each padded-sub-row must be the same length
-                                              #(get cols-groups (vec %) [nil])
-                                              ;; this is a list of all possible pivot col value combinations. eg. ['AA' 'BA']
-                                              ;; concat the nils so that the [nil nil] combination shows up once, which is
-                                              ;; necessary to include the row totals and grand totals
-                                              (concat
-                                               (apply math.combo/cartesian-product (map #(all-values-for rows % false) pivot-cols))
-                                               [(vec (repeat (count pivot-cols) nil))])))]
-                        (vec (mapcat (fn [row]
-                                       (mapv #(get row %) pivot-measures))
-                                     ;; cols-groups will be nil if there are no pivot columns
-                                     ;; In such a case, we don't need to modify the rows with padding
-                                     ;; we only need to grab the pivot-measures directly
-                                     (if cols-groups
-                                       padded-sub-rows
-                                       (take 1 sub-rows))))))]
-    (-> rows-groups
-        (update-vals sub-rows-fn))))
-
-(mu/defn- totals-row-fn
-  "Given a work in progress pivot export row (NOT a raw pivot row), add Totals labels if appropriate."
-  [export-style-row {:keys [pivot-rows]} :- ::pivot-spec]
-  (let [n-row-cols       (count pivot-rows)
-        row-indices      (range n-row-cols)
-        row-vals         (take n-row-cols export-style-row)
-        totals-for-value (fn [idx v]
-                           (if (and ((set row-indices) idx)
-                                    (some? v))
-                             (format "Totals for %s" v)
-                             v))]
-    (cond
-      (every? nil? row-vals)
-      (assoc export-style-row 0 "Grand Totals")
-
-      (some nil? row-vals)
-      (vec (map-indexed totals-for-value export-style-row))
-
-      :else
-      export-style-row)))
-
 (defn pivot-grouping-key
   "Get the index into the raw pivot rows for the 'pivot-grouping' column."
   [column-titles]
@@ -197,41 +63,193 @@
 (mu/defn add-pivot-measures :- ::pivot-spec
   "Given a pivot-spec map without the `:pivot-measures` key, determine what key(s) the measures will be and assoc that value into `:pivot-measures`."
   [pivot-spec :- ::pivot-spec]
-  (assoc pivot-spec :pivot-measures (pivot-measures pivot-spec)))
-
-(mu/defn- row-builder
-  "Construct the export-style pivot rows from the raw pivot rows, according to the indices specified in `pivot-spec`.
-
-  This function:
-   - creates the row-groups map using `row-grouper`. This already has the nearly complete rows.
-   - create the list of pivot row value combinations. We don't need the nils group this time; it would result in
-     a doubling of the Grand totals row
-     We DO want to generate tuples that can have 'nil' (eg. ['CA' nil]) so that Row totals rows will be grabbed from the map too.
-   - construct each row by concatenating the row values, since they're the labels for each pivot-row. Handily, the `ks` are exactly
-     these values, so we can just concat each k to its `row-groups` entry
-   - Run the `totals-row-fn` to add the Row totals and Grand totals labels in the right spots."
-  [rows {:keys [pivot-rows] :as pivot-spec} :- ::pivot-spec]
-  (let [row-groups (row-grouper rows pivot-spec)
-        ks         (if (seq pivot-rows)
-                     (mapv vec (concat
-                                (apply math.combo/cartesian-product (map #(all-values-for rows % true) pivot-rows))))
-                     [[nil]])]
-    (->> (map (fn [k] (vec (concat k (get row-groups k)))) ks)
-         (filter #(< (count pivot-rows) (count %)))
-         (map #(totals-row-fn % pivot-spec)))))
-
-(defn- clean-row
-  [row]
-  (mapv #(if (= "" %)
-           nil
-           %)
-        row))
-
-(mu/defn pivot-builder
-  "Create the rows for a pivot export from raw pivot rows `rows`."
-  [rows pivot-spec :- ::pivot-spec]
-  (let [rows       (mapv clean-row rows)
-        pivot-spec (add-pivot-measures pivot-spec)]
-    (vec (concat
-          (header-builder rows pivot-spec)
-          (row-builder rows pivot-spec)))))
+  (-> pivot-spec
+      (assoc :pivot-measures (pivot-measures pivot-spec))
+      (assoc :pivot-grouping (pivot-grouping-key (:column-titles pivot-spec)))))
+
+(mu/defn add-totals-settings :- ::pivot-spec
+  "Given a pivot-spec map and `viz-settings`, add the `:row-totals?` and `:col-totals?` keys."
+  [pivot-spec :- ::pivot-spec viz-settings]
+  (let [row-totals (if (contains? viz-settings :pivot.show_row_totals)
+                     (:pivot.show_row_totals viz-settings)
+                     true)
+        col-totals (if (contains? viz-settings :pivot.show_column_totals)
+                     (:pivot.show_column_totals viz-settings)
+                     true)]
+    (-> pivot-spec
+        (assoc :row-totals? row-totals)
+        (assoc :col-totals? col-totals))))
+
+(mu/defn init-pivot
+  "Initiate the pivot data structure."
+  [pivot-spec :- ::pivot-spec]
+  (let [{:keys [pivot-rows pivot-cols pivot-measures]} pivot-spec]
+    {:config         pivot-spec
+     :data           {}
+     :row-values     (zipmap pivot-rows (repeat (sorted-set)))
+     :column-values  (zipmap pivot-cols (repeat (sorted-set)))
+     :measure-values (zipmap pivot-measures (repeat (sorted-set)))}))
+
+(defn- update-set
+  [m k v]
+  (update m k conj v))
+
+(defn- update-aggregate
+  "Update the given `measure-aggregations` with `new-values` using the appropriate function in the `agg-fns` map.
+
+  `measure-aggregations` is a map keyed by pivot-measure; often just one key, but there can be several depending on how the user has set up their measures.
+  `new-values` are the values being added and have the same keys as `measure-aggregations`.
+  `agg-fns` is also a map of the measure keys indicating the type of aggregation.
+  For now (2024-09-10), each agg-fn is `+`, which works for every aggregation type in our implementation: the pivot qp
+  returns rows whose values have already been aggregated as specified in the query (e.g. count or sum), so the post-processing done here
+  only needs to sum them. For each 'cell', there will only ever be 1 value per measure (the already-aggregated value from the qp)."
+  [measure-aggregations new-values agg-fns]
+  (into {}
+        (map
+         (fn [[measure-key agg]]
+           (let [agg-fn (get agg-fns measure-key +) ; default aggregation is just summation
+                 new-v  (get new-values measure-key)]
+             [measure-key (if new-v
+                            (agg-fn agg new-v)
+                            agg)])))
+        measure-aggregations))
+
+(defn add-row
+  "Aggregate the given `row` into the `pivot` datastructure."
+  [pivot row]
+  (let [{:keys [pivot-rows
+                pivot-cols
+                pivot-measures
+                measures]} (:config pivot)
+        row-path           (mapv row pivot-rows)
+        col-path           (mapv row pivot-cols)
+        measure-vals       (select-keys row pivot-measures)
+        total-fn           (fn [m path]
+                             (if (seq path)
+                               (update-in m path
+                                          #(update-aggregate (or % (zipmap pivot-measures (repeat 0))) measure-vals measures))
+                               m))]
+    (-> pivot
+        (update :row-count (fn [v] (if v (inc v) 0)))
+        (update :data update-in (concat row-path col-path)
+                #(update-aggregate (or % (zipmap pivot-measures (repeat 0))) measure-vals measures))
+        (update :totals (fn [totals]
+                          (-> totals
+                              (total-fn [:grand-total])
+                              (total-fn row-path)
+                              (total-fn col-path)
+                              (total-fn [:section-totals (first row-path)])
+                              (total-fn (concat [:column-totals (first row-path)] col-path)))))
+        (update :row-values #(reduce-kv update-set % (select-keys row pivot-rows)))
+        (update :column-values #(reduce-kv update-set % (select-keys row pivot-cols))))))
+
+(defn- fmt
+  "Format a value using the provided formatter or identity function."
+  [formatter value]
+  ((or formatter identity) (common/format-value value)))
+
+(defn- build-column-headers
+  "Build multi-level column headers."
+  [{:keys [pivot-cols pivot-measures column-titles row-totals?]} col-combos col-formatters]
+  (concat
+   (if (= 1 (count pivot-measures))
+     (mapv (fn [col-combo] (mapv fmt col-formatters col-combo)) col-combos)
+     (for [col-combo col-combos
+           measure-key pivot-measures]
+       (conj
+        (mapv fmt col-formatters col-combo)
+        (get column-titles measure-key))))
+   (repeat (count pivot-measures)
+           (concat
+            (when (and row-totals? (seq pivot-cols)) ["Row totals"])
+            (repeat (dec (count pivot-cols)) nil)
+            (when (and (seq pivot-cols) (> (count pivot-measures) 1)) [nil])))))
+
+(defn- build-headers
+  "Combine row keys with column headers."
+  [column-headers {:keys [pivot-cols pivot-rows column-titles]}]
+  (map (fn [h]
+         (if (and (seq pivot-cols) (not (seq pivot-rows)))
+           (concat (map #(get column-titles %) pivot-cols) h)
+           (concat (map #(get column-titles %) pivot-rows) h)))
+       (let [hs (filter seq column-headers)]
+         (when (seq hs)
+           (apply map vector hs)))))
+
+(defn- build-row
+  "Build a single row of the pivot table."
+  [row-combo col-combos pivot-measures data totals row-totals? ordered-formatters row-formatters]
+  (let [row-path row-combo]
+    (concat
+     (when-not (seq row-formatters) (repeat (count pivot-measures) nil))
+     (mapv fmt row-formatters row-combo)
+     (concat
+      (for [col-combo col-combos
+            measure-key pivot-measures]
+        (fmt (get ordered-formatters measure-key)
+             (get-in data (concat row-path col-combo [measure-key]))))
+      (when row-totals?
+        (for [measure-key pivot-measures]
+          (fmt (get ordered-formatters measure-key)
+               (get-in totals (concat row-path [measure-key])))))))))
+
+(defn- build-column-totals
+  "Build column totals for a section."
+  [section col-combos pivot-measures totals row-totals? ordered-formatters pivot-rows]
+  (concat
+   (cons (format "Totals for %s" (fmt (get ordered-formatters (first pivot-rows)) section))
+         (repeat (dec (count pivot-rows)) nil))
+   (for [col-combo col-combos
+         measure-key pivot-measures]
+     (fmt (get ordered-formatters measure-key)
+          (get-in totals (concat
+                          [:column-totals section]
+                          col-combo
+                          [measure-key]))))
+   (when row-totals?
+     (for [measure-key pivot-measures]
+       (fmt (get ordered-formatters measure-key)
+            (get-in totals [:section-totals section measure-key]))))))
+
+(defn- build-grand-totals
+  "Build grand totals row."
+  [{:keys [pivot-cols pivot-rows pivot-measures]} col-combos totals row-totals? ordered-formatters]
+  (concat
+   (if (and (seq pivot-cols) (not (seq pivot-rows)))
+     (cons "Grand totals" (repeat (dec (count pivot-cols)) nil))
+     (cons "Grand totals" (repeat (dec (count pivot-rows)) nil)))
+   (when row-totals?
+     (for [col-combo col-combos
+           measure-key pivot-measures]
+       (fmt (get ordered-formatters measure-key)
+            (get-in totals (concat col-combo [measure-key])))))
+   (for [measure-key pivot-measures]
+     (fmt (get ordered-formatters measure-key)
+          (get-in totals [:grand-total measure-key])))))
+
+(defn build-pivot-output
+  "Arrange and format the aggregated `pivot` data."
+  [pivot ordered-formatters]
+  (let [{:keys [config data totals row-values column-values]} pivot
+        {:keys [pivot-rows pivot-cols pivot-measures column-titles row-totals? col-totals?]} config
+        row-formatters (mapv #(get ordered-formatters %) pivot-rows)
+        col-formatters (mapv #(get ordered-formatters %) pivot-cols)
+        row-combos (apply math.combo/cartesian-product (map row-values pivot-rows))
+        col-combos (apply math.combo/cartesian-product (map column-values pivot-cols))
+        row-totals? (and row-totals? (boolean (seq pivot-cols)))
+        column-headers (build-column-headers config col-combos col-formatters)
+        headers (or (seq (build-headers column-headers config))
+                    [(concat
+                      (map #(get column-titles %) pivot-rows)
+                      (map #(get column-titles %) pivot-measures))])]
+    (concat
+     headers
+     (apply concat
+            (for [section-row-combos (sort-by ffirst (vals (group-by first row-combos)))]
+              (concat
+               (for [row-combo (sort-by first section-row-combos)]
+                 (build-row row-combo col-combos pivot-measures data totals row-totals? ordered-formatters row-formatters))
+               (when (and col-totals? (> (count pivot-rows) 1))
+                 [(build-column-totals (ffirst section-row-combos) col-combos pivot-measures totals row-totals? ordered-formatters pivot-rows)]))))
+     (when col-totals?
+       [(build-grand-totals config col-combos totals row-totals? ordered-formatters)]))))
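
A rough sketch of how these pieces compose in the streaming writers: build a spec, fold the raw pivot rows into the structure with `add-row`, then emit export rows with `build-pivot-output`. The spec map and rows below are invented and simplified, and may not satisfy every key the full `::pivot-spec` schema expects:

(let [spec  {:column-titles  ["Category" "Year" "pivot-grouping" "Sum of Total"]
             :pivot-rows     [0]
             :pivot-cols     [1]
             :pivot-measures [3]}
      rows  [["Gadget" 2024 0 100]
             ["Widget" 2024 0 250]]
      pivot (reduce add-row (init-pivot (add-totals-settings spec {})) rows)]
  ;; aggregated cell values, keyed by row values then column values
  (:data pivot))
;; => {"Gadget" {2024 {3 100}}, "Widget" {2024 {3 250}}}
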
diff --git a/src/metabase/query_processor/preprocess.clj b/src/metabase/query_processor/preprocess.clj
index 04b656c520d9bbf56e433ca712de307589d76558..fd26c6aaebe1b138a79195d6f643fd1a9eac4364 100644
--- a/src/metabase/query_processor/preprocess.clj
+++ b/src/metabase/query_processor/preprocess.clj
@@ -33,6 +33,7 @@
    [metabase.query-processor.middleware.persistence :as qp.persistence]
    [metabase.query-processor.middleware.pre-alias-aggregations :as qp.pre-alias-aggregations]
    [metabase.query-processor.middleware.reconcile-breakout-and-order-by-bucketing :as reconcile-bucketing]
+   [metabase.query-processor.middleware.remove-inactive-field-refs :as qp.remove-inactive-field-refs]
    [metabase.query-processor.middleware.resolve-fields :as qp.resolve-fields]
    [metabase.query-processor.middleware.resolve-joined-fields :as resolve-joined-fields]
    [metabase.query-processor.middleware.resolve-joins :as resolve-joins]
@@ -72,6 +73,29 @@
                      assoc :converted-form query)))
       (with-meta (meta middleware-fn))))
 
+(def ^:private unconverted-property?
+  (some-fn #{:info} qualified-keyword?))
+
+(defn- copy-unconverted-properties
+  [to from]
+  (reduce-kv (fn [m k v]
+               (cond-> m
+                 (unconverted-property? k) (assoc k v)))
+             to
+             from))
+
+(defn- ensure-pmbql-for-unclean-query
+  [middleware-fn]
+  (-> (fn [query]
+        (mu/disable-enforcement
+          (binding [lib.convert/*clean-query* false]
+            (let [query' (-> (cond->> query
+                               (not (:lib/type query))
+                               (lib.query/query (qp.store/metadata-provider)))
+                             (copy-unconverted-properties query))]
+              (-> query' middleware-fn ->legacy)))))
+      (with-meta (meta middleware-fn))))
+
 (def ^:private middleware
   "Pre-processing middleware. Has the form
 
@@ -104,6 +128,7 @@
    (ensure-legacy #'resolve-joined-fields/resolve-joined-fields)
    (ensure-legacy #'fix-bad-refs/fix-bad-references)
    (ensure-legacy #'escape-join-aliases/escape-join-aliases)
+   (ensure-pmbql-for-unclean-query #'qp.remove-inactive-field-refs/remove-inactive-field-refs)
    ;; yes, this is called a second time, because we need to handle any joins that got added
    (ensure-legacy #'qp.middleware.enterprise/apply-sandboxing)
    (ensure-legacy #'qp.cumulative-aggregations/rewrite-cumulative-aggregations)
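
A small sketch of `copy-unconverted-properties`: only `:info` and namespaced (qualified) keys survive the copy, which is how this wrapper preserves them across the legacy/pMBQL round trip. The example map is invented:

(copy-unconverted-properties {} {:info     {:card-id 1}
                                 :database 5
                                 :qp/flag  true})
;; => {:info {:card-id 1}, :qp/flag true}
;; :database is not copied, since it is neither :info nor a qualified keyword
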
diff --git a/src/metabase/query_processor/streaming/csv.clj b/src/metabase/query_processor/streaming/csv.clj
index c19d556c8122b12c62e98cf2c9bfc50de49dd60e..8272f8c41e97fd1bac08a4bd83976dea89f208fa 100644
--- a/src/metabase/query_processor/streaming/csv.clj
+++ b/src/metabase/query_processor/streaming/csv.clj
@@ -73,65 +73,72 @@
   [_ ^OutputStream os]
   (let [writer             (BufferedWriter. (OutputStreamWriter. os StandardCharsets/UTF_8))
         ordered-formatters (volatile! nil)
-        rows!              (atom [])
-        pivot-options      (atom nil)
-        ;; if we're processing results from a pivot query, there will be a column 'pivot-grouping' that we don't want to include
-        ;; in the final results, so we get the idx into the row in order to remove it
-        pivot-grouping-idx (volatile! nil)]
+        pivot-data         (atom nil)]
     (reify qp.si/StreamingResultsWriter
-      (begin! [_ {{:keys [ordered-cols results_timezone format-rows? pivot-export-options]
-                   :or   {format-rows? true}} :data} viz-settings]
-        (let [opts           (when (and *pivot-export-post-processing-enabled* pivot-export-options)
-                               (assoc pivot-export-options :column-titles (mapv :display_name ordered-cols)))
+      (begin! [_ {{:keys [ordered-cols results_timezone format-rows? pivot-export-options pivot?]
+                   :or   {format-rows? true
+                          pivot?       false}} :data} viz-settings]
+        (let [opts               (when (and pivot? pivot-export-options)
+                                   (-> (merge {:pivot-rows []
+                                               :pivot-cols []}
+                                              pivot-export-options)
+                                       (assoc :column-titles (mapv :display_name ordered-cols))
+                                       (qp.pivot.postprocess/add-totals-settings viz-settings)
+                                       qp.pivot.postprocess/add-pivot-measures))
               ;; col-names are created later when exporting a pivot table, so only create them if there are no pivot options
-              col-names      (when-not opts (common/column-titles ordered-cols (::mb.viz/column-settings viz-settings) format-rows?))
-              pivot-grouping (qp.pivot.postprocess/pivot-grouping-key col-names)]
-          ;; when pivot options exist, we want to save them to access later when processing the complete set of results for export.
-          (when pivot-grouping (vreset! pivot-grouping-idx pivot-grouping))
-          (when opts
-            (reset! pivot-options (merge {:pivot-rows []
-                                          :pivot-cols []} opts)))
+              col-names          (when-not opts (common/column-titles ordered-cols (::mb.viz/column-settings viz-settings) format-rows?))
+              pivot-grouping-key (qp.pivot.postprocess/pivot-grouping-key col-names)]
+
+          ;; initialize the pivot-data
+          ;; If exporting pivoted, init the pivot data structure
+          ;; Otherwise, just store the pivot-grouping key index
+          (when (and pivot? pivot-export-options)
+            (reset! pivot-data (qp.pivot.postprocess/init-pivot opts)))
+          (when pivot-grouping-key
+            (swap! pivot-data assoc :pivot-grouping pivot-grouping-key))
+
           (vreset! ordered-formatters
                    (if format-rows?
                      (mapv #(formatter/create-formatter results_timezone % viz-settings) ordered-cols)
                      (vec (repeat (count ordered-cols) identity))))
+
           ;; write the column names for non-pivot tables
-          (when-not opts
-            (let [modified-row (cond->> col-names
-                                 @pivot-grouping-idx (m/remove-nth @pivot-grouping-idx))]
-              (write-csv writer [modified-row]))
-            (.flush writer))))
+          (when col-names
+            (let [header (m/remove-nth (or pivot-grouping-key (inc (count col-names))) col-names)]
+              (write-csv writer [header])
+              (.flush writer)))))
 
       (write-row! [_ row _row-num _ {:keys [output-order]}]
-        (let [ordered-row (vec
-                           (if output-order
-                             (let [row-v (into [] row)]
-                               (for [i output-order] (row-v i)))
-                             row))
-              xf-row      (perf/mapv (fn [formatter r]
-                                       (formatter (common/format-value r)))
-                                     @ordered-formatters ordered-row)]
-          (if @pivot-options
-            ;; if we're processing a pivot result, we don't write it out yet, just store it
-            ;; so that we can post process the full set of results in finish!
-            (swap! rows! conj xf-row)
-            (let [pivot-grouping-key @pivot-grouping-idx
-                  group              (get ordered-row pivot-grouping-key)
-                  cleaned-row        (cond->> xf-row
-                                       pivot-grouping-key (m/remove-nth pivot-grouping-key))]
-              ;; when a pivot-grouping col exists, we check its group number. When it's zero,
-              ;; we keep it, otherwise don't include it in the results as it's a row representing a subtotal of some kind
-              (when (or (= qp.pivot.postprocess/NON_PIVOT_ROW_GROUP group)
-                        (not group))
-                (write-csv writer [cleaned-row])
+        (let [ordered-row (if output-order
+                            (let [row-v (into [] row)]
+                              (into [] (for [i output-order] (row-v i))))
+                            row)]
+          (if (contains? @pivot-data :config)
+            ;; if we're processing a pivot result, we don't write it out yet, just aggregate it
+            ;; so that we can post process the data in finish!
+            (when (= 0 (nth ordered-row (get-in @pivot-data [:config :pivot-grouping])))
+              (swap! pivot-data (fn [a] (qp.pivot.postprocess/add-row a ordered-row))))
+
+            (if-let [{:keys [pivot-grouping]} @pivot-data]
+              (let [group (get ordered-row pivot-grouping)]
+                (when (= 0 group)
+                  (let [formatted-row (->> (perf/mapv (fn [formatter r]
+                                                        (formatter (common/format-value r)))
+                                                      @ordered-formatters ordered-row)
+                                           (m/remove-nth pivot-grouping))]
+                    (write-csv writer [formatted-row])
+                    (.flush writer))))
+              (let [formatted-row (perf/mapv (fn [formatter r]
+                                               (formatter (common/format-value r)))
+                                             @ordered-formatters ordered-row)]
+                (write-csv writer [formatted-row])
                 (.flush writer))))))
 
       (finish! [_ _]
         ;; TODO -- not sure we need to flush both
-        (when @pivot-options
-          (let [pivot-table-rows (qp.pivot.postprocess/pivot-builder @rows! @pivot-options)]
-            (doseq [xf-row pivot-table-rows]
-              (write-csv writer [xf-row]))))
+        (when (contains? @pivot-data :config)
+          (doseq [xf-row (qp.pivot.postprocess/build-pivot-output @pivot-data @ordered-formatters)]
+            (write-csv writer [xf-row])))
         (.flush writer)
         (.flush os)
         (.close writer)))))
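
For orientation, a hedged sketch of the shapes the `pivot-data` atom can take in this writer, per the branches above (the grouping index 3 is hypothetical):

;; pivoted export (pivot? and pivot-export-options both present):
;;   {:config {... :pivot-grouping 3 ...} :data {...} :row-values {...} :column-values {...} ...}
;; plain export of rows that still carry a pivot-grouping column:
;;   {:pivot-grouping 3}
;; plain export of an ordinary query: nil
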
diff --git a/src/metabase/query_processor/streaming/xlsx.clj b/src/metabase/query_processor/streaming/xlsx.clj
index 9969416c6e6bc32598c397de9c5c11abbc21af33..8f1d9d641634775dce7c2935691f8bb7eaa65dc6 100644
--- a/src/metabase/query_processor/streaming/xlsx.clj
+++ b/src/metabase/query_processor/streaming/xlsx.clj
@@ -19,6 +19,7 @@
   (:import
    (java.io OutputStream)
    (java.time LocalDate LocalDateTime LocalTime OffsetDateTime OffsetTime ZonedDateTime)
+   (org.apache.poi.openxml4j.util ZipSecureFile)
    (org.apache.poi.ss SpreadsheetVersion)
    (org.apache.poi.ss.usermodel Cell DataConsolidateFunction DataFormat DateUtil Workbook)
    (org.apache.poi.ss.util AreaReference CellRangeAddress CellReference)
@@ -216,28 +217,30 @@
   [format-settings {semantic-type  :semantic_type
                     effective-type :effective_type
                     base-type      :base_type
-                    unit           :unit :as col}]
-  (let [col-type (common/col-type col)]
-    (u/one-or-many
-     (cond
-        ;; Primary key or foreign key
-       (isa? col-type :Relation/*)
-       "0"
-
-       (isa? semantic-type :type/Coordinate)
-       nil
-
-        ;; This logic is a guard against someone setting the semantic type of a non-temporal value like 1.0 to temporal.
-        ;; It will not apply formatting to the value in this case.
-       (and (or (some #(contains? datetime-setting-keys %) (keys format-settings))
-                (isa? semantic-type :type/Temporal))
-            (or (isa? effective-type :type/Temporal)
-                (isa? base-type :type/Temporal)))
-       (datetime-format-string format-settings unit)
-
-       (or (some #(contains? number-setting-keys %) (keys format-settings))
-           (isa? col-type :type/Currency))
-       (number-format-strings format-settings)))))
+                    unit           :unit :as col}
+   format-rows?]
+  (when format-rows?
+    (let [col-type (common/col-type col)]
+      (u/one-or-many
+       (cond
+         ;; Primary key or foreign key
+         (isa? col-type :Relation/*)
+         "0"
+
+         (isa? semantic-type :type/Coordinate)
+         nil
+
+         ;; This logic is a guard against someone setting the semantic type of a non-temporal value like 1.0 to temporal.
+         ;; It will not apply formatting to the value in this case.
+         (and (or (some #(contains? datetime-setting-keys %) (keys format-settings))
+                  (isa? semantic-type :type/Temporal))
+              (or (isa? effective-type :type/Temporal)
+                  (isa? base-type :type/Temporal)))
+         (datetime-format-string format-settings unit)
+
+         (or (some #(contains? number-setting-keys %) (keys format-settings))
+             (isa? col-type :type/Currency))
+         (number-format-strings format-settings))))))
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                             XLSX export logic                                                  |
@@ -261,10 +264,10 @@
 
 (defn- compute-column-cell-styles
   "Compute a sequence of cell styles for each column"
-  [^Workbook workbook ^DataFormat data-format viz-settings cols]
+  [^Workbook workbook ^DataFormat data-format viz-settings cols format-rows?]
   (for [col cols]
     (let [settings       (common/viz-settings-for-col col viz-settings)
-          format-strings (format-settings->format-strings settings col)]
+          format-strings (format-settings->format-strings settings col format-rows?)]
       (when (seq format-strings)
         (mapv
          (partial cell-string-format-style workbook data-format)
@@ -428,73 +431,76 @@
   "Adds a row of values to the spreadsheet. Values with the `scaled` viz setting are scaled prior to being added.
 
   This is based on the equivalent function in Docjure, but adapted to support Metabase viz settings."
-  {:arglists '([sheet values cols col-settings cell-styles typed-cell-styles])}
-  (fn [sheet _values _cols _col-settings _cell-styles _typed-cell-styles]
+  {:arglists '([sheet values cols col-settings cell-styles typed-cell-styles]
+               [sheet row-num values cols col-settings cell-styles typed-cell-styles])}
+  (fn [sheet & _args]
     (class sheet)))
 
 (defmethod add-row! org.apache.poi.xssf.streaming.SXSSFSheet
-  [^SXSSFSheet sheet values cols col-settings cell-styles typed-cell-styles]
-  (let [row-num (if (= 0 (.getPhysicalNumberOfRows sheet))
-                  0
-                  (inc (.getLastRowNum sheet)))
-        row     (.createRow sheet row-num)
-        ;; Using iterators here to efficiently go over multiple collections at once.
-        val-it (.iterator ^Iterable values)
-        col-it (.iterator ^Iterable cols)
-        sty-it (.iterator ^Iterable cell-styles)]
-    (loop [index 0]
-      (when (.hasNext val-it)
-        (let [value (.next val-it)
-              col (.next col-it)
-              styles (.next sty-it)
-              id-or-name   (or (:id col) (:name col))
-              settings     (or (get col-settings {::mb.viz/field-id id-or-name})
-                               (get col-settings {::mb.viz/column-name id-or-name})
-                               (get col-settings {::mb.viz/column-name (:name col)}))
-              scaled-val   (if (and value (::mb.viz/scale settings))
-                             (* value (::mb.viz/scale settings))
-                             value)
-              ;; Temporal values are converted into strings in the format-rows QP middleware, which is enabled during
-              ;; dashboard subscription/pulse generation. If so, we should parse them here so that formatting is applied.
-              parsed-value (or
-                            (maybe-parse-temporal-value value col)
-                            (maybe-parse-coordinate-value value col)
-                            scaled-val)]
-          (set-cell! (.createCell ^SXSSFRow row index) parsed-value styles typed-cell-styles))
-        (recur (inc index))))
-    row))
+  ([^SXSSFSheet sheet values cols col-settings cell-styles typed-cell-styles]
+   (let [row-num (if (= 0 (.getPhysicalNumberOfRows sheet))
+                   0
+                   (inc (.getLastRowNum sheet)))]
+     (add-row! ^SXSSFSheet sheet row-num values cols col-settings cell-styles typed-cell-styles)))
+  ([^SXSSFSheet sheet row-num values cols col-settings cell-styles typed-cell-styles]
+   (let [row     (.createRow sheet ^Integer row-num)
+         ;; Using iterators here to efficiently go over multiple collections at once.
+         val-it (.iterator ^Iterable values)
+         col-it (.iterator ^Iterable cols)
+         sty-it (.iterator ^Iterable cell-styles)]
+     (loop [index 0]
+       (when (.hasNext val-it)
+         (let [value (.next val-it)
+               col (.next col-it)
+               styles (.next sty-it)
+               id-or-name   (or (:id col) (:name col))
+               settings     (or (get col-settings {::mb.viz/field-id id-or-name})
+                                (get col-settings {::mb.viz/column-name id-or-name}))
+               scaled-val   (if (and value (::mb.viz/scale settings))
+                              (* value (::mb.viz/scale settings))
+                              value)
+               ;; Temporal values are converted into strings in the format-rows QP middleware, which is enabled during
+               ;; dashboard subscription/pulse generation. If so, we should parse them here so that formatting is applied.
+               parsed-value (or
+                             (maybe-parse-temporal-value value col)
+                             (maybe-parse-coordinate-value value col)
+                             scaled-val)]
+           (set-cell! (.createCell ^SXSSFRow row index) parsed-value styles typed-cell-styles))
+         (recur (inc index))))
+     row)))
 
 (defmethod add-row! org.apache.poi.xssf.usermodel.XSSFSheet
-  [^XSSFSheet sheet values cols col-settings cell-styles typed-cell-styles]
-  (let [row-num (if (= 0 (.getPhysicalNumberOfRows sheet))
-                  0
-                  (inc (.getLastRowNum sheet)))
-        row     (.createRow sheet row-num)
-        ;; Using iterators here to efficiently go over multiple collections at once.
-        val-it (.iterator ^Iterable values)
-        col-it (.iterator ^Iterable cols)
-        sty-it (.iterator ^Iterable cell-styles)]
-    (loop [index 0]
-      (when (.hasNext val-it)
-        (let [value (.next val-it)
-              col (.next col-it)
-              styles (.next sty-it)
-              id-or-name   (or (:id col) (:name col))
-              settings     (or (get col-settings {::mb.viz/field-id id-or-name})
-                               (get col-settings {::mb.viz/column-name id-or-name})
-                               (get col-settings {::mb.viz/column-name (:name col)}))
-              scaled-val   (if (and value (::mb.viz/scale settings))
-                             (* value (::mb.viz/scale settings))
-                             value)
-              ;; Temporal values are converted into strings in the format-rows QP middleware, which is enabled during
-              ;; dashboard subscription/pulse generation. If so, we should parse them here so that formatting is applied.
-              parsed-value (or
-                            (maybe-parse-temporal-value value col)
-                            (maybe-parse-coordinate-value value col)
-                            scaled-val)]
-          (set-cell! (.createCell ^XSSFRow row index) parsed-value styles typed-cell-styles))
-        (recur (inc index))))
-    row))
+  ([^XSSFSheet sheet values cols col-settings cell-styles typed-cell-styles]
+   (let [row-num (if (= 0 (.getPhysicalNumberOfRows sheet))
+                   0
+                   (inc (.getLastRowNum sheet)))]
+     (add-row! ^XSSFSheet sheet row-num values cols col-settings cell-styles typed-cell-styles)))
+  ([^XSSFSheet sheet row-num values cols col-settings cell-styles typed-cell-styles]
+   (let [row     (.createRow sheet ^Integer row-num)
+         ;; Using iterators here to efficiently go over multiple collections at once.
+         val-it (.iterator ^Iterable values)
+         col-it (.iterator ^Iterable cols)
+         sty-it (.iterator ^Iterable cell-styles)]
+     (loop [index 0]
+       (when (.hasNext val-it)
+         (let [value (.next val-it)
+               col (.next col-it)
+               styles (.next sty-it)
+               id-or-name   (or (:id col) (:name col))
+               settings     (or (get col-settings {::mb.viz/field-id id-or-name})
+                                (get col-settings {::mb.viz/column-name id-or-name}))
+               scaled-val   (if (and value (::mb.viz/scale settings))
+                              (* value (::mb.viz/scale settings))
+                              value)
+               ;; Temporal values are converted into strings in the format-rows QP middleware, which is enabled during
+               ;; dashboard subscription/pulse generation. If so, we should parse them here so that formatting is applied.
+               parsed-value (or
+                             (maybe-parse-temporal-value value col)
+                             (maybe-parse-coordinate-value value col)
+                             scaled-val)]
+           (set-cell! (.createCell ^XSSFRow row index) parsed-value styles typed-cell-styles))
+         (recur (inc index))))
+     row)))
 
 (def ^:dynamic *auto-sizing-threshold*
   "The maximum number of rows we should use for auto-sizing. If this number is too large, exports
@@ -527,21 +533,6 @@
     (.setAutoFilter ^SXSSFSheet sheet (new CellRangeAddress 0 0 0 (dec col-count)))
     (.createFreezePane ^SXSSFSheet sheet 0 1)))
 
-(defn- cell-range
-  [rows]
-  (let [x (dec (count (first rows)))
-        y (dec (count rows))]
-    (CellRangeAddress.
-     0 ;; first row
-     y ;; last row
-     0 ;; first col
-     x ;; last col
-     )))
-
-(defn- cell-range->area-ref
-  [cell-range]
-  (AreaReference. (.formatAsString ^CellRangeAddress cell-range) SpreadsheetVersion/EXCEL2007))
-
 ;; Possible Functions: https://poi.apache.org/apidocs/dev/org/apache/poi/ss/usermodel/DataConsolidateFunction.html
 ;; I'm only including the keys that seem to work for our Pivot Tables as of 2024-06-06
 (defn- col->aggregation-fn
@@ -567,10 +558,18 @@
         (assoc :aggregation-functions agg-fns)
         (assoc :pivot-grouping-key (qp.pivot.postprocess/pivot-grouping-key titles)))))
 
-(defn- native-pivot
-  [rows
-   {:keys [pivot-grouping-key] :as pivot-spec}
-   {:keys [ordered-cols col-settings viz-settings]}]
+;; Below, we need to provide an AreaReference to create a pivot table.
+;; Creating an AreaReference 'realizes' every CellReference inside it, so the larger the AreaReference,
+;; the more memory we use and the larger the file size.
+;; Unfortunately, we can't avoid this, so we try to create a reference only as large as the row count, which we can
+;; only estimate from fingerprint distinct counts (which are not guaranteed to be correct).
+;; So, by default we use some large number as a basis.
+;; We have to set the min inflate ratio lower than the default of 0.01, because otherwise we get a 'zip bomb detected' error.
+;; Since we're the ones creating the file, we can lower the ratio to get what we want.
+(ZipSecureFile/setMinInflateRatio 0.001)
+(defn- init-native-pivot
+  [{:keys [pivot-grouping-key] :as pivot-spec}
+   {:keys [ordered-cols col-settings viz-settings format-rows?]}]
   (let [idx-shift                   (fn [indices]
                                       (map (fn [idx]
                                              (if (> idx pivot-grouping-key)
@@ -580,39 +579,50 @@
         pivot-rows                  (idx-shift (:pivot-rows pivot-spec))
         pivot-cols                  (idx-shift (:pivot-cols pivot-spec))
         pivot-measures              (idx-shift (:pivot-measures pivot-spec))
-        aggregation-functions       (vec (m/remove-nth pivot-grouping-key (:aggregation-functions pivot-spec)))
+        ;; for now, these are unused, since the rows from qp.pivot will already contain their aggregated values
+        _aggregation-functions      (vec (m/remove-nth pivot-grouping-key (:aggregation-functions pivot-spec)))
         wb                          (spreadsheet/create-workbook
                                      "pivot" [[]]
                                      "data" [])
         data-format                 (. ^XSSFWorkbook wb createDataFormat)
-        cell-styles                 (compute-column-cell-styles wb data-format viz-settings ordered-cols)
+        cell-styles                 (compute-column-cell-styles wb data-format viz-settings ordered-cols format-rows?)
         typed-cell-styles           (compute-typed-cell-styles wb data-format)
         data-sheet                  (spreadsheet/select-sheet "data" wb)
         pivot-sheet                 (spreadsheet/select-sheet "pivot" wb)
-        area-ref                    (cell-range->area-ref (cell-range rows))
-        _                           (doseq [row rows]
-                                      (add-row! data-sheet row ordered-cols col-settings cell-styles typed-cell-styles))
+        col-names                   (common/column-titles ordered-cols col-settings format-rows?)
+        _                           (add-row! data-sheet col-names ordered-cols col-settings cell-styles typed-cell-styles)
+        area-ref                    (AreaReference/getWholeColumn SpreadsheetVersion/EXCEL2007
+                                                                  "A"
+                                                                  (CellReference/convertNumToColString (dec (count ordered-cols))))
         ^XSSFPivotTable pivot-table (.createPivotTable ^XSSFSheet pivot-sheet
                                                        ^AreaReference area-ref
-                                                       (CellReference. "A1")
+                                                       (CellReference. 0 0)
                                                        ^XSSFSheet data-sheet)]
     (doseq [idx pivot-rows]
       (.addRowLabel pivot-table idx))
     (doseq [idx pivot-cols]
       (.addColLabel pivot-table idx))
     (doseq [idx pivot-measures]
-      (.addColumnLabel pivot-table (get aggregation-functions idx DataConsolidateFunction/COUNT) idx))
-    wb))
-
-;; As a first step towards hollistically solving this issue: https://github.com/metabase/metabase/issues/44556
-;; (which is basically that very large pivot tables can crash the export process),
-;; The post processing is disabled completely.
-;; This should remain `false` until it's fixed
-;; TODO: rework this post-processing once there's a clear way in app to enable/disable it, or to select alternate download options
-(def ^:dynamic *pivot-export-post-processing-enabled*
-  "Flag to enable/disable export post-processing of pivot tables.
-  Disabled by default and should remain disabled until Issue #44556 is resolved and a clear plan is made."
-  false)
+      (.addColumnLabel pivot-table DataConsolidateFunction/SUM #_(get aggregation-functions idx DataConsolidateFunction/SUM) idx))
+    (let [swb   (-> (SXSSFWorkbook. ^XSSFWorkbook wb)
+                    (doto (.setCompressTempFiles true)))
+          sheet (spreadsheet/select-sheet "data" swb)]
+      (doseq [i (range (count ordered-cols))]
+        (.trackColumnForAutoSizing ^SXSSFSheet sheet i))
+      (setup-header-row! sheet (count ordered-cols))
+      {:workbook swb
+       :sheet    sheet})))
+
+(defn- init-workbook
+  [{:keys [ordered-cols col-settings format-rows?]}]
+  (let [workbook (SXSSFWorkbook.)
+        sheet    (spreadsheet/add-sheet! workbook (tru "Query result"))]
+    (doseq [i (range (count ordered-cols))]
+      (.trackColumnForAutoSizing ^SXSSFSheet sheet i))
+    (setup-header-row! sheet (count ordered-cols))
+    (spreadsheet/add-row! sheet (common/column-titles ordered-cols col-settings format-rows?))
+    {:workbook workbook
+     :sheet    sheet}))
 
 (defmethod qp.si/streaming-results-writer :xlsx
   [_ ^OutputStream os]
@@ -621,91 +631,65 @@
   (let [f (io/file (str (System/getProperty "java.io.tmpdir") "/poifiles"))]
     (when-not (.exists f)
       (.mkdirs f)))
-  (let [workbook           (SXSSFWorkbook.)
-        sheet              (spreadsheet/add-sheet! workbook (tru "Query result"))
-        _                  (set-no-style-custom-helper sheet)
-        data-format        (. workbook createDataFormat)
+  (let [workbook-data      (volatile! nil)
         cell-styles        (volatile! nil)
         typed-cell-styles  (volatile! nil)
-        pivot-data!        (atom {:rows []})
-        ;; if we're processing results from a pivot query, there will be a column 'pivot-grouping' that we don't want to include
-        ;; in the final results, so we get the idx into the row in order to remove it
         pivot-grouping-idx (volatile! nil)]
     (reify qp.si/StreamingResultsWriter
-      (begin! [_ {{:keys [ordered-cols format-rows? pivot-export-options]} :data}
+      (begin! [_ {{:keys [ordered-cols format-rows? pivot? pivot-export-options]
+                   :or   {format-rows? true
+                          pivot?       false}} :data}
                {col-settings ::mb.viz/column-settings :as viz-settings}]
-        (let [opts           (when (and *pivot-export-post-processing-enabled* pivot-export-options)
-                               (pivot-opts->pivot-spec (merge {:pivot-cols []
-                                                               :pivot-rows []}
-                                                              pivot-export-options) ordered-cols))
-              col-names      (common/column-titles ordered-cols (::mb.viz/column-settings viz-settings) format-rows?)
-              pivot-grouping (qp.pivot.postprocess/pivot-grouping-key col-names)]
-          (when pivot-grouping (vreset! pivot-grouping-idx pivot-grouping))
-          (vreset! cell-styles (compute-column-cell-styles workbook data-format viz-settings ordered-cols))
-          (vreset! typed-cell-styles (compute-typed-cell-styles workbook data-format))
-          ;; when pivot options exist, we want to save them to access later when processing the complete set of results for export.
-          (when opts
-            (swap! pivot-data! assoc
-                   :cell-style-data {:ordered-cols ordered-cols
+        (let [opts               (when (and pivot? pivot-export-options)
+                                   (pivot-opts->pivot-spec (merge {:pivot-cols []
+                                                                   :pivot-rows []}
+                                                                  pivot-export-options) ordered-cols))
+              col-names          (common/column-titles ordered-cols (::mb.viz/column-settings viz-settings) format-rows?)
+              pivot-grouping-key (qp.pivot.postprocess/pivot-grouping-key col-names)]
+          (when pivot-grouping-key (vreset! pivot-grouping-idx pivot-grouping-key))
+          (if opts
+            (let [wb (init-native-pivot opts
+                                        {:ordered-cols ordered-cols
+                                         :col-settings col-settings
+                                         :viz-settings viz-settings
+                                         :format-rows? format-rows?})]
+              (vreset! workbook-data wb))
+            (let [wb (init-workbook {:ordered-cols (cond->> ordered-cols
+                                                     pivot-grouping-key (m/remove-nth pivot-grouping-key))
                                      :col-settings col-settings
-                                     :viz-settings viz-settings}
-                   :pivot-options opts))
+                                     :format-rows? true})]
+              (vreset! workbook-data wb)))
 
-          (when-not opts
-            (doseq [i (range (count ordered-cols))]
-              (.trackColumnForAutoSizing ^SXSSFSheet sheet i))
-            (setup-header-row! sheet (count ordered-cols))
-            (let [modified-row (cond->> (common/column-titles ordered-cols (::mb.viz/column-settings viz-settings) true)
-                                 @pivot-grouping-idx (m/remove-nth @pivot-grouping-idx))]
-              (spreadsheet/add-row! sheet modified-row)))))
+          (let [{:keys [workbook sheet]} @workbook-data
+                data-format              (. ^SXSSFWorkbook workbook createDataFormat)]
+            (set-no-style-custom-helper sheet)
+            (vreset! cell-styles (compute-column-cell-styles workbook data-format viz-settings ordered-cols format-rows?))
+            (vreset! typed-cell-styles (compute-typed-cell-styles workbook data-format)))))
 
       (write-row! [_ row row-num ordered-cols {:keys [output-order] :as viz-settings}]
-        (let [ordered-row             (vec (if output-order
-                                             (let [row-v (into [] row)]
-                                               (for [i output-order] (row-v i)))
-                                             row))
-              col-settings            (::mb.viz/column-settings viz-settings)
-              {:keys [pivot-options]} @pivot-data!
-              pivot-grouping-key      @pivot-grouping-idx
-              group                   (get ordered-row pivot-grouping-key)
-              cleaned-row             (cond->> ordered-row
-                                        pivot-grouping-key (m/remove-nth pivot-grouping-key))]
-          ;; when a pivot-grouping col exists, we check its group number. When it's zero,
-          ;; we keep it, otherwise don't include it in the results as it's a row representing a subtotal of some kind
-          (when (or (= qp.pivot.postprocess/NON_PIVOT_ROW_GROUP group)
-                    (not group))
-            (if pivot-options
-              ;; TODO: right now, the way I'm building up the native pivot,
-              ;; I end up using the docjure set-cell! (since I create a whole sheet with all the rows at once)
-              ;; I'll want to change that so I can use the set-cell! method we have in this ns, but for now just string everything.
-              (let [modified-row (mapv (fn [value]
-                                         (if (number? value)
-                                           value
-                                           (str value)))
-                                       cleaned-row)]
-                (swap! pivot-data! update :rows conj modified-row))
-              (do
-                (add-row! sheet cleaned-row ordered-cols col-settings @cell-styles @typed-cell-styles)
-                (when (= (inc row-num) *auto-sizing-threshold*)
-                  (autosize-columns! sheet)))))))
+        (let [ordered-row        (vec (if output-order
+                                        (let [row-v (into [] row)]
+                                          (for [i output-order] (row-v i)))
+                                        row))
+              col-settings       (::mb.viz/column-settings viz-settings)
+              pivot-grouping-key @pivot-grouping-idx
+              group              (get row pivot-grouping-key)
+              modified-row       (cond->> ordered-row
+                                   pivot-grouping-key (m/remove-nth pivot-grouping-key))
+              {:keys [sheet]}    @workbook-data]
+          (when (or (not group)
+                    (= group 0))
+            (add-row! sheet (inc row-num) modified-row ordered-cols col-settings @cell-styles @typed-cell-styles)
+            (when (= (inc row-num) *auto-sizing-threshold*)
+              (autosize-columns! sheet)))))
 
       (finish! [_ {:keys [row_count]}]
-        (let [{:keys [pivot-options rows cell-style-data]} @pivot-data!
-              pivot-grouping-key                           @pivot-grouping-idx]
-          (if pivot-options
-            (let [header (vec (m/remove-nth pivot-grouping-key (:column-titles pivot-options)))
-                  wb     (native-pivot (concat [header] rows) pivot-options cell-style-data)]
-              (try
-                (spreadsheet/save-workbook-into-stream! os wb)
-                (finally
-                  (.dispose workbook)
-                  (.close os))))
-            (do
-              (when (or (nil? row_count) (< row_count *auto-sizing-threshold*))
+        (let [{:keys [workbook sheet]} @workbook-data]
+          (when (or (nil? row_count) (< row_count *auto-sizing-threshold*))
                 ;; Auto-size columns if we never hit the row threshold, or a final row count was not provided
-                (autosize-columns! sheet))
-              (try
-                (spreadsheet/save-workbook-into-stream! os workbook)
-                (finally
-                  (.dispose workbook)
-                  (.close os))))))))))
+            (autosize-columns! sheet))
+          (try
+            (spreadsheet/save-workbook-into-stream! os workbook)
+            (finally
+              (.dispose ^SXSSFWorkbook workbook)
+              (.close os))))))))
diff --git a/test/metabase/api/alert_test.clj b/test/metabase/api/alert_test.clj
index 0bf7bb0d241055098485779ae2bdc6eacf332cee..c74ebb7222da2921f97b06b8e0e7ce7951ff0755 100644
--- a/test/metabase/api/alert_test.clj
+++ b/test/metabase/api/alert_test.clj
@@ -32,11 +32,13 @@
 
 (defn- pulse-card-details [card]
   (-> card
-      (select-keys [:name :description :display])
+      (select-keys [:name :description :display :pivot_results])
       (update :display name)
       (update :collection_id boolean)
-      (assoc :id true, :include_csv false, :include_xls false, :format_rows true, :dashboard_card_id false,
-             :dashboard_id false, :parameter_mappings nil)))
+      (assoc :id true :include_csv false :include_xls false
+             :format_rows true :pivot_results false
+             :dashboard_card_id false
+             :dashboard_id false :parameter_mappings nil)))
 
 (defn- recipient-details [user-kwd]
   (-> user-kwd
diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj
index df5b16286c940d689040fb3777693f3dd901c782..f670cabe807bb56cd41b5478c74da425248960bc 100644
--- a/test/metabase/api/card_test.clj
+++ b/test/metabase/api/card_test.clj
@@ -2068,12 +2068,12 @@
     (with-temp-native-card! [_ card]
       (with-cards-in-readable-collection! card
         (is (= [{(keyword "COUNT(*)") "75"}]
-               (mt/user-http-request :rasta :post 200 (format "card/%d/query/json" (u/the-id card))))))))
+               (mt/user-http-request :rasta :post 200 (format "card/%d/query/json" (u/the-id card)) :format_rows true))))))
   (testing "with parameters"
     (with-temp-native-card-with-params! [_ card]
       (with-cards-in-readable-collection! card
         (is (= [{(keyword "COUNT(*)") "8"}]
-               (mt/user-http-request :rasta :post 200 (format "card/%d/query/json" (u/the-id card))
+               (mt/user-http-request :rasta :post 200 (format "card/%d/query/json" (u/the-id card)) :format_rows true
                                      :parameters encoded-params)))))))
 
 (deftest renamed-column-names-are-applied-to-json-test
@@ -2147,7 +2147,10 @@
                                                                                                              (mt/id :orders :id))
                                                                                                             {:column_title "IDENTIFIER"}}}}]
           (letfn [(col-names [card-id]
-                    (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/json" card-id)) first keys (map name) set))]
+                    (->> (mt/user-http-request :crowberto :post 200
+                                               (format "card/%d/query/json" card-id)
+                                               :format_rows true)
+                         first keys (map name) set))]
             (testing "Renaming columns via viz settings is correctly applied to the CSV export"
               (is (= #{"THE_ID" "ORDER TAX" "Total Amount" "Discount Applied ($)" "Amount Ordered" "Effective Tax Rate"}
                      (col-names base-card-id))))
@@ -2208,7 +2211,8 @@
       (testing "Removing the time portion of the timestamp should only show the date"
         (is (= [["T"] ["2023-1-1"]]
                (parse-xlsx-results-to-strings
-                (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))))))))))
+                (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                      :format_rows true))))))))
 
 (deftest xlsx-default-currency-formatting-test
   (testing "The default currency is USD"
@@ -2222,7 +2226,8 @@
       (is (= [["MONEY"]
               ["[$$]123.45"]]
              (parse-xlsx-results-to-strings
-              (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card)))))))))
+              (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                    :format_rows true)))))))
 
 (deftest xlsx-default-currency-formatting-test-2
   (testing "Default localization settings take effect"
@@ -2238,7 +2243,8 @@
         (is (= [["MONEY"]
                 ["[$€]123.45"]]
                (parse-xlsx-results-to-strings
-                (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))))))))))
+                (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                      :format_rows true))))))))
 
 (deftest xlsx-currency-formatting-test
   (testing "Currencies are applied correctly in Excel files"
@@ -2260,7 +2266,8 @@
           (is (= [currencies
                   ["[$$]123.45" "[$CA$]123.45" "[$€]123.45" "[$¥]123.45"]]
                  (parse-xlsx-results-to-strings
-                  (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card)))))))))))
+                  (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                        :format_rows true)))))))))
 
 (deftest xlsx-full-formatting-test
   (testing "Formatting should be applied correctly for all types, including numbers, currencies, exponents, and times. (relates to #14393)"
@@ -2398,7 +2405,8 @@
                       ["Jan 1, 2023, 12:34 PM" "2023-1-1" "1-1-2023, 12:34:56.000" "Jan 1, 2023" "12:34 PM" "2,345.30" "[$$]2,345.30" "[$USD] 3456.30" "2,931.30 US dollars" "1.71806E+4" "8.02%" "0.000%"]
                       ["Jan 1, 2023, 12:34 PM" "2023-1-1" "1-1-2023, 12:34:56.000" "Jan 1, 2023" "12:34 PM" "3,456.00" "[$$]3,456.00" "[$USD] 2300.00" "2,250.00 US dollars" "12.7181E+4" "95.40%" "11.580%"]]
                      (parse-xlsx-results-to-strings
-                      (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))))))))))
+                      (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                            :format_rows true))))))))
       (testing "Global currency settings are applied correctly"
         (mt/with-temporary-setting-values [custom-formatting {:type/Temporal {:date_abbreviate true}
                                                               :type/Currency {:currency "EUR", :currency_style "symbol"}}]
@@ -2413,9 +2421,11 @@
                       ["Jan 1, 2023, 12:34 PM" "2023-1-1" "1-1-2023, 12:34:56.000" "Jan 1, 2023" "12:34 PM" "2,345.30" "[$€]2,345.30" "[$EUR] 3456.30" "2,931.30 euros" "1.71806E+4" "8.02%" "0.000%"]
                       ["Jan 1, 2023, 12:34 PM" "2023-1-1" "1-1-2023, 12:34:56.000" "Jan 1, 2023" "12:34 PM" "3,456.00" "[$€]3,456.00" "[$EUR] 2300.00" "2,250.00 euros" "12.7181E+4" "95.40%" "11.580%"]]
                      (parse-xlsx-results-to-strings
-                      (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card)))))))
+                      (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                            :format_rows true)))))
             (parse-xlsx-results-to-strings
-             (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))))))))))
+             (mt/user-http-request :rasta :post 200 (format "card/%d/query/xlsx" (u/the-id card))
+                                   :format_rows true))))))))
 
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
@@ -3607,7 +3617,8 @@
             (is (= expected
                    (->> (mt/user-http-request
                          :crowberto :post 200
-                         (format "card/%s/query/%s?format_rows=%s" card-id (name export-format) apply-formatting?))
+                         (format "card/%s/query/%s" card-id (name export-format))
+                         :format_rows apply-formatting?)
                         ((get output-helper export-format)))))))))))
 
 (deftest ^:parallel can-restore
diff --git a/test/metabase/api/dashboard_test.clj b/test/metabase/api/dashboard_test.clj
index 929bc2017708f3ca2f41f4dfac59cf1144dee7f4..318ae703120254a547736ed089ee7dc229c0d856 100644
--- a/test/metabase/api/dashboard_test.clj
+++ b/test/metabase/api/dashboard_test.clj
@@ -3664,6 +3664,7 @@
                      (parse-export-format-results
                       (mt/user-real-request :rasta :post 200 url
                                             {:request-options {:as :byte-array}}
+                                            :format_rows true
                                             :parameters (json/generate-string [{:id    "_PRICE_"
                                                                                 :value 4}]))
                       export-format))))))))))
@@ -4506,7 +4507,8 @@
             (is (= expected
                    (->> (mt/user-http-request
                          :crowberto :post 200
-                         (format "/dashboard/%s/dashcard/%s/card/%s/query/%s?format_rows=%s"                                   dashboard-id dashcard-id card-id (name export-format) apply-formatting?))
+                         (format "/dashboard/%s/dashcard/%s/card/%s/query/%s" dashboard-id dashcard-id card-id (name export-format))
+                         :format_rows apply-formatting?)
                         ((get output-helper export-format)))))))))))
 (deftest can-restore
   (let [can-restore? (fn [dash-id user]
diff --git a/test/metabase/api/dataset_test.clj b/test/metabase/api/dataset_test.clj
index ae375fb3769c0215030303e1182063eba423be06..7a324aae7ac37b03237b9d93ab960b8c463ad119 100644
--- a/test/metabase/api/dataset_test.clj
+++ b/test/metabase/api/dataset_test.clj
@@ -726,8 +726,8 @@
           (is (= expected
                  (->> (mt/user-http-request
                        :crowberto :post 200
-                       (format "dataset/%s?format_rows=%s" (name export-format) apply-formatting?)
-                       :query (json/generate-string q))
+                       (format "dataset/%s" (name export-format))
+                       :query (json/generate-string (assoc q :middleware {:format-rows? apply-formatting?})))
                       ((get output-helper export-format))))))))))
 
 (deftest ^:parallel query-metadata-test
diff --git a/test/metabase/api/downloads_exports_test.clj b/test/metabase/api/downloads_exports_test.clj
index 39147e35faa9ddff9a3a575206c6e67e1de1f99a..392aff434135c219546ab0605d821d498651ba9a 100644
--- a/test/metabase/api/downloads_exports_test.clj
+++ b/test/metabase/api/downloads_exports_test.clj
@@ -10,6 +10,7 @@
   - Dashboard Subscription Attachments
   - Alert attachments"
   (:require
+   [cheshire.core :as json]
    [clojure.data.csv :as csv]
    [clojure.java.io :as io]
    [clojure.set :as set]
@@ -20,8 +21,6 @@
    [metabase.public-settings :as public-settings]
    [metabase.pulse :as pulse]
    [metabase.pulse.test-util :as pulse.test-util]
-   [metabase.query-processor.streaming.csv :as qp.csv]
-   [metabase.query-processor.streaming.xlsx :as qp.xlsx]
    [metabase.test :as mt])
   (:import
    (org.apache.poi.ss.usermodel DataFormatter)
@@ -33,10 +32,10 @@
   (.formatCellValue cell-formatter c))
 
 (defn- read-xlsx
-  [result]
+  [pivot result]
   (with-open [in (io/input-stream result)]
     (->> (spreadsheet/load-workbook in)
-         (spreadsheet/select-sheet "Query result")
+         (spreadsheet/select-sheet (if pivot "data" "Query result"))
          (spreadsheet/row-seq)
          (mapv (fn [r]
                  (->>  (spreadsheet/cell-seq r)
@@ -50,27 +49,61 @@
      (map #(mapv % ks) result))))
 
 (defn- process-results
-  [export-format results]
+  [pivot export-format results]
   (when (seq results)
     (case export-format
       :csv  (csv/read-csv results)
-      :xlsx (read-xlsx results)
+      :xlsx (read-xlsx pivot results)
       :json (tabulate-maps results))))
 
 (defn- card-download
-  [{:keys [id] :as _card} export-format format-rows?]
-  (->> (format "card/%d/query/%s?format_rows=%s" id (name export-format) format-rows?)
-       (mt/user-http-request :crowberto :post 200)
-       (process-results export-format)))
+  [{:keys [id] :as _card} {:keys [export-format format-rows pivot]}]
+  (->> (mt/user-http-request :crowberto :post 200
+                             (format "card/%d/query/%s" id (name export-format))
+                             :format_rows   format-rows
+                             :pivot_results pivot)
+       (process-results pivot export-format)))
+
+(defn- unsaved-card-download
+  [card {:keys [export-format format-rows pivot]}]
+  (->> (mt/user-http-request :crowberto :post 200
+                             (format "dataset/%s" (name export-format))
+                             :visualization_settings (json/generate-string
+                                                      (:visualization_settings card))
+                             :query (json/generate-string
+                                     (assoc (:dataset_query card)
+                                            :was-pivot (boolean pivot)
+                                            :info {:visualization-settings (:visualization_settings card)}
+                                            :middleware
+                                            {:format-rows?    format-rows
+                                             :pivot?          (boolean pivot)
+                                             :userland-query? true})))
+       (process-results pivot export-format)))
+
+(defn public-question-download
+  [card {:keys [export-format format-rows pivot]
+         :or   {format-rows false
+                pivot       false}}]
+  (let [public-uuid  (str (random-uuid))
+        cleaned-card (dissoc card :id :entity_id)]
+    (mt/with-temp [:model/Card _ (assoc cleaned-card :public_uuid public-uuid)]
+      (->> (mt/user-http-request :crowberto :get 200
+                                 (format "public/card/%s/query/%s?format_rows=%s&pivot_results=%s"
+                                         public-uuid (name export-format)
+                                         format-rows
+                                         pivot))
+           (process-results pivot export-format)))))
 
 (defn- dashcard-download
-  [card-or-dashcard export-format format-rows?]
+  [card-or-dashcard {:keys [export-format format-rows pivot]}]
   (letfn [(dashcard-download* [{dashcard-id  :id
                                 card-id      :card_id
                                 dashboard-id :dashboard_id}]
-            (->> (format "dashboard/%d/dashcard/%d/card/%d/query/%s?format_rows=%s" dashboard-id dashcard-id card-id (name export-format) format-rows?)
-                 (mt/user-http-request :crowberto :post 200)
-                 (process-results export-format)))]
+            (->> (mt/user-http-request :crowberto :post 200
+                                       (format "dashboard/%d/dashcard/%d/card/%d/query/%s" dashboard-id dashcard-id card-id (name export-format))
+                                       :format_rows   format-rows
+                                       :pivot_results pivot)
+                 (process-results pivot export-format)))]
     (if (contains? card-or-dashcard :dashboard_id)
       (dashcard-download* card-or-dashcard)
       (mt/with-temp [:model/Dashboard {dashboard-id :id} {}
@@ -78,6 +111,25 @@
                                                     :card_id      (:id card-or-dashcard)}]
         (dashcard-download* dashcard)))))
 
+(defn- public-dashcard-download
+  [card-or-dashcard {:keys [export-format format-rows pivot]}]
+  (let [public-uuid (str (random-uuid))]
+    (letfn [(public-dashcard-download* [{dashcard-id  :id
+                                         card-id      :card_id}]
+              (->> (mt/user-http-request :crowberto :post 200
+                                         (format "public/dashboard/%s/dashcard/%d/card/%d/%s"
+                                                 public-uuid dashcard-id card-id (name export-format))
+                                         :format_rows   format-rows
+                                         :pivot_results pivot)
+                   (process-results pivot export-format)))]
+      (if (contains? card-or-dashcard :dashboard_id)
+        (mt/with-temp [:model/Dashboard {dashboard-id :id} {:public_uuid public-uuid}]
+          (public-dashcard-download* (assoc card-or-dashcard :dashboard_id dashboard-id)))
+        (mt/with-temp [:model/Dashboard {dashboard-id :id} {:public_uuid public-uuid}
+                       :model/DashboardCard dashcard {:dashboard_id dashboard-id
+                                                      :card_id      (:id card-or-dashcard)}]
+          (public-dashcard-download* dashcard))))))
+
 (defn- run-pulse-and-return-attached-csv-data!
   "Simulate sending the pulse email, get the attached text/csv content, and parse into a map of
   attachment name -> column name -> column data"
@@ -97,10 +149,10 @@
             msgs))))
 
 (defn- alert-attachment!
-  [card export-format _format-rows?]
+  [card {:keys [export-format format-rows pivot]}]
   (letfn [(alert-attachment* [pulse]
             (->> (run-pulse-and-return-attached-csv-data! pulse export-format)
-                 (process-results export-format)))]
+                 (process-results pivot export-format)))]
     (mt/with-temp [:model/Pulse {pulse-id :id
                                  :as      pulse} {:name "Test Alert"
                                                   :alert_condition "rows"}
@@ -108,6 +160,8 @@
                                        (when (= :csv  export-format) {:include_csv true})
                                        (when (= :json export-format) {:include_json true})
                                        (when (= :xlsx export-format) {:include_xlsx true})
+                                       {:format_rows format-rows}
+                                       {:pivot_results pivot}
                                        {:pulse_id pulse-id
                                         :card_id  (:id card)})
                    :model/PulseChannel {pulse-channel-id :id} {:channel_type :email
@@ -118,20 +172,21 @@
       (alert-attachment* pulse))))
 
 (defn- subscription-attachment!
-  [card-or-dashcard export-format _format-rows?]
+  [card-or-dashcard {:keys [export-format format-rows pivot]}]
   (letfn [(subscription-attachment* [pulse]
             (->> (run-pulse-and-return-attached-csv-data! pulse export-format)
-                 (process-results export-format)))]
+                 (process-results pivot export-format)))]
     (if (contains? card-or-dashcard :dashboard_id)
       ;; dashcard
       (mt/with-temp [:model/Pulse {pulse-id :id
                                    :as      pulse} {:name         "Test Pulse"
                                                     :dashboard_id (:dashboard_id card-or-dashcard)}
                      :model/PulseCard _ (merge
-                                         (case export-format
-                                           :csv  {:include_csv true}
-                                           :json {:include_json true}
-                                           :xlsx {:include_xlsx true})
+                                         (when (= :csv  export-format) {:include_csv true})
+                                         (when (= :json export-format) {:include_json true})
+                                         (when (= :xlsx export-format) {:include_xlsx true})
+                                         {:format_rows format-rows}
+                                         {:pivot_results pivot}
                                          {:pulse_id          pulse-id
                                           :card_id           (:card_id card-or-dashcard)
                                           :dashboard_card_id (:id card-or-dashcard)})
@@ -152,6 +207,8 @@
                                          (when (= :csv  export-format) {:include_csv true})
                                          (when (= :json export-format) {:include_json true})
                                          (when (= :xlsx export-format) {:include_xlsx true})
+                                         {:format_rows format-rows}
+                                         {:pivot_results pivot}
                                          {:pulse_id          pulse-id
                                           :card_id           (:id card-or-dashcard)
                                           :dashboard_card_id dashcard-id})
@@ -162,14 +219,27 @@
                                                      :user_id          (mt/user->id :rasta)}]
         (subscription-attachment* pulse)))))
 
+(defn all-downloads
+  [card-or-dashcard opts]
+  (merge
+   (when-not (contains? card-or-dashcard :dashboard_id)
+     {:unsaved-card-download    (unsaved-card-download card-or-dashcard opts)
+      :card-download            (card-download card-or-dashcard opts)
+      :public-question-download (public-question-download card-or-dashcard opts)})
+   {:dashcard-download        (dashcard-download card-or-dashcard opts)
+    :public-dashcard-download (public-dashcard-download card-or-dashcard opts)}))
+
 (defn all-outputs!
-  [card-or-dashcard export-format format-rows?]
+  [card-or-dashcard opts]
   (merge
    (when-not (contains? card-or-dashcard :dashboard_id)
-     {:card-download    (card-download card-or-dashcard export-format format-rows?)
-      :alert-attachment (alert-attachment! card-or-dashcard export-format format-rows?)})
-   {:dashcard-download       (card-download card-or-dashcard export-format format-rows?)
-    :subscription-attachment (subscription-attachment! card-or-dashcard export-format format-rows?)}))
+     {:unsaved-card-download    (unsaved-card-download card-or-dashcard opts)
+      :public-question-download (public-question-download card-or-dashcard opts)
+      :card-download            (card-download card-or-dashcard opts)
+      :alert-attachment         (alert-attachment! card-or-dashcard opts)})
+   {:dashcard-download        (dashcard-download card-or-dashcard opts)
+    :public-dashcard-download (public-dashcard-download card-or-dashcard opts)
+    :subscription-attachment  (subscription-attachment! card-or-dashcard opts)}))
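+
+;; Usage sketch (illustration only; `card` here is a placeholder, not a var in this ns): the pivot export tests
+;; below assert every download path at once by grouping the `all-outputs!` map on the result value, so one
+;; expected table can be compared against the full set of download kinds.
+(comment
+  (->> (all-outputs! card {:export-format :csv :format-rows true :pivot true})
+       (group-by second)                                       ;; result -> [[kind result] ...]
+       ((fn [m] (update-vals m #(into #{} (mapv first %)))))   ;; result -> #{kinds that produced it}
+       (apply concat)))                                        ;; => (expected-table #{kinds-that-produced-it})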
 
 (set! *warn-on-reflection* true)
 
@@ -188,9 +258,103 @@
    [:field "D" {:base-type :type/Text}]
    [:field "MEASURE" {:base-type :type/Integer}]])
 
-;; The Pivot Table Download/export test can be a bit confusing. I've kept a 'see pivot result' function in a comment at the end of this ns
-;; If you eval/run that in your repl, you should be able to see the results (It's not too many rows so should print acceptably)
-;; If you need to add assertions or fix up this test, that may be a helpful function to run!
+(deftest simple-pivot-export-test
+  (testing "Pivot table exports look pivoted"
+    (mt/dataset test-data
+      (mt/with-temp [:model/Card card
+                     {:display                :pivot
+                      :visualization_settings {:pivot_table.column_split
+                                               {:rows    [[:field (mt/id :products :category) {:base-type :type/Text}]]
+                                                :columns [[:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]
+                                                :values  [[:aggregation 0]]}
+                                               :column_settings
+                                               {"[\"name\",\"sum\"]" {:number_style       "currency"
+                                                                      :currency_in_header false}}}
+                      :dataset_query          {:database (mt/id)
+                                               :type     :query
+                                               :query
+                                               {:source-table (mt/id :products)
+                                                :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
+                                                :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
+                                                               [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]}}}]
+        (testing "formatted"
+          (is (= [[["Category" "2016" "2017" "2018" "2019" "Row totals"]
+                   ["Doohickey" "$632.14" "$854.19" "$496.43" "$203.13" "$2,185.89"]
+                   ["Gadget" "$679.83" "$1,059.11" "$844.51" "$435.75" "$3,019.20"]
+                   ["Gizmo" "$529.70" "$1,080.18" "$997.94" "$227.06" "$2,834.88"]
+                   ["Widget" "$987.39" "$1,014.68" "$912.20" "$195.04" "$3,109.31"]
+                   ["Grand totals" "$2,829.06" "$4,008.16" "$3,251.08" "$1,060.98" "$11,149.28"]]
+                  #{:unsaved-card-download :card-download :dashcard-download
+                    :alert-attachment :subscription-attachment
+                    :public-question-download :public-dashcard-download}]
+                 (->> (all-outputs! card {:export-format :csv :format-rows true :pivot true})
+                      (group-by second)
+                      ((fn [m] (update-vals m #(into #{} (mapv first %)))))
+                      (apply concat)))))
+        (testing "unformatted"
+          (is (= [[["Category"
+                    "2016-01-01T00:00:00Z"
+                    "2017-01-01T00:00:00Z"
+                    "2018-01-01T00:00:00Z"
+                    "2019-01-01T00:00:00Z"
+                    "Row totals"]
+                   ["Doohickey" "632.14" "854.19" "496.43" "203.13" "2185.89"]
+                   ["Gadget" "679.83" "1059.11" "844.51" "435.75" "3019.20"]
+                   ["Gizmo" "529.7" "1080.18" "997.94" "227.06" "2834.88"]
+                   ["Widget" "987.39" "1014.68" "912.2" "195.04" "3109.31"]
+                   ["Grand totals" "2829.06" "4008.16" "3251.08" "1060.98" "11149.28"]]
+                  #{:unsaved-card-download :card-download :dashcard-download
+                    :alert-attachment :subscription-attachment
+                    :public-question-download :public-dashcard-download}]
+                 (->> (all-outputs! card {:export-format :csv :format-rows false :pivot true})
+                      (group-by second)
+                      ((fn [m] (update-vals m #(into #{} (mapv first %)))))
+                      (apply concat)))))))))
+
+(deftest simple-pivot-export-row-col-totals-test
+  (testing "Pivot table csv exports respect row/column totals viz-settings"
+    (doseq [row-totals? [#_true false]
+            col-totals? [#_true false]]
+      (mt/dataset test-data
+        (mt/with-temp [:model/Card card
+                       {:display                :pivot
+                        :visualization_settings {:pivot_table.column_split
+                                                 {:rows    [[:field (mt/id :products :category) {:base-type :type/Text}]]
+                                                  :columns [[:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]
+                                                  :values  [[:aggregation 0]]}
+                                                 :pivot.show_row_totals    row-totals?
+                                                 :pivot.show_column_totals col-totals?
+                                                 :column_settings
+                                                 {"[\"name\",\"sum\"]" {:number_style       "currency"
+                                                                        :currency_in_header false}}}
+                        :dataset_query          {:database (mt/id)
+                                                 :type     :query
+                                                 :query
+                                                 {:source-table (mt/id :products)
+                                                  :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
+                                                  :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
+                                                                 [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]}}}]
+          (testing (format "formatted with row-totals: %s and col-totals: %s" row-totals? col-totals?)
+            (is (= [(keep
+                     (fn [row]
+                       (when row
+                         (if row-totals?
+                           row
+                           (vec (drop-last row)))))
+                     [["Category" "2016" "2017" "2018" "2019" "Row totals"]
+                      ["Doohickey" "$632.14" "$854.19" "$496.43" "$203.13" "$2,185.89"]
+                      ["Gadget" "$679.83" "$1,059.11" "$844.51" "$435.75" "$3,019.20"]
+                      ["Gizmo" "$529.70" "$1,080.18" "$997.94" "$227.06" "$2,834.88"]
+                      ["Widget" "$987.39" "$1,014.68" "$912.20" "$195.04" "$3,109.31"]
+                      (when col-totals? ["Grand totals" "$2,829.06" "$4,008.16" "$3,251.08" "$1,060.98" "$11,149.28"])])
+                    #{:unsaved-card-download :card-download :dashcard-download
+                      :alert-attachment :subscription-attachment
+                      :public-question-download :public-dashcard-download}]
+                   (->> (all-outputs! card {:export-format :csv :format-rows true :pivot true})
+                        (group-by second)
+                        ((fn [m] (update-vals m #(into #{} (mapv first %)))))
+                        (apply concat))))))))))
+
 (deftest ^:parallel pivot-export-test
   []
   (mt/dataset test-data
@@ -206,7 +370,7 @@
                                 :display_name field-name
                                 :field_ref    [:field field-name {:base-type base-type}]
                                 :base_type    base-type}))}
-                   :model/Card {pivot-card-id :id}
+                   :model/Card pivot-card
                    {:display                :pivot
                     :visualization_settings {:pivot_table.column_split
                                              {:rows    [[:field "C" {:base-type :type/Text}]
@@ -224,77 +388,74 @@
                                                [:field "C" {:base-type :type/Text}]
                                                [:field "D" {:base-type :type/Text}]]
                                               :source-table (format "card__%s" pivot-data-card-id)}}}]
-      (binding [qp.csv/*pivot-export-post-processing-enabled* true]
-        (let [result (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/csv?format_rows=false" pivot-card-id))
-                          csv/read-csv)]
-          (testing "Pivot CSV Exports look like a Pivoted Table"
-            (testing "The Headers Properly indicate the pivot rows names."
-              ;; Pivot Rows Header are Simply the Column names from the rows specified in
-              ;; [:visualization_settings :pivot_table.column_split :rows]
-              (is (= [["C" "D"]
-                      ["C" "D"]]
-                     [(take 2 (first result))
-                      (take 2 (second result))])))
-            (testing "The Headers Properly indicate the pivot column names."
-              (let [[header1 header2]  (map set (take 2 result))
-                    possible-vals-of-a #{"AA" "AB" "AC" "AD"}
-                    possible-vals-of-b #{"BA" "BB" "BC" "BD"}]
-                ;; In a Pivot Table, the Column headers are derived from the Possible Values in each Pivot Column
-                ;; Since the test data used here has defined 2 Pivot Columns, there must be 2 Header rows
-                ;; to fully capture all of the combinations of possible values for each pivot-col specified in
-                ;; [:visualization_settings :pivot_table.column_split :columns]
-                ;; To hopefully illustrate a bit, Let's consider that:
-                ;; Cat A can have "AA" "AB" "AC" "AD"
-                ;; Cat B can have "BA" "BB" "BC" "BD"
-                ;; The first 4 Entries in Header 1 (excluding the Pivot Rows Display Names) will all be "AA"
-                ;; And the first 4 Entries in Header 2 will be "BA" "BB" "BC" "BD"
-                (is (= [["AA" "AA" "AA" "AA"]
-                        ["BA" "BB" "BC" "BD"]]
-                       [(take 4 (drop 2 (first result)))
-                        (take 4 (drop 2 (second result)))]))
-                ;; This combination logic would continue for each specified Pivot Column, but we'll just stick with testing 2
-                ;; To keep things relatively easy to read and understand.
-                (testing "The first Header only contains possible values from the first specified pivot column"
-                  (is (set/subset? possible-vals-of-a header1))
-                  (is (not (set/subset? possible-vals-of-b header1))))
-                (testing "The second Header only contains possible values from the second specified pivot column"
-                  (is (set/subset? possible-vals-of-b header2))
-                  (is (not (set/subset? possible-vals-of-a header2))))
-                (testing "The Headers also show the Row Totals header"
-                  (is (= ["Row totals"
-                          "Row totals"]
-                         (map last (take 2 result))))))))
-
-          (testing "The Columns Properly indicate the pivot row names."
-            (let [col1               (map first result)
-                  col2               (map second result)
-                  possible-vals-of-c #{"CA" "CB" "CC" "CD"}
-                  possible-vals-of-d #{"DA" "DB" "DC" "DD"}]
-              ;; In a Pivot Table, the Row headers (the first columns in the result)
-              ;; are derived from the Possible Values in each Pivot Row
-              ;; Since the test data used here has defined 2 Pivot Rows, there are 2 Row Header columns
-              ;; to fully capture all of the combinations of possible values for each pivot-row specified in
-              ;; [:visualization_settings :pivot_table.column_split :rows]
+      (let [result (card-download pivot-card {:export-format :csv :pivot true})]
+        (testing "Pivot CSV Exports look like a Pivoted Table"
+          (testing "The Headers Properly indicate the pivot rows names."
+            ;; Pivot Rows Header are Simply the Column names from the rows specified in
+            ;; [:visualization_settings :pivot_table.column_split :rows]
+            (is (= [["C" "D"]
+                    ["C" "D"]]
+                   [(take 2 (first result))
+                    (take 2 (second result))])))
+          (testing "The Headers Properly indicate the pivot column names."
+            (let [[header1 header2]  (map set (take 2 result))
+                  possible-vals-of-a #{"AA" "AB" "AC" "AD"}
+                  possible-vals-of-b #{"BA" "BB" "BC" "BD"}]
+              ;; In a Pivot Table, the Column headers are derived from the Possible Values in each Pivot Column
+              ;; Since the test data used here has defined 2 Pivot Columns, there must be 2 Header rows
+              ;; to fully capture all of the combinations of possible values for each pivot-col specified in
+              ;; [:visualization_settings :pivot_table.column_split :columns]
               ;; To hopefully illustrate a bit, Let's consider that:
-              ;; Cat C can have "CA" "CB" "CC" "CD"
-              ;; Cat D can have "DA" "DB" "DC" "DD"
-              ;; The first 4 Entries in col1 (excluding the Pivot Rows Display Names) will all be "CA"
-              ;; And the first 4 Entries in col2 will be "DA" "DB" "DC" "DD"
-              (is (= [["CA" "CA" "CA" "CA"]
-                      ["DA" "DB" "DC" "DD"]]
-                     [(take 4 (drop 2 col1))
-                      (take 4 (drop 2 col2))]))
-              ;; This combination logic would continue for each specified Pivot Row, but we'll just stick with testing 2
+              ;; Cat A can have "AA" "AB" "AC" "AD"
+              ;; Cat B can have "BA" "BB" "BC" "BD"
+              ;; The first 4 Entries in Header 1 (excluding the Pivot Rows Display Names) will all be "AA"
+              ;; And the first 4 Entries in Header 2 will be "BA" "BB" "BC" "BD"
+              (is (= [["AA" "AA" "AA" "AA"]
+                      ["BA" "BB" "BC" "BD"]]
+                     [(take 4 (drop 2 (first result)))
+                      (take 4 (drop 2 (second result)))]))
+              ;; This combination logic would continue for each specified Pivot Column, but we'll just stick with testing 2
               ;; To keep things relatively easy to read and understand.
-              (testing "The first Column only contains possible values from the first specified pivot row"
-                (is (set/subset? possible-vals-of-c (set col1)))
-                (is (not (set/subset? possible-vals-of-d (set col1)))))
-              (testing "The second Column only contains possible values from the second specified pivot row"
-                (is (set/subset? possible-vals-of-d (set col2)))
-                (is (not (set/subset? possible-vals-of-c (set col2)))))
-              (testing "The 1st Column also shows the Grand Total"
-                (is (= "Grand Totals"
-                       (first (last result))))))))))))
+              (testing "The first Header only contains possible values from the first specified pivot column"
+                (is (set/subset? possible-vals-of-a header1))
+                (is (not (set/subset? possible-vals-of-b header1))))
+              (testing "The second Header only contains possible values from the second specified pivot column"
+                (is (set/subset? possible-vals-of-b header2))
+                (is (not (set/subset? possible-vals-of-a header2))))
+              (testing "The Headers also show the Row Totals header"
+                (is (= ["Row totals" ""]
+                       (map last (take 2 result))))))))
+
+        (testing "The Columns Properly indicate the pivot row names."
+          (let [col1               (map first result)
+                col2               (map second result)
+                possible-vals-of-c #{"CA" "CB" "CC" "CD"}
+                possible-vals-of-d #{"DA" "DB" "DC" "DD"}]
+            ;; In a Pivot Table, the Row headers (the first columns in the result)
+            ;; are derived from the Possible Values in each Pivot Row
+            ;; Since the test data used here has defined 2 Pivot Rows, there are 2 Row Header columns
+            ;; to fully capture all of the combinations of possible values for each pivot-row specified in
+            ;; [:visualization_settings :pivot_table.column_split :rows]
+            ;; To hopefully illustrate a bit, Let's consider that:
+            ;; Cat C can have "CA" "CB" "CC" "CD"
+            ;; Cat D can have "DA" "DB" "DC" "DD"
+            ;; The first 4 Entries in col1 (excluding the Pivot Rows Display Names) will all be "CA"
+            ;; And the first 4 Entries in col2 will be "DA" "DB" "DC" "DD"
+            (is (= [["CA" "CA" "CA" "CA"]
+                    ["DA" "DB" "DC" "DD"]]
+                   [(take 4 (drop 2 col1))
+                    (take 4 (drop 2 col2))]))
+            ;; This combination logic would continue for each specified Pivot Row, but we'll just stick with testing 2
+            ;; To keep things relatively easy to read and understand.
+            (testing "The first Column only contains possible values from the first specified pivot row"
+              (is (set/subset? possible-vals-of-c (set col1)))
+              (is (not (set/subset? possible-vals-of-d (set col1)))))
+            (testing "The second Column only contains possible values from the second specified pivot row"
+              (is (set/subset? possible-vals-of-d (set col2)))
+              (is (not (set/subset? possible-vals-of-c (set col2)))))
+            (testing "The 1st Column also shows the Grand Total"
+              (is (= "Grand totals"
+                     (first (last result)))))))))))
 
 (deftest multi-measure-pivot-tables-headers-test
   (testing "Pivot tables with multiple measures correctly include the measure titles in the final header row."
@@ -302,8 +463,8 @@
       (mt/with-temp [:model/Card {pivot-card-id :id}
                      {:display                :pivot
                       :visualization_settings {:pivot_table.column_split
-                                               {:rows    [[:field (mt/id :products :created_at) {:base-type :type/DateTime, :temporal-unit :month}]],
-                                                :columns [[:field (mt/id :products :category) {:base-type :type/Text}]],
+                                               {:rows    [[:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]
+                                                :columns [[:field (mt/id :products :category) {:base-type :type/Text}]]
                                                 :values  [[:aggregation 0]
                                                           [:aggregation 1]]}}
                       :dataset_query          {:database (mt/id)
@@ -311,25 +472,27 @@
                                                :query
                                                {:source-table (mt/id :products)
                                                 :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]
-                                                               [:avg [:field (mt/id :products :rating) {:base-type :type/Float}]]],
+                                                               [:avg [:field (mt/id :products :rating) {:base-type :type/Float}]]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
-                                                               [:field (mt/id :products :created_at) {:base-type :type/DateTime, :temporal-unit :month}]]}}}]
-        (binding [qp.csv/*pivot-export-post-processing-enabled* true]
-          (let [result (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/csv?format_rows=false" pivot-card-id))
-                            csv/read-csv)]
-            (is (= [["Created At" "Doohickey" "Doohickey" "Gadget" "Gadget" "Gizmo" "Gizmo" "Widget" "Widget" "Row totals" "Row totals"]
-                    ["Created At"
-                     "Sum of Price"
-                     "Average of Rating"
-                     "Sum of Price"
-                     "Average of Rating"
-                     "Sum of Price"
-                     "Average of Rating"
-                     "Sum of Price"
-                     "Average of Rating"
-                     "Sum of Price"
-                     "Average of Rating"]]
-                   (take 2 result)))))))))
+                                                               [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]}}}]
+        (let [result (->> (mt/user-http-request :crowberto :post 200
+                                                (format "card/%d/query/csv" pivot-card-id)
+                                                :format_rows   true
+                                                :pivot_results true)
+                          csv/read-csv)]
+          (is (= [["Created At"
+                   "Doohickey" "Doohickey"
+                   "Gadget" "Gadget"
+                   "Gizmo" "Gizmo"
+                   "Widget" "Widget"
+                   "Row totals" "Row totals"]
+                  ["Created At"
+                   "Sum of Price" "Average of Rating"
+                   "Sum of Price" "Average of Rating"
+                   "Sum of Price" "Average of Rating"
+                   "Sum of Price" "Average of Rating"
+                   "" ""]]
+                 (take 2 result))))))))
 
 (deftest ^:parallel zero-column-pivot-tables-test
   (testing "Pivot tables with zero columns download correctly."
@@ -348,16 +511,18 @@
                                                 :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
                                                                [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :month}]]}}}]
-        (binding [qp.csv/*pivot-export-post-processing-enabled* true]
-          (let [result (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/csv?format_rows=false" pivot-card-id))
-                            csv/read-csv)]
-            (is (= [["Created At" "Category" "Sum of Price"]
-                    ["2016-05-01T00:00:00Z" "Doohickey" "144.12"]
-                    ["2016-05-01T00:00:00Z" "Gadget" "81.58"]
-                    ["2016-05-01T00:00:00Z" "Gizmo" "75.09"]
-                    ["2016-05-01T00:00:00Z" "Widget" "90.21"]
-                    ["Totals for 2016-05-01T00:00:00Z" "" "391"]]
-                   (take 6 result)))))))))
+        (let [result (->> (mt/user-http-request :crowberto :post 200
+                                                (format "card/%d/query/csv" pivot-card-id)
+                                                :format_rows   true
+                                                :pivot_results true)
+                          csv/read-csv)]
+          (is (= [["Created At" "Category" "Sum of Price"]
+                  ["April, 2016" "Doohickey" ""]
+                  ["April, 2016" "Gadget" "49.54"]
+                  ["April, 2016" "Gizmo" "87.29"]
+                  ["April, 2016" "Widget" ""]
+                  ["Totals for April, 2016" "" "136.83"]]
+                 (take 6 result))))))))
 
 (deftest ^:parallel zero-row-pivot-tables-test
   (testing "Pivot tables with zero rows download correctly."
@@ -374,12 +539,15 @@
                                                {:source-table (mt/id :products)
                                                 :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]]}}}]
-        (binding [qp.csv/*pivot-export-post-processing-enabled* true]
-          (let [result (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/csv?format_rows=false" pivot-card-id))
-                            csv/read-csv)]
-            (is (= [["Category" "Doohickey" "Gadget" "Gizmo" "Widget" "Row totals"]
-                    ["Grand Totals" "2185.89" "3019.2" "2834.88" "3109.31" "11149.28"]]
-                   result))))))))
+        (let [result (->> (mt/user-http-request :crowberto :post 200
+                                                (format "card/%d/query/csv" pivot-card-id)
+                                                :format_rows   false
+                                                :pivot_results true)
+                          csv/read-csv)]
+          (is (= [["Category" "Doohickey" "Gadget" "Gizmo" "Widget" "Row totals"]
+                  ["" "2185.89" "3019.2" "2834.88" "3109.31" ""]
+                  ["Grand totals" "2185.89" "3019.2" "2834.88" "3109.31" "11149.28"]]
+                 result)))))))
 
 (deftest ^:parallel zero-column-multiple-meausres-pivot-tables-test
   (testing "Pivot tables with zero columns and multiple measures download correctly."
@@ -387,8 +555,9 @@
       (mt/with-temp [:model/Card {pivot-card-id :id}
                      {:display                :pivot
                       :visualization_settings {:pivot_table.column_split
-                                               {:rows    [[:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :month}]
-                                                          [:field (mt/id :products :category) {:base-type :type/Text}]]
+                                               {:rows    [[:field (mt/id :products :category) {:base-type :type/Text}]
+                                                          [:field (mt/id :products :created_at) {:base-type     :type/DateTime
+                                                                                                 :temporal-unit :year}]]
                                                 :columns []
                                                 :values  [[:aggregation 0] [:aggregation 1]]}}
                       :dataset_query          {:database (mt/id)
@@ -396,35 +565,38 @@
                                                :query
                                                {:source-table (mt/id :products)
                                                 :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]
-                                                               [:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
+                                                               [:count]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
-                                                               [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]}}}]
-        (binding [qp.csv/*pivot-export-post-processing-enabled* true]
-          (let [result (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/csv?format_rows=false" pivot-card-id))
-                            csv/read-csv)]
-            (is (= [["Created At" "Category" "Sum of Price" "Sum of Price"]
-                    ["2016-01-01T00:00:00Z" "Doohickey" "632.14" "632.14"]
-                    ["2016-01-01T00:00:00Z" "Gadget" "679.83" "679.83"]
-                    ["2016-01-01T00:00:00Z" "Gizmo" "529.7" "529.7"]
-                    ["2016-01-01T00:00:00Z" "Widget" "987.39" "987.39"]
-                    ["Totals for 2016-01-01T00:00:00Z" "" "2829.06" "2829.06"]
-                    ["2017-01-01T00:00:00Z" "Doohickey" "854.19" "854.19"]
-                    ["2017-01-01T00:00:00Z" "Gadget" "1059.11" "1059.11"]
-                    ["2017-01-01T00:00:00Z" "Gizmo" "1080.18" "1080.18"]
-                    ["2017-01-01T00:00:00Z" "Widget" "1014.68" "1014.68"]
-                    ["Totals for 2017-01-01T00:00:00Z" "" "4008.16" "4008.16"]
-                    ["2018-01-01T00:00:00Z" "Doohickey" "496.43" "496.43"]
-                    ["2018-01-01T00:00:00Z" "Gadget" "844.51" "844.51"]
-                    ["2018-01-01T00:00:00Z" "Gizmo" "997.94" "997.94"]
-                    ["2018-01-01T00:00:00Z" "Widget" "912.2" "912.2"]
-                    ["Totals for 2018-01-01T00:00:00Z" "" "3251.08" "3251.08"]
-                    ["2019-01-01T00:00:00Z" "Doohickey" "203.13" "203.13"]
-                    ["2019-01-01T00:00:00Z" "Gadget" "435.75" "435.75"]
-                    ["2019-01-01T00:00:00Z" "Gizmo" "227.06" "227.06"]
-                    ["2019-01-01T00:00:00Z" "Widget" "195.04" "195.04"]
-                    ["Totals for 2019-01-01T00:00:00Z" "" "1060.98" "1060.98"]
-                    ["Grand Totals" "" "11149.28" "11149.28"]]
-                   result))))))))
+                                                               [:field (mt/id :products :created_at) {:base-type     :type/DateTime
+                                                                                                      :temporal-unit :year}]]}}}]
+        (let [result (->> (mt/user-http-request :crowberto :post 200
+                                                (format "card/%d/query/csv" pivot-card-id)
+                                                :format_rows   true
+                                                :pivot_results true)
+                          csv/read-csv)]
+          (is (= [["Category" "Created At" "Sum of Price" "Count"]
+                  ["Doohickey" "2016" "632.14" "13"]
+                  ["Doohickey" "2017" "854.19" "17"]
+                  ["Doohickey" "2018" "496.43" "8"]
+                  ["Doohickey" "2019" "203.13" "4"]
+                  ["Totals for Doohickey" "" "2,185.89" "42"]
+                  ["Gadget" "2016" "679.83" "13"]
+                  ["Gadget" "2017" "1,059.11" "19"]
+                  ["Gadget" "2018" "844.51" "14"]
+                  ["Gadget" "2019" "435.75" "7"]
+                  ["Totals for Gadget" "" "3,019.2" "53"]
+                  ["Gizmo" "2016" "529.7" "9"]
+                  ["Gizmo" "2017" "1,080.18" "21"]
+                  ["Gizmo" "2018" "997.94" "17"]
+                  ["Gizmo" "2019" "227.06" "4"]
+                  ["Totals for Gizmo" "" "2,834.88" "51"]
+                  ["Widget" "2016" "987.39" "19"]
+                  ["Widget" "2017" "1,014.68" "18"]
+                  ["Widget" "2018" "912.2" "14"]
+                  ["Widget" "2019" "195.04" "3"]
+                  ["Totals for Widget" "" "3,109.31" "54"]
+                  ["Grand totals" "" "11,149.28" "200"]]
+                 result)))))))
 
 (deftest pivot-table-native-pivot-in-xlsx-test
   (testing "Pivot table xlsx downloads produce a 'native pivot' in the workbook."
@@ -444,13 +616,15 @@
                                                                [:avg [:field (mt/id :products :rating) {:base-type :type/Float}]]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
                                                                [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :month}]]}}}]
-        (binding [qp.xlsx/*pivot-export-post-processing-enabled* true]
-          (let [result (mt/user-http-request :crowberto :post 200 (format "card/%d/query/xlsx?format_rows=false" pivot-card-id))
-                pivot  (with-open [in (io/input-stream result)]
-                         (->> (spreadsheet/load-workbook in)
-                              (spreadsheet/select-sheet "pivot")
-                              ((fn [s] (.getPivotTables ^XSSFSheet s)))))]
-            (is (not (nil? pivot)))))))))
+        (let [result (mt/user-http-request :crowberto :post 200
+                                           (format "card/%d/query/xlsx" pivot-card-id)
+                                           :format_rows   true
+                                           :pivot_results true)
+              pivot  (with-open [in (io/input-stream result)]
+                       (->> (spreadsheet/load-workbook in)
+                            (spreadsheet/select-sheet "pivot")
+                            ((fn [s] (.getPivotTables ^XSSFSheet s)))))]
+          (is (not (nil? pivot))))))))
 
 (deftest ^:parallel zero-column-native-pivot-tables-test
   (testing "Pivot tables with zero columns download correctly as xlsx."
@@ -469,24 +643,26 @@
                                                 :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
                                                                [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :month}]]}}}]
-        (binding [qp.xlsx/*pivot-export-post-processing-enabled* true]
-          (let [result       (mt/user-http-request :crowberto :post 200 (format "card/%d/query/xlsx?format_rows=false" pivot-card-id))
-                [pivot data] (with-open [in (io/input-stream result)]
-                               (let [wb    (spreadsheet/load-workbook in)
-                                     pivot (.getPivotTables ^XSSFSheet (spreadsheet/select-sheet "pivot" wb))
-                                     data  (->> (spreadsheet/select-sheet "data" wb)
-                                                spreadsheet/row-seq
-                                                (mapv (fn [row] (->> (spreadsheet/cell-seq row)
-                                                                     (mapv spreadsheet/read-cell)))))]
-                                 [pivot data]))]
-            (is (not (nil? pivot)))
-            (is (= [["Category" "Created At" "Sum of Price"]
-                    ["Doohickey" #inst "2016-05-01T00:00:00.000-00:00" 144.12]
-                    ["Doohickey" #inst "2016-06-01T00:00:00.000-00:00" 82.92]
-                    ["Doohickey" #inst "2016-07-01T00:00:00.000-00:00" 78.22]
-                    ["Doohickey" #inst "2016-08-01T00:00:00.000-00:00" 71.09]
-                    ["Doohickey" #inst "2016-09-01T00:00:00.000-00:00" 45.65]]
-                   (take 6 data)))))))))
+        (let [result       (mt/user-http-request :crowberto :post 200
+                                                 (format "card/%d/query/xlsx" pivot-card-id)
+                                                 :format_rows   true
+                                                 :pivot_results true)
+              [pivot data] (with-open [in (io/input-stream result)]
+                             (let [wb    (spreadsheet/load-workbook in)
+                                   pivot (.getPivotTables ^XSSFSheet (spreadsheet/select-sheet "pivot" wb))
+                                   data  (->> (spreadsheet/select-sheet "data" wb)
+                                              spreadsheet/row-seq
+                                              (mapv (fn [row] (->> (spreadsheet/cell-seq row)
+                                                                   (mapv spreadsheet/read-cell)))))]
+                               [pivot data]))]
+          (is (not (nil? pivot)))
+          (is (= [["Category" "Created At" "Sum of Price"]
+                  ["Doohickey" #inst "2016-05-01T00:00:00.000-00:00" 144.12]
+                  ["Doohickey" #inst "2016-06-01T00:00:00.000-00:00" 82.92]
+                  ["Doohickey" #inst "2016-07-01T00:00:00.000-00:00" 78.22]
+                  ["Doohickey" #inst "2016-08-01T00:00:00.000-00:00" 71.09]
+                  ["Doohickey" #inst "2016-09-01T00:00:00.000-00:00" 45.65]]
+                 (take 6 data))))))))
 
 (deftest ^:parallel zero-row-native-pivot-tables-test
   (testing "Pivot tables with zero rows download correctly as xlsx."
@@ -503,26 +679,28 @@
                                                {:source-table (mt/id :products)
                                                 :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
                                                 :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]]}}}]
-        (binding [qp.xlsx/*pivot-export-post-processing-enabled* true]
-          (let [result       (mt/user-http-request :crowberto :post 200 (format "card/%d/query/xlsx?format_rows=false" pivot-card-id))
-                [pivot data] (with-open [in (io/input-stream result)]
-                               (let [wb    (spreadsheet/load-workbook in)
-                                     pivot (.getPivotTables ^XSSFSheet (spreadsheet/select-sheet "pivot" wb))
-                                     data  (->> (spreadsheet/select-sheet "data" wb)
-                                                spreadsheet/row-seq
-                                                (mapv (fn [row] (->> (spreadsheet/cell-seq row)
-                                                                     (mapv spreadsheet/read-cell)))))]
-                                 [pivot data]))]
-            (is (not (nil? pivot)))
-            (is (= [["Category" "Sum of Price"]
-                    ["Doohickey" 2185.89]
-                    ["Gadget" 3019.2]
-                    ["Gizmo" 2834.88]
-                    ["Widget" 3109.31]]
-                   (take 6 data)))))))))
+        (let [result       (mt/user-http-request :crowberto :post 200
+                                                 (format "card/%d/query/xlsx" pivot-card-id)
+                                                 :format_rows   true
+                                                 :pivot_results true)
+              [pivot data] (with-open [in (io/input-stream result)]
+                             (let [wb    (spreadsheet/load-workbook in)
+                                   pivot (.getPivotTables ^XSSFSheet (spreadsheet/select-sheet "pivot" wb))
+                                   data  (->> (spreadsheet/select-sheet "data" wb)
+                                              spreadsheet/row-seq
+                                              (mapv (fn [row] (->> (spreadsheet/cell-seq row)
+                                                                   (mapv spreadsheet/read-cell)))))]
+                               [pivot data]))]
+          (is (not (nil? pivot)))
+          (is (= [["Category" "Sum of Price"]
+                  ["Doohickey" 2185.89]
+                  ["Gadget" 3019.2]
+                  ["Gizmo" 2834.88]
+                  ["Widget" 3109.31]]
+                 (take 6 data))))))))
 
-(deftest ^:parallel pivot-table-exports-respect-dynamic-var-setting
-  (testing "Pivot tables will export the 'classic' way by default for."
+(deftest ^:parallel pivot-table-questions-can-export-unpivoted
+  (testing "Pivot tables will export the 'classic' way by default"
     (testing "for csv"
       (mt/dataset test-data
         (mt/with-temp [:model/Card {pivot-card-id :id}
@@ -539,14 +717,16 @@
                                                   :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
                                                   :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
                                                                  [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :month}]]}}}]
-          (let [result (->> (mt/user-http-request :crowberto :post 200 (format "card/%d/query/csv?format_rows=false" pivot-card-id))
+          (let [result (->> (mt/user-http-request :crowberto :post 200
+                                                  (format "card/%d/query/csv" pivot-card-id)
+                                                  :format_rows true)
                             csv/read-csv)]
             (is (= [["Category" "Created At" "Sum of Price"]
-                    ["Doohickey" "2016-05-01T00:00:00Z" "144.12"]
-                    ["Doohickey" "2016-06-01T00:00:00Z" "82.92"]
-                    ["Doohickey" "2016-07-01T00:00:00Z" "78.22"]
-                    ["Doohickey" "2016-08-01T00:00:00Z" "71.09"]
-                    ["Doohickey" "2016-09-01T00:00:00Z" "45.65"]]
+                    ["Doohickey" "May, 2016" "144.12"]
+                    ["Doohickey" "June, 2016" "82.92"]
+                    ["Doohickey" "July, 2016" "78.22"]
+                    ["Doohickey" "August, 2016" "71.09"]
+                    ["Doohickey" "September, 2016" "45.65"]]
                    (take 6 result)))))))
     (testing "for xlsx"
       (mt/dataset test-data
@@ -596,8 +776,8 @@
                                                       {:table.cell_column "TOTAL"
                                                        :column_settings   {(format "[\"ref\",[\"field\",%d,null]]" (mt/id :orders :total))
                                                                            {:column_title "CASH MONEY"}}}}]
-          (let [card-result     (card-download card :csv true)
-                dashcard-result (dashcard-download dashcard :csv true)
+          (let [card-result     (card-download card {:export-format :csv :format-rows true})
+                dashcard-result (dashcard-download dashcard {:export-format :csv :format-rows true})
                 card-header     ["ID" "User ID" "Product ID" "SUB CASH MONEY" "Tax"
                                  "Total" "Discount ($)" "Created At" "Quantity"]
                 dashcard-header ["ID" "User ID" "Product ID" "SUB CASH MONEY" "Tax"
@@ -626,8 +806,8 @@
                                                        {:table.cell_column "TOTAL"
                                                         :column_settings   {(format "[\"ref\",[\"field\",%d,null]]" (mt/id :orders :total))
                                                                             {:column_title "CASH MONEY"}}}}]
-          (let [subscription-result (subscription-attachment! dashcard :csv true)
-                alert-result        (alert-attachment! card :csv true)
+          (let [subscription-result (subscription-attachment! dashcard {:export-format :csv :format-rows true})
+                alert-result        (alert-attachment! card {:export-format :csv :format-rows true})
                 alert-header        ["ID" "User ID" "Product ID" "SUB CASH MONEY" "Tax"
                                      "Total" "Discount ($)" "Created At" "Quantity"]
                 subscription-header ["ID" "User ID" "Product ID" "SUB CASH MONEY" "Tax"
@@ -645,11 +825,14 @@
                                          :dataset_query {:database (mt/id)
                                                          :type     :native
                                                          :native   {:query "SELECT 1 as A FROM generate_series(1,1100000);"}}}]
-          (let [results (all-outputs! card :csv true)]
-            (is (= {:card-download           1050001
-                    :alert-attachment        1050001
-                    :dashcard-download       1050001
-                    :subscription-attachment 1050001}
+          (let [results (all-outputs! card {:export-format :csv :format-rows true})]
+            (is (= {:card-download            1050001
+                    :unsaved-card-download    1050001
+                    :alert-attachment         1050001
+                    :dashcard-download        1050001
+                    :subscription-attachment  1050001
+                    :public-question-download 1050001
+                    :public-dashcard-download 1050001}
                    (update-vals results count))))))))
   (testing "Downloads row limit default works."
     (mt/dataset test-data
@@ -657,11 +840,14 @@
                                        :dataset_query {:database (mt/id)
                                                        :type     :native
                                                        :native   {:query "SELECT 1 as A FROM generate_series(1,1100000);"}}}]
-        (let [results (all-outputs! card :csv true)]
-          (is (= {:card-download           1048576
-                  :alert-attachment        1048576
-                  :dashcard-download       1048576
-                  :subscription-attachment 1048576}
+        (let [results (all-outputs! card {:export-format :csv :format-rows true})]
+          (is (= {:card-download            1048576
+                  :unsaved-card-download    1048576
+                  :alert-attachment         1048576
+                  :dashcard-download        1048576
+                  :subscription-attachment  1048576
+                  :public-question-download 1048576
+                  :public-dashcard-download 1048576}
                  (update-vals results count))))))))
 
 (deftest ^:parallel model-viz-settings-downloads-test
@@ -690,8 +876,8 @@
                                                                     :visibility_type    :normal
                                                                     :display_name       "Subtotal"
                                                                     :base_type          :type/Float}]}]
-          (let [card-result     (card-download card :csv true)
-                dashcard-result (dashcard-download card :csv true)]
+          (let [card-result     (card-download card {:export-format :csv :format-rows true})
+                dashcard-result (dashcard-download card {:export-format :csv :format-rows true})]
             (is (= {:card-download     ["Subtotal (CAD)" "0.38"]
                     :dashcard-download ["Subtotal (CAD)" "0.38"]}
                    {:card-download     (mapv #(nth % 3) (take 2 card-result))
@@ -716,10 +902,10 @@
                                                                    :currency_in_header false}}}}]
         (testing "for csv"
           (is (= "2,185.89 Canadian dollars"
-                 (-> (card-download card :csv true) second second))))
+                 (-> (card-download card {:export-format :csv :format-rows true}) second second))))
         (testing "for xlsx (#43039)"
           (is (= "2,185.89 Canadian dollars"
-                 (-> (card-download card :xlsx true) second second))))))))
+                 (-> (card-download card {:export-format :xlsx :format-rows true}) second second))))))))
 
 (deftest table-metadata-affects-column-formatting-properly
   (testing "A Table's configured metadata (eg. Semantic Type of currency) can affect column formatting"
@@ -746,11 +932,11 @@
                                                                                      {:currency_in_header false}}}}]
         (testing "for csv"
           (is (= [["Discount"] ["$6.42"]]
-                 (-> (card-download card :csv true)))))
+                 (-> (card-download card {:export-format :csv :format-rows true})))))
         (testing "for xlsx"
           ;; the [$$] part will appear as $ when you open the Excel file in a spreadsheet app
           (is (= [["Discount"] ["[$$]6.42"]]
-                 (-> (card-download card :xlsx true)))))))))
+                 (-> (card-download card {:export-format :xlsx :format-rows true})))))))))
 
 (deftest clean-errors-test
   (testing "Queries that error should not include visualization settings (metabase-private #233)"
@@ -780,9 +966,9 @@
             ;; for now, don't try to read xlsx back in, it will not be correct since we end up writing
             ;; a json blob to the output stream, it creates an invalid xlsx anyway.
             ;; This is not new behaviour, we'll just fix it when a better solution to 'errors in downloaded files' comes along
-            (let [results (mt/user-http-request :rasta :post 200 (format "card/%d/query/%s?format_rows=true" card-id export-format))
+            (let [results (mt/user-http-request :rasta :post 200 (format "card/%d/query/%s" card-id export-format) :format_rows true)
                   results-string (if (= "xlsx" export-format)
-                                   (read-xlsx results)
+                                   (read-xlsx false results)
                                    (str results))]
               (testing (format "Testing export format: %s" export-format)
                 (doseq [illegal illegal-strings]
@@ -811,6 +997,37 @@
             (let [result (mt/user-http-request :crowberto :post 200
                                                (format "card/%d/query/%s?format_rows=false" pivot-card-id export-format)
                                                {})
-                  data   (process-results (keyword export-format) result)]
+                  data   (process-results false (keyword export-format) result)]
               (is (= ["Category" "Sum of Price"]
-                     (first data))))))))))
+                     (first data)))
+              (is (= 2
+                     (count (second data)))))))))))
+
+(deftest format-rows-value-affects-xlsx-exports
+  (testing "Format-rows true/false is respected for xlsx exports."
+    (mt/dataset test-data
+      (mt/with-temp [:model/Card card
+                     {:display                :pivot
+                      :visualization_settings {:pivot_table.column_split
+                                               {:rows    [[:field (mt/id :products :category) {:base-type :type/Text}]]
+                                                :columns [[:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]
+                                                :values  [[:aggregation 0]]}
+                                               :column_settings
+                                               {"[\"name\",\"sum\"]" {:number_style       "currency"
+                                                                      :currency_in_header false}}}
+                      :dataset_query          {:database (mt/id)
+                                               :type     :query
+                                               :query
+                                               {:source-table (mt/id :products)
+                                                :aggregation  [[:sum [:field (mt/id :products :price) {:base-type :type/Float}]]]
+                                                :breakout     [[:field (mt/id :products :category) {:base-type :type/Text}]
+                                                               [:field (mt/id :products :created_at) {:base-type :type/DateTime :temporal-unit :year}]]}}}]
+        (is (= [["Category" "Created At" "Sum of Price"]
+                ["Doohickey" "2016" "632.14"]
+                ["Doohickey" "2017" "854.19"]]
+               (take 3 (card-download card {:export-format :xlsx :format-rows true :pivot true}))))
+        ;; Excel will apply a default format, which is what we see here; the 'actual' data in the cells is unformatted.
+        (is (= [["Category" "Created At" "Sum of Price"]
+                ["Doohickey" "January 1, 2016, 12:00 AM" "632.14"]
+                ["Doohickey" "January 1, 2017, 12:00 AM" "854.19"]]
+               (take 3 (card-download card {:export-format :xlsx :format-rows false :pivot true}))))))))
diff --git a/test/metabase/api/embed/common_test.clj b/test/metabase/api/embed/common_test.clj
index 5c7813cd82fe61dda6550e815244def0f253f537..5a24250a6bd5a9336e165f41635a500bbe80d743 100644
--- a/test/metabase/api/embed/common_test.clj
+++ b/test/metabase/api/embed/common_test.clj
@@ -1,26 +1,34 @@
 (ns metabase.api.embed.common-test
   (:require [clojure.test :refer [deftest is testing]]
+            [metabase.analytics.stats :as stats]
             [metabase.api.embed.common :as api.embed.common]
+            [metabase.eid-translation :as eid-translation]
             [metabase.test :as mt]
             [toucan2.core :as t2]))
 
 (deftest ->id-test
-  (api.embed.common/get-and-clear-translation-count!)
-  (is (= @#'api.embed.common/default-eid-translation-counter
-         (api.embed.common/entity-id-translation-counter)))
+  (#'stats/clear-translation-count!)
+  (is (= (assoc eid-translation/default-counter :total 0)
+         (#'stats/get-translation-count)))
   (mt/with-temp [:model/Card {card-id :id card-eid :entity_id} {}]
     (is (= card-id (api.embed.common/->id :card card-id)))
     (is (= card-id (api.embed.common/->id :model/Card card-id)))
-    (is (partial= {:ok 0 :total 0} (api.embed.common/get-and-clear-translation-count!))
+    (is (partial= {:ok 0 :total 0} (#'stats/get-translation-count))
         "Translations are not counted when they don't occur")
+    (#'stats/clear-translation-count!)
     (is (= card-id (api.embed.common/->id :card card-eid)))
     (is (= card-id (api.embed.common/->id :model/Card card-eid)))
-    (is (partial= {:ok 2 :total 2} (api.embed.common/get-and-clear-translation-count!))
-        "Translations are counted when they do occur"))
+    (is (partial= {:ok 2 :total 2} (#'stats/get-translation-count))
+        "Translations are counted when they do occur")
+    (#'stats/clear-translation-count!))
 
   (doseq [[card-id entity-id] (t2/select-fn->fn :id :entity_id [:model/Card :id :entity_id] {:limit 100})]
     (testing (str "card-id: " card-id " entity-id: " entity-id)
+
       (is (= card-id (api.embed.common/->id :model/Card card-id)))
       (is (= card-id (api.embed.common/->id :card card-id)))
+
       (is (= card-id (api.embed.common/->id :model/Card entity-id)))
-      (is (= card-id (api.embed.common/->id :card entity-id))))))
+      (is (= card-id (api.embed.common/->id :card entity-id)))))
+  (is (malli= [:map [:ok pos-int?] [:total pos-int?]]
+              (#'stats/get-translation-count))))
diff --git a/test/metabase/api/embed_test.clj b/test/metabase/api/embed_test.clj
index 047f743dc230af15bfe4775403ae13759b9f5da7..7f8f1873472470a8ccbce287989f33ea445d6ddb 100644
--- a/test/metabase/api/embed_test.clj
+++ b/test/metabase/api/embed_test.clj
@@ -6,6 +6,7 @@
    [clj-time.core :as time]
    [clojure.data.csv :as csv]
    [clojure.set :as set]
+   [clojure.string :as str]
    [clojure.test :refer :all]
    [crypto.random :as crypto-random]
    [dk.ative.docjure.spreadsheet :as spreadsheet]
@@ -289,7 +290,9 @@
   (str "embed/card/"
        (card-token card-or-id additional-token-keys)
        "/query"
-       response-format-route-suffix))
+       response-format-route-suffix
+       (when-not (str/blank? response-format-route-suffix)
+         "?format_rows=true")))
 
 (def ^:private response-format->request-options
   {""      nil
@@ -426,7 +429,10 @@
 
           (testing "If `:locked` parameter is present in URL params, request should fail"
             (is (= "You can only specify a value for :venue_id in the JWT."
-                   (client/client :get 400 (str (card-query-url card response-format {:params {:venue_id 100}}) "?venue_id=100"))))))))))
+                   (let [url (card-query-url card response-format {:params {:venue_id 100}})]
+                     (client/client :get 400 (str url (if (str/includes? url "format_rows")
+                                                        "&venue_id=100"
+                                                        "?venue_id=100"))))))))))))
 
 (deftest card-disabled-params-test
   (with-embedding-enabled-and-new-secret-key!
@@ -439,7 +445,10 @@
 
         (testing "If a `:disabled` param is passed in the URL the request should fail"
           (is (= "You're not allowed to specify a value for :venue_id."
-                 (client/client :get 400 (str (card-query-url card response-format) "?venue_id=200")))))))))
+                 (let [url (card-query-url card response-format)]
+                   (client/client :get 400 (str url (if (str/includes? url "format_rows")
+                                                      "&venue_id=200"
+                                                      "?venue_id=200")))))))))))
 
 (deftest card-enabled-params-test
   (mt/test-helpers-set-global-values!
@@ -448,7 +457,10 @@
         (do-response-formats [response-format request-options]
           (testing "If `:enabled` param is present in both JWT and the URL, the request should fail"
             (is (= "You can't specify a value for :venue_id if it's already set in the JWT."
-                   (client/real-client :get 400 (str (card-query-url card response-format {:params {:venue_id 100}}) "?venue_id=200")))))
+                   (let [url (card-query-url card response-format {:params {:venue_id 100}})]
+                     (client/client :get 400 (str url (if (str/includes? url "format_rows")
+                                                        "&venue_id=100"
+                                                        "?venue_id=100")))))))
 
           (testing "If an `:enabled` param is present in the JWT, that's ok"
             #_{:clj-kondo/ignore [:deprecated-var]}
@@ -462,9 +474,12 @@
             #_{:clj-kondo/ignore [:deprecated-var]}
             (test-query-results
              response-format
-             (client/real-client :get (response-format->status-code response-format)
-                                 (str (card-query-url card response-format) "?venue_id=200")
-                                 {:request-options request-options}))))))))
+             (let [url (card-query-url card response-format)]
+               (client/real-client :get (response-format->status-code response-format)
+                                   (str url (if (str/includes? url "format_rows")
+                                              "&venue_id=200"
+                                              "?venue_id=200"))
+                                   {:request-options request-options})))))))))
 
 (defn card-with-date-field-filter-default
   []
@@ -537,7 +552,7 @@
     (with-embedding-enabled-and-new-secret-key!
       (t2.with-temp/with-temp [Card card (card-with-date-field-filter)]
         (is (= "count\n107\n"
-               (client/client :get 200 (str (card-query-url card "/csv") "?date=Q1-2014"))))))))
+               (client/client :get 200 (str (card-query-url card "/csv") "&date=Q1-2014"))))))))
 
 (deftest csv-forward-url-test
   (mt/test-helpers-set-global-values!
diff --git a/test/metabase/api/public_test.clj b/test/metabase/api/public_test.clj
index ed670efabe0a6c97d14f9ad70c363366c8a3ba7f..38b61b929022e14ba5f9029157454879109dd061 100644
--- a/test/metabase/api/public_test.clj
+++ b/test/metabase/api/public_test.clj
@@ -314,16 +314,16 @@
 
         (testing ":json download response format"
           (is (= [{:Count "100"}]
-                 (client/client :get 200 (str "public/card/" uuid "/query/json")))))
+                 (client/client :get 200 (str "public/card/" uuid "/query/json?format_rows=true")))))
 
         (testing ":csv download response format"
           (is (= "Count\n100\n"
-                 (client/client :get 200 (str "public/card/" uuid "/query/csv"), :format :csv))))
+                 (client/client :get 200 (str "public/card/" uuid "/query/csv?format_rows=true"), :format :csv))))
 
         (testing ":xlsx download response format"
           (is (= [{:col "Count"} {:col 100.0}]
                  (parse-xlsx-response
-                  (client/client :get 200 (str "public/card/" uuid "/query/xlsx"))))))))))
+                  (client/client :get 200 (str "public/card/" uuid "/query/xlsx?format_rows=true"))))))))))
 
 (deftest execute-public-card-as-user-without-perms-test
   (testing "A user that doesn't have permissions to run the query normally should still be able to run a public Card as if they weren't logged in"
diff --git a/test/metabase/api/pulse_test.clj b/test/metabase/api/pulse_test.clj
index 9e294d287c0e30e91e6889998383a7cf356e04dc..b1caaf17fe41bd74f2e52c2b0ae24bb6f30366ba 100644
--- a/test/metabase/api/pulse_test.clj
+++ b/test/metabase/api/pulse_test.clj
@@ -47,7 +47,8 @@
       (update :collection_id boolean)
       ;; why? these fields in this last assoc are from the PulseCard model and this function takes the Card model
       ;; because PulseCard is somewhat hidden behind the scenes
-      (assoc :include_csv false, :include_xls false, :dashboard_card_id nil, :dashboard_id nil, :format_rows true
+      (assoc :include_csv false :include_xls false :dashboard_card_id nil :dashboard_id nil
+             :format_rows true :pivot_results false
              :parameter_mappings nil)))
 
 (defn- pulse-channel-details [channel]
diff --git a/test/metabase/dashboard_subscription_test.clj b/test/metabase/dashboard_subscription_test.clj
index ec2c4e93fb3e3969c5171435a676dddfbae2aee7..ba0d533b76baa349dcf66315e55163d6a72e4185 100644
--- a/test/metabase/dashboard_subscription_test.clj
+++ b/test/metabase/dashboard_subscription_test.clj
@@ -987,7 +987,7 @@
   (when (seq rows)
     [(let [^java.io.ByteArrayOutputStream baos (java.io.ByteArrayOutputStream.)]
        (with-open [os baos]
-         (#'messages/stream-api-results-to-export-format :csv true os result)
+         (#'messages/stream-api-results-to-export-format os {:export-format :csv :format-rows? true} result)
          (let [output-string (.toString baos "UTF-8")]
            {:type         :attachment
             :content-type :csv
diff --git a/test/metabase/db/custom_migrations_test.clj b/test/metabase/db/custom_migrations_test.clj
index 3564c5bdfab6699cf79ec486897789d5524ace19..3319922129ca2f526592e3b4ae760c366a977912 100644
--- a/test/metabase/db/custom_migrations_test.clj
+++ b/test/metabase/db/custom_migrations_test.clj
@@ -1984,7 +1984,17 @@
             (binding [custom-migrations/*create-sample-content* create?]
               (is (false? (sample-content-created?)))
               (migrate!)
-              (is ((if create? true? false?) (sample-content-created?)))))))))
+              (is ((if create? true? false?) (sample-content-created?))))
+
+            (when (true? create?)
+              (testing "The Examples collection has permissions set to grant read-write access to all users"
+                (let [id (t2/select-one-pk :model/Collection :is_sample true)]
+                  (is (partial=
+                       {:collection_id id
+                        :perm_type     :perms/collection-access
+                        :perm_value    :read-and-write}
+                       (t2/select-one :model/Permissions :collection_id id)))))))))))
+
   (testing "The sample content isn't created if the sample database existed already in the past (or any database for that matter)"
     (impl/test-migrations "v50.2024-05-27T15:55:22" [migrate!]
       (let [sample-content-created? #(boolean (not-empty (t2/query "SELECT * FROM report_dashboard where name = 'E-commerce insights'")))]
@@ -1999,6 +2009,7 @@
         (is (false? (sample-content-created?)))
         (is (empty? (t2/query "SELECT * FROM metabase_database"))
             "No database should have been created"))))
+
   (testing "The sample content isn't created if a user existed already"
     (impl/test-migrations "v50.2024-05-27T15:55:22" [migrate!]
       (let [sample-content-created? #(boolean (not-empty (t2/query "SELECT * FROM report_dashboard where name = 'E-commerce insights'")))]
diff --git a/test/metabase/lib/convert_test.cljc b/test/metabase/lib/convert_test.cljc
index 1e253f206e7c0fc441e9f9fbfc156dcdaccec80c..c5b68468ebc99c47fca942d2f8fa445e941b0955 100644
--- a/test/metabase/lib/convert_test.cljc
+++ b/test/metabase/lib/convert_test.cljc
@@ -476,6 +476,123 @@
                :source-table 1}
     :type     :query}))
 
+(deftest ^:parallel unclean-stage-round-trip-test
+  (binding [lib.convert/*clean-query* false]
+    (doseq [query
+            [{:database 7
+              :type :query
+              :query {:joins [{:alias "__join"
+                               :strategy :left-join
+                               :condition [:= [:field 388 nil] 1]
+                               :source-table 44}]
+                      :source-table 43
+                      :fields [[:field 390 nil]
+                               [:field 391 nil]
+                               [:field 388 nil]
+                               [:field 392 nil]
+                               [:field 393 nil]
+                               [:field 389 nil]]}}
+             {:database 7
+              :qp/source-card-id 1
+              :info {:card-id 1}
+              :type :query
+              :query {:limit 2
+                      :fields [[:field 350 {:base-type :type/Text :join-alias "Card 2 - Category"}]
+                               [:field "count" {:base-type :type/Integer}]
+                               [:field 350 {:join-alias "Card 2 - Category"}]]
+                      :joins [{:fields [[:field 350 {:join-alias "Card 2 - Category"}]]
+                               :source-metadata [{:semantic_type :type/Category
+                                                  :table_id 45
+                                                  :name "CATEGORY"
+                                                  :field_ref [:field 350 {:base-type :type/Text}]
+                                                  :effective_type :type/Text
+                                                  :id 350
+                                                  :display_name "Category"
+                                                  :fingerprint {:global {:distinct-count 4
+                                                                         :nil% 0}
+                                                                :type {:type/Text {:percent-json 0
+                                                                                   :percent-url 0
+                                                                                   :percent-email 0
+                                                                                   :percent-state 0
+                                                                                   :average-length 6.375}}}
+                                                  :base_type :type/Text}]
+                               :alias "Card 2 - Category"
+                               :strategy :left-join
+                               :source-query/model? false
+                               :qp/stage-had-source-card 2
+                               :condition [:=
+                                           [:field "Products__CATEGORY" {:base-type :type/Text}]
+                                           [:field 350 {:base-type :type/Text, :join-alias "Card 2 - Category"}]]
+                               :source-query {:source-table 45
+                                              :breakout [[:field 350 {:base-type :type/Text}]]
+                                              :qp/stage-is-from-source-card 2
+                                              :order-by [[:asc [:field 350 {:base-type :type/Text}]]]}}]
+                      :source-query {:qp/stage-had-source-card 1
+                                     :source-query/model? false
+                                     :fields [[:field 350 {:base-type :type/Text, :join-alias "Products"}]
+                                              [:field "count" {:base-type :type/Integer}]]
+                                     :source-query {:source-table 42
+                                                    :breakout [[:field 350 {:base-type :type/Text, :join-alias "Products"}]]
+                                                    :aggregation [[:count]]
+                                                    :qp/stage-is-from-source-card 1
+                                                    :order-by [[:asc [:field 350 {:base-type :type/Text, :join-alias "Products"}]]]
+                                                    :joins [{:alias "Products"
+                                                             :strategy :left-join
+                                                             :condition [:=
+                                                                         [:field 382 {:base-type :type/Integer}]
+                                                                         [:field 351 {:base-type :type/BigInteger
+                                                                                      :join-alias "Products"}]]
+                                                             :source-table 45}
+                                                            {:alias "People - User"
+                                                             :strategy :left-join
+                                                             :condition [:=
+                                                                         [:field 381 {:base-type :type/Integer}]
+                                                                         [:field 370 {:base-type :type/BigInteger
+                                                                                      :join-alias "People - User"}]]
+                                                             :source-table 40}]}
+                                     :source-metadata [{:semantic_type :type/Category
+                                                        :table_id 45
+                                                        :name "CATEGORY"
+                                                        :field_ref [:field 350 {:base-type :type/Text, :join-alias "Products"}]
+                                                        :effective_type :type/Text
+                                                        :id 350
+                                                        :display_name "Products → Category"
+                                                        :fingerprint {:global {:distinct-count 4, :nil% 0}
+                                                                      :type {:type/Text {:percent-json 0
+                                                                                         :percent-url 0
+                                                                                         :percent-email 0
+                                                                                         :percent-state 0
+                                                                                         :average-length 6.375}}}
+                                                        :base_type :type/Text
+                                                        :source_alias "Products"}
+                                                       {:name "count"
+                                                        :display_name "Count"
+                                                        :base_type :type/Integer
+                                                        :semantic_type :type/Quantity
+                                                        :field_ref [:aggregation 0]}]}
+                      :source-metadata [{:semantic_type :type/Category
+                                         :table_id 45
+                                         :name "CATEGORY"
+                                         :field_ref [:field 350 {:base-type :type/Text
+                                                                 :join-alias "Card 2 - Category"}]
+                                         :effective_type :type/Text
+                                         :id 350
+                                         :display_name "Products → Category"
+                                         :fingerprint {:global {:distinct-count 4, :nil% 0}
+                                                       :type {:type/Text {:percent-json 0
+                                                                          :percent-url 0
+                                                                          :percent-email 0
+                                                                          :percent-state 0
+                                                                          :average-length 6.375}}}
+                                         :base_type :type/Text
+                                         :source_alias "Products"}
+                                        {:name "count"
+                                         :display_name "Count"
+                                         :base_type :type/Integer
+                                         :semantic_type :type/Quantity
+                                         :field_ref [:field "count" {:base-type :type/Integer}]}]}}]]
+      (test-round-trip query))))
+
 (deftest ^:parallel round-trip-options-test
   (testing "Round-tripping (p)MBQL caluses with options (#30280)"
     (testing "starting with pMBQL"
diff --git a/test/metabase/models/pulse_test.clj b/test/metabase/models/pulse_test.clj
index 90d8b57440c714e1c31f0d10931d4146213fad51..93e0127603b1e1d9f849209636ac654d69f80756 100644
--- a/test/metabase/models/pulse_test.clj
+++ b/test/metabase/models/pulse_test.clj
@@ -79,6 +79,7 @@
                              :include_csv        false
                              :include_xls        false
                              :format_rows        true
+                             :pivot_results      false
                              :dashboard_card_id  nil
                              :dashboard_id       nil
                              :parameter_mappings nil}]
@@ -145,6 +146,7 @@
                              :include_csv        false
                              :include_xls        false
                              :format_rows        true
+                             :pivot_results      false
                              :dashboard_card_id  nil
                              :dashboard_id       nil
                              :parameter_mappings nil}]})
@@ -236,6 +238,7 @@
                                    :include_csv        false
                                    :include_xls        false
                                    :format_rows        true
+                                   :pivot_results      false
                                    :dashboard_card_id  nil
                                    :dashboard_id       nil
                                    :parameter_mappings nil}
@@ -246,6 +249,7 @@
                                    :include_csv        false
                                    :include_xls        false
                                    :format_rows        true
+                                   :pivot_results      false
                                    :dashboard_card_id  nil
                                    :dashboard_id       nil
                                    :parameter_mappings nil}]
@@ -276,7 +280,7 @@
                          :parameters   [],
                          :channel      ["email"],
                          :schedule     ["daily"],
-                         :recipients   [[{:email       "foo@bar.com"}
+                         :recipients   [[{:email "foo@bar.com"}
                                          {:first_name  "Crowberto"
                                           :last_name   "Corv"
                                           :email       "crowberto@metabase.com"
diff --git a/test/metabase/query_processor/middleware/remove_inactive_field_refs_test.clj b/test/metabase/query_processor/middleware/remove_inactive_field_refs_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..c77f4c548e15003af75248e6af837c1336a1fa1e
--- /dev/null
+++ b/test/metabase/query_processor/middleware/remove_inactive_field_refs_test.clj
@@ -0,0 +1,155 @@
+(ns ^:mb/once metabase.query-processor.middleware.remove-inactive-field-refs-test
+  (:require
+   [clojure.test :refer :all]
+   [metabase.lib.metadata.jvm :as lib.metadata.jvm]
+   [metabase.query-processor :as qp]
+   [metabase.query-processor.store :as qp.store]
+   [metabase.test :as mt]
+   [metabase.util :as u]
+   [toucan2.core :as t2]))
+
+(deftest ^:synchronized deleted-columns-test
+  ;; It doesn't really matter which DB we test with. The test uses H2 column names.
+  (qp.store/with-metadata-provider (lib.metadata.jvm/application-database-metadata-provider (mt/id))
+    (mt/with-temp [:model/Card card0 {:dataset_query
+                                      (mt/mbql-query orders
+                                        {:joins [{:source-table $$products
+                                                  :alias "Product"
+                                                  :condition
+                                                  [:= $orders.product_id
+                                                   [:field %products.id {:join-alias "Product"}]]
+                                                  :fields :all}]})}
+                   :model/Card card1 {:dataset_query
+                                      (mt/mbql-query orders
+                                        {:fields [$id $subtotal $tax $total $created_at $quantity]
+                                         :joins [{:source-table $$products
+                                                  :alias "Product"
+                                                  :condition
+                                                  [:= $orders.product_id
+                                                   [:field %products.id {:join-alias "Product"}]]
+                                                  :fields :all}]})}
+                   :model/Card card2 {:dataset_query
+                                      (mt/mbql-query orders
+                                        {:fields [$id $subtotal $tax $total $created_at $quantity]
+                                         :joins [{:source-table $$products
+                                                  :alias "Product"
+                                                  :condition
+                                                  [:= $orders.product_id
+                                                   [:field %products.id {:join-alias "Product"}]]
+                                                  :fields
+                                                  [[:field %products.id {:join-alias "Product"}]
+                                                   [:field %products.title {:join-alias "Product"}]
+                                                   [:field %products.vendor {:join-alias "Product"}]
+                                                   [:field %products.price {:join-alias "Product"}]
+                                                   [:field %products.rating {:join-alias "Product"}]]}]})}
+                   :model/Card card3 {:dataset_query
+                                      (mt/mbql-query orders
+                                        {:source-table (str "card__" (u/the-id card2))
+                                         :fields [[:field "ID" {:base-type :type/BigInteger}]
+                                                  [:field "TAX" {:base-type :type/Float}]
+                                                  [:field "TOTAL" {:base-type :type/Float}]
+                                                  [:field "ID_2" {:base-type :type/BigInteger}]
+                                                  [:field "RATING" {:base-type :type/Float}]]
+                                         :filter [:> [:field "TOTAL" {:base-type :type/Float}] 3]})}]
+      (let [summary-query (mt/mbql-query orders
+                            {:source-table (str "card__" (u/the-id card3))
+                             :aggregation [[:sum [:field "TOTAL" {:base-type :type/Float}]]]
+                             :breakout [[:field "RATING" {:base-type :type/Float}]]})
+            join-query (mt/mbql-query orders
+                         {:source-table (mt/id :products)
+                          :joins [{:source-table (str "card__" (u/the-id card2))
+                                   :alias "Card"
+                                   :condition
+                                   [:= $products.id
+                                    [:field "ID_2" {:join-alias "Card"
+                                                    :base-type :type/BigInteger}]]
+                                   :fields
+                                   [[:field "ID_2" {:join-alias "Card"
+                                                    :base-type :type/BigInteger}]
+                                    [:field "TOTAL" {:join-alias "Card"
+                                                     :base-type :type/Float}]
+                                    [:field "TAX" {:join-alias "Card"
+                                                   :base-type :type/Float}]
+                                    [:field "VENDOR" {:join-alias "Card"
+                                                      :base-type :type/Text}]]}]})]
+        ;; running these questions before fields get removed from the database
+        (testing "Behavior before the deletion (if this changes, the other cases have to change accordingly)"
+          (doseq [[card fields] {card0 ["ID" "USER_ID" "PRODUCT_ID" "SUBTOTAL" "TAX" "TOTAL" "DISCOUNT"
+                                        "CREATED_AT" "QUANTITY"
+                                        "ID_2" "EAN" "TITLE" "CATEGORY" "VENDOR" "PRICE"
+                                        "RATING" "CREATED_AT_2"]
+                                 card1 ["ID" "SUBTOTAL" "TAX" "TOTAL" "CREATED_AT" "QUANTITY"
+                                        "ID_2" "EAN" "TITLE" "CATEGORY" "VENDOR" "PRICE"
+                                        "RATING" "CREATED_AT_2"]
+                                 card2 ["ID" "SUBTOTAL" "TAX" "TOTAL" "CREATED_AT" "QUANTITY"
+                                        "ID_2" "TITLE" "VENDOR" "PRICE" "RATING"]
+                                 card3 ["ID" "TAX" "TOTAL" "ID_2" "RATING"]}]
+            (let [query (mt/mbql-query orders
+                          {:source-table (str "card__" (u/the-id card))})]
+              (let [results (qp/process-query query)]
+                (is (=? fields
+                        (map :name (mt/cols results)))))))
+          (is (= ["Product → Rating" "Sum of Total"]
+                 (->> (mt/process-query summary-query)
+                      mt/cols
+                      (map :display_name))))
+          (is (= ["ID" "Ean" "Title" "Category" "Vendor" "Price" "Rating" "Created At"
+                  "Card → ID 2" "Card → Total" "Card → Tax" "Card → Vendor"]
+                 (->> (mt/process-query join-query)
+                      mt/cols
+                      (map :display_name)))))
+
+        ;; simulate the deletion of some fields, with sync subsequently marking them inactive
+        (let [inactive-ids [(mt/id :orders :tax) (mt/id :products :ean) (mt/id :products :vendor)]]
+          (t2/update! :model/Field :id [:in inactive-ids] {:active false})
+
+          ;; running the actual tests
+          (try
+            (let [mp (lib.metadata.jvm/application-database-metadata-provider (mt/id))]
+              (binding [qp.store/*TESTS-ONLY-allow-replacing-metadata-provider* true]
+                (qp.store/with-metadata-provider mp
+                  ;; running these questions after fields have been removed from the database
+                  ;; and the change has been detected by syncing
+                  (testing "Questions return the same columns except the ones deleted"
+                    (doseq [[card fields] {card0 ["ID" "USER_ID" "PRODUCT_ID" "SUBTOTAL" "TOTAL" "DISCOUNT"
+                                                  "CREATED_AT" "QUANTITY"
+                                                  "ID_2" "TITLE" "CATEGORY" "PRICE"
+                                                  "RATING" "CREATED_AT_2"]
+                                           card1 ["ID" "SUBTOTAL" "TOTAL" "CREATED_AT" "QUANTITY"
+                                                  "ID_2" "TITLE" "CATEGORY" "PRICE"
+                                                  "RATING" "CREATED_AT_2"]
+                                           card2 ["ID" "SUBTOTAL" "TOTAL" "CREATED_AT" "QUANTITY"
+                                                  "ID_2" "TITLE" "PRICE" "RATING"]
+                                           card3 ["ID" "TOTAL" "ID_2" "RATING"]}]
+                      (let [query (mt/mbql-query orders
+                                    {:source-table (str "card__" (u/the-id card))})]
+                        (let [results (qp/process-query query)]
+                          (is (=? fields
+                                  (map :name (mt/cols results))))))))
+                  (testing "Active columns can be used"
+                    (is (= ["Product → Rating" "Sum of Total"]
+                           (->> (mt/run-mbql-query orders
+                                  {:source-table (str "card__" (u/the-id card2))
+                                   :aggregation [[:sum [:field "TOTAL" {:base-type :type/Float}]]]
+                                   :breakout [[:field "RATING" {:base-type :type/Integer}]]})
+                                mt/cols
+                                (map :display_name)))))
+                  (testing "Using deleted columns results in an error"
+                    (is (thrown? clojure.lang.ExceptionInfo
+                                 (mt/run-mbql-query orders
+                                   {:source-table (str "card__" (u/the-id card2))
+                                    :aggregation [[:sum [:field "TAX" {:base-type :type/Float}]]]
+                                    :breakout [[:field "RATING" {:base-type :type/Integer}]]}))))
+                  (testing "Additional level of nesting is OK"
+                    (is (= ["Product → Rating" "Sum of Total"]
+                           (->> (mt/process-query summary-query)
+                                mt/cols
+                                (map :display_name))))
+                    (testing "in joins too"
+                      (is (= ["ID" "Title" "Category" "Price" "Rating" "Created At"
+                              "Card → ID 2" "Card → Total"]
+                             (->> (qp/process-query join-query)
+                                  mt/cols
+                                  (map :display_name)))))))))
+            (finally
+              (t2/update! :model/Field :id [:in inactive-ids] {:active true}))))))))
diff --git a/test/metabase/query_processor/pivot/postprocess_test.clj b/test/metabase/query_processor/pivot/postprocess_test.clj
index 2bf04cc5818aa9845421cc337a2b32359a55be41..e98e20ce3b61595b67cfa0374479cf0bc85f72e9 100644
--- a/test/metabase/query_processor/pivot/postprocess_test.clj
+++ b/test/metabase/query_processor/pivot/postprocess_test.clj
@@ -1,14 +1,7 @@
 (ns metabase.query-processor.pivot.postprocess-test
   (:require
    [clojure.test :refer :all]
-   [metabase.query-processor.pivot.postprocess :as qp.pivot.postprocess]))
-
-(def ^:private pivot-base-rows
-  (for [a ["AA" "AB" "AC" "AD"]
-        b ["BA" "BB" "BC" "BD"]
-        c ["CA" "CB" "CC" "CD"]
-        d ["DA" "DB" "DC" "DD"]]
-    [a b c d 0 1]))
+   [metabase.query-processor.pivot.postprocess :as process]))
 
 (def ^:private column-titles
   ["A" "B" "C" "D" "pivot-grouping" "MEASURE"])
@@ -20,41 +13,60 @@
 
 (deftest add-pivot-measures-test
   (testing "Given a `pivot-spec` without `:pivot-measures`, add them."
-    (is (= [5] (:pivot-measures (#'qp.pivot.postprocess/add-pivot-measures pivot-spec))))))
+    (is (= [5] (:pivot-measures (process/add-pivot-measures pivot-spec))))))
+
+(deftest pivot-aggregation-test
+  (testing "The pivot aggregation datastructure stores values as expected"
+    (let [pivot-config {:pivot-rows     [0]
+                        :pivot-cols     [1]
+                        :column-titles  ["A" "B" "pivot-grouping" "Count"]
+                        :row-totals?    true
+                        :col-totals?    true
+                        :pivot-measures [3]
+                        :pivot-grouping 2}
+          pivot        (process/init-pivot pivot-config)
+          pivot-data   (reduce process/add-row pivot [["aA" "bA" 0 1] ; add 4 rows in aA bA
+                                                      ["aA" "bA" 0 1]
+                                                      ["aA" "bA" 0 1]
+                                                      ["aA" "bA" 0 1]
+                                                      ["aA" "bB" 0 1]
+                                                      ["aA" "bC" 0 1]
+                                                      ["aA" "bD" 0 1]
+                                                      ["aB" "bA" 0 1]
+                                                      ["aB" "bB" 0 1]
+                                                      ["aB" "bC" 0 1]
+                                                      ["aB" "bD" 0 1]])]
+      (testing "data aggregation matches the input rows"
+        ;; Every row will contribute to the MEASURE somewhere, determined by
+        ;; the values in each pivot-row and pivot-col index. For example,
+        ;; given the pivot-config in this test, the row `["X" "Y" 0 1]` will end up adding
+        ;; {"X" {"Y" {3 1}}}
+        ;; the operation is essentially an update-in done per measure: the path is
+        ;; built from the row's values at the pivot-row and pivot-col indices plus
+        ;; the measure index, and the measure's value is summed into that path.
+        ;; roughly, as a sketch (not necessarily the actual implementation):
+        ;;   (update-in data (conj (mapv row (concat pivot-rows pivot-cols)) measure-idx)
+        ;;              (fnil + 0)
+        ;;              (row measure-idx))
+        (is (= {"aA" {"bA" {3 4} "bB" {3 1} "bC" {3 1} "bD" {3 1}}
+                "aB" {"bA" {3 1} "bB" {3 1} "bC" {3 1} "bD" {3 1}}}
+               (:data pivot-data)))
 
-(deftest all-values-for-test
-  (testing "The `all-values-for` function correctly finds the values for the given column idx"
-    (doseq [[idx include-nil? expected-values] [[0 true  ["AA" "AB" "AC" "AD" nil]]
-                                                [1 false ["BA" "BB" "BC" "BD"]]
-                                                [2 true  ["CA" "CB" "CC" "CD" nil]]
-                                                [3 false ["DA" "DB" "DC" "DD"]]
-                                                [5 false [1]]]]
-      (testing (format "Column index %s has correct expected values." idx)
-        (is (= expected-values
-               (#'qp.pivot.postprocess/all-values-for pivot-base-rows idx include-nil?)))))))
+        ;; all distinct values encountered for each row/col index are stored;
+        ;; this is necessary to construct the headers as well as the combinations
+        ;; that make up the 'paths' to get measure values for every cell in the pivot table
+        (is (= {:row-values {0 #{"aA" "aB"}}
+                :column-values {1 #{"bA" "bB" "bC" "bD"}}}
+               (select-keys pivot-data [:row-values :column-values])))
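+        ;; a sketch (not the actual header-building code) of why those value sets
+        ;; matter: crossing every stored row value with every stored column value
+        ;; yields the lookup path for each measure cell of the table, e.g. for
+        ;; measure index 3:
+        ;;   (for [r ["aA" "aB"], c ["bA" "bB" "bC" "bD"]]
+        ;;     (get-in (:data pivot-data) [r c 3]))
+        ;;   ;; => (4 1 1 1 1 1 1 1)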
 
-(deftest header-builder-test
-  (testing "The `header-builder` function returns the correctly formed header(s)."
-    ;; Title from Pivot Rows, then values from first pivot-cols, then 'Row Totals'
-    (is (= [["C" "AA" "AB" "AC" "AD" "Row totals"]]
-           (#'qp.pivot.postprocess/header-builder pivot-base-rows (merge
-                                                                   pivot-spec
-                                                                   {:pivot-cols [0]
-                                                                    :pivot-rows [2]}))))
-    (is (= [["C" "D" "BA" "BB" "BC" "BD" "Row totals"]]
-           (#'qp.pivot.postprocess/header-builder pivot-base-rows (merge
-                                                                   pivot-spec
-                                                                   {:pivot-cols [1]
-                                                                    :pivot-rows [2 3]}))))
-    (is (= [["C" "AA" "AA" "AA" "AA" "AB" "AB" "AB" "AB" "AC" "AC" "AC" "AC" "AD" "AD" "AD" "AD" "Row totals"]
-            ["C" "BA" "BB" "BC" "BD" "BA" "BB" "BC" "BD" "BA" "BB" "BC" "BD" "BA" "BB" "BC" "BD" "Row totals"]]
-           (#'qp.pivot.postprocess/header-builder pivot-base-rows (merge
-                                                                   pivot-spec
-                                                                   {:pivot-cols [0 1]
-                                                                    :pivot-rows [2]}))))
-    (is (= [["C" "D" "AA" "AA" "AA" "AA" "AB" "AB" "AB" "AB" "AC" "AC" "AC" "AC" "AD" "AD" "AD" "AD" "Row totals"]
-            ["C" "D" "BA" "BB" "BC" "BD" "BA" "BB" "BC" "BD" "BA" "BB" "BC" "BD" "BA" "BB" "BC" "BD" "Row totals"]]
-           (#'qp.pivot.postprocess/header-builder pivot-base-rows (merge
-                                                                   pivot-spec
-                                                                   {:pivot-cols [0 1]
-                                                                    :pivot-rows [2 3]}))))))
+        ;; since everything is aggregated as a row is added, we can store all of the
+        ;; relevant totals right away and use them to construct the pivot table totals
+        ;; if the user has them enabled (they're on by default and likely to be on most of the time)
+        (is (= {:grand-total {3 11}
+                :section-totals {"aA" {3 7} "aB" {3 4}} ;; section refers to the 'row totals' interspersed between each row-wise group
+                :column-totals {"aA" {"bA" {3 4} "bB" {3 1} "bC" {3 1} "bD" {3 1}}
+                                "aB" {"bA" {3 1} "bB" {3 1} "bC" {3 1} "bD" {3 1}}}
+                "aA" {3 7}
+                "aB" {3 4}
+                "bC" {3 2}
+                "bB" {3 2}
+                "bD" {3 2}
+                "bA" {3 5}}
+               (:totals pivot-data)))))))
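+
+;; a rich-comment sketch of how the aggregated totals above could be read back;
+;; the paths follow directly from the structure asserted in `pivot-aggregation-test`
+;; and are not a claim about the internals of the postprocess namespace:
+(comment
+  (let [totals {:grand-total    {3 11}
+                :section-totals {"aA" {3 7} "aB" {3 4}}}]
+    [(get-in totals [:grand-total 3])            ;; => 11, grand total for the measure
+     (get-in totals [:section-totals "aA" 3])    ;; => 7, section total for the "aA" row group
+     (get-in totals [:section-totals "aB" 3])])) ;; => 4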
diff --git a/test/metabase/query_processor/streaming/xlsx_test.clj b/test/metabase/query_processor/streaming/xlsx_test.clj
index 8382fd9c0409b9019c01fa04736d1874e268d6a2..227e375c891d44eb0c3b8a51a1981c958d782d50 100644
--- a/test/metabase/query_processor/streaming/xlsx_test.clj
+++ b/test/metabase/query_processor/streaming/xlsx_test.clj
@@ -28,7 +28,7 @@
    (let [viz-settings (common/viz-settings-for-col
                        (assoc col :field_ref [:field 1])
                        {::mb.viz/column-settings {{::mb.viz/field-id 1} format-settings}})
-         format-strings (@#'qp.xlsx/format-settings->format-strings viz-settings col)]
+         format-strings (@#'qp.xlsx/format-settings->format-strings viz-settings col true)]
      ;; If only one format string is returned (for datetimes) or both format strings
      ;; are equal, just return a single value to make tests more readable.
      (cond
diff --git a/test/metabase/query_processor/streaming_test.clj b/test/metabase/query_processor/streaming_test.clj
index ecc8e3339b1e8e0e624f20cc677cd709ef2b9d37..b762df6b4910176c6183523efe341543ea7ff978 100644
--- a/test/metabase/query_processor/streaming_test.clj
+++ b/test/metabase/query_processor/streaming_test.clj
@@ -333,6 +333,7 @@
                   (let [results (mt/user-http-request user :post 200
                                                       (format "dataset/%s" (name export-format))
                                                       {:request-options {:as (if (= export-format :xlsx) :byte-array :string)}}
+                                                      :format_rows true
                                                       :query query-json
                                                       :visualization_settings viz-settings-json)]
                     ((-> assertions export-format) results))
@@ -340,7 +341,8 @@
                   :card
                   (let [results (mt/user-http-request user :post 200
                                                       (format "card/%d/query/%s" (u/the-id card) (name export-format))
-                                                      {:request-options {:as (if (= export-format :xlsx) :byte-array :string)}})]
+                                                      {:request-options {:as (if (= export-format :xlsx) :byte-array :string)}}
+                                                      :format_rows true)]
                     ((-> assertions export-format) results))
 
                   :dashboard
@@ -350,12 +352,13 @@
                                                               (u/the-id dashcard)
                                                               (u/the-id card)
                                                               (name export-format))
-                                                      {:request-options {:as (if (= export-format :xlsx) :byte-array :string)}})]
+                                                      {:request-options {:as (if (= export-format :xlsx) :byte-array :string)}}
+                                                      :format_rows true)]
                     ((-> assertions export-format) results))
 
                   :public
                   (let [results (mt/user-http-request user :get 200
-                                                      (format "public/card/%s/query/%s" public-uuid (name export-format))
+                                                      (format "public/card/%s/query/%s?format_rows=true" public-uuid (name export-format))
                                                       {:request-options {:as (if (= export-format :xlsx) :byte-array :string)}})]
                     ((-> assertions export-format) results))
 
diff --git a/yarn.lock b/yarn.lock
index 223820daa33e2967fcb63374ac2cd18096637a69..658c3cc893d669fbfe6d37057487e1b3dba5c27d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9185,10 +9185,10 @@ cookie-signature@1.0.6:
   resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
   integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
 
-cookie@0.6.0:
-  version "0.6.0"
-  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
-  integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
+cookie@0.6.0, cookie@^0.7.0:
+  version "0.7.2"
+  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.2.tgz#556369c472a2ba910f2979891b526b3436237ed7"
+  integrity sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==
 
 copy-concurrently@^1.0.0:
   version "1.0.5"