diff --git a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js index 6e251a792633f75938f43ca755b0ff5a23be2665..04f64ee530981c5cf871cee50ee1367d33d9d471 100644 --- a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js +++ b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js @@ -635,12 +635,24 @@ export default class StructuredQuery extends AtomicQuery { return this._updateQuery(Q.removeExpression, arguments); } - // FIELD OPTIONS + // FIELDS + /** + * Returns dimension options that can appear in the `fields` clause + */ + fieldsOptions(dimensionFilter = () => true): DimensionOptions { + if (this.isBareRows() && this.breakouts().length === 0) { + return this.dimensionOptions(dimensionFilter); + } + // TODO: allow adding fields connected by broken out PKs? + return { count: 0, dimensions: [], fks: [] }; + } + + // DIMENSION OPTIONS // TODO Atte Keinänen 6/18/17: Refactor to dimensionOptions which takes a dimensionFilter // See aggregationFieldOptions for an explanation why that covers more use cases - fieldOptions(fieldFilter = () => true): DimensionOptions { - const fieldOptions = { + dimensionOptions(dimensionFilter = () => true): DimensionOptions { + const dimensionOptions = { count: 0, fks: [], dimensions: [], @@ -648,11 +660,6 @@ export default class StructuredQuery extends AtomicQuery { const table = this.tableMetadata(); if (table) { - const dimensionFilter = dimension => { - const field = dimension.field && dimension.field(); - return !field || (field.isDimension() && fieldFilter(field)); - }; - const dimensionIsFKReference = dimension => dimension.field && dimension.field() && dimension.field().isFK(); @@ -660,8 +667,8 @@ export default class StructuredQuery extends AtomicQuery { // .filter(d => !dimensionIsFKReference(d)); for (const dimension of filteredNonFKDimensions) { - fieldOptions.count++; - fieldOptions.dimensions.push(dimension); + dimensionOptions.count++; + 
dimensionOptions.dimensions.push(dimension); } const fkDimensions = this.dimensions().filter(dimensionIsFKReference); @@ -671,8 +678,8 @@ export default class StructuredQuery extends AtomicQuery { .filter(dimensionFilter); if (fkDimensions.length > 0) { - fieldOptions.count += fkDimensions.length; - fieldOptions.fks.push({ + dimensionOptions.count += fkDimensions.length; + dimensionOptions.fks.push({ field: dimension.field(), dimension: dimension, dimensions: fkDimensions, @@ -681,7 +688,17 @@ export default class StructuredQuery extends AtomicQuery { } } - return fieldOptions; + return dimensionOptions; + } + + // FIELD OPTIONS + + fieldOptions(fieldFilter = () => true) { + const dimensionFilter = dimension => { + const field = dimension.field && dimension.field(); + return !field || (field.isDimension() && fieldFilter(field)); + }; + return this.dimensionOptions(dimensionFilter); } // DIMENSIONS diff --git a/frontend/src/metabase/App.jsx b/frontend/src/metabase/App.jsx index 768127b06f684e1ae744bc3712ba8737cfc97305..dc0bd741b94c9ba9997599d1edd21917e7e2055d 100644 --- a/frontend/src/metabase/App.jsx +++ b/frontend/src/metabase/App.jsx @@ -2,7 +2,7 @@ import React, { Component } from "react"; import { connect } from "react-redux"; - +import ScrollToTop from "metabase/hoc/ScrollToTop"; import Navbar from "metabase/nav/containers/Navbar.jsx"; import UndoListing from "metabase/containers/UndoListing"; @@ -57,11 +57,13 @@ export default class App extends Component { } return ( - <div className="relative"> - <Navbar location={location} /> - {errorPage ? getErrorComponent(errorPage) : children} - <UndoListing /> - </div> + <ScrollToTop> + <div className="relative"> + <Navbar location={location} /> + {errorPage ? 
getErrorComponent(errorPage) : children} + <UndoListing /> + </div> + </ScrollToTop> ); } } diff --git a/frontend/src/metabase/components/CollectionLanding.jsx b/frontend/src/metabase/components/CollectionLanding.jsx index 4410acdbf5ee94d574d3b1fbce7b84855833d2ce..764e62227181b6367c0eb327a0cf6f090d0940c7 100644 --- a/frontend/src/metabase/components/CollectionLanding.jsx +++ b/frontend/src/metabase/components/CollectionLanding.jsx @@ -282,68 +282,70 @@ class DefaultLanding extends React.Component { /> </Box> </GridItem> - <GridItem w={itemWidth}> - <Box> - <ItemTypeFilterBar /> - <Card mt={1} className="relative"> - {unpinnedItems.length > 0 ? ( - <PinDropTarget pinIndex={null} margin={8}> - <Box - style={{ - position: "relative", - height: ROW_HEIGHT * unpinnedItems.length, - }} - > - <VirtualizedList - items={unpinnedItems} - rowHeight={ROW_HEIGHT} - renderItem={({ item, index }) => ( - <Box className="relative"> - <ItemDragSource - item={item} - selection={selection} - > - <NormalItem - key={`${item.type}:${item.id}`} + {unpinned.length > 0 && ( + <GridItem w={itemWidth}> + <Box> + <ItemTypeFilterBar /> + <Card mt={1} className="relative"> + {unpinnedItems.length > 0 ? ( + <PinDropTarget pinIndex={null} margin={8}> + <Box + style={{ + position: "relative", + height: ROW_HEIGHT * unpinnedItems.length, + }} + > + <VirtualizedList + items={unpinnedItems} + rowHeight={ROW_HEIGHT} + renderItem={({ item, index }) => ( + <Box className="relative"> + <ItemDragSource item={item} - collection={collection} selection={selection} - onToggleSelected={onToggleSelected} - onMove={moveItems => - this.setState({ moveItems }) - } - /> - </ItemDragSource> - </Box> - )} - /> - </Box> - </PinDropTarget> - ) : ( - <Box> - {location.query.type && - EMPTY_STATES[location.query.type]} - <PinDropTarget - pinIndex={null} - hideUntilDrag - margin={10} - > - {({ hovered }) => ( - <div - className={cx( - "m2 flex layout-centered", - hovered ? 
"text-brand" : "text-grey-2", + > + <NormalItem + key={`${item.type}:${item.id}`} + item={item} + collection={collection} + selection={selection} + onToggleSelected={onToggleSelected} + onMove={moveItems => + this.setState({ moveItems }) + } + /> + </ItemDragSource> + </Box> )} - > - {t`Drag here to un-pin`} - </div> - )} + /> + </Box> </PinDropTarget> - </Box> - )} - </Card> - </Box> - </GridItem> + ) : ( + <Box> + {location.query.type && + EMPTY_STATES[location.query.type]} + <PinDropTarget + pinIndex={null} + hideUntilDrag + margin={10} + > + {({ hovered }) => ( + <div + className={cx( + "m2 flex layout-centered", + hovered ? "text-brand" : "text-grey-2", + )} + > + {t`Drag here to un-pin`} + </div> + )} + </PinDropTarget> + </Box> + )} + </Card> + </Box> + </GridItem> + )} </Grid> </Box> </Box> diff --git a/frontend/src/metabase/components/CollectionList.jsx b/frontend/src/metabase/components/CollectionList.jsx index 71f439d520a2d5ff9c9ceb07812c779d4a7246ea..587dea43b053e0ee58b4039a5132674fe056b691 100644 --- a/frontend/src/metabase/components/CollectionList.jsx +++ b/frontend/src/metabase/components/CollectionList.jsx @@ -75,6 +75,7 @@ class CollectionList extends React.Component { to={Urls.newCollection(currentCollection.id)} color={normal.grey2} hover={{ color: normal.blue }} + p={w === 1 ? 
[1, 2] : 0} > <Flex align="center" py={1}> <Icon name="add" mr={1} bordered /> diff --git a/frontend/src/metabase/components/Header.jsx b/frontend/src/metabase/components/Header.jsx index c062d59fc9d7792866f388a3414e2a2082aae627..92c2fbd991e4e048b0fcb2434b9144c2aaf1d2c7 100644 --- a/frontend/src/metabase/components/Header.jsx +++ b/frontend/src/metabase/components/Header.jsx @@ -1,6 +1,7 @@ import React, { Component } from "react"; import ReactDOM from "react-dom"; +import CollectionBadge from "metabase/questions/components/CollectionBadge"; import InputBlurChange from "metabase/components/InputBlurChange.jsx"; import HeaderModal from "metabase/components/HeaderModal.jsx"; import TitleAndDescription from "metabase/components/TitleAndDescription.jsx"; @@ -77,6 +78,7 @@ export default class Header extends Component { } render() { + const { item } = this.props; let titleAndDescription; if (this.props.isEditingInfo) { titleAndDescription = ( @@ -154,9 +156,12 @@ export default class Header extends Component { } ref="header" > - <div className="Entity py3"> + <div className="Entity py3 mb1"> {titleAndDescription} {attribution} + {!this.props.isEditingInfo && ( + <CollectionBadge collectionId={item.collection_id} /> + )} </div> <div className="flex align-center flex-align-right"> diff --git a/frontend/src/metabase/components/TitleAndDescription.jsx b/frontend/src/metabase/components/TitleAndDescription.jsx index 277447ad8fc411c88817698c7909478e2cf3a675..729362e1522ef78ec26a93499ebf6448d53cc42c 100644 --- a/frontend/src/metabase/components/TitleAndDescription.jsx +++ b/frontend/src/metabase/components/TitleAndDescription.jsx @@ -13,7 +13,7 @@ type Attributes = { }; const TitleAndDescription = ({ title, description, className }: Attributes) => ( <div className={cx("flex align-center", className)}> - <h2 className="mr1">{title}</h2> + <h2 className="h2 mr1">{title}</h2> {description && ( <Tooltip tooltip={description} maxWidth={"22em"}> <Icon name="info" style={{ marginTop: 
3 }} /> diff --git a/frontend/src/metabase/containers/Overworld.jsx b/frontend/src/metabase/containers/Overworld.jsx index 61eb23bf7497484a49b2ce93e69c3af77db80a11..5c15377586aaea10fb59015d5b89ff337bd7a29c 100644 --- a/frontend/src/metabase/containers/Overworld.jsx +++ b/frontend/src/metabase/containers/Overworld.jsx @@ -62,7 +62,7 @@ class Overworld extends React.Component { <MetabotLogo /> <Box ml={2}> <Subhead>{Greeting.sayHello(this.props.user.first_name)}</Subhead> - <p className="text-paragraph m0 text-grey-3">{t`Don't tell anyone but you're my favorite`}</p> + <p className="text-paragraph m0 text-grey-3">{t`Don't tell anyone, but you're my favorite.`}</p> </Box> </Flex> <CollectionItemsLoader collectionId="root"> @@ -177,7 +177,7 @@ class Overworld extends React.Component { <Box p={3} bg={colors["bg-medium"]}> <Icon name="database" - color={normal.green} + color={normal.purple} mb={3} size={28} /> diff --git a/frontend/src/metabase/containers/UndoListing.jsx b/frontend/src/metabase/containers/UndoListing.jsx index 75a01cbeb1e713f252f0fce93c3abef994812c9a..2506f9a4315da558515871a39acb9c951b85a029 100644 --- a/frontend/src/metabase/containers/UndoListing.jsx +++ b/frontend/src/metabase/containers/UndoListing.jsx @@ -59,6 +59,11 @@ export default class UndoListing extends Component { {undos.map(undo => ( <Card key={undo._domId} dark p={2} mt={1}> <Flex align="center"> + <Icon + name={(undo.icon && undo.icon) || "check"} + color="white" + mr={1} + /> {typeof undo.message === "function" ? ( undo.message(undo) ) : undo.message ? 
( @@ -72,6 +77,7 @@ export default class UndoListing extends Component { <Link ml={1} onClick={() => performUndo(undo.id)} + className="link text-bold" >{t`Undo`}</Link> )} <Icon diff --git a/frontend/src/metabase/hoc/ScrollToTop.js b/frontend/src/metabase/hoc/ScrollToTop.js new file mode 100644 index 0000000000000000000000000000000000000000..dacffd445ff436f416a28d9d5133b0b08195781e --- /dev/null +++ b/frontend/src/metabase/hoc/ScrollToTop.js @@ -0,0 +1,18 @@ +import React from "react"; +import { withRouter } from "react-router"; + +@withRouter +class ScrollToTop extends React.Component { + componentDidUpdate(prevProps) { + // Compare location.pathame to see if we're on a different URL. Do this to ensure + // that query strings don't cause a scroll to the top + if (this.props.location.pathname !== prevProps.location.pathname) { + window.scrollTo(0, 0); + } + } + render() { + return this.props.children; + } +} + +export default ScrollToTop; diff --git a/frontend/src/metabase/home/containers/SearchApp.jsx b/frontend/src/metabase/home/containers/SearchApp.jsx index e02312b06468a24c2d19965dc53e72da042e565f..00607785497bcc7f88510368dd2e7b65f22fa451 100644 --- a/frontend/src/metabase/home/containers/SearchApp.jsx +++ b/frontend/src/metabase/home/containers/SearchApp.jsx @@ -27,7 +27,7 @@ export default class SearchApp extends React.Component { </Flex> <ItemTypeFilterBar filters={FILTERS.concat({ - name: t`'Collections`, + name: t`Collections`, filter: "collection", icon: "all", })} diff --git a/frontend/src/metabase/lib/dataset.js b/frontend/src/metabase/lib/dataset.js index 02e819c304123f3eef00b43b22a83ac45869e8e2..51ae58f0c5d83d763161a5f21c21109f6e13ba7d 100644 --- a/frontend/src/metabase/lib/dataset.js +++ b/frontend/src/metabase/lib/dataset.js @@ -1,6 +1,21 @@ +/* @flow */ + import _ from "underscore"; -import type { Value, Column, DatasetData } from "metabase/meta/types/Dataset"; +import type { + Value, + Column, + ColumnName, + DatasetData, +} from 
"metabase/meta/types/Dataset"; +import type { Card } from "metabase/meta/types/Card"; +import type { ConcreteField } from "metabase/meta/types/Query"; + +type ColumnSetting = { + name: ColumnName, + fieldRef?: ConcreteField, + enabled: boolean, +}; // Many aggregations result in [[null]] if there are no rows to aggregate after filters export const datasetContainsNoResults = (data: DatasetData): boolean => @@ -11,9 +26,109 @@ export const datasetContainsNoResults = (data: DatasetData): boolean => */ export const rangeForValue = ( value: Value, - column: Column, + column: ?Column, ): ?[number, number] => { - if (column && column.binning_info && column.binning_info.bin_width) { + if ( + typeof value === "number" && + column && + column.binning_info && + column.binning_info.bin_width + ) { return [value, value + column.binning_info.bin_width]; } }; + +/** + * Returns a MBQL field reference (ConcreteField) for a given result dataset column + * @param {Column} column Dataset result column + * @return {?ConcreteField} MBQL field reference + */ +export function fieldRefForColumn(column: Column): ?ConcreteField { + if (column.id != null) { + if (Array.isArray(column.id)) { + // $FlowFixMe: sometimes col.id is a field reference (e.x. 
nested queries), if so just return it + return column.id; + } else if (column.fk_field_id != null) { + return ["fk->", column.fk_field_id, column.id]; + } else { + return ["field-id", column.id]; + } + } else if (column["expression-name"] != null) { + return ["expression", column["expression-name"]]; + } else { + return null; + } +} + +/** + * Finds the column object from the dataset results for the given `table.columns` column setting + * @param {Column[]} columns Dataset results columns + * @param {ColumnSetting} columnSetting A "column setting" from the `table.columns` settings + * @return {?Column} A result column + */ +export function findColumnForColumnSetting( + columns: Column[], + columnSetting: ColumnSetting, +): ?Column { + const index = findColumnIndexForColumnSetting(columns, columnSetting); + if (index >= 0) { + return columns[index]; + } else { + return null; + } +} + +export function findColumnIndexForColumnSetting( + columns: Column[], + columnSetting: ColumnSetting, +): number { + const { fieldRef } = columnSetting; + // first try to find by fieldRef + if (fieldRef != null) { + const index = _.findIndex(columns, col => + _.isEqual(fieldRef, fieldRefForColumn(col)), + ); + if (index >= 0) { + return index; + } + } + // if that fails, find by column name + return _.findIndex(columns, col => col.name === columnSetting.name); +} + +/** + * Synchronizes the "table.columns" visualization setting to the structured + * query's `fields` + * @param {[type]} card Card to synchronize `fields`. 
Mutates value + * @param {[type]} cols Columns in last run results + */ +export function syncQueryFields(card: Card, cols: Column[]): void { + if ( + card.dataset_query.type === "query" && + card.visualization_settings["table.columns"] + ) { + const visibleColumns = card.visualization_settings["table.columns"] + .filter(columnSetting => columnSetting.enabled) + .map(columnSetting => findColumnForColumnSetting(cols, columnSetting)); + const fields = visibleColumns + .map(column => column && fieldRefForColumn(column)) + .filter(field => field); + if (!_.isEqual(card.dataset_query.query.fields, fields)) { + console.log("fields actual", card.dataset_query.query.fields); + console.log("fields expected", fields); + card.dataset_query.query.fields = fields; + } + } +} + +export function getExistingFields(card: Card, cols: Column[]): ConcreteField[] { + const query = card.dataset_query.query; + if (query.fields && query.fields > 0) { + return query.fields; + } else if (!query.aggregation && !query.breakout) { + // $FlowFixMe: + return cols.map(col => fieldRefForColumn(col)).filter(id => id != null); + } else { + return []; + } +} diff --git a/frontend/src/metabase/lib/query/query.js b/frontend/src/metabase/lib/query/query.js index ee356c62bb8fd641292e4ed17bf0015aef6f0d9e..569a01173ba2050273e2e31672f7daecc20be755 100644 --- a/frontend/src/metabase/lib/query/query.js +++ b/frontend/src/metabase/lib/query/query.js @@ -14,6 +14,7 @@ import type { ExpressionClause, ExpressionName, Expression, + FieldsClause, } from "metabase/meta/types/Query"; import type { TableMetadata } from "metabase/meta/types/Metadata"; @@ -94,6 +95,9 @@ export const removeOrderBy = (query: SQ, index: number) => export const clearOrderBy = (query: SQ) => setOrderByClause(query, O.clearOrderBy(query.order_by)); +// FIELD +export const clearFields = (query: SQ) => setFieldsClause(query, null); + // LIMIT export const getLimit = (query: SQ) => L.getLimit(query.limit); @@ -140,13 +144,14 @@ function 
setAggregationClause( ): SQ { let wasBareRows = A.isBareRows(query.aggregation); let isBareRows = A.isBareRows(aggregationClause); - // when switching to or from bare rows clear out any sorting clauses + // when switching to or from bare rows clear out any sorting and fields clauses if (isBareRows !== wasBareRows) { - clearOrderBy(query); + query = clearFields(query); + query = clearOrderBy(query); } // for bare rows we always clear out any dimensions because they don't make sense if (isBareRows) { - clearBreakouts(query); + query = clearBreakouts(query); } return setClause("aggregation", query, aggregationClause); } @@ -158,6 +163,8 @@ function setBreakoutClause(query: SQ, breakoutClause: ?BreakoutClause): SQ { query = removeOrderBy(query, index); } } + // clear fields when changing breakouts + query = clearFields(query); return setClause("breakout", query, breakoutClause); } function setFilterClause(query: SQ, filterClause: ?FilterClause): SQ { @@ -166,6 +173,9 @@ function setFilterClause(query: SQ, filterClause: ?FilterClause): SQ { function setOrderByClause(query: SQ, orderByClause: ?OrderByClause): SQ { return setClause("order_by", query, orderByClause); } +function setFieldsClause(query: SQ, fieldsClause: ?FieldsClause): SQ { + return setClause("fields", query, fieldsClause); +} function setLimitClause(query: SQ, limitClause: ?LimitClause): SQ { return setClause("limit", query, limitClause); } @@ -185,7 +195,9 @@ type FilterClauseName = | "breakout" | "order_by" | "limit" - | "expressions"; + | "expressions" + | "fields"; + function setClause(clauseName: FilterClauseName, query: SQ, clause: ?any): SQ { query = { ...query }; if (clause == null) { diff --git a/frontend/src/metabase/meta/types/Dataset.js b/frontend/src/metabase/meta/types/Dataset.js index 48c73d4d326c0a7a6d587b6d6aba858c04c02f10..dc09def520d990d743be807b4ca4a288f71a9db9 100644 --- a/frontend/src/metabase/meta/types/Dataset.js +++ b/frontend/src/metabase/meta/types/Dataset.js @@ -21,6 +21,8 @@ 
export type Column = { source?: "fields" | "aggregation" | "breakout", unit?: DatetimeUnit, binning_info?: BinningInfo, + fk_field_id?: FieldId, + "expression-name"?: any, }; export type Value = string | number | ISO8601Time | boolean | null | {}; diff --git a/frontend/src/metabase/meta/types/Query.js b/frontend/src/metabase/meta/types/Query.js index abf0897ab6d30b3f6e480b67f203f64e25b4c517..e73aa57328916122f2364ee807aa104689fb0988 100644 --- a/frontend/src/metabase/meta/types/Query.js +++ b/frontend/src/metabase/meta/types/Query.js @@ -271,4 +271,4 @@ export type Expression = [ export type ExpressionOperator = "+" | "-" | "*" | "/"; export type ExpressionOperand = ConcreteField | NumericLiteral | Expression; -export type FieldsClause = Field[]; +export type FieldsClause = ConcreteField[]; diff --git a/frontend/src/metabase/nav/components/ProfileLink.jsx b/frontend/src/metabase/nav/components/ProfileLink.jsx index a216e5f955e4c85d63aa3671c5849f70c51465d3..81c71d25c45f57ce8afce9b3b3bf2a5a8b1e3da9 100644 --- a/frontend/src/metabase/nav/components/ProfileLink.jsx +++ b/frontend/src/metabase/nav/components/ProfileLink.jsx @@ -14,82 +14,68 @@ import Logs from "metabase/components/Logs"; import LogoIcon from "metabase/components/LogoIcon"; import EntityMenu from "metabase/components/EntityMenu"; +// generate the proper set of list items for the current user +// based on whether they're an admin or not export default class ProfileLink extends Component { - constructor(props, context) { - super(props, context); - - this.state = { - dropdownOpen: false, - modalOpen: null, - }; - - _.bindAll( - this, - "toggleDropdown", - "closeDropdown", - "openModal", - "closeModal", - ); - } + state = { + dropdownOpen: false, + }; static propTypes = { user: PropTypes.object.isRequired, context: PropTypes.string.isRequired, }; - toggleDropdown() { - this.setState({ dropdownOpen: !this.state.dropdownOpen }); - } - - closeDropdown() { - this.setState({ dropdownOpen: false }); - } - - 
openModal(modalName) { + openModal = modalName => { this.setState({ dropdownOpen: false, modalOpen: modalName }); - } + }; - closeModal() { + closeModal = () => { this.setState({ modalOpen: null }); - } + }; + + generateOptionsForUser = () => { + const admin = this.props.user.is_superuser; + const adminContext = this.props.context === "admin"; + return [ + { + title: t`Account settings`, + icon: null, + link: Urls.accountSettings(), + }, + ...(admin && [ + { + title: adminContext ? t`Exit admin` : t`Admin`, + icon: null, + link: adminContext ? "/" : "/admin", + }, + ]), + ...(admin && [ + { + title: t`Logs`, + icon: null, + action: () => this.openModal("logs"), + }, + ]), + { + title: t`About Metabase`, + icon: null, + action: () => this.openModal("about"), + }, + { + title: t`Sign out`, + icon: null, + link: "auth/logout", + }, + ]; + }; render() { - const { context } = this.props; const { modalOpen } = this.state; const { tag, date, ...versionExtra } = MetabaseSettings.get("version"); - const admin = context === "admin"; return ( <Box> - <EntityMenu - items={[ - { - title: t`Account settings`, - icon: null, - link: Urls.accountSettings(), - }, - { - title: admin ? t`Exit admin` : t`Admin`, - icon: null, - link: admin ? "/" : "/admin", - }, - { - title: t`Logs`, - icon: null, - action: () => this.openModal("logs"), - }, - { - title: t`About Metabase`, - icon: null, - action: () => this.openModal("about"), - }, - { - title: t`Sign out`, - icon: null, - link: "auth/logout", - }, - ]} - triggerIcon="gear" - /> + <EntityMenu items={this.generateOptionsForUser()} triggerIcon="gear" /> {modalOpen === "about" ? 
( <Modal small onClose={this.closeModal}> <div className="px4 pt4 pb2 text-centered relative"> diff --git a/frontend/src/metabase/nav/containers/Navbar.jsx b/frontend/src/metabase/nav/containers/Navbar.jsx index cb52ea0b82e5bd9f9e2d12f1000a3b0f654faf51..32c748ccadf9266cdc86887640c786afad77429d 100644 --- a/frontend/src/metabase/nav/containers/Navbar.jsx +++ b/frontend/src/metabase/nav/containers/Navbar.jsx @@ -125,7 +125,7 @@ class SearchBar extends React.Component { pr={2} pl={1} value={this.state.searchText} - placeholder="Search for anything..." + placeholder="Search…" onClick={() => this.setState({ active: true })} onChange={e => this.setState({ searchText: e.target.value })} onKeyPress={e => { diff --git a/frontend/src/metabase/query_builder/actions.js b/frontend/src/metabase/query_builder/actions.js index df318d9759a3a6a077d8f36005835751a95188aa..f73f547a91d454f249e4675c4a5c3127ede8660e 100644 --- a/frontend/src/metabase/query_builder/actions.js +++ b/frontend/src/metabase/query_builder/actions.js @@ -24,6 +24,7 @@ import { } from "metabase/lib/card"; import { formatSQL } from "metabase/lib/formatting"; import Query, { createQuery } from "metabase/lib/query"; +import { syncQueryFields, getExistingFields } from "metabase/lib/dataset"; import { isPK } from "metabase/lib/types"; import Utils from "metabase/lib/utils"; import { getEngineNativeType, formatJsonQuery } from "metabase/lib/engine"; @@ -521,7 +522,13 @@ export const loadDatabaseFields = createThunkAction( }, ); -function updateVisualizationSettings(card, isEditing, display, vizSettings) { +function updateVisualizationSettings( + card, + isEditing, + display, + vizSettings, + result, +) { // don't need to store undefined vizSettings = Utils.copy(vizSettings); for (const name in vizSettings) { @@ -550,6 +557,10 @@ function updateVisualizationSettings(card, isEditing, display, vizSettings) { updatedCard.display = display; updatedCard.visualization_settings = vizSettings; + if (result && result.data && 
result.data.cols) { + syncQueryFields(updatedCard, result.data.cols); + } + return updatedCard; } @@ -570,6 +581,7 @@ export const setCardVisualization = createThunkAction( uiControls.isEditing, display, card.visualization_settings, + getFirstQueryResult(getState()), ); dispatch(updateUrl(updatedCard, { dirty: true })); return updatedCard; @@ -589,6 +601,7 @@ export const updateCardVisualizationSettings = createThunkAction( uiControls.isEditing, card.display, { ...card.visualization_settings, ...settings }, + getFirstQueryResult(getState()), ); dispatch(updateUrl(updatedCard, { dirty: true })); return updatedCard; @@ -608,6 +621,7 @@ export const replaceAllCardVisualizationSettings = createThunkAction( uiControls.isEditing, card.display, settings, + getFirstQueryResult(getState()), ); dispatch(updateUrl(updatedCard, { dirty: true })); return updatedCard; @@ -1425,6 +1439,35 @@ export const loadObjectDetailFKReferences = createThunkAction( }, ); +const ADD_FIELD = "metabase/qb/ADD_FIELD"; +export const addField = createThunkAction( + ADD_FIELD, + (field, run = true) => (dispatch, getState) => { + const { qb: { card } } = getState(); + const queryResult = getFirstQueryResult(getState()); + if ( + card.dataset_query.type === "query" && + queryResult && + queryResult.data + ) { + dispatch( + setDatasetQuery( + { + ...card.dataset_query, + query: { + ...card.dataset_query.query, + fields: getExistingFields(card, queryResult.data.cols).concat([ + field, + ]), + }, + }, + true, + ), + ); + } + }, +); + // DEPRECATED: use metabase/entities/questions export const ARCHIVE_QUESTION = "metabase/qb/ARCHIVE_QUESTION"; export const archiveQuestion = createThunkAction( diff --git a/frontend/src/metabase/query_builder/components/QueryHeader.jsx b/frontend/src/metabase/query_builder/components/QueryHeader.jsx index a603e657106a21960ec7a4307c8b73a9f1437826..1a29a8461cd55939e3c0f21b197f3970423d8d4a 100644 --- a/frontend/src/metabase/query_builder/components/QueryHeader.jsx +++ 
b/frontend/src/metabase/query_builder/components/QueryHeader.jsx @@ -1,6 +1,5 @@ import React, { Component } from "react"; import PropTypes from "prop-types"; -import { Link } from "react-router"; import { connect } from "react-redux"; import { t } from "c-3po"; import QueryModeButton from "./QueryModeButton.jsx"; @@ -17,6 +16,7 @@ import QuestionSavedModal from "metabase/components/QuestionSavedModal.jsx"; import Tooltip from "metabase/components/Tooltip.jsx"; import CollectionMoveModal from "metabase/containers/CollectionMoveModal.jsx"; import ArchiveQuestionModal from "metabase/query_builder/containers/ArchiveQuestionModal"; +import CollectionBadge from "metabase/questions/components/CollectionBadge"; import SaveQuestionModal from "metabase/containers/SaveQuestionModal.jsx"; @@ -24,8 +24,6 @@ import { clearRequestState } from "metabase/redux/requests"; import { RevisionApi } from "metabase/services"; -import * as Urls from "metabase/lib/urls"; - import cx from "classnames"; import _ from "underscore"; import NativeQuery from "metabase-lib/lib/queries/NativeQuery"; @@ -523,22 +521,8 @@ export default class QueryHeader extends Component { buttons={this.getHeaderButtons()} setItemAttributeFn={this.props.onSetCardAttribute} badge={ - this.props.card.collection && ( - <Link - to={Urls.collection(this.props.card.collection.id)} - className="text-uppercase flex align-center no-decoration" - style={{ - color: this.props.card.collection.color, - fontSize: 12, - }} - > - <Icon - name="collection" - size={12} - style={{ marginRight: "0.5em" }} - /> - {this.props.card.collection.name} - </Link> + this.props.card.id && ( + <CollectionBadge collectionId={this.props.card.collection_id} /> ) } /> diff --git a/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx b/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx index 4d195d4c28b208d0563c2b64133c86678fcd345e..b22ba0f2dcb908f4d91b7c1de2d98874c6974492 100644 --- 
a/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx +++ b/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx @@ -17,7 +17,7 @@ export default class VisualizationSettings extends React.Component { } static propTypes = { - card: PropTypes.object.isRequired, + question: PropTypes.object.isRequired, result: PropTypes.object, setDisplayFn: PropTypes.func.isRequired, onUpdateVisualizationSettings: PropTypes.func.isRequired, @@ -31,9 +31,9 @@ export default class VisualizationSettings extends React.Component { }; renderChartTypePicker() { - let { result, card } = this.props; + let { result, question } = this.props; let { CardVisualization } = getVisualizationRaw([ - { card, data: result.data }, + { card: question.card(), data: result.data }, ]); let triggerElement = ( @@ -68,7 +68,7 @@ export default class VisualizationSettings extends React.Component { className={cx( "p2 flex align-center cursor-pointer bg-brand-hover text-white-hover", { - "ChartType--selected": vizType === card.display, + "ChartType--selected": vizType === question.display(), "ChartType--notSensible": !( result && result.data && @@ -111,7 +111,14 @@ export default class VisualizationSettings extends React.Component { ref="popover" > <ChartSettings - series={[{ card: this.props.card, data: this.props.result.data }]} + question={this.props.question} + addField={this.props.addField} + series={[ + { + card: this.props.question.card(), + data: this.props.result.data, + }, + ]} onChange={this.props.onReplaceAllVisualizationSettings} /> </ModalWithTrigger> diff --git a/frontend/src/metabase/questions/components/CollectionBadge.jsx b/frontend/src/metabase/questions/components/CollectionBadge.jsx index 0d5fae450aa042ba972d5dd1f4b3137f94e7133f..f9d1043157ffcf84d5fbb24b5bc4b2233658d85b 100644 --- a/frontend/src/metabase/questions/components/CollectionBadge.jsx +++ b/frontend/src/metabase/questions/components/CollectionBadge.jsx @@ -1,30 +1,33 @@ import React from 
"react"; +import { Flex } from "grid-styled"; import { Link } from "react-router"; +import Icon from "metabase/components/Icon"; + +import { entityObjectLoader } from "metabase/entities/containers/EntityObjectLoader"; import * as Urls from "metabase/lib/urls"; -import Color from "color"; import cx from "classnames"; -import colors from "metabase/lib/colors"; - -const CollectionBadge = ({ className, collection }) => { - const color = Color(collection.color); - const darkened = color.darken(0.1); - const lightened = color.lighten(0.4); - return ( - <Link - to={Urls.collection(collection.id)} - className={cx(className, "flex align-center px1 rounded mx1")} - style={{ - fontSize: 14, - color: lightened.isDark() ? colors["text-white"] : darkened, - backgroundColor: lightened, - }} - > - {collection.name} - </Link> - ); -}; +@entityObjectLoader({ + entityType: "collections", + entityId: (state, props) => props.collectionId || "root", + wrapped: true, +}) +class CollectionBadge extends React.Component { + render() { + const { object } = this.props; + return ( + <Link to={Urls.collection(object.id)} className={cx("inline-block link")}> + <Flex align="center"> + <Icon name={object.getIcon()} mr={1} /> + <h5 className="text-uppercase" style={{ fontWeight: 900 }}> + {object.name} + </h5> + </Flex> + </Link> + ); + } +} export default CollectionBadge; diff --git a/frontend/src/metabase/visualizations/components/ChartSettings.jsx b/frontend/src/metabase/visualizations/components/ChartSettings.jsx index 1e49a9766f8605d1e3de7fffcdbc5816f99a4966..2311f840c63f5b99193da57b4cecf9ab177097f2 100644 --- a/frontend/src/metabase/visualizations/components/ChartSettings.jsx +++ b/frontend/src/metabase/visualizations/components/ChartSettings.jsx @@ -23,12 +23,23 @@ const Widget = ({ value, onChange, props, + // NOTE: special props to support adding additional fields + question, + addField, }) => { const W = widget; return ( <div className={cx("mb2", { hide: hidden, disable: disabled })}> 
{title && <h4 className="mb1">{title}</h4>} - {W && <W value={value} onChange={onChange} {...props} />} + {W && ( + <W + value={value} + onChange={onChange} + question={question} + addField={addField} + {...props} + /> + )} </div> ); }; @@ -44,17 +55,14 @@ class ChartSettings extends Component { }; } - getChartTypeName() { - let { CardVisualization } = getVisualizationTransformed(this.props.series); - switch (CardVisualization.identifier) { - case "table": - return "table"; - case "scalar": - return "number"; - case "funnel": - return "funnel"; - default: - return "chart"; + componentWillReceiveProps(nextProps) { + if (this.props.series !== nextProps.series) { + this.setState({ + series: this._getSeries( + nextProps.series, + nextProps.series[0].card.visualization_settings, + ), + }); } } @@ -102,7 +110,7 @@ class ChartSettings extends Component { }; render() { - const { isDashboard } = this.props; + const { isDashboard, question, addField } = this.props; const { series } = this.state; const tabs = {}; @@ -152,7 +160,12 @@ class ChartSettings extends Component { <div className="Grid-cell Cell--1of3 scroll-y scroll-show border-right p4"> {widgets && widgets.map(widget => ( - <Widget key={`${widget.id}`} {...widget} /> + <Widget + key={`${widget.id}`} + question={question} + addField={addField} + {...widget} + /> ))} </div> <div className="Grid-cell flex flex-column pt2"> diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx new file mode 100644 index 0000000000000000000000000000000000000000..2e4d4380117eb26065e6316bc400c2e134182220 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx @@ -0,0 +1,193 @@ +import React, { Component } from "react"; +import { t } from "c-3po"; + +import Icon from "metabase/components/Icon.jsx"; + +import { SortableContainer, SortableElement } from 
"react-sortable-hoc"; + +import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; +import { + fieldRefForColumn, + findColumnForColumnSetting, +} from "metabase/lib/dataset"; +import { getFriendlyName } from "metabase/visualizations/lib/utils"; + +import _ from "underscore"; + +const SortableColumn = SortableElement( + ({ columnSetting, getColumnName, onRemove }) => ( + <ColumnItem + title={getColumnName(columnSetting)} + onRemove={() => onRemove(columnSetting)} + /> + ), +); + +const SortableColumnList = SortableContainer( + ({ columnSettings, getColumnName, onRemove }) => { + return ( + <div> + {columnSettings.map((columnSetting, index) => ( + <SortableColumn + key={`item-${index}`} + index={columnSetting.index} + columnSetting={columnSetting} + getColumnName={getColumnName} + onRemove={onRemove} + /> + ))} + </div> + ); + }, +); + +export default class ChartSettingOrderedColumns extends Component { + handleEnable = columnSetting => { + const columnSettings = [...this.props.value]; + const index = columnSetting.index; + columnSettings[index] = { ...columnSettings[index], enabled: true }; + this.props.onChange(columnSettings); + }; + + handleDisable = columnSetting => { + const columnSettings = [...this.props.value]; + const index = columnSetting.index; + columnSettings[index] = { ...columnSettings[index], enabled: false }; + this.props.onChange(columnSettings); + }; + + handleSortEnd = ({ oldIndex, newIndex }) => { + const fields = [...this.props.value]; + fields.splice(newIndex, 0, fields.splice(oldIndex, 1)[0]); + this.props.onChange(fields); + }; + + handleAddNewField = fieldRef => { + const { value, onChange, addField } = this.props; + onChange([ + // remove duplicates + ...value.filter( + columnSetting => !_.isEqual(columnSetting.fieldRef, fieldRef), + ), + { fieldRef, enabled: true }, + ]); + addField(fieldRef); + }; + + getColumnName = columnSetting => + getFriendlyName( + findColumnForColumnSetting(this.props.columns, columnSetting) || { + 
display_name: "[Unknown]", + }, + ); + + render() { + const { value, question, columns } = this.props; + + let additionalFieldOptions = { count: 0 }; + if (columns && question && question.query() instanceof StructuredQuery) { + const fieldRefs = columns.map(column => fieldRefForColumn(column)); + additionalFieldOptions = question.query().fieldsOptions(dimension => { + const mbql = dimension.mbql(); + return !_.find(fieldRefs, fieldRef => _.isEqual(fieldRef, mbql)); + }); + } + + const [enabledColumns, disabledColumns] = _.partition( + value + .filter(columnSetting => + findColumnForColumnSetting(columns, columnSetting), + ) + .map((columnSetting, index) => ({ ...columnSetting, index })), + columnSetting => columnSetting.enabled, + ); + + return ( + <div className="list"> + <div>{t`Click and drag to change their order`}</div> + {enabledColumns.length > 0 ? ( + <SortableColumnList + columnSettings={enabledColumns} + getColumnName={this.getColumnName} + onRemove={this.handleDisable} + onSortEnd={this.handleSortEnd} + distance={5} + helperClass="z5" + /> + ) : ( + <div className="my2 p2 flex layout-centered bg-grey-0 text-grey-1 text-bold rounded"> + {t`Add fields from the list below`} + </div> + )} + {disabledColumns.length > 0 || additionalFieldOptions.count > 0 ? 
( + <h4 className="mb2 mt4 pt4 border-top">{`More fields`}</h4> + ) : null} + {disabledColumns.map((columnSetting, index) => ( + <ColumnItem + key={index} + title={this.getColumnName(columnSetting)} + onAdd={() => this.handleEnable(columnSetting)} + /> + ))} + {additionalFieldOptions.count > 0 && ( + <div> + {additionalFieldOptions.dimensions.map((dimension, index) => ( + <ColumnItem + key={index} + title={dimension.displayName()} + onAdd={() => this.handleAddNewField(dimension.mbql())} + /> + ))} + {additionalFieldOptions.fks.map(fk => ( + <div> + <div className="my2 text-grey-4 text-bold text-uppercase text-small"> + {fk.field.target.table.display_name} + </div> + {fk.dimensions.map((dimension, index) => ( + <ColumnItem + key={index} + title={dimension.displayName()} + onAdd={() => this.handleAddNewField(dimension.mbql())} + /> + ))} + </div> + ))} + </div> + )} + </div> + ); + } +} + +const ColumnItem = ({ title, onAdd, onRemove }) => ( + <div + className="my1 bordered rounded shadowed cursor-pointer overflow-hidden bg-white" + onClick={onAdd} + > + <div className="p1 border-bottom relative"> + <div className="px1 flex align-center relative"> + <span className="h4 flex-full text-dark">{title}</span> + {onAdd && ( + <Icon + name="add" + className="cursor-pointer text-grey-1 text-grey-4-hover" + onClick={e => { + e.stopPropagation(); + onAdd(); + }} + /> + )} + {onRemove && ( + <Icon + name="close" + className="cursor-pointer text-grey-1 text-grey-4-hover" + onClick={e => { + e.stopPropagation(); + onRemove(); + }} + /> + )} + </div> + </div> + </div> +); diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedFields.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedFields.jsx deleted file mode 100644 index 94e9290306fbaa1c2e2337eecc55f2aa66c86a46..0000000000000000000000000000000000000000 --- a/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedFields.jsx +++ /dev/null @@ 
-1,110 +0,0 @@ -import React, { Component } from "react"; - -import CheckBox from "metabase/components/CheckBox.jsx"; -import Icon from "metabase/components/Icon.jsx"; - -import { SortableContainer, SortableElement } from "react-sortable-hoc"; - -import cx from "classnames"; -import _ from "underscore"; - -const SortableField = SortableElement( - ({ field, columnNames, onSetEnabled }) => ( - <div - className={cx("flex align-center p1", { - "text-grey-2": !field.enabled, - })} - > - <CheckBox - checked={field.enabled} - onChange={e => onSetEnabled(e.target.checked)} - /> - <span className="ml1 h4">{columnNames[field.name]}</span> - <Icon - className="flex-align-right text-grey-2 mr1 cursor-pointer" - name="grabber" - width={14} - height={14} - /> - </div> - ), -); - -const SortableFieldList = SortableContainer( - ({ fields, columnNames, onSetEnabled }) => { - return ( - <div> - {fields.map((field, index) => ( - <SortableField - key={`item-${index}`} - index={index} - field={field} - columnNames={columnNames} - onSetEnabled={enabled => onSetEnabled(index, enabled)} - /> - ))} - </div> - ); - }, -); - -export default class ChartSettingOrderedFields extends Component { - handleSetEnabled = (index, checked) => { - const fields = [...this.props.value]; - fields[index] = { ...fields[index], enabled: checked }; - this.props.onChange(fields); - }; - - handleToggleAll = anyEnabled => { - const fields = this.props.value.map(field => ({ - ...field, - enabled: !anyEnabled, - })); - this.props.onChange([...fields]); - }; - - handleSortEnd = ({ oldIndex, newIndex }) => { - const fields = [...this.props.value]; - fields.splice(newIndex, 0, fields.splice(oldIndex, 1)[0]); - this.props.onChange(fields); - }; - - isAnySelected = () => { - const { value } = this.props; - return _.any(value, field => field.enabled); - }; - - render() { - const { value, columnNames } = this.props; - const anyEnabled = this.isAnySelected(); - return ( - <div className="list"> - <div 
className="toggle-all"> - <div - className={cx("flex align-center p1", { - "text-grey-2": !anyEnabled, - })} - > - <CheckBox - checked={anyEnabled} - className={cx("text-brand", { "text-grey-2": !anyEnabled })} - onChange={e => this.handleToggleAll(anyEnabled)} - invertChecked - /> - <span className="ml1 h4"> - {anyEnabled ? "Unselect all" : "Select all"} - </span> - </div> - </div> - <SortableFieldList - fields={value} - columnNames={columnNames} - onSetEnabled={this.handleSetEnabled} - onSortEnd={this.handleSortEnd} - distance={5} - helperClass="z5" - /> - </div> - ); - } -} diff --git a/frontend/src/metabase/visualizations/visualizations/Table.jsx b/frontend/src/metabase/visualizations/visualizations/Table.jsx index 0ec7fef9f0ab3e0b24d5dc1bfa9cde5b49d493e3..74701ba5f78c8f6ce67667db269673d84fad22ea 100644 --- a/frontend/src/metabase/visualizations/visualizations/Table.jsx +++ b/frontend/src/metabase/visualizations/visualizations/Table.jsx @@ -6,14 +6,12 @@ import TableInteractive from "../components/TableInteractive.jsx"; import TableSimple from "../components/TableSimple.jsx"; import { t } from "c-3po"; import * as DataGrid from "metabase/lib/data_grid"; +import { findColumnIndexForColumnSetting } from "metabase/lib/dataset"; import Query from "metabase/lib/query"; import { isMetric, isDimension } from "metabase/lib/schema_metadata"; -import { - columnsAreValid, - getFriendlyName, -} from "metabase/visualizations/lib/utils"; -import ChartSettingOrderedFields from "metabase/visualizations/components/settings/ChartSettingOrderedFields.jsx"; +import { columnsAreValid } from "metabase/visualizations/lib/utils"; +import ChartSettingOrderedColumns from "metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx"; import ChartSettingsTableFormatting, { isFormattable, } from "metabase/visualizations/components/settings/ChartSettingsTableFormatting.jsx"; @@ -186,8 +184,8 @@ export default class Table extends Component { }, "table.columns": { section: 
"Data", - title: t`Fields to include`, - widget: ChartSettingOrderedFields, + title: t`Visible fields`, + widget: ChartSettingOrderedColumns, getHidden: (series, vizSettings) => vizSettings["table.pivot"], isValid: ([{ card, data }]) => card.visualization_settings["table.columns"] && @@ -201,10 +199,7 @@ export default class Table extends Component { enabled: col.visibility_type !== "details-only", })), getProps: ([{ data: { cols } }]) => ({ - columnNames: cols.reduce( - (o, col) => ({ ...o, [col.name]: getFriendlyName(col) }), - {}, - ), + columns: cols, }), }, "table.column_widths": {}, @@ -304,10 +299,13 @@ export default class Table extends Component { }); } else { const { cols, rows, columns } = data; - const columnIndexes = settings["table.columns"] - .filter(f => f.enabled) - .map(f => _.findIndex(cols, c => c.name === f.name)) - .filter(i => i >= 0 && i < cols.length); + const columnSettings = settings["table.columns"]; + const columnIndexes = columnSettings + .filter(columnSetting => columnSetting.enabled) + .map(columnSetting => + findColumnIndexForColumnSetting(cols, columnSetting), + ) + .filter(columnIndex => columnIndex >= 0 && columnIndex < cols.length); this.setState({ data: { diff --git a/frontend/test/nav/ProfileLink.unit.spec.js b/frontend/test/nav/ProfileLink.unit.spec.js new file mode 100644 index 0000000000000000000000000000000000000000..35c371207da938a062d8d2a6f91e46ad20a655d6 --- /dev/null +++ b/frontend/test/nav/ProfileLink.unit.spec.js @@ -0,0 +1,31 @@ +import React from "react"; +import { shallow } from "enzyme"; +import ProfileLink from "metabase/nav/components/ProfileLink"; + +jest.mock("metabase/lib/settings", () => ({ + get: () => ({ + tag: 1, + version: 1, + }), +})); + +describe("ProfileLink", () => { + describe("options", () => { + describe("normal user", () => { + it("should show the proper set of items", () => { + const normalUser = { is_superuser: false }; + const wrapper = shallow(<ProfileLink user={normalUser} context={""} 
/>); + + expect(wrapper.instance().generateOptionsForUser().length).toBe(3); + }); + }); + describe("admin", () => { + it("should show the proper set of items", () => { + const admin = { is_superuser: true }; + const wrapper = shallow(<ProfileLink user={admin} context={""} />); + + expect(wrapper.instance().generateOptionsForUser().length).toBe(5); + }); + }); + }); +}); diff --git a/frontend/test/parameters/parameters.integ.spec.js b/frontend/test/parameters/parameters.integ.spec.js index e2345ee3162dbbdbdf59cad22faa23bf5a90d3c7..1adcb1876f8b0eccb54cc7d119956d438160ab35 100644 --- a/frontend/test/parameters/parameters.integ.spec.js +++ b/frontend/test/parameters/parameters.integ.spec.js @@ -314,7 +314,7 @@ describe("parameters", () => { app = mount(store.getAppContainer()); await store.waitForActions([FETCH_DASHBOARD]); - expect(app.find(".DashboardHeader .Entity h2").text()).toEqual( + expect(app.find(".DashboardHeader .Entity .h2").text()).toEqual( "Test Dashboard", ); diff --git a/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js b/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js index 18076d55c217d35a28c4ad61f26a2b140d4ccc48..b75b248bee1acef873a86b83e74a9b3b668181d5 100644 --- a/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js +++ b/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js @@ -16,7 +16,6 @@ import { import { FETCH_TABLE_METADATA } from "metabase/redux/metadata"; -import CheckBox from "metabase/components/CheckBox"; import RunButton from "metabase/query_builder/components/RunButton"; import VisualizationSettings from "metabase/query_builder/components/VisualizationSettings"; @@ -65,17 +64,14 @@ describe("QueryBuilder", () => { const doneButton = settingsModal.find(".Button--primary"); expect(doneButton.length).toBe(1); - const fieldsToIncludeCheckboxes = settingsModal.find(CheckBox); - expect(fieldsToIncludeCheckboxes.length).toBe(7); + const 
fieldsToIncludeRemoveButtons = settingsModal.find(".Icon-close"); + expect(fieldsToIncludeRemoveButtons.length).toBe(6); click( - fieldsToIncludeCheckboxes.filterWhere( - checkbox => - checkbox - .parent() - .find("span") - .text() === "Created At", - ), + settingsModal + .find("ColumnItem") + .findWhere(x => x.text() === "Created At") + .find(".Icon-close"), ); expect(table.find('div[children="Created At"]').length).toBe(0); diff --git a/frontend/test/visualizations/components/ChartSettings.unit.spec.js b/frontend/test/visualizations/components/ChartSettings.unit.spec.js deleted file mode 100644 index 1729473d62e72e8cead0782338c111f02748057e..0000000000000000000000000000000000000000 --- a/frontend/test/visualizations/components/ChartSettings.unit.spec.js +++ /dev/null @@ -1,81 +0,0 @@ -import React from "react"; - -import ChartSettings from "metabase/visualizations/components/ChartSettings"; - -import { TableCard } from "../__support__/visualizations"; - -import { mount } from "enzyme"; -import { click } from "__support__/enzyme_utils"; - -function renderChartSettings(enabled = true) { - const props = { - series: [ - TableCard("Foo", { - card: { - visualization_settings: { - "table.columns": [{ name: "Foo_col0", enabled: enabled }], - }, - }, - }), - ], - }; - return mount(<ChartSettings {...props} onChange={() => {}} />); -} - -// The ExplicitSize component uses the matchMedia DOM API -// which does not exist in jest's JSDOM -Object.defineProperty(window, "matchMedia", { - value: jest.fn(() => { - return { - matches: true, - addListener: () => {}, - removeListener: () => {}, - }; - }), -}); - -// We have to do some mocking here to avoid calls to GA and to Metabase settings -jest.mock("metabase/lib/settings", () => ({ - get: () => "v", -})); - -describe("ChartSettings", () => { - describe("toggling fields", () => { - describe("disabling all fields", () => { - it("should show null state", () => { - const chartSettings = renderChartSettings(); - - 
expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(1); - expect(chartSettings.find("table").length).toEqual(1); - - click(chartSettings.find(".toggle-all .cursor-pointer")); - - expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(0); - expect(chartSettings.find("table").length).toEqual(0); - expect(chartSettings.text()).toContain( - "Every field is hidden right now", - ); - }); - }); - - describe("enabling all fields", () => { - it("should show all columns", () => { - const chartSettings = renderChartSettings(false); - - expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(0); - expect(chartSettings.find("table").length).toEqual(0); - expect(chartSettings.text()).toContain( - "Every field is hidden right now", - ); - - click(chartSettings.find(".toggle-all .cursor-pointer")); - - expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(1); - expect(chartSettings.find("table").length).toEqual(1); - expect(chartSettings.text()).not.toContain( - "Every field is hidden right now", - ); - }); - }); - }); -}); diff --git a/frontend/test/visualizations/components/settings/ChartSettingOrderedColumns.unit.spec.js b/frontend/test/visualizations/components/settings/ChartSettingOrderedColumns.unit.spec.js new file mode 100644 index 0000000000000000000000000000000000000000..b2cb958d61e38712f372c140ea29cb4fe5c45c46 --- /dev/null +++ b/frontend/test/visualizations/components/settings/ChartSettingOrderedColumns.unit.spec.js @@ -0,0 +1,84 @@ +import React from "react"; + +import ChartSettingOrderedColumns from "metabase/visualizations/components/settings/ChartSettingOrderedColumns"; + +import { mount } from "enzyme"; + +import { question } from "../../../__support__/sample_dataset_fixture.js"; + +function renderChartSettingOrderedColumns(props) { + return mount( + <ChartSettingOrderedColumns + onChange={() => {}} + columns={[{ name: "Foo" }, { name: "Bar" }]} + {...props} + />, + ); +} + 
+describe("ChartSettingOrderedColumns", () => { + it("should have the correct add and remove buttons", () => { + const setting = renderChartSettingOrderedColumns({ + value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: false }], + }); + expect(setting.find(".Icon-add")).toHaveLength(1); + expect(setting.find(".Icon-close")).toHaveLength(1); + }); + it("should add a column", () => { + const onChange = jest.fn(); + const setting = renderChartSettingOrderedColumns({ + value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: false }], + onChange, + }); + setting.find(".Icon-add").simulate("click"); + expect(onChange.mock.calls).toEqual([ + [[{ name: "Foo", enabled: true }, { name: "Bar", enabled: true }]], + ]); + }); + it("should remove a column", () => { + const onChange = jest.fn(); + const setting = renderChartSettingOrderedColumns({ + value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: false }], + onChange, + }); + setting.find(".Icon-close").simulate("click"); + expect(onChange.mock.calls).toEqual([ + [[{ name: "Foo", enabled: false }, { name: "Bar", enabled: false }]], + ]); + }); + it("should reorder columns", () => { + const onChange = jest.fn(); + const setting = renderChartSettingOrderedColumns({ + value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: true }], + onChange, + }); + // just call handleSortEnd directly for now as it's difficult to simulate drag and drop + setting.instance().handleSortEnd({ oldIndex: 1, newIndex: 0 }); + expect(onChange.mock.calls).toEqual([ + [[{ name: "Bar", enabled: true }, { name: "Foo", enabled: true }]], + ]); + }); + + describe("for structured queries", () => { + it("should list and add additional columns", () => { + const onChange = jest.fn(); + const addField = jest.fn(); + const setting = renderChartSettingOrderedColumns({ + value: [], + columns: [], + question, + onChange, + addField, + }); + expect(setting.find(".Icon-add")).toHaveLength(28); + setting + .find(".Icon-add") + 
.first() + .simulate("click"); + expect(addField.mock.calls).toEqual([[["field-id", 1]]]); + expect(onChange.mock.calls).toEqual([ + [[{ fieldRef: ["field-id", 1], enabled: true }]], + ]); + }); + }); +}); diff --git a/frontend/test/visualizations/components/settings/ChartSettingOrderedFields.unit.spec.js b/frontend/test/visualizations/components/settings/ChartSettingOrderedFields.unit.spec.js deleted file mode 100644 index 84cc84d4e8208c1af635c79955d2cf2d97b63c58..0000000000000000000000000000000000000000 --- a/frontend/test/visualizations/components/settings/ChartSettingOrderedFields.unit.spec.js +++ /dev/null @@ -1,79 +0,0 @@ -import React from "react"; - -import ChartSettingOrderedFields from "metabase/visualizations/components/settings/ChartSettingOrderedFields"; - -import { mount } from "enzyme"; - -function renderChartSettingOrderedFields(props) { - return mount(<ChartSettingOrderedFields onChange={() => {}} {...props} />); -} - -describe("ChartSettingOrderedFields", () => { - describe("isAnySelected", () => { - describe("when on or more fields are enabled", () => { - it("should be true", () => { - const chartSettings = renderChartSettingOrderedFields({ - columnNames: { id: "ID", text: "Text" }, - value: [ - { name: "id", enabled: true }, - { name: "text", enabled: false }, - ], - }); - expect(chartSettings.instance().isAnySelected()).toEqual(true); - }); - }); - - describe("when no fields are enabled", () => { - it("should be false", () => { - const chartSettings = renderChartSettingOrderedFields({ - columnNames: { id: "ID", text: "Text" }, - value: [ - { name: "id", enabled: false }, - { name: "text", enabled: false }, - ], - }); - expect(chartSettings.instance().isAnySelected()).toEqual(false); - }); - }); - }); - - describe("toggleAll", () => { - describe("when passed false", () => { - it("should mark all fields as enabled", () => { - const onChange = jest.fn(); - const chartSettings = renderChartSettingOrderedFields({ - columnNames: { id: "ID", text: 
"Text" }, - value: [ - { name: "id", enabled: false }, - { name: "text", enabled: false }, - ], - onChange, - }); - chartSettings.instance().handleToggleAll(false); - expect(onChange.mock.calls[0][0]).toEqual([ - { name: "id", enabled: true }, - { name: "text", enabled: true }, - ]); - }); - }); - - describe("when passed true", () => { - it("should mark all fields as disabled", () => { - const onChange = jest.fn(); - const chartSettings = renderChartSettingOrderedFields({ - columnNames: { id: "ID", text: "Text" }, - value: [ - { name: "id", enabled: true }, - { name: "text", enabled: true }, - ], - onChange, - }); - chartSettings.instance().handleToggleAll(true); - expect(onChange.mock.calls[0][0]).toEqual([ - { name: "id", enabled: false }, - { name: "text", enabled: false }, - ]); - }); - }); - }); -}); diff --git a/project.clj b/project.clj index 76ed1ee742832d3d8330bf41c7d90d07d763d4fe..55620b3cbf12898a4ed9896e51ecaa678f78e859 100644 --- a/project.clj +++ b/project.clj @@ -83,7 +83,7 @@ [net.sf.cssbox/cssbox "4.12" ; HTML / CSS rendering :exclusions [org.slf4j/slf4j-api]] [org.clojars.pntblnk/clj-ldap "0.0.12"] ; LDAP client - [org.liquibase/liquibase-core "3.5.3"] ; migration management (Java lib) + [org.liquibase/liquibase-core "3.6.2"] ; migration management (Java lib) [org.postgresql/postgresql "42.2.2"] ; Postgres driver [org.slf4j/slf4j-log4j12 "1.7.25"] ; abstraction for logging frameworks -- allows end user to plug in desired logging framework at deployment time [org.tcrawley/dynapath "0.2.5"] ; Dynamically add Jars (e.g. 
Oracle or Vertica) to classpath @@ -96,7 +96,7 @@ [ring/ring-jetty-adapter "1.6.0"] ; Ring adapter using Jetty webserver (used to run a Ring server for unit tests) [ring/ring-json "0.4.0"] ; Ring middleware for reading/writing JSON automatically [stencil "0.5.0"] ; Mustache templates for Clojure - [toucan "1.1.7" ; Model layer, hydration, and DB utilities + [toucan "1.1.9" ; Model layer, hydration, and DB utilities :exclusions [honeysql]]] :repositories [["bintray" "https://dl.bintray.com/crate/crate"] ; Repo for Crate JDBC driver ["redshift" "https://s3.amazonaws.com/redshift-driver-downloads"]] diff --git a/resources/automagic_dashboards/field/Country.yaml b/resources/automagic_dashboards/field/Country.yaml index fa493bb6e66d221a2461f005f143cc0bd20b9690..9cd91ba6b52cd88f3fc5c87d0a139ea99e38510d 100644 --- a/resources/automagic_dashboards/field/Country.yaml +++ b/resources/automagic_dashboards/field/Country.yaml @@ -38,7 +38,7 @@ groups: - Overview: title: Overview - Breakdowns: - title: How [[this]] are distributed + title: How the [[this]] is distributed cards: - Count: title: Count @@ -60,7 +60,7 @@ cards: group: Overview width: 6 - Distribution: - title: Distribution of [[this]] + title: How the [[this]] is distributed visualization: map: map.type: region diff --git a/resources/automagic_dashboards/field/DateTime.yaml b/resources/automagic_dashboards/field/DateTime.yaml index 8af35916858fee9120726dc52d316e1fb9e77ef5..346f802e8882b3a25a9d9b26f883e325bdf79d9d 100644 --- a/resources/automagic_dashboards/field/DateTime.yaml +++ b/resources/automagic_dashboards/field/DateTime.yaml @@ -32,9 +32,9 @@ groups: - Overview: title: Overview - Breakdowns: - title: How [[this]] is distributed + title: How the [[this]] is distributed - Seasonality: - title: Seasonal patterns in [[this]] + title: Seasonal patterns in the [[this]] cards: - Count: title: Count diff --git a/resources/automagic_dashboards/field/GenericField.yaml 
b/resources/automagic_dashboards/field/GenericField.yaml index 71d7143394f2565736faabb1212144b36bedb305..fe5396453e26c0e6edfa2e0a58b5e0d5c94f9a8b 100644 --- a/resources/automagic_dashboards/field/GenericField.yaml +++ b/resources/automagic_dashboards/field/GenericField.yaml @@ -38,7 +38,7 @@ groups: - Overview: title: Overview - Breakdowns: - title: How [[this]] are distributed + title: How the [[this]] is distributed cards: - Count: title: Count @@ -60,7 +60,7 @@ cards: group: Overview width: 6 - Distribution: - title: How [[this]] is distributed + title: How the [[this]] is distributed visualization: bar metrics: Count dimensions: this @@ -68,12 +68,13 @@ cards: width: 12 - ByNumber: title: "[[GenericNumber]] by [[this]]" - visualization: line + visualization: bar metrics: - Sum - Avg dimensions: this group: Breakdowns + height: 8 - Crosstab: title: "[[this]] by [[GenericCategoryMedium]]" visualization: table diff --git a/resources/automagic_dashboards/field/Number.yaml b/resources/automagic_dashboards/field/Number.yaml index 937e04ec5c0dfeb17a1e3476c76ea85495433b9a..01a3864a03780028d76f0b91e6068583ceceec84 100644 --- a/resources/automagic_dashboards/field/Number.yaml +++ b/resources/automagic_dashboards/field/Number.yaml @@ -38,11 +38,11 @@ groups: - Overview: title: Overview - Breakdowns: - title: How [[this]] is distributed across categories + title: How the [[this]] is distributed across categories - Seasonality: - title: How [[this]] changes with time + title: How the [[this]] changes with time - Geographical: - title: How [[this]] is distributed geographically + title: How the [[this]] is distributed geographically cards: - Count: title: Count @@ -75,7 +75,7 @@ cards: group: Overview width: 9 - Distribution: - title: How [[this]] is distributed + title: How the [[this]] is distributed visualization: bar metrics: Count dimensions: diff --git a/resources/automagic_dashboards/field/State.yaml b/resources/automagic_dashboards/field/State.yaml index 
2df61dca59ed4c93b5e5eb2c57ca5ff746e10690..f6c19178b715e7573c616ae8d185fa75316a7842 100644 --- a/resources/automagic_dashboards/field/State.yaml +++ b/resources/automagic_dashboards/field/State.yaml @@ -38,7 +38,7 @@ groups: - Overview: title: Overview - Breakdowns: - title: How [[this]] are distributed + title: How the [[this]] is distributed cards: - Count: title: Count diff --git a/resources/automagic_dashboards/metric/GenericMetric.yaml b/resources/automagic_dashboards/metric/GenericMetric.yaml index 46287df7733281d6a26caeae4dc995a39a2901e2..f76fa1582a60e4a626713555d609a7a48f3df03d 100644 --- a/resources/automagic_dashboards/metric/GenericMetric.yaml +++ b/resources/automagic_dashboards/metric/GenericMetric.yaml @@ -1,5 +1,5 @@ -title: A look at your [[this]] -transient_title: Here's a quick look at your [[this]] +title: A look at the [[this]] +transient_title: Here's a quick look at the [[this]] description: How it's distributed across time and other categories. applies_to: GenericTable metrics: @@ -35,15 +35,17 @@ dimensions: field_type: GenericTable.ZipCode groups: - Periodicity: - title: "[[this]] over time" + title: The [[this]] over time - Geographical: - title: "[[this]] by location" + title: The [[this]] by location - Categories: - title: How [[this]] is distributed across different categories + title: How this metric is distributed across different categories - Numbers: - title: How [[this]] is distributed across different numbers - - LargeCategories: - title: Top and bottom [[this]] + title: How this metric is distributed across different numbers + - LargeCategoriesTop: + title: Top 5 per category + - LargeCategoriesBottom: + title: Bottom 5 per category dashboard_filters: - Timestamp - State @@ -119,9 +121,11 @@ cards: map.region: us_states - ByNumber: group: Numbers - title: How [[this]] is distributed across [[GenericNumber]] + title: "[[this]] by [[GenericNumber]]" metrics: this - dimensions: GenericNumber + dimensions: + - GenericNumber: + 
aggregation: default visualization: bar - ByCategoryMedium: group: Categories @@ -151,7 +155,7 @@ cards: order_by: this: descending - ByCategoryLargeTop: - group: LargeCategories + group: LargeCategoriesTop title: "[[this]] per [[GenericCategoryLarge]], top 5" metrics: this dimensions: GenericCategoryLarge @@ -160,7 +164,7 @@ cards: this: descending limit: 5 - ByCategoryLargeBottom: - group: LargeCategories + group: LargeCategoriesBottom title: "[[this]] per [[GenericCategoryLarge]], bottom 5" metrics: this dimensions: GenericCategoryLarge diff --git a/resources/automagic_dashboards/question/GenericQuestion.yaml b/resources/automagic_dashboards/question/GenericQuestion.yaml index a7d0cfaa713b033f4af334319fbefdb4fbc5fcaa..ad7a7b8b067df81e1decbd24ceb1b06efb700844 100644 --- a/resources/automagic_dashboards/question/GenericQuestion.yaml +++ b/resources/automagic_dashboards/question/GenericQuestion.yaml @@ -1,4 +1,4 @@ title: "A closer look at your [[this]]" transient_title: "Here's a closer look at your [[this]]" -description: Here is breakdown of metrics and dimensions used in [[this]]. -applies_to: GenericTable \ No newline at end of file +description: A closer look at the metrics and dimensions used in this saved question. 
+applies_to: GenericTable diff --git a/resources/automagic_dashboards/table/GenericTable.yaml b/resources/automagic_dashboards/table/GenericTable.yaml index cb9744168ce4201e9fcf07687c4cd9c450e53fae..d06fc4986ef3c61f0a399bb5782081e23fb1c5ef 100644 --- a/resources/automagic_dashboards/table/GenericTable.yaml +++ b/resources/automagic_dashboards/table/GenericTable.yaml @@ -20,10 +20,6 @@ dimensions: - Source: field_type: GenericTable.Source score: 100 - - GenericCategorySmall: - field_type: GenericTable.Category - score: 80 - max_cardinality: 5 - GenericCategoryMedium: field_type: GenericTable.Category score: 75 @@ -91,13 +87,13 @@ groups: - Overview: title: Summary - Singletons: - title: These are the same for all your [[this]] + title: These are the same for all your [[this.short-name]] - ByTime: - title: "[[this]] across time" + title: "These [[this.short-name]] across time" - Geographical: - title: Where your [[this]] are + title: Where your [[this.short-name]] are - General: - title: How [[this]] are distributed + title: How these [[this.short-name]] are distributed dashboard_filters: - Timestamp - Date @@ -112,13 +108,13 @@ dashboard_filters: cards: # Overview - Rowcount: - title: Total [[this]] + title: Total [[this.short-name]] visualization: scalar metrics: Count score: 100 group: Overview - RowcountLast30Days: - title: New [[this]] in the last 30 days + title: "[[this.short-name]] added in the last 30 days" visualization: scalar metrics: Count score: 100 @@ -132,7 +128,7 @@ cards: group: Overview # General - NumberDistribution: - title: How [[this]] are distributed across [[GenericNumber]] + title: "[[this.short-name]] by [[GenericNumber]]" dimensions: - GenericNumber: aggregation: default @@ -141,7 +137,7 @@ cards: score: 90 group: General - CountByCategoryMedium: - title: "[[this]] per [[GenericCategoryMedium]]" + title: "[[this.short-name]] per [[GenericCategoryMedium]]" dimensions: GenericCategoryMedium metrics: Count visualization: row @@ -151,7 +147,7 
@@ cards: order_by: - Count: descending - CountByCategoryLarge: - title: "[[this]] per [[GenericCategoryLarge]]" + title: "[[this.short-name]] per [[GenericCategoryLarge]]" dimensions: GenericCategoryLarge metrics: Count visualization: table @@ -162,7 +158,7 @@ cards: - Count: descending # Geographical - CountByCountry: - title: "[[this]] per country" + title: "[[this.short-name]] per country" metrics: Count dimensions: Country score: 90 @@ -173,7 +169,7 @@ cards: group: Geographical height: 6 - CountByState: - title: "[[this]] per state" + title: "[[this.short-name]] per state" metrics: Count dimensions: State score: 90 @@ -184,7 +180,7 @@ cards: group: Geographical height: 6 - CountByCoords: - title: "[[this]] by coordinates" + title: "[[this.short-name]] by coordinates" metrics: Count dimensions: - Long @@ -195,42 +191,42 @@ cards: height: 6 # By Time - CountByJoinDate: - title: "[[this]] that have joined over time" + title: "[[this.short-name]] that have joined over time" visualization: line dimensions: JoinTimestamp metrics: Count score: 90 group: ByTime - CountByJoinDate: - title: "[[this]] that have joined over time" + title: "[[this.short-name]] that have joined over time" visualization: line dimensions: JoinDate metrics: Count score: 90 group: ByTime - CountByCreateDate: - title: New [[this]] over time + title: New [[this.short-name]] over time visualization: line dimensions: CreateTimestamp metrics: Count score: 90 group: ByTime - CountByCreateDate: - title: New [[this]] over time + title: New [[this.short-name]] over time visualization: line dimensions: CreateDate metrics: Count score: 90 group: ByTime - CountByTimestamp: - title: "[[this]] by [[Timestamp]]" + title: "[[this.short-name]] by [[Timestamp]]" visualization: line dimensions: Timestamp metrics: Count score: 20 group: ByTime - CountByTimestamp: - title: "[[this]] by [[Timestamp]]" + title: "[[this.short-name]] by [[Timestamp]]" visualization: line dimensions: Date metrics: Count @@ -371,7 
+367,7 @@ cards: group: ByTime x_label: "[[Timestamp]]" - DayOfWeekCreateDate: - title: Weekdays when new [[this]] were added + title: Weekdays when [[this.short-name]] were added visualization: bar dimensions: - CreateTimestamp: @@ -381,7 +377,7 @@ cards: group: ByTime x_label: Created At by day of the week - DayOfWeekCreateDate: - title: Weekdays when new [[this]] were added + title: Weekdays when [[this.short-name]] were added visualization: bar dimensions: - CreateDate: @@ -391,7 +387,7 @@ cards: group: ByTime x_label: Created At by day of the week - HourOfDayCreateDate: - title: Hours when new [[this]] were added + title: Hours when [[this.short-name]] were added visualization: bar dimensions: - CreateTimestamp: @@ -401,7 +397,7 @@ cards: group: ByTime x_label: Created At by hour of the day - HourOfDayCreateDate: - title: Hours when new [[this]] were added + title: Hours when [[this.short-name]] were added visualization: bar dimensions: - CreateTime: @@ -411,7 +407,7 @@ cards: group: ByTime x_label: Created At by hour of the day - DayOfMonthCreateDate: - title: Days when new [[this]] were added + title: Days when [[this.short-name]] were added visualization: bar dimensions: - CreateTimestamp: @@ -421,7 +417,7 @@ cards: group: ByTime x_label: Created At by day of the month - DayOfMonthCreateDate: - title: Days when new [[this]] were added + title: Days when [[this.short-name]] were added visualization: bar dimensions: - CreateDate: @@ -431,7 +427,7 @@ cards: group: ByTime x_label: Created At by day of the month - MonthOfYearCreateDate: - title: Months when new [[this]] were added + title: Months when [[this.short-name]] were added visualization: bar dimensions: - CreateTimestamp: @@ -441,7 +437,7 @@ cards: group: ByTime x_label: Created At by month of the year - MonthOfYearCreateDate: - title: Months when new [[this]] were added + title: Months when [[this.short-name]] were added visualization: bar dimensions: - CreateDate: @@ -451,7 +447,7 @@ cards: group: 
ByTime x_label: Created At by month of the year - QuerterOfYearCreateDate: - title: Quarters when new [[this]] were added + title: Quarters when [[this.short-name]] were added visualization: bar dimensions: - CreateTimestamp: @@ -461,7 +457,7 @@ cards: group: ByTime x_label: Created At by quarter of the year - QuerterOfYearCreateDate: - title: Quarters when new [[this]] were added + title: Quarters when [[this.short-name]] were added visualization: bar dimensions: - CreateDate: @@ -471,7 +467,7 @@ cards: group: ByTime x_label: Created At by quarter of the year - DayOfWeekJoinDate: - title: Weekdays when [[this]] joined + title: Weekdays when [[this.short-name]] joined visualization: bar dimensions: - JoinTimestamp: @@ -481,7 +477,7 @@ cards: group: ByTime x_label: Join date by day of the week - DayOfWeekJoinDate: - title: Weekdays when [[this]] joined + title: Weekdays when [[this.short-name]] joined visualization: bar dimensions: - JoinDate: @@ -491,7 +487,7 @@ cards: group: ByTime x_label: Join date by day of the week - HourOfDayJoinDate: - title: Hours when [[this]] joined + title: Hours when [[this.short-name]] joined visualization: bar dimensions: - JoinTimestamp: @@ -501,7 +497,7 @@ cards: group: ByTime x_label: Join date by hour of the day - HourOfDayJoinDate: - title: Hours when [[this]] joined + title: Hours when [[this.short-name]] joined visualization: bar dimensions: - JoinTime: @@ -511,7 +507,7 @@ cards: group: ByTime x_label: Join date by hour of the day - DayOfMonthJoinDate: - title: Days of the month when [[this]] joined + title: Days of the month when [[this.short-name]] joined visualization: bar dimensions: - JoinTimestamp: @@ -521,7 +517,7 @@ cards: group: ByTime x_label: Join date by day of the month - DayOfMonthJoinDate: - title: Days of the month when [[this]] joined + title: Days of the month when [[this.short-name]] joined visualization: bar dimensions: - JoinDate: @@ -531,7 +527,7 @@ cards: group: ByTime x_label: Join date by day of the 
month - MonthOfYearJoinDate: - title: Months when [[this]] joined + title: Months when [[this.short-name]] joined visualization: bar dimensions: - JoinTimestamp: @@ -541,7 +537,7 @@ cards: group: ByTime x_label: Join date by month of the year - MonthOfYearJoinDate: - title: Months when [[this]] joined + title: Months when [[this.short-name]] joined visualization: bar dimensions: - JoinDate: @@ -551,7 +547,7 @@ cards: group: ByTime x_label: Join date by month of the year - QuerterOfYearJoinDate: - title: Quarters when [[this]] joined + title: Quarters when [[this.short-name]] joined visualization: bar dimensions: - JoinTimestamp: @@ -561,7 +557,7 @@ cards: group: ByTime x_label: Join date by quarter of the year - QuerterOfYearJoinDate: - title: Quarters when [[this]] joined + title: Quarters when [[this.short-name]] joined visualization: bar dimensions: - JoinDate: diff --git a/resources/automagic_dashboards/table/UserTable.yaml b/resources/automagic_dashboards/table/UserTable.yaml index 1844a20527cb0c73019e8a347b9a960085c61c08..b6297d02f83611b6ea7cdda861c6dbfd49c57668 100644 --- a/resources/automagic_dashboards/table/UserTable.yaml +++ b/resources/automagic_dashboards/table/UserTable.yaml @@ -23,14 +23,11 @@ dimensions: - JoinDate: field_type: Date score: 30 -- Source: - field_type: Source - GenericNumber: field_type: GenericTable.Number score: 80 - Source: field_type: GenericTable.Source - score: 100 - GenericCategoryMedium: field_type: GenericTable.Category score: 75 @@ -54,9 +51,9 @@ groups: - Overview: title: Overview - Geographical: - title: Where these [[this]] are + title: Where these [[this.short-name]] are - General: - title: How these [[this]] are distributed + title: How these [[this.short-name]] are distributed dashboard_filters: - JoinDate - GenericCategoryMedium @@ -66,7 +63,7 @@ dashboard_filters: cards: # Overview - Rowcount: - title: Total [[this]] + title: Total [[this.short-name]] visualization: scalar metrics: Count score: 100 @@ -74,7 +71,7 @@ 
cards: width: 5 height: 3 - RowcountLast30Days: - title: New [[this]] in the last 30 days + title: New [[this.short-name]] in the last 30 days visualization: scalar metrics: Count score: 100 @@ -84,8 +81,7 @@ cards: height: 3 - NewUsersByMonth: visualization: line - title: New [[this]] per month - description: The number of new [[this]] each month + title: New [[this.short-name]] per month dimensions: JoinDate metrics: Count score: 100 @@ -94,7 +90,7 @@ cards: height: 7 # Geographical - CountByCountry: - title: Number of [[this]] per country + title: Per country metrics: Count dimensions: Country score: 90 @@ -104,7 +100,7 @@ cards: map.region: world_countries group: Geographical - CountByState: - title: "[[this]] per state" + title: "Per state" metrics: Count dimensions: State score: 90 @@ -115,7 +111,7 @@ cards: map.region: us_states group: Geographical - CountByCoords: - title: "[[this]] by coordinates" + title: "By coordinates" metrics: Count dimensions: - Long @@ -135,7 +131,7 @@ cards: score: 90 group: General - CountByCategoryMedium: - title: "[[this]] per [[GenericCategoryMedium]]" + title: "Per [[GenericCategoryMedium]]" dimensions: GenericCategoryMedium metrics: Count visualization: row @@ -144,8 +140,18 @@ cards: group: General order_by: - Count: descending + - CountBySource: + title: "Per [[Source]]" + dimensions: Source + metrics: Count + visualization: row + score: 80 + height: 8 + group: General + order_by: + - Count: descending - CountByCategoryLarge: - title: "[[this]] per [[GenericCategoryLarge]]" + title: "Per [[GenericCategoryLarge]]" dimensions: GenericCategoryLarge metrics: Count visualization: table diff --git a/resources/migrations/000_migrations.yaml b/resources/migrations/000_migrations.yaml index 518b0708b69766640719f602221f5af9381b54d2..e84372866dbf26c84aab135127629dd2ea1ffc22 100644 --- a/resources/migrations/000_migrations.yaml +++ b/resources/migrations/000_migrations.yaml @@ -1,7 +1,22 @@ databaseChangeLog: +# The quoting strategy 
decides when things like column names should be quoted. +# This is in place to deal with Liquibase not knowing about all of the new +# reserved words in MySQL 8+. Using a column name that is a reserved word +# causes a failure. Quoting all objects breaks H2 support though as it will +# quote table names but not quote that same table name in a foreign key reference +# which will cause a failure when trying to initially setup the database. + - property: + name: quote_strategy + value: QUOTE_ALL_OBJECTS + dbms: mysql,mariadb + - property: + name: quote_strategy + value: LEGACY + dbms: postgresql,h2 - changeSet: id: '1' author: agilliland + objectQuotingStrategy: ${quote_strategy} changes: - createTable: columns: @@ -1449,6 +1464,8 @@ databaseChangeLog: - changeSet: id: 10 author: cammsaul + validCheckSum: 7:97fec69516d0dfe424ea7365f51bb87e + validCheckSum: 7:3b90e2fe0ac8e617a1f30ef95d39319b changes: - createTable: tableName: revision @@ -1509,7 +1526,7 @@ databaseChangeLog: replace: WITHOUT with: WITH - modifySql: - dbms: mysql + dbms: mysql,mariadb replace: replace: object VARCHAR with: object TEXT @@ -1552,6 +1569,8 @@ databaseChangeLog: - changeSet: id: 13 author: agilliland + validCheckSum: 7:f27286894439bef33ff93761f9b32bc4 + validCheckSum: 7:1bc8ccc9b1803cda5651f144029be40c changes: - createTable: tableName: activity @@ -1636,7 +1655,7 @@ databaseChangeLog: replace: WITHOUT with: WITH - modifySql: - dbms: mysql + dbms: mysql,mariadb replace: replace: details VARCHAR with: details TEXT @@ -1698,6 +1717,7 @@ databaseChangeLog: - changeSet: id: 15 author: agilliland + objectQuotingStrategy: ${quote_strategy} changes: - addColumn: tableName: revision @@ -2060,6 +2080,7 @@ databaseChangeLog: - changeSet: id: 23 author: agilliland + objectQuotingStrategy: ${quote_strategy} changes: - modifyDataType: tableName: metabase_table @@ -3186,6 +3207,7 @@ databaseChangeLog: - changeSet: id: 46 author: camsaul + objectQuotingStrategy: ${quote_strategy} changes: - 
addNotNullConstraint: tableName: report_dashboardcard @@ -3501,7 +3523,7 @@ databaseChangeLog: - property: name: blob.type value: blob - dbms: mysql,h2 + dbms: mysql,h2,mariadb - property: name: blob.type value: bytea @@ -4272,7 +4294,7 @@ databaseChangeLog: dbms: postgresql,h2 sql: "COMMENT ON COLUMN collection.name IS 'The user-facing name of this Collection.'" - sql: - dbms: mysql + dbms: mysql,mariadb sql: "ALTER TABLE `collection` CHANGE `name` `name` TEXT NOT NULL COMMENT 'The user-facing name of this Collection.'" # In 0.30.0 we finally removed the long-deprecated native read permissions. Since they're no longer considered valid by diff --git a/src/metabase/api/session.clj b/src/metabase/api/session.clj index eeb578f5373dbf6dd45f341d76898ec30725beb7..22e80070ba80718aa45933a0aee013b716db1678 100644 --- a/src/metabase/api/session.clj +++ b/src/metabase/api/session.clj @@ -33,7 +33,8 @@ (db/insert! Session :id <> :user_id (:id user)) - (events/publish-event! :user-login {:user_id (:id user), :session_id <>, :first_login (not (boolean (:last_login user)))}))) + (events/publish-event! :user-login + {:user_id (:id user), :session_id <>, :first_login (not (boolean (:last_login user)))}))) ;;; ## API Endpoints @@ -53,7 +54,8 @@ (try (when-let [user-info (ldap/find-user username)] (when-not (ldap/verify-password user-info password) - ;; Since LDAP knows about the user, fail here to prevent the local strategy to be tried with a possibly outdated password + ;; Since LDAP knows about the user, fail here to prevent the local strategy to be tried with a possibly + ;; outdated password (throw (ex-info password-fail-message {:status-code 400 :errors {:password password-fail-snippet}}))) @@ -114,7 +116,8 @@ (throttle/check (forgot-password-throttlers :ip-address) remote-address) (throttle/check (forgot-password-throttlers :email) email) ;; Don't leak whether the account doesn't exist, just pretend everything is ok - (when-let [{user-id :id, google-auth? 
:google_auth} (db/select-one ['User :id :google_auth] :email email, :is_active true)] + (when-let [{user-id :id, google-auth? :google_auth} (db/select-one ['User :id :google_auth] + :email email, :is_active true)] (let [reset-token (user/set-password-reset-token! user-id) password-reset-url (str (public-settings/site-url) "/auth/reset_password/" reset-token)] (email/send-password-reset-email! email google-auth? server-name password-reset-url) @@ -130,7 +133,8 @@ [^String token] (when-let [[_ user-id] (re-matches #"(^\d+)_.+$" token)] (let [user-id (Integer/parseInt user-id)] - (when-let [{:keys [reset_token reset_triggered], :as user} (db/select-one [User :id :last_login :reset_triggered :reset_token] + (when-let [{:keys [reset_token reset_triggered], :as user} (db/select-one [User :id :last_login :reset_triggered + :reset_token] :id user-id, :is_active true)] ;; Make sure the plaintext token matches up with the hashed one for this user (when (u/ignore-exceptions @@ -206,8 +210,9 @@ (when-not (autocreate-user-allowed-for-email? email) ;; Use some wacky status code (428 - Precondition Required) so we will know when to so the error screen specific ;; to this situation - (throw (ex-info (tru "You''ll need an administrator to create a Metabase account before you can use Google to log in.") - {:status-code 428})))) + (throw + (ex-info (tru "You''ll need an administrator to create a Metabase account before you can use Google to log in.") + {:status-code 428})))) (s/defn ^:private google-auth-create-new-user! 
[{:keys [email] :as new-user} :- user/NewUser] diff --git a/src/metabase/automagic_dashboards/core.clj b/src/metabase/automagic_dashboards/core.clj index d0610e428eced2a73c309e6f916152ca8ce4572c..641b40a1239bbdf57cf9f4c7f7d4775eff205c5e 100644 --- a/src/metabase/automagic_dashboards/core.clj +++ b/src/metabase/automagic_dashboards/core.clj @@ -32,10 +32,12 @@ [metabase.related :as related] [metabase.sync.analyze.classify :as classify] [metabase.util :as u] + [metabase.util.date :as date] [puppetlabs.i18n.core :as i18n :refer [tru trs]] [ring.util.codec :as codec] [schema.core :as s] - [toucan.db :as db])) + [toucan.db :as db]) + (:import java.util.TimeZone)) (def ^:private public-endpoint "/auto/dashboard/") @@ -46,57 +48,80 @@ [root id-or-name] (if (->> root :source (instance? (type Table))) (Field id-or-name) - (let [field (->> root + (when-let [field (->> root :source :result_metadata - (some (comp #{id-or-name} :name)))] + (m/find-first (comp #{id-or-name} :name)))] (-> field (update :base_type keyword) (update :special_type keyword) field/map->FieldInstance (classify/run-classifiers {}))))) -(defn- metric->description - [root metric] - (let [aggregation-clause (-> metric :definition :aggregation first) - field (some->> aggregation-clause - second - filters/field-reference->id - (->field root))] - (if field - (tru "{0} of {1}" (-> aggregation-clause first name str/capitalize) (:display_name field)) - (-> aggregation-clause first name str/capitalize)))) +(def ^:private ^{:arglists '([root])} source-name + (comp (some-fn :display_name :name) :source)) + +(def ^:private op->name + {:sum (tru "sum") + :avg (tru "average") + :min (tru "minumum") + :max (tru "maximum") + :count (tru "number") + :distinct (tru "distinct count") + :stddev (tru "standard deviation") + :cum-count (tru "cumulative count") + :cum-sum (tru "cumulative sum")}) + +(def ^:private ^{:arglists '([metric])} saved-metric? 
+ (comp #{:metric} qp.util/normalize-token first)) + +(def ^:private ^{:arglists '([metric])} custom-expression? + (comp #{:named} qp.util/normalize-token first)) + +(def ^:private ^{:arglists '([metric])} adhoc-metric? + (complement (some-fn saved-metric? custom-expression?))) + +(defn- metric-name + [[op & args :as metric]] + (cond + (adhoc-metric? metric) (-> op qp.util/normalize-token op->name) + (saved-metric? metric) (-> args first Metric :name) + :else (second args))) (defn- join-enumeration - [[x & xs]] - (if xs + [xs] + (if (next xs) (tru "{0} and {1}" (str/join ", " (butlast xs)) (last xs)) - x)) + (first xs))) + +(defn- metric->description + [root aggregation-clause] + (join-enumeration + (for [metric (if (sequential? (first aggregation-clause)) + aggregation-clause + [aggregation-clause])] + (if (adhoc-metric? metric) + (tru "{0} of {1}" (metric-name metric) (or (some->> metric + second + filters/field-reference->id + (->field root) + :display_name) + (source-name root))) + (metric-name metric))))) (defn- question-description [root question] (let [aggregations (->> (qp.util/get-in-normalized question [:dataset_query :query :aggregation]) - (map (fn [[op arg]] - (cond - (-> op qp.util/normalize-token (= :metric)) - (-> arg Metric :name) - - arg - (tru "{0} of {1}" (name op) (->> arg - filters/field-reference->id - (->field root) - :display_name)) - - :else - (name op)))) - join-enumeration) + (metric->description root)) dimensions (->> (qp.util/get-in-normalized question [:dataset_query :query :breakout]) (mapcat filters/collect-field-references) (map (comp :display_name (partial ->field root) filters/field-reference->id)) join-enumeration)] - (tru "{0} by {1}" aggregations dimensions))) + (if dimensions + (tru "{0} by {1}" aggregations dimensions) + aggregations))) (def ^:private ^{:arglists '([x])} encode-base64-json (comp codec/base64-encode codecs/str->bytes json/encode)) @@ -112,6 +137,7 @@ :full-name (if (isa? 
(:entity_type table) :entity/GoogleAnalyticsTable) (:display_name table) (tru "{0} table" (:display_name table))) + :short-name (:display_name table) :source table :database (:db_id table) :url (format "%stable/%s" public-endpoint (u/get-id table)) @@ -121,10 +147,11 @@ [segment] (let [table (-> segment :table_id Table)] {:entity segment - :full-name (tru "{0} segment" (:name segment)) + :full-name (tru "{0} in the {1} segment" (:display_name table) (:name segment)) + :short-name (:display_name table) :source table :database (:db_id table) - :query-filter (-> segment :definition :filter) + :query-filter [:SEGMENT (u/get-id segment)] :url (format "%ssegment/%s" public-endpoint (u/get-id segment)) :rules-prefix ["table"]})) @@ -132,7 +159,10 @@ [metric] (let [table (-> metric :table_id Table)] {:entity metric - :full-name (tru "{0} metric" (:name metric)) + :full-name (if (:id metric) + (tru "{0} metric" (:name metric)) + (:name metric)) + :short-name (:name metric) :source table :database (:db_id table) ;; We use :id here as it might not be a concrete field but rather one from a nested query which @@ -145,6 +175,7 @@ (let [table (field/table field)] {:entity field :full-name (tru "{0} field" (:display_name field)) + :short-name (:display_name field) :source table :database (:db_id table) ;; We use :id here as it might not be a concrete metric but rather one from a nested query @@ -176,31 +207,35 @@ source-question (assoc :entity_type :entity/GenericTable)) (native-query? 
card) (-> card (assoc :entity_type :entity/GenericTable)) - :else (-> card ((some-fn :table_id :table-id)) Table))) + :else (-> card (qp.util/get-normalized :table-id) Table))) (defmethod ->root (type Card) [card] - {:entity card - :source (source card) - :database (:database_id card) - :query-filter (qp.util/get-in-normalized card [:dataset_query :query :filter]) - :full-name (tru "{0} question" (:name card)) - :url (format "%squestion/%s" public-endpoint (u/get-id card)) - :rules-prefix [(if (table-like? card) - "table" - "question")]}) + (let [source (source card)] + {:entity card + :source source + :database (:database_id card) + :query-filter (qp.util/get-in-normalized card [:dataset_query :query :filter]) + :full-name (tru "\"{0}\" question" (:name card)) + :short-name (source-name {:source source}) + :url (format "%squestion/%s" public-endpoint (u/get-id card)) + :rules-prefix [(if (table-like? card) + "table" + "question")]})) (defmethod ->root (type Query) [query] - (let [source (source query)] + (let [source (source query)] {:entity query :source source :database (:database-id query) + :query-filter (qp.util/get-in-normalized query [:dataset_query :query :filter]) :full-name (cond (native-query? query) (tru "Native query") (table-like? query) (-> source ->root :full-name) :else (question-description {:source source} query)) - :url (format "%sadhoc/%s" public-endpoint (encode-base64-json query)) + :short-name (source-name {:source source}) + :url (format "%sadhoc/%s" public-endpoint (encode-base64-json (:dataset_query query))) :rules-prefix [(if (table-like? 
query) "table" "question")]})) @@ -349,8 +384,13 @@ bindings) (comp first #(filter-tables % tables) rules/->entity) identity)] - (str/replace s #"\[\[(\w+)\]\]" (fn [[_ identifier]] - (->reference template-type (bindings identifier)))))) + (str/replace s #"\[\[(\w+)(?:\.([\w\-]+))?\]\]" + (fn [[_ identifier attribute]] + (let [entity (bindings identifier) + attribute (some-> attribute qp.util/normalize-token)] + (or (and (ifn? entity) (entity attribute)) + (root attribute) + (->reference template-type entity))))))) (defn- field-candidates [context {:keys [field_type links_to named max_cardinality] :as constraints}] @@ -440,9 +480,9 @@ (-> context :source u/get-id) (->> context :source u/get-id (str "card__")))} (not-empty filters) - (assoc :filter (transduce (map :filter) - merge-filter-clauses - filters)) + (assoc :filter (->> filters + (map :filter) + (apply merge-filter-clauses))) (not-empty dimensions) (assoc :breakout dimensions) @@ -487,21 +527,41 @@ (u/update-when :graph.metrics metric->name) (u/update-when :graph.dimensions dimension->name))])) +(defn- capitalize-first + [s] + (str (str/upper-case (subs s 0 1)) (subs s 1))) + (defn- instantiate-metadata [x context bindings] (-> (walk/postwalk (fn [form] (if (string? form) - (fill-templates :string context bindings form) + (let [new-form (fill-templates :string context bindings form)] + (if (not= new-form form) + (capitalize-first new-form) + new-form)) form)) x) (u/update-when :visualization #(instantate-visualization % bindings (:metrics context))))) (defn- valid-breakout-dimension? - [{:keys [base_type engine] :as f}] + [{:keys [base_type engine]}] (not (and (isa? 
base_type :type/Number) (= engine :druid)))) +(defn- singular-cell-dimensions + [root] + (letfn [(collect-dimensions [[op & args]] + (case (some-> op qp.util/normalize-token) + :and (mapcat collect-dimensions args) + := (filters/collect-field-references args) + nil))] + (->> root + :cell-query + collect-dimensions + (map filters/field-reference->id) + set))) + (defn- card-candidates "Generate all potential cards given a card definition and bindings for dimensions, metrics, and filters." @@ -509,8 +569,7 @@ (let [order_by (build-order-by dimensions metrics order_by) metrics (map (partial get (:metrics context)) metrics) filters (cond-> (map (partial get (:filters context)) filters) - (:query-filter context) - (conj {:filter (:query-filter context)})) + (:query-filter context) (conj {:filter (:query-filter context)})) score (if query score (* (or (->> dimensions @@ -520,33 +579,35 @@ rules/max-score) (/ score rules/max-score))) dimensions (map (comp (partial into [:dimension]) first) dimensions) - used-dimensions (rules/collect-dimensions [dimensions metrics filters query])] + used-dimensions (rules/collect-dimensions [dimensions metrics filters query]) + cell-dimension? (->> context :root singular-cell-dimensions)] (->> used-dimensions (map (some-fn #(get-in (:dimensions context) [% :matches]) (comp #(filter-tables % (:tables context)) rules/->entity))) (apply combo/cartesian-product) - (filter (fn [instantiations] + (map (partial zipmap used-dimensions)) + (filter (fn [bindings] (->> dimensions - (map (comp (zipmap used-dimensions instantiations) second)) - (every? valid-breakout-dimension?)))) - (map (fn [instantiations] - (let [bindings (zipmap used-dimensions instantiations) - query (if query - (build-query context bindings query) - (build-query context bindings - filters - metrics - dimensions - limit - order_by))] + (map (comp bindings second)) + (every? (every-pred valid-breakout-dimension? + (complement (comp cell-dimension? 
id-or-name))))))) + (map (fn [bindings] + (let [query (if query + (build-query context bindings query) + (build-query context bindings + filters + metrics + dimensions + limit + order_by))] (-> card - (assoc :metrics metrics) (instantiate-metadata context (->> metrics (map :name) (zipmap (:metrics card)) (merge bindings))) - (assoc :score score - :dataset_query query)))))))) + (assoc :dataset_query query + :metrics (map (some-fn :name (comp metric-name :metric)) metrics) + :score score)))))))) (defn- matching-rules "Return matching rules orderd by specificity. @@ -632,7 +693,7 @@ (update :special_type keyword) field/map->FieldInstance (classify/run-classifiers {}) - (map #(assoc % :engine engine))))) + (assoc :engine engine)))) constantly))] (as-> {:source (assoc source :fields (table->fields source)) :root root @@ -664,8 +725,7 @@ ([root rule context] (-> rule (select-keys [:title :description :transient_title :groups]) - (instantiate-metadata context {}) - (assoc :refinements (:cell-query root))))) + (instantiate-metadata context {})))) (s/defn ^:private apply-rule [root, rule :- rules/Rule] @@ -673,15 +733,16 @@ dashboard (make-dashboard root rule context) filters (->> rule :dashboard_filters - (mapcat (comp :matches (:dimensions context)))) + (mapcat (comp :matches (:dimensions context))) + (remove (comp (singular-cell-dimensions root) id-or-name))) cards (make-cards context rule)] (when (or (not-empty cards) (-> rule :cards nil?)) [(assoc dashboard - :filters filters - :cards cards - :context context) - rule]))) + :filters filters + :cards cards) + rule + context]))) (def ^:private ^:const ^Long max-related 6) (def ^:private ^:const ^Long max-cards 15) @@ -709,16 +770,15 @@ [root, rule :- (s/maybe rules/Rule)] (->> (rules/get-rules (concat (:rules-prefix root) [(:rule rule)])) (keep (fn [indepth] - (when-let [[dashboard _] (apply-rule root indepth)] + (when-let [[dashboard _ _] (apply-rule root indepth)] {:title ((some-fn :short-title :title) dashboard) 
:description (:description dashboard) :url (format "%s/rule/%s/%s" (:url root) (:rule rule) (:rule indepth))}))) (hash-map :indepth))) (defn- drilldown-fields - [dashboard] - (->> dashboard - :context + [context] + (->> context :dimensions vals (mapcat :matches) @@ -786,47 +846,57 @@ [sideways sideways sideways down down up])}) (s/defn ^:private related - "Build a balancee list of related X-rays. General composition of the list is determined for each + "Build a balanced list of related X-rays. General composition of the list is determined for each root type individually via `related-selectors`. That recepie is then filled round-robin style." - [dashboard, rule :- (s/maybe rules/Rule)] - (let [root (-> dashboard :context :root)] - (->> (merge (indepth root rule) - (drilldown-fields dashboard) - (related-entities root)) - (fill-related max-related (related-selectors (-> root :entity type))) - (group-by :selector) - (m/map-vals (partial map :entity))))) + [{:keys [root] :as context}, rule :- (s/maybe rules/Rule)] + (->> (merge (indepth root rule) + (drilldown-fields context) + (related-entities root)) + (fill-related max-related (related-selectors (-> root :entity type))) + (group-by :selector) + (m/map-vals (partial map :entity)))) + +(defn- filter-referenced-fields + "Return a map of fields referenced in filter cluase." + [root filter-clause] + (->> filter-clause + filters/collect-field-references + (mapcat (fn [[_ & ids]] + (for [id ids] + [id (->field root id)]))) + (remove (comp nil? second)) + (into {}))) (defn- automagic-dashboard "Create dashboards for table `root` using the best matching heuristics." 
- [{:keys [rule show rules-prefix query-filter cell-query full-name] :as root}] - (if-let [[dashboard rule] (if rule - (apply-rule root (rules/get-rule rule)) - (->> root - (matching-rules (rules/get-rules rules-prefix)) - (keep (partial apply-rule root)) - ;; `matching-rules` returns an `ArraySeq` (via `sort-by`) so - ;; `first` realises one element at a time (no chunking). - first))] - (do + [{:keys [rule show rules-prefix full-name] :as root}] + (if-let [[dashboard rule context] (if rule + (apply-rule root (rules/get-rule rule)) + (->> root + (matching-rules (rules/get-rules rules-prefix)) + (keep (partial apply-rule root)) + ;; `matching-rules` returns an `ArraySeq` (via `sort-by`) + ;; so `first` realises one element at a time + ;; (no chunking). + first))] + (let [show (or show max-cards)] (log/infof (trs "Applying heuristic %s to %s.") (:rule rule) full-name) (log/infof (trs "Dimensions bindings:\n%s") - (->> dashboard - :context + (->> context :dimensions (m/map-vals #(update % :matches (partial map :name))) u/pprint-to-str)) (log/infof (trs "Using definitions:\nMetrics:\n%s\nFilters:\n%s") - (-> dashboard :context :metrics u/pprint-to-str) - (-> dashboard :context :filters u/pprint-to-str)) - (-> (cond-> dashboard - (or query-filter cell-query) - (assoc :title (tru "A closer look at {0}" full-name))) - (populate/create-dashboard (or show max-cards)) - (assoc :related (related dashboard rule)) - (assoc :more (when (and (-> dashboard :cards count (> max-cards)) - (not= show :all)) - (format "%s#show=all" (:url root)))))) + (-> context :metrics u/pprint-to-str) + (-> context :filters u/pprint-to-str)) + (-> dashboard + (populate/create-dashboard show) + (assoc :related (related context rule) + :more (when (and (not= show :all) + (-> dashboard :cards count (> show))) + (format "%s#show=all" (:url root))) + :transient_filters (:query-filter context) + :param_fields (->> context :query-filter (filter-referenced-fields root))))) (throw (ex-info (trs "Can''t 
create dashboard for {0}" full-name) {:root root :available-rules (map :rule (or (some-> rule rules/get-rule vector) @@ -858,11 +928,11 @@ qp.util/normalize-token (= :metric)) (-> aggregation-clause second Metric) - (let [metric (metric/map->MetricInstance - {:definition {:aggregation [aggregation-clause] - :source_table (:table_id question)} - :table_id (:table_id question)})] - (assoc metric :name (metric->description root metric))))) + (let [table-id (qp.util/get-normalized question :table-id)] + (metric/map->MetricInstance {:definition {:aggregation [aggregation-clause] + :source_table table-id} + :name (metric->description root aggregation-clause) + :table_id table-id})))) (qp.util/get-in-normalized question [:dataset_query :query :aggregation]))) (defn- collect-breakout-fields @@ -876,48 +946,158 @@ (defn- decompose-question [root question opts] (map #(automagic-analysis % (assoc opts - :source (:source root) - :database (:database root))) + :source (:source root) + :query-filter (:query-filter root) + :database (:database root))) (concat (collect-metrics root question) (collect-breakout-fields root question)))) +(defn- pluralize + [x] + (case (mod x 10) + 1 (tru "{0}st" x) + 2 (tru "{0}nd" x) + 3 (tru "{0}rd" x) + (tru "{0}th" x))) + +(defn- humanize-datetime + [dt unit] + (let [dt (date/str->date-time dt) + tz (.getID ^TimeZone @date/jvm-timezone) + unparse-with-formatter (fn [formatter dt] + (t.format/unparse + (t.format/formatter formatter (t/time-zone-for-id tz)) + dt))] + (case unit + :minute (tru "at {0}" (unparse-with-formatter "h:mm a, MMMM d, YYYY" dt)) + :hour (tru "at {0}" (unparse-with-formatter "h a, MMMM d, YYYY" dt)) + :day (tru "on {0}" (unparse-with-formatter "MMMM d, YYYY" dt)) + :week (tru "in {0} week - {1}" + (pluralize (date/date-extract :week-of-year dt tz)) + (str (date/date-extract :year dt tz))) + :month (tru "in {0}" (unparse-with-formatter "MMMM YYYY" dt)) + :quarter (tru "in Q{0} - {1}" + (date/date-extract :quarter-of-year dt 
tz) + (str (date/date-extract :year dt tz))) + :year (unparse-with-formatter "YYYY" dt) + :day-of-week (unparse-with-formatter "EEEE" dt) + :hour-of-day (tru "at {0}" (unparse-with-formatter "h a" dt)) + :month-of-year (unparse-with-formatter "MMMM" dt) + :quarter-of-year (tru "Q{0}" (date/date-extract :quarter-of-year dt tz)) + (:minute-of-hour + :day-of-month + :day-of-year + :week-of-year) (date/date-extract unit dt tz)))) + +(defn- field-reference->field + [root field-reference] + (cond-> (->> field-reference + filters/collect-field-references + first + filters/field-reference->id + (->field root)) + (-> field-reference first qp.util/normalize-token (= :datetime-field)) + (assoc :unit (-> field-reference last qp.util/normalize-token)))) + +(defmulti + ^{:private true + :arglists '([fieldset [op & args]])} + humanize-filter-value (fn [_ [op & args]] + (qp.util/normalize-token op))) + +(def ^:private unit-name (comp {:minute-of-hour "minute" + :hour-of-day "hour" + :day-of-week "day of week" + :day-of-month "day of month" + :day-of-year "day of year" + :week-of-year "week" + :month-of-year "month" + :quarter-of-year "quarter"} + qp.util/normalize-token)) + +(defn- field-name + ([root field-reference] + (->> field-reference (field-reference->field root) field-name)) + ([{:keys [display_name unit] :as field}] + (cond->> display_name + (and (filters/periodic-datetime? field) unit) (format "%s of %s" (unit-name unit))))) + +(defmethod humanize-filter-value := + [root [_ field-reference value]] + (let [field (field-reference->field root field-reference) + field-name (field-name field)] + (if (or (filters/datetime? field) + (filters/periodic-datetime? 
field)) + (tru "{0} is {1}" field-name (humanize-datetime value (:unit field))) + (tru "{0} is {1}" field-name value)))) + +(defmethod humanize-filter-value :between + [root [_ field-reference min-value max-value]] + (tru "{0} is between {1} and {2}" (field-name root field-reference) min-value max-value)) + +(defmethod humanize-filter-value :inside + [root [_ lat-reference lon-reference lat-max lon-min lat-min lon-max]] + (tru "{0} is between {1} and {2}; and {3} is between {4} and {5}" + (field-name root lon-reference) lon-min lon-max + (field-name root lat-reference) lat-min lat-max)) + +(defmethod humanize-filter-value :and + [root [_ & clauses]] + (->> clauses + (map (partial humanize-filter-value root)) + join-enumeration)) + +(defn- cell-title + [root cell-query] + (str/join " " [(->> (qp.util/get-in-normalized (-> root :entity) [:dataset_query :query :aggregation]) + (metric->description root)) + (tru "where {0}" (humanize-filter-value root cell-query))])) + (defmethod automagic-analysis (type Card) [card {:keys [cell-query] :as opts}] - (let [root (->root card)] - (if (or (table-like? card) - cell-query) + (let [root (->root card) + cell-url (format "%squestion/%s/cell/%s" public-endpoint + (u/get-id card) + (encode-base64-json cell-query))] + (if (table-like? 
card) (automagic-dashboard (merge (cond-> root - cell-query (merge {:url (format "%squestion/%s/cell/%s" public-endpoint - (u/get-id card) - (encode-base64-json cell-query)) + cell-query (merge {:url cell-url :entity (:source root) :rules-prefix ["table"]})) opts)) (let [opts (assoc opts :show :all)] - (->> (decompose-question root card opts) - (apply populate/merge-dashboards (automagic-dashboard root)) - (merge {:related (related {:context {:root {:entity card}}} nil)})))))) + (cond-> (apply populate/merge-dashboards + (automagic-dashboard (merge (cond-> root + cell-query (assoc :url cell-url)) + opts)) + (decompose-question root card opts)) + cell-query (merge (let [title (tru "A closer look at {0}" (cell-title root cell-query))] + {:transient_name title + :name title}))))))) (defmethod automagic-analysis (type Query) [query {:keys [cell-query] :as opts}] - (let [root (->root query)] - (if (or (table-like? query) - (:cell-query opts)) + (let [root (->root query) + cell-url (format "%sadhoc/%s/cell/%s" public-endpoint + (encode-base64-json (:dataset_query query)) + (encode-base64-json cell-query))] + (if (table-like? 
query) (automagic-dashboard (merge (cond-> root - cell-query (merge {:url (format "%sadhoc/%s/cell/%s" public-endpoint - (encode-base64-json (:dataset_query query)) - (encode-base64-json cell-query)) + cell-query (merge {:url cell-url :entity (:source root) :rules-prefix ["table"]})) - (update opts :cell-query - (partial filters/inject-refinement - (qp.util/get-in-normalized query [:dataset_query :query :filter]))))) + opts)) (let [opts (assoc opts :show :all)] - (->> (decompose-question root query opts) - (apply populate/merge-dashboards (automagic-dashboard root)) - (merge {:related (related {:context {:root {:entity query}}} nil)})))))) + (cond-> (apply populate/merge-dashboards + (automagic-dashboard (merge (cond-> root + cell-query (assoc :url cell-url)) + opts)) + (decompose-question root query opts)) + cell-query (merge (let [title (tru "A closer look at the {0}" (cell-title root cell-query))] + {:transient_name title + :name title}))))))) (defmethod automagic-analysis (type Field) [field opts] @@ -930,8 +1110,7 @@ :from [Field] :where [:in :table_id (map u/get-id tables)] :group-by [:table_id]}) - (into {} (map (fn [{:keys [count table_id]}] - [table_id count])))) + (into {} (map (juxt :table_id :count)))) list-like? (->> (when-let [candidates (->> field-count (filter (comp (partial >= 2) val)) (map key) diff --git a/src/metabase/automagic_dashboards/filters.clj b/src/metabase/automagic_dashboards/filters.clj index d6d7c5b8c7e63ca1e0765a8e163006720c68e912..a560f807175fb0419c0947f2ac78df022bdb8cca 100644 --- a/src/metabase/automagic_dashboards/filters.clj +++ b/src/metabase/automagic_dashboards/filters.clj @@ -11,7 +11,7 @@ [(s/one (s/constrained su/KeywordOrString (comp #{:field-id :fk-> :field-literal} qp.util/normalize-token)) "head") - s/Any]) + (s/cond-pre s/Int su/KeywordOrString)]) (def ^:private ^{:arglists '([form])} field-reference? "Is given form an MBQL field reference?" 
@@ -25,7 +25,7 @@ (defmethod field-reference->id :field-id [[_ id]] (if (sequential? id) - (second id) + (field-reference->id id) id)) (defmethod field-reference->id :fk-> @@ -44,12 +44,14 @@ (tree-seq (some-fn sequential? map?) identity) (filter field-reference?))) -(def ^:private ^{:arglists '([field])} periodic-datetime? +(def ^{:arglists '([field])} periodic-datetime? + "Is `field` a periodic datetime (eg. day of month)?" (comp #{:minute-of-hour :hour-of-day :day-of-week :day-of-month :day-of-year :week-of-year :month-of-year :quarter-of-year} :unit)) -(defn- datetime? +(defn datetime? + "Is `field` a datetime?" [field] (and (not (periodic-datetime? field)) (or (isa? (:base_type field) :type/DateTime) @@ -154,10 +156,10 @@ remove-unqualified (sort-by interestingness >) (take max-filters) - (map #(assoc % :fk-map (build-fk-map fks %))) (reduce (fn [dashboard candidate] - (let [filter-id (-> candidate hash str) + (let [filter-id (-> candidate ((juxt :id :name :unit)) hash str) + candidate (assoc candidate :fk-map (build-fk-map fks candidate)) dashcards (:ordered_cards dashboard) dashcards-new (map #(add-filter % filter-id candidate) dashcards)] ;; Only add filters that apply to all cards. @@ -172,17 +174,6 @@ dashboard))))) -(defn filter-referenced-fields - "Return a map of fields referenced in filter cluase." - [filter-clause] - (->> filter-clause - collect-field-references - (mapcat (fn [[_ & ids]] - (for [id ids] - [id (Field id)]))) - (into {}))) - - (defn- flatten-filter-clause [filter-clause] (when (not-empty filter-clause) @@ -208,4 +199,4 @@ (->> filter-clause flatten-filter-clause (remove (comp in-refinement? 
collect-field-references)) - (reduce merge-filter-clauses refinement)))) + (apply merge-filter-clauses refinement)))) diff --git a/src/metabase/automagic_dashboards/populate.clj b/src/metabase/automagic_dashboards/populate.clj index fb1c1c048d320d38acf44a7f2a4795bce621e52a..60fd4bce4fa89b1add9f12ec40ffbac9d8fc402f 100644 --- a/src/metabase/automagic_dashboards/populate.clj +++ b/src/metabase/automagic_dashboards/populate.clj @@ -4,10 +4,9 @@ [clojure.tools.logging :as log] [metabase.api.common :as api] [metabase.automagic-dashboards.filters :as filters] - [metabase.models - [card :as card] - [field :refer [Field]]] + [metabase.models.card :as card] [metabase.query-processor.util :as qp.util] + [metabase.util :as u] [puppetlabs.i18n.core :as i18n :refer [trs]] [toucan.db :as db])) @@ -80,17 +79,17 @@ (defn- visualization-settings [{:keys [metrics x_label y_label series_labels visualization dimensions] :as card}] - (let [metric-name (some-fn :name (comp str/capitalize name first :metric)) - [display visualization-settings] visualization] + (let [[display visualization-settings] visualization] {:display display - :visualization_settings - (-> visualization-settings - (merge (colorize card)) - (cond-> - (some :name metrics) (assoc :graph.series_labels (map metric-name metrics)) - series_labels (assoc :graph.series_labels series_labels) - x_label (assoc :graph.x_axis.title_text x_label) - y_label (assoc :graph.y_axis.title_text y_label)))})) + :visualization_settings (-> visualization-settings + (assoc :graph.series_labels metrics) + (merge (colorize card)) + (cond-> + series_labels (assoc :graph.series_labels series_labels) + + x_label (assoc :graph.x_axis.title_text x_label) + + y_label (assoc :graph.y_axis.title_text y_label)))})) (defn- add-card "Add a card to dashboard `dashboard` at position [`x`, `y`]." @@ -236,15 +235,13 @@ (defn create-dashboard "Create dashboard and populate it with cards." 
([dashboard] (create-dashboard dashboard :all)) - ([{:keys [title transient_title description groups filters cards refinements fieldset]} n] + ([{:keys [title transient_title description groups filters cards refinements]} n] (let [n (cond (= n :all) (count cards) (keyword? n) (Integer/parseInt (name n)) :else n) dashboard {:name title :transient_name (or transient_title title) - :transient_filters refinements - :param_fields (filters/filter-referenced-fields refinements) :description description :creator_id api/*current-user-id* :parameters []} @@ -265,43 +262,62 @@ (cond-> dashboard (not-empty filters) (filters/add-filters filters max-filters))))) +(defn- downsize-titles + [markdown] + (->> markdown + str/split-lines + (map (fn [line] + (if (str/starts-with? line "#") + (str "#" line) + line))) + str/join)) + +(defn- merge-filters + [ds] + (when (->> ds + (mapcat :ordered_cards) + (keep (comp :table_id :card)) + distinct + count + (= 1)) + [(->> ds (mapcat :parameters) distinct) + (->> ds + (mapcat :ordered_cards) + (mapcat :parameter_mappings) + (map #(dissoc % :card_id)) + distinct)])) + (defn merge-dashboards "Merge dashboards `ds` into dashboard `d`." - [d & ds] - (let [filter-targets (when (->> ds - (mapcat :ordered_cards) - (keep (comp :table_id :card)) - distinct - count - (= 1)) - (->> ds - (mapcat :ordered_cards) - (mapcat :parameter_mappings) - (mapcat (comp filters/collect-field-references :target)) - (map filters/field-reference->id) - distinct - (map Field)))] - (cond-> (reduce - (fn [target dashboard] - (let [offset (->> target - :ordered_cards - (map #(+ (:row %) (:sizeY %))) - (apply max -1) ; -1 so it neturalizes +1 for spacing if - ; the target dashboard is empty. 
- inc)] - (-> target - (add-text-card {:width grid-width - :height group-heading-height - :text (format "# %s" (:name dashboard)) - :visualization-settings {:dashcard.background false - :text.align_vertical :bottom}} - [offset 0]) - (update :ordered_cards concat - (->> dashboard - :ordered_cards - (map #(-> % - (update :row + offset group-heading-height) - (dissoc :parameter_mappings)))))))) - d - ds) - (not-empty filter-targets) (filters/add-filters filter-targets max-filters)))) + [& ds] + (let [[paramters parameter-mappings] (merge-filters ds)] + (reduce + (fn [target dashboard] + (let [offset (->> target + :ordered_cards + (map #(+ (:row %) (:sizeY %))) + (apply max -1) ; -1 so it neturalizes +1 for spacing if + ; the target dashboard is empty. + inc) + cards (->> dashboard + :ordered_cards + (map #(-> % + (update :row + offset group-heading-height) + (u/update-in-when [:visualization_settings :text] + downsize-titles) + (assoc :parameter_mappings + (when (:card_id %) + (for [mapping parameter-mappings] + (assoc mapping :card_id (:card_id %))))))))] + (-> target + (add-text-card {:width grid-width + :height group-heading-height + :text (format "# %s" (:name dashboard)) + :visualization-settings {:dashcard.background false + :text.align_vertical :bottom}} + [offset 0]) + (update :ordered_cards concat cards)))) + (-> ds + first + (assoc :parameters paramters)) + (rest ds)))) diff --git a/src/metabase/automagic_dashboards/rules.clj b/src/metabase/automagic_dashboards/rules.clj index 50bad7029869ed93be85428e75301a0876ac2905..6e6842033f60c07b16edf0c3aae7e21fcd7fde51 100644 --- a/src/metabase/automagic_dashboards/rules.clj +++ b/src/metabase/automagic_dashboards/rules.clj @@ -4,6 +4,7 @@ [clojure.string :as str] [clojure.tools.logging :as log] [metabase.automagic-dashboards.populate :as populate] + [metabase.query-processor.util :as qp.util] [metabase.util :as u] [metabase.util.schema :as su] [puppetlabs.i18n.core :as i18n :refer [trs]] @@ -119,8 +120,7 @@ (mapcat 
(comp k val first) cards)) (def ^:private DimensionForm - [(s/one (s/constrained (s/cond-pre s/Str s/Keyword) - (comp #{"dimension"} str/lower-case name)) + [(s/one (s/constrained (s/cond-pre s/Str s/Keyword) (comp #{:dimension} qp.util/normalize-token)) "dimension") (s/one s/Str "identifier") su/Map]) diff --git a/src/metabase/db.clj b/src/metabase/db.clj index 78110be4483e775282c74a985b801aba254ebaeb..375451011125f9cf4dec0b707804a923e796038c 100644 --- a/src/metabase/db.clj +++ b/src/metabase/db.clj @@ -1,5 +1,5 @@ (ns metabase.db - "Database definition and helper functions for interacting with the database." + "Application database definition, and setup logic, and helper functions for interacting with it." (:require [clojure [string :as s] [walk :as walk]] @@ -11,6 +11,7 @@ [config :as config] [util :as u]] [metabase.db.spec :as dbspec] + [puppetlabs.i18n.core :refer [trs]] [ring.util.codec :as codec] [toucan.db :as db]) (:import com.mchange.v2.c3p0.ComboPooledDataSource @@ -45,8 +46,9 @@ [(System/getProperty "user.dir") "/" db-file-name options])))))) (defn- parse-connection-string - "Parse a DB connection URI like `postgres://cam@localhost.com:5432/cams_cool_db?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory` - and return a broken-out map." + "Parse a DB connection URI like + `postgres://cam@localhost.com:5432/cams_cool_db?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory` and + return a broken-out map." [uri] (when-let [[_ protocol user pass host port db query] (re-matches #"^([^:/@]+)://(?:([^:/@]+)(?::([^:@]+))?@)?([^:@]+)(?::(\d+))?/([^/?]+)(?:\?(.*))?$" uri)] (merge {:type (case (keyword protocol) @@ -73,8 +75,8 @@ (config/config-kw :mb-db-type))) (def db-connection-details - "Connection details that can be used when pretending the Metabase DB is itself a `Database` - (e.g., to use the Generic SQL driver functions on the Metabase DB itself)." 
+ "Connection details that can be used when pretending the Metabase DB is itself a `Database` (e.g., to use the Generic + SQL driver functions on the Metabase DB itself)." (delay (or @connection-string-details (case (db-type) :h2 {:type :h2 ; TODO - we probably don't need to specifc `:type` here since we can just call (db-type) @@ -112,19 +114,19 @@ (def ^:private ^:const ^String changelog-file "liquibase.yaml") (defn- migrations-sql - "Return a string of SQL containing the DDL statements needed to perform unrun LIQUIBASE migrations." + "Return a string of SQL containing the DDL statements needed to perform unrun `liquibase` migrations." ^String [^Liquibase liquibase] (let [writer (StringWriter.)] (.update liquibase "" writer) (.toString writer))) (defn- migrations-lines - "Return a sequnce of DDL statements that should be used to perform migrations for LIQUIBASE. + "Return a sequnce of DDL statements that should be used to perform migrations for `liquibase`. - MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run - one statement at a time; Liquibase puts each DDL statement on its own line automatically so just split by lines and - filter out blank / comment lines. Even though this is not neccesary for H2 or Postgres go ahead and do it anyway - because it keeps the code simple and doesn't make a significant performance difference." + MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run + one statement at a time; Liquibase puts each DDL statement on its own line automatically so just split by lines and + filter out blank / comment lines. Even though this is not neccesary for H2 or Postgres go ahead and do it anyway + because it keeps the code simple and doesn't make a significant performance difference." [^Liquibase liquibase] (for [line (s/split-lines (migrations-sql liquibase)) :when (not (or (s/blank? 
line) @@ -132,57 +134,67 @@ line)) (defn- has-unrun-migrations? - "Does LIQUIBASE have migration change sets that haven't been run yet? + "Does `liquibase` have migration change sets that haven't been run yet? - It's a good idea to Check to make sure there's actually something to do before running `(migrate :up)` because - `migrations-sql` will always contain SQL to create and release migration locks, which is both slightly dangerous - and a waste of time when we won't be using them." + It's a good idea to Check to make sure there's actually something to do before running `(migrate :up)` because + `migrations-sql` will always contain SQL to create and release migration locks, which is both slightly dangerous and + a waste of time when we won't be using them." ^Boolean [^Liquibase liquibase] (boolean (seq (.listUnrunChangeSets liquibase nil)))) -(defn- has-migration-lock? +(defn- migration-lock-exists? "Is a migration lock in place for LIQUIBASE?" ^Boolean [^Liquibase liquibase] (boolean (seq (.listLocks liquibase)))) (defn- wait-for-migration-lock-to-be-cleared - "Check and make sure the database isn't locked. If it is, sleep for 2 seconds and then retry several times. - There's a chance the lock will end up clearing up so we can run migrations normally." + "Check and make sure the database isn't locked. If it is, sleep for 2 seconds and then retry several times. There's a + chance the lock will end up clearing up so we can run migrations normally." [^Liquibase liquibase] (u/auto-retry 5 - (when (has-migration-lock? liquibase) + (when (migration-lock-exists? liquibase) (Thread/sleep 2000) - (throw (Exception. (str "Database has migration lock; cannot run migrations. You can force-release these locks " - "by running `java -jar metabase.jar migrate release-locks`.")))))) + (throw + (Exception. 
+ (str + (trs "Database has migration lock; cannot run migrations.") + " " + (trs "You can force-release these locks by running `java -jar metabase.jar migrate release-locks`."))))))) (defn- migrate-up-if-needed! - "Run any unrun LIQUIBASE migrations, if needed. + "Run any unrun `liquibase` migrations, if needed. - This creates SQL for the migrations to be performed, then executes each DDL statement. - Running `.update` directly doesn't seem to work as we'd expect; it ends up commiting the changes made and they - can't be rolled back at the end of the transaction block. Converting the migration to SQL string and running that - via `jdbc/execute!` seems to do the trick." + This creates SQL for the migrations to be performed, then executes each DDL statement. Running `.update` directly + doesn't seem to work as we'd expect; it ends up commiting the changes made and they can't be rolled back at the end + of the transaction block. Converting the migration to SQL string and running that via `jdbc/execute!` seems to do + the trick." [conn, ^Liquibase liquibase] - (log/info "Checking if Database has unrun migrations...") + (log/info (trs "Checking if Database has unrun migrations...")) (when (has-unrun-migrations? liquibase) - (log/info "Database has unrun migrations. Waiting for migration lock to be cleared...") + (log/info (trs "Database has unrun migrations. Waiting for migration lock to be cleared...")) (wait-for-migration-lock-to-be-cleared liquibase) - (log/info "Migration lock is cleared. Running migrations...") - (doseq [line (migrations-lines liquibase)] - (jdbc/execute! conn [line])))) + ;; while we were waiting for the lock, it was possible that another instance finished the migration(s), so make + ;; sure something still needs to be done... + (if (has-unrun-migrations? liquibase) + (do + (log/info (trs "Migration lock is cleared. Running migrations...")) + (doseq [line (migrations-lines liquibase)] + (jdbc/execute! 
conn [line]))) + (log/info + (trs "Migration lock cleared, but nothing to do here! Migrations were finished by another instance."))))) (defn- force-migrate-up-if-needed! "Force migrating up. This does two things differently from `migrate-up-if-needed!`: - 1. This doesn't check to make sure the DB locks are cleared - 2. Any DDL statements that fail are ignored + 1. This doesn't check to make sure the DB locks are cleared + 2. Any DDL statements that fail are ignored - It can be used to fix situations where the database got into a weird state, as was common before the fixes made in - #3295. + It can be used to fix situations where the database got into a weird state, as was common before the fixes made in + #3295. - Each DDL statement is ran inside a nested transaction; that way if the nested transaction fails we can roll it back - without rolling back the entirety of changes that were made. (If a single statement in a transaction fails you - can't do anything futher until you clear the error state by doing something like calling `.rollback`.)" + Each DDL statement is ran inside a nested transaction; that way if the nested transaction fails we can roll it back + without rolling back the entirety of changes that were made. (If a single statement in a transaction fails you can't + do anything futher until you clear the error state by doing something like calling `.rollback`.)" [conn, ^Liquibase liquibase] (.clearCheckSums liquibase) (when (has-unrun-migrations? liquibase) @@ -197,7 +209,7 @@ (def ^{:arglists '([])} ^DatabaseFactory database-factory "Return an instance of the Liquibase `DatabaseFactory`. This is done on a background thread at launch because - otherwise it adds 2 seconds to startup time." + otherwise it adds 2 seconds to startup time." (partial deref (future (DatabaseFactory/getInstance)))) (defn- conn->liquibase @@ -222,7 +234,8 @@ "DATABASECHANGELOG" "databasechangelog") fresh-install? 
(jdbc/with-db-metadata [meta (jdbc-details)] ;; don't migrate on fresh install - (empty? (jdbc/metadata-query (.getTables meta nil nil liquibases-table-name (into-array String ["TABLE"]))))) + (empty? (jdbc/metadata-query + (.getTables meta nil nil liquibases-table-name (into-array String ["TABLE"]))))) query (format "UPDATE %s SET FILENAME = ?" liquibases-table-name)] (when-not fresh-install? (jdbc/execute! conn [query "migrations/000_migrations.yaml"])))) @@ -252,11 +265,11 @@ ;; Disable auto-commit. This should already be off but set it just to be safe (.setAutoCommit (jdbc/get-connection conn) false) ;; Set up liquibase and let it do its thing - (log/info "Setting up Liquibase...") + (log/info (trs "Setting up Liquibase...")) (try (let [liquibase (conn->liquibase conn)] (consolidate-liquibase-changesets conn) - (log/info "Liquibase is ready.") + (log/info (trs "Liquibase is ready.")) (case direction :up (migrate-up-if-needed! conn liquibase) :force (force-migrate-up-if-needed! conn liquibase) @@ -279,7 +292,7 @@ ;;; +----------------------------------------------------------------------------------------------------------------+ (defn connection-pool - "Create a C3P0 connection pool for the given database SPEC." + "Create a C3P0 connection pool for the given database `spec`." [{:keys [subprotocol subname classname minimum-pool-size idle-connection-test-period excess-timeout] :or {minimum-pool-size 3 idle-connection-test-period 0 @@ -348,12 +361,12 @@ (verify-db-connection (:type db-details) db-details)) ([engine details] {:pre [(keyword? engine) (map? details)]} - (log/info (u/format-color 'cyan "Verifying %s Database Connection ..." (name engine))) + (log/info (u/format-color 'cyan (trs "Verifying {0} Database Connection ..." (name engine)))) (assert (binding [*allow-potentailly-unsafe-connections* true] (require 'metabase.driver) ((resolve 'metabase.driver/can-connect-with-details?) engine details)) (format "Unable to connect to Metabase %s DB." 
(name engine))) - (log/info "Verify Database Connection ... " (u/emoji "✅")))) + (log/info (trs "Verify Database Connection ... ") (u/emoji "✅")))) (def ^:dynamic ^Boolean *disable-data-migrations* @@ -379,11 +392,24 @@ (defn- run-schema-migrations! "Run through our DB migration process and make sure DB is fully prepared" [auto-migrate? db-details] - (log/info "Running Database Migrations...") + (log/info (trs "Running Database Migrations...")) (if auto-migrate? - (migrate! db-details :up) + ;; There is a weird situation where running the migrations can cause a race condition: if two (or more) instances + ;; in a horizontal cluster are started at the exact same time, they can all start running migrations (and all + ;; acquire a lock) at the exact same moment. Since they all acquire a lock at the same time, none of them would + ;; have been blocked from starting by the lock being in place. (Yes, this not working sort of defeats the whole + ;; purpose of the lock in the first place, but this *is* Liquibase.) + ;; + ;; So what happens is one instance will ultimately end up commiting the transaction first (and succeed), while the + ;; others will fail due to duplicate tables or the like and have their transactions rolled back. + ;; + ;; However, we don't want to have that instance killed because its migrations failed for that reason, so retry a + ;; second time; this time, it will either run into the lock, or see that there are no migrations to run in the + ;; first place, and launch normally. + (u/auto-retry 1 + (migrate! db-details :up)) (print-migrations-and-quit! db-details)) - (log/info "Database Migrations Current ... " (u/emoji "✅"))) + (log/info (trs "Database Migrations Current ... ") (u/emoji "✅"))) (defn- run-data-migrations! "Do any custom code-based migrations now that the db structure is up to date." 
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj index 6abe80896205973191342c2439635d582da25622..bdf893ba77347ee8b83f4d8612ed7d1841a3537a 100644 --- a/src/metabase/driver.clj +++ b/src/metabase/driver.clj @@ -392,7 +392,8 @@ [clojure.lang.IPersistentMap :type/Dictionary] [clojure.lang.IPersistentVector :type/Array] [org.bson.types.ObjectId :type/MongoBSONID] - [org.postgresql.util.PGobject :type/*]]) + [org.postgresql.util.PGobject :type/*] + [nil :type/*]]) ; all-NULL columns in DBs like Mongo w/o explicit types (log/warn (trs "Don''t know how to map class ''{0}'' to a Field base_type, falling back to :type/*." klass)) :type/*)) diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj index 45f504ea3bc5898a6ce370e178e55c05e15f9093..c886e27b1d66cb4302273414818bc2b5d9c84faa 100644 --- a/src/metabase/driver/mongo.clj +++ b/src/metabase/driver/mongo.clj @@ -116,7 +116,7 @@ (defn- describe-table-field [field-kw field-info] (let [most-common-object-type (most-common-object-type (vec (:types field-info)))] (cond-> {:name (name field-kw) - :database-type (.getName most-common-object-type) + :database-type (some-> most-common-object-type .getName) :base-type (driver/class->base-type most-common-object-type)} (= :_id field-kw) (assoc :pk? true) (:special-types field-info) (assoc :special-type (->> (vec (:special-types field-info)) diff --git a/src/metabase/events.clj b/src/metabase/events.clj index 28e245f5b2f51a7ec29fa2fa4ef552f88184e2e1..11613ae0531e4acf49ef59b94100d8843c0b6219 100644 --- a/src/metabase/events.clj +++ b/src/metabase/events.clj @@ -55,6 +55,7 @@ (defn publish-event! "Publish an item into the events stream. Returns the published item to allow for chaining." + {:style/indent 1} [topic event-item] {:pre [(keyword topic)]} (async/go (async/>! 
events-channel {:topic (keyword topic), :item event-item})) diff --git a/src/metabase/metabot.clj b/src/metabase/metabot.clj index 6b034ba180fadd26adee745cc7062b8800d8baf8..22ece0d117f2d1bbfafbdf55f3c6eb1ecbefdce1 100644 --- a/src/metabase/metabot.clj +++ b/src/metabase/metabot.clj @@ -7,6 +7,7 @@ [string :as str]] [clojure.java.io :as io] [clojure.tools.logging :as log] + [honeysql.core :as hsql] [manifold [deferred :as d] [stream :as s]] @@ -21,8 +22,10 @@ [permissions :refer [Permissions]] [permissions-group :as perms-group] [setting :as setting :refer [defsetting]]] - [metabase.util.urls :as urls] - [puppetlabs.i18n.core :refer [tru trs]] + [metabase.util + [date :as du] + [urls :as urls]] + [puppetlabs.i18n.core :refer [trs tru]] [throttle.core :as throttle] [toucan.db :as db])) @@ -32,7 +35,106 @@ :default false) -;;; ------------------------------------------------------------ Perms Checking ------------------------------------------------------------ +;;; ------------------------------------- Deciding which instance is the MetaBot ------------------------------------- + +;; Close your eyes, and imagine a scenario: someone is running multiple Metabase instances in a horizontal cluster. +;; Good for them, but how do we make sure one, and only one, of those instances, replies to incoming MetaBot commands? +;; It would certainly be too much if someone ran, say, 4 instances, and typing `metabot kanye` into Slack gave them 4 +;; Kanye West quotes, wouldn't it? +;; +;; Luckily, we have an "elegant" solution: we'll use the Settings framework to keep track of which instance is +;; currently serving as the MetaBot. We'll have that instance periodically check in; if it doesn't check in for some +;; timeout interval, we'll consider the job of MetaBot up for grabs. Each instance will periodically check if the +;; MetaBot job is open, and, if so, whoever discovers it first will take it. + + +;; How do we uniquiely identify each instance? 
+;; +;; `local-process-uuid` is randomly-generated upon launch and used to identify this specific Metabase instance during +;; this specifc run. Restarting the server will change this UUID, and each server in a hortizontal cluster will have +;; its own ID, making this different from the `site-uuid` Setting. The local process UUID is used to differentiate +;; different horizontally clustered MB instances so we can determine which of them will handle MetaBot duties. +;; +;; TODO - if we ever want to use this elsewhere, we need to move it to `metabase.config` or somewhere else central +;; like that. +(defonce ^:private local-process-uuid + (str (java.util.UUID/randomUUID))) + +(defsetting ^:private metabot-instance-uuid + "UUID of the active MetaBot instance (the Metabase process currently handling MetaBot duties.)" + ;; This should be cached because we'll be checking it fairly often, basically every 2 seconds as part of the + ;; websocket monitor thread to see whether we're MetaBot (the thread won't open the WebSocket unless that instance + ;; is handling MetaBot duties) + :internal? true) + +(defsetting ^:private metabot-instance-last-checkin + "Timestamp of the last time the active MetaBot instance checked in." + :internal? true + ;; caching is disabled for this, since it is intended to be updated frequently (once a minute or so) If we use the + ;; cache, it will trigger cache invalidation for all the other instances (wasteful), and possibly at any rate be + ;; incorrect (for example, if another instance checked in a minute ago, our local cache might not get updated right + ;; away, causing us to falsely assume the MetaBot role is up for grabs.) + :cache? false + :type :timestamp) + +(defn- current-timestamp-from-db + "Fetch the current timestamp from the DB. Why do this from the DB? It's not safe to assume multiple instances have + clocks exactly in sync; but since each instance is using the same application DB, we can use it as a cannonical + source of truth." 
+ ^java.sql.Timestamp [] + (-> (db/query {:select [[(hsql/raw "current_timestamp") :current_timestamp]]}) + first + :current_timestamp)) + +(defn- update-last-checkin! + "Update the last checkin timestamp recorded in the DB." + [] + (metabot-instance-last-checkin (current-timestamp-from-db))) + +(defn- seconds-since-last-checkin + "Return the number of seconds since the active MetaBot instance last checked in (updated the + `metabot-instance-last-checkin` Setting). If a MetaBot instance has *never* checked in, this returns `nil`. (Since + `last-checkin` is one of the few Settings that isn't cached, this always requires a DB call.)" + [] + (when-let [last-checkin (metabot-instance-last-checkin)] + (u/prog1 (-> (- (.getTime (current-timestamp-from-db)) + (.getTime last-checkin)) + (/ 1000)) + (log/debug (u/format-color 'magenta (trs "Last MetaBot checkin was {0} ago." (du/format-seconds <>))))))) + +(def ^:private ^Integer recent-checkin-timeout-interval-seconds + "Number of seconds since the last MetaBot checkin that we will consider the MetaBot job to be 'up for grabs', + currently 3 minutes. (i.e. if the current MetaBot job holder doesn't check in for more than 3 minutes, it's up for + grabs.)" + (int (* 60 3))) + +(defn- last-checkin-was-not-recent? + "`true` if the last checkin of the active MetaBot instance was more than 3 minutes ago, or if there has never been a + checkin. (This requires DB calls, so it should not be called too often -- once a minute [at the time of this + writing] should be sufficient.)" + [] + (if-let [seconds-since-last-checkin (seconds-since-last-checkin)] + (> seconds-since-last-checkin + recent-checkin-timeout-interval-seconds) + true)) + +(defn- am-i-the-metabot? + "Does this instance currently have the MetaBot job? (Does not require any DB calls, so may safely be called + often (i.e. in the websocket monitor thread loop.)" + [] + (= (metabot-instance-uuid) + local-process-uuid)) + +(defn- become-metabot! 
+ "Direct this instance to assume the duties of acting as MetaBot, and update the Settings we use to track assignment + accordingly." + [] + (log/info (u/format-color 'green (trs "This instance will now handle MetaBot duties."))) + (metabot-instance-uuid local-process-uuid) + (update-last-checkin!)) + + +;;; ------------------------------------------------- Perms Checking ------------------------------------------------- (defn- metabot-permissions "Return the set of permissions granted to the MetaBot." @@ -50,7 +152,7 @@ `(do-with-metabot-permissions (fn [] ~@body))) -;;; # ------------------------------------------------------------ Metabot Command Handlers ------------------------------------------------------------ +;;; -------------------------------------------- Metabot Command Handlers -------------------------------------------- (def ^:private ^:dynamic *channel-id* nil) @@ -110,15 +212,18 @@ (defn- card-with-name [card-name] (first (u/prog1 (db/select [Card :id :name], :%lower.name [:like (str \% (str/lower-case card-name) \%)]) (when (> (count <>) 1) - (throw (Exception. (str (tru "Could you be a little more specific? I found these cards with names that matched:\n{0}" - (format-cards <>))))))))) + (throw (Exception. + (str (tru "Could you be a little more specific? I found these cards with names that matched:\n{0}" + (format-cards <>))))))))) (defn- id-or-name->card [card-id-or-name] (cond (integer? card-id-or-name) (db/select-one [Card :id :name], :id card-id-or-name) (or (string? card-id-or-name) (symbol? card-id-or-name)) (card-with-name card-id-or-name) - :else (throw (Exception. (str (tru "I don''t know what Card `{0}` is. Give me a Card ID or name." card-id-or-name)))))) + :else (throw (Exception. + (str (tru "I don''t know what Card `{0}` is. Give me a Card ID or name." 
+ card-id-or-name)))))) (defn ^:metabot show @@ -130,13 +235,16 @@ (do (with-metabot-permissions (read-check Card card-id)) - (do-async (let [attachments (pulse/create-and-upload-slack-attachments! (pulse/create-slack-attachment-data [(pulse/execute-card card-id, :context :metabot)]))] + (do-async (let [attachments (pulse/create-and-upload-slack-attachments! + (pulse/create-slack-attachment-data + [(pulse/execute-card card-id, :context :metabot)]))] (slack/post-chat-message! *channel-id* nil attachments))) (tru "Ok, just a second...")) (throw (Exception. (str (tru "Not Found")))))) - ;; If the card name comes without spaces, e.g. (show 'my 'wacky 'card) turn it into a string an recur: (show "my wacky card") + ;; If the card name comes without spaces, e.g. (show 'my 'wacky 'card) turn it into a string an recur: (show "my + ;; wacky card") ([word & more] (show (str/join " " (cons word more))))) @@ -171,7 +279,7 @@ (str ":kanye:\n> " (rand-nth @kanye-quotes))) -;;; # ------------------------------------------------------------ Metabot Command Dispatch ------------------------------------------------------------ +;;; -------------------------------------------- Metabot Command Dispatch -------------------------------------------- (def ^:private apply-metabot-fn (dispatch-fn "understand" :metabot)) @@ -189,7 +297,7 @@ (apply apply-metabot-fn tokens))))) -;;; # ------------------------------------------------------------ Metabot Input Handling ------------------------------------------------------------ +;;; --------------------------------------------- Metabot Input Handling --------------------------------------------- (defn- message->command-str "Get the command portion of a message *event* directed at Metabot. 
@@ -241,7 +349,7 @@ nil))))) -;;; # ------------------------------------------------------------ Websocket Connection Stuff ------------------------------------------------------------ +;;; ------------------------------------------- Websocket Connection Stuff ------------------------------------------- (defn- connect-websocket! [] (when-let [websocket-url (slack/websocket-url)] @@ -264,9 +372,10 @@ ;; and if it is no longer equal to theirs they should die (defonce ^:private websocket-monitor-thread-id (atom nil)) -;; we'll use a THROTTLER to implement exponential backoff for recconenction attempts, since THROTTLERS are designed with for this sort of thing -;; e.g. after the first failed connection we'll wait 2 seconds, then each that amount increases by the `:delay-exponent` of 1.3 -;; so our reconnection schedule will look something like: +;; we'll use a THROTTLER to implement exponential backoff for recconenction attempts, since THROTTLERS are designed +;; with for this sort of thing e.g. after the first failed connection we'll wait 2 seconds, then each that amount +;; increases by the `:delay-exponent` of 1.3. 
So our reconnection schedule will look something like: +;; ;; number of consecutive failed attempts | seconds before next try (rounded up to nearest multiple of 2 seconds) ;; --------------------------------------+---------------------------------------------------------------------- ;; 0 | 2 @@ -276,47 +385,91 @@ ;; 4 | 8 ;; 5 | 14 ;; 6 | 30 -;; we'll throttle this based on values of the `slack-token` setting; that way if someone changes its value they won't have to wait -;; whatever the exponential delay is before the connection is retried +;; +;; we'll throttle this based on values of the `slack-token` setting; that way if someone changes its value they won't +;; have to wait whatever the exponential delay is before the connection is retried (def ^:private reconnection-attempt-throttler (throttle/make-throttler nil :attempts-threshold 1, :initial-delay-ms 2000, :delay-exponent 1.3)) (defn- should-attempt-to-reconnect? ^Boolean [] - (boolean (u/ignore-exceptions - (throttle/check reconnection-attempt-throttler (slack/slack-token)) - true))) + (boolean + (u/ignore-exceptions + (throttle/check reconnection-attempt-throttler (slack/slack-token)) + true))) + +(defn- reopen-websocket-connection-if-needed! + "Check to see if websocket connection is [still] open, [re-]open it if not." + [] + ;; Only open the Websocket connection if this instance is the MetaBot + (when (am-i-the-metabot?) + (when (= (.getId (Thread/currentThread)) @websocket-monitor-thread-id) + (try + (when (or (not @websocket) + (s/closed? @websocket)) + (log/debug (trs "MetaBot WebSocket is closed. Reconnecting now.")) + (connect-websocket!)) + (catch Throwable e + (log/error (trs "Error connecting websocket:") (.getMessage e))))))) (defn- start-websocket-monitor! [] (future (reset! websocket-monitor-thread-id (.getId (Thread/currentThread))) - ;; Every 2 seconds check to see if websocket connection is [still] open, [re-]open it if not (loop [] + ;; Every 2 seconds... 
(while (not (should-attempt-to-reconnect?)) (Thread/sleep 2000)) - (when (= (.getId (Thread/currentThread)) @websocket-monitor-thread-id) - (try - (when (or (not @websocket) - (s/closed? @websocket)) - (log/debug (trs "MetaBot WebSocket is closed. Reconnecting now.")) - (connect-websocket!)) - (catch Throwable e - (log/error (trs "Error connecting websocket:") (.getMessage e)))) - (recur))))) + (reopen-websocket-connection-if-needed!) + (recur)))) + + +(defn- check-and-update-instance-status! + "Check whether the current instance is serving as the MetaBot; if so, update the last checkin timestamp; if not, check + whether we should become the MetaBot (and do so if we should)." + [] + (cond + ;; if we're already the MetaBot instance, update the last checkin timestamp + (am-i-the-metabot?) + (do + (log/debug (trs "This instance is performing MetaBot duties.")) + (update-last-checkin!)) + ;; otherwise if the last checkin was too long ago, it's time for us to assume the mantle of MetaBot + (last-checkin-was-not-recent?) + (become-metabot!) + ;; otherwise someone else is the MetaBot and we're done here! woo + :else + (log/debug (u/format-color 'blue (trs "Another instance is already handling MetaBot duties."))))) + +(defn- start-instance-monitor! [] + (future + (loop [] + (check-and-update-instance-status!) + (Thread/sleep (* 60 1000)) + (recur)))) + +(defn- seconds-to-wait-before-starting + "Return a random number of seconds to wait before starting MetaBot processess, between 0 and 59. This is done to + introduce a bit of jitter that should prevent a rush of multiple instances all racing to become the MetaBot at the + same time." + [] + (mod (.nextInt (java.security.SecureRandom.)) 60)) (defn start-metabot! "Start the MetaBot! :robot_face: - This will spin up a background thread that opens and maintains a Slack WebSocket connection." + This will spin up a background thread that opens and maintains a Slack WebSocket connection." 
[] - (when (and (slack/slack-token) - (metabot-enabled)) - (log/info "Starting MetaBot WebSocket monitor thread...") - (start-websocket-monitor!))) + (future + (Thread/sleep (* 1000 (seconds-to-wait-before-starting))) + (when (and (slack/slack-token) + (metabot-enabled)) + (log/info (trs "Starting MetaBot threads...")) + (start-websocket-monitor!) + (start-instance-monitor!)))) (defn stop-metabot! "Stop the MetaBot! :robot_face: - This will stop the background thread that responsible for the Slack WebSocket connection." + This will stop the background thread that responsible for the Slack WebSocket connection." [] (log/info (trs "Stopping MetaBot... 🤖")) (reset! websocket-monitor-thread-id nil) diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj index 29a595b9e26a68ac848ae7b55fad76045e5fbce5..e88a0d3980f1140e433290e104cde34d8e68af10 100644 --- a/src/metabase/models/setting.clj +++ b/src/metabase/models/setting.clj @@ -42,7 +42,9 @@ [db :as mdb] [events :as events] [util :as u]] - [metabase.util.honeysql-extensions :as hx] + [metabase.util + [date :as du] + [honeysql-extensions :as hx]] [puppetlabs.i18n.core :refer [trs tru]] [schema.core :as s] [toucan @@ -60,7 +62,16 @@ (def ^:private Type - (s/enum :string :boolean :json :integer :double)) + (s/enum :string :boolean :json :integer :double :timestamp)) + +(def ^:private default-tag-for-type + "Type tag that will be included in the Setting's metadata, so that the getter function will not cause reflection + warnings." + {:string String + :boolean Boolean + :integer Long + :double Double + :timestamp java.sql.Timestamp}) (def ^:private SettingDefinition {:name s/Keyword @@ -69,7 +80,9 @@ :type Type ; all values are stored in DB as Strings, :getter clojure.lang.IFn ; different getters/setters take care of parsing/unparsing :setter clojure.lang.IFn - :internal? s/Bool}) ; should the API never return this setting? (default: false) + :tag (s/maybe Class) ; type annotation, e.g. 
^String, to be applied. Defaults to tag based on :type + :internal? s/Bool ; should the API never return this setting? (default: false) + :cache? s/Bool}) ; should the getter always fetch this value "fresh" from the DB? (default: false) (defonce ^:private registered-settings @@ -157,10 +170,13 @@ (when-let [last-known-update (core/get @cache settings-last-updated-key)] ;; compare it to the value in the DB. This is done be seeing whether a row exists ;; WHERE value > <local-value> - (db/select-one Setting - {:where [:and - [:= :key settings-last-updated-key] - [:> :value last-known-update]]}))))) + (u/prog1 (db/select-one Setting + {:where [:and + [:= :key settings-last-updated-key] + [:> :value last-known-update]]}) + (when <> + (log/info (u/format-color 'red + (trs "Settings have been changed on another instance, and will be reloaded here."))))))))) (def ^:private cache-update-check-interval-ms "How often we should check whether the Settings cache is out of date (which requires a DB call)?" @@ -227,15 +243,20 @@ (when (seq v) v))) +(def ^:private ^:dynamic *disable-cache* false) + (defn- db-value "Get the value, if any, of `setting-or-name` from the DB (using / restoring the cache as needed)." ^String [setting-or-name] - (restore-cache-if-needed!) - (clojure.core/get @cache (setting-name setting-or-name))) + (if *disable-cache* + (db/select-one-field :value Setting :key (setting-name setting-or-name)) + (do + (restore-cache-if-needed!) + (clojure.core/get @cache (setting-name setting-or-name))))) (defn get-string - "Get string value of `setting-or-name`. This is the default getter for `String` settings; valuBis fetched as follows: + "Get string value of `setting-or-name`. This is the default getter for `String` settings; value is fetched as follows: 1. From the database (i.e., set via the admin panel), if a value is present; 2. 
From corresponding env var, if any; @@ -285,18 +306,26 @@ [setting-or-name] (json/parse-string (get-string setting-or-name) keyword)) +(defn get-timestamp + "Get the string value of `setting-or-name` and parse it as an ISO-8601-formatted string, returning a Timestamp." + [setting-or-name] + (du/->Timestamp (get-string setting-or-name) :no-timezone)) + (def ^:private default-getter-for-type - {:string get-string - :boolean get-boolean - :integer get-integer - :json get-json - :double get-double}) + {:string get-string + :boolean get-boolean + :integer get-integer + :json get-json + :timestamp get-timestamp + :double get-double}) (defn get "Fetch the value of `setting-or-name`. What this means depends on the Setting's `:getter`; by default, this looks for first for a corresponding env var, then checks the cache, then returns the default value of the Setting, if any." [setting-or-name] - ((:getter (resolve-setting setting-or-name)))) + (let [{:keys [cache? getter]} (resolve-setting setting-or-name)] + (binding [*disable-cache* (not cache?)] + (getter)))) ;;; +----------------------------------------------------------------------------------------------------------------+ @@ -353,7 +382,10 @@ (swap! cache assoc setting-name new-value) (swap! cache dissoc setting-name)) ;; Record the fact that a Setting has been updated so eventaully other instances (if applicable) find out about it - (update-settings-last-updated!) + ;; (For Settings that don't use the Cache, don't update the `last-updated` value, because it will cause other + ;; instances to do needless reloading of the cache from the DB) + (when-not *disable-cache* + (update-settings-last-updated!)) ;; Now return the `new-value`. new-value)) @@ -389,15 +421,20 @@ (defn set-json! "Serialize `new-value` for `setting-or-name` as a JSON string and save it." [setting-or-name new-value] - (set-string! setting-or-name (when new-value - (json/generate-string new-value)))) + (set-string! 
setting-or-name (some-> new-value json/generate-string))) + +(defn set-timestamp! + "Serialize `new-value` for `setting-or-name` as a ISO 8601-encoded timestamp strign and save it." + [setting-or-name new-value] + (set-string! setting-or-name (some-> new-value du/date->iso-8601))) (def ^:private default-setter-for-type - {:string set-string! - :boolean set-boolean! - :integer set-integer! - :json set-json! - :double set-double!}) + {:string set-string! + :boolean set-boolean! + :integer set-integer! + :json set-json! + :timestamp set-timestamp! + :double set-double!}) (defn set! "Set the value of `setting-or-name`. What this means depends on the Setting's `:setter`; by default, this just updates @@ -409,7 +446,9 @@ (mandrill-api-key \"xyz123\")" [setting-or-name new-value] - ((:setter (resolve-setting setting-or-name)) new-value)) + (let [{:keys [setter cache?]} (resolve-setting setting-or-name)] + (binding [*disable-cache* (not cache?)] + (setter new-value)))) ;;; +----------------------------------------------------------------------------------------------------------------+ @@ -427,7 +466,9 @@ :default default :getter (partial (default-getter-for-type setting-type) setting-name) :setter (partial (default-setter-for-type setting-type) setting-name) - :internal? false} + :tag (default-tag-for-type setting-type) + :internal? false + :cache? true} (dissoc setting :name :type :default))) (s/validate SettingDefinition <>) (swap! registered-settings assoc setting-name <>))) @@ -440,10 +481,11 @@ (defn metadata-for-setting-fn "Create metadata for the function automatically generated by `defsetting`." - [{:keys [default description], setting-type :type, :as setting}] + [{:keys [default description tag], setting-type :type, :as setting}] {:arglists '([] [new-value]) ;; indentation below is intentional to make it clearer what shape the generated documentation is going to take. ;; Turn on auto-complete-mode in Emacs and see for yourself! 
+ :tag tag :doc (str/join "\n" [ description "" (format "`%s` is a %s Setting. You can get its value by calling:" (setting-name setting) (name setting-type)) @@ -501,7 +543,9 @@ * `:setter` - A custom setter fn, which takes a single argument. Overrides the default implementation. (This can in turn call functions in this namespace like `set-string!` or `set-boolean!` to invoke the default setter behavior. Keep in mind that the custom setter may be passed `nil`, which should - clear the values of the Setting.)" + clear the values of the Setting.) + * `:cache?` - Should this Setting be cached? (default `true`)? Be careful when disabling this, because it could + have a very negative performance impact." {:style/indent 1} [setting-symb description & {:as options}] {:pre [(symbol? setting-symb)]} diff --git a/src/metabase/query_processor/middleware/expand_macros.clj b/src/metabase/query_processor/middleware/expand_macros.clj index d4365fa7d252632e1a8feb8c2315d73de698e5a6..dd4a878c8c36635be1568d3f970a54953f446038 100644 --- a/src/metabase/query_processor/middleware/expand_macros.clj +++ b/src/metabase/query_processor/middleware/expand_macros.clj @@ -112,13 +112,14 @@ "Merge filter clauses." ([] []) ([clause] clause) - ([base-clause additional-clauses] - (cond - (and (seq base-clause) - (seq additional-clauses)) [:and base-clause additional-clauses] - (seq base-clause) base-clause - (seq additional-clauses) additional-clauses - :else []))) + ([base-clause & additional-clauses] + (let [additional-clauses (filter seq additional-clauses)] + (cond + (and (seq base-clause) + (seq additional-clauses)) (apply vector :and base-clause additional-clauses) + (seq base-clause) base-clause + (seq additional-clauses) (apply merge-filter-clauses additional-clauses) + :else [])))) (defn- add-metrics-filter-clauses "Add any FILTER-CLAUSES to the QUERY-DICT. 
If query has existing filter clauses, the new ones are diff --git a/src/metabase/setup.clj b/src/metabase/setup.clj index bdb1f1be029e927c39774b2a9b69b3eb0d734509..da1c2828b1c0ebd64fde1ec991e0f37d63423eed 100644 --- a/src/metabase/setup.clj +++ b/src/metabase/setup.clj @@ -1,27 +1,33 @@ -(ns metabase.setup) +(ns metabase.setup + (:require [metabase.models.setting :refer [Setting defsetting]] + [toucan.db :as db])) -(defonce ^:private setup-token - (atom nil)) +(defsetting ^:private setup-token + "A token used to signify that an instance has permissions to create the initial User. This is created upon the first + launch of Metabase, by the first instance; once used, it is cleared out, never to be used again." + :internal? true) (defn token-value "Return the value of the setup token, if any." [] - @setup-token) + (setup-token)) (defn token-match? "Function for checking if the supplied string matches our setup token. - Returns boolean `true` if supplied token matches `@setup-token`, `false` otherwise." + Returns boolean `true` if supplied token matches the setup token, `false` otherwise." [token] {:pre [(string? token)]} - (= token @setup-token)) + (= token (setup-token))) (defn create-token! - "Create and set a new `@setup-token`. - Returns the newly created token." + "Create and set a new setup token, if one has not already been created. Returns the newly created token." [] - (reset! setup-token (str (java.util.UUID/randomUUID)))) + ;; fetch the value directly from the DB; *do not* rely on cached value, in case a different instance came along and + ;; already created it + (or (db/select-one-field :value Setting :key "setup-token") + (setup-token (str (java.util.UUID/randomUUID))))) (defn clear-token! - "Clear the `@setup-token` if it exists and reset it to nil." + "Clear the setup token if it exists and reset it to `nil`." [] - (reset! 
setup-token nil)) + (setup-token nil)) diff --git a/src/metabase/sync/interface.clj b/src/metabase/sync/interface.clj index 1ee51060d5575a12c4841fb83555121ccdefc582..ade235b04f51f20a89b14afa1269e182b53fc08b 100644 --- a/src/metabase/sync/interface.clj +++ b/src/metabase/sync/interface.clj @@ -22,7 +22,7 @@ (def TableMetadataField "Schema for a given Field as provided in `describe-table`." {:name su/NonBlankString - :database-type su/NonBlankString + :database-type (s/maybe su/NonBlankString) ; blank if the Field is all NULL & untyped, i.e. in Mongo :base-type su/FieldType (s/optional-key :special-type) (s/maybe su/FieldType) (s/optional-key :pk?) s/Bool diff --git a/src/metabase/sync/sync_metadata/fields.clj b/src/metabase/sync/sync_metadata/fields.clj index 1aa22346f0148869fb7d004508d4ff36e59251ff..9f4fd5e12ba89bdad1a810f8cdc1907947449c22 100644 --- a/src/metabase/sync/sync_metadata/fields.clj +++ b/src/metabase/sync/sync_metadata/fields.clj @@ -74,7 +74,7 @@ {:table_id (u/get-id table) :name field-name :display_name (humanization/name->human-readable-name field-name) - :database_type database-type + :database_type (or database-type "NULL") ; placeholder for Fields w/ no type info (e.g. 
Mongo) & all NULL :base_type base-type :special_type (special-type field) :parent_id parent-id})))) diff --git a/src/metabase/util/date.clj b/src/metabase/util/date.clj index 2ad043a84d29ea060b1314144319add26eca8644..d74ccbe438a3d429b4d60c67d4d7d3917bbd0c1f 100644 --- a/src/metabase/util/date.clj +++ b/src/metabase/util/date.clj @@ -24,7 +24,7 @@ :tag TimeZone} *data-timezone*) -(defprotocol ITimeZoneCoercible +(defprotocol ^:private ITimeZoneCoercible "Coerce object to `java.util.TimeZone`" (coerce-to-timezone ^TimeZone [this] "Coerce `this` to `java.util.TimeZone`")) @@ -41,7 +41,8 @@ "UTC TimeZone" (coerce-to-timezone "UTC")) -(def ^:private jvm-timezone +(def jvm-timezone + "Machine time zone" (delay (coerce-to-timezone (System/getProperty "user.timezone")))) (defn- warn-on-timezone-conflict @@ -85,7 +86,7 @@ [db & body] `(call-with-effective-timezone ~db (fn [] ~@body))) -(defprotocol ITimestampCoercible +(defprotocol ^:private ITimestampCoercible "Coerce object to a `java.sql.Timestamp`." (coerce-to-timestamp ^java.sql.Timestamp [this] [this timezone-coercible] "Coerce this object to a `java.sql.Timestamp`. Strings are parsed as ISO-8601.")) @@ -110,7 +111,12 @@ (defn ^Timestamp ->Timestamp "Converts `coercible-to-ts` to a `java.util.Timestamp`. Requires a `coercible-to-tz` if converting a string. Leans - on clj-time to ensure correct conversions between the various types" + on clj-time to ensure correct conversions between the various types + + NOTE: This function requires you to pass in a timezone or bind `*report-timezone*`, probably to make sure you're not + doing something dumb by forgetting it. For cases where you'd just like to parse an ISO-8601-encoded String in peace + without specifying a timezone, pass in `:no-timezone` as the second param to explicitly have things parsed without + one. (Keep in mind that if your string does not specify a timezone, it will be parsed as UTC by default.)" ([coercible-to-ts] {:pre [(or (not (string? 
coercible-to-ts)) (and (string? coercible-to-ts) (bound? #'*report-timezone*)))]} @@ -119,10 +125,11 @@ {:pre [(or (not (string? coercible-to-ts)) (and (string? coercible-to-ts) timezone))]} (if (string? coercible-to-ts) - (coerce-to-timestamp (str->date-time coercible-to-ts (coerce-to-timezone timezone))) + (coerce-to-timestamp (str->date-time coercible-to-ts (when-not (= timezone :no-timezone) + (coerce-to-timezone timezone)))) (coerce-to-timestamp coercible-to-ts)))) -(defprotocol IDateTimeFormatterCoercible +(defprotocol ^:private IDateTimeFormatterCoercible "Protocol for converting objects to `DateTimeFormatters`." (->DateTimeFormatter ^org.joda.time.format.DateTimeFormatter [this] "Coerce object to a `DateTimeFormatter`.")) @@ -139,15 +146,15 @@ (defn parse-date - "Parse a datetime string S with a custom DATE-FORMAT, which can be a format string, clj-time formatter keyword, or + "Parse a datetime string `s` with a custom `date-format`, which can be a format string, clj-time formatter keyword, or anything else that can be coerced to a `DateTimeFormatter`. - (parse-date \"yyyyMMdd\" \"20160201\") -> #inst \"2016-02-01\" - (parse-date :date-time \"2016-02-01T00:00:00.000Z\") -> #inst \"2016-02-01\"" + (parse-date \"yyyyMMdd\" \"20160201\") -> #inst \"2016-02-01\" + (parse-date :date-time \"2016-02-01T00:00:00.000Z\") -> #inst \"2016-02-01\"" ^java.sql.Timestamp [date-format, ^String s] (->Timestamp (time/parse (->DateTimeFormatter date-format) s))) -(defprotocol ISO8601 +(defprotocol ^:private ISO8601 "Protocol for converting objects to ISO8601 formatted strings." 
(->iso-8601-datetime ^String [this timezone-id] "Coerce object to an ISO8601 date-time string such as \"2015-11-18T23:55:03.841Z\" with a given TIMEZONE.")) @@ -174,12 +181,12 @@ (time/formatters :time))))) (defn format-time - "Returns a string representation of the time found in `T`" + "Returns a string representation of the time found in `t`" [t time-zone-id] (time/unparse (time-formatter time-zone-id) (coerce/to-date-time t))) (defn is-time? - "Returns true if `V` is a Time object" + "Returns true if `v` is a Time object" [v] (and v (instance? Time v))) @@ -198,13 +205,13 @@ (->Timestamp (System/currentTimeMillis))) (defn format-date - "Format DATE using a given DATE-FORMAT. NOTE: This will create a date string in the JVM's timezone, not the report + "Format `date` using a given `date-format`. NOTE: This will create a date string in the JVM's timezone, not the report timezone. - DATE is anything that can coerced to a `Timestamp` via `->Timestamp`, such as a `Date`, `Timestamp`, - `Long` (ms since the epoch), or an ISO-8601 `String`. DATE defaults to the current moment in time. + `date` is anything that can coerced to a `Timestamp` via `->Timestamp`, such as a `Date`, `Timestamp`, + `Long` (ms since the epoch), or an ISO-8601 `String`. `date` defaults to the current moment in time. - DATE-FORMAT is anything that can be passed to `->DateTimeFormatter`, such as `String` + `date-format` is anything that can be passed to `->DateTimeFormatter`, such as `String` (using [the usual date format args](http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html)), `Keyword`, or `DateTimeFormatter`. @@ -218,7 +225,7 @@ (time/unparse (->DateTimeFormatter date-format) (coerce/from-sql-time (->Timestamp date))))) (def ^{:arglists '([] [date])} date->iso-8601 - "Format DATE a an ISO-8601 string." + "Format `date` a an ISO-8601 string." (partial format-date :date-time)) (defn date-string? 
@@ -231,14 +238,14 @@ (->Timestamp s utc))))) (defn ->Date - "Coerece DATE to a `java.util.Date`." + "Coerce `date` to a `java.util.Date`." (^java.util.Date [] (java.util.Date.)) (^java.util.Date [date] (java.util.Date. (.getTime (->Timestamp date))))) (defn ->Calendar - "Coerce DATE to a `java.util.Calendar`." + "Coerce `date` to a `java.util.Calendar`." (^java.util.Calendar [] (doto (Calendar/getInstance) (.setTimeZone (TimeZone/getTimeZone "UTC")))) @@ -250,7 +257,7 @@ (.setTimeZone (TimeZone/getTimeZone timezone-id))))) (defn relative-date - "Return a new `Timestamp` relative to the current time using a relative date UNIT. + "Return a new Timestamp relative to the current time using a relative date `unit`. (relative-date :year -1) -> #inst 2014-11-12 ..." (^java.sql.Timestamp [unit amount] @@ -275,7 +282,7 @@ :year}) (defn date-extract - "Extract UNIT from DATE. DATE defaults to now. + "Extract `unit` from `date`. `date` defaults to now. (date-extract :year) -> 2015" ([unit] @@ -324,7 +331,7 @@ (format "%d-%02d-01'T'ZZ" year month))) (defn date-trunc - "Truncate DATE to UNIT. DATE defaults to now. + "Truncate `date` to `unit`. `date` defaults to now. (date-trunc :month). ;; -> #inst \"2015-11-01T00:00:00\"" @@ -344,7 +351,7 @@ :year (trunc-with-format "yyyy-01-01'T'ZZ" date timezone-id)))) (defn date-trunc-or-extract - "Apply date bucketing with UNIT to DATE. DATE defaults to now." + "Apply date bucketing with `unit` to `date`. `date` defaults to now." ([unit] (date-trunc-or-extract unit (System/currentTimeMillis) "UTC")) ([unit date] @@ -369,9 +376,25 @@ (recur (/ n divisor) more) (format "%.0f %s" (double n) (name unit))))) +(defn format-microseconds + "Format a time interval in microseconds into something more readable." + ^String [microseconds] + (format-nanoseconds (* 1000.0 microseconds))) + +(defn format-milliseconds + "Format a time interval in milliseconds into something more readable." 
+ ^String [milliseconds] + (format-microseconds (* 1000.0 milliseconds))) + +(defn format-seconds + "Format a time interval in seconds into something more readable." + ^String [seconds] + (format-milliseconds (* 1000.0 seconds))) + +;; TODO - Not sure this belongs in the datetime util namespace (defmacro profile - "Like `clojure.core/time`, but lets you specify a MESSAGE that gets printed with the total time, - and formats the time nicely using `format-nanoseconds`." + "Like `clojure.core/time`, but lets you specify a `message` that gets printed with the total time, and formats the + time nicely using `format-nanoseconds`." {:style/indent 1} ([form] `(profile ~(str form) ~form)) @@ -383,8 +406,8 @@ (format-nanoseconds (- (System/nanoTime) start-time#)))))))) (defn- str->date-time-with-formatters - "Attempt to parse `DATE-STR` using `FORMATTERS`. First successful - parse is returned, or nil" + "Attempt to parse `date-str` using `formatters`. First successful parse is returned, or `nil` if it cannot be + successfully parsed." ([formatters date-str] (str->date-time-with-formatters formatters date-str nil)) ([formatters ^String date-str ^TimeZone tz] @@ -401,9 +424,9 @@ (->DateTimeFormatter "yyyy-MM-dd HH:mm:ss.SSS")) (def ^:private ordered-date-parsers - "When using clj-time.format/parse without a formatter, it tries all default formatters, but not ordered by how - likely the date formatters will succeed. This leads to very slow parsing as many attempts fail before the right one - is found. Using this retains that flexibility but improves performance by trying the most likely ones first" + "When using clj-time.format/parse without a formatter, it tries all default formatters, but not ordered by how likely + the date formatters will succeed. This leads to very slow parsing as many attempts fail before the right one is + found. 
Using this retains that flexibility but improves performance by trying the most likely ones first" (let [most-likely-default-formatters [:mysql :date-hour-minute-second :date-time :date :basic-date-time :basic-date-time-no-ms :date-time :date-time-no-ms]] @@ -412,7 +435,7 @@ (vals (apply dissoc time/formatters most-likely-default-formatters))))) (defn str->date-time - "Like clj-time.format/parse but uses an ordered list of parsers to be faster. Returns the parsed date or nil if it + "Like clj-time.format/parse but uses an ordered list of parsers to be faster. Returns the parsed date, or `nil` if it was unable to be parsed." (^org.joda.time.DateTime [^String date-str] (str->date-time date-str nil)) @@ -425,8 +448,7 @@ [(time/formatter "HH:mmZ") (time/formatter "HH:mm:SSZ") (time/formatter "HH:mm:SS.SSSZ")]))) (defn str->time - "Parse `TIME-STR` and return a `java.sql.Time` instance. Returns nil - if `TIME-STR` can't be parsed." + "Parse `time-str` and return a `java.sql.Time` instance. Returns `nil` if `time-str` can't be parsed." 
([^String date-str] (str->time date-str nil)) ([^String date-str ^TimeZone tz] diff --git a/test/metabase/automagic_dashboards/core_test.clj b/test/metabase/automagic_dashboards/core_test.clj index fd17104868250334e912012325ff03e054eece9c..9e7c3f72a9a92343762d116cad09b2d67b349140 100644 --- a/test/metabase/automagic_dashboards/core_test.clj +++ b/test/metabase/automagic_dashboards/core_test.clj @@ -1,14 +1,20 @@ (ns metabase.automagic-dashboards.core-test - (:require [expectations :refer :all] + (:require [clj-time + [core :as t] + [format :as t.format]] + [expectations :refer :all] [metabase.api.common :as api] [metabase.automagic-dashboards [core :refer :all :as magic] [rules :as rules]] [metabase.models [card :refer [Card]] + [collection :refer [Collection]] [database :refer [Database]] [field :as field :refer [Field]] [metric :refer [Metric]] + [permissions :as perms] + [permissions-group :as perms-group] [query :as query] [table :refer [Table] :as table] [user :as user]] @@ -16,6 +22,8 @@ [metabase.test.data :as data] [metabase.test.data.users :as test-users] [metabase.test.util :as tu] + [metabase.util.date :as date] + [puppetlabs.i18n.core :as i18n :refer [tru]] [toucan.db :as db] [toucan.util.test :as tt])) @@ -82,15 +90,15 @@ (tree-seq (some-fn sequential? map?) identity) (keep (fn [form] (when (map? form) - (:url form)))))) + ((some-fn :url :more) form)))))) (defn- valid-urls? [dashboard] (->> dashboard collect-urls (every? (fn [url] - ((test-users/user->client :rasta) :get 200 (format "automagic-dashboards/%s" - (subs url 16))))))) + ((test-users/user->client :rasta) :get 200 + (format "automagic-dashboards/%s" (subs url 16))))))) (def ^:private valid-card? (comp qp/expand :dataset_query)) @@ -103,12 +111,28 @@ (assert (every? valid-card? 
(keep :card (:ordered_cards dashboard)))) true) +(defn- test-automagic-analysis + ([entity] (test-automagic-analysis entity nil)) + ([entity cell-query] + ;; We want to both generate as many cards as we can to catch all aberrations, but also make sure + ;; that size limiting works. + (and (valid-dashboard? (automagic-analysis entity {:cell-query cell-query :show :all})) + (valid-dashboard? (automagic-analysis entity {:cell-query cell-query :show 1}))))) + (expect (with-rasta (with-dashboard-cleanup (->> (db/select Table :db_id (data/id)) - (keep #(automagic-analysis % {})) - (every? valid-dashboard?))))) + (every? test-automagic-analysis))))) + +(expect + (with-rasta + (with-dashboard-cleanup + (->> (automagic-analysis (Table (data/id :venues)) {:show 1}) + :ordered_cards + (filter :card) + count + (= 1))))) (expect (with-rasta @@ -116,28 +140,32 @@ (->> (db/select Field :table_id [:in (db/select-field :id Table :db_id (data/id))] :visibility_type "normal") - (keep #(automagic-analysis % {})) - (every? valid-dashboard?))))) + (every? test-automagic-analysis))))) (expect (tt/with-temp* [Metric [{metric-id :id} {:table_id (data/id :venues) :definition {:query {:aggregation ["count"]}}}]] (with-rasta (with-dashboard-cleanup - (->> (Metric) (keep #(automagic-analysis % {})) (every? valid-dashboard?)))))) + (->> (Metric) (every? test-automagic-analysis)))))) (expect - (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues) + (tt/with-temp* [Collection [{collection-id :id}] + Card [{card-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10] :source_table (data/id :venues)} :type :query :database (data/id)}}]] (with-rasta (with-dashboard-cleanup - (-> card-id Card (automagic-analysis {}) valid-dashboard?))))) + (perms/grant-collection-readwrite-permissions! 
(perms-group/all-users) collection-id) + (-> card-id Card test-automagic-analysis))))) (expect - (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues) + (tt/with-temp* [Collection [{collection-id :id}] + Card [{card-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:aggregation [[:count]] :breakout [[:field-id (data/id :venues :category_id)]] :source_table (data/id :venues)} @@ -145,80 +173,98 @@ :database (data/id)}}]] (with-rasta (with-dashboard-cleanup - (-> card-id Card (automagic-analysis {}) valid-dashboard?))))) + (-> card-id Card test-automagic-analysis))))) (expect - (tt/with-temp* [Card [{card-id :id} {:table_id nil + (tt/with-temp* [Collection [{collection-id :id}] + Card [{card-id :id} {:table_id nil + :collection_id collection-id :dataset_query {:native {:query "select * from users"} :type :native :database (data/id)}}]] (with-rasta (with-dashboard-cleanup - (-> card-id Card (automagic-analysis {}) valid-dashboard?))))) + (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id) + (-> card-id Card test-automagic-analysis))))) (expect - (tt/with-temp* [Card [{source-id :id} {:table_id (data/id :venues) + (tt/with-temp* [Collection [{collection-id :id}] + Card [{source-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:source_table (data/id :venues)} :type :query :database (data/id)}}] Card [{card-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10] :source_table (str "card__" source-id)} :type :query :database -1337}}]] (with-rasta (with-dashboard-cleanup - (-> card-id Card (automagic-analysis {}) valid-dashboard?))))) + (perms/grant-collection-readwrite-permissions! 
(perms-group/all-users) collection-id) + (-> card-id Card test-automagic-analysis))))) (expect - (tt/with-temp* [Card [{source-id :id} {:table_id nil + (tt/with-temp* [Collection [{collection-id :id}] + Card [{source-id :id} {:table_id nil + :collection_id collection-id :dataset_query {:native {:query "select * from users"} :type :native :database (data/id)}}] Card [{card-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10] :source_table (str "card__" source-id)} :type :query :database -1337}}]] (with-rasta (with-dashboard-cleanup - (-> card-id Card (automagic-analysis {}) valid-dashboard?))))) + (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id) + (-> card-id Card test-automagic-analysis))))) (expect - (tt/with-temp* [Card [{card-id :id} {:table_id nil + (tt/with-temp* [Collection [{collection-id :id}] + Card [{card-id :id} {:table_id nil + :collection_id collection-id :dataset_query {:native {:query "select * from users"} :type :native :database (data/id)}}]] (with-rasta (with-dashboard-cleanup - (-> card-id Card (automagic-analysis {}) valid-dashboard?))))) + (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id) + (-> card-id Card test-automagic-analysis))))) (expect - (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues) + (tt/with-temp* [Collection [{collection-id :id}] + Card [{card-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10] :source_table (data/id :venues)} :type :query :database (data/id)}}]] (with-rasta (with-dashboard-cleanup + (perms/grant-collection-readwrite-permissions! 
(perms-group/all-users) collection-id) (-> card-id Card - (automagic-analysis {:cell-query [:= [:field-id (data/id :venues :category_id)] 2]}) - valid-dashboard?))))) + (test-automagic-analysis [:= [:field-id (data/id :venues :category_id)] 2])))))) (expect - (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues) + (tt/with-temp* [Collection [{collection-id :id}] + Card [{card-id :id} {:table_id (data/id :venues) + :collection_id collection-id :dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10] :source_table (data/id :venues)} :type :query :database (data/id)}}]] (with-rasta (with-dashboard-cleanup + (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id) (-> card-id Card - (automagic-analysis {:cell-query [:!= [:field-id (data/id :venues :category_id)] 2]}) - valid-dashboard?))))) + (test-automagic-analysis [:= [:field-id (data/id :venues :category_id)] 2])))))) (expect @@ -228,7 +274,7 @@ :source_table (data/id :venues)} :type :query :database (data/id)})] - (-> q (automagic-analysis {}) valid-dashboard?))))) + (test-automagic-analysis q))))) (expect (with-rasta @@ -238,7 +284,7 @@ :source_table (data/id :venues)} :type :query :database (data/id)})] - (-> q (automagic-analysis {}) valid-dashboard?))))) + (test-automagic-analysis q))))) (expect (with-rasta @@ -248,7 +294,7 @@ :source_table (data/id :checkins)} :type :query :database (data/id)})] - (-> q (automagic-analysis {}) valid-dashboard?))))) + (test-automagic-analysis q))))) (expect (with-rasta @@ -257,9 +303,7 @@ :source_table (data/id :venues)} :type :query :database (data/id)})] - (-> q - (automagic-analysis {:cell-query [:= [:field-id (data/id :venues :category_id)] 2]}) - valid-dashboard?))))) + (test-automagic-analysis q [:= [:field-id (data/id :venues :category_id)] 2]))))) ;;; ------------------- /candidates ------------------- @@ -408,3 +452,60 @@ (#'magic/optimal-datetime-resolution {:fingerprint {:type {:type/DateTime {:earliest 
"2017-01-01T00:00:00" :latest "2017-01-01T00:02:00"}}}})) + + +;;; ------------------- Datetime humanization (for chart and dashboard titles) ------------------- + +(let [tz (-> date/jvm-timezone deref ^TimeZone .getID) + dt (t/from-time-zone (t/date-time 1990 9 9 12 30) + (t/time-zone-for-id tz)) + unparse-with-formatter (fn [formatter dt] + (t.format/unparse + (t.format/formatter formatter (t/time-zone-for-id tz)) dt))] + (expect + [(tru "at {0}" (unparse-with-formatter "h:mm a, MMMM d, YYYY" dt)) + (tru "at {0}" (unparse-with-formatter "h a, MMMM d, YYYY" dt)) + (tru "on {0}" (unparse-with-formatter "MMMM d, YYYY" dt)) + (tru "in {0} week - {1}" + (#'magic/pluralize (date/date-extract :week-of-year dt tz)) + (str (date/date-extract :year dt tz))) + (tru "in {0}" (unparse-with-formatter "MMMM YYYY" dt)) + (tru "in Q{0} - {1}" + (date/date-extract :quarter-of-year dt tz) + (str (date/date-extract :year dt tz))) + (unparse-with-formatter "YYYY" dt) + (unparse-with-formatter "EEEE" dt) + (tru "at {0}" (unparse-with-formatter "h a" dt)) + (unparse-with-formatter "MMMM" dt) + (tru "Q{0}" (date/date-extract :quarter-of-year dt tz)) + (date/date-extract :minute-of-hour dt tz) + (date/date-extract :day-of-month dt tz) + (date/date-extract :week-of-year dt tz)] + (let [dt (t.format/unparse (t.format/formatters :date-hour-minute-second) dt)] + [(#'magic/humanize-datetime dt :minute) + (#'magic/humanize-datetime dt :hour) + (#'magic/humanize-datetime dt :day) + (#'magic/humanize-datetime dt :week) + (#'magic/humanize-datetime dt :month) + (#'magic/humanize-datetime dt :quarter) + (#'magic/humanize-datetime dt :year) + (#'magic/humanize-datetime dt :day-of-week) + (#'magic/humanize-datetime dt :hour-of-day) + (#'magic/humanize-datetime dt :month-of-year) + (#'magic/humanize-datetime dt :quarter-of-year) + (#'magic/humanize-datetime dt :minute-of-hour) + (#'magic/humanize-datetime dt :day-of-month) + (#'magic/humanize-datetime dt :week-of-year)]))) + +(expect + [(tru "{0}st" 
1) + (tru "{0}nd" 22) + (tru "{0}rd" 303) + (tru "{0}th" 0) + (tru "{0}th" 8)] + (map #'magic/pluralize [1 22 303 0 8])) + +;; Make sure we have handlers for all the units available +(expect + (every? (partial #'magic/humanize-datetime "1990-09-09T12:30:00") + (concat (var-get #'date/date-extract-units) (var-get #'date/date-trunc-units)))) diff --git a/test/metabase/automagic_dashboards/filters_test.clj b/test/metabase/automagic_dashboards/filters_test.clj index 28f3e6584d08378e691066376a2e9873e2db5b2b..0c64afef3910cf613e903d9ec06febc39101572a 100644 --- a/test/metabase/automagic_dashboards/filters_test.clj +++ b/test/metabase/automagic_dashboards/filters_test.clj @@ -12,7 +12,7 @@ ;; If there's no overlap between filter clauses, just merge using `:and`. (expect - [:and [:and [:and [:= [:field-id 3] 42] [:= [:fk-> 1 9] "foo"]] [:> [:field-id 2] 10]] [:< [:field-id 2] 100]] + [:and [:= [:field-id 3] 42] [:= [:fk-> 1 9] "foo"] [:> [:field-id 2] 10] [:< [:field-id 2] 100]] (inject-refinement [:and [:= [:fk-> 1 9] "foo"] [:and [:> [:field-id 2] 10] [:< [:field-id 2] 100]]] diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj index 4a346543db6a20e7ac834315ba7cdb6f819b8137..7c4b6fb1cd64823313f544fd50cf473ebfd2f171 100644 --- a/test/metabase/driver/mongo_test.clj +++ b/test/metabase/driver/mongo_test.clj @@ -122,6 +122,24 @@ :pk? true}}} (driver/describe-table (MongoDriver.) 
(data/db) (Table (data/id :venues)))) +;; Make sure that all-NULL columns are synced correctly (#6875) +(i/def-database-definition ^:private all-null-columns + [["bird_species" + [{:field-name "name", :base-type :type/Text} + {:field-name "favorite_snack", :base-type :type/Text}] + [["House Finch" nil] + ["Mourning Dove" nil]]]]) + +(datasets/expect-with-engine :mongo + [{:name "_id", :database_type "java.lang.Long", :base_type :type/Integer, :special_type :type/PK} + {:name "favorite_snack", :database_type "NULL", :base_type :type/*, :special_type nil} + {:name "name", :database_type "java.lang.String", :base_type :type/Text, :special_type :type/Name}] + (data/dataset metabase.driver.mongo-test/all-null-columns + (map (partial into {}) + (db/select [Field :name :database_type :base_type :special_type] + :table_id (data/id :bird_species) + {:order-by [:name]})))) + ;;; table-rows-sample (datasets/expect-with-engine :mongo diff --git a/test/metabase/metabot_test.clj b/test/metabase/metabot_test.clj new file mode 100644 index 0000000000000000000000000000000000000000..be6bee6467c2e4576f38c97d0605df0c5fd22317 --- /dev/null +++ b/test/metabase/metabot_test.clj @@ -0,0 +1,37 @@ +(ns metabase.metabot-test + (:require [expectations :refer :all] + [metabase.metabot :as metabot] + [metabase.util.date :as du])) + +;; test that if we're not the MetaBot based on Settings, our function to check is working correctly +(expect + false + (do + (#'metabot/metabot-instance-uuid nil) + (#'metabot/metabot-instance-last-checkin nil) + (#'metabot/am-i-the-metabot?))) + +;; test that if nobody is currently the MetaBot, we will become the MetaBot +(expect + (do + (#'metabot/metabot-instance-uuid nil) + (#'metabot/metabot-instance-last-checkin nil) + (#'metabot/check-and-update-instance-status!) 
+ (#'metabot/am-i-the-metabot?))) + +;; test that if nobody has checked in as MetaBot for a while, we will become the MetaBot +(expect + (do + (#'metabot/metabot-instance-uuid (str (java.util.UUID/randomUUID))) + (#'metabot/metabot-instance-last-checkin (du/relative-date :minute -10 (#'metabot/current-timestamp-from-db))) + (#'metabot/check-and-update-instance-status!) + (#'metabot/am-i-the-metabot?))) + +;; check that if another instance has checked in recently, we will *not* become the MetaBot +(expect + false + (do + (#'metabot/metabot-instance-uuid (str (java.util.UUID/randomUUID))) + (#'metabot/metabot-instance-last-checkin (#'metabot/current-timestamp-from-db)) + (#'metabot/check-and-update-instance-status!) + (#'metabot/am-i-the-metabot?))) diff --git a/test/metabase/models/dashboard_test.clj b/test/metabase/models/dashboard_test.clj index f3548eb6f6c6cee6f1b7dbccb9601acd5856708f..1c7338e6491f802e8b26a15fe453e86c385905b6 100644 --- a/test/metabase/models/dashboard_test.clj +++ b/test/metabase/models/dashboard_test.clj @@ -229,14 +229,15 @@ ;; test that we save a transient dashboard (expect - 8 (tu/with-model-cleanup ['Card 'Dashboard 'DashboardCard 'Collection] (binding [api/*current-user-id* (users/user->id :rasta) api/*current-user-permissions-set* (-> :rasta users/user->id user/permissions-set atom)] - (->> (magic/automagic-analysis (Table (id :venues)) {}) - save-transient-dashboard! - :id - (db/count 'DashboardCard :dashboard_id))))) + (let [dashboard (magic/automagic-analysis (Table (id :venues)) {})] + (->> dashboard + save-transient-dashboard! 
+ :id + (db/count 'DashboardCard :dashboard_id) + (= (-> dashboard :ordered_cards count))))))) diff --git a/test/metabase/models/setting_test.clj b/test/metabase/models/setting_test.clj index c7c55ef06ac6b7eb920b6d8b3b4acf98c50f3def..6474e20c67236e392d84e3050f9a7a292cd7bd8c 100644 --- a/test/metabase/models/setting_test.clj +++ b/test/metabase/models/setting_test.clj @@ -49,6 +49,10 @@ (test-setting-2 setting-2-value)) +(expect + String + (:tag (meta #'test-setting-1))) + ;; ## GETTERS ;; Test defsetting getter fn. Should return the value from env var MB_TEST_SETTING_1 (expect "ABCDEFG" @@ -219,6 +223,10 @@ ;;; ------------------------------------------------ BOOLEAN SETTINGS ------------------------------------------------ +(expect + Boolean + (:tag (meta #'test-boolean-setting))) + (expect {:value nil, :is_env_setting false, :env_name "MB_TEST_BOOLEAN_SETTING", :default nil} (user-facing-info-with-db-and-env-var-values :test-boolean-setting nil nil)) @@ -293,6 +301,10 @@ ;; ok, make sure the setting was set (toucan-name))) +(expect + String + (:tag (meta #'toucan-name))) + ;;; ----------------------------------------------- Encrypted Settings ----------------------------------------------- @@ -323,6 +335,23 @@ (actual-value-in-db :toucan-name))) +;;; ----------------------------------------------- TIMESTAMP SETTINGS ----------------------------------------------- + +(defsetting ^:private test-timestamp-setting + "Test timestamp setting" + :type :timestamp) + +(expect + java.sql.Timestamp + (:tag (meta #'test-timestamp-setting))) + +;; make sure we can set & fetch the value and that it gets serialized/deserialized correctly +(expect + #inst "2018-07-11T09:32:00.000Z" + (do (test-timestamp-setting #inst "2018-07-11T09:32:00.000Z") + (test-timestamp-setting))) + + ;;; --------------------------------------------- Cache Synchronization ---------------------------------------------- (def ^:private settings-last-updated-key @(resolve 
'metabase.models.setting/settings-last-updated-key)) @@ -444,3 +473,34 @@ ;; detect a cache out-of-date situation and flush the cache as appropriate, giving us the updated value when we ;; call! :wow: (toucan-name))) + + +;;; ----------------------------------------------- Uncached Settings ------------------------------------------------ + +(defsetting ^:private uncached-setting + "A test setting that should *not* be cached." + :cache? false) + +;; make sure uncached setting still saves to the DB +(expect + "ABCDEF" + (encryption-test/with-secret-key nil + (uncached-setting "ABCDEF") + (actual-value-in-db "uncached-setting"))) + +;; make sure that fetching the Setting always fetches the latest value from the DB +(expect + "123456" + (encryption-test/with-secret-key nil + (uncached-setting "ABCDEF") + (db/update-where! Setting {:key "uncached-setting"} + :value "123456") + (uncached-setting))) + +;; make sure that updating the setting doesn't update the last-updated timestamp in the cache +(expect + nil + (encryption-test/with-secret-key nil + (clear-settings-last-updated-value-in-db!) + (uncached-setting "abcdef") + (settings-last-updated-value-in-db))) diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj index 006e5cf54a798830e5f89b0e9df7472d8509d03b..304763012af27fa8462cea0706ca89344c01a66d 100644 --- a/test/metabase/test/util.clj +++ b/test/metabase/test/util.clj @@ -37,7 +37,8 @@ [dataset-definitions :as defs]] [toucan.db :as db] [toucan.util.test :as test]) - (:import java.util.TimeZone + (:import com.mchange.v2.c3p0.PooledDataSource + java.util.TimeZone org.joda.time.DateTimeZone [org.quartz CronTrigger JobDetail JobKey Scheduler Trigger])) @@ -451,7 +452,7 @@ timezone. That causes problems for tests that we can determine the database's timezone. 
This function will reset the connections in the connection pool for `db` to ensure that we get fresh session with no timezone specified" [db] - (when-let [conn-pool (:datasource (sql/db->pooled-connection-spec db))] + (when-let [^PooledDataSource conn-pool (:datasource (sql/db->pooled-connection-spec db))] (.softResetAllUsers conn-pool))) (defn db-timezone-id