diff --git a/.babelrc b/.babelrc
index 64c3c7c8291b6b605c3af5c578b8a70c43b5ecd6..d71a945b42ee2f5ce98ffedd50a8f391b4d47ac1 100644
--- a/.babelrc
+++ b/.babelrc
@@ -1,5 +1,5 @@
 {
-  "plugins": ["transform-decorators-legacy"],
+  "plugins": ["transform-flow-strip-types", "transform-decorators-legacy"],
   "presets": ["es2015", "stage-0", "react"],
   "env": {
     "development": {
diff --git a/.dir-locals.el b/.dir-locals.el
index cc95b68f811235e0b39be112d72cf0ca028b902a..05897084cadb09137777d16a32f8cf450980a4a1 100644
--- a/.dir-locals.el
+++ b/.dir-locals.el
@@ -36,7 +36,6 @@
     (match 1)
     (match-$ 1)
     (org-perms-case 1)
-    (post-insert 1)
     (post-select 1)
     (pre-cascade-delete 1)
     (pre-insert 1)
diff --git a/.eslintrc b/.eslintrc
index d25ef1e66c705191b872f9213d21d078fa6283f9..9959b38e84ff5682bbf9496a184dbb33206c953d 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -34,7 +34,9 @@
         "react/display-name": 1,
         "react/prop-types": 0,
         "react/no-did-mount-set-state": 0,
-        "react/no-did-update-set-state": 0
+        "react/no-did-update-set-state": 0,
+        "flow-vars/define-flow-type": 1,
+        "flow-vars/use-flow-type": 1
     },
     "globals": {
         "angular": false
@@ -46,7 +48,8 @@
     },
     "parser": "babel-eslint",
     "plugins": [
-        "react"
+        "react",
+        "flow-vars"
     ],
     "extends": ["eslint:recommended", "plugin:react/recommended"]
 }
diff --git a/.flowconfig b/.flowconfig
new file mode 100644
index 0000000000000000000000000000000000000000..8c7aa0cb03d3c19d5fa3276d233337fc077b815d
--- /dev/null
+++ b/.flowconfig
@@ -0,0 +1,18 @@
+[ignore]
+.*/node_modules/react/node_modules/.*
+.*/node_modules/postcss-import/node_modules/.*
+.*/node_modules/fixed-data-table/.*
+.*/node_modules/@kadira/storybook/node_modules/.*
+
+[include]
+
+[libs]
+./frontend/interfaces
+
+[options]
+module.system.node.resolve_dirname=node_modules
+module.system.node.resolve_dirname=frontend/src
+esproposal.decorators=ignore
+esproposal.class_static_fields=enable
+esproposal.class_instance_fields=enable
+suppress_comment= \\(.\\|\n\\)*\\$FlowFixMe
diff --git a/.reduxrc b/.reduxrc
index e3c001fd1b416aa3c4fc3bebf2024384d2364146..e096200b91dd761ee5586669caeb146152ef07f5 100644
--- a/.reduxrc
+++ b/.reduxrc
@@ -1,5 +1,5 @@
 {
-  "sourceBase":"frontend/src",
+  "sourceBase":"frontend/src/metabase",
   "testBase":"frontend/test/unit",
   "smartPath":"containers",
   "dumbPath":"components",
diff --git a/.storybook/webpack.config.js b/.storybook/webpack.config.js
index c5abda8cca8ce28cdbebca25b70f276f4c0db58d..46f15dcaebf4c603a21c3d8a7c3206919e130eeb 100644
--- a/.storybook/webpack.config.js
+++ b/.storybook/webpack.config.js
@@ -2,7 +2,7 @@ const path = require('path');
 
 const WEBPACK_CONFIG = require("../webpack.config");
 
-var SRC_PATH = path.resolve(__dirname, '../frontend/src');
+var SRC_PATH = path.resolve(__dirname, '../frontend/src/metabase');
 
 var CSS_CONFIG = {
     localIdentName: "[name]__[local]___[hash:base64:5]",
diff --git a/bin/update-emoji b/bin/update-emoji
index 943780eb142948dfc7497b0b7997001cc8500f06..a9b7cd5854d050d370eb91b434569b4a0146750b 100755
--- a/bin/update-emoji
+++ b/bin/update-emoji
@@ -44,4 +44,4 @@ orderedEmoji.forEach(function(emoji) {
     }
 });
 
-fs.writeFileSync("frontend/src/lib/emoji.json", JSON.stringify(results));
+fs.writeFileSync("frontend/src/metabase/lib/emoji.json", JSON.stringify(results));
diff --git a/frontend/interfaces/angular.js b/frontend/interfaces/angular.js
new file mode 100644
index 0000000000000000000000000000000000000000..ef98d365764f199ee06df4892155e7b606e39d58
--- /dev/null
+++ b/frontend/interfaces/angular.js
@@ -0,0 +1 @@
+declare var angular: any;
diff --git
a/frontend/src/Routes.jsx b/frontend/src/metabase/Routes.jsx similarity index 100% rename from frontend/src/Routes.jsx rename to frontend/src/metabase/Routes.jsx diff --git a/frontend/src/admin/databases/components/CreatedDatabaseModal.jsx b/frontend/src/metabase/admin/databases/components/CreatedDatabaseModal.jsx similarity index 100% rename from frontend/src/admin/databases/components/CreatedDatabaseModal.jsx rename to frontend/src/metabase/admin/databases/components/CreatedDatabaseModal.jsx diff --git a/frontend/src/admin/databases/components/DatabaseEdit.jsx b/frontend/src/metabase/admin/databases/components/DatabaseEdit.jsx similarity index 100% rename from frontend/src/admin/databases/components/DatabaseEdit.jsx rename to frontend/src/metabase/admin/databases/components/DatabaseEdit.jsx diff --git a/frontend/src/admin/databases/components/DatabaseEditForms.jsx b/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx similarity index 100% rename from frontend/src/admin/databases/components/DatabaseEditForms.jsx rename to frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx diff --git a/frontend/src/admin/databases/components/DatabaseList.jsx b/frontend/src/metabase/admin/databases/components/DatabaseList.jsx similarity index 100% rename from frontend/src/admin/databases/components/DatabaseList.jsx rename to frontend/src/metabase/admin/databases/components/DatabaseList.jsx diff --git a/frontend/src/admin/databases/components/DeleteDatabaseModal.jsx b/frontend/src/metabase/admin/databases/components/DeleteDatabaseModal.jsx similarity index 100% rename from frontend/src/admin/databases/components/DeleteDatabaseModal.jsx rename to frontend/src/metabase/admin/databases/components/DeleteDatabaseModal.jsx diff --git a/frontend/src/admin/databases/databases.controllers.js b/frontend/src/metabase/admin/databases/databases.controllers.js similarity index 100% rename from frontend/src/admin/databases/databases.controllers.js rename to frontend/src/metabase/admin/databases/databases.controllers.js diff --git a/frontend/src/admin/databases/databases.module.js b/frontend/src/metabase/admin/databases/databases.module.js similarity index 100% rename from frontend/src/admin/databases/databases.module.js rename to frontend/src/metabase/admin/databases/databases.module.js diff --git a/frontend/src/admin/datamodel/actions.js b/frontend/src/metabase/admin/datamodel/actions.js similarity index 100% rename from frontend/src/admin/datamodel/actions.js rename to frontend/src/metabase/admin/datamodel/actions.js diff --git a/frontend/src/admin/datamodel/components/FieldSet.jsx b/frontend/src/metabase/admin/datamodel/components/FieldSet.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/FieldSet.jsx rename to frontend/src/metabase/admin/datamodel/components/FieldSet.jsx diff --git a/frontend/src/admin/datamodel/components/FormInput.jsx b/frontend/src/metabase/admin/datamodel/components/FormInput.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/FormInput.jsx rename to frontend/src/metabase/admin/datamodel/components/FormInput.jsx diff --git a/frontend/src/admin/datamodel/components/FormLabel.jsx b/frontend/src/metabase/admin/datamodel/components/FormLabel.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/FormLabel.jsx rename to frontend/src/metabase/admin/datamodel/components/FormLabel.jsx diff --git a/frontend/src/admin/datamodel/components/FormTextArea.jsx 
b/frontend/src/metabase/admin/datamodel/components/FormTextArea.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/FormTextArea.jsx
rename to frontend/src/metabase/admin/datamodel/components/FormTextArea.jsx
diff --git a/frontend/src/admin/datamodel/components/ObjectActionSelect.jsx b/frontend/src/metabase/admin/datamodel/components/ObjectActionSelect.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/ObjectActionSelect.jsx
rename to frontend/src/metabase/admin/datamodel/components/ObjectActionSelect.jsx
diff --git a/frontend/src/admin/datamodel/components/ObjectRetireModal.jsx b/frontend/src/metabase/admin/datamodel/components/ObjectRetireModal.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/ObjectRetireModal.jsx
rename to frontend/src/metabase/admin/datamodel/components/ObjectRetireModal.jsx
diff --git a/frontend/src/admin/datamodel/components/PartialQueryBuilder.jsx b/frontend/src/metabase/admin/datamodel/components/PartialQueryBuilder.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/PartialQueryBuilder.jsx
rename to frontend/src/metabase/admin/datamodel/components/PartialQueryBuilder.jsx
diff --git a/frontend/src/admin/datamodel/components/database/ColumnItem.jsx b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/ColumnItem.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx
diff --git a/frontend/src/admin/datamodel/components/database/ColumnsList.jsx b/frontend/src/metabase/admin/datamodel/components/database/ColumnsList.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/ColumnsList.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/ColumnsList.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetadataEditor.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataEditor.jsx
similarity index 95%
rename from frontend/src/admin/datamodel/components/database/MetadataEditor.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataEditor.jsx
index fb15da524d7f652ac05b34caa3b45660b3248195..87db1662d1900fac347284e98183d79b93cf530a 100644
--- a/frontend/src/admin/datamodel/components/database/MetadataEditor.jsx
+++ b/frontend/src/metabase/admin/datamodel/components/database/MetadataEditor.jsx
@@ -93,13 +93,13 @@ export default class MetadataEditor extends Component {
             }
         } else {
             content = (
-                <div className="flex flex-full layout-centered">
+                <div style={{paddingTop: "10rem"}} className="full text-centered">
                     <h2 className="text-grey-3">Select any table to see its schema and add or edit metadata.</h2>
                 </div>
             );
         }
         return (
-            <div className="MetadataEditor full-height p3">
+            <div className="p3">
                 <MetadataHeader
                     ref="header"
                     databaseId={this.props.databaseId}
@@ -108,7 +108,7 @@ export default class MetadataEditor extends Component {
                     isShowingSchema={this.state.isShowingSchema}
                     toggleShowSchema={this.toggleShowSchema}
                 />
-                <div className="MetadataEditor-main flex flex-row flex-full mt2 full-height">
+                <div style={{minHeight: "60vh"}} className="flex flex-row flex-full mt2 full-height">
                     <MetadataTablePicker
                         tableId={this.props.tableId}
                         tables={(this.props.databaseMetadata) ? this.props.databaseMetadata.tables : []}
diff --git a/frontend/src/admin/datamodel/components/database/MetadataHeader.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataHeader.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetadataHeader.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataHeader.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetadataSchema.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataSchema.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetadataSchema.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataSchema.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetadataSchemaList.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataSchemaList.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetadataSchemaList.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataSchemaList.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetadataTable.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataTable.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetadataTable.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataTable.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetadataTableList.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataTableList.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetadataTableList.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataTableList.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetadataTablePicker.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataTablePicker.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetadataTablePicker.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetadataTablePicker.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetricItem.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetricItem.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetricItem.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetricItem.jsx
diff --git a/frontend/src/admin/datamodel/components/database/MetricsList.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetricsList.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/MetricsList.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/MetricsList.jsx
diff --git a/frontend/src/admin/datamodel/components/database/SegmentItem.jsx b/frontend/src/metabase/admin/datamodel/components/database/SegmentItem.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/SegmentItem.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/SegmentItem.jsx
diff --git a/frontend/src/admin/datamodel/components/database/SegmentsList.jsx b/frontend/src/metabase/admin/datamodel/components/database/SegmentsList.jsx
similarity index 100%
rename from frontend/src/admin/datamodel/components/database/SegmentsList.jsx
rename to frontend/src/metabase/admin/datamodel/components/database/SegmentsList.jsx
diff --git
a/frontend/src/admin/datamodel/components/revisions/QueryDiff.jsx b/frontend/src/metabase/admin/datamodel/components/revisions/QueryDiff.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/revisions/QueryDiff.jsx rename to frontend/src/metabase/admin/datamodel/components/revisions/QueryDiff.jsx diff --git a/frontend/src/admin/datamodel/components/revisions/Revision.jsx b/frontend/src/metabase/admin/datamodel/components/revisions/Revision.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/revisions/Revision.jsx rename to frontend/src/metabase/admin/datamodel/components/revisions/Revision.jsx diff --git a/frontend/src/admin/datamodel/components/revisions/RevisionDiff.jsx b/frontend/src/metabase/admin/datamodel/components/revisions/RevisionDiff.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/revisions/RevisionDiff.jsx rename to frontend/src/metabase/admin/datamodel/components/revisions/RevisionDiff.jsx diff --git a/frontend/src/admin/datamodel/components/revisions/RevisionHistory.jsx b/frontend/src/metabase/admin/datamodel/components/revisions/RevisionHistory.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/revisions/RevisionHistory.jsx rename to frontend/src/metabase/admin/datamodel/components/revisions/RevisionHistory.jsx diff --git a/frontend/src/admin/datamodel/components/revisions/TextDiff.jsx b/frontend/src/metabase/admin/datamodel/components/revisions/TextDiff.jsx similarity index 100% rename from frontend/src/admin/datamodel/components/revisions/TextDiff.jsx rename to frontend/src/metabase/admin/datamodel/components/revisions/TextDiff.jsx diff --git a/frontend/src/admin/datamodel/containers/MetricApp.jsx b/frontend/src/metabase/admin/datamodel/containers/MetricApp.jsx similarity index 100% rename from frontend/src/admin/datamodel/containers/MetricApp.jsx rename to frontend/src/metabase/admin/datamodel/containers/MetricApp.jsx diff --git a/frontend/src/admin/datamodel/containers/MetricForm.jsx b/frontend/src/metabase/admin/datamodel/containers/MetricForm.jsx similarity index 100% rename from frontend/src/admin/datamodel/containers/MetricForm.jsx rename to frontend/src/metabase/admin/datamodel/containers/MetricForm.jsx diff --git a/frontend/src/admin/datamodel/containers/RevisionHistoryApp.jsx b/frontend/src/metabase/admin/datamodel/containers/RevisionHistoryApp.jsx similarity index 100% rename from frontend/src/admin/datamodel/containers/RevisionHistoryApp.jsx rename to frontend/src/metabase/admin/datamodel/containers/RevisionHistoryApp.jsx diff --git a/frontend/src/admin/datamodel/containers/SegmentApp.jsx b/frontend/src/metabase/admin/datamodel/containers/SegmentApp.jsx similarity index 100% rename from frontend/src/admin/datamodel/containers/SegmentApp.jsx rename to frontend/src/metabase/admin/datamodel/containers/SegmentApp.jsx diff --git a/frontend/src/admin/datamodel/containers/SegmentForm.jsx b/frontend/src/metabase/admin/datamodel/containers/SegmentForm.jsx similarity index 100% rename from frontend/src/admin/datamodel/containers/SegmentForm.jsx rename to frontend/src/metabase/admin/datamodel/containers/SegmentForm.jsx diff --git a/frontend/src/admin/datamodel/datamodel.controllers.js b/frontend/src/metabase/admin/datamodel/datamodel.controllers.js similarity index 100% rename from frontend/src/admin/datamodel/datamodel.controllers.js rename to frontend/src/metabase/admin/datamodel/datamodel.controllers.js diff --git a/frontend/src/admin/datamodel/datamodel.module.js 
b/frontend/src/metabase/admin/datamodel/datamodel.module.js similarity index 100% rename from frontend/src/admin/datamodel/datamodel.module.js rename to frontend/src/metabase/admin/datamodel/datamodel.module.js diff --git a/frontend/src/admin/datamodel/reducers.js b/frontend/src/metabase/admin/datamodel/reducers.js similarity index 100% rename from frontend/src/admin/datamodel/reducers.js rename to frontend/src/metabase/admin/datamodel/reducers.js diff --git a/frontend/src/admin/datamodel/selectors.js b/frontend/src/metabase/admin/datamodel/selectors.js similarity index 100% rename from frontend/src/admin/datamodel/selectors.js rename to frontend/src/metabase/admin/datamodel/selectors.js diff --git a/frontend/src/admin/people/actions.js b/frontend/src/metabase/admin/people/actions.js similarity index 100% rename from frontend/src/admin/people/actions.js rename to frontend/src/metabase/admin/people/actions.js diff --git a/frontend/src/admin/people/components/AdminPeople.jsx b/frontend/src/metabase/admin/people/components/AdminPeople.jsx similarity index 100% rename from frontend/src/admin/people/components/AdminPeople.jsx rename to frontend/src/metabase/admin/people/components/AdminPeople.jsx diff --git a/frontend/src/admin/people/components/EditUserForm.jsx b/frontend/src/metabase/admin/people/components/EditUserForm.jsx similarity index 100% rename from frontend/src/admin/people/components/EditUserForm.jsx rename to frontend/src/metabase/admin/people/components/EditUserForm.jsx diff --git a/frontend/src/admin/people/components/UserActionsSelect.jsx b/frontend/src/metabase/admin/people/components/UserActionsSelect.jsx similarity index 100% rename from frontend/src/admin/people/components/UserActionsSelect.jsx rename to frontend/src/metabase/admin/people/components/UserActionsSelect.jsx diff --git a/frontend/src/admin/people/components/UserRoleSelect.jsx b/frontend/src/metabase/admin/people/components/UserRoleSelect.jsx similarity index 100% rename from frontend/src/admin/people/components/UserRoleSelect.jsx rename to frontend/src/metabase/admin/people/components/UserRoleSelect.jsx diff --git a/frontend/src/admin/people/containers/AdminPeopleApp.jsx b/frontend/src/metabase/admin/people/containers/AdminPeopleApp.jsx similarity index 100% rename from frontend/src/admin/people/containers/AdminPeopleApp.jsx rename to frontend/src/metabase/admin/people/containers/AdminPeopleApp.jsx diff --git a/frontend/src/admin/people/people.controllers.js b/frontend/src/metabase/admin/people/people.controllers.js similarity index 100% rename from frontend/src/admin/people/people.controllers.js rename to frontend/src/metabase/admin/people/people.controllers.js diff --git a/frontend/src/admin/people/people.module.js b/frontend/src/metabase/admin/people/people.module.js similarity index 100% rename from frontend/src/admin/people/people.module.js rename to frontend/src/metabase/admin/people/people.module.js diff --git a/frontend/src/admin/people/reducers.js b/frontend/src/metabase/admin/people/reducers.js similarity index 100% rename from frontend/src/admin/people/reducers.js rename to frontend/src/metabase/admin/people/reducers.js diff --git a/frontend/src/admin/people/selectors.js b/frontend/src/metabase/admin/people/selectors.js similarity index 100% rename from frontend/src/admin/people/selectors.js rename to frontend/src/metabase/admin/people/selectors.js diff --git a/frontend/src/admin/settings/components/SettingsEditor.jsx b/frontend/src/metabase/admin/settings/components/SettingsEditor.jsx similarity 
index 100%
rename from frontend/src/admin/settings/components/SettingsEditor.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsEditor.jsx
diff --git a/frontend/src/admin/settings/components/SettingsEmailForm.jsx b/frontend/src/metabase/admin/settings/components/SettingsEmailForm.jsx
similarity index 100%
rename from frontend/src/admin/settings/components/SettingsEmailForm.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsEmailForm.jsx
diff --git a/frontend/src/admin/settings/components/SettingsEmailFormElement.jsx b/frontend/src/metabase/admin/settings/components/SettingsEmailFormElement.jsx
similarity index 100%
rename from frontend/src/admin/settings/components/SettingsEmailFormElement.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsEmailFormElement.jsx
diff --git a/frontend/src/admin/settings/components/SettingsHeader.jsx b/frontend/src/metabase/admin/settings/components/SettingsHeader.jsx
similarity index 100%
rename from frontend/src/admin/settings/components/SettingsHeader.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsHeader.jsx
diff --git a/frontend/src/admin/settings/components/SettingsSetting.jsx b/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
similarity index 100%
rename from frontend/src/admin/settings/components/SettingsSetting.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
diff --git a/frontend/src/admin/settings/components/SettingsSlackForm.jsx b/frontend/src/metabase/admin/settings/components/SettingsSlackForm.jsx
similarity index 100%
rename from frontend/src/admin/settings/components/SettingsSlackForm.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsSlackForm.jsx
diff --git a/frontend/src/admin/settings/components/SettingsUpdatesForm.jsx b/frontend/src/metabase/admin/settings/components/SettingsUpdatesForm.jsx
similarity index 100%
rename from frontend/src/admin/settings/components/SettingsUpdatesForm.jsx
rename to frontend/src/metabase/admin/settings/components/SettingsUpdatesForm.jsx
diff --git a/frontend/src/admin/settings/settings.controllers.js b/frontend/src/metabase/admin/settings/settings.controllers.js
similarity index 100%
rename from frontend/src/admin/settings/settings.controllers.js
rename to frontend/src/metabase/admin/settings/settings.controllers.js
diff --git a/frontend/src/admin/settings/settings.module.js b/frontend/src/metabase/admin/settings/settings.module.js
similarity index 100%
rename from frontend/src/admin/settings/settings.module.js
rename to frontend/src/metabase/admin/settings/settings.module.js
diff --git a/frontend/src/app.js b/frontend/src/metabase/app.js
similarity index 99%
rename from frontend/src/app.js
rename to frontend/src/metabase/app.js
index d6d199fdcf1fc242975bf868b015e1dbce3baf7a..f53c12590a1195902fdde7c323eb0951dd4ed883 100644
--- a/frontend/src/app.js
+++ b/frontend/src/metabase/app.js
@@ -1,3 +1,5 @@
+/* @flow weak */
+
 import 'babel-polyfill';
 
 // angular:
@@ -31,6 +33,7 @@ import "./admin/people/people.module";
 import "./admin/settings/settings.module";
 import "./admin/datamodel/datamodel.module";
 
+
 import Routes from "./Routes.jsx";
 import { createStoreWithAngularScope, combineReducers } from "metabase/lib/redux";
 
diff --git a/frontend/src/auth/auth.controllers.js b/frontend/src/metabase/auth/auth.controllers.js
similarity index 100%
rename from frontend/src/auth/auth.controllers.js
rename to frontend/src/metabase/auth/auth.controllers.js
diff --git
a/frontend/src/auth/auth.module.js b/frontend/src/metabase/auth/auth.module.js similarity index 100% rename from frontend/src/auth/auth.module.js rename to frontend/src/metabase/auth/auth.module.js diff --git a/frontend/src/auth/auth.services.js b/frontend/src/metabase/auth/auth.services.js similarity index 100% rename from frontend/src/auth/auth.services.js rename to frontend/src/metabase/auth/auth.services.js diff --git a/frontend/src/card/card.controllers.js b/frontend/src/metabase/card/card.controllers.js similarity index 100% rename from frontend/src/card/card.controllers.js rename to frontend/src/metabase/card/card.controllers.js diff --git a/frontend/src/card/card.module.js b/frontend/src/metabase/card/card.module.js similarity index 100% rename from frontend/src/card/card.module.js rename to frontend/src/metabase/card/card.module.js diff --git a/frontend/src/components/ActionButton.jsx b/frontend/src/metabase/components/ActionButton.jsx similarity index 100% rename from frontend/src/components/ActionButton.jsx rename to frontend/src/metabase/components/ActionButton.jsx diff --git a/frontend/src/components/AddToDashSelectDashModal.jsx b/frontend/src/metabase/components/AddToDashSelectDashModal.jsx similarity index 100% rename from frontend/src/components/AddToDashSelectDashModal.jsx rename to frontend/src/metabase/components/AddToDashSelectDashModal.jsx diff --git a/frontend/src/components/BodyComponent.jsx b/frontend/src/metabase/components/BodyComponent.jsx similarity index 100% rename from frontend/src/components/BodyComponent.jsx rename to frontend/src/metabase/components/BodyComponent.jsx diff --git a/frontend/src/components/Breadcrumbs.css b/frontend/src/metabase/components/Breadcrumbs.css similarity index 100% rename from frontend/src/components/Breadcrumbs.css rename to frontend/src/metabase/components/Breadcrumbs.css diff --git a/frontend/src/components/Breadcrumbs.jsx b/frontend/src/metabase/components/Breadcrumbs.jsx similarity index 100% rename from frontend/src/components/Breadcrumbs.jsx rename to frontend/src/metabase/components/Breadcrumbs.jsx diff --git a/frontend/src/components/ButtonBar.jsx b/frontend/src/metabase/components/ButtonBar.jsx similarity index 100% rename from frontend/src/components/ButtonBar.jsx rename to frontend/src/metabase/components/ButtonBar.jsx diff --git a/frontend/src/components/Calendar.css b/frontend/src/metabase/components/Calendar.css similarity index 100% rename from frontend/src/components/Calendar.css rename to frontend/src/metabase/components/Calendar.css diff --git a/frontend/src/components/Calendar.jsx b/frontend/src/metabase/components/Calendar.jsx similarity index 100% rename from frontend/src/components/Calendar.jsx rename to frontend/src/metabase/components/Calendar.jsx diff --git a/frontend/src/components/CheckBox.jsx b/frontend/src/metabase/components/CheckBox.jsx similarity index 100% rename from frontend/src/components/CheckBox.jsx rename to frontend/src/metabase/components/CheckBox.jsx diff --git a/frontend/src/components/ColumnarSelector.css b/frontend/src/metabase/components/ColumnarSelector.css similarity index 100% rename from frontend/src/components/ColumnarSelector.css rename to frontend/src/metabase/components/ColumnarSelector.css diff --git a/frontend/src/components/ColumnarSelector.jsx b/frontend/src/metabase/components/ColumnarSelector.jsx similarity index 100% rename from frontend/src/components/ColumnarSelector.jsx rename to frontend/src/metabase/components/ColumnarSelector.jsx diff --git 
a/frontend/src/components/Confirm.jsx b/frontend/src/metabase/components/Confirm.jsx similarity index 100% rename from frontend/src/components/Confirm.jsx rename to frontend/src/metabase/components/Confirm.jsx diff --git a/frontend/src/components/CreateDashboardModal.jsx b/frontend/src/metabase/components/CreateDashboardModal.jsx similarity index 100% rename from frontend/src/components/CreateDashboardModal.jsx rename to frontend/src/metabase/components/CreateDashboardModal.jsx diff --git a/frontend/src/components/DashboardsDropdown.jsx b/frontend/src/metabase/components/DashboardsDropdown.jsx similarity index 100% rename from frontend/src/components/DashboardsDropdown.jsx rename to frontend/src/metabase/components/DashboardsDropdown.jsx diff --git a/frontend/src/components/DatabaseDetailsForm.jsx b/frontend/src/metabase/components/DatabaseDetailsForm.jsx similarity index 100% rename from frontend/src/components/DatabaseDetailsForm.jsx rename to frontend/src/metabase/components/DatabaseDetailsForm.jsx diff --git a/frontend/src/components/DeleteModalWithConfirm.jsx b/frontend/src/metabase/components/DeleteModalWithConfirm.jsx similarity index 100% rename from frontend/src/components/DeleteModalWithConfirm.jsx rename to frontend/src/metabase/components/DeleteModalWithConfirm.jsx diff --git a/frontend/src/components/DeleteQuestionModal.jsx b/frontend/src/metabase/components/DeleteQuestionModal.jsx similarity index 100% rename from frontend/src/components/DeleteQuestionModal.jsx rename to frontend/src/metabase/components/DeleteQuestionModal.jsx diff --git a/frontend/src/components/Ellipsified.jsx b/frontend/src/metabase/components/Ellipsified.jsx similarity index 100% rename from frontend/src/components/Ellipsified.jsx rename to frontend/src/metabase/components/Ellipsified.jsx diff --git a/frontend/src/components/Expandable.jsx b/frontend/src/metabase/components/Expandable.jsx similarity index 100% rename from frontend/src/components/Expandable.jsx rename to frontend/src/metabase/components/Expandable.jsx diff --git a/frontend/src/components/ExplicitSize.jsx b/frontend/src/metabase/components/ExplicitSize.jsx similarity index 100% rename from frontend/src/components/ExplicitSize.jsx rename to frontend/src/metabase/components/ExplicitSize.jsx diff --git a/frontend/src/components/FormField.jsx b/frontend/src/metabase/components/FormField.jsx similarity index 100% rename from frontend/src/components/FormField.jsx rename to frontend/src/metabase/components/FormField.jsx diff --git a/frontend/src/components/Header.jsx b/frontend/src/metabase/components/Header.jsx similarity index 100% rename from frontend/src/components/Header.jsx rename to frontend/src/metabase/components/Header.jsx diff --git a/frontend/src/components/HeaderBar.jsx b/frontend/src/metabase/components/HeaderBar.jsx similarity index 100% rename from frontend/src/components/HeaderBar.jsx rename to frontend/src/metabase/components/HeaderBar.jsx diff --git a/frontend/src/components/HistoryModal.jsx b/frontend/src/metabase/components/HistoryModal.jsx similarity index 100% rename from frontend/src/components/HistoryModal.jsx rename to frontend/src/metabase/components/HistoryModal.jsx diff --git a/frontend/src/components/Icon.jsx b/frontend/src/metabase/components/Icon.jsx similarity index 100% rename from frontend/src/components/Icon.jsx rename to frontend/src/metabase/components/Icon.jsx diff --git a/frontend/src/components/IconBorder.jsx b/frontend/src/metabase/components/IconBorder.jsx similarity index 100% rename from 
frontend/src/components/IconBorder.jsx rename to frontend/src/metabase/components/IconBorder.jsx diff --git a/frontend/src/components/Input.jsx b/frontend/src/metabase/components/Input.jsx similarity index 100% rename from frontend/src/components/Input.jsx rename to frontend/src/metabase/components/Input.jsx diff --git a/frontend/src/components/ListSearchField.jsx b/frontend/src/metabase/components/ListSearchField.jsx similarity index 100% rename from frontend/src/components/ListSearchField.jsx rename to frontend/src/metabase/components/ListSearchField.jsx diff --git a/frontend/src/components/LoadingAndErrorWrapper.jsx b/frontend/src/metabase/components/LoadingAndErrorWrapper.jsx similarity index 100% rename from frontend/src/components/LoadingAndErrorWrapper.jsx rename to frontend/src/metabase/components/LoadingAndErrorWrapper.jsx diff --git a/frontend/src/components/LoadingSpinner.css b/frontend/src/metabase/components/LoadingSpinner.css similarity index 100% rename from frontend/src/components/LoadingSpinner.css rename to frontend/src/metabase/components/LoadingSpinner.css diff --git a/frontend/src/components/LoadingSpinner.jsx b/frontend/src/metabase/components/LoadingSpinner.jsx similarity index 100% rename from frontend/src/components/LoadingSpinner.jsx rename to frontend/src/metabase/components/LoadingSpinner.jsx diff --git a/frontend/src/components/LogoIcon.jsx b/frontend/src/metabase/components/LogoIcon.jsx similarity index 100% rename from frontend/src/components/LogoIcon.jsx rename to frontend/src/metabase/components/LogoIcon.jsx diff --git a/frontend/src/components/Logs.jsx b/frontend/src/metabase/components/Logs.jsx similarity index 100% rename from frontend/src/components/Logs.jsx rename to frontend/src/metabase/components/Logs.jsx diff --git a/frontend/src/components/Modal.jsx b/frontend/src/metabase/components/Modal.jsx similarity index 100% rename from frontend/src/components/Modal.jsx rename to frontend/src/metabase/components/Modal.jsx diff --git a/frontend/src/components/ModalContent.jsx b/frontend/src/metabase/components/ModalContent.jsx similarity index 100% rename from frontend/src/components/ModalContent.jsx rename to frontend/src/metabase/components/ModalContent.jsx diff --git a/frontend/src/components/ModalWithTrigger.jsx b/frontend/src/metabase/components/ModalWithTrigger.jsx similarity index 100% rename from frontend/src/components/ModalWithTrigger.jsx rename to frontend/src/metabase/components/ModalWithTrigger.jsx diff --git a/frontend/src/components/Navbar.jsx b/frontend/src/metabase/components/Navbar.jsx similarity index 100% rename from frontend/src/components/Navbar.jsx rename to frontend/src/metabase/components/Navbar.jsx diff --git a/frontend/src/components/NewsletterForm.jsx b/frontend/src/metabase/components/NewsletterForm.jsx similarity index 100% rename from frontend/src/components/NewsletterForm.jsx rename to frontend/src/metabase/components/NewsletterForm.jsx diff --git a/frontend/src/components/NotFound.jsx b/frontend/src/metabase/components/NotFound.jsx similarity index 100% rename from frontend/src/components/NotFound.jsx rename to frontend/src/metabase/components/NotFound.jsx diff --git a/frontend/src/components/OnClickOutsideWrapper.jsx b/frontend/src/metabase/components/OnClickOutsideWrapper.jsx similarity index 100% rename from frontend/src/components/OnClickOutsideWrapper.jsx rename to frontend/src/metabase/components/OnClickOutsideWrapper.jsx diff --git a/frontend/src/components/PasswordReveal.jsx 
b/frontend/src/metabase/components/PasswordReveal.jsx similarity index 100% rename from frontend/src/components/PasswordReveal.jsx rename to frontend/src/metabase/components/PasswordReveal.jsx diff --git a/frontend/src/components/Popover.jsx b/frontend/src/metabase/components/Popover.jsx similarity index 100% rename from frontend/src/components/Popover.jsx rename to frontend/src/metabase/components/Popover.jsx diff --git a/frontend/src/components/PopoverWithTrigger.jsx b/frontend/src/metabase/components/PopoverWithTrigger.jsx similarity index 100% rename from frontend/src/components/PopoverWithTrigger.jsx rename to frontend/src/metabase/components/PopoverWithTrigger.jsx diff --git a/frontend/src/components/ProfileLink.jsx b/frontend/src/metabase/components/ProfileLink.jsx similarity index 100% rename from frontend/src/components/ProfileLink.jsx rename to frontend/src/metabase/components/ProfileLink.jsx diff --git a/frontend/src/components/ProgressBar.jsx b/frontend/src/metabase/components/ProgressBar.jsx similarity index 100% rename from frontend/src/components/ProgressBar.jsx rename to frontend/src/metabase/components/ProgressBar.jsx diff --git a/frontend/src/components/QuestionSavedModal.jsx b/frontend/src/metabase/components/QuestionSavedModal.jsx similarity index 100% rename from frontend/src/components/QuestionSavedModal.jsx rename to frontend/src/metabase/components/QuestionSavedModal.jsx diff --git a/frontend/src/components/RadioSelect.jsx b/frontend/src/metabase/components/RadioSelect.jsx similarity index 100% rename from frontend/src/components/RadioSelect.jsx rename to frontend/src/metabase/components/RadioSelect.jsx diff --git a/frontend/src/components/SaveQuestionModal.jsx b/frontend/src/metabase/components/SaveQuestionModal.jsx similarity index 100% rename from frontend/src/components/SaveQuestionModal.jsx rename to frontend/src/metabase/components/SaveQuestionModal.jsx diff --git a/frontend/src/components/SaveStatus.jsx b/frontend/src/metabase/components/SaveStatus.jsx similarity index 100% rename from frontend/src/components/SaveStatus.jsx rename to frontend/src/metabase/components/SaveStatus.jsx diff --git a/frontend/src/components/Select.jsx b/frontend/src/metabase/components/Select.jsx similarity index 100% rename from frontend/src/components/Select.jsx rename to frontend/src/metabase/components/Select.jsx diff --git a/frontend/src/components/SortableItemList.css b/frontend/src/metabase/components/SortableItemList.css similarity index 100% rename from frontend/src/components/SortableItemList.css rename to frontend/src/metabase/components/SortableItemList.css diff --git a/frontend/src/components/SortableItemList.jsx b/frontend/src/metabase/components/SortableItemList.jsx similarity index 100% rename from frontend/src/components/SortableItemList.jsx rename to frontend/src/metabase/components/SortableItemList.jsx diff --git a/frontend/src/components/StackedCheckBox.jsx b/frontend/src/metabase/components/StackedCheckBox.jsx similarity index 100% rename from frontend/src/components/StackedCheckBox.jsx rename to frontend/src/metabase/components/StackedCheckBox.jsx diff --git a/frontend/src/components/Toggle.css b/frontend/src/metabase/components/Toggle.css similarity index 100% rename from frontend/src/components/Toggle.css rename to frontend/src/metabase/components/Toggle.css diff --git a/frontend/src/components/Toggle.jsx b/frontend/src/metabase/components/Toggle.jsx similarity index 100% rename from frontend/src/components/Toggle.jsx rename to 
frontend/src/metabase/components/Toggle.jsx diff --git a/frontend/src/components/Tooltip.jsx b/frontend/src/metabase/components/Tooltip.jsx similarity index 100% rename from frontend/src/components/Tooltip.jsx rename to frontend/src/metabase/components/Tooltip.jsx diff --git a/frontend/src/components/TooltipPopover.jsx b/frontend/src/metabase/components/TooltipPopover.jsx similarity index 100% rename from frontend/src/components/TooltipPopover.jsx rename to frontend/src/metabase/components/TooltipPopover.jsx diff --git a/frontend/src/components/Triggerable.jsx b/frontend/src/metabase/components/Triggerable.jsx similarity index 100% rename from frontend/src/components/Triggerable.jsx rename to frontend/src/metabase/components/Triggerable.jsx diff --git a/frontend/src/components/UserAvatar.jsx b/frontend/src/metabase/components/UserAvatar.jsx similarity index 100% rename from frontend/src/components/UserAvatar.jsx rename to frontend/src/metabase/components/UserAvatar.jsx diff --git a/frontend/src/components/form/FormField.jsx b/frontend/src/metabase/components/form/FormField.jsx similarity index 100% rename from frontend/src/components/form/FormField.jsx rename to frontend/src/metabase/components/form/FormField.jsx diff --git a/frontend/src/components/form/FormLabel.jsx b/frontend/src/metabase/components/form/FormLabel.jsx similarity index 100% rename from frontend/src/components/form/FormLabel.jsx rename to frontend/src/metabase/components/form/FormLabel.jsx diff --git a/frontend/src/components/form/FormMessage.jsx b/frontend/src/metabase/components/form/FormMessage.jsx similarity index 100% rename from frontend/src/components/form/FormMessage.jsx rename to frontend/src/metabase/components/form/FormMessage.jsx diff --git a/frontend/src/components/icons/ClockIcon.jsx b/frontend/src/metabase/components/icons/ClockIcon.jsx similarity index 100% rename from frontend/src/components/icons/ClockIcon.jsx rename to frontend/src/metabase/components/icons/ClockIcon.jsx diff --git a/frontend/src/components/icons/CountdownIcon.jsx b/frontend/src/metabase/components/icons/CountdownIcon.jsx similarity index 100% rename from frontend/src/components/icons/CountdownIcon.jsx rename to frontend/src/metabase/components/icons/CountdownIcon.jsx diff --git a/frontend/src/components/icons/FullscreenIcon.jsx b/frontend/src/metabase/components/icons/FullscreenIcon.jsx similarity index 100% rename from frontend/src/components/icons/FullscreenIcon.jsx rename to frontend/src/metabase/components/icons/FullscreenIcon.jsx diff --git a/frontend/src/components/icons/NightModeIcon.jsx b/frontend/src/metabase/components/icons/NightModeIcon.jsx similarity index 100% rename from frontend/src/components/icons/NightModeIcon.jsx rename to frontend/src/metabase/components/icons/NightModeIcon.jsx diff --git a/frontend/src/controllers.js b/frontend/src/metabase/controllers.js similarity index 100% rename from frontend/src/controllers.js rename to frontend/src/metabase/controllers.js diff --git a/frontend/src/css/admin.css b/frontend/src/metabase/css/admin.css similarity index 100% rename from frontend/src/css/admin.css rename to frontend/src/metabase/css/admin.css diff --git a/frontend/src/css/card.css b/frontend/src/metabase/css/card.css similarity index 100% rename from frontend/src/css/card.css rename to frontend/src/metabase/css/card.css diff --git a/frontend/src/css/components/buttons.css b/frontend/src/metabase/css/components/buttons.css similarity index 100% rename from frontend/src/css/components/buttons.css rename to 
frontend/src/metabase/css/components/buttons.css diff --git a/frontend/src/css/components/dropdown.css b/frontend/src/metabase/css/components/dropdown.css similarity index 100% rename from frontend/src/css/components/dropdown.css rename to frontend/src/metabase/css/components/dropdown.css diff --git a/frontend/src/css/components/form.css b/frontend/src/metabase/css/components/form.css similarity index 100% rename from frontend/src/css/components/form.css rename to frontend/src/metabase/css/components/form.css diff --git a/frontend/src/css/components/header.css b/frontend/src/metabase/css/components/header.css similarity index 100% rename from frontend/src/css/components/header.css rename to frontend/src/metabase/css/components/header.css diff --git a/frontend/src/css/components/icons.css b/frontend/src/metabase/css/components/icons.css similarity index 100% rename from frontend/src/css/components/icons.css rename to frontend/src/metabase/css/components/icons.css diff --git a/frontend/src/css/components/mb_data_table.css b/frontend/src/metabase/css/components/mb_data_table.css similarity index 100% rename from frontend/src/css/components/mb_data_table.css rename to frontend/src/metabase/css/components/mb_data_table.css diff --git a/frontend/src/css/components/modal.css b/frontend/src/metabase/css/components/modal.css similarity index 100% rename from frontend/src/css/components/modal.css rename to frontend/src/metabase/css/components/modal.css diff --git a/frontend/src/css/components/popover.css b/frontend/src/metabase/css/components/popover.css similarity index 100% rename from frontend/src/css/components/popover.css rename to frontend/src/metabase/css/components/popover.css diff --git a/frontend/src/css/components/select.css b/frontend/src/metabase/css/components/select.css similarity index 100% rename from frontend/src/css/components/select.css rename to frontend/src/metabase/css/components/select.css diff --git a/frontend/src/css/components/table.css b/frontend/src/metabase/css/components/table.css similarity index 100% rename from frontend/src/css/components/table.css rename to frontend/src/metabase/css/components/table.css diff --git a/frontend/src/css/core/arrow.css b/frontend/src/metabase/css/core/arrow.css similarity index 100% rename from frontend/src/css/core/arrow.css rename to frontend/src/metabase/css/core/arrow.css diff --git a/frontend/src/css/core/base.css b/frontend/src/metabase/css/core/base.css similarity index 100% rename from frontend/src/css/core/base.css rename to frontend/src/metabase/css/core/base.css diff --git a/frontend/src/css/core/bordered.css b/frontend/src/metabase/css/core/bordered.css similarity index 100% rename from frontend/src/css/core/bordered.css rename to frontend/src/metabase/css/core/bordered.css diff --git a/frontend/src/css/core/box_sizing.css b/frontend/src/metabase/css/core/box_sizing.css similarity index 100% rename from frontend/src/css/core/box_sizing.css rename to frontend/src/metabase/css/core/box_sizing.css diff --git a/frontend/src/css/core/breakpoints.css b/frontend/src/metabase/css/core/breakpoints.css similarity index 100% rename from frontend/src/css/core/breakpoints.css rename to frontend/src/metabase/css/core/breakpoints.css diff --git a/frontend/src/css/core/clearfix.css b/frontend/src/metabase/css/core/clearfix.css similarity index 100% rename from frontend/src/css/core/clearfix.css rename to frontend/src/metabase/css/core/clearfix.css diff --git a/frontend/src/css/core/colors.css b/frontend/src/metabase/css/core/colors.css 
similarity index 100% rename from frontend/src/css/core/colors.css rename to frontend/src/metabase/css/core/colors.css diff --git a/frontend/src/css/core/cursor.css b/frontend/src/metabase/css/core/cursor.css similarity index 100% rename from frontend/src/css/core/cursor.css rename to frontend/src/metabase/css/core/cursor.css diff --git a/frontend/src/css/core/flex.css b/frontend/src/metabase/css/core/flex.css similarity index 100% rename from frontend/src/css/core/flex.css rename to frontend/src/metabase/css/core/flex.css diff --git a/frontend/src/css/core/float.css b/frontend/src/metabase/css/core/float.css similarity index 100% rename from frontend/src/css/core/float.css rename to frontend/src/metabase/css/core/float.css diff --git a/frontend/src/css/core/grid.css b/frontend/src/metabase/css/core/grid.css similarity index 100% rename from frontend/src/css/core/grid.css rename to frontend/src/metabase/css/core/grid.css diff --git a/frontend/src/css/core/headings.css b/frontend/src/metabase/css/core/headings.css similarity index 100% rename from frontend/src/css/core/headings.css rename to frontend/src/metabase/css/core/headings.css diff --git a/frontend/src/css/core/hide.css b/frontend/src/metabase/css/core/hide.css similarity index 100% rename from frontend/src/css/core/hide.css rename to frontend/src/metabase/css/core/hide.css diff --git a/frontend/src/css/core/index.css b/frontend/src/metabase/css/core/index.css similarity index 100% rename from frontend/src/css/core/index.css rename to frontend/src/metabase/css/core/index.css diff --git a/frontend/src/css/core/inputs.css b/frontend/src/metabase/css/core/inputs.css similarity index 100% rename from frontend/src/css/core/inputs.css rename to frontend/src/metabase/css/core/inputs.css diff --git a/frontend/src/css/core/layout.css b/frontend/src/metabase/css/core/layout.css similarity index 100% rename from frontend/src/css/core/layout.css rename to frontend/src/metabase/css/core/layout.css diff --git a/frontend/src/css/core/link.css b/frontend/src/metabase/css/core/link.css similarity index 100% rename from frontend/src/css/core/link.css rename to frontend/src/metabase/css/core/link.css diff --git a/frontend/src/css/core/rounded.css b/frontend/src/metabase/css/core/rounded.css similarity index 100% rename from frontend/src/css/core/rounded.css rename to frontend/src/metabase/css/core/rounded.css diff --git a/frontend/src/css/core/scroll.css b/frontend/src/metabase/css/core/scroll.css similarity index 100% rename from frontend/src/css/core/scroll.css rename to frontend/src/metabase/css/core/scroll.css diff --git a/frontend/src/css/core/shadow.css b/frontend/src/metabase/css/core/shadow.css similarity index 100% rename from frontend/src/css/core/shadow.css rename to frontend/src/metabase/css/core/shadow.css diff --git a/frontend/src/css/core/spacing.css b/frontend/src/metabase/css/core/spacing.css similarity index 100% rename from frontend/src/css/core/spacing.css rename to frontend/src/metabase/css/core/spacing.css diff --git a/frontend/src/css/core/text.css b/frontend/src/metabase/css/core/text.css similarity index 100% rename from frontend/src/css/core/text.css rename to frontend/src/metabase/css/core/text.css diff --git a/frontend/src/css/core/transitions.css b/frontend/src/metabase/css/core/transitions.css similarity index 100% rename from frontend/src/css/core/transitions.css rename to frontend/src/metabase/css/core/transitions.css diff --git a/frontend/src/css/dashboard.css b/frontend/src/metabase/css/dashboard.css similarity index 
100% rename from frontend/src/css/dashboard.css rename to frontend/src/metabase/css/dashboard.css diff --git a/frontend/src/css/home.css b/frontend/src/metabase/css/home.css similarity index 100% rename from frontend/src/css/home.css rename to frontend/src/metabase/css/home.css diff --git a/frontend/src/css/index.css b/frontend/src/metabase/css/index.css similarity index 100% rename from frontend/src/css/index.css rename to frontend/src/metabase/css/index.css diff --git a/frontend/src/css/login.css b/frontend/src/metabase/css/login.css similarity index 100% rename from frontend/src/css/login.css rename to frontend/src/metabase/css/login.css diff --git a/frontend/src/css/pulse.css b/frontend/src/metabase/css/pulse.css similarity index 100% rename from frontend/src/css/pulse.css rename to frontend/src/metabase/css/pulse.css diff --git a/frontend/src/css/query_builder.css b/frontend/src/metabase/css/query_builder.css similarity index 100% rename from frontend/src/css/query_builder.css rename to frontend/src/metabase/css/query_builder.css diff --git a/frontend/src/css/setup.css b/frontend/src/metabase/css/setup.css similarity index 100% rename from frontend/src/css/setup.css rename to frontend/src/metabase/css/setup.css diff --git a/frontend/src/css/tutorial.css b/frontend/src/metabase/css/tutorial.css similarity index 100% rename from frontend/src/css/tutorial.css rename to frontend/src/metabase/css/tutorial.css diff --git a/frontend/src/css/vendor.css b/frontend/src/metabase/css/vendor.css similarity index 100% rename from frontend/src/css/vendor.css rename to frontend/src/metabase/css/vendor.css diff --git a/frontend/src/dashboard/actions.js b/frontend/src/metabase/dashboard/actions.js similarity index 100% rename from frontend/src/dashboard/actions.js rename to frontend/src/metabase/dashboard/actions.js diff --git a/frontend/src/dashboard/components/AddSeriesModal.jsx b/frontend/src/metabase/dashboard/components/AddSeriesModal.jsx similarity index 100% rename from frontend/src/dashboard/components/AddSeriesModal.jsx rename to frontend/src/metabase/dashboard/components/AddSeriesModal.jsx diff --git a/frontend/src/dashboard/components/AddToDashSelectQuestionModal.jsx b/frontend/src/metabase/dashboard/components/AddToDashSelectQuestionModal.jsx similarity index 100% rename from frontend/src/dashboard/components/AddToDashSelectQuestionModal.jsx rename to frontend/src/metabase/dashboard/components/AddToDashSelectQuestionModal.jsx diff --git a/frontend/src/dashboard/components/DashCard.jsx b/frontend/src/metabase/dashboard/components/DashCard.jsx similarity index 100% rename from frontend/src/dashboard/components/DashCard.jsx rename to frontend/src/metabase/dashboard/components/DashCard.jsx diff --git a/frontend/src/dashboard/components/Dashboard.jsx b/frontend/src/metabase/dashboard/components/Dashboard.jsx similarity index 100% rename from frontend/src/dashboard/components/Dashboard.jsx rename to frontend/src/metabase/dashboard/components/Dashboard.jsx diff --git a/frontend/src/dashboard/components/DashboardGrid.jsx b/frontend/src/metabase/dashboard/components/DashboardGrid.jsx similarity index 100% rename from frontend/src/dashboard/components/DashboardGrid.jsx rename to frontend/src/metabase/dashboard/components/DashboardGrid.jsx diff --git a/frontend/src/dashboard/components/DashboardHeader.jsx b/frontend/src/metabase/dashboard/components/DashboardHeader.jsx similarity index 100% rename from frontend/src/dashboard/components/DashboardHeader.jsx rename to 
frontend/src/metabase/dashboard/components/DashboardHeader.jsx diff --git a/frontend/src/dashboard/components/DeleteDashboardModal.jsx b/frontend/src/metabase/dashboard/components/DeleteDashboardModal.jsx similarity index 100% rename from frontend/src/dashboard/components/DeleteDashboardModal.jsx rename to frontend/src/metabase/dashboard/components/DeleteDashboardModal.jsx diff --git a/frontend/src/dashboard/components/RefreshWidget.css b/frontend/src/metabase/dashboard/components/RefreshWidget.css similarity index 100% rename from frontend/src/dashboard/components/RefreshWidget.css rename to frontend/src/metabase/dashboard/components/RefreshWidget.css diff --git a/frontend/src/dashboard/components/RefreshWidget.jsx b/frontend/src/metabase/dashboard/components/RefreshWidget.jsx similarity index 100% rename from frontend/src/dashboard/components/RefreshWidget.jsx rename to frontend/src/metabase/dashboard/components/RefreshWidget.jsx diff --git a/frontend/src/dashboard/components/RemoveFromDashboardModal.jsx b/frontend/src/metabase/dashboard/components/RemoveFromDashboardModal.jsx similarity index 100% rename from frontend/src/dashboard/components/RemoveFromDashboardModal.jsx rename to frontend/src/metabase/dashboard/components/RemoveFromDashboardModal.jsx diff --git a/frontend/src/dashboard/components/grid/GridItem.jsx b/frontend/src/metabase/dashboard/components/grid/GridItem.jsx similarity index 100% rename from frontend/src/dashboard/components/grid/GridItem.jsx rename to frontend/src/metabase/dashboard/components/grid/GridItem.jsx diff --git a/frontend/src/dashboard/components/grid/GridLayout.jsx b/frontend/src/metabase/dashboard/components/grid/GridLayout.jsx similarity index 100% rename from frontend/src/dashboard/components/grid/GridLayout.jsx rename to frontend/src/metabase/dashboard/components/grid/GridLayout.jsx diff --git a/frontend/src/dashboard/containers/DashboardApp.jsx b/frontend/src/metabase/dashboard/containers/DashboardApp.jsx similarity index 100% rename from frontend/src/dashboard/containers/DashboardApp.jsx rename to frontend/src/metabase/dashboard/containers/DashboardApp.jsx diff --git a/frontend/src/dashboard/dashboard.controllers.js b/frontend/src/metabase/dashboard/dashboard.controllers.js similarity index 100% rename from frontend/src/dashboard/dashboard.controllers.js rename to frontend/src/metabase/dashboard/dashboard.controllers.js diff --git a/frontend/src/dashboard/dashboard.module.js b/frontend/src/metabase/dashboard/dashboard.module.js similarity index 100% rename from frontend/src/dashboard/dashboard.module.js rename to frontend/src/metabase/dashboard/dashboard.module.js diff --git a/frontend/src/dashboard/reducers.js b/frontend/src/metabase/dashboard/reducers.js similarity index 100% rename from frontend/src/dashboard/reducers.js rename to frontend/src/metabase/dashboard/reducers.js diff --git a/frontend/src/dashboard/selectors.js b/frontend/src/metabase/dashboard/selectors.js similarity index 100% rename from frontend/src/dashboard/selectors.js rename to frontend/src/metabase/dashboard/selectors.js diff --git a/frontend/src/directives.js b/frontend/src/metabase/directives.js similarity index 100% rename from frontend/src/directives.js rename to frontend/src/metabase/directives.js diff --git a/frontend/src/filters.js b/frontend/src/metabase/filters.js similarity index 100% rename from frontend/src/filters.js rename to frontend/src/metabase/filters.js diff --git a/frontend/src/forms.js b/frontend/src/metabase/forms.js similarity index 100% rename from 
frontend/src/forms.js rename to frontend/src/metabase/forms.js diff --git a/frontend/src/home/actions.js b/frontend/src/metabase/home/actions.js similarity index 100% rename from frontend/src/home/actions.js rename to frontend/src/metabase/home/actions.js diff --git a/frontend/src/home/components/Activity.jsx b/frontend/src/metabase/home/components/Activity.jsx similarity index 100% rename from frontend/src/home/components/Activity.jsx rename to frontend/src/metabase/home/components/Activity.jsx diff --git a/frontend/src/home/components/ActivityItem.jsx b/frontend/src/metabase/home/components/ActivityItem.jsx similarity index 100% rename from frontend/src/home/components/ActivityItem.jsx rename to frontend/src/metabase/home/components/ActivityItem.jsx diff --git a/frontend/src/home/components/ActivityStory.jsx b/frontend/src/metabase/home/components/ActivityStory.jsx similarity index 100% rename from frontend/src/home/components/ActivityStory.jsx rename to frontend/src/metabase/home/components/ActivityStory.jsx diff --git a/frontend/src/home/components/Homepage.jsx b/frontend/src/metabase/home/components/Homepage.jsx similarity index 100% rename from frontend/src/home/components/Homepage.jsx rename to frontend/src/metabase/home/components/Homepage.jsx diff --git a/frontend/src/home/components/NewUserOnboardingModal.jsx b/frontend/src/metabase/home/components/NewUserOnboardingModal.jsx similarity index 100% rename from frontend/src/home/components/NewUserOnboardingModal.jsx rename to frontend/src/metabase/home/components/NewUserOnboardingModal.jsx diff --git a/frontend/src/home/components/RecentViews.jsx b/frontend/src/metabase/home/components/RecentViews.jsx similarity index 100% rename from frontend/src/home/components/RecentViews.jsx rename to frontend/src/metabase/home/components/RecentViews.jsx diff --git a/frontend/src/home/components/Smile.jsx b/frontend/src/metabase/home/components/Smile.jsx similarity index 100% rename from frontend/src/home/components/Smile.jsx rename to frontend/src/metabase/home/components/Smile.jsx diff --git a/frontend/src/home/containers/HomepageApp.jsx b/frontend/src/metabase/home/containers/HomepageApp.jsx similarity index 100% rename from frontend/src/home/containers/HomepageApp.jsx rename to frontend/src/metabase/home/containers/HomepageApp.jsx diff --git a/frontend/src/home/home.controllers.js b/frontend/src/metabase/home/home.controllers.js similarity index 100% rename from frontend/src/home/home.controllers.js rename to frontend/src/metabase/home/home.controllers.js diff --git a/frontend/src/home/home.module.js b/frontend/src/metabase/home/home.module.js similarity index 100% rename from frontend/src/home/home.module.js rename to frontend/src/metabase/home/home.module.js diff --git a/frontend/src/home/reducers.js b/frontend/src/metabase/home/reducers.js similarity index 100% rename from frontend/src/home/reducers.js rename to frontend/src/metabase/home/reducers.js diff --git a/frontend/src/home/selectors.js b/frontend/src/metabase/home/selectors.js similarity index 100% rename from frontend/src/home/selectors.js rename to frontend/src/metabase/home/selectors.js diff --git a/frontend/src/icon_paths.js b/frontend/src/metabase/icon_paths.js similarity index 100% rename from frontend/src/icon_paths.js rename to frontend/src/metabase/icon_paths.js diff --git a/frontend/src/icons.js b/frontend/src/metabase/icons.js similarity index 100% rename from frontend/src/icons.js rename to frontend/src/metabase/icons.js diff --git a/frontend/src/lib/analytics.js 
b/frontend/src/metabase/lib/analytics.js similarity index 100% rename from frontend/src/lib/analytics.js rename to frontend/src/metabase/lib/analytics.js diff --git a/frontend/src/lib/card.js b/frontend/src/metabase/lib/card.js similarity index 100% rename from frontend/src/lib/card.js rename to frontend/src/metabase/lib/card.js diff --git a/frontend/src/lib/colors.js b/frontend/src/metabase/lib/colors.js similarity index 100% rename from frontend/src/lib/colors.js rename to frontend/src/metabase/lib/colors.js diff --git a/frontend/src/lib/cookies.js b/frontend/src/metabase/lib/cookies.js similarity index 100% rename from frontend/src/lib/cookies.js rename to frontend/src/metabase/lib/cookies.js diff --git a/frontend/src/lib/core.js b/frontend/src/metabase/lib/core.js similarity index 100% rename from frontend/src/lib/core.js rename to frontend/src/metabase/lib/core.js diff --git a/frontend/src/lib/createAngularHistory.js b/frontend/src/metabase/lib/createAngularHistory.js similarity index 100% rename from frontend/src/lib/createAngularHistory.js rename to frontend/src/metabase/lib/createAngularHistory.js diff --git a/frontend/src/lib/dashboard_grid.js b/frontend/src/metabase/lib/dashboard_grid.js similarity index 100% rename from frontend/src/lib/dashboard_grid.js rename to frontend/src/metabase/lib/dashboard_grid.js diff --git a/frontend/src/lib/data_grid.js b/frontend/src/metabase/lib/data_grid.js similarity index 100% rename from frontend/src/lib/data_grid.js rename to frontend/src/metabase/lib/data_grid.js diff --git a/frontend/src/lib/debug.js b/frontend/src/metabase/lib/debug.js similarity index 100% rename from frontend/src/lib/debug.js rename to frontend/src/metabase/lib/debug.js diff --git a/frontend/src/lib/dom.js b/frontend/src/metabase/lib/dom.js similarity index 100% rename from frontend/src/lib/dom.js rename to frontend/src/metabase/lib/dom.js diff --git a/frontend/src/lib/emoji.js b/frontend/src/metabase/lib/emoji.js similarity index 100% rename from frontend/src/lib/emoji.js rename to frontend/src/metabase/lib/emoji.js diff --git a/frontend/src/lib/emoji.json b/frontend/src/metabase/lib/emoji.json similarity index 100% rename from frontend/src/lib/emoji.json rename to frontend/src/metabase/lib/emoji.json diff --git a/frontend/src/lib/expressions.js b/frontend/src/metabase/lib/expressions.js similarity index 97% rename from frontend/src/lib/expressions.js rename to frontend/src/metabase/lib/expressions.js index bee1f045fd991c2926cb665480bffed802d75660..115fa5ab90f3f3ab5593dd763cf0a0faf7a9cd74 100644 --- a/frontend/src/lib/expressions.js +++ b/frontend/src/metabase/lib/expressions.js @@ -134,6 +134,12 @@ function tokenizeExpression(expressionString) { currentToken.value += c; } + // Replace operators in expressionString making sure the operators have exactly one space before and after + VALID_OPERATORS.forEach(function(operator) { + let regex = new RegExp("\\s*[\\" + operator + "]\\s*"); + expressionString = expressionString.replace(regex, ' ' + operator + ' '); + }); + for (; i < expressionString.length; i++) { let c = expressionString.charAt(i); diff --git a/frontend/src/lib/formatting.js b/frontend/src/metabase/lib/formatting.js similarity index 99% rename from frontend/src/lib/formatting.js rename to frontend/src/metabase/lib/formatting.js index c2f5c936777bbcf786a012637b738161a31a881b..8093c5fc394949d32a9bdae3cb1db1a564b84941 100644 --- a/frontend/src/lib/formatting.js +++ b/frontend/src/metabase/lib/formatting.js @@ -178,7 +178,7 @@ export function 
assignUserColors(userIds, currentUserId, colorClasses = ['bg-bra } export function formatSQL(sql) { - if (sql) { + if (typeof sql === "string") { sql = sql.replace(/\sFROM/, "\nFROM"); sql = sql.replace(/\sLEFT JOIN/, "\nLEFT JOIN"); sql = sql.replace(/\sWHERE/, "\nWHERE"); diff --git a/frontend/src/lib/greeting.js b/frontend/src/metabase/lib/greeting.js similarity index 100% rename from frontend/src/lib/greeting.js rename to frontend/src/metabase/lib/greeting.js diff --git a/frontend/src/lib/promise.js b/frontend/src/metabase/lib/promise.js similarity index 100% rename from frontend/src/lib/promise.js rename to frontend/src/metabase/lib/promise.js diff --git a/frontend/src/lib/pulse.js b/frontend/src/metabase/lib/pulse.js similarity index 100% rename from frontend/src/lib/pulse.js rename to frontend/src/metabase/lib/pulse.js diff --git a/frontend/src/lib/query.js b/frontend/src/metabase/lib/query.js similarity index 100% rename from frontend/src/lib/query.js rename to frontend/src/metabase/lib/query.js diff --git a/frontend/src/lib/query_time.js b/frontend/src/metabase/lib/query_time.js similarity index 97% rename from frontend/src/lib/query_time.js rename to frontend/src/metabase/lib/query_time.js index cee485f22d60b393d4c35b8f86fdd7e9dcd0d526..e7bee912f3ad40aedee2d9cf3cb43bc84e72eeee 100644 --- a/frontend/src/lib/query_time.js +++ b/frontend/src/metabase/lib/query_time.js @@ -85,6 +85,9 @@ export function generateTimeIntervalDescription(n, unit) { return "Yesterday"; } } + + if (!unit && n === 0) return "Today"; // ['relative-datetime', 'current'] is a legal MBQL form but has no unit + unit = inflection.capitalize(unit); if (typeof n === "string") { if (n === "current") { diff --git a/frontend/src/lib/redux.js b/frontend/src/metabase/lib/redux.js similarity index 100% rename from frontend/src/lib/redux.js rename to frontend/src/metabase/lib/redux.js diff --git a/frontend/src/lib/schema_metadata.js b/frontend/src/metabase/lib/schema_metadata.js similarity index 100% rename from frontend/src/lib/schema_metadata.js rename to frontend/src/metabase/lib/schema_metadata.js diff --git a/frontend/src/lib/settings.js b/frontend/src/metabase/lib/settings.js similarity index 100% rename from frontend/src/lib/settings.js rename to frontend/src/metabase/lib/settings.js diff --git a/frontend/src/lib/table.js b/frontend/src/metabase/lib/table.js similarity index 100% rename from frontend/src/lib/table.js rename to frontend/src/metabase/lib/table.js diff --git a/frontend/src/lib/time.js b/frontend/src/metabase/lib/time.js similarity index 100% rename from frontend/src/lib/time.js rename to frontend/src/metabase/lib/time.js diff --git a/frontend/src/lib/urls.js b/frontend/src/metabase/lib/urls.js similarity index 100% rename from frontend/src/lib/urls.js rename to frontend/src/metabase/lib/urls.js diff --git a/frontend/src/lib/utils.js b/frontend/src/metabase/lib/utils.js similarity index 100% rename from frontend/src/lib/utils.js rename to frontend/src/metabase/lib/utils.js diff --git a/frontend/src/lib/visualization_settings.js b/frontend/src/metabase/lib/visualization_settings.js similarity index 100% rename from frontend/src/lib/visualization_settings.js rename to frontend/src/metabase/lib/visualization_settings.js diff --git a/frontend/src/metabase/meta/Card.js b/frontend/src/metabase/meta/Card.js new file mode 100644 index 0000000000000000000000000000000000000000..06f58c99864a2773a0fa54def7fe49c9833c080b --- /dev/null +++ b/frontend/src/metabase/meta/Card.js @@ -0,0 +1,53 @@ +/* @flow */ + +import 
type { StructuredQueryObject, NativeQueryObject } from "./types/Query"; +import type { CardObject, StructuredDatasetQueryObject, NativeDatasetQueryObject } from "./types/Card"; + +import * as Query from "./Query"; + +export const STRUCTURED_QUERY_TEMPLATE: StructuredDatasetQueryObject = { + database: null, + type: "query", + query: { + source_table: null, + aggregation: ["rows"], + breakout: [], + filter: [] + } +}; + +export const NATIVE_QUERY_TEMPLATE: NativeDatasetQueryObject = { + database: null, + type: "native", + native: { + query: "" + } +}; + +export function isStructured(card: CardObject): bool { + return card.dataset_query.type === "query"; +} + +export function isNative(card: CardObject): bool { + return card.dataset_query.type === "native"; +} + +export function canRun(card: CardObject): bool { + if (card.dataset_query.type === "query") { + const query : StructuredQueryObject = card.dataset_query.query; + return query && query.source_table != undefined && Query.hasValidAggregation(query); + } else if (card.dataset_query.type === "native") { + const native : NativeQueryObject = card.dataset_query.native; + return native && card.dataset_query.database != undefined && native.query !== ""; + } else { + return false; + } +} + +export function getQuery(card: CardObject): ?StructuredQueryObject { + if (card.dataset_query.type === "query") { + return card.dataset_query.query; + } else { + return null; + } +} diff --git a/frontend/src/metabase/meta/Query.js b/frontend/src/metabase/meta/Query.js new file mode 100644 index 0000000000000000000000000000000000000000..7952cf9c37f116b89344d844224a3850a4f15f21 --- /dev/null +++ b/frontend/src/metabase/meta/Query.js @@ -0,0 +1,44 @@ +/* @flow */ + +import type { StructuredQueryObject, FilterClause } from "./types/Query"; + +export function hasValidAggregation(query: StructuredQueryObject): bool { + return false; // FIXME +} + +export function addFilter(query: StructuredQueryObject, newFilter: FilterClause): StructuredQueryObject { + const filters = getFilters(query); + return setFilters(query, [...filters.slice(), newFilter]); // TODO: figure out why flow complains without .slice() +} + +export function updateFilter(query: StructuredQueryObject, index: number, updatedFilter: FilterClause): StructuredQueryObject { + const filters = getFilters(query); + return setFilters(query, [...filters.slice(0, index), updatedFilter, ...filters.slice(index + 1)]); +} + +export function removeFilter(query: StructuredQueryObject, index: number): StructuredQueryObject { + const filters = getFilters(query); + return setFilters(query, [...filters.slice(0, index), ...filters.slice(index + 1)]); +} + +function getFilters(query: StructuredQueryObject): Array<FilterClause> { + if (query.filters == null || query.filters.length === 0) { + return []; + } else if (query.filters[0] === "and") { + return query.filters.slice(1); + } else { + return [query.filters]; + } +} + +function setFilters(query: StructuredQueryObject, filters: Array<FilterClause>): StructuredQueryObject { + if (filters.length === 0) { + query = { ...query }; + delete query.filters; + return query; + } else if (filters.length === 1) { + return { ...query, filters: filters[0] }; + } else { + return { ...query, filters: ["and"].concat(filters) }; + } +} diff --git a/frontend/src/metabase/meta/metadata/Base.js b/frontend/src/metabase/meta/metadata/Base.js new file mode 100644 index 0000000000000000000000000000000000000000..edb6a5f354c1d78c154789fe3d5748b41027f90b --- /dev/null +++ 
b/frontend/src/metabase/meta/metadata/Base.js @@ -0,0 +1,86 @@ +/* @flow */ + +import memoize from "./memoize"; + +import { Schema, arrayOf, normalize } from "normalizr"; + +type MetaSchemaClass = { type: string, schema: MetaSchema } +type MetaSchemaArrayOf = Array<MetaSchemaClass> +type MetaSchemaType = MetaSchemaClass | MetaSchemaArrayOf; +type MetaSchema = { [key: string]: MetaSchemaType }; + +export default class Base { + static type: string; + static schema: MetaSchema; + + _metadata: Base; + _object: Object; + _entityMaps: Object; + + constructor(object: Object, metadata: ?Base) { + // if metadata is provided assume "object" has been normalized + if (metadata) { + this._metadata = metadata + this._object = object; + } else { + let { entities, result } = normalize(object, toNormalizrSchema(this.constructor)); + this._metadata = this; + this._entityMaps = entities; + this._object = entities[this.constructor.type][result]; + } + + if (this._object && !Array.isArray(this._object)) { + for (const name in this._object) { + // $FlowFixMe + if (this[name] === undefined) { + // $FlowFixMe + this[name] = this._object[name]; + } + } + } + } + + // return the wrapped entity + @memoize + _entity<T: Base>(Klass: Class<T>, id: number): ?T { + if (this !== this._metadata) { + return this._metadata._entity(...arguments); + } + + if (id == null) { + return null; + } + + const object = this._metadata._entityMaps[Klass.type][id]; + if (object == null) { + throw new Error("Entity " + Klass.type + "[" + id + "] not found in loaded metadata."); + } + return new Klass(object, this._metadata); + } + + // return an array of wrapped entities + @memoize + _entities<T: Base>(Klass: Class<T>, ids: Array<number>): Array<?T> { + if (this !== this._metadata) { + return this._metadata._entities(...arguments); + } + + return ids.map(id => this._entity(Klass, id)); + } +} + + +// transform our schema to a normalizr schema +function toNormalizrSchema(Klass: MetaSchemaClass) { + if (Array.isArray(Klass.schema)) { + return arrayOf(toNormalizrSchema(Klass.schema[0])); + } else { + const schema : any = new Schema(Klass.type); + const properties = {} + for (const name in Klass.schema) { + properties[name] = Array.isArray(Klass.schema[name]) ? 
arrayOf(toNormalizrSchema(Klass.schema[name][0])) : toNormalizrSchema(Klass.schema[name]); + } + schema.define(properties); + return schema; + } +} diff --git a/frontend/src/metabase/meta/metadata/Database.js b/frontend/src/metabase/meta/metadata/Database.js new file mode 100644 index 0000000000000000000000000000000000000000..3921bd7e8614858ebff39c6a9562d027bda8a18b --- /dev/null +++ b/frontend/src/metabase/meta/metadata/Database.js @@ -0,0 +1,19 @@ +/* @flow */ + +import Base from "./Base"; +import Table from "./Table"; + +export default class Database extends Base { + static type = "database"; + static schema = { + tables: [Table] + }; + + table(id: number) { + return this._entity(Table, id); + } + + tables() { + return this._entities(Table, this._object.tables); + } +} diff --git a/frontend/src/metabase/meta/metadata/Field.js b/frontend/src/metabase/meta/metadata/Field.js new file mode 100644 index 0000000000000000000000000000000000000000..f4d55e07f1f26400a07ea55d954f2b55601cfb90 --- /dev/null +++ b/frontend/src/metabase/meta/metadata/Field.js @@ -0,0 +1,31 @@ +/* @flow */ + +import Base from "./Base"; +import Table from "./Table"; + +import { isDate, isNumeric, isBoolean, isString, isSummable, isCategory, isDimension, isMetric } from "metabase/lib/schema_metadata"; + +export default class Field extends Base { + static type = "field"; + static schema = {}; + + table_id: number; + fk_target_field_id: number; + + table() { + return this._entity(Table, this.table_id); + } + + target() { + return this._entity(Field, this.fk_target_field_id); + } + + isDate() { return isDate(this._object); } + isNumeric() { return isNumeric(this._object); } + isBoolean() { return isBoolean(this._object); } + isString() { return isString(this._object); } + isSummable() { return isSummable(this._object); } + isCategory() { return isCategory(this._object); } + isMetric() { return isMetric(this._object); } + isDimension() { return isDimension(this._object); } +} diff --git a/frontend/src/metabase/meta/metadata/Metadata.js b/frontend/src/metabase/meta/metadata/Metadata.js new file mode 100644 index 0000000000000000000000000000000000000000..ad872fb5c6fe3b42ceb5f32798c893d5dffb962d --- /dev/null +++ b/frontend/src/metabase/meta/metadata/Metadata.js @@ -0,0 +1,28 @@ +/* @flow */ + +import Base from "./Base"; +import Database from "./Database"; +import Table from "./Table"; +import Field from "./Field"; + +export default class Metadata extends Base { + static type = "metadata"; + static schema = { databases: [Database] }; + + constructor(databases: Array<Object>) { + super({ databases, id: 0 }); + } + + field(id: number) { + return this._entity(Field, id); + } + table(id: number) { + return this._entity(Table, id); + } + database(id: number) { + return this._entity(Database, id); + } + databases() { + return this._entities(Database, this._object.databases); + } +} diff --git a/frontend/src/metabase/meta/metadata/Table.js b/frontend/src/metabase/meta/metadata/Table.js new file mode 100644 index 0000000000000000000000000000000000000000..d951e85a24596a5c337a2d2f41ca79802c52465e --- /dev/null +++ b/frontend/src/metabase/meta/metadata/Table.js @@ -0,0 +1,27 @@ +/* @flow */ + +import Base from "./Base"; +import Field from "./Field"; +import Database from "./Database"; + + +export default class Table extends Base { + static type = "table"; + static schema = { + fields: [Field] + }; + + db_id: number; + + database() { + return this._entity(Database, this.db_id); + } + + field(id: number) { + return this._entity(Field, id); + 
} + + fields() { + return this._entities(Field, this._object.fields); + } +} diff --git a/frontend/src/metabase/meta/metadata/demo.js b/frontend/src/metabase/meta/metadata/demo.js new file mode 100644 index 0000000000000000000000000000000000000000..9674461883dac9fb21df01c3b742f811253ae437 --- /dev/null +++ b/frontend/src/metabase/meta/metadata/demo.js @@ -0,0 +1,32 @@ +/* @flow */ + +import Metadata from "./Metadata"; +import Database from "./Database"; + +async function getDatabases() { + let response = await fetch("/api/database?include_tables=true", { credentials: 'same-origin' }); + return await response.json(); +} + +async function getTable(table) { + let response = await fetch("/api/table/" + table.id + "/query_metadata", { credentials: 'same-origin' }); + return await response.json(); +} + +async function loadDatabaseTables(database) { + database.tables = await Promise.all(database.tables.map(getTable)); +} + +async function loadMetadata() { + let databases = await getDatabases(); + await Promise.all(databases.map(loadDatabaseTables)); + return databases; +} + +loadMetadata().then((databases) => { + window.m = new Metadata(databases); + window.d = new Database(databases[0]); + console.log(window.m.databases()); + console.log(window.m.databases()[1].tables()[0].field(1835).target().table().database().tables()[0].fields()[0].isNumeric()); + console.log(window.d.tables()); +}).then(undefined, (err) => console.error(err)) diff --git a/frontend/src/metabase/meta/metadata/memoize.js b/frontend/src/metabase/meta/metadata/memoize.js new file mode 100644 index 0000000000000000000000000000000000000000..fa9e9941c8fe735c40cf2bdafadfb4d486db01d9 --- /dev/null +++ b/frontend/src/metabase/meta/metadata/memoize.js @@ -0,0 +1,19 @@ + +let memoized = new WeakMap(); + +function getWithFallback(map, key, fallback) { + if (!map.has(key)) { + map.set(key, fallback()); + } + return map.get(key); +} + +export default function memoize(target, name, descriptor) { + let method = target[name]; + descriptor.value = function(...args) { + const path = [this, method, ...args]; + const last = path.pop(); + const map = path.reduce((map, key) => getWithFallback(map, key, () => new Map), memoized); + return getWithFallback(map, last, () => method.apply(this, args)); + } +} diff --git a/frontend/src/metabase/meta/types/Card.js b/frontend/src/metabase/meta/types/Card.js new file mode 100644 index 0000000000000000000000000000000000000000..ec58c96b6e092538b0e84e59ad82663981d4113c --- /dev/null +++ b/frontend/src/metabase/meta/types/Card.js @@ -0,0 +1,25 @@ +/* @flow */ + +import type { DatabaseId } from "./base"; +import type { StructuredQueryObject, NativeQueryObject } from "./Query"; + +export type CardId = number; + +export type CardObject = { + id: CardId, + dataset_query: DatasetQueryObject +}; + +export type StructuredDatasetQueryObject = { + type: "query", + query: StructuredQueryObject, + database: ?DatabaseId +}; + +export type NativeDatasetQueryObject = { + type: "native", + native: NativeQueryObject, + database: ?DatabaseId +}; + +export type DatasetQueryObject = StructuredDatasetQueryObject | NativeDatasetQueryObject; diff --git a/frontend/src/metabase/meta/types/Dashboard.js b/frontend/src/metabase/meta/types/Dashboard.js new file mode 100644 index 0000000000000000000000000000000000000000..0f8ffd33c751c8740daec0014dd82d3de2672b33 --- /dev/null +++ b/frontend/src/metabase/meta/types/Dashboard.js @@ -0,0 +1,43 @@ +/* @flow */ + +import type { CardObject, CardId } from "./Card"; +import type { ConcreteField } 
from "./Query"; + + +export type DashboardObject = { + id: number, + ordered_cards: Array<DashCardObject>, + // incomplete + parameters: Array<ParameterObject> +}; + +export type DashCardObject = { + id: number, + series: Array<CardObject>, + // incomplete + parameter_mappings: Array<ParameterMappingObject>; +}; + +export type ParameterId = string; + +export type ParameterType = + "date-range" | + "category" | + "id"; + +export type ParameterObject = { + id: ParameterId, + name: string, + type: ParameterType, + default: any +}; + +export type ParameterMappingTarget = + ["parameter", string] | + ["dimension", ConcreteField]; + +export type ParameterMappingObject = { + card_id: CardId, + parameter_id: ParameterId, + target: ParameterMappingTarget +}; diff --git a/frontend/src/metabase/meta/types/Query.js b/frontend/src/metabase/meta/types/Query.js new file mode 100644 index 0000000000000000000000000000000000000000..ef132aed358dc42e69611578f4e3c11c9084d3ea --- /dev/null +++ b/frontend/src/metabase/meta/types/Query.js @@ -0,0 +1,91 @@ +/* @flow */ + +import type { TableId } from "./base"; + +export type FieldId = number; +export type SegmentId = number; +export type MetricId = number; + +export type ExpressionName = string; + +export type StringLiteral = string; +export type NumericLiteral = number; +export type DatetimeLiteral = string; + +export type Value = null | boolean | StringLiteral | NumericLiteral | DatetimeLiteral; +export type OrderableValue = NumericLiteral | DatetimeLiteral; + +export type RelativeDatetimePeriod = "current" | "last" | "next" | number; +export type RelativeDatetimeUnit = "minute" | "hour" | "day" | "week" | "month" | "quarter" | "year"; +export type DatetimeUnit = "default" | "minute" | "minute-of-hour" | "hour" | "hour-of-day" | "day" | "day-of-week" | "day-of-month" | "day-of-year" | "week" | "week-of-year" | "month" | "month-of-year" | "quarter" | "quarter-of-year" | "year"; + +export type NativeQueryObject = { + query: string +}; + +export type StructuredQueryObject = { + source_table: ?TableId, + aggregation?: AggregationClause, + breakout?: BreakoutClause, + filter?: FilterClause, + order_by?: OrderByClause, + limit?: LimitClause, + expressions?: { [key: ExpressionName]: Expression } +}; + +export type AggregationClause = + ["rows"] | // deprecated + ["count"] | + ["count"|"avg"|"cum_sum"|"distinct"|"stddev"|"sum"|"min"|"max", ConcreteField] | + ["metric", MetricId]; + +export type BreakoutClause = Array<ConcreteField>; +export type FilterClause = + ["and"|"or", FilterClause, FilterClause] | + ["not", FilterClause] | + ["="|"!=", ConcreteField, Value] | + ["<"|">"|"<="|">=", ConcreteField, OrderableValue] | + ["is-null"|"not-null", ConcreteField] | + ["between", ConcreteField, OrderableValue, OrderableValue] | + ["inside", ConcreteField, ConcreteField, NumericLiteral, NumericLiteral, NumericLiteral, NumericLiteral] | + ["starts-with"|"contains"|"does-not-contain"|"ends-with", + ConcreteField, StringLiteral] | + ["time-interval", ConcreteField, RelativeDatetimePeriod, RelativeDatetimeUnit] | + ["segment", SegmentId]; + +export type OrderByClause = Array<OrderBy>; +export type OrderBy = ["asc"|"desc", Field]; + +export type LimitClause = number; + +export type Field = + ConcreteField | + AggregateField; + +export type ConcreteField = + LocalFieldReference | + ForeignFieldReference | + ExpressionReference | + DatetimeField; + +export type LocalFieldReference = + ["field-id", FieldId] | + FieldId; // deprecated + +export type ForeignFieldReference = + ["fk->", 
FieldId, FieldId]; + +export type ExpressionReference = + ["expression", ExpressionName]; + +export type DatetimeField = + ["datetime-field", LocalFieldReference | ForeignFieldReference, DatetimeUnit] | + ["datetime-field", LocalFieldReference | ForeignFieldReference, "as", DatetimeUnit]; // deprecated + +export type AggregateField = ["aggregation", number]; + +export type ExpressionOperator = "+" | "-" | "*" | "/"; +export type ExpressionOperand = ConcreteField | NumericLiteral | Expression; + +export type Expression = + [ExpressionOperator, ExpressionOperand, ExpressionOperand]; diff --git a/frontend/src/metabase/meta/types/base.js b/frontend/src/metabase/meta/types/base.js new file mode 100644 index 0000000000000000000000000000000000000000..b676f4106a3849d43a985c496ab942a908cf4601 --- /dev/null +++ b/frontend/src/metabase/meta/types/base.js @@ -0,0 +1,4 @@ +/* @flow */ + +export type DatabaseId = number; +export type TableId = number; diff --git a/frontend/src/pulse/actions.js b/frontend/src/metabase/pulse/actions.js similarity index 100% rename from frontend/src/pulse/actions.js rename to frontend/src/metabase/pulse/actions.js diff --git a/frontend/src/pulse/components/CardPicker.jsx b/frontend/src/metabase/pulse/components/CardPicker.jsx similarity index 100% rename from frontend/src/pulse/components/CardPicker.jsx rename to frontend/src/metabase/pulse/components/CardPicker.jsx diff --git a/frontend/src/pulse/components/PulseCardPreview.jsx b/frontend/src/metabase/pulse/components/PulseCardPreview.jsx similarity index 100% rename from frontend/src/pulse/components/PulseCardPreview.jsx rename to frontend/src/metabase/pulse/components/PulseCardPreview.jsx diff --git a/frontend/src/pulse/components/PulseEdit.jsx b/frontend/src/metabase/pulse/components/PulseEdit.jsx similarity index 100% rename from frontend/src/pulse/components/PulseEdit.jsx rename to frontend/src/metabase/pulse/components/PulseEdit.jsx diff --git a/frontend/src/pulse/components/PulseEditCards.jsx b/frontend/src/metabase/pulse/components/PulseEditCards.jsx similarity index 100% rename from frontend/src/pulse/components/PulseEditCards.jsx rename to frontend/src/metabase/pulse/components/PulseEditCards.jsx diff --git a/frontend/src/pulse/components/PulseEditChannels.jsx b/frontend/src/metabase/pulse/components/PulseEditChannels.jsx similarity index 100% rename from frontend/src/pulse/components/PulseEditChannels.jsx rename to frontend/src/metabase/pulse/components/PulseEditChannels.jsx diff --git a/frontend/src/pulse/components/PulseEditName.jsx b/frontend/src/metabase/pulse/components/PulseEditName.jsx similarity index 100% rename from frontend/src/pulse/components/PulseEditName.jsx rename to frontend/src/metabase/pulse/components/PulseEditName.jsx diff --git a/frontend/src/pulse/components/PulseList.jsx b/frontend/src/metabase/pulse/components/PulseList.jsx similarity index 100% rename from frontend/src/pulse/components/PulseList.jsx rename to frontend/src/metabase/pulse/components/PulseList.jsx diff --git a/frontend/src/pulse/components/PulseListChannel.jsx b/frontend/src/metabase/pulse/components/PulseListChannel.jsx similarity index 100% rename from frontend/src/pulse/components/PulseListChannel.jsx rename to frontend/src/metabase/pulse/components/PulseListChannel.jsx diff --git a/frontend/src/pulse/components/PulseListItem.jsx b/frontend/src/metabase/pulse/components/PulseListItem.jsx similarity index 100% rename from frontend/src/pulse/components/PulseListItem.jsx rename to 
frontend/src/metabase/pulse/components/PulseListItem.jsx diff --git a/frontend/src/pulse/components/RecipientPicker.jsx b/frontend/src/metabase/pulse/components/RecipientPicker.jsx similarity index 100% rename from frontend/src/pulse/components/RecipientPicker.jsx rename to frontend/src/metabase/pulse/components/RecipientPicker.jsx diff --git a/frontend/src/pulse/components/SchedulePicker.jsx b/frontend/src/metabase/pulse/components/SchedulePicker.jsx similarity index 100% rename from frontend/src/pulse/components/SchedulePicker.jsx rename to frontend/src/metabase/pulse/components/SchedulePicker.jsx diff --git a/frontend/src/pulse/components/SetupMessage.jsx b/frontend/src/metabase/pulse/components/SetupMessage.jsx similarity index 100% rename from frontend/src/pulse/components/SetupMessage.jsx rename to frontend/src/metabase/pulse/components/SetupMessage.jsx diff --git a/frontend/src/pulse/components/SetupModal.jsx b/frontend/src/metabase/pulse/components/SetupModal.jsx similarity index 100% rename from frontend/src/pulse/components/SetupModal.jsx rename to frontend/src/metabase/pulse/components/SetupModal.jsx diff --git a/frontend/src/pulse/components/WhatsAPulse.jsx b/frontend/src/metabase/pulse/components/WhatsAPulse.jsx similarity index 100% rename from frontend/src/pulse/components/WhatsAPulse.jsx rename to frontend/src/metabase/pulse/components/WhatsAPulse.jsx diff --git a/frontend/src/pulse/containers/PulseEditApp.jsx b/frontend/src/metabase/pulse/containers/PulseEditApp.jsx similarity index 100% rename from frontend/src/pulse/containers/PulseEditApp.jsx rename to frontend/src/metabase/pulse/containers/PulseEditApp.jsx diff --git a/frontend/src/pulse/containers/PulseListApp.jsx b/frontend/src/metabase/pulse/containers/PulseListApp.jsx similarity index 100% rename from frontend/src/pulse/containers/PulseListApp.jsx rename to frontend/src/metabase/pulse/containers/PulseListApp.jsx diff --git a/frontend/src/pulse/pulse.module.js b/frontend/src/metabase/pulse/pulse.module.js similarity index 100% rename from frontend/src/pulse/pulse.module.js rename to frontend/src/metabase/pulse/pulse.module.js diff --git a/frontend/src/pulse/reducers.js b/frontend/src/metabase/pulse/reducers.js similarity index 100% rename from frontend/src/pulse/reducers.js rename to frontend/src/metabase/pulse/reducers.js diff --git a/frontend/src/pulse/selectors.js b/frontend/src/metabase/pulse/selectors.js similarity index 100% rename from frontend/src/pulse/selectors.js rename to frontend/src/metabase/pulse/selectors.js diff --git a/frontend/src/query_builder/AccordianList.jsx b/frontend/src/metabase/query_builder/AccordianList.jsx similarity index 100% rename from frontend/src/query_builder/AccordianList.jsx rename to frontend/src/metabase/query_builder/AccordianList.jsx diff --git a/frontend/src/query_builder/AddClauseButton.jsx b/frontend/src/metabase/query_builder/AddClauseButton.jsx similarity index 100% rename from frontend/src/query_builder/AddClauseButton.jsx rename to frontend/src/metabase/query_builder/AddClauseButton.jsx diff --git a/frontend/src/query_builder/AggregationPopover.jsx b/frontend/src/metabase/query_builder/AggregationPopover.jsx similarity index 100% rename from frontend/src/query_builder/AggregationPopover.jsx rename to frontend/src/metabase/query_builder/AggregationPopover.jsx diff --git a/frontend/src/query_builder/AggregationWidget.jsx b/frontend/src/metabase/query_builder/AggregationWidget.jsx similarity index 100% rename from frontend/src/query_builder/AggregationWidget.jsx rename 
to frontend/src/metabase/query_builder/AggregationWidget.jsx diff --git a/frontend/src/query_builder/BreakoutWidget.jsx b/frontend/src/metabase/query_builder/BreakoutWidget.jsx similarity index 100% rename from frontend/src/query_builder/BreakoutWidget.jsx rename to frontend/src/metabase/query_builder/BreakoutWidget.jsx diff --git a/frontend/src/query_builder/DataSelector.jsx b/frontend/src/metabase/query_builder/DataSelector.jsx similarity index 100% rename from frontend/src/query_builder/DataSelector.jsx rename to frontend/src/metabase/query_builder/DataSelector.jsx diff --git a/frontend/src/query_builder/ExpandableString.jsx b/frontend/src/metabase/query_builder/ExpandableString.jsx similarity index 100% rename from frontend/src/query_builder/ExpandableString.jsx rename to frontend/src/metabase/query_builder/ExpandableString.jsx diff --git a/frontend/src/query_builder/ExtendedOptions.jsx b/frontend/src/metabase/query_builder/ExtendedOptions.jsx similarity index 100% rename from frontend/src/query_builder/ExtendedOptions.jsx rename to frontend/src/metabase/query_builder/ExtendedOptions.jsx diff --git a/frontend/src/query_builder/FieldList.jsx b/frontend/src/metabase/query_builder/FieldList.jsx similarity index 100% rename from frontend/src/query_builder/FieldList.jsx rename to frontend/src/metabase/query_builder/FieldList.jsx diff --git a/frontend/src/query_builder/FieldName.jsx b/frontend/src/metabase/query_builder/FieldName.jsx similarity index 100% rename from frontend/src/query_builder/FieldName.jsx rename to frontend/src/metabase/query_builder/FieldName.jsx diff --git a/frontend/src/query_builder/FieldWidget.jsx b/frontend/src/metabase/query_builder/FieldWidget.jsx similarity index 100% rename from frontend/src/query_builder/FieldWidget.jsx rename to frontend/src/metabase/query_builder/FieldWidget.jsx diff --git a/frontend/src/query_builder/GuiQueryEditor.jsx b/frontend/src/metabase/query_builder/GuiQueryEditor.jsx similarity index 100% rename from frontend/src/query_builder/GuiQueryEditor.jsx rename to frontend/src/metabase/query_builder/GuiQueryEditor.jsx diff --git a/frontend/src/query_builder/LimitWidget.jsx b/frontend/src/metabase/query_builder/LimitWidget.jsx similarity index 100% rename from frontend/src/query_builder/LimitWidget.jsx rename to frontend/src/metabase/query_builder/LimitWidget.jsx diff --git a/frontend/src/query_builder/NativeQueryEditor.jsx b/frontend/src/metabase/query_builder/NativeQueryEditor.jsx similarity index 100% rename from frontend/src/query_builder/NativeQueryEditor.jsx rename to frontend/src/metabase/query_builder/NativeQueryEditor.jsx diff --git a/frontend/src/query_builder/QueryDefinitionTooltip.jsx b/frontend/src/metabase/query_builder/QueryDefinitionTooltip.jsx similarity index 100% rename from frontend/src/query_builder/QueryDefinitionTooltip.jsx rename to frontend/src/metabase/query_builder/QueryDefinitionTooltip.jsx diff --git a/frontend/src/query_builder/QueryHeader.jsx b/frontend/src/metabase/query_builder/QueryHeader.jsx similarity index 99% rename from frontend/src/query_builder/QueryHeader.jsx rename to frontend/src/metabase/query_builder/QueryHeader.jsx index 3d919f44054d870a726672c2c10629066a8eb7c6..0dcb995624384a65ea799de21ac5aec13ae470e5 100644 --- a/frontend/src/query_builder/QueryHeader.jsx +++ b/frontend/src/metabase/query_builder/QueryHeader.jsx @@ -320,6 +320,7 @@ export default class QueryHeader extends Component { allowNativeToQuery={this.props.cardIsNewFn() && !this.props.cardIsDirtyFn()} nativeForm={this.props.result && 
this.props.result.data && this.props.result.data.native_form} onSetMode={this.props.setQueryModeFn} + tableMetadata={this.props.tableMetadata} /> ]); diff --git a/frontend/src/query_builder/QueryModeButton.jsx b/frontend/src/metabase/query_builder/QueryModeButton.jsx similarity index 73% rename from frontend/src/query_builder/QueryModeButton.jsx rename to frontend/src/metabase/query_builder/QueryModeButton.jsx index 110f786009fd83d76587ee881473b63f0793bccd..046be310c21912e8474912b9c8317ec98fa5f19f 100644 --- a/frontend/src/query_builder/QueryModeButton.jsx +++ b/frontend/src/metabase/query_builder/QueryModeButton.jsx @@ -1,7 +1,8 @@ import React, { Component, PropTypes } from "react"; import cx from "classnames"; -import { formatSQL } from "metabase/lib/formatting"; +import _ from "underscore"; +import { formatSQL, capitalize } from "metabase/lib/formatting"; import Icon from "metabase/components/Icon.jsx"; import Modal from "metabase/components/Modal.jsx"; import Tooltip from "metabase/components/Tooltip.jsx"; @@ -29,17 +30,20 @@ export default class QueryModeButton extends Component { } render() { - const { allowNativeToQuery, mode, nativeForm, onSetMode } = this.props; + const { allowNativeToQuery, mode, nativeForm, onSetMode, tableMetadata } = this.props; // determine the type to switch to based on the type var targetType = (mode === "query") ? "native" : "query"; + const engine = tableMetadata && tableMetadata.db.engine; + const nativeQueryName = _.contains(["mongo", "druid"], engine) ? "native query" : "SQL"; + // maybe switch up the icon based on mode? let onClick = null; let tooltip = "Not Supported"; if (mode === "query") { onClick = nativeForm ? () => this.setState({isOpen: true}) : () => onSetMode("native"); - tooltip = nativeForm ? "View the SQL" : "Switch to SQL"; + tooltip = nativeForm ? `View the ${nativeQueryName}` : `Switch to ${nativeQueryName}`; } else if (mode === "native" && allowNativeToQuery) { onClick = () => onSetMode("query"); tooltip = "Switch to Builder"; @@ -56,19 +60,24 @@ export default class QueryModeButton extends Component { <Modal className="Modal Modal--medium" backdropClassName="Modal-backdrop-dark" isOpen={this.state.isOpen} onClose={() => this.setState({isOpen: false})}> <div className="p4"> <div className="mb3 flex flex-row flex-full align-center justify-between"> - <h2>SQL for this question</h2> + <h2>{capitalize(nativeQueryName)} for this question</h2> <span className="cursor-pointer" onClick={() => this.setState({isOpen: false})}><Icon name="close" width="16px" height="16px" /></span> </div> <pre className="mb3 p2 sql-code"> - {nativeForm && nativeForm.query && formatSQL(nativeForm.query)} + {nativeForm && nativeForm.query && ( + _.contains(["mongo", "druid"], engine) ? 
+ JSON.stringify(nativeForm.query) + : + formatSQL(nativeForm.query) + )} </pre> <div className="text-centered"> <a className="Button Button--primary" onClick={() => { onSetMode(targetType); this.setState({isOpen: false}); - }}>Convert this question to SQL</a> + }}>Convert this question to {nativeQueryName}</a> </div> </div> </Modal> diff --git a/frontend/src/query_builder/QueryVisualization.jsx b/frontend/src/metabase/query_builder/QueryVisualization.jsx similarity index 100% rename from frontend/src/query_builder/QueryVisualization.jsx rename to frontend/src/metabase/query_builder/QueryVisualization.jsx diff --git a/frontend/src/query_builder/QueryVisualizationObjectDetailTable.jsx b/frontend/src/metabase/query_builder/QueryVisualizationObjectDetailTable.jsx similarity index 100% rename from frontend/src/query_builder/QueryVisualizationObjectDetailTable.jsx rename to frontend/src/metabase/query_builder/QueryVisualizationObjectDetailTable.jsx diff --git a/frontend/src/query_builder/RunButton.jsx b/frontend/src/metabase/query_builder/RunButton.jsx similarity index 100% rename from frontend/src/query_builder/RunButton.jsx rename to frontend/src/metabase/query_builder/RunButton.jsx diff --git a/frontend/src/query_builder/SavedQuestionIntroModal.jsx b/frontend/src/metabase/query_builder/SavedQuestionIntroModal.jsx similarity index 100% rename from frontend/src/query_builder/SavedQuestionIntroModal.jsx rename to frontend/src/metabase/query_builder/SavedQuestionIntroModal.jsx diff --git a/frontend/src/query_builder/SearchBar.jsx b/frontend/src/metabase/query_builder/SearchBar.jsx similarity index 100% rename from frontend/src/query_builder/SearchBar.jsx rename to frontend/src/metabase/query_builder/SearchBar.jsx diff --git a/frontend/src/query_builder/SelectionModule.jsx b/frontend/src/metabase/query_builder/SelectionModule.jsx similarity index 100% rename from frontend/src/query_builder/SelectionModule.jsx rename to frontend/src/metabase/query_builder/SelectionModule.jsx diff --git a/frontend/src/query_builder/SortWidget.jsx b/frontend/src/metabase/query_builder/SortWidget.jsx similarity index 100% rename from frontend/src/query_builder/SortWidget.jsx rename to frontend/src/metabase/query_builder/SortWidget.jsx diff --git a/frontend/src/query_builder/TimeGroupingPopover.jsx b/frontend/src/metabase/query_builder/TimeGroupingPopover.jsx similarity index 100% rename from frontend/src/query_builder/TimeGroupingPopover.jsx rename to frontend/src/metabase/query_builder/TimeGroupingPopover.jsx diff --git a/frontend/src/query_builder/VisualizationSettings.jsx b/frontend/src/metabase/query_builder/VisualizationSettings.jsx similarity index 100% rename from frontend/src/query_builder/VisualizationSettings.jsx rename to frontend/src/metabase/query_builder/VisualizationSettings.jsx diff --git a/frontend/src/query_builder/dataref/DataReference.jsx b/frontend/src/metabase/query_builder/dataref/DataReference.jsx similarity index 100% rename from frontend/src/query_builder/dataref/DataReference.jsx rename to frontend/src/metabase/query_builder/dataref/DataReference.jsx diff --git a/frontend/src/query_builder/dataref/DetailPane.jsx b/frontend/src/metabase/query_builder/dataref/DetailPane.jsx similarity index 100% rename from frontend/src/query_builder/dataref/DetailPane.jsx rename to frontend/src/metabase/query_builder/dataref/DetailPane.jsx diff --git a/frontend/src/query_builder/dataref/FieldPane.jsx b/frontend/src/metabase/query_builder/dataref/FieldPane.jsx similarity index 100% rename from 
frontend/src/query_builder/dataref/FieldPane.jsx rename to frontend/src/metabase/query_builder/dataref/FieldPane.jsx diff --git a/frontend/src/query_builder/dataref/MainPane.jsx b/frontend/src/metabase/query_builder/dataref/MainPane.jsx similarity index 100% rename from frontend/src/query_builder/dataref/MainPane.jsx rename to frontend/src/metabase/query_builder/dataref/MainPane.jsx diff --git a/frontend/src/query_builder/dataref/MetricPane.jsx b/frontend/src/metabase/query_builder/dataref/MetricPane.jsx similarity index 100% rename from frontend/src/query_builder/dataref/MetricPane.jsx rename to frontend/src/metabase/query_builder/dataref/MetricPane.jsx diff --git a/frontend/src/query_builder/dataref/QueryButton.jsx b/frontend/src/metabase/query_builder/dataref/QueryButton.jsx similarity index 100% rename from frontend/src/query_builder/dataref/QueryButton.jsx rename to frontend/src/metabase/query_builder/dataref/QueryButton.jsx diff --git a/frontend/src/query_builder/dataref/QueryDefinition.jsx b/frontend/src/metabase/query_builder/dataref/QueryDefinition.jsx similarity index 100% rename from frontend/src/query_builder/dataref/QueryDefinition.jsx rename to frontend/src/metabase/query_builder/dataref/QueryDefinition.jsx diff --git a/frontend/src/query_builder/dataref/SegmentPane.jsx b/frontend/src/metabase/query_builder/dataref/SegmentPane.jsx similarity index 100% rename from frontend/src/query_builder/dataref/SegmentPane.jsx rename to frontend/src/metabase/query_builder/dataref/SegmentPane.jsx diff --git a/frontend/src/query_builder/dataref/TablePane.jsx b/frontend/src/metabase/query_builder/dataref/TablePane.jsx similarity index 100% rename from frontend/src/query_builder/dataref/TablePane.jsx rename to frontend/src/metabase/query_builder/dataref/TablePane.jsx diff --git a/frontend/src/query_builder/dataref/UseForButton.jsx b/frontend/src/metabase/query_builder/dataref/UseForButton.jsx similarity index 100% rename from frontend/src/query_builder/dataref/UseForButton.jsx rename to frontend/src/metabase/query_builder/dataref/UseForButton.jsx diff --git a/frontend/src/query_builder/expressions/ExpressionEditorTextfield.css b/frontend/src/metabase/query_builder/expressions/ExpressionEditorTextfield.css similarity index 100% rename from frontend/src/query_builder/expressions/ExpressionEditorTextfield.css rename to frontend/src/metabase/query_builder/expressions/ExpressionEditorTextfield.css diff --git a/frontend/src/query_builder/expressions/ExpressionEditorTextfield.jsx b/frontend/src/metabase/query_builder/expressions/ExpressionEditorTextfield.jsx similarity index 100% rename from frontend/src/query_builder/expressions/ExpressionEditorTextfield.jsx rename to frontend/src/metabase/query_builder/expressions/ExpressionEditorTextfield.jsx diff --git a/frontend/src/query_builder/expressions/ExpressionWidget.jsx b/frontend/src/metabase/query_builder/expressions/ExpressionWidget.jsx similarity index 100% rename from frontend/src/query_builder/expressions/ExpressionWidget.jsx rename to frontend/src/metabase/query_builder/expressions/ExpressionWidget.jsx diff --git a/frontend/src/query_builder/expressions/Expressions.jsx b/frontend/src/metabase/query_builder/expressions/Expressions.jsx similarity index 100% rename from frontend/src/query_builder/expressions/Expressions.jsx rename to frontend/src/metabase/query_builder/expressions/Expressions.jsx diff --git a/frontend/src/query_builder/filters/FilterList.jsx b/frontend/src/metabase/query_builder/filters/FilterList.jsx similarity index 100% rename 
from frontend/src/query_builder/filters/FilterList.jsx rename to frontend/src/metabase/query_builder/filters/FilterList.jsx diff --git a/frontend/src/query_builder/filters/FilterPopover.jsx b/frontend/src/metabase/query_builder/filters/FilterPopover.jsx similarity index 100% rename from frontend/src/query_builder/filters/FilterPopover.jsx rename to frontend/src/metabase/query_builder/filters/FilterPopover.jsx diff --git a/frontend/src/query_builder/filters/FilterWidget.jsx b/frontend/src/metabase/query_builder/filters/FilterWidget.jsx similarity index 100% rename from frontend/src/query_builder/filters/FilterWidget.jsx rename to frontend/src/metabase/query_builder/filters/FilterWidget.jsx diff --git a/frontend/src/query_builder/filters/OperatorSelector.jsx b/frontend/src/metabase/query_builder/filters/OperatorSelector.jsx similarity index 100% rename from frontend/src/query_builder/filters/OperatorSelector.jsx rename to frontend/src/metabase/query_builder/filters/OperatorSelector.jsx diff --git a/frontend/src/query_builder/filters/pickers/DatePicker.jsx b/frontend/src/metabase/query_builder/filters/pickers/DatePicker.jsx similarity index 100% rename from frontend/src/query_builder/filters/pickers/DatePicker.jsx rename to frontend/src/metabase/query_builder/filters/pickers/DatePicker.jsx diff --git a/frontend/src/query_builder/filters/pickers/NumberPicker.jsx b/frontend/src/metabase/query_builder/filters/pickers/NumberPicker.jsx similarity index 100% rename from frontend/src/query_builder/filters/pickers/NumberPicker.jsx rename to frontend/src/metabase/query_builder/filters/pickers/NumberPicker.jsx diff --git a/frontend/src/query_builder/filters/pickers/RelativeDatePicker.jsx b/frontend/src/metabase/query_builder/filters/pickers/RelativeDatePicker.jsx similarity index 100% rename from frontend/src/query_builder/filters/pickers/RelativeDatePicker.jsx rename to frontend/src/metabase/query_builder/filters/pickers/RelativeDatePicker.jsx diff --git a/frontend/src/query_builder/filters/pickers/SelectPicker.jsx b/frontend/src/metabase/query_builder/filters/pickers/SelectPicker.jsx similarity index 100% rename from frontend/src/query_builder/filters/pickers/SelectPicker.jsx rename to frontend/src/metabase/query_builder/filters/pickers/SelectPicker.jsx diff --git a/frontend/src/query_builder/filters/pickers/SpecificDatePicker.jsx b/frontend/src/metabase/query_builder/filters/pickers/SpecificDatePicker.jsx similarity index 100% rename from frontend/src/query_builder/filters/pickers/SpecificDatePicker.jsx rename to frontend/src/metabase/query_builder/filters/pickers/SpecificDatePicker.jsx diff --git a/frontend/src/query_builder/filters/pickers/TextPicker.jsx b/frontend/src/metabase/query_builder/filters/pickers/TextPicker.jsx similarity index 100% rename from frontend/src/query_builder/filters/pickers/TextPicker.jsx rename to frontend/src/metabase/query_builder/filters/pickers/TextPicker.jsx diff --git a/frontend/src/questions/Questions.css b/frontend/src/metabase/questions/Questions.css similarity index 100% rename from frontend/src/questions/Questions.css rename to frontend/src/metabase/questions/Questions.css diff --git a/frontend/src/questions/components/ActionHeader.css b/frontend/src/metabase/questions/components/ActionHeader.css similarity index 100% rename from frontend/src/questions/components/ActionHeader.css rename to frontend/src/metabase/questions/components/ActionHeader.css diff --git a/frontend/src/questions/components/ActionHeader.jsx 
b/frontend/src/metabase/questions/components/ActionHeader.jsx similarity index 100% rename from frontend/src/questions/components/ActionHeader.jsx rename to frontend/src/metabase/questions/components/ActionHeader.jsx diff --git a/frontend/src/questions/components/EmojiIcon.jsx b/frontend/src/metabase/questions/components/EmojiIcon.jsx similarity index 100% rename from frontend/src/questions/components/EmojiIcon.jsx rename to frontend/src/metabase/questions/components/EmojiIcon.jsx diff --git a/frontend/src/questions/components/EmptyState.jsx b/frontend/src/metabase/questions/components/EmptyState.jsx similarity index 100% rename from frontend/src/questions/components/EmptyState.jsx rename to frontend/src/metabase/questions/components/EmptyState.jsx diff --git a/frontend/src/questions/components/Item.jsx b/frontend/src/metabase/questions/components/Item.jsx similarity index 100% rename from frontend/src/questions/components/Item.jsx rename to frontend/src/metabase/questions/components/Item.jsx diff --git a/frontend/src/questions/components/LabelIcon.css b/frontend/src/metabase/questions/components/LabelIcon.css similarity index 100% rename from frontend/src/questions/components/LabelIcon.css rename to frontend/src/metabase/questions/components/LabelIcon.css diff --git a/frontend/src/questions/components/LabelIcon.jsx b/frontend/src/metabase/questions/components/LabelIcon.jsx similarity index 100% rename from frontend/src/questions/components/LabelIcon.jsx rename to frontend/src/metabase/questions/components/LabelIcon.jsx diff --git a/frontend/src/questions/components/LabelIconPicker.css b/frontend/src/metabase/questions/components/LabelIconPicker.css similarity index 100% rename from frontend/src/questions/components/LabelIconPicker.css rename to frontend/src/metabase/questions/components/LabelIconPicker.css diff --git a/frontend/src/questions/components/LabelIconPicker.jsx b/frontend/src/metabase/questions/components/LabelIconPicker.jsx similarity index 100% rename from frontend/src/questions/components/LabelIconPicker.jsx rename to frontend/src/metabase/questions/components/LabelIconPicker.jsx diff --git a/frontend/src/questions/components/LabelPicker.css b/frontend/src/metabase/questions/components/LabelPicker.css similarity index 100% rename from frontend/src/questions/components/LabelPicker.css rename to frontend/src/metabase/questions/components/LabelPicker.css diff --git a/frontend/src/questions/components/LabelPicker.jsx b/frontend/src/metabase/questions/components/LabelPicker.jsx similarity index 100% rename from frontend/src/questions/components/LabelPicker.jsx rename to frontend/src/metabase/questions/components/LabelPicker.jsx diff --git a/frontend/src/questions/components/Labels.css b/frontend/src/metabase/questions/components/Labels.css similarity index 100% rename from frontend/src/questions/components/Labels.css rename to frontend/src/metabase/questions/components/Labels.css diff --git a/frontend/src/questions/components/Labels.jsx b/frontend/src/metabase/questions/components/Labels.jsx similarity index 100% rename from frontend/src/questions/components/Labels.jsx rename to frontend/src/metabase/questions/components/Labels.jsx diff --git a/frontend/src/questions/components/List.css b/frontend/src/metabase/questions/components/List.css similarity index 100% rename from frontend/src/questions/components/List.css rename to frontend/src/metabase/questions/components/List.css diff --git a/frontend/src/questions/components/List.jsx 
b/frontend/src/metabase/questions/components/List.jsx similarity index 100% rename from frontend/src/questions/components/List.jsx rename to frontend/src/metabase/questions/components/List.jsx diff --git a/frontend/src/questions/components/SearchHeader.css b/frontend/src/metabase/questions/components/SearchHeader.css similarity index 100% rename from frontend/src/questions/components/SearchHeader.css rename to frontend/src/metabase/questions/components/SearchHeader.css diff --git a/frontend/src/questions/components/SearchHeader.jsx b/frontend/src/metabase/questions/components/SearchHeader.jsx similarity index 100% rename from frontend/src/questions/components/SearchHeader.jsx rename to frontend/src/metabase/questions/components/SearchHeader.jsx diff --git a/frontend/src/questions/components/Sidebar.css b/frontend/src/metabase/questions/components/Sidebar.css similarity index 100% rename from frontend/src/questions/components/Sidebar.css rename to frontend/src/metabase/questions/components/Sidebar.css diff --git a/frontend/src/questions/components/Sidebar.jsx b/frontend/src/metabase/questions/components/Sidebar.jsx similarity index 100% rename from frontend/src/questions/components/Sidebar.jsx rename to frontend/src/metabase/questions/components/Sidebar.jsx diff --git a/frontend/src/questions/components/SidebarLayout.jsx b/frontend/src/metabase/questions/components/SidebarLayout.jsx similarity index 100% rename from frontend/src/questions/components/SidebarLayout.jsx rename to frontend/src/metabase/questions/components/SidebarLayout.jsx diff --git a/frontend/src/questions/containers/EditLabels.css b/frontend/src/metabase/questions/containers/EditLabels.css similarity index 100% rename from frontend/src/questions/containers/EditLabels.css rename to frontend/src/metabase/questions/containers/EditLabels.css diff --git a/frontend/src/questions/containers/EditLabels.jsx b/frontend/src/metabase/questions/containers/EditLabels.jsx similarity index 100% rename from frontend/src/questions/containers/EditLabels.jsx rename to frontend/src/metabase/questions/containers/EditLabels.jsx diff --git a/frontend/src/questions/containers/EntityBrowser.jsx b/frontend/src/metabase/questions/containers/EntityBrowser.jsx similarity index 100% rename from frontend/src/questions/containers/EntityBrowser.jsx rename to frontend/src/metabase/questions/containers/EntityBrowser.jsx diff --git a/frontend/src/questions/containers/EntityItem.jsx b/frontend/src/metabase/questions/containers/EntityItem.jsx similarity index 100% rename from frontend/src/questions/containers/EntityItem.jsx rename to frontend/src/metabase/questions/containers/EntityItem.jsx diff --git a/frontend/src/questions/containers/EntityList.jsx b/frontend/src/metabase/questions/containers/EntityList.jsx similarity index 100% rename from frontend/src/questions/containers/EntityList.jsx rename to frontend/src/metabase/questions/containers/EntityList.jsx diff --git a/frontend/src/questions/containers/LabelEditorForm.css b/frontend/src/metabase/questions/containers/LabelEditorForm.css similarity index 100% rename from frontend/src/questions/containers/LabelEditorForm.css rename to frontend/src/metabase/questions/containers/LabelEditorForm.css diff --git a/frontend/src/questions/containers/LabelEditorForm.jsx b/frontend/src/metabase/questions/containers/LabelEditorForm.jsx similarity index 100% rename from frontend/src/questions/containers/LabelEditorForm.jsx rename to frontend/src/metabase/questions/containers/LabelEditorForm.jsx diff --git 
a/frontend/src/questions/containers/LabelPopover.jsx b/frontend/src/metabase/questions/containers/LabelPopover.jsx similarity index 100% rename from frontend/src/questions/containers/LabelPopover.jsx rename to frontend/src/metabase/questions/containers/LabelPopover.jsx diff --git a/frontend/src/questions/containers/UndoListing.css b/frontend/src/metabase/questions/containers/UndoListing.css similarity index 100% rename from frontend/src/questions/containers/UndoListing.css rename to frontend/src/metabase/questions/containers/UndoListing.css diff --git a/frontend/src/questions/containers/UndoListing.jsx b/frontend/src/metabase/questions/containers/UndoListing.jsx similarity index 100% rename from frontend/src/questions/containers/UndoListing.jsx rename to frontend/src/metabase/questions/containers/UndoListing.jsx diff --git a/frontend/src/questions/labels.js b/frontend/src/metabase/questions/labels.js similarity index 100% rename from frontend/src/questions/labels.js rename to frontend/src/metabase/questions/labels.js diff --git a/frontend/src/questions/questions.js b/frontend/src/metabase/questions/questions.js similarity index 100% rename from frontend/src/questions/questions.js rename to frontend/src/metabase/questions/questions.js diff --git a/frontend/src/questions/selectors.js b/frontend/src/metabase/questions/selectors.js similarity index 100% rename from frontend/src/questions/selectors.js rename to frontend/src/metabase/questions/selectors.js diff --git a/frontend/src/questions/undo.js b/frontend/src/metabase/questions/undo.js similarity index 100% rename from frontend/src/questions/undo.js rename to frontend/src/metabase/questions/undo.js diff --git a/frontend/src/services.js b/frontend/src/metabase/services.js similarity index 100% rename from frontend/src/services.js rename to frontend/src/metabase/services.js diff --git a/frontend/src/setup/actions.js b/frontend/src/metabase/setup/actions.js similarity index 100% rename from frontend/src/setup/actions.js rename to frontend/src/metabase/setup/actions.js diff --git a/frontend/src/setup/components/CollapsedStep.jsx b/frontend/src/metabase/setup/components/CollapsedStep.jsx similarity index 100% rename from frontend/src/setup/components/CollapsedStep.jsx rename to frontend/src/metabase/setup/components/CollapsedStep.jsx diff --git a/frontend/src/setup/components/DatabaseStep.jsx b/frontend/src/metabase/setup/components/DatabaseStep.jsx similarity index 100% rename from frontend/src/setup/components/DatabaseStep.jsx rename to frontend/src/metabase/setup/components/DatabaseStep.jsx diff --git a/frontend/src/setup/components/PreferencesStep.jsx b/frontend/src/metabase/setup/components/PreferencesStep.jsx similarity index 100% rename from frontend/src/setup/components/PreferencesStep.jsx rename to frontend/src/metabase/setup/components/PreferencesStep.jsx diff --git a/frontend/src/setup/components/Setup.jsx b/frontend/src/metabase/setup/components/Setup.jsx similarity index 100% rename from frontend/src/setup/components/Setup.jsx rename to frontend/src/metabase/setup/components/Setup.jsx diff --git a/frontend/src/setup/components/StepTitle.jsx b/frontend/src/metabase/setup/components/StepTitle.jsx similarity index 100% rename from frontend/src/setup/components/StepTitle.jsx rename to frontend/src/metabase/setup/components/StepTitle.jsx diff --git a/frontend/src/setup/components/UserStep.jsx b/frontend/src/metabase/setup/components/UserStep.jsx similarity index 100% rename from frontend/src/setup/components/UserStep.jsx rename to 
frontend/src/metabase/setup/components/UserStep.jsx diff --git a/frontend/src/setup/containers/SetupApp.jsx b/frontend/src/metabase/setup/containers/SetupApp.jsx similarity index 100% rename from frontend/src/setup/containers/SetupApp.jsx rename to frontend/src/metabase/setup/containers/SetupApp.jsx diff --git a/frontend/src/setup/reducers.js b/frontend/src/metabase/setup/reducers.js similarity index 100% rename from frontend/src/setup/reducers.js rename to frontend/src/metabase/setup/reducers.js diff --git a/frontend/src/setup/selectors.js b/frontend/src/metabase/setup/selectors.js similarity index 100% rename from frontend/src/setup/selectors.js rename to frontend/src/metabase/setup/selectors.js diff --git a/frontend/src/setup/setup.controllers.js b/frontend/src/metabase/setup/setup.controllers.js similarity index 100% rename from frontend/src/setup/setup.controllers.js rename to frontend/src/metabase/setup/setup.controllers.js diff --git a/frontend/src/setup/setup.module.js b/frontend/src/metabase/setup/setup.module.js similarity index 100% rename from frontend/src/setup/setup.module.js rename to frontend/src/metabase/setup/setup.module.js diff --git a/frontend/src/tutorial/PageFlag.css b/frontend/src/metabase/tutorial/PageFlag.css similarity index 100% rename from frontend/src/tutorial/PageFlag.css rename to frontend/src/metabase/tutorial/PageFlag.css diff --git a/frontend/src/tutorial/PageFlag.jsx b/frontend/src/metabase/tutorial/PageFlag.jsx similarity index 100% rename from frontend/src/tutorial/PageFlag.jsx rename to frontend/src/metabase/tutorial/PageFlag.jsx diff --git a/frontend/src/tutorial/Portal.jsx b/frontend/src/metabase/tutorial/Portal.jsx similarity index 100% rename from frontend/src/tutorial/Portal.jsx rename to frontend/src/metabase/tutorial/Portal.jsx diff --git a/frontend/src/tutorial/QueryBuilderTutorial.jsx b/frontend/src/metabase/tutorial/QueryBuilderTutorial.jsx similarity index 100% rename from frontend/src/tutorial/QueryBuilderTutorial.jsx rename to frontend/src/metabase/tutorial/QueryBuilderTutorial.jsx diff --git a/frontend/src/tutorial/Tutorial.jsx b/frontend/src/metabase/tutorial/Tutorial.jsx similarity index 100% rename from frontend/src/tutorial/Tutorial.jsx rename to frontend/src/metabase/tutorial/Tutorial.jsx diff --git a/frontend/src/tutorial/TutorialModal.jsx b/frontend/src/metabase/tutorial/TutorialModal.jsx similarity index 100% rename from frontend/src/tutorial/TutorialModal.jsx rename to frontend/src/metabase/tutorial/TutorialModal.jsx diff --git a/frontend/src/user/actions.js b/frontend/src/metabase/user/actions.js similarity index 100% rename from frontend/src/user/actions.js rename to frontend/src/metabase/user/actions.js diff --git a/frontend/src/user/components/SetUserPassword.jsx b/frontend/src/metabase/user/components/SetUserPassword.jsx similarity index 100% rename from frontend/src/user/components/SetUserPassword.jsx rename to frontend/src/metabase/user/components/SetUserPassword.jsx diff --git a/frontend/src/user/components/UpdateUserDetails.jsx b/frontend/src/metabase/user/components/UpdateUserDetails.jsx similarity index 100% rename from frontend/src/user/components/UpdateUserDetails.jsx rename to frontend/src/metabase/user/components/UpdateUserDetails.jsx diff --git a/frontend/src/user/components/UserSettings.jsx b/frontend/src/metabase/user/components/UserSettings.jsx similarity index 100% rename from frontend/src/user/components/UserSettings.jsx rename to frontend/src/metabase/user/components/UserSettings.jsx diff --git 
a/frontend/src/user/containers/UserSettingsApp.jsx b/frontend/src/metabase/user/containers/UserSettingsApp.jsx similarity index 100% rename from frontend/src/user/containers/UserSettingsApp.jsx rename to frontend/src/metabase/user/containers/UserSettingsApp.jsx diff --git a/frontend/src/user/reducers.js b/frontend/src/metabase/user/reducers.js similarity index 100% rename from frontend/src/user/reducers.js rename to frontend/src/metabase/user/reducers.js diff --git a/frontend/src/user/selectors.js b/frontend/src/metabase/user/selectors.js similarity index 100% rename from frontend/src/user/selectors.js rename to frontend/src/metabase/user/selectors.js diff --git a/frontend/src/user/user.controllers.js b/frontend/src/metabase/user/user.controllers.js similarity index 100% rename from frontend/src/user/user.controllers.js rename to frontend/src/metabase/user/user.controllers.js diff --git a/frontend/src/user/user.module.js b/frontend/src/metabase/user/user.module.js similarity index 100% rename from frontend/src/user/user.module.js rename to frontend/src/metabase/user/user.module.js diff --git a/frontend/src/vendor.js b/frontend/src/metabase/vendor.js similarity index 100% rename from frontend/src/vendor.js rename to frontend/src/metabase/vendor.js diff --git a/frontend/src/visualizations/AreaChart.jsx b/frontend/src/metabase/visualizations/AreaChart.jsx similarity index 100% rename from frontend/src/visualizations/AreaChart.jsx rename to frontend/src/metabase/visualizations/AreaChart.jsx diff --git a/frontend/src/visualizations/BarChart.jsx b/frontend/src/metabase/visualizations/BarChart.jsx similarity index 100% rename from frontend/src/visualizations/BarChart.jsx rename to frontend/src/metabase/visualizations/BarChart.jsx diff --git a/frontend/src/visualizations/LineChart.jsx b/frontend/src/metabase/visualizations/LineChart.jsx similarity index 100% rename from frontend/src/visualizations/LineChart.jsx rename to frontend/src/metabase/visualizations/LineChart.jsx diff --git a/frontend/src/visualizations/PieChart.css b/frontend/src/metabase/visualizations/PieChart.css similarity index 100% rename from frontend/src/visualizations/PieChart.css rename to frontend/src/metabase/visualizations/PieChart.css diff --git a/frontend/src/visualizations/PieChart.jsx b/frontend/src/metabase/visualizations/PieChart.jsx similarity index 100% rename from frontend/src/visualizations/PieChart.jsx rename to frontend/src/metabase/visualizations/PieChart.jsx diff --git a/frontend/src/visualizations/PinMap.jsx b/frontend/src/metabase/visualizations/PinMap.jsx similarity index 100% rename from frontend/src/visualizations/PinMap.jsx rename to frontend/src/metabase/visualizations/PinMap.jsx diff --git a/frontend/src/visualizations/Scalar.css b/frontend/src/metabase/visualizations/Scalar.css similarity index 100% rename from frontend/src/visualizations/Scalar.css rename to frontend/src/metabase/visualizations/Scalar.css diff --git a/frontend/src/visualizations/Scalar.jsx b/frontend/src/metabase/visualizations/Scalar.jsx similarity index 100% rename from frontend/src/visualizations/Scalar.jsx rename to frontend/src/metabase/visualizations/Scalar.jsx diff --git a/frontend/src/visualizations/Table.css b/frontend/src/metabase/visualizations/Table.css similarity index 100% rename from frontend/src/visualizations/Table.css rename to frontend/src/metabase/visualizations/Table.css diff --git a/frontend/src/visualizations/Table.jsx b/frontend/src/metabase/visualizations/Table.jsx similarity index 100% rename from 
frontend/src/visualizations/Table.jsx rename to frontend/src/metabase/visualizations/Table.jsx diff --git a/frontend/src/visualizations/TableInteractive.jsx b/frontend/src/metabase/visualizations/TableInteractive.jsx similarity index 100% rename from frontend/src/visualizations/TableInteractive.jsx rename to frontend/src/metabase/visualizations/TableInteractive.jsx diff --git a/frontend/src/visualizations/TableSimple.jsx b/frontend/src/metabase/visualizations/TableSimple.jsx similarity index 100% rename from frontend/src/visualizations/TableSimple.jsx rename to frontend/src/metabase/visualizations/TableSimple.jsx diff --git a/frontend/src/visualizations/USStateMap.jsx b/frontend/src/metabase/visualizations/USStateMap.jsx similarity index 100% rename from frontend/src/visualizations/USStateMap.jsx rename to frontend/src/metabase/visualizations/USStateMap.jsx diff --git a/frontend/src/visualizations/WorldMap.jsx b/frontend/src/metabase/visualizations/WorldMap.jsx similarity index 100% rename from frontend/src/visualizations/WorldMap.jsx rename to frontend/src/metabase/visualizations/WorldMap.jsx diff --git a/frontend/src/visualizations/XKCDChart.css b/frontend/src/metabase/visualizations/XKCDChart.css similarity index 100% rename from frontend/src/visualizations/XKCDChart.css rename to frontend/src/metabase/visualizations/XKCDChart.css diff --git a/frontend/src/visualizations/XKCDChart.jsx b/frontend/src/metabase/visualizations/XKCDChart.jsx similarity index 100% rename from frontend/src/visualizations/XKCDChart.jsx rename to frontend/src/metabase/visualizations/XKCDChart.jsx diff --git a/frontend/src/visualizations/components/CardRenderer.jsx b/frontend/src/metabase/visualizations/components/CardRenderer.jsx similarity index 100% rename from frontend/src/visualizations/components/CardRenderer.jsx rename to frontend/src/metabase/visualizations/components/CardRenderer.jsx diff --git a/frontend/src/visualizations/components/ChartTooltip.jsx b/frontend/src/metabase/visualizations/components/ChartTooltip.jsx similarity index 100% rename from frontend/src/visualizations/components/ChartTooltip.jsx rename to frontend/src/metabase/visualizations/components/ChartTooltip.jsx diff --git a/frontend/src/visualizations/components/ChartWithLegend.css b/frontend/src/metabase/visualizations/components/ChartWithLegend.css similarity index 100% rename from frontend/src/visualizations/components/ChartWithLegend.css rename to frontend/src/metabase/visualizations/components/ChartWithLegend.css diff --git a/frontend/src/visualizations/components/ChartWithLegend.jsx b/frontend/src/metabase/visualizations/components/ChartWithLegend.jsx similarity index 100% rename from frontend/src/visualizations/components/ChartWithLegend.jsx rename to frontend/src/metabase/visualizations/components/ChartWithLegend.jsx diff --git a/frontend/src/visualizations/components/ChoroplethMap.jsx b/frontend/src/metabase/visualizations/components/ChoroplethMap.jsx similarity index 100% rename from frontend/src/visualizations/components/ChoroplethMap.jsx rename to frontend/src/metabase/visualizations/components/ChoroplethMap.jsx diff --git a/frontend/src/visualizations/components/Legend.css b/frontend/src/metabase/visualizations/components/Legend.css similarity index 100% rename from frontend/src/visualizations/components/Legend.css rename to frontend/src/metabase/visualizations/components/Legend.css diff --git a/frontend/src/visualizations/components/LegendHeader.jsx b/frontend/src/metabase/visualizations/components/LegendHeader.jsx 
similarity index 100% rename from frontend/src/visualizations/components/LegendHeader.jsx rename to frontend/src/metabase/visualizations/components/LegendHeader.jsx diff --git a/frontend/src/visualizations/components/LegendHorizontal.jsx b/frontend/src/metabase/visualizations/components/LegendHorizontal.jsx similarity index 100% rename from frontend/src/visualizations/components/LegendHorizontal.jsx rename to frontend/src/metabase/visualizations/components/LegendHorizontal.jsx diff --git a/frontend/src/visualizations/components/LegendItem.jsx b/frontend/src/metabase/visualizations/components/LegendItem.jsx similarity index 100% rename from frontend/src/visualizations/components/LegendItem.jsx rename to frontend/src/metabase/visualizations/components/LegendItem.jsx diff --git a/frontend/src/visualizations/components/LegendVertical.jsx b/frontend/src/metabase/visualizations/components/LegendVertical.jsx similarity index 100% rename from frontend/src/visualizations/components/LegendVertical.jsx rename to frontend/src/metabase/visualizations/components/LegendVertical.jsx diff --git a/frontend/src/visualizations/components/LineAreaBarChart.jsx b/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx similarity index 100% rename from frontend/src/visualizations/components/LineAreaBarChart.jsx rename to frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx diff --git a/frontend/src/visualizations/components/Visualization.jsx b/frontend/src/metabase/visualizations/components/Visualization.jsx similarity index 100% rename from frontend/src/visualizations/components/Visualization.jsx rename to frontend/src/metabase/visualizations/components/Visualization.jsx diff --git a/frontend/src/visualizations/components/settings/ColorSetting.jsx b/frontend/src/metabase/visualizations/components/settings/ColorSetting.jsx similarity index 100% rename from frontend/src/visualizations/components/settings/ColorSetting.jsx rename to frontend/src/metabase/visualizations/components/settings/ColorSetting.jsx diff --git a/frontend/src/visualizations/index.js b/frontend/src/metabase/visualizations/index.js similarity index 100% rename from frontend/src/visualizations/index.js rename to frontend/src/metabase/visualizations/index.js diff --git a/frontend/src/visualizations/lib/CardRenderer.js b/frontend/src/metabase/visualizations/lib/CardRenderer.js similarity index 100% rename from frontend/src/visualizations/lib/CardRenderer.js rename to frontend/src/metabase/visualizations/lib/CardRenderer.js diff --git a/frontend/src/visualizations/lib/ChartRenderer.js b/frontend/src/metabase/visualizations/lib/ChartRenderer.js similarity index 100% rename from frontend/src/visualizations/lib/ChartRenderer.js rename to frontend/src/metabase/visualizations/lib/ChartRenderer.js diff --git a/frontend/src/visualizations/lib/GeoHeatmapChartRenderer.js b/frontend/src/metabase/visualizations/lib/GeoHeatmapChartRenderer.js similarity index 100% rename from frontend/src/visualizations/lib/GeoHeatmapChartRenderer.js rename to frontend/src/metabase/visualizations/lib/GeoHeatmapChartRenderer.js diff --git a/frontend/src/visualizations/lib/errors.js b/frontend/src/metabase/visualizations/lib/errors.js similarity index 100% rename from frontend/src/visualizations/lib/errors.js rename to frontend/src/metabase/visualizations/lib/errors.js diff --git a/frontend/src/visualizations/lib/timeseries.js b/frontend/src/metabase/visualizations/lib/timeseries.js similarity index 100% rename from 
frontend/src/visualizations/lib/timeseries.js rename to frontend/src/metabase/visualizations/lib/timeseries.js diff --git a/frontend/src/visualizations/lib/tooltip.js b/frontend/src/metabase/visualizations/lib/tooltip.js similarity index 100% rename from frontend/src/visualizations/lib/tooltip.js rename to frontend/src/metabase/visualizations/lib/tooltip.js diff --git a/frontend/src/visualizations/lib/utils.js b/frontend/src/metabase/visualizations/lib/utils.js similarity index 100% rename from frontend/src/visualizations/lib/utils.js rename to frontend/src/metabase/visualizations/lib/utils.js diff --git a/frontend/test/unit/.eslintrc b/frontend/test/unit/.eslintrc new file mode 100644 index 0000000000000000000000000000000000000000..8e791840981604b2d85b6deba2c88bd2b6af8111 --- /dev/null +++ b/frontend/test/unit/.eslintrc @@ -0,0 +1,5 @@ +{ + "env": { + "jasmine": true + } +} diff --git a/frontend/test/unit/lib/data_grid.spec.js b/frontend/test/unit/lib/data_grid.spec.js index 258bd1d3dca5708e696a855d4286ce2991c9e756..fb9eb61b46e0686e93f253411a19fc5bc0db9452 100644 --- a/frontend/test/unit/lib/data_grid.spec.js +++ b/frontend/test/unit/lib/data_grid.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import { pivot } from "metabase/lib/data_grid"; function makeData(rows) { diff --git a/frontend/test/unit/lib/expressions.spec.js b/frontend/test/unit/lib/expressions.spec.js index c909d6122604bacf36eeb54d8810f0bf9e322bd5..e15ae023e1b159379dec60c360817885f5be57c5 100644 --- a/frontend/test/unit/lib/expressions.spec.js +++ b/frontend/test/unit/lib/expressions.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import _ from "underscore"; import { formatExpression, parseExpressionString, tokenAtPosition, tokensToExpression } from "metabase/lib/expressions"; diff --git a/frontend/test/unit/lib/query.spec.js b/frontend/test/unit/lib/query.spec.js index 9135d5d05c5db0ea96d0207e690e225816464387..1fde115f8957d17f39af2c6e4b9c48b562116645 100644 --- a/frontend/test/unit/lib/query.spec.js +++ b/frontend/test/unit/lib/query.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import Query from "metabase/lib/query"; import { createQuery, AggregationClause, BreakoutClause } from "metabase/lib/query"; diff --git a/frontend/test/unit/lib/query_time.spec.js b/frontend/test/unit/lib/query_time.spec.js index ba99282340e7bb7268b34b295b26779f79abd4ce..1686357d897e077150d31ac5beaf87e2d166d929 100644 --- a/frontend/test/unit/lib/query_time.spec.js +++ b/frontend/test/unit/lib/query_time.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import moment from "moment"; import { expandTimeIntervalFilter, computeFilterTimeRange, absolute } from 'metabase/lib/query_time'; diff --git a/frontend/test/unit/lib/schema_metadata.spec.js b/frontend/test/unit/lib/schema_metadata.spec.js index e33605e3f023b0aba8348e41381fc5c71d81ccce..2ce6be510ab212d1776d78ccafd08413a0d1647e 100644 --- a/frontend/test/unit/lib/schema_metadata.spec.js +++ b/frontend/test/unit/lib/schema_metadata.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import { getFieldType, DATE_TIME, diff --git a/frontend/test/unit/lib/utils.spec.js b/frontend/test/unit/lib/utils.spec.js index 0a517171f085c9b4fff09649fa6449b79cf777af..3c04826cafe0261d23027c5611b9a2a6510c2e45 100644 --- a/frontend/test/unit/lib/utils.spec.js +++ b/frontend/test/unit/lib/utils.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import MetabaseUtils from 'metabase/lib/utils'; diff --git a/frontend/test/unit/meta/Card.spec.js b/frontend/test/unit/meta/Card.spec.js new file mode 100644 index 
0000000000000000000000000000000000000000..fb5fe699482d1845c0c5ca4cb5906020dce854e8 --- /dev/null +++ b/frontend/test/unit/meta/Card.spec.js @@ -0,0 +1,20 @@ +import * as Card from "metabase/meta/Card"; + +describe("Card", () => { + describe("isStructured()", () => { + it("should return true", () => { + expect(Card.isStructured({ dataset_query : { type: "query" }})).toEqual(true); + }); + it("should return false", () => { + expect(Card.isStructured({ dataset_query : { type: "native" }})).toEqual(false); + }); + }); + describe("isNative()", () => { + it("should return true", () => { + expect(Card.isNative({ dataset_query : { type: "native" }})).toEqual(true); + }); + it("should return false", () => { + expect(Card.isNative({ dataset_query : { type: "query" }})).toEqual(false); + }); + }); +}); diff --git a/frontend/test/unit/meta/Metadata.spec.js b/frontend/test/unit/meta/Metadata.spec.js new file mode 100644 index 0000000000000000000000000000000000000000..aac3836d6a5c952e2309092f26158438cbd49ebe --- /dev/null +++ b/frontend/test/unit/meta/Metadata.spec.js @@ -0,0 +1,10 @@ +import Metadata from "metabase/meta/metadata/Metadata"; + +describe("Metadata", () => { + describe("databases()", () => { + it("should return the right number of databases", () => { + const m = new Metadata([{ id: 1 }, { id: 2 }, { id: 3 }]) + expect(m.databases().length).toEqual(3); + }); + }); +}); diff --git a/frontend/test/unit/meta/Query.spec.js b/frontend/test/unit/meta/Query.spec.js new file mode 100644 index 0000000000000000000000000000000000000000..1f4d87a7647aa06726394c1aa0b63ebd366c77c2 --- /dev/null +++ b/frontend/test/unit/meta/Query.spec.js @@ -0,0 +1,38 @@ +import * as Query from "metabase/meta/Query"; + +describe("Query", () => { + describe("addFilter()", () => { + it("should add a filter when none exists", () => { + let result = Query.addFilter({}, ["=", 1, 2]); + expect(result).toEqual({ filters: ["=", 1, 2]}); + }); + it("should AND existing filter with new filter", () => { + let result = Query.addFilter({ filters: ["=", 1, 2] }, ["=", 3, 4]); + expect(result).toEqual({ filters: ["and", ["=", 1, 2], ["=", 3, 4]] }); + }); + it("should concatenate a filter to existing ANDed filters", () => { + let result = Query.addFilter({ filters: ["and", ["=", 1, 2], ["=", 3, 4]] }, ["=", 5, 6]); + expect(result).toEqual({ filters: ["and", ["=", 1, 2], ["=", 3, 4], ["=", 5, 6]] }); + }); + }); + describe("removeFilter()", () => { + it("should remove entire filter clause if last filter is removed", () => { + let result = Query.removeFilter({ filters: ["=", 1, 2]}, 0); + expect(result).toEqual({ }); + }); + it("should remove AND clause if only one filter remains", () => { + let result = Query.removeFilter({ filters: ["and", ["=", 1, 2], ["=", 3, 4]] }, 0); + expect(result).toEqual({ filters: ["=", 3, 4] }); + }); + it("should remove correct filter", () => { + let result = Query.removeFilter({ filters: ["and", ["=", 1, 2], ["=", 3, 4], ["=", 5, 6]] }, 1); + expect(result).toEqual({ filters: ["and", ["=", 1, 2], ["=", 5, 6]] }); + }); + }); + describe("updateFilter()", () => { + it("should update correct filter", () => { + let result = Query.updateFilter({ filters: ["and", ["=", 1, 2], ["=", 3, 4], ["=", 5, 6]] }, 1, ["=", 7, 8]); + expect(result).toEqual({ filters: ["and", ["=", 1, 2], ["=", 7, 8], ["=", 5, 6]] }); + }); + }); +}); diff --git a/frontend/test/unit/visualizations/lib/timeseries.spec.js b/frontend/test/unit/visualizations/lib/timeseries.spec.js index 
0877136247643160034ac56418c568ffe20a901f..852b4c2ff84ecc13e9121b00cbd2367830d9555a 100644 --- a/frontend/test/unit/visualizations/lib/timeseries.spec.js +++ b/frontend/test/unit/visualizations/lib/timeseries.spec.js @@ -1,5 +1,3 @@ -/*eslint-env jasmine */ - import { dimensionIsTimeseries, computeTimeseriesDataInverval diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json index 5e72c8f714a7d4f817b5d1e04070016a6039bb42..9083a8a82a36fd5bb2322a63384bf7d06c1a23c3 100644 --- a/npm-shrinkwrap.json +++ b/npm-shrinkwrap.json @@ -1724,6 +1724,22 @@ } } }, + "babel-plugin-transform-flow-strip-types": { + "version": "6.8.0", + "dependencies": { + "babel-plugin-syntax-flow": { + "version": "6.8.0" + }, + "babel-runtime": { + "version": "6.6.1", + "dependencies": { + "core-js": { + "version": "2.4.0" + } + } + } + } + }, "babel-polyfill": { "version": "6.8.0", "dependencies": { @@ -6241,6 +6257,2312 @@ "fixed-data-table": { "version": "0.6.0" }, + "flow-bin": { + "version": "0.24.2", + "dependencies": { + "bin-wrapper": { + "version": "3.0.2", + "dependencies": { + "bin-check": { + "version": "2.0.0", + "dependencies": { + "executable": { + "version": "1.1.0", + "dependencies": { + "meow": { + "version": "3.7.0", + "dependencies": { + "camelcase-keys": { + "version": "2.1.0", + "dependencies": { + "camelcase": { + "version": "2.1.1" + } + } + }, + "decamelize": { + "version": "1.2.0" + }, + "loud-rejection": { + "version": "1.3.0", + "dependencies": { + "array-find-index": { + "version": "1.0.1" + }, + "signal-exit": { + "version": "2.1.2" + } + } + }, + "map-obj": { + "version": "1.0.1" + }, + "minimist": { + "version": "1.2.0" + }, + "normalize-package-data": { + "version": "2.3.5", + "dependencies": { + "hosted-git-info": { + "version": "2.1.4" + }, + "is-builtin-module": { + "version": "1.0.0", + "dependencies": { + "builtin-modules": { + "version": "1.1.1" + } + } + }, + "semver": { + "version": "5.1.0" + }, + "validate-npm-package-license": { + "version": "3.0.1", + "dependencies": { + "spdx-correct": { + "version": "1.0.2", + "dependencies": { + "spdx-license-ids": { + "version": "1.2.1" + } + } + }, + "spdx-expression-parse": { + "version": "1.0.2", + "dependencies": { + "spdx-exceptions": { + "version": "1.0.4" + }, + "spdx-license-ids": { + "version": "1.2.1" + } + } + } + } + } + } + }, + "object-assign": { + "version": "4.1.0" + }, + "read-pkg-up": { + "version": "1.0.1", + "dependencies": { + "find-up": { + "version": "1.1.2", + "dependencies": { + "path-exists": { + "version": "2.1.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + }, + "read-pkg": { + "version": "1.1.0", + "dependencies": { + "load-json-file": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "parse-json": { + "version": "2.2.0", + "dependencies": { + "error-ex": { + "version": "1.3.0", + "dependencies": { + "is-arrayish": { + "version": "0.2.1" + } + } + } + } + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + }, + "strip-bom": { + "version": "2.0.0", + "dependencies": { + "is-utf8": { + "version": "0.2.1" + } + } + } + } + }, + "path-type": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + } + } + } + } + }, + 
"redent": { + "version": "1.0.0", + "dependencies": { + "indent-string": { + "version": "2.1.0", + "dependencies": { + "repeating": { + "version": "2.0.1", + "dependencies": { + "is-finite": { + "version": "1.0.1", + "dependencies": { + "number-is-nan": { + "version": "1.0.0" + } + } + } + } + } + } + }, + "strip-indent": { + "version": "1.0.1", + "dependencies": { + "get-stdin": { + "version": "4.0.1" + } + } + } + } + }, + "trim-newlines": { + "version": "1.0.0" + } + } + } + } + } + } + }, + "bin-version-check": { + "version": "2.1.0", + "dependencies": { + "bin-version": { + "version": "1.0.4", + "dependencies": { + "find-versions": { + "version": "1.2.1", + "dependencies": { + "array-uniq": { + "version": "1.0.2" + }, + "get-stdin": { + "version": "4.0.1" + }, + "meow": { + "version": "3.7.0", + "dependencies": { + "camelcase-keys": { + "version": "2.1.0", + "dependencies": { + "camelcase": { + "version": "2.1.1" + } + } + }, + "decamelize": { + "version": "1.2.0" + }, + "loud-rejection": { + "version": "1.3.0", + "dependencies": { + "array-find-index": { + "version": "1.0.1" + }, + "signal-exit": { + "version": "2.1.2" + } + } + }, + "map-obj": { + "version": "1.0.1" + }, + "normalize-package-data": { + "version": "2.3.5", + "dependencies": { + "hosted-git-info": { + "version": "2.1.4" + }, + "is-builtin-module": { + "version": "1.0.0", + "dependencies": { + "builtin-modules": { + "version": "1.1.1" + } + } + }, + "validate-npm-package-license": { + "version": "3.0.1", + "dependencies": { + "spdx-correct": { + "version": "1.0.2", + "dependencies": { + "spdx-license-ids": { + "version": "1.2.1" + } + } + }, + "spdx-expression-parse": { + "version": "1.0.2", + "dependencies": { + "spdx-exceptions": { + "version": "1.0.4" + }, + "spdx-license-ids": { + "version": "1.2.1" + } + } + } + } + } + } + }, + "object-assign": { + "version": "4.1.0" + }, + "read-pkg-up": { + "version": "1.0.1", + "dependencies": { + "find-up": { + "version": "1.1.2", + "dependencies": { + "path-exists": { + "version": "2.1.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + }, + "read-pkg": { + "version": "1.1.0", + "dependencies": { + "load-json-file": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "parse-json": { + "version": "2.2.0", + "dependencies": { + "error-ex": { + "version": "1.3.0", + "dependencies": { + "is-arrayish": { + "version": "0.2.1" + } + } + } + } + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + }, + "strip-bom": { + "version": "2.0.0", + "dependencies": { + "is-utf8": { + "version": "0.2.1" + } + } + } + } + }, + "path-type": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + } + } + } + } + }, + "redent": { + "version": "1.0.0", + "dependencies": { + "indent-string": { + "version": "2.1.0", + "dependencies": { + "repeating": { + "version": "2.0.1", + "dependencies": { + "is-finite": { + "version": "1.0.1", + "dependencies": { + "number-is-nan": { + "version": "1.0.0" + } + } + } + } + } + } + }, + "strip-indent": { + "version": "1.0.1" + } + } + }, + "trim-newlines": { + "version": "1.0.0" + } + } + }, + "semver-regex": { + "version": "1.0.0" + } + } + } + } + }, + "minimist": { + 
"version": "1.2.0" + }, + "semver": { + "version": "4.3.6" + }, + "semver-truncate": { + "version": "1.1.0", + "dependencies": { + "semver": { + "version": "5.1.0" + } + } + } + } + }, + "download": { + "version": "4.4.3", + "dependencies": { + "caw": { + "version": "1.2.0", + "dependencies": { + "get-proxy": { + "version": "1.1.0", + "dependencies": { + "rc": { + "version": "1.1.6", + "dependencies": { + "deep-extend": { + "version": "0.4.1" + }, + "ini": { + "version": "1.3.4" + }, + "minimist": { + "version": "1.2.0" + }, + "strip-json-comments": { + "version": "1.0.4" + } + } + } + } + }, + "is-obj": { + "version": "1.0.1" + }, + "object-assign": { + "version": "3.0.0" + }, + "tunnel-agent": { + "version": "0.4.3" + } + } + }, + "concat-stream": { + "version": "1.5.1", + "dependencies": { + "inherits": { + "version": "2.0.1" + }, + "readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + }, + "typedarray": { + "version": "0.0.6" + } + } + }, + "filenamify": { + "version": "1.2.1", + "dependencies": { + "filename-reserved-regex": { + "version": "1.0.0" + }, + "strip-outer": { + "version": "1.0.0", + "dependencies": { + "escape-string-regexp": { + "version": "1.0.5" + } + } + }, + "trim-repeated": { + "version": "1.0.0", + "dependencies": { + "escape-string-regexp": { + "version": "1.0.5" + } + } + } + } + }, + "got": { + "version": "5.6.0", + "dependencies": { + "create-error-class": { + "version": "3.0.2", + "dependencies": { + "capture-stack-trace": { + "version": "1.0.0" + } + } + }, + "duplexer2": { + "version": "0.1.4" + }, + "is-plain-obj": { + "version": "1.1.0" + }, + "is-redirect": { + "version": "1.0.0" + }, + "is-retry-allowed": { + "version": "1.0.0" + }, + "is-stream": { + "version": "1.1.0" + }, + "lowercase-keys": { + "version": "1.0.0" + }, + "node-status-codes": { + "version": "1.0.0" + }, + "parse-json": { + "version": "2.2.0", + "dependencies": { + "error-ex": { + "version": "1.3.0", + "dependencies": { + "is-arrayish": { + "version": "0.2.1" + } + } + } + } + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + }, + "timed-out": { + "version": "2.0.0" + }, + "unzip-response": { + "version": "1.0.0" + }, + "url-parse-lax": { + "version": "1.0.0", + "dependencies": { + "prepend-http": { + "version": "1.0.4" + } + } + } + } + }, + "gulp-decompress": { + "version": "1.2.0", + "dependencies": { + "archive-type": { + "version": "3.2.0", + "dependencies": { + "file-type": { + "version": "3.8.0" + } + } + }, + "decompress": { + "version": "3.0.0", + "dependencies": { + "buffer-to-vinyl": { + "version": "1.1.0", + "dependencies": { + "file-type": { + "version": "3.8.0" + }, + "uuid": { + "version": "2.0.2" + } + } + }, + "decompress-tar": { + "version": "3.1.0", + "dependencies": { + "is-tar": { + "version": "1.0.0" + }, + "object-assign": { + "version": "2.1.1" + }, + "strip-dirs": { + "version": "1.1.1", + "dependencies": { + "chalk": { + "version": "1.1.3", + "dependencies": { + "ansi-styles": { + "version": "2.2.1" + }, + "escape-string-regexp": { + "version": "1.0.5" + }, + "has-ansi": { + "version": "2.0.0", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "dependencies": { + "ansi-regex": { + 
"version": "2.0.0" + } + } + }, + "supports-color": { + "version": "2.0.0" + } + } + }, + "get-stdin": { + "version": "4.0.1" + }, + "is-absolute": { + "version": "0.1.7", + "dependencies": { + "is-relative": { + "version": "0.1.3" + } + } + }, + "is-natural-number": { + "version": "2.1.1" + }, + "minimist": { + "version": "1.2.0" + }, + "sum-up": { + "version": "1.0.3" + } + } + }, + "tar-stream": { + "version": "1.5.2", + "dependencies": { + "bl": { + "version": "1.1.2", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + } + } + }, + "end-of-stream": { + "version": "1.1.0", + "dependencies": { + "once": { + "version": "1.3.3", + "dependencies": { + "wrappy": { + "version": "1.0.1" + } + } + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "through2": { + "version": "0.6.5", + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "0.0.1" + }, + "string_decoder": { + "version": "0.10.31" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "vinyl": { + "version": "0.4.6", + "dependencies": { + "clone": { + "version": "0.2.0" + }, + "clone-stats": { + "version": "0.0.1" + } + } + } + } + }, + "decompress-tarbz2": { + "version": "3.1.0", + "dependencies": { + "is-bzip2": { + "version": "1.0.0" + }, + "object-assign": { + "version": "2.1.1" + }, + "seek-bzip": { + "version": "1.0.5", + "dependencies": { + "commander": { + "version": "2.8.1", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1" + } + } + } + } + }, + "strip-dirs": { + "version": "1.1.1", + "dependencies": { + "chalk": { + "version": "1.1.3", + "dependencies": { + "ansi-styles": { + "version": "2.2.1" + }, + "escape-string-regexp": { + "version": "1.0.5" + }, + "has-ansi": { + "version": "2.0.0", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "supports-color": { + "version": "2.0.0" + } + } + }, + "get-stdin": { + "version": "4.0.1" + }, + "is-absolute": { + "version": "0.1.7", + "dependencies": { + "is-relative": { + "version": "0.1.3" + } + } + }, + "is-natural-number": { + "version": "2.1.1" + }, + "minimist": { + "version": "1.2.0" + }, + "sum-up": { + "version": "1.0.3" + } + } + }, + "tar-stream": { + "version": "1.5.2", + "dependencies": { + "bl": { + "version": "1.1.2", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + } + } + }, + "end-of-stream": { + "version": "1.1.0", + "dependencies": { + "once": { + "version": "1.3.3", + "dependencies": { + "wrappy": { + "version": "1.0.1" + } + } + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "through2": { + "version": "0.6.5", + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dependencies": { + 
"core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "0.0.1" + }, + "string_decoder": { + "version": "0.10.31" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "vinyl": { + "version": "0.4.6", + "dependencies": { + "clone": { + "version": "0.2.0" + }, + "clone-stats": { + "version": "0.0.1" + } + } + } + } + }, + "decompress-targz": { + "version": "3.1.0", + "dependencies": { + "is-gzip": { + "version": "1.0.0" + }, + "object-assign": { + "version": "2.1.1" + }, + "strip-dirs": { + "version": "1.1.1", + "dependencies": { + "chalk": { + "version": "1.1.3", + "dependencies": { + "ansi-styles": { + "version": "2.2.1" + }, + "escape-string-regexp": { + "version": "1.0.5" + }, + "has-ansi": { + "version": "2.0.0", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "supports-color": { + "version": "2.0.0" + } + } + }, + "get-stdin": { + "version": "4.0.1" + }, + "is-absolute": { + "version": "0.1.7", + "dependencies": { + "is-relative": { + "version": "0.1.3" + } + } + }, + "is-natural-number": { + "version": "2.1.1" + }, + "minimist": { + "version": "1.2.0" + }, + "sum-up": { + "version": "1.0.3" + } + } + }, + "tar-stream": { + "version": "1.5.2", + "dependencies": { + "bl": { + "version": "1.1.2", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + } + } + }, + "end-of-stream": { + "version": "1.1.0", + "dependencies": { + "once": { + "version": "1.3.3", + "dependencies": { + "wrappy": { + "version": "1.0.1" + } + } + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "through2": { + "version": "0.6.5", + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "0.0.1" + }, + "string_decoder": { + "version": "0.10.31" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "vinyl": { + "version": "0.4.6", + "dependencies": { + "clone": { + "version": "0.2.0" + }, + "clone-stats": { + "version": "0.0.1" + } + } + } + } + }, + "decompress-unzip": { + "version": "3.4.0", + "dependencies": { + "is-zip": { + "version": "1.0.0" + }, + "stat-mode": { + "version": "0.2.1" + }, + "strip-dirs": { + "version": "1.1.1", + "dependencies": { + "chalk": { + "version": "1.1.3", + "dependencies": { + "ansi-styles": { + "version": "2.2.1" + }, + "escape-string-regexp": { + "version": "1.0.5" + }, + "has-ansi": { + "version": "2.0.0", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "supports-color": { + "version": "2.0.0" + } + } + }, + "get-stdin": { + "version": "4.0.1" + }, + "is-absolute": { + "version": "0.1.7", + "dependencies": { + "is-relative": { + "version": "0.1.3" + } + } + }, + "is-natural-number": { + "version": "2.1.1" + }, + "minimist": { + "version": "1.2.0" + }, + "sum-up": { + "version": "1.0.3" + } + } + }, + "through2": { + "version": "2.0.1", + "dependencies": { + 
"readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "yauzl": { + "version": "2.4.2", + "dependencies": { + "fd-slicer": { + "version": "1.0.1", + "dependencies": { + "pend": { + "version": "1.2.0" + } + } + } + } + } + } + }, + "vinyl-assign": { + "version": "1.2.1" + } + } + }, + "gulp-util": { + "version": "3.0.7", + "dependencies": { + "array-differ": { + "version": "1.0.0" + }, + "array-uniq": { + "version": "1.0.2" + }, + "beeper": { + "version": "1.1.0" + }, + "chalk": { + "version": "1.1.3", + "dependencies": { + "ansi-styles": { + "version": "2.2.1" + }, + "escape-string-regexp": { + "version": "1.0.5" + }, + "has-ansi": { + "version": "2.0.0", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "supports-color": { + "version": "2.0.0" + } + } + }, + "dateformat": { + "version": "1.0.12", + "dependencies": { + "get-stdin": { + "version": "4.0.1" + }, + "meow": { + "version": "3.7.0", + "dependencies": { + "camelcase-keys": { + "version": "2.1.0", + "dependencies": { + "camelcase": { + "version": "2.1.1" + } + } + }, + "decamelize": { + "version": "1.2.0" + }, + "loud-rejection": { + "version": "1.3.0", + "dependencies": { + "array-find-index": { + "version": "1.0.1" + }, + "signal-exit": { + "version": "2.1.2" + } + } + }, + "map-obj": { + "version": "1.0.1" + }, + "normalize-package-data": { + "version": "2.3.5", + "dependencies": { + "hosted-git-info": { + "version": "2.1.4" + }, + "is-builtin-module": { + "version": "1.0.0", + "dependencies": { + "builtin-modules": { + "version": "1.1.1" + } + } + }, + "semver": { + "version": "5.1.0" + }, + "validate-npm-package-license": { + "version": "3.0.1", + "dependencies": { + "spdx-correct": { + "version": "1.0.2", + "dependencies": { + "spdx-license-ids": { + "version": "1.2.1" + } + } + }, + "spdx-expression-parse": { + "version": "1.0.2", + "dependencies": { + "spdx-exceptions": { + "version": "1.0.4" + }, + "spdx-license-ids": { + "version": "1.2.1" + } + } + } + } + } + } + }, + "object-assign": { + "version": "4.1.0" + }, + "read-pkg-up": { + "version": "1.0.1", + "dependencies": { + "find-up": { + "version": "1.1.2", + "dependencies": { + "path-exists": { + "version": "2.1.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + }, + "read-pkg": { + "version": "1.1.0", + "dependencies": { + "load-json-file": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "parse-json": { + "version": "2.2.0", + "dependencies": { + "error-ex": { + "version": "1.3.0", + "dependencies": { + "is-arrayish": { + "version": "0.2.1" + } + } + } + } + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + }, + "strip-bom": { + "version": "2.0.0", + "dependencies": { + "is-utf8": { + "version": "0.2.1" + } + } + } + } + }, + "path-type": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "pify": { + "version": "2.3.0" + }, + 
"pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + } + } + } + } + }, + "redent": { + "version": "1.0.0", + "dependencies": { + "indent-string": { + "version": "2.1.0", + "dependencies": { + "repeating": { + "version": "2.0.1", + "dependencies": { + "is-finite": { + "version": "1.0.1", + "dependencies": { + "number-is-nan": { + "version": "1.0.0" + } + } + } + } + } + } + }, + "strip-indent": { + "version": "1.0.1" + } + } + }, + "trim-newlines": { + "version": "1.0.0" + } + } + } + } + }, + "fancy-log": { + "version": "1.2.0", + "dependencies": { + "time-stamp": { + "version": "1.0.1" + } + } + }, + "gulplog": { + "version": "1.0.0", + "dependencies": { + "glogg": { + "version": "1.0.0", + "dependencies": { + "sparkles": { + "version": "1.0.0" + } + } + } + } + }, + "has-gulplog": { + "version": "0.1.0", + "dependencies": { + "sparkles": { + "version": "1.0.0" + } + } + }, + "lodash._reescape": { + "version": "3.0.0" + }, + "lodash._reevaluate": { + "version": "3.0.0" + }, + "lodash._reinterpolate": { + "version": "3.0.0" + }, + "lodash.template": { + "version": "3.6.2", + "dependencies": { + "lodash._basecopy": { + "version": "3.0.1" + }, + "lodash._basetostring": { + "version": "3.0.1" + }, + "lodash._basevalues": { + "version": "3.0.0" + }, + "lodash._isiterateecall": { + "version": "3.0.9" + }, + "lodash.escape": { + "version": "3.2.0", + "dependencies": { + "lodash._root": { + "version": "3.0.1" + } + } + }, + "lodash.keys": { + "version": "3.1.2", + "dependencies": { + "lodash._getnative": { + "version": "3.9.1" + }, + "lodash.isarguments": { + "version": "3.0.8" + }, + "lodash.isarray": { + "version": "3.0.4" + } + } + }, + "lodash.restparam": { + "version": "3.6.1" + }, + "lodash.templatesettings": { + "version": "3.1.1" + } + } + }, + "minimist": { + "version": "1.2.0" + }, + "multipipe": { + "version": "0.1.2", + "dependencies": { + "duplexer2": { + "version": "0.0.2", + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "0.0.1" + }, + "string_decoder": { + "version": "0.10.31" + } + } + } + } + } + } + }, + "object-assign": { + "version": "3.0.0" + }, + "replace-ext": { + "version": "0.0.1" + }, + "through2": { + "version": "2.0.1", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "vinyl": { + "version": "0.5.3", + "dependencies": { + "clone": { + "version": "1.0.2" + }, + "clone-stats": { + "version": "0.0.1" + } + } + } + } + } + } + }, + "gulp-rename": { + "version": "1.2.2" + }, + "is-url": { + "version": "1.2.1" + }, + "object-assign": { + "version": "4.1.0" + }, + "read-all-stream": { + "version": "3.1.0", + "dependencies": { + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + }, + "readable-stream": { + "version": "2.1.2", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + 
"string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + }, + "stream-combiner2": { + "version": "1.1.1", + "dependencies": { + "duplexer2": { + "version": "0.1.4" + } + } + }, + "vinyl": { + "version": "1.1.1", + "dependencies": { + "clone": { + "version": "1.0.2" + }, + "clone-stats": { + "version": "0.0.1" + }, + "replace-ext": { + "version": "0.0.1" + } + } + }, + "vinyl-fs": { + "version": "2.4.3", + "dependencies": { + "duplexify": { + "version": "3.4.3", + "dependencies": { + "end-of-stream": { + "version": "1.0.0", + "dependencies": { + "once": { + "version": "1.3.3", + "dependencies": { + "wrappy": { + "version": "1.0.1" + } + } + } + } + }, + "inherits": { + "version": "2.0.1" + } + } + }, + "glob-stream": { + "version": "5.3.2", + "dependencies": { + "extend": { + "version": "3.0.0" + }, + "glob-parent": { + "version": "2.0.0", + "dependencies": { + "is-glob": { + "version": "2.0.1", + "dependencies": { + "is-extglob": { + "version": "1.0.0" + } + } + } + } + }, + "micromatch": { + "version": "2.3.8", + "dependencies": { + "arr-diff": { + "version": "2.0.0", + "dependencies": { + "arr-flatten": { + "version": "1.0.1" + } + } + }, + "array-unique": { + "version": "0.2.1" + }, + "braces": { + "version": "1.8.4", + "dependencies": { + "expand-range": { + "version": "1.8.2", + "dependencies": { + "fill-range": { + "version": "2.2.3", + "dependencies": { + "is-number": { + "version": "2.1.0" + }, + "isobject": { + "version": "2.1.0", + "dependencies": { + "isarray": { + "version": "1.0.0" + } + } + }, + "randomatic": { + "version": "1.1.5" + }, + "repeat-string": { + "version": "1.5.4" + } + } + } + } + }, + "preserve": { + "version": "0.2.0" + }, + "repeat-element": { + "version": "1.1.2" + } + } + }, + "expand-brackets": { + "version": "0.1.5", + "dependencies": { + "is-posix-bracket": { + "version": "0.1.1" + } + } + }, + "extglob": { + "version": "0.3.2" + }, + "filename-regex": { + "version": "2.0.0" + }, + "is-extglob": { + "version": "1.0.0" + }, + "is-glob": { + "version": "2.0.1" + }, + "kind-of": { + "version": "3.0.3", + "dependencies": { + "is-buffer": { + "version": "1.1.3" + } + } + }, + "normalize-path": { + "version": "2.0.1" + }, + "object.omit": { + "version": "2.0.0", + "dependencies": { + "for-own": { + "version": "0.1.4", + "dependencies": { + "for-in": { + "version": "0.1.5" + } + } + }, + "is-extendable": { + "version": "0.1.1" + } + } + }, + "parse-glob": { + "version": "3.0.4", + "dependencies": { + "glob-base": { + "version": "0.3.0" + }, + "is-dotfile": { + "version": "1.0.2" + } + } + }, + "regex-cache": { + "version": "0.4.3", + "dependencies": { + "is-equal-shallow": { + "version": "0.1.3" + }, + "is-primitive": { + "version": "2.0.0" + } + } + } + } + }, + "ordered-read-streams": { + "version": "0.3.0", + "dependencies": { + "is-stream": { + "version": "1.1.0" + } + } + }, + "through2": { + "version": "0.6.5", + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "0.0.1" + }, + "string_decoder": { + "version": "0.10.31" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "to-absolute-glob": { + "version": "0.1.1", + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "dependencies": { + "is-extendable": { + "version": "0.1.1" + } + } + } + } + }, + "unique-stream": { + "version": "2.2.1", + "dependencies": { + "json-stable-stringify": { + 
"version": "1.0.1", + "dependencies": { + "jsonify": { + "version": "0.0.0" + } + } + } + } + } + } + }, + "graceful-fs": { + "version": "4.1.4" + }, + "gulp-sourcemaps": { + "version": "1.6.0", + "dependencies": { + "convert-source-map": { + "version": "1.2.0" + } + } + }, + "is-valid-glob": { + "version": "0.3.0" + }, + "lazystream": { + "version": "1.0.0" + }, + "lodash.isequal": { + "version": "4.2.0", + "dependencies": { + "lodash._root": { + "version": "3.0.1" + }, + "lodash.keys": { + "version": "4.0.7" + } + } + }, + "merge-stream": { + "version": "1.0.0" + }, + "mkdirp": { + "version": "0.5.1", + "dependencies": { + "minimist": { + "version": "0.0.8" + } + } + }, + "strip-bom": { + "version": "2.0.0", + "dependencies": { + "is-utf8": { + "version": "0.2.1" + } + } + }, + "strip-bom-stream": { + "version": "1.0.0", + "dependencies": { + "first-chunk-stream": { + "version": "1.0.0" + } + } + }, + "through2": { + "version": "2.0.1", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "dependencies": { + "core-util-is": { + "version": "1.0.2" + }, + "inherits": { + "version": "2.0.1" + }, + "isarray": { + "version": "1.0.0" + }, + "process-nextick-args": { + "version": "1.0.7" + }, + "string_decoder": { + "version": "0.10.31" + }, + "util-deprecate": { + "version": "1.0.2" + } + } + }, + "xtend": { + "version": "4.0.1" + } + } + }, + "through2-filter": { + "version": "2.0.0", + "dependencies": { + "xtend": { + "version": "4.0.1" + } + } + }, + "vali-date": { + "version": "1.0.0" + } + } + }, + "ware": { + "version": "1.3.0", + "dependencies": { + "wrap-fn": { + "version": "0.1.5", + "dependencies": { + "co": { + "version": "3.1.0" + } + } + } + } + } + } + }, + "each-async": { + "version": "1.1.1", + "dependencies": { + "onetime": { + "version": "1.1.0" + }, + "set-immediate-shim": { + "version": "1.0.1" + } + } + }, + "lazy-req": { + "version": "1.1.0" + }, + "os-filter-obj": { + "version": "1.0.3" + } + } + }, + "logalot": { + "version": "2.1.0", + "dependencies": { + "figures": { + "version": "1.6.0" + }, + "squeak": { + "version": "1.3.0", + "dependencies": { + "chalk": { + "version": "1.1.3", + "dependencies": { + "ansi-styles": { + "version": "2.2.1" + }, + "escape-string-regexp": { + "version": "1.0.5" + }, + "has-ansi": { + "version": "2.0.0", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "dependencies": { + "ansi-regex": { + "version": "2.0.0" + } + } + }, + "supports-color": { + "version": "2.0.0" + } + } + }, + "console-stream": { + "version": "0.1.1" + }, + "lpad-align": { + "version": "1.1.0", + "dependencies": { + "get-stdin": { + "version": "4.0.1" + }, + "longest": { + "version": "1.0.1" + }, + "lpad": { + "version": "2.0.1" + }, + "meow": { + "version": "3.7.0", + "dependencies": { + "camelcase-keys": { + "version": "2.1.0", + "dependencies": { + "camelcase": { + "version": "2.1.1" + } + } + }, + "decamelize": { + "version": "1.2.0" + }, + "loud-rejection": { + "version": "1.3.0", + "dependencies": { + "array-find-index": { + "version": "1.0.1" + }, + "signal-exit": { + "version": "2.1.2" + } + } + }, + "map-obj": { + "version": "1.0.1" + }, + "minimist": { + "version": "1.2.0" + }, + "normalize-package-data": { + "version": "2.3.5", + "dependencies": { + "hosted-git-info": { + "version": "2.1.4" + }, + "is-builtin-module": { + "version": "1.0.0", + "dependencies": { + "builtin-modules": { + "version": "1.1.1" + } + } + }, + "semver": { + "version": "5.1.0" + }, + 
"validate-npm-package-license": { + "version": "3.0.1", + "dependencies": { + "spdx-correct": { + "version": "1.0.2", + "dependencies": { + "spdx-license-ids": { + "version": "1.2.1" + } + } + }, + "spdx-expression-parse": { + "version": "1.0.2", + "dependencies": { + "spdx-exceptions": { + "version": "1.0.4" + }, + "spdx-license-ids": { + "version": "1.2.1" + } + } + } + } + } + } + }, + "object-assign": { + "version": "4.1.0" + }, + "read-pkg-up": { + "version": "1.0.1", + "dependencies": { + "find-up": { + "version": "1.1.2", + "dependencies": { + "path-exists": { + "version": "2.1.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + }, + "read-pkg": { + "version": "1.1.0", + "dependencies": { + "load-json-file": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "parse-json": { + "version": "2.2.0", + "dependencies": { + "error-ex": { + "version": "1.3.0", + "dependencies": { + "is-arrayish": { + "version": "0.2.1" + } + } + } + } + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + }, + "strip-bom": { + "version": "2.0.0", + "dependencies": { + "is-utf8": { + "version": "0.2.1" + } + } + } + } + }, + "path-type": { + "version": "1.1.0", + "dependencies": { + "graceful-fs": { + "version": "4.1.4" + }, + "pify": { + "version": "2.3.0" + }, + "pinkie-promise": { + "version": "2.0.1", + "dependencies": { + "pinkie": { + "version": "2.0.4" + } + } + } + } + } + } + } + } + }, + "redent": { + "version": "1.0.0", + "dependencies": { + "indent-string": { + "version": "2.1.0", + "dependencies": { + "repeating": { + "version": "2.0.1", + "dependencies": { + "is-finite": { + "version": "1.0.1", + "dependencies": { + "number-is-nan": { + "version": "1.0.0" + } + } + } + } + } + } + }, + "strip-indent": { + "version": "1.0.1" + } + } + }, + "trim-newlines": { + "version": "1.0.0" + } + } + } + } + } + } + } + } + } + } + }, + "flow-status-webpack-plugin": { + "version": "0.1.4", + "dependencies": { + "colors": { + "version": "1.1.2" + }, + "shelljs": { + "version": "0.6.0" + } + } + }, "glob": { "version": "5.0.15", "dependencies": { diff --git a/package.json b/package.json index 68f7b6ec7de5d330d8e0c5d73a3c72c970961ec8..2695ef9b557c86d6459ab621717bb4f1f753f32f 100644 --- a/package.json +++ b/package.json @@ -68,15 +68,19 @@ "babel-eslint": "^6.0.0-beta.6", "babel-loader": "^6.2.4", "babel-plugin-transform-decorators-legacy": "^1.3.4", + "babel-plugin-transform-flow-strip-types": "^6.8.0", "babel-preset-es2015": "^6.6.0", "babel-preset-react": "^6.5.0", "babel-preset-stage-0": "^6.5.0", "css-loader": "^0.23.1", "eslint": "^2.2.0", "eslint-loader": "^1.3.0", + "eslint-plugin-flow-vars": "^0.4.0", "eslint-plugin-react": "^4.1.0", "extract-text-webpack-plugin": "^1.0.1", "file-loader": "^0.8.5", + "flow-bin": "^0.24.2", + "flow-status-webpack-plugin": "^0.1.4", "glob": "^5.0.15", "html-webpack-plugin": "^2.14.0", "istanbul-instrumenter-loader": "^0.2.0", @@ -103,15 +107,16 @@ "webpack-postcss-tools": "^1.1.1" }, "scripts": { - "lint": "./node_modules/.bin/eslint --ext .js --ext .jsx --max-warnings 0 frontend/src", - "test": "./node_modules/.bin/karma start frontend/test/karma.conf.js --single-run --reporters nyan", - "test-watch": "./node_modules/.bin/karma start frontend/test/karma.conf.js --auto-watch --reporters nyan", - "test-e2e": "./node_modules/.bin/webdriver-manager update && 
./node_modules/protractor/bin/protractor frontend/test/protractor-conf.js", - "build": "./node_modules/.bin/webpack", - "build-watch": "./node_modules/.bin/webpack --watch", - "build-hot": "NODE_ENV=hot ./node_modules/.bin/webpack && NODE_ENV=hot node_modules/webpack-dev-server/bin/webpack-dev-server.js --hot --inline --content-base frontend", + "lint": "eslint --ext .js --ext .jsx --max-warnings 0 frontend/src", + "flow": "flow; test $? -eq 0 -o $? -eq 2", + "test": "karma start frontend/test/karma.conf.js --single-run --reporters nyan", + "test-watch": "karma start frontend/test/karma.conf.js --auto-watch --reporters nyan", + "test-e2e": "webdriver-manager update && ./node_modules/protractor/bin/protractor frontend/test/protractor-conf.js", + "build": "webpack", + "build-watch": "webpack --watch", + "build-hot": "NODE_ENV=hot webpack && NODE_ENV=hot node_modules/webpack-dev-server/bin/webpack-dev-server.js --hot --inline --content-base frontend", "shrinkwrap": "npm prune && npm shrinkwrap --dev && ./bin/clean-shrinkwrap", "start": "npm run build && lein ring server", - "storybook": "./node_modules/.bin/start-storybook -p 9001" + "storybook": "start-storybook -p 9001" } } diff --git a/project.clj b/project.clj index 2303bc2d35c6ae1b22ac34fca44c13fd564f91f2..3f99f0f6a5996a36193200e006995bdf96d172a4 100644 --- a/project.clj +++ b/project.clj @@ -26,7 +26,7 @@ [amalloy/ring-gzip-middleware "0.1.3"] ; Ring middleware to GZIP responses if client can handle it [aleph "0.4.1"] ; Async HTTP library; WebSockets [cheshire "5.6.1"] ; fast JSON encoding (used by Ring JSON middleware) - [clj-http "3.0.1" ; HTTP client + [clj-http "3.1.0" ; HTTP client :exclusions [commons-codec commons-io slingshot]] @@ -38,14 +38,14 @@ org.apache.httpcomponents/httpclient net.sourceforge.nekohtml/nekohtml ring/ring-core]] - [com.draines/postal "1.11.4"] ; SMTP library + [com.draines/postal "2.0.0"] ; SMTP library [com.google.apis/google-api-services-bigquery ; Google BigQuery Java Client Library - "v2-rev294-1.21.0"] + "v2-rev300-1.22.0"] [com.h2database/h2 "1.4.191"] ; embedded SQL database [com.mattbertolini/liquibase-slf4j "2.0.0"] ; Java Migrations lib [com.novemberain/monger "3.0.2"] ; MongoDB Driver [compojure "1.5.0"] ; HTTP Routing library built on Ring - [environ "1.0.2"] ; easy environment management + [environ "1.0.3"] ; easy environment management [hiccup "1.0.5"] ; HTML templating [honeysql "0.6.3"] ; Transform Clojure data structures to SQL [korma "0.4.2"] ; SQL generation @@ -54,13 +54,13 @@ javax.jms/jms com.sun.jdmk/jmxtools com.sun.jmx/jmxri]] - [medley "0.7.4"] ; lightweight lib of useful functions + [medley "0.8.1"] ; lightweight lib of useful functions [metabase/throttle "1.0.1"] ; Tools for throttling access to API endpoints and other code pathways - [mysql/mysql-connector-java "5.1.38"] ; MySQL JDBC driver *** DON'T UPDATE THIS YET - NEW VERSION IS JAVA 8+ ONLY: http://dev.mysql.com/doc/connector-j/6.0/en/connector-j-whats-new.html *** + [mysql/mysql-connector-java "5.1.39"] ; MySQL JDBC driver (don't upgrade to 6.0+ yet -- that's Java 8 only) [net.sf.cssbox/cssbox "4.11" ; HTML / CSS rendering :exclusions [org.slf4j/slf4j-api]] [net.sourceforge.jtds/jtds "1.3.1"] ; Open Source SQL Server driver - [org.liquibase/liquibase-core "3.5.0"] ; migration management (Java lib) + [org.liquibase/liquibase-core "3.5.1"] ; migration management (Java lib) [org.slf4j/slf4j-log4j12 "1.7.21"] ; abstraction for logging frameworks -- allows end user to plug in desired logging framework at deployment time 
[org.yaml/snakeyaml "1.17"] ; YAML parser (required by liquibase) [org.xerial/sqlite-jdbc "3.8.11.2"] ; SQLite driver @@ -73,7 +73,7 @@ [stencil "0.5.0"] ; Mustache templates for Clojure [swiss-arrows "1.0.0"]] ; 'Magic wand' macro -<>, etc. :repositories [["bintray" "https://dl.bintray.com/crate/crate"]] - :plugins [[lein-environ "1.0.2"] ; easy access to environment variables + :plugins [[lein-environ "1.0.3"] ; easy access to environment variables [lein-ring "0.9.7" ; start the HTTP server with 'lein ring server' :exclusions [org.clojure/clojure]]] ; TODO - should this be a dev dependency ? :main ^:skip-aot metabase.core @@ -98,7 +98,6 @@ :deprecations]} ; Turn this off temporarily until we finish removing self-deprecated DB functions & macros like `upd`, `del`, and `sel` :docstring-checker {:include [#"^metabase"] :exclude [#"test" - #"^metabase\.sample-data$" #"^metabase\.http-client$"]} :profiles {:dev {:dependencies [[org.clojure/tools.nrepl "0.2.12"] ; REPL <3 [expectations "2.1.3"] ; unit tests *** DON'T UPDATE THIS UNTIL WE REMOVE USES OF DEPRECATED EXPECT-LET IN THE CODEBASE *** diff --git a/reset_password/metabase/reset_password/core.clj b/reset_password/metabase/reset_password/core.clj index 6d6b76df8c285043a53ea99966690696332bd589..2f60c84fcc959709dd7538fb478d114a284e46e7 100644 --- a/reset_password/metabase/reset_password/core.clj +++ b/reset_password/metabase/reset_password/core.clj @@ -6,7 +6,7 @@ (defn- set-reset-token! "Set and return a new `reset_token` for the user with EMAIL-ADDRESS." [email-address] - (let [user-id (or (db/sel :one :id 'User, :email email-address) + (let [user-id (or (db/select-one-id 'User, :email email-address) (throw (Exception. (format "No user found with email address '%s'. Please check the spelling and try again." 
email-address))))] (user/set-user-password-reset-token user-id))) diff --git a/resources/words-by-frequency.txt b/resources/words-by-frequency.txt index 4ca263447b613936bbdddfc8e88648032e393663..529dc31b4860bd151843d9c8a0a47c306d120de0 100644 --- a/resources/words-by-frequency.txt +++ b/resources/words-by-frequency.txt @@ -14,7 +14,6 @@ as he on by -s with from or @@ -136,7 +135,6 @@ work now could long -j however group what @@ -145,7 +143,6 @@ started way since around -d both commune while @@ -174,7 +171,6 @@ country main government century -c means university life @@ -203,12 +199,9 @@ four river back just -m still utc -n located -t population including king @@ -220,7 +213,6 @@ left school great end -e against september high @@ -247,7 +239,6 @@ using district based form -b german games near @@ -354,7 +345,6 @@ next moved light five -f third change makes @@ -421,7 +411,6 @@ formed roman instead women -g army computer find @@ -524,9 +513,7 @@ space hockey led close -p political -r eastern look hard @@ -554,12 +541,10 @@ green names especially soon -h someone idea seven included -l caused move open @@ -568,7 +553,6 @@ study official italian run -k village ice working @@ -653,7 +637,6 @@ our reason event goes -v ocean blood hit @@ -863,7 +846,6 @@ date valley tells speed -w stage al standard @@ -125546,4 +125528,4 @@ sherd blabbing oracular incapability -pebbliest \ No newline at end of file +pebbliest diff --git a/src/metabase/api/activity.clj b/src/metabase/api/activity.clj index c47645564c7038793ebf251d2857fe94888d1f4f..721cb03ceafa9668afe9054d8df014ebde8d946f 100644 --- a/src/metabase/api/activity.clj +++ b/src/metabase/api/activity.clj @@ -1,6 +1,5 @@ (ns metabase.api.activity (:require [compojure.core :refer [GET]] - [korma.core :as k] [metabase.api.common :refer :all] [metabase.db :as db] (metabase.models [activity :refer [Activity]] @@ -19,7 +18,7 @@ (defendpoint GET "/" "Get recent activity." 
[] - (-> (db/sel :many Activity (k/order :timestamp :DESC) (k/limit 40)) + (-> (db/select Activity, {:order-by [[:timestamp :desc]], :limit 40}) (hydrate :user :table :database :model_exists) (->> (mapv dashcard-exists)))) @@ -30,17 +29,14 @@ ;; expected output of the query is a single row per unique model viewed by the current user ;; including a `:max_ts` which has the most recent view timestamp of the item and `:cnt` which has total views ;; and we order the results by most recently viewed then hydrate the basic details of the model - (for [view-log (k/select ViewLog - (k/fields :user_id :model :model_id) - (k/aggregate (count :*) :cnt) - (k/aggregate (max :timestamp) :max_ts) - (k/where (= :user_id *current-user-id*)) - (k/group :user_id :model :model_id) - (k/order :max_ts :desc) - (k/limit 10)) + (for [view-log (db/select [ViewLog :user_id :model :model_id [:%count.* :cnt] [:%max.timestamp :max_ts]] + :user_id *current-user-id* + {:group-by [:user_id :model :model_id] + :order-by [[:max_ts :desc]] + :limit 10}) :let [model-object (case (:model view-log) - "card" (db/sel :one [Card :id :name :description :display], :id (:model_id view-log)) - "dashboard" (db/sel :one [Dashboard :id :name :description], :id (:model_id view-log)) + "card" (db/select-one [Card :id :name :description :display], :id (:model_id view-log)) + "dashboard" (db/select-one [Dashboard :id :name :description], :id (:model_id view-log)) nil)] :when model-object] (assoc view-log :model_object model-object))) diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj index 8ede6b6d7898e5a31b43019e4e796a5c0bec3f53..e2ea51fceb4948e5621fd09697ab6bed2ecc8c24 100644 --- a/src/metabase/api/card.clj +++ b/src/metabase/api/card.clj @@ -21,7 +21,7 @@ [cards] (let [card-labels (db/select [CardLabel :card_id :label_id]) label-id->label (when (seq card-labels) - (db/select-id->obj Label :id [:in (map :label_id card-labels)])) + (u/key-by :id (db/select Label :id [:in (map :label_id card-labels)]))) card-id->card-labels (group-by :card_id card-labels)] (for [card cards] (assoc card :labels (for [card-label (card-id->card-labels (:id card))] ; TODO - do these need to be sorted ? @@ -68,7 +68,7 @@ Make sure cards are returned in the same order as CARD-IDS`; `[in card-ids]` won't preserve the order." [card-ids] {:pre [(every? integer? card-ids)]} - (let [card-id->card (db/select-id->obj Card, :id [:in card-ids], :archived false)] + (let [card-id->card (u/key-by :id (db/select Card, :id [:in (set card-ids)], :archived false))] (filter identity (map card-id->card card-ids)))) (defn- cards:recent diff --git a/src/metabase/api/common.clj b/src/metabase/api/common.clj index 9407673c98500e80c076ed53517ef8214949aab5..f1bfdb404d3850e20c27613bbf5c2a94ebe7c260 100644 --- a/src/metabase/api/common.clj +++ b/src/metabase/api/common.clj @@ -5,7 +5,6 @@ [walk :as walk]) [cheshire.core :as json] [compojure.core :refer [defroutes]] - [korma.core :as k] [medley.core :as m] [metabase.api.common.internal :refer :all] [metabase.db :as db] @@ -65,7 +64,7 @@ (defn check-superuser "Check that `*current-user*` is a superuser or throw a 403." [] - (check-403 (:is_superuser @*current-user*))) + (check-403 (db/exists? 'User, :id *current-user-id*, :is_superuser true))) ;;; #### checkp- functions: as in "check param". These functions expect that you pass a symbol so they can throw exceptions w/ relevant error messages. 
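The remaining `db/sel` call sites in this patch are all converted along the same lines; the short Clojure sketch below summarizes the pattern with a few representative before/after pairs paraphrased from hunks elsewhere in this patch. It is illustrative only and not itself part of the diff; the exact argument names (`id`, `table-id`) are just the ones used at the corresponding call sites.

    ;; illustrative summary of the `db/sel` -> HoneySQL-based `db/select` conversions (not part of the patch)

    ;; fetching a single object: entities are now callable directly with an id or keyword args
    (db/sel :one Dashboard :id id)                                       ; before
    (Dashboard id)                                                       ; after

    ;; fetching many objects: korma clauses are replaced by a HoneySQL options map
    (db/sel :many Table :db_id id :active true (k/order :name))          ; before
    (db/select Table, :db_id id, :active true, {:order-by [:name]})     ; after

    ;; the :id / :field options become dedicated helpers
    (db/sel :one :id FieldValues :field_id id)                           ; before
    (db/select-one-id FieldValues, :field_id id)                         ; after

    ;; korma predicates become HoneySQL keywords, e.g. [in ...] -> [:in ...], [not= ...] -> [:not= ...]
    (db/sel :many Field :table_id table-id, :visibility_type [not= "retired"])      ; before
    (db/select Field, :table_id table-id, :visibility_type [:not= "retired"])       ; after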
diff --git a/src/metabase/api/dashboard.clj b/src/metabase/api/dashboard.clj index 2937cd1c8ab98ab7abe7d3100b73dc15bb9379b3..2da9075eb36f7a6e56a39748ece4346ede1d5138 100644 --- a/src/metabase/api/dashboard.clj +++ b/src/metabase/api/dashboard.clj @@ -1,7 +1,6 @@ (ns metabase.api.dashboard "/api/dashboard endpoints." (:require [compojure.core :refer [GET POST PUT DELETE]] - [korma.core :as k] [metabase.events :as events] [metabase.api.common :refer :all] [metabase.db :as db] @@ -19,10 +18,10 @@ * `mine` - Return `Dashboards` created by the current user." [f] {f FilterOptionAllOrMine} - (-> (case (or f :all) - :all (db/sel :many Dashboard (k/where (or {:creator_id *current-user-id*} - {:public_perms [> common/perms-none]}))) - :mine (db/sel :many Dashboard :creator_id *current-user-id*)) + (-> (db/select Dashboard {:where (case (or f :all) + :all [:or [:= :creator_id *current-user-id*] + [:> :public_perms common/perms-none]] + :mine [:= :creator_id *current-user-id*])}) (hydrate :creator :can_read :can_write))) (defendpoint POST "/" @@ -63,7 +62,7 @@ [id] (write-check Dashboard id) ;; TODO - it would be much more natural if `cascade-delete` returned the deleted entity instead of an api response - (let [dashboard (db/sel :one Dashboard :id id) + (let [dashboard (Dashboard id) result (db/cascade-delete! Dashboard :id id)] (events/publish-event :dashboard-delete (assoc dashboard :actor_id *current-user-id*)) result)) @@ -96,7 +95,7 @@ ...}]} ...]}" [id :as {{:keys [cards]} :body}] (write-check Dashboard id) - (let [dashcard-ids (set (db/sel :many :id DashboardCard, :dashboard_id id))] + (let [dashcard-ids (db/select-ids DashboardCard, :dashboard_id id)] (doseq [{dashcard-id :id :as dashboard-card} cards] ;; ensure the dashcard we are updating is part of the given dashboard (when (contains? dashcard-ids dashcard-id) diff --git a/src/metabase/api/database.clj b/src/metabase/api/database.clj index 285a72b26e4fcf8c7b3905cd37cc6cfbcdcbfd33..16f252b10cb75929d2aa54aa79910f172d3b3f9e 100644 --- a/src/metabase/api/database.clj +++ b/src/metabase/api/database.clj @@ -1,7 +1,6 @@ (ns metabase.api.database "/api/database endpoints." (:require [compojure.core :refer [GET POST PUT DELETE]] - [korma.core :as k] [metabase.api.common :refer :all] (metabase [config :as config] [db :as db] @@ -53,10 +52,10 @@ (defendpoint GET "/" "Fetch all `Databases`." [include_tables] - (let [dbs (db/sel :many Database (k/order (k/sqlfn :LOWER :name)))] + (let [dbs (db/select Database {:order-by [:%lower.name]})] (if-not include_tables dbs - (let [db-id->tables (group-by :db_id (db/sel :many Table, :active true))] + (let [db-id->tables (group-by :db_id (db/select Table, :active true))] (for [db dbs] (assoc db :tables (sort-by :name (get db-id->tables (:id db) [])))))))) @@ -95,7 +94,7 @@ [] (check-superuser) (sample-data/add-sample-dataset!) - (db/sel :one Database :is_sample true)) + (Database :is_sample true)) (defendpoint GET "/:id" "Get `Database` with ID." @@ -105,9 +104,9 @@ (defendpoint PUT "/:id" "Update a `Database`." [id :as {{:keys [name engine details is_full_sync]} :body}] - {name [Required NonEmptyString] - engine [Required DBEngine] - details [Required Dict]} + {name [Required NonEmptyString] + engine [Required DBEngine] + details [Required Dict]} (check-superuser) (let-404 [database (Database id)] (let [details (if-not (= protected-password (:password details)) @@ -155,22 +154,19 @@ Suggestions include matching `Tables` and `Fields` in this `Database`." 
[id prefix] ; TODO - should prefix be Required/NonEmptyString ? (read-check Database id) - (let [prefix-len (count prefix) - table-id->name (->> (db/sel :many [Table :id :name] :db_id id :active true) ; fetch all name + ID of all Tables for this DB - (map (fn [{:keys [id name]}] ; make a map of Table ID -> Table Name - {id name})) - (into {})) + (let [prefix-len (count prefix) + table-id->name (db/select-id->field :name Table, :db_id id, :active true) matching-tables (->> (vals table-id->name) ; get all Table names that start with PREFIX (filter (fn [^String table-name] (and (>= (count table-name) prefix-len) (= prefix (.substring table-name 0 prefix-len))))) - (map (fn [table-name] ; return them in the format [table_name "Table"] + (map (fn [table-name] ; return them in the format [table_name "Table"] [table-name "Table"]))) - fields (->> (db/sel :many [Field :name :base_type :special_type :table_id] ; get all Fields with names that start with PREFIX - :table_id [in (keys table-id->name)] ; whose Table is in this DB - :name [like (str prefix "%")] - :visibility_type [not-in ["sensitive" "retired"]]) - (map (fn [{:keys [name base_type special_type table_id]}] ; return them in the format + fields (->> (db/select [Field :name :base_type :special_type :table_id] ; get all Fields with names that start with PREFIX + :table_id [:in (keys table-id->name)] ; whose Table is in this DB + :name [:like (str prefix "%")] + :visibility_type [:not-in ["sensitive" "retired"]]) + (map (fn [{:keys [name base_type special_type table_id]}] ; return them in the format [name (str (table-id->name table_id) " " base_type (when special_type ; [field_name "table_name base_type special_type"] (str " " special_type)))])))] (concat matching-tables fields))) ; return combined seq of Fields + Tables @@ -179,14 +175,14 @@ "Get a list of all `Tables` in `Database`." [id] (read-check Database id) - (db/sel :many Table :db_id id :active true (k/order :name))) + (db/select Table, :db_id id, :active true, {:order-by [:name]})) ; TODO - should this be case-insensitive -- {:order-by [:%lower.name]} -- instead? (defendpoint GET "/:id/idfields" "Get a list of all primary key `Fields` for `Database`." [id] (read-check Database id) - (let [table_ids (db/sel :many :id Table :db_id id :active true)] - (sort-by #(:name (:table %)) (-> (db/sel :many Field :table_id [in table_ids] :special_type "id") + (let [table_ids (db/select-ids Table, :db_id id, :active true)] + (sort-by #(:name (:table %)) (-> (db/select Field, :table_id [:in table_ids], :special_type "id") (hydrate :table))))) (defendpoint POST "/:id/sync" diff --git a/src/metabase/api/dataset.clj b/src/metabase/api/dataset.clj index 930ab40fc04b7a6411630c11140b67ff7ff71e41..13a6ba9113a34517530749fe0025b998269b6c4d 100644 --- a/src/metabase/api/dataset.clj +++ b/src/metabase/api/dataset.clj @@ -3,7 +3,6 @@ (:require [clojure.data.csv :as csv] [cheshire.core :as json] [compojure.core :refer [GET POST]] - [korma.core :as k] [metabase.api.common :refer :all] [metabase.db :as db] (metabase.models [card :refer [Card]] @@ -13,11 +12,11 @@ [metabase.query-processor :as qp] [metabase.util :as u])) -(def ^:const api-max-results-bare-rows +(def ^:private ^:const api-max-results-bare-rows "Maximum number of rows to return specifically on :rows type queries via the API." 2000) -(def ^:const api-max-results +(def ^:private ^:const api-max-results "General maximum number of rows to return from an API query." 
10000) @@ -39,13 +38,13 @@ [:as {{:keys [database] :as body} :body}] (read-check Database database) ;; add sensible constraints for results limits on our query - (let [query (assoc body :constraints dataset-query-api-constraints)] - (first (k/select [(k/subselect QueryExecution - (k/fields :running_time) - (k/where {:json_query (json/generate-string query)}) - (k/order :started_at :desc) - (k/limit 10)) :_] - (k/aggregate (avg :running_time) :average))))) + (let [query (assoc body :constraints dataset-query-api-constraints) + running-times (db/select-field :running_time QueryExecution + :json_query (json/generate-string query) + {:order-by [[:started_at :desc]] + :limit 10})] + {:average (float (/ (reduce + running-times) + (count running-times)))})) (defendpoint POST "/csv" "Execute an MQL query and download the result data as a CSV file." @@ -66,11 +65,12 @@ :body (:error response)}))) +;; TODO - AFAIK this endpoint is no longer used. Remove it? </3 (defendpoint GET "/card/:id" "Execute the MQL query for a given `Card` and retrieve both the `Card` and the execution results as JSON. This is a convenience endpoint which simplifies the normal 2 api calls to fetch the `Card` then execute its query." [id] - (let-404 [{:keys [dataset_query] :as card} (db/sel :one Card :id id)] + (let-404 [{:keys [dataset_query] :as card} (Card id)] (read-check card) (read-check Database (:database dataset_query)) ;; add sensible constraints for results limits on our query diff --git a/src/metabase/api/field.clj b/src/metabase/api/field.clj index b90cfd6ccb7bad7ca1ed8fa334c27e4c79c08bae..077b441cd31b1e52ac6830cd278dbf53ed36677e 100644 --- a/src/metabase/api/field.clj +++ b/src/metabase/api/field.clj @@ -87,7 +87,7 @@ (write-check field) (check (field-should-have-field-values? field) [400 "You can only update the mapped values of a Field whose 'special_type' is 'category'/'city'/'state'/'country' or whose 'base_type' is 'BooleanField'."]) - (if-let [field-values-id (db/sel :one :id FieldValues :field_id id)] + (if-let [field-values-id (db/select-one-id FieldValues, :field_id id)] (check-500 (db/update! FieldValues field-values-id :human_readable_values values_map)) (create-field-values-if-needed field values_map))) diff --git a/src/metabase/api/label.clj b/src/metabase/api/label.clj index 4596711500e46dbed007a4c72154bc404a89c566..14a7abc67723a60e030091446848c25fcc59b0be 100644 --- a/src/metabase/api/label.clj +++ b/src/metabase/api/label.clj @@ -1,7 +1,6 @@ (ns metabase.api.label "`/api/label` endpoints." (:require [compojure.core :refer [GET POST DELETE PUT]] - [korma.core :as k] [metabase.api.common :refer [defendpoint define-routes write-check]] [metabase.db :as db] [metabase.models.label :refer [Label]])) @@ -9,7 +8,7 @@ (defendpoint GET "/" "List all `Labels`. :label:" [] - (db/sel :many Label (k/order (k/sqlfn :LOWER :name)))) + (db/select Label {:order-by [:%lower.name]})) (defendpoint POST "/" "Create a new `Label`. :label: " diff --git a/src/metabase/api/notify.clj b/src/metabase/api/notify.clj index 0e5fd13941ec6cb4b6ca8ceee00f352b76bbc4b2..870e62950bafaa474809e83793b409f55d569cd5 100644 --- a/src/metabase/api/notify.clj +++ b/src/metabase/api/notify.clj @@ -15,9 +15,9 @@ [id :as {{:keys [table_id table_name]} :body}] (let-404 [database (Database id)] (cond - table_id (when-let [table (db/sel :one Table, :db_id id, :id (int table_id))] + table_id (when-let [table (Table :db_id id, :id (int table_id))] (future (sync-database/sync-table! 
table))) - table_name (when-let [table (db/sel :one Table, :db_id id, :name table_name)] + table_name (when-let [table (Table :db_id id, :name table_name)] (future (sync-database/sync-table! table))) :else (future (sync-database/sync-database! database)))) {:success true}) diff --git a/src/metabase/api/session.clj b/src/metabase/api/session.clj index fc3fbb506434fae4df1d609614e6479e6b524fe6..4bb9d472e596d0064d7031ff4020a36c342ce9de 100644 --- a/src/metabase/api/session.clj +++ b/src/metabase/api/session.clj @@ -3,7 +3,6 @@ (:require [clojure.tools.logging :as log] [cemerick.friend.credentials :as creds] [compojure.core :refer [defroutes GET POST DELETE]] - [korma.core :as k] [throttle.core :as throttle] [metabase.api.common :refer :all] [metabase.db :as db] @@ -38,7 +37,7 @@ password [Required NonEmptyString]} (throttle/check (login-throttlers :ip-address) remote-address) (throttle/check (login-throttlers :email) email) - (let [user (db/sel :one :fields [User :id :password_salt :password :last_login], :email email (k/where {:is_active true}))] + (let [user (db/select-one [User :id :password_salt :password :last_login], :email email, :is_active true)] ;; Don't leak whether the account doesn't exist or the password was incorrect (when-not (and user (pass/verify-password password (:password_salt user) (:password user))) @@ -74,7 +73,7 @@ (throttle/check (forgot-password-throttlers :ip-address) remote-address) (throttle/check (forgot-password-throttlers :email) email) ;; Don't leak whether the account doesn't exist, just pretend everything is ok - (when-let [user-id (db/sel :one :id User :email email)] + (when-let [user-id (db/select-one-id User, :email email)] (let [reset-token (set-user-password-reset-token user-id) password-reset-url (str (@(ns-resolve 'metabase.core 'site-url) request) "/auth/reset_password/" reset-token)] (email/send-password-reset-email email server-name password-reset-url) @@ -90,10 +89,10 @@ [^String token] (when-let [[_ user-id] (re-matches #"(^\d+)_.+$" token)] (let [user-id (Integer/parseInt user-id)] - (when-let [{:keys [reset_token reset_triggered], :as user} (db/sel :one :fields [User :id :last_login :reset_triggered :reset_token] :id user-id)] + (when-let [{:keys [reset_token reset_triggered], :as user} (db/select-one [User :id :last_login :reset_triggered :reset_token], :id user-id)] ;; Make sure the plaintext token matches up with the hashed one for this user - (when (try (creds/bcrypt-verify token reset_token) - (catch Throwable _)) + (when (u/ignore-exceptions + (creds/bcrypt-verify token reset_token)) ;; check that the reset was triggered within the last 48 HOURS, after that the token is considered expired (let [token-age (- (System/currentTimeMillis) reset_triggered)] (when (< token-age reset-token-ttl-ms) diff --git a/src/metabase/api/table.clj b/src/metabase/api/table.clj index 961dae68d8ee3c5f509f9c1abcb0881c67acfd5e..e4f8c98bb053188021749ad01a90164b1dfbd857 100644 --- a/src/metabase/api/table.clj +++ b/src/metabase/api/table.clj @@ -1,7 +1,6 @@ (ns metabase.api.table "/api/table endpoints." (:require [compojure.core :refer [GET POST PUT]] - [korma.core :as k] [metabase.api.common :refer :all] [metabase.db :as db] [metabase.driver :as driver] @@ -23,7 +22,7 @@ (defendpoint GET "/" "Get all `Tables`." 
[] - (-> (db/sel :many Table, :active true, (k/order :name :ASC)) + (-> (db/select Table, :active true, {:order-by [[:name :asc]]}) (hydrate :db) ;; if for some reason a Table doesn't have rows set then set it to 0 so UI doesn't barf (#(map (fn [table] @@ -57,7 +56,7 @@ [id] (let-404 [table (Table id)] (read-check table) - (db/sel :many Field :table_id id, :visibility_type [not-in ["sensitive" "retired"]], (k/order :name :ASC)))) + (db/select Field, :table_id id, :visibility_type [:not-in ["sensitive" "retired"]], {:order-by [[:name :asc]]}))) (defendpoint GET "/:id/query_metadata" "Get metadata about a `Table` useful for running queries. @@ -82,8 +81,8 @@ [id] (let-404 [table (Table id)] (read-check table) - (let [field-ids (db/sel :many :id Field :table_id id :visibility_type [not= "retired"])] - (for [origin-field (db/sel :many Field :fk_target_field_id [in field-ids])] + (let [field-ids (db/select-ids Field, :table_id id, :visibility_type [:not= "retired"])] + (for [origin-field (db/select Field, :fk_target_field_id [:in field-ids])] ;; it's silly to be hydrating some of these tables/dbs {:relationship :Mt1 :origin_id (:id origin-field) @@ -105,7 +104,7 @@ [id :as {{:keys [new_order]} :body}] {new_order [Required ArrayOfIntegers]} (write-check Table id) - (let [table-fields (db/sel :many Field :table_id id)] + (let [table-fields (db/select Field, :table_id id)] ;; run a function over the `new_order` list which simply updates `Field` :position to the index in the vector ;; NOTE: we assume that all `Fields` in the table are represented in the array (dorun diff --git a/src/metabase/api/user.clj b/src/metabase/api/user.clj index 891656f30eff0e1367bf5d6990c40130a84d0590..f1d9ef7511cf7982b7457f39e2df3d9fbeb0048b 100644 --- a/src/metabase/api/user.clj +++ b/src/metabase/api/user.clj @@ -14,9 +14,9 @@ (:is_superuser @*current-user*)))) (defendpoint GET "/" - "Fetch a list of all active `Users`. You must be a superuser to do this." + "Fetch a list of all active `Users`." [] - (db/sel :many User :is_active true)) + (db/select User, :is_active true)) (defendpoint POST "/" @@ -26,7 +26,7 @@ last_name [Required NonEmptyString] email [Required Email]} (check-superuser) - (if-let [existing-user (db/sel :one [User :id :is_active], :email email)] + (if-let [existing-user (db/select-one [User :id :is_active], :email email)] (do (when-not (:is_active existing-user) ;; this user already exists but is inactive, so simply reactivate the account (db/update! User (:id existing-user) @@ -50,7 +50,7 @@ "Fetch a `User`. You must be fetching yourself *or* be a superuser." [id] (check-self-or-superuser id) - (check-404 (db/sel :one User :id id, :is_active true))) + (check-404 (User :id id, :is_active true))) (defendpoint PUT "/:id" @@ -60,7 +60,7 @@ first_name NonEmptyString last_name NonEmptyString} (check-self-or-superuser id) - (check-404 (db/exists? User, :id id, :is_active true)) ; only allow updates if the specified account is active + (check-404 (db/exists? User, :id id, :is_active true)) ; only allow updates if the specified account is active (check-400 (not (db/exists? User, :email email, :id [:not= id]))) ; can't change email if it's already taken BY ANOTHER ACCOUNT (check-500 (db/update-non-nil-keys! 
User id :email email @@ -77,7 +77,7 @@ [id :as {{:keys [password old_password]} :body}] {password [Required ComplexPassword]} (check-self-or-superuser id) - (let-404 [user (db/sel :one [User :password_salt :password], :id id, :is_active true)] + (let-404 [user (db/select-one [User :password_salt :password], :id id, :is_active true)] (when (= id *current-user-id*) (checkp (creds/bcrypt-verify (str (:password_salt user) old_password) (:password user)) "old_password" "Invalid password"))) (set-user-password id password) @@ -96,7 +96,7 @@ "Resend the user invite email for a given user." [id] (check-superuser) - (when-let [user (db/sel :one User :id id :is_active true)] + (when-let [user (User :id id, :is_active true)] (let [reset-token (set-user-password-reset-token id) ;; NOTE: the new user join url is just a password reset with an indicator that this is a first time user join-url (str (form-password-reset-url reset-token) "#new")] diff --git a/src/metabase/db.clj b/src/metabase/db.clj index 4cf44617ce39f18b55f39110b9de362c56f1f180..d3b46d08383434d08d8656a455e590f2fefe62a6 100644 --- a/src/metabase/db.clj +++ b/src/metabase/db.clj @@ -13,7 +13,6 @@ [medley.core :as m] [ring.util.codec :as codec] [metabase.config :as config] - [metabase.db.internal :as i] [metabase.models.interface :as models] [metabase.util :as u] metabase.util.honeysql-extensions) ; this needs to be loaded so the `:h2` quoting style gets added @@ -200,98 +199,15 @@ (apply setup-db args))) -;; # ---------------------------------------- OLD UTILITY FUNCTIONS ---------------------------------------- - -;; ## SEL +;;; +------------------------------------------------------------------------------------------------------------------------+ +;;; | NEW HONEY-SQL BASED DB UTIL FUNCTIONS | +;;; +------------------------------------------------------------------------------------------------------------------------+ (def ^:dynamic ^Boolean *disable-db-logging* "Should we disable logging for database queries? Normally `false`, but bind this to `true` to keep logging from getting too noisy during operations that require a lot of DB access, like the sync process." false) -(defmacro ^:deprecated sel - "Wrapper for korma `select` that calls `post-select` on results and provides a few other conveniences. - - ONE-OR-MANY tells `sel` how many objects to fetch and is either `:one` or `:many`. - - (sel :one User :id 1) -> returns the User (or nil) whose id is 1 - (sel :many OrgPerm :user_id 1) -> returns sequence of OrgPerms whose user_id is 1 - - OPTION, if specified, is one of `:field`, `:fields`, `:id`, `:id->field`, `:field->id`, `:field->obj`, `:id->fields`, - `:field->field`, or `:field->fields`. - - ;; Only return IDs of objects. - (sel :one :id User :email \"cam@metabase.com\") -> 120 - - ;; Only return the specified field. - (sel :many :field [User :first_name]) -> (\"Cam\" \"Sameer\" ...) - - ;; Return map(s) that only contain the specified fields. - (sel :one :fields [User :id :first_name]) - -> ({:id 1 :first_name \"Cam\"}, {:id 2 :first_name \"Sameer\"} ...) - - ;; Return a map of ID -> field value - (sel :many :id->field [User :first_name]) - -> {1 \"Cam\", 2 \"Sameer\", ...} - - ;; Return a map of field value -> ID. Duplicates will be discarded! - (sel :many :field->id [User :first_name]) - -> {\"Cam\" 1, \"Sameer\" 2} - - ;; Return a map of field value -> field value. - (sel :many :field->field [User :first_name :last_name]) - -> {\"Cam\" \"Saul\", \"Rasta\" \"Toucan\", ...} - - ;; Return a map of field value -> *entire* object. 
Duplicates will be discarded! - (sel :many :field->obj [Table :name] :db_id 1) - -> {\"venues\" {:id 1, :name \"venues\", ...} - \"users\" {:id 2, :name \"users\", ...}} - - ;; Return a map of field value -> other fields. - (sel :many :field->fields [Table :name :id :db_id]) - -> {\"venues\" {:id 1, :db_id 1} - \"users\" {:id 2, :db_id 1}} - - ;; Return a map of ID -> specified fields - (sel :many :id->fields [User :first_name :last_name]) - -> {1 {:first_name \"Cam\", :last_name \"Saul\"}, - 2 {:first_Name \"Sameer\", :last_name ...}} - - ENTITY may be either an entity like `User` or a vector like `[entity & field-keys]`. - If just an entity is passed, `sel` will return `default-fields` for ENTITY. - Otherwise, if a vector is passed `sel` will return the fields specified by FIELD-KEYS. - - (sel :many [OrgPerm :admin :id] :user_id 1) -> return admin and id of OrgPerms whose user_id is 1 - - ENTITY may optionally be a fully-qualified symbol name of an entity; in this case, the symbol's namespace - will be required and the symbol itself resolved at runtime. This is sometimes neccesary to avoid circular - dependencies. This is slower, however, due to added runtime overhead. - - ;; require/resolve metabase.models.table/Table. Then sel Table 1 - (sel :one 'metabase.models.table/Table :id 1) - - FORMS may be either keyword args, which will be added to a korma `where` clause, or [other korma - clauses](http://www.sqlkorma.com/docs#select) such as `order`, which are passed directly. - - (sel :many Table :db_id 1) -> (select User (where {:id 1})) - (sel :many Table :db_id 1 (order :name :ASC)) -> (select User (where {:id 1}) (order :name ASC))" - {:arglists '([options? entity & forms])} - [& args] - (let [[option args] (u/optional keyword? args)] - `(~(if option - ;; if an option was specified, hand off to macro named metabase.db.internal/sel:OPTION - (symbol (format "metabase.db.internal/sel:%s" (name option))) - ;; otherwise just hand off to low-level sel* macro - 'metabase.db.internal/sel*) - ~@args))) - - - -;;; +------------------------------------------------------------------------------------------------------------------------+ -;;; | NEW HONEY-SQL BASED DB UTIL FUNCTIONS | -;;; +------------------------------------------------------------------------------------------------------------------------+ - -;; THIS DEPRECATES THE *ENTIRE* `metabase.db.internal` namespace. Yay! (defn- entity-symb->ns "Return the namespace symbol where we'd expect to find an entity symbol. @@ -353,7 +269,7 @@ {:pre [(map? honeysql-form)]} ;; Not sure *why* but without setting this binding on *rare* occasion HoneySQL will unwantedly generate SQL for a subquery and wrap the query in parens like "(UPDATE ...)" which is invalid (u/prog1 (binding [hformat/*subquery?* false] - (hsql/format honeysql-form :quoting (quoting-style))) + (hsql/format honeysql-form, :quoting (quoting-style), :allow-dashed-names? true)) (when-not *disable-db-logging* (log/debug (str "DB Call: " (first <>)))))) @@ -363,14 +279,40 @@ [honeysql-form & options] (apply jdbc/query (db-connection) (honeysql->sql honeysql-form) options)) + (defn entity->table-name "Get the keyword table name associated with an ENTITY, which can be anything that can be passed to `resolve-entity`. - (entity->table-name 'CardFavorite) -> :report_cardfavorite" + (db/entity->table-name 'CardFavorite) -> :report_cardfavorite" ^clojure.lang.Keyword [entity] {:post [(keyword? 
%)]} (keyword (:table (resolve-entity entity)))) + +(defn qualify + "Qualify a FIELD-NAME name with the name its ENTITY. This is necessary for disambiguating fields for HoneySQL queries that contain joins. + + (db/qualify 'CardFavorite :id) -> :report_cardfavorite.id" + ^clojure.lang.Keyword [entity field-name] + (if (vector? field-name) + [(qualify entity (first field-name)) (second field-name)] + (hsql/qualify (entity->table-name entity) field-name))) + +(defn qualified? + "Is FIELD-NAME qualified by (e.g. with its table name)?" + ^Boolean [field-name] + (if (vector? field-name) + (qualified? (first field-name)) + (boolean (re-find #"\." (name field-name))))) + +(defn- maybe-qualify + "Qualify FIELD-NAME with its table name if it's not already qualified." + ^clojure.lang.Keyword [entity field-name] + (if (qualified? field-name) + field-name + (qualify entity field-name))) + + (defn- entity->fields "Get the fields that should be used in a query, destructuring ENTITY if it's wrapped in a vector, otherwise calling `default-fields`. This will return `nil` if the entity isn't wrapped in a vector and uses the default implementation of `default-fields`. @@ -380,22 +322,17 @@ (entity->fields 'Database) -> nil" [entity] (if (vector? entity) - (rest entity) + (let [[entity & fields] entity] + (for [field fields] + (maybe-qualify entity field))) (models/default-fields (resolve-entity entity)))) -(defn qualify - "Qualify a FIELD-NAME name with the name its ENTITY. This is necessary for disambiguating fields for HoneySQL queries that contain joins. - - (qualify 'CardFavorite :id) -> :report_cardfavorite.id" - ^clojure.lang.Keyword [entity field-name] - (hsql/qualify (entity->table-name entity) field-name)) - (defn simple-select "Select objects from the database. Like `select`, but doesn't offer as many conveniences, so you should use that instead. This calls `post-select` on the results. - (simple-select 'User {:where [:= :id 1]})" + (db/simple-select 'User {:where [:= :id 1]})" {:style/indent 1} [entity honeysql-form] (let [entity (resolve-entity entity)] @@ -408,7 +345,7 @@ (defn simple-select-one "Select a single object from the database. Like `select-one`, but doesn't offer as many conveniences, so prefer that instead. - (simple-select-one 'User (h/where [:= :first-name \"Cam\"]))" + (db/simple-select-one 'User (h/where [:= :first-name \"Cam\"]))" ([entity] (simple-select-one entity {})) ([entity honeysql-form] @@ -466,12 +403,12 @@ (db/update! 'Label 11 {:name \"ToucanFriendly\"})" {:style/indent 2} - ([entity honeysql-form] + (^Boolean [entity honeysql-form] (let [entity (resolve-entity entity)] (not= [0] (execute! (merge (h/update (entity->table-name entity)) honeysql-form))))) - ([entity id kvs] + (^Boolean [entity id kvs] {:pre [(integer? id) (map? kvs) (every? keyword? (keys kvs))]} (let [entity (resolve-entity entity) kvs (-> (models/do-pre-update entity (assoc kvs :id id)) @@ -479,9 +416,21 @@ (update! entity (-> (h/sset {} kvs) (where :id id))))) - ([entity id k v & more] + (^Boolean [entity id k v & more] (update! entity id (apply array-map k v more)))) +(defn update-where! + "Convenience for updating several objects matching CONDITIONS-MAP. Returns `true` if any objects were affected. + For updating a single object, prefer using `update!`, which calls ENTITY's `pre-update` method first. + + (db/update-where! Table {:name table-name + :db_id (:id database)} + :active false)" + {:style/indent 2} + ^Boolean [entity conditions-map & {:as values}] + {:pre [(map? conditions-map) (every? 
keyword? (keys values))]} + (update! entity (where {:set values} conditions-map))) + (defn update-non-nil-keys! "Like `update!`, but filters out KVS with `nil` values." @@ -497,19 +446,19 @@ (defn delete! "Delete an object or objects from the Metabase DB matching certain constraints. Returns `true` if something was deleted, `false` otherwise. - (delete! 'Label) ; delete all Labels - (delete! Label :name \"Cam\") ; delete labels where :name == \"Cam\" - (delete! Label {:name \"Cam\"}) ; for flexibility either a single map or kwargs are accepted + (db/delete! 'Label) ; delete all Labels + (db/delete! Label :name \"Cam\") ; delete labels where :name == \"Cam\" + (db/delete! Label {:name \"Cam\"}) ; for flexibility either a single map or kwargs are accepted Most the time, you should use `cascade-delete!` instead, handles deletion of dependent objects via the entity's implementation of `pre-cascade-delete`." {:style/indent 1} ([entity] (delete! entity {})) - ([entity kvs] - {:pre [(map? kvs) (every? keyword? (keys kvs))]} + ([entity conditions] + {:pre [(map? conditions) (every? keyword? (keys conditions))]} (let [entity (resolve-entity entity)] (not= [0] (execute! (-> (h/delete-from (entity->table-name entity)) - (where kvs)))))) + (where conditions)))))) ([entity k v & more] (delete! entity (apply array-map k v more)))) @@ -524,31 +473,62 @@ :mysql :generated_key :h2 (keyword "scope_identity()"))) -(defn simple-insert! - "Do a raw JDBC `insert!` for a single row. Insert map M into the table named by keyword TABLE-KW. Returns ID of the inserted row (if applicable). - Normally, you shouldn't call this directly; use `insert!` instead, which handles entity resolution and calls `pre-insert` and `post-insert`. +(defn simple-insert-many! + "Do a simple JDBC `insert!` of multiple objects into the database. + Normally you should use `insert-many!` instead, which calls the entity's `pre-insert` method on the ROW-MAPS; + `simple-insert-many!` is offered for cases where you'd like to specifically avoid this behavior. + Returns a sequences of IDs of newly inserted objects. - (simple-insert! :label {:name \"Cam\", :slug \"cam\"}) -> 1" - [table-kw m] - {:pre [(keyword? table-kw) (map? m) (every? keyword? (keys m))]} - ((insert-id-key) (first (jdbc/insert! (db-connection) table-kw m, :entities (quote-fn))))) + (db/simple-insert-many! 'Label [{:name \"Toucan Friendly\"} + {:name \"Bird Approved\"}]) -> [38 39]" + {:style/indent 1} + [entity row-maps] + {:pre [(sequential? row-maps) (every? map? row-maps)]} + (when (seq row-maps) + (let [entity (resolve-entity entity)] + (map (insert-id-key) (apply jdbc/insert! (db-connection) (entity->table-name entity) (concat row-maps [:entities (quote-fn)])))))) + +(defn insert-many! + "Insert several new rows into the Database. Resolves ENTITY, and calls `pre-insert` on each of the ROW-MAPS. + Returns a sequence of the IDs of the newly created objects. + + (db/insert-many! 'Label [{:name \"Toucan Friendly\"} + {:name \"Bird Approved\"}]) -> [38 39]" + {:style/indent 1} + [entity row-maps] + (let [entity (resolve-entity entity)] + (simple-insert-many! entity (for [row-map row-maps] + (models/do-pre-insert entity row-map))))) -(defn insert! - "Insert a new object into the Database. Resolves ENTITY, and calls its `pre-insert` method on OBJECT to prepare it before insertion; - after insertion, it calls `post-insert` on the newly created object and returns it. - For flexibility, `insert!` OBJECT can be either a single map or individual kwargs: +(defn simple-insert! 
+ "Do a simple JDBC `insert!` of a single object. + Normally you should use `insert!` instead, which calls the entity's `pre-insert` method on ROW-MAP; + `simple-insert!` is offered for cases where you'd like to specifically avoid this behavior. + Returns the ID of the inserted object. - (insert! Label {:name \"Toucan Unfriendly\"}) - (insert! 'Label :name \"Toucan Friendly\") + (db/simple-insert! 'Label :name \"Toucan Friendly\") -> 1 - This fetches the newly created object from the database and passes it to the entity's `post-insert` method, ultimately returning the object." + Like `insert!`, `simple-insert!` can be called with either a single ROW-MAP or kv-style arguments." {:style/indent 1} - ([entity object] - {:pre [(map? object)]} - (let [entity (resolve-entity entity) - id (simple-insert! (entity->table-name entity) (models/do-pre-insert entity object))] - (some-> id entity models/post-insert))) + ([entity row-map] + {:pre [(map? row-map) (every? keyword? (keys row-map))]} + (first (simple-insert-many! entity [row-map]))) + ([entity k v & more] + (simple-insert! entity (apply array-map k v more)))) +(defn insert! + "Insert a new object into the Database. Resolves ENTITY, and calls its `pre-insert` method on ROW-MAP to prepare it before insertion; + after insert, it fetches and returns the newly created object. + For flexibility, `insert!` can handle either a single map or individual kwargs: + + (db/insert! Label {:name \"Toucan Unfriendly\"}) + (db/insert! 'Label :name \"Toucan Friendly\")" + {:style/indent 1} + ([entity row-map] + {:pre [(map? row-map) (every? keyword? (keys row-map))]} + (let [entity (resolve-entity entity)] + (when-let [id (simple-insert! entity (models/do-pre-insert entity row-map))] + (entity id)))) ([entity k v & more] (insert! entity (apply array-map k v more)))) @@ -580,14 +560,17 @@ "Select the `:id` of a single object from the database. (select-one-id 'Database :name \"Sample Dataset\") -> 1" + {:style/indent 1} [entity & options] - (apply select-one-field :id entity options)) + (let [entity (resolve-entity entity)] + (apply select-one-field :id entity options))) (defn select-one-count "Select the count of objects matching some condition. ;; Get all Databases whose name is non-nil (select-one-count 'Database :name [:not= nil]) -> 12" + {:style/indent 1} [entity & options] (:count (apply select-one [entity [:%count.* :count]] options))) @@ -601,39 +584,23 @@ (simple-select entity (where+ {:select (or fields [:*])} options)))) (defn select-field - "Select values of a single field for multiple objects. These are returned as a set. + "Select values of a single field for multiple objects. These are returned as a set if any matching fields were returned, otherwise `nil`. (select-field :name 'Database) -> #{\"Sample Dataset\", \"test-data\"}" {:style/indent 2} [field entity & options] {:pre [(keyword? field)]} - (set (map field (apply select [entity field] options)))) + (when-let [results (seq (map field (apply select [entity field] options)))] + (set results))) (defn select-ids - "Select IDs for multiple objects. These are returned as a set. + "Select IDs for multiple objects. These are returned as a set if any matching IDs were returned, otherwise `nil`. (select-ids 'Table :db_id 1) -> #{1 2 3 4}" {:style/indent 1} [entity & options] (apply select-field :id entity options)) -(defn select-field->obj - "Select objects from the database, and return them as a map of FIELD to the objects themselves. 
- - (select-field->obj :name 'Database) -> {\"Sample Dataset\" {...}, \"test-data\" {...}}" - {:style/indent 2} - [field entity & options] - {:pre [(keyword? field)]} - (into {} (for [result (apply select entity options)] - {(field result) result}))) - -(defn select-id->obj - "Select objects from the database, and return them as a map of their `:id` to the objects themselves. - - (select-id->obj 'Database) -> {1 {...}, 2 {...}}" - [entity & options] - (apply select-field->obj :id entity options)) - (defn select-field->field "Select fields K and V from objects in the database, and return them as a map from K to V. @@ -682,9 +649,22 @@ TODO - this depends on objects having an `:id` column; consider a way to fix this for models like `Setting` that do not have one." {:style/indent 1} - [entity & kvs] + [entity & conditions] (let [entity (resolve-entity entity)] - (doseq [object (apply select entity kvs)] + (doseq [object (apply select entity conditions)] (models/pre-cascade-delete object) (delete! entity :id (:id object)))) {:status 204, :body nil}) + + +;;; Various convenience fns (experiMENTAL) + +(defn join + "Convenience for generating a HoneySQL `JOIN` clause. + + (db/select-ids Table + (db/join [Table :raw_table_id] [RawTable :id]) + :active true)" + [[source-entity fk] [dest-entity pk]] + {:left-join [(entity->table-name dest-entity) [:= (qualify source-entity fk) + (qualify dest-entity pk)]]}) diff --git a/src/metabase/db/internal.clj b/src/metabase/db/internal.clj deleted file mode 100644 index 6c499328eada13a497cdec535583d65f3d43fef1..0000000000000000000000000000000000000000 --- a/src/metabase/db/internal.clj +++ /dev/null @@ -1,230 +0,0 @@ -(ns ^:deprecated metabase.db.internal - "Internal functions and macros used by the public-facing functions in `metabase.db`." - (:require [clojure.string :as s] - [clojure.tools.logging :as log] - [korma.core :as k] - [metabase.config :as config] - [metabase.models.interface :as models] - [metabase.util :as u])) - -(declare entity->korma) - -(defn- ^:deprecated pull-kwargs - "Where FORMS is a sequence like `[:id 1 (limit 3)]`, return a map of kwarg pairs and sequence of remaining forms. - - (pull-kwargs [:id 1 (limit 3) (order :id :ASC]) -> [{:id 1} [(limit 3) (order :id ASC)]" - ([forms] - (pull-kwargs {} [] forms)) - ([kwargs-acc forms-acc [arg1 & rest-args]] - (if-not arg1 [kwargs-acc forms-acc] - (if (keyword? arg1) - (recur (assoc kwargs-acc arg1 (first rest-args)) forms-acc (rest rest-args)) - (recur kwargs-acc (conj forms-acc arg1) rest-args))))) - -(defn ^:deprecated sel-apply-kwargs - "Pull kwargs from forms and add korma `where` form if applicable." - [forms] - (let [[kwargs-map forms] (pull-kwargs forms)] - (if-not (empty? kwargs-map) (conj forms `(k/where ~kwargs-map)) - forms))) - -(defn ^:deprecated destructure-entity - "Take an ENTITY of the form `entity` or `[entity & field-keys]` and return a pair like `[entity field-keys]`." - [entity] - (if-not (vector? entity) [entity nil] - [(first entity) (vec (rest entity))])) - -(def ^{:arglists '([entity])} ^:deprecated entity->korma - "Convert an ENTITY argument to `sel` into the form we should pass to korma `select` and to various multi-methods such as - `post-select`. - - * If entity is a vector like `[User :name]`, only keeps the first arg (`User`) - * Converts fully-qualified entity name strings like `\"metabase.models.user/User\"` to the corresponding entity - and requires their namespace if needed. - * Symbols like `'metabase.models.user/User` are handled the same way as strings. 
- * Infers the namespace of unqualified symbols like `'CardFavorite`" - (memoize - (fn -entity->korma [entity] - {:post [:metabase.models.interface/entity]} - (cond (vector? entity) (-entity->korma (first entity)) - (string? entity) (-entity->korma (symbol entity)) - (symbol? entity) (let [[_ ns symb] (re-matches #"^(?:([^/]+)/)?([^/]+)$" (str entity)) - _ (assert symb) - ns (symbol (or ns - (str "metabase.models." (-> symb - (s/replace #"([a-z])([A-Z])" "$1-$2") ; convert something like CardFavorite - s/lower-case)))) ; to ns like metabase.models.card_favorite - symb (symbol symb)] - (require ns) - @(ns-resolve ns symb)) - :else entity)))) - - -;;; ## ---------------------------------------- SEL 2.0 FUNCTIONS ---------------------------------------- - -;;; Low-level sel implementation - -(defmacro ^:deprecated sel-fn - "Part of the internal implementation for `sel`, don't call this directly!" - [& forms] - (let [forms (sel-apply-kwargs forms) - entity (gensym "ENTITY")] - (loop [query `(k/select* ~entity), [[f & args] & more] forms] - (cond - f (recur `(~f ~query ~@args) more) - (seq more) (recur query more) - :else `[(fn [~entity] - ~query) ~(str query)])))) - -(defn ^:deprecated sel-exec - "Part of the internal implementation for `sel`, don't call this directly! - Execute the korma form generated by the `sel` macro and process the results." - [entity [select-fn log-str]] - (let [[entity field-keys] (destructure-entity entity) - entity (entity->korma entity) - entity+fields (assoc entity :fields (or field-keys - (models/default-fields entity)))] - ;; Log if applicable - (future - (when (config/config-bool :mb-db-logging) - (when-not @(resolve 'metabase.db/*disable-db-logging*) - (log/debug "DB CALL:" (:name entity) - (or (:fields entity+fields) "*") - (s/replace log-str #"korma.core/" ""))))) - - (for [obj (k/exec (select-fn entity+fields))] - (models/do-post-select entity obj)))) - -(defmacro ^:deprecated sel* - "Part of the internal implementation for `sel`, don't call this directly!" - [entity & forms] - `(sel-exec ~entity (sel-fn ~@forms))) - -;;; :field - -(defmacro ^:deprecated sel:field - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field ...)` forms." - [[entity field] & forms] - `(let [field# ~field] - (map field# (sel* [~entity field#] ~@forms)))) - -;;; :id - -(defmacro ^:deprecated sel:id - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :id ...)` forms." - [entity & forms] - `(sel:field [~entity :id] ~@forms)) - -;;; :fields - -(defn ^:deprecated sel:fields* - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :fields ...)` forms." - [fields results] - (for [result results] - (select-keys result fields))) - -(defmacro ^:deprecated sel:fields - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :fields ...)` forms." - [[entity & fields] & forms] - `(let [fields# ~(vec fields)] - (sel:fields* (set fields#) (sel* `[~~entity ~@fields#] ~@forms)))) - -;;; :id->fields - -(defn ^:deprecated sel:id->fields* - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :id->fields ...)` forms." - [fields results] - (->> results - (map (u/rpartial select-keys fields)) - (zipmap (map :id results)))) - -(defmacro ^:deprecated sel:id->fields - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :id->fields ...)` forms." 
- [[entity & fields] & forms] - `(let [fields# ~(conj (set fields) :id)] - (sel:id->fields* fields# (sel* `[~~entity ~@fields#] ~@forms)))) - -;;; :field->field - -(defn ^:deprecated sel:field->field* - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->field ...)` forms." - [f1 f2 results] - (into {} (for [result results] - {(f1 result) (f2 result)}))) - -(defmacro ^:deprecated sel:field->field - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->field ...)` forms." - [[entity f1 f2] & forms] - `(let [f1# ~f1 - f2# ~f2] - (sel:field->field* f1# f2# (sel* [~entity f1# f2#] ~@forms)))) - -;;; :field->fields - -(defn ^:deprecated sel:field->fields* - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->fields ...)` forms." - [key-field other-fields results] - (into {} (for [result results] - {(key-field result) (select-keys result other-fields)}))) - -(defmacro ^:deprecated sel:field->fields - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->fields ...)` forms." - [[entity key-field & other-fields] & forms] - `(let [key-field# ~key-field - other-fields# ~(vec other-fields)] - (sel:field->fields* key-field# other-fields# (sel* `[~~entity ~key-field# ~@other-fields#] ~@forms)))) - -;;; : id->field - -(defmacro ^:deprecated sel:id->field - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :id->field ...)` forms." - [[entity field] & forms] - `(sel:field->field [~entity :id ~field] ~@forms)) - -;;; :field->id - -(defmacro ^:deprecated sel:field->id - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->id ...)` forms." - [[entity field] & forms] - `(sel:field->field [~entity ~field :id] ~@forms)) - -;;; :field->obj - -(defn ^:deprecated sel:field->obj* - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->obj ...)` forms." - [field results] - (into {} (for [result results] - {(field result) result}))) - -(defmacro ^:deprecated sel:field->obj - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel ... :field->obj ...)` forms." - [[entity field] & forms] - `(sel:field->obj* ~field (sel* ~entity ~@forms))) - -;;; :one & :many - -(defmacro ^:deprecated sel:one - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel :one ...)` forms." - [& args] - `(first (metabase.db/sel ~@args (k/limit 1)))) - -(defmacro ^:deprecated sel:many - "Part of the internal implementation for `sel`, don't call this directly! - Handle `(sel :many ...)` forms." 
- [& args] - `(metabase.db/sel ~@args)) diff --git a/src/metabase/db/migrations.clj b/src/metabase/db/migrations.clj index 9d09b90236226b8c41357046433265ccc97343c0..9fc449d85880ede0bab6cf0ef1e46e0b81fea4aa 100644 --- a/src/metabase/db/migrations.clj +++ b/src/metabase/db/migrations.clj @@ -64,7 +64,7 @@ (defmigration set-card-database-and-table-ids ;; only execute when `:database_id` column on all cards is `nil` (when (= 0 (:cnt (first (k/select Card (k/aggregate (count :*) :cnt) (k/where (not= :database_id nil)))))) - (doseq [{id :id {:keys [type] :as dataset-query} :dataset_query} (db/sel :many [Card :id :dataset_query])] + (doseq [{id :id {:keys [type] :as dataset-query} :dataset_query} (db/select [Card :id :dataset_query])] (when type ;; simply resave the card with the dataset query which will automatically set the database, table, and type (db/update! Card id, :dataset_query dataset-query))))) @@ -74,7 +74,7 @@ ;; UI was automatically setting `:ssl` to `true` for every database added as part of the auto-SSL detection. ;; Since Mongo did *not* support SSL, all existing Mongo DBs should actually have this key set to `false`. (defmigration set-mongodb-databases-ssl-false - (doseq [{:keys [id details]} (db/sel :many :fields [Database :id :details] :engine "mongo")] + (doseq [{:keys [id details]} (db/select [Database :id :details], :engine "mongo")] (db/update! Database id, :details (assoc details :ssl false)))) @@ -94,7 +94,9 @@ ;; Populate the initial value for the `:admin-email` setting for anyone who hasn't done it yet (defmigration set-admin-email (when-not (setting/get :admin-email) - (when-let [email (db/sel :one :field ['User :email] (k/where {:is_superuser true :is_active true}))] + (when-let [email (db/select-one-field :email 'User + :is_superuser true + :is_active true)] (setting/set :admin-email email)))) @@ -107,7 +109,7 @@ ;; Clean up duplicate FK entries (defmigration remove-duplicate-fk-entries - (let [existing-fks (db/sel :many ForeignKey) + (let [existing-fks (db/select ForeignKey) grouped-fks (group-by #(str (:origin_id %) "_" (:destination_id %)) existing-fks)] (doseq [[k fks] grouped-fks] (when (< 1 (count fks)) @@ -119,7 +121,7 @@ ;; Migrate dashboards to the new grid ;; NOTE: this scales the dashboards by 4x in the Y-scale and 3x in the X-scale (defmigration update-dashboards-to-new-grid - (doseq [{:keys [id row col sizeX sizeY]} (db/sel :many DashboardCard)] + (doseq [{:keys [id row col sizeX sizeY]} (db/select DashboardCard)] (k/update DashboardCard (k/set-fields {:row (when row (* row 4)) :col (when col (* col 3)) @@ -178,7 +180,7 @@ ;; migrate FK information from old ForeignKey model to Field.fk_target_field_id (defmigration migrate-fk-metadata (when (> 1 (:cnt (first (k/select Field (k/aggregate (count :*) :cnt) (k/where (not= :fk_target_field_id nil)))))) - (when-let [fks (not-empty (db/sel :many ForeignKey))] + (when-let [fks (not-empty (db/select ForeignKey))] (doseq [{:keys [origin_id destination_id]} fks] (k/update Field (k/set-fields {:fk_target_field_id destination_id}) @@ -191,8 +193,8 @@ (when (= 0 (:cnt (first (k/select RawTable (k/aggregate (count :*) :cnt))))) (binding [db/*disable-db-logging* true] (kdb/transaction - (doseq [{database-id :id, :keys [name engine]} (db/sel :many Database)] - (when-let [tables (not-empty (db/sel :many Table :db_id database-id, :active true))] + (doseq [{database-id :id, :keys [name engine]} (db/select Database)] + (when-let [tables (not-empty (db/select Table, :db_id database-id, :active true))] (log/info (format 
"Migrating raw schema information for %s database '%s'" engine name)) (let [processed-tables (atom #{})] (doseq [{table-id :id, table-schema :schema, table-name :name} tables] @@ -218,7 +220,7 @@ ;; migrate all Fields in the Table (skipping :dynamic-schema dbs) (when-not (driver/driver-supports? (driver/engine->driver engine) :dynamic-schema) (let [processed-fields (atom #{})] - (doseq [{field-id :id, column-name :name, :as field} (db/sel :many Field :table_id table-id, :visibility_type [not= "retired"])] + (doseq [{field-id :id, column-name :name, :as field} (db/select Field, :table_id table-id, :visibility_type [:not= "retired"])] ;; guard against duplicate fields with the same name (if (contains? @processed-fields column-name) ;; this is a dupe, disable it diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj index 9171c7be85e8f628b5b9bacae86c034f27123dc0..a7c779e3fdd2c8d852b8ac641751975a4c74d8c4 100644 --- a/src/metabase/driver.clj +++ b/src/metabase/driver.clj @@ -147,14 +147,18 @@ (mbql->native ^java.util.Map [this, ^Map query] "Transpile an MBQL structured query into the appropriate native query form. - The input query will be a fully expanded MBQL query (https://github.com/metabase/metabase/wiki/Expanded-Queries) with + The input QUERY will be a [fully-expanded MBQL query](https://github.com/metabase/metabase/wiki/Expanded-Queries) with all the necessary pieces of information to build a properly formatted native query for the given database. + If the underlying query language supports remarks or comments, the driver should use `query->remark` to generate an appropriate message and include that in an appropriate place; + alternatively a driver might directly include the query's `:info` dictionary if the underlying language is JSON-based. + The result of this function will be passed directly into calls to `execute-query`. For example, a driver like Postgres would build a valid SQL expression and return a map such as: - {:query \"SELECT * FROM my_table\"}") + {:query \"-- [Contents of `(query->remark query)`] + SELECT * FROM my_table\"}") (notify-database-updated [this, ^DatabaseInstance database] "*OPTIONAL*. Notify the driver that the attributes of the DATABASE have changed. This is specifically relevant in @@ -328,7 +332,7 @@ (Databases aren't expected to change their types, and this optimization makes things a lot faster). This loads the corresponding driver if needed." 
- (let [db-id->engine (memoize (fn [db-id] (db/sel :one :field [Database :engine] :id db-id)))] + (let [db-id->engine (memoize (fn [db-id] (db/select-one-field :engine Database, :id db-id)))] (fn [db-id] (engine->driver (db-id->engine db-id))))) diff --git a/src/metabase/driver/bigquery.clj b/src/metabase/driver/bigquery.clj index 76ff1ae1d44b83dd852a7d70ab2ec2ff472a823b..48bf5e41d1f76436577582ba40aa5e711a5d9a6f 100644 --- a/src/metabase/driver/bigquery.clj +++ b/src/metabase/driver/bigquery.clj @@ -3,8 +3,9 @@ [string :as s] [walk :as walk]) [clojure.tools.logging :as log] - (korma [core :as k] - [db :as kdb]) + (honeysql [core :as hsql] + [helpers :as h]) + [korma.db :as kdb] (metabase [config :as config] [db :as db] [driver :as driver]) @@ -14,9 +15,10 @@ [field :as field] [table :as table]) [metabase.sync-database.analyze :as analyze] + [metabase.query-processor :as qp] metabase.query-processor.interface [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) (:import (java.util Collections Date) (com.google.api.client.googleapis.auth.oauth2 GoogleCredential GoogleCredential$Builder GoogleAuthorizationCodeFlow GoogleAuthorizationCodeFlow$Builder GoogleTokenResponse) com.google.api.client.googleapis.javanet.GoogleNetHttpTransport @@ -151,8 +153,8 @@ (defn- table-schema->metabase-field-info [^TableSchema schema] (for [^TableFieldSchema field (.getFields schema)] - {:name (.getName field) - :base-type (bigquery-type->base-type (.getType field))})) + {:name (.getName field) + :base-type (bigquery-type->base-type (.getType field))})) (defn- describe-table [database {table-name :name}] {:schema nil @@ -160,6 +162,8 @@ :fields (set (table-schema->metabase-field-info (.getSchema (get-table database table-name))))}) +(def ^:private ^:const query-timeout-seconds 90) + (defn- ^QueryResponse execute-bigquery ([{{:keys [project-id]} :details, :as database} query-string] (execute-bigquery (database->client database) project-id query-string)) @@ -167,7 +171,8 @@ ([^Bigquery client, ^String project-id, ^String query-string] {:pre [client (seq project-id) (seq query-string)]} (let [request (doto (QueryRequest.) - #_(.setUseLegacySql false) ; use standards-compliant non-legacy dialect + (.setTimeoutMs (* query-timeout-seconds 1000)) + #_(.setUseLegacySql false) ; use standards-compliant non-legacy dialect -- see https://cloud.google.com/bigquery/sql-reference/enabling-standard-sql (.setQuery query-string))] (execute (.query (.jobs client) project-id request))))) @@ -196,11 +201,9 @@ "STRING" identity "TIMESTAMP" parse-timestamp-str}) -(def ^:private ^:const query-default-timeout-seconds 60) - (defn- post-process-native ([^QueryResponse response] - (post-process-native response query-default-timeout-seconds)) + (post-process-native response query-timeout-seconds)) ([^QueryResponse response, ^Integer timeout-seconds] (if-not (.getJobComplete response) ;; 99% of the time by the time this is called `.getJobComplete` will return `true`. On the off chance it doesn't, wait a few seconds for the job to finish. @@ -255,65 +258,51 @@ ;;; # Generic SQL Driver Methods (defn- date-add [unit timestamp interval] - (k/sqlfn* :DATE_ADD timestamp interval (kx/literal unit))) + (hsql/call :date_add timestamp interval (hx/literal unit))) ;; µs = unix timestamp in microseconds. 
Most BigQuery functions like strftime require timestamps in this format -(def ^:private ->µs (partial k/sqlfn* :TIMESTAMP_TO_USEC)) +(def ^:private ->µs (partial hsql/call :timestamp_to_usec)) (defn- µs->str [format-str µs] - (k/sqlfn* :STRFTIME_UTC_USEC µs (kx/literal format-str))) + (hsql/call :strftime_utc_usec µs (hx/literal format-str))) (defn- trunc-with-format [format-str timestamp] - (kx/->timestamp (µs->str format-str (->µs timestamp)))) + (hx/->timestamp (µs->str format-str (->µs timestamp)))) (defn- date [unit expr] {:pre [expr]} (case unit :default expr :minute (trunc-with-format "%Y-%m-%d %H:%M:00" expr) - :minute-of-hour (kx/minute expr) + :minute-of-hour (hx/minute expr) :hour (trunc-with-format "%Y-%m-%d %H:00:00" expr) - :hour-of-day (kx/hour expr) - :day (kx/->timestamp (k/sqlfn* :DATE expr)) - :day-of-week (k/sqlfn* :DAYOFWEEK expr) - :day-of-month (k/sqlfn* :DAY expr) - :day-of-year (k/sqlfn* :DAYOFYEAR expr) - :week (date-add :DAY (date :day expr) (kx/- 1 (date :day-of-week expr))) - :week-of-year (kx/week expr) + :hour-of-day (hx/hour expr) + :day (hx/->timestamp (hsql/call :date expr)) + :day-of-week (hsql/call :dayofweek expr) + :day-of-month (hsql/call :day expr) + :day-of-year (hsql/call :dayofyear expr) + :week (date-add :day (date :day expr) (hx/- 1 (date :day-of-week expr))) + :week-of-year (hx/week expr) :month (trunc-with-format "%Y-%m-01" expr) - :month-of-year (kx/month expr) - :quarter (date-add :MONTH + :month-of-year (hx/month expr) + :quarter (date-add :month (trunc-with-format "%Y-01-01" expr) - (kx/* (kx/dec (date :quarter-of-year expr)) + (hx/* (hx/dec (date :quarter-of-year expr)) 3)) - :quarter-of-year (kx/quarter expr) - :year (kx/year expr))) + :quarter-of-year (hx/quarter expr) + :year (hx/year expr))) (defn- unix-timestamp->timestamp [expr seconds-or-milliseconds] (case seconds-or-milliseconds - :seconds (k/sqlfn* :SEC_TO_TIMESTAMP expr) - :milliseconds (k/sqlfn* :MSEC_TO_TIMESTAMP expr))) + :seconds (hsql/call :sec_to_timestamp expr) + :milliseconds (hsql/call :msec_to_timestamp expr))) ;;; # Query Processing (declare driver) -;; this is never actually connected to, just passed to korma so it applies appropriate delimiters when building SQL -(def ^:private ^:const korma-db - {:pool {:subprotocol "sqlite" - :subname ""} - :options {:naming {:keys identity - :fields identity} - :delimiters [\[ \]] - :alias-delimiter " AS " - :subprotocol ""}}) - -(defn- entity [dataset-id table-name] - (-> (k/create-entity (k/raw (format "[%s.%s]" dataset-id table-name))) - (k/database korma-db))) - ;; Make the dataset-id the "schema" of every field or table in the query because otherwise BigQuery can't figure out where things is from (defn- qualify-fields-and-tables-with-dataset-id [{{{:keys [dataset-id]} :details} :database, :as query}] (walk/postwalk (fn [x] @@ -323,18 +312,17 @@ :else x)) (assoc-in query [:query :source-table :schema] dataset-id))) -(defn- korma-form [query entity] - (sqlqp/build-korma-form driver (qualify-fields-and-tables-with-dataset-id query) entity)) +(defn- honeysql-form [outer-query] + (sqlqp/build-honeysql-form driver (qualify-fields-and-tables-with-dataset-id outer-query))) -(defn- korma-form->sql [korma-form] - {:pre [(map? 
korma-form)]} - ;; replace identifiers like [shakespeare].[word] with ones like [shakespeare.word] since that's what BigQuery expects - (try (s/replace (kdb/with-db korma-db - (k/as-sql korma-form)) - #"\]\.\[" ".") - (catch Throwable e - (log/error (u/format-color 'red "Couldn't convert korma form to SQL:\n%s" (sqlqp/pprint-korma-form korma-form))) - (throw e)))) +(defn- honeysql-form->sql ^String [honeysql-form] + {:pre [(map? honeysql-form)]} + ;; replace identifiers like [shakespeare].[word] with ones like [shakespeare.word] since that's hat BigQuery expects + (let [[sql & args] (sql/honeysql-form->sql+args driver honeysql-form) + sql (s/replace (hx/unescape-dots sql) #"\]\.\[" ".")] + (assert (empty? args) + "BigQuery statements can't be parameterized!") + sql)) (defn- post-process-mbql [dataset-id table-name {:keys [columns rows]}] ;; Since we don't alias column names the come back like "veryNiceDataset_shakepeare_corpus". Strip off the dataset and table IDs @@ -344,14 +332,14 @@ (for [row rows] (zipmap columns row)))) -(defn- mbql->native [{{{:keys [dataset-id]} :details, :as database} :database, {{table-name :name} :source-table} :query, :as query}] +(defn- mbql->native [{{{:keys [dataset-id]} :details, :as database} :database, {{table-name :name} :source-table} :query, :as outer-query}] {:pre [(map? database) (seq dataset-id) (seq table-name)]} - (let [korma-form (korma-form query (entity dataset-id table-name)) - sql (korma-form->sql korma-form)] - (sqlqp/log-korma-form korma-form sql) - {:query sql - :table-name table-name - :mbql? true})) + (binding [sqlqp/*query* outer-query] + (let [honeysql-form (honeysql-form outer-query) + sql (honeysql-form->sql honeysql-form)] + {:query (str "-- " (qp/query->remark outer-query) "\n" sql) + :table-name table-name + :mbql? true}))) (defn- execute-query [{{{:keys [dataset-id]} :details, :as database} :database, {sql :query, :keys [table-name mbql?]} :native}] (let [results (process-native* database sql) @@ -364,19 +352,19 @@ (mapv row columns)) :annotate? true})) -;; This provides an implementation of `prepare-value` that prevents korma from converting forms to prepared statement parameters (`?`) +;; This provides an implementation of `prepare-value` that prevents HoneySQL from converting forms to prepared statement parameters (`?`) ;; TODO - Move this into `metabase.driver.generic-sql` and document it as an alternate implementation for `prepare-value` (?) -;; Or perhaps investigate a lower-level way to disable the functionality in korma, perhaps by swapping out a function somewhere +;; Or perhaps investigate a lower-level way to disable the functionality in HoneySQL, perhaps by swapping out a function somewhere (defprotocol ^:private IPrepareValue (^:private prepare-value [this])) (extend-protocol IPrepareValue nil (prepare-value [_] nil) DateTimeValue (prepare-value [{:keys [value]}] (prepare-value value)) Value (prepare-value [{:keys [value]}] (prepare-value value)) - String (prepare-value [this] (kx/literal this)) - Boolean (prepare-value [this] (k/raw (if this "TRUE" "FALSE"))) - Date (prepare-value [this] (k/sqlfn* :TIMESTAMP (kx/literal (u/date->iso-8601 this)))) - Number (prepare-value [this] (k/raw this)) + String (prepare-value [this] (hx/literal this)) + Boolean (prepare-value [this] (hsql/raw (if this "TRUE" "FALSE"))) + Date (prepare-value [this] (hsql/call :timestamp (hx/literal (u/date->iso-8601 this)))) + Number (prepare-value [this] this) Object (prepare-value [this] (throw (Exception. 
(format "Don't know how to prepare value %s %s" (class this) this))))) @@ -392,26 +380,44 @@ ag-type))) :else (str schema-name \. table-name \. field-name))) +;; We have to override the default SQL implementations of breakout and order-by because BigQuery propogates casting functions in SELECT +;; BAD: +;; SELECT msec_to_timestamp([sad_toucan_incidents.incidents.timestamp]) AS [sad_toucan_incidents.incidents.timestamp], count(*) AS [count] +;; FROM [sad_toucan_incidents.incidents] +;; GROUP BY msec_to_timestamp([sad_toucan_incidents.incidents.timestamp]) +;; ORDER BY msec_to_timestamp([sad_toucan_incidents.incidents.timestamp]) ASC +;; LIMIT 10 +;; +;; GOOD: +;; SELECT msec_to_timestamp([sad_toucan_incidents.incidents.timestamp]) AS [sad_toucan_incidents.incidents.timestamp], count(*) AS [count] +;; FROM [sad_toucan_incidents.incidents] +;; GROUP BY [sad_toucan_incidents.incidents.timestamp] +;; ORDER BY [sad_toucan_incidents.incidents.timestamp] ASC +;; LIMIT 10 + (defn- field->identitfier [field] - (k/raw (str \[ (field->alias field) \]))) + (hsql/raw (str \[ (field->alias field) \]))) -(defn- apply-breakout [korma-form {breakout-fields :breakout, fields-fields :fields}] - (-> korma-form +(defn- apply-breakout [honeysql-form {breakout-fields :breakout, fields-fields :fields}] + (-> honeysql-form ;; Group by all the breakout fields - ((partial apply k/group) (map field->identitfier breakout-fields)) - ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or korma will barf - ((partial apply k/fields) (for [field breakout-fields - :when (not (contains? (set fields-fields) field))] - (sqlqp/as (sqlqp/formatted field) field))))) - -(defn- apply-order-by [korma-form {subclauses :order-by}] - (loop [korma-form korma-form, [{:keys [field direction]} & more] subclauses] - (let [korma-form (k/order korma-form (field->identitfier field) (case direction - :ascending :ASC - :descending :DESC))] + ((partial apply h/group) (map field->identitfier breakout-fields)) + ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or HoneySQL will barf + ((partial apply h/merge-select) (for [field breakout-fields + :when (not (contains? 
(set fields-fields) field))] + (sqlqp/as (sqlqp/formatted field) field))))) + +(defn- apply-order-by [honeysql-form {subclauses :order-by}] + (loop [honeysql-form honeysql-form, [{:keys [field direction]} & more] subclauses] + (let [honeysql-form (h/merge-order-by honeysql-form [(field->identitfier field) (case direction + :ascending :asc + :descending :desc)])] (if (seq more) - (recur korma-form more) - korma-form)))) + (recur honeysql-form more) + honeysql-form)))) + +(defn- string-length-fn [field-key] + (hsql/call :length field-key)) (defrecord BigQueryDriver [] @@ -427,11 +433,12 @@ :apply-order-by (u/drop-first-arg apply-order-by) :column->base-type (constantly nil) ; these two are actually not applicable :connection-details->spec (constantly nil) ; since we don't use JDBC - :current-datetime-fn (constantly (k/sqlfn* :CURRENT_TIMESTAMP)) + :current-datetime-fn (constantly :%current_timestamp) :date (u/drop-first-arg date) :field->alias (u/drop-first-arg field->alias) :prepare-value (u/drop-first-arg prepare-value) - :string-length-fn (constantly :LENGTH) + :quote-style (constantly :sqlserver) ; we want identifiers quoted [like].[this] + :string-length-fn (u/drop-first-arg string-length-fn) :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)}) driver/IDriver @@ -471,4 +478,4 @@ :field-values-lazy-seq (u/drop-first-arg field-values-lazy-seq) :mbql->native (u/drop-first-arg mbql->native)})) -(driver/register-driver! :bigquery (BigQueryDriver.)) +(driver/register-driver! :bigquery driver) diff --git a/src/metabase/driver/crate.clj b/src/metabase/driver/crate.clj index 1f9cbd6c7f93ae96bd2d53282e5cb85d2dd99813..c59579588ac8b2d7dc68f19619bdaa824b301ddc 100644 --- a/src/metabase/driver/crate.clj +++ b/src/metabase/driver/crate.clj @@ -1,6 +1,7 @@ (ns metabase.driver.crate - (:require [clojure.set :as set] - [korma.core :as k] + (:require [clojure.java.jdbc :as jdbc] + [clojure.set :as set] + [honeysql.core :as hsql] [metabase.driver :as driver] (metabase.driver.crate [analyze :as analyze] [query-processor :as qp] @@ -40,7 +41,7 @@ :geo_point_array :ArrayField}) -(def ^:private ^:const now (k/sqlfn :CURRENT_TIMESTAMP (k/raw 3))) +(def ^:private ^:const now (hsql/call :current_timestamp 3)) (defn- crate-spec [{:keys [hosts] @@ -53,10 +54,10 @@ (defn- can-connect? 
[details] (let [connection-spec (crate-spec details)] - (= 1 (-> (k/exec-raw connection-spec "select 1 from sys.cluster" :results) - first - vals - first)))) + (= 1 (first (vals (first (jdbc/query connection-spec ["select 1 from sys.cluster"]))))))) + +(defn- string-length-fn [field-key] + (hsql/call :char_length field-key)) (defrecord CrateDriver [] @@ -80,7 +81,7 @@ (merge (sql/ISQLDriverDefaultsMixin) {:connection-details->spec (u/drop-first-arg crate-spec) :column->base-type (u/drop-first-arg column->base-type) - :string-length-fn (constantly :CHAR_LENGTH) + :string-length-fn (u/drop-first-arg string-length-fn) :apply-filter qp/apply-filter :date crate-util/date :unix-timestamp->timestamp crate-util/unix-timestamp->timestamp diff --git a/src/metabase/driver/crate/query_processor.clj b/src/metabase/driver/crate/query_processor.clj index 8cd5f74ac4fb258cf1651134ffd9105620da2e4e..6e468fd47fce910d8bc90891166834026fe10046 100644 --- a/src/metabase/driver/crate/query_processor.clj +++ b/src/metabase/driver/crate/query_processor.clj @@ -1,8 +1,6 @@ (ns metabase.driver.crate.query-processor (:require [clojure.java.jdbc :as jdbc] - [korma.core :as k] - [korma.sql.engine :as kengine] - [korma.sql.fns :as kfns] + [honeysql.helpers :as h] [metabase.driver.generic-sql :as sql] [metabase.driver.generic-sql.query-processor :as qp] [metabase.query-processor.interface :as i]) @@ -19,22 +17,21 @@ :field (:field clause) :value (:max-val clause)})]})) -(defn resolve-subclauses +(defn- filter-clause->predicate "resolve filters recursively" - [clause] - (if (= (count (:subclauses clause)) 0) - (case (:filter-type clause) - :between (qp/filter-clause->predicate (rewrite-between clause)) - (qp/filter-clause->predicate clause)) - (case (:compound-type clause) - :and (apply kfns/pred-and (map resolve-subclauses (:subclauses clause))) - :or (apply kfns/pred-or (map resolve-subclauses (:subclauses clause))) - :not (kfns/pred-not (kengine/pred-map (qp/filter-subclause->predicate clause)))))) + [{:keys [compound-type filter-type subclause subclauses], :as clause}] + (case compound-type + :and (apply vector :and (map filter-clause->predicate subclauses)) + :or (apply vector :or (map filter-clause->predicate subclauses)) + :not [:not (filter-clause->predicate subclause)] + nil (qp/filter-clause->predicate (if (= filter-type :between) + (rewrite-between clause) + clause)))) (defn apply-filter "Apply custom generic SQL filter. This is the place to perform query rewrites." - [_ korma-form {clause :filter}] - (k/where korma-form (resolve-subclauses clause))) + [_ honeysql-form {clause :filter}] + (h/where honeysql-form (filter-clause->predicate clause))) (defn execute-query "Execute a query against Crate database. @@ -47,7 +44,7 @@ (into [sql] params) sql)] (let [[columns & rows] (jdbc/query t-conn statement, :identifiers identity, :as-arrays? true)] - {:rows rows + {:rows (or rows []) :columns columns})))) (catch java.sql.SQLException e (let [^String message (or (->> (.getMessage e) ; error message comes back like 'Column "ZID" not found; SQL statement: ... 
[error-code]' sometimes diff --git a/src/metabase/driver/crate/util.clj b/src/metabase/driver/crate/util.clj index 5ca314e39b8fa4efaed17a08264c1e2b3b45a50b..8d9c014cfaae4a034eb863e8cf935976c51ab948 100644 --- a/src/metabase/driver/crate/util.clj +++ b/src/metabase/driver/crate/util.clj @@ -1,45 +1,51 @@ (ns metabase.driver.crate.util (:refer-clojure :exclude [second]) - (:require [korma.core :as k] - [korma.sql.utils :as kutils] + (:require (honeysql [core :as hsql] + [format :as hformat]) [metabase.driver.generic-sql.query-processor :as qp] [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) (:import java.sql.Timestamp)) +;; register the try_cast function with HoneySQL +;; (hsql/format (hsql/call :crate-try-cast :TIMESTAMP :field)) -> "try_cast(field as TIMESTAMP)" +(defmethod hformat/fn-handler "crate-try-cast" [_ klass expr] + (str "try_cast(" (hformat/to-sql expr) " as " (name klass) ")")) + (defn unix-timestamp->timestamp "Converts datetime string to a valid timestamp" [_ expr seconds-or-milliseconds] (case seconds-or-milliseconds - :seconds (recur nil (kx/* expr 1000) :milliseconds) - :milliseconds (kutils/func (str "TRY_CAST(%s as TIMESTAMP)") [expr]))) + :seconds (recur nil (hx/* expr 1000) :milliseconds) + :milliseconds (hsql/call :crate-try-cast :TIMESTAMP expr))) (defn- date-trunc "date_trunc('interval', timezone, timestamp): truncates a timestamp to a given interval" [unit expr] (let [timezone (get-in qp/*query* [:settings :report-timezone])] (if (nil? timezone) - (k/sqlfn :DATE_TRUNC (kx/literal unit) expr) - (k/sqlfn :DATE_TRUNC (kx/literal unit) timezone expr)))) + (hsql/call :date_trunc (hx/literal unit) expr) + (hsql/call :date_trunc (hx/literal unit) timezone expr)))) (defn- date-format "date_format('format_string', timezone, timestamp): formats the timestamp as string" [fmt expr] (let [timezone (get-in qp/*query* [:settings :report-timezone])] (if (nil? timezone) - (k/sqlfn :DATE_FORMAT fmt expr) - (k/sqlfn :DATE_FORMAT fmt timezone expr)))) + (hsql/call :date_format fmt expr) + (hsql/call :date_format fmt timezone expr)))) (defn- extract "extract(field from expr): extracts subfields of a timestamp" [unit expr] - (case unit + (if-not (= unit :day_of_week) + (hsql/call :extract unit expr) ;; Crate DOW starts with Monday (1) to Sunday (7) - :day_of_week (kx/+ (kx/mod (kutils/func (format "EXTRACT(%s FROM %%s)" (name unit)) [expr]) 7) 1) - (kutils/func (format "EXTRACT(%s FROM %%s)" (name unit)) [expr]))) + (hx/+ (hx/mod (hsql/call :extract unit expr) + 7) + 1))) -(def ^:private extract-integer - (comp kx/->integer extract)) +(def ^:private extract-integer (comp hx/->integer extract)) (def ^:private ^:const second 1000) (def ^:private ^:const minute (* 60 second)) @@ -53,7 +59,7 @@ "ISQLDriver `date` implementation" [_ unit expr] (let [v (if (instance? 
Timestamp expr) - (kx/literal (u/date->iso-8601 expr)) + (hx/literal (u/date->iso-8601 expr)) expr)] (case unit :default (date-format (str "%Y-%m-%d %H:%i:%s") v) @@ -67,7 +73,7 @@ :day-of-month (extract-integer :day_of_month v) :day-of-year (extract-integer :day_of_year v) ;; Crate weeks start on Monday, so shift this date into the proper bucket and then decrement the resulting day - :week (date-format (str "%Y-%m-%d") (kx/- (date-trunc :week (kx/+ v day)) day)) + :week (date-format (str "%Y-%m-%d") (hx/- (date-trunc :week (hx/+ v day)) day)) :week-of-year (extract-integer :week v) :month (date-format (str "%Y-%m-%d") (date-trunc :month v)) :month-of-year (extract-integer :month v) @@ -76,17 +82,17 @@ :year (extract-integer :year v)))) (defn- sql-interval [unit amount] - (format "CURRENT_TIMESTAMP + %d" (* unit amount))) + (format "current_timestamp + %d" (* unit amount))) (defn date-interval "defines the sql command required for date-interval calculation" [_ unit amount] (case unit - :quarter (recur nil :month (kx/* amount 3)) - :year (k/raw (sql-interval year amount)) - :month (k/raw (sql-interval month amount)) - :week (k/raw (sql-interval week amount)) - :day (k/raw (sql-interval day amount)) - :hour (k/raw (sql-interval hour amount)) - :minute (k/raw (sql-interval minute amount)) - :second (k/raw (sql-interval second amount)))) + :quarter (recur nil :month (hx/* amount 3)) + :year (hsql/raw (sql-interval year amount)) + :month (hsql/raw (sql-interval month amount)) + :week (hsql/raw (sql-interval week amount)) + :day (hsql/raw (sql-interval day amount)) + :hour (hsql/raw (sql-interval hour amount)) + :minute (hsql/raw (sql-interval minute amount)) + :second (hsql/raw (sql-interval second amount)))) diff --git a/src/metabase/driver/druid/query_processor.clj b/src/metabase/driver/druid/query_processor.clj index 87bbadcc680098503d18f30f5f6db9d67993650d..d285fbf1cb67836c46cc44fe2508d6b5bd6b83da 100644 --- a/src/metabase/driver/druid/query_processor.clj +++ b/src/metabase/driver/druid/query_processor.clj @@ -112,11 +112,11 @@ (defn- ag:doubleMax [field] (case (dimension-or-metric? field) - :metric {:type :doubleMin - :name :min + :metric {:type :doubleMax + :name :max :fieldName (->rvalue field)} :dimension {:type :javascript - :name :min + :name :max :fieldNames [(->rvalue field)] :fnReset "function() { return Number.MIN_VALUE ; }" :fnAggregate "function(current, x) { return Math.max(current, (parseFloat(x) || Number.MIN_VALUE)); }" diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj index c94040026599aecefa1539b4070ae32f58f5f287..d818d5c08602c8996d8b3825e36fe2dde1a023ac 100644 --- a/src/metabase/driver/generic_sql.clj +++ b/src/metabase/driver/generic_sql.clj @@ -1,7 +1,10 @@ (ns metabase.driver.generic-sql (:require [clojure.java.jdbc :as jdbc] + [clojure.math.numeric-tower :as math] [clojure.set :as set] [clojure.tools.logging :as log] + (honeysql [core :as hsql] + [format :as hformat]) (korma [core :as k] [db :as kdb]) [metabase.driver :as driver] @@ -10,7 +13,7 @@ (metabase.models [field :as field] [table :as table]) [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) (:import java.sql.DatabaseMetaData java.util.Map clojure.lang.Keyword @@ -18,8 +21,6 @@ (metabase.query_processor.interface Field Value) (clojure.lang PersistentVector))) -(declare korma-entity) - (defprotocol ISQLDriver "Methods SQL-based drivers should implement in order to use `IDriverSQLDefaultsMixin`. 
Methods marked *OPTIONAL* have default implementations in `ISQLDriverDefaultsMixin`." @@ -32,14 +33,14 @@ ;; The following apply-* methods define how the SQL Query Processor handles given query clauses. Each method is called when a matching clause is present ;; in QUERY, and should return an appropriately modified version of KORMA-QUERY. Most drivers can use the default implementations for all of these methods, ;; but some may need to override one or more (e.g. SQL Server needs to override the behavior of `apply-limit`, since T-SQL uses `TOP` instead of `LIMIT`). - (apply-aggregation [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-breakout [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-fields [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-filter [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-join-tables [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-limit [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-order-by [this korma-query, ^Map query] "*OPTIONAL*.") - (apply-page [this korma-query, ^Map query] "*OPTIONAL*.") + (apply-aggregation [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-breakout [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-fields [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-filter [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-join-tables [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-limit [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-order-by [this honeysql-form, ^Map query] "*OPTIONAL*.") + (apply-page [this honeysql-form, ^Map query] "*OPTIONAL*.") (column->base-type ^clojure.lang.Keyword [this, ^Keyword column-type] "Given a native DB column type, return the corresponding `Field` `base-type`.") @@ -52,7 +53,7 @@ "Given a `Database` DETAILS-MAP, return a JDBC connection spec.") (current-datetime-fn [this] - "*OPTIONAL*. Korma form that should be used to get the current `DATETIME` (or equivalent). Defaults to `(k/sqlfn* :NOW)`.") + "*OPTIONAL*. Korma form that should be used to get the current `DATETIME` (or equivalent). Defaults to `:%now`.") (date [this, ^Keyword unit, field-or-value] "Return a korma form for truncating a date or timestamp field or value to a given resolution, or extracting a date component.") @@ -60,6 +61,11 @@ (excluded-schemas ^java.util.Set [this] "*OPTIONAL*. Set of string names of schemas to skip syncing tables from.") + (field-percent-urls [this field] + "*OPTIONAL*. Implementation of the `:field-percent-urls-fn` to be passed to `make-analyze-table`. + The default implementation is `fast-field-percent-urls`, which avoids a full table scan. Substitue this with `slow-field-percent-urls` for databases + where this doesn't work, such as SQL Server") + (field->alias ^String [this, ^Field field] "*OPTIONAL*. Return the alias that should be used to for FIELD, i.e. in an `AS` clause. The default implementation calls `name`, which returns the *unqualified* name of `Field`. @@ -71,6 +77,13 @@ is eventually passed as a parameter in a prepared statement. Drivers such as BigQuery that don't support prepared statements can skip this behavior by returning a korma `raw` form instead, or other drivers can perform custom type conversion as appropriate.") + (quote-style ^clojure.lang.Keyword [this] + "*OPTIONAL*. Return the quoting style that should be used by [HoneySQL](https://github.com/jkk/honeysql) when building a SQL statement. 
+ Defaults to `:ansi`, but other valid options are `:mysql`, `:sqlserver`, `:oracle`, and `:h2` (added in `metabase.util.honeysql-extensions`; + like `:ansi`, but uppercases the result). + + (hsql/format ... :quoting (quote-style driver))") + (set-timezone-sql ^String [this] "*OPTIONAL*. This should be a prepared JDBC SQL statement string to be used to set the timezone for the current transaction. @@ -79,13 +92,17 @@ (stddev-fn ^clojure.lang.Keyword [this] "*OPTIONAL*. Keyword name of the SQL function that should be used to do a standard deviation aggregation. Defaults to `:STDDEV`.") - (string-length-fn ^clojure.lang.Keyword [this] - "Keyword name of the SQL function that should be used to get the length of a string, e.g. `:LENGTH`.") + (string-length-fn ^clojure.lang.Keyword [this, ^Keyword field-key] + "Return a HoneySQL form appropriate for getting the length of a `Field` identified by fully-qualified FIELD-KEY. + An implementation should return something like: + + (hsql/call :length (hx/cast :VARCHAR field-key))") (unix-timestamp->timestamp [this, field-or-value, ^Keyword seconds-or-milliseconds] "Return a korma form appropriate for converting a Unix timestamp integer field or value to an proper SQL `Timestamp`. SECONDS-OR-MILLISECONDS refers to the resolution of the int in question and with be either `:seconds` or `:milliseconds`.")) +;; This does something important for the Crate driver, apparently (what?) (extend-protocol jdbc/IResultSetReadColumn (class (object-array [])) (result-set-read-column [x _ _] (PersistentVector/adopt x))) @@ -142,15 +159,12 @@ (defn escape-field-name "Escape dots in a field name so Korma doesn't get confused and separate them. Returns a keyword." ^clojure.lang.Keyword [k] - (keyword (kx/escape-name (name k)))) + (keyword (hx/escape-dots (name k)))) (defn- can-connect? [driver details] (let [connection (connection-details->spec driver details)] - (= 1 (-> (k/exec-raw connection "SELECT 1" :results) - first - vals - first)))) + (= 1 (first (vals (first (jdbc/query connection ["SELECT 1"]))))))) (defn pattern-based-column->base-type "Return a `column->base-type` function that matches types based on a sequence of pattern / base-type pairs." @@ -162,57 +176,114 @@ (re-find pattern column-type) base-type (seq more) (recur more)))))) + +(defn honeysql-form->sql+args + "Convert HONEYSQL-FORM to a vector of SQL string and params, like you'd pass to JDBC." + [driver honeysql-form] + {:pre [(map? honeysql-form)]} + (binding [hformat/*subquery?* false] + (hsql/format honeysql-form + :quoting (quote-style driver) + :allow-dashed-names? true))) + +(defn- qualify+escape ^clojure.lang.Keyword + ([table] + (hx/qualify-and-escape-dots (:schema table) (:name table))) + ([table field] + (hx/qualify-and-escape-dots (:schema table) (:name table) (:name field)))) + +(defn- query + "Execute a HONEYSQL-FROM query against DATABASE, DRIVER, and optionally TABLE." + ([driver database honeysql-form] + (jdbc/query (db->jdbc-connection-spec database) + (honeysql-form->sql+args driver honeysql-form))) + ([driver database table honeysql-form] + (query driver database (merge {:from [(qualify+escape table)]} + honeysql-form)))) + + (defn- field-values-lazy-seq [driver field] - (let [table (field/table field) - name-components (field/qualified-name-components field) - transform-fn (if (contains? 
#{:TextField :CharField} (:base_type field)) - u/jdbc-clob->str - identity) - - field-k (keyword (:name field)) - select* (-> (k/select* (korma-entity table)) - (k/fields (escape-field-name field-k))) - fetch-one-page (fn [page-num] - (for [row (k/exec (apply-page driver select* {:page {:items driver/field-values-lazy-seq-chunk-size, :page page-num}}))] - (transform-fn (row field-k)))) + (let [table (field/table field) + db (table/database table) + field-k (qualify+escape table field) + pk-field (field/Field (table/pk-field-id table)) + pk-field-k (when pk-field + (qualify+escape table pk-field)) + transform-fn (if (contains? #{:TextField :CharField} (:base_type field)) + u/jdbc-clob->str + identity) + select* {:select [[field-k :field]] ; if we don't specify an explicit ORDER BY some DBs like Redshift will return them in a (seemingly) random order + :order-by [[(or pk-field-k field-k) :asc]]} ; try to order by the table's Primary Key to avoid doing full table scans + fetch-one-page (fn [page-num] + (for [{v :field} (query driver db table (apply-page driver select* {:page {:items driver/field-values-lazy-seq-chunk-size + :page (inc page-num)}}))] + (transform-fn v))) ;; This function returns a chunked lazy seq that will fetch some range of results, e.g. 0 - 500, then concat that chunk of results ;; with a recursive call to (lazily) fetch the next chunk of results, until we run out of results or hit the limit. - fetch-page (fn -fetch-page [page-num] - (lazy-seq - (let [results (fetch-one-page page-num) - total-items-fetched (* (inc page-num) driver/field-values-lazy-seq-chunk-size)] - (concat results (when (and (seq results) - (< total-items-fetched driver/max-sync-lazy-seq-results) - (= (count results) driver/field-values-lazy-seq-chunk-size)) - (-fetch-page (inc page-num)))))))] + fetch-page (fn -fetch-page [page-num] + (lazy-seq + (let [results (fetch-one-page page-num) + total-items-fetched (* (inc page-num) driver/field-values-lazy-seq-chunk-size)] + (concat results (when (and (seq results) + (< total-items-fetched driver/max-sync-lazy-seq-results) + (= (count results) driver/field-values-lazy-seq-chunk-size)) + (-fetch-page (inc page-num)))))))] (fetch-page 0))) -(defn- table-rows-seq [_ database table] - (k/select (korma-entity database table))) - -(defn- field-avg-length [driver field] - (or (some-> (korma-entity (field/table field)) - (k/select (k/aggregate (avg (k/sqlfn* (string-length-fn driver) - ;; TODO: multi-byte data on postgres causes exception - (kx/cast :CHAR (escape-field-name (:name field))))) - :len)) - first - :len - int) - 0)) - -(defn- field-percent-urls [_ field] - (or (let [korma-table (korma-entity (field/table field))] - (when-let [total-non-null-count (:count (first (k/select korma-table - (k/aggregate (count (k/raw "*")) :count) - (k/where {(escape-field-name (:name field)) [not= nil]}))))] - (when (> total-non-null-count 0) - (when-let [url-count (:count (first (k/select korma-table - (k/aggregate (count (k/raw "*")) :count) - (k/where {(escape-field-name (:name field)) [like "http%://_%.__%"]}))))] - (float (/ url-count total-non-null-count)))))) - 0.0)) + +(defn- table-rows-seq [driver database table] + (let [pk-field (field/Field (table/pk-field-id table))] + (query driver database table (merge {:select [:*]} + (when pk-field + {:order-by [[(qualify+escape table pk-field) :asc]]}))))) + +(defn- field-avg-length + [driver field] + (let [table (field/table field) + db (table/database table)] + (or (some-> (query driver db table {:select [[(hsql/call :avg 
(string-length-fn driver (qualify+escape table field))) :len]]}) + first + :len + math/round + int) + 0))) + +(defn- url-percentage [url-count total-count] + (if (and total-count (> total-count 0) url-count) + (float (/ url-count total-count)) + 0.0)) + +(defn slow-field-percent-urls + "Slow implementation of `field-percent-urls` that (probably) requires a full table scan. + Only use this for DBs where `fast-field-percent-urls` doesn't work correctly, like SQLServer." + [driver field] + (let [table (field/table field) + db (table/database table) + field-k (qualify+escape table field) + total-count (:count (first (query driver db table {:select [[:%count.* :count]] + :where [:not= field-k nil]}))) + url-count (:count (first (query driver db table {:select [[:%count.* :count]] + :where [:like field-k (hx/literal "http%://_%.__%")]})))] + (url-percentage url-count total-count))) + + +(defn fast-field-percent-urls + "Fast, default implementation of `field-percent-urls` that avoids a full table scan." + [driver field] + (let [table (field/table field) + db (table/database table) + field-k (qualify+escape table field) + pk-field (field/Field (table/pk-field-id table)) + results (map :is_url (query driver db table (merge {:select [[(hsql/call :like field-k (hx/literal "http%://_%.__%")) :is_url]] + :where [:not= field-k nil] + :limit driver/max-sync-lazy-seq-results} + (when pk-field + {:order-by [[(qualify+escape table pk-field) :asc]]})))) + total-count (count results) + url-count (count (filter #(or (true? %) (= % 1)) results))] + (url-percentage url-count total-count))) + (defn features "Default implementation of `IDriver` `features` for SQL drivers." @@ -304,8 +375,8 @@ "Default implementation of `analyze-table` for SQL drivers." [driver table new-table-ids] ((analyze/make-analyze-table driver - :field-avg-length-fn (partial field-avg-length driver) - :field-percent-urls-fn (partial field-percent-urls driver)) + :field-avg-length-fn (partial field-avg-length driver) + :field-percent-urls-fn (partial field-percent-urls driver)) driver table new-table-ids)) @@ -325,10 +396,12 @@ :apply-order-by (resolve 'metabase.driver.generic-sql.query-processor/apply-order-by) :apply-page (resolve 'metabase.driver.generic-sql.query-processor/apply-page) :column->special-type (constantly nil) - :current-datetime-fn (constantly (k/sqlfn* :NOW)) + :current-datetime-fn (constantly :%now) :excluded-schemas (constantly nil) :field->alias (u/drop-first-arg name) + :field-percent-urls fast-field-percent-urls :prepare-value (u/drop-first-arg :value) + :quote-style (constantly :ansi) :set-timezone-sql (constantly nil) :stddev-fn (constantly :STDDEV)}) @@ -354,21 +427,34 @@ ;;; ### Util Fns +(defn create-db + "Like `korma.db/create-db`, but adds a fn to unescape escaped dots when generating SQL." + [spec] + (update-in (kdb/create-db spec) [:options :naming :fields] comp hx/unescape-dots)) + + (defn- db->korma-db "Return a Korma DB spec for Metabase DATABASE." [{:keys [details engine], :as database}] (let [spec (connection-details->spec (driver/engine->driver engine) details)] - (assoc (kx/create-db spec) + (assoc (create-db spec) :pool (db->jdbc-connection-spec database)))) +(defn create-entity + "Like `korma.db/create-entity`, but takes a sequence of name components instead; escapes dots in names as well." + [name-components] + (k/create-entity (apply str (interpose "." (for [s name-components + :when (seq s)] + (name (hx/escape-dots (name s)))))))) + (defn korma-entity "Return a Korma entity for [DB and] TABLE. 
- (-> (sel :one Table :id 100) + (-> (Table :id 100) korma-entity (select (aggregate (count :*) :count)))" ([table] (korma-entity (table/database table) table)) ([db table] (let [{schema :schema, table-name :name} table] (k/database - (kx/create-entity [schema table-name]) + (create-entity [schema table-name]) (db->korma-db db))))) diff --git a/src/metabase/driver/generic_sql/query_processor.clj b/src/metabase/driver/generic_sql/query_processor.clj index f1efe68c09d1e5041149db72af1ec473e36f3272..1acb0201b0084b8ee74228131af659b58699efd6 100644 --- a/src/metabase/driver/generic_sql/query_processor.clj +++ b/src/metabase/driver/generic_sql/query_processor.clj @@ -1,23 +1,19 @@ (ns metabase.driver.generic-sql.query-processor - "The Query Processor is responsible for translating the Metabase Query Language into korma SQL forms." + "The Query Processor is responsible for translating the Metabase Query Language into HoneySQL SQL forms." (:require [clojure.java.jdbc :as jdbc] (clojure [string :as s] [walk :as walk]) [clojure.tools.logging :as log] - [clj-time.coerce :as tc] - [clj-time.core :as t] - [clj-time.format :as tf] - (korma [core :as k] - [db :as kdb]) - (korma.sql [engine :as kengine] - [fns :as kfns]) + (honeysql [core :as hsql] + [format :as hformat] + [helpers :as h]) (metabase [config :as config] [driver :as driver]) [metabase.driver.generic-sql :as sql] [metabase.query-processor :as qp] metabase.query-processor.interface [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) (:import java.sql.Timestamp java.util.Date (metabase.query_processor.interface AgFieldRef @@ -33,7 +29,12 @@ "The outer query currently being processed." nil) -(defn- driver [] (:driver *query*)) +(defn- driver [] {:pre [(map? *query*)]} (:driver *query*)) + +;; register the function "distinct-count" with HoneySQL +;; (hsql/format :%distinct-count.x) -> "count(distinct x)" +(defmethod hformat/fn-handler "distinct-count" [_ field] + (str "count(distinct " (hformat/to-sql field) ")")) ;;; ## Formatting @@ -42,7 +43,7 @@ "Generate a FORM `AS` FIELD alias using the name information of FIELD." [form field] (if-let [alias (sql/field->alias (driver) field)] - [form alias] + [form (hx/qualify-and-escape-dots alias)] form)) ;; TODO - Consider moving this into query processor interface and making it a method on `ExpressionRef` instead ? @@ -54,7 +55,7 @@ (defprotocol ^:private IGenericSQLFormattable (formatted [this] - "Return an appropriate korma form for an object.")) + "Return an appropriate HoneySQL form for an object.")) (extend-protocol IGenericSQLFormattable nil (formatted [_] nil) @@ -63,12 +64,7 @@ Expression (formatted [{:keys [operator args]}] - (apply (case operator - :+ kx/+ - :- kx/- - :* kx/* - :/ kx// - :lower (partial k/sqlfn* :LOWER)) + (apply (partial hsql/call operator) (map formatted args))) ExpressionRef @@ -78,7 +74,7 @@ Field (formatted [{:keys [schema-name table-name special-type field-name]}] - (let [field (keyword (kx/combine+escape-name-components [schema-name table-name field-name]))] + (let [field (keyword (hx/qualify-and-escape-dots schema-name table-name field-name))] (case special-type :timestamp_seconds (sql/unix-timestamp->timestamp (driver) field :seconds) :timestamp_milliseconds (sql/unix-timestamp->timestamp (driver) field :milliseconds) @@ -117,130 +113,114 @@ ;;; ## Clause Handlers (defn apply-aggregation - "Apply an `aggregation` clause to KORMA-FORM. Default implementation of `apply-aggregation` for SQL drivers." 
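;; Illustrative sketch (not itself a hunk of this patch; `hsql`/`h` are the aliases required at the
;; top of this namespace, and :venues is a hypothetical table): because the HoneySQL form is a plain
;; map, an aggregation clause is just one more `:select` entry merged in with `h/merge-select`:
;;
;;   (h/merge-select {} [:%count.* :count])
;;   ;; => {:select [[:%count.* :count]]}
;;
;;   (hsql/format {:select [[:%count.* :count]], :from [:venues]})
;;   ;; => ["SELECT count(*) AS count FROM venues"]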
- ([driver korma-form {{:keys [aggregation-type field]} :aggregation}] - (apply-aggregation driver korma-form aggregation-type (formatted field))) - - ([driver korma-form aggregation-type field] - (if-not field - ;; aggregation clauses w/o a Field - (do (assert (= aggregation-type :count)) - (k/aggregate korma-form (count (k/raw "*")) :count)) - ;; aggregation clauses with a Field - (case aggregation-type - :avg (k/aggregate korma-form (avg field) :avg) - :count (k/aggregate korma-form (count field) :count) - :distinct (k/aggregate korma-form (count (k/sqlfn :DISTINCT field)) :count) ; why not call it :distinct? This complicates things - :stddev (k/fields korma-form [(k/sqlfn* (sql/stddev-fn driver) field) :stddev]) - :sum (k/aggregate korma-form (sum field) :sum) - :min (k/aggregate korma-form (min field) :min) - :max (k/aggregate korma-form (max field) :max))))) + "Apply an `aggregation` clause to HONEYSQL-FORM. Default implementation of `apply-aggregation` for SQL drivers." + ([driver honeysql-form {{:keys [aggregation-type field]} :aggregation}] + (apply-aggregation driver honeysql-form aggregation-type (formatted field))) + + ([driver honeysql-form aggregation-type field] + (h/merge-select honeysql-form [(if-not field + ;; aggregation clauses w/o a field + (do (assert (= aggregation-type :count)) + :%count.*) + ;; aggregation clauses w/ a Field + (hsql/call (case aggregation-type + :avg :avg + :count :count + :distinct :distinct-count + :stddev (sql/stddev-fn driver) + :sum :sum + :min :min + :max :max) + field)) + (if (= aggregation-type :distinct) + :count + aggregation-type)]))) (defn apply-breakout - "Apply a `breakout` clause to KORMA-FORM. Default implementation of `apply-breakout` for SQL drivers." - [_ korma-form {breakout-fields :breakout, fields-fields :fields}] - (-> korma-form + "Apply a `breakout` clause to HONEYSQL-FORM. Default implementation of `apply-breakout` for SQL drivers." + [_ honeysql-form {breakout-fields :breakout, fields-fields :fields}] + (-> honeysql-form ;; Group by all the breakout fields - ((partial apply k/group) (map formatted breakout-fields)) - ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or korma will barf - ((partial apply k/fields) (for [field breakout-fields - :when (not (contains? (set fields-fields) field))] - (as (formatted field) field))))) + ((partial apply h/group) (map formatted breakout-fields)) + ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or HoneySQL will barf + ((partial apply h/merge-select) (for [field breakout-fields + :when (not (contains? (set fields-fields) field))] + (as (formatted field) field))))) (defn apply-fields - "Apply a `fields` clause to KORMA-FORM. Default implementation of `apply-fields` for SQL drivers." - [_ korma-form {fields :fields}] - (apply k/fields korma-form (for [field fields] - (as (formatted field) field)))) + "Apply a `fields` clause to HONEYSQL-FORM. Default implementation of `apply-fields` for SQL drivers." + [_ honeysql-form {fields :fields}] + (apply h/merge-select honeysql-form (for [field fields] + (as (formatted field) field)))) (defn filter-subclause->predicate - "Given a filter SUBCLAUSE, return a Korma filter predicate form for use in korma `where`." + "Given a filter SUBCLAUSE, return a HoneySQL filter predicate form for use in HoneySQL `where`." [{:keys [filter-type field value], :as filter}] {:pre [(map? 
filter) field]} (let [field (formatted field)] - {field (case filter-type - :between ['between [(formatted (:min-val filter)) (formatted (:max-val filter))]] - :starts-with ['like (formatted (update value :value (fn [s] (str s \%)))) ] - :contains ['like (formatted (update value :value (fn [s] (str \% s \%))))] - :ends-with ['like (formatted (update value :value (fn [s] (str \% s))))] - :> ['> (formatted value)] - :< ['< (formatted value)] - :>= ['>= (formatted value)] - :<= ['<= (formatted value)] - := ['= (formatted value)] - :!= ['not= (formatted value)])})) + (case filter-type + :between [:between field (formatted (:min-val filter)) (formatted (:max-val filter))] + :starts-with [:like field (formatted (update value :value (fn [s] (str s \%)))) ] + :contains [:like field (formatted (update value :value (fn [s] (str \% s \%))))] + :ends-with [:like field (formatted (update value :value (fn [s] (str \% s))))] + :> [:> field (formatted value)] + :< [:< field (formatted value)] + :>= [:>= field (formatted value)] + :<= [:<= field (formatted value)] + := [:= field (formatted value)] + :!= [:not= field (formatted value)]))) (defn filter-clause->predicate - "Given a filter CLAUSE, return a Korma filter predicate form for use in korma `where`. If this is a compound - clause then we call `filter-subclause->predicate` on all of the subclauses." + "Given a filter CLAUSE, return a HoneySQL filter predicate form for use in HoneySQL `where`. + If this is a compound clause then we call `filter-subclause->predicate` on all of the subclauses." [{:keys [compound-type subclause subclauses], :as clause}] (case compound-type - :and (apply kfns/pred-and (map filter-clause->predicate subclauses)) - :or (apply kfns/pred-or (map filter-clause->predicate subclauses)) - :not (kfns/pred-not (kengine/pred-map (filter-subclause->predicate subclause))) + :and (apply vector :and (map filter-clause->predicate subclauses)) + :or (apply vector :or (map filter-clause->predicate subclauses)) + :not [:not (filter-subclause->predicate subclause)] nil (filter-subclause->predicate clause))) (defn apply-filter - "Apply a `filter` clause to KORMA-FORM. Default implementation of `apply-filter` for SQL drivers." - [_ korma-form {clause :filter}] - (k/where korma-form (filter-clause->predicate clause))) + "Apply a `filter` clause to HONEYSQL-FORM. Default implementation of `apply-filter` for SQL drivers." + [_ honeysql-form {clause :filter}] + (h/where honeysql-form (filter-clause->predicate clause))) (defn apply-join-tables - "Apply expanded query `join-tables` clause to KORMA-FORM. Default implementation of `apply-join-tables` for SQL drivers." - [_ korma-form {join-tables :join-tables, {source-table-name :name, source-schema :schema} :source-table}] - (loop [korma-form korma-form, [{:keys [table-name pk-field source-field schema]} & more] join-tables] - (let [table-name (if (seq schema) - (str schema \. table-name) - table-name) - source-table-name (if (seq source-schema) - (str source-schema \. source-table-name) - source-table-name) - korma-form (k/join korma-form table-name - (= (keyword (str source-table-name \. (:field-name source-field))) - (keyword (str table-name \. (:field-name pk-field)))))] + "Apply expanded query `join-tables` clause to HONEYSQL-FORM. Default implementation of `apply-join-tables` for SQL drivers." 
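;; Illustrative sketch (not itself a hunk of this patch; :checkins/:venues are hypothetical tables):
;; `h/merge-left-join` just appends a table plus an ON predicate to the form's `:left-join` entry,
;; which HoneySQL then renders as a LEFT JOIN:
;;
;;   (h/merge-left-join {} :venues [:= :checkins.venue_id :venues.id])
;;   ;; => {:left-join [:venues [:= :checkins.venue_id :venues.id]]}
;;
;;   (hsql/format {:select [:*], :from [:checkins],
;;                 :left-join [:venues [:= :checkins.venue_id :venues.id]]})
;;   ;; => ["SELECT * FROM checkins LEFT JOIN venues ON checkins.venue_id = venues.id"]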
+ [_ honeysql-form {join-tables :join-tables, {source-table-name :name, source-schema :schema} :source-table}] + (loop [honeysql-form honeysql-form, [{:keys [table-name pk-field source-field schema]} & more] join-tables] + (let [honeysql-form (h/merge-left-join honeysql-form + (hx/qualify-and-escape-dots schema table-name) + [:= (hx/qualify-and-escape-dots source-schema source-table-name (:field-name source-field)) + (hx/qualify-and-escape-dots schema table-name (:field-name pk-field))])] (if (seq more) - (recur korma-form more) - korma-form)))) + (recur honeysql-form more) + honeysql-form)))) (defn apply-limit - "Apply `limit` clause to KORMA-FORM. Default implementation of `apply-limit` for SQL drivers." - [_ korma-form {value :limit}] - (k/limit korma-form value)) + "Apply `limit` clause to HONEYSQL-FORM. Default implementation of `apply-limit` for SQL drivers." + [_ honeysql-form {value :limit}] + (h/limit honeysql-form value)) (defn apply-order-by - "Apply `order-by` clause to KORMA-FORM. Default implementation of `apply-order-by` for SQL drivers." - [_ korma-form {subclauses :order-by}] - (loop [korma-form korma-form, [{:keys [field direction]} & more] subclauses] - (let [korma-form (k/order korma-form (formatted field) (case direction - :ascending :ASC - :descending :DESC))] + "Apply `order-by` clause to HONEYSQL-FORM. Default implementation of `apply-order-by` for SQL drivers." + [_ honeysql-form {subclauses :order-by}] + (loop [honeysql-form honeysql-form, [{:keys [field direction]} & more] subclauses] + (let [honeysql-form (h/merge-order-by honeysql-form [(formatted field) (case direction + :ascending :asc + :descending :desc)])] (if (seq more) - (recur korma-form more) - korma-form)))) + (recur honeysql-form more) + honeysql-form)))) (defn apply-page - "Apply `page` clause to KORMA-FORM. Default implementation of `apply-page` for SQL drivers." - [_ korma-form {{:keys [items page]} :page}] - (-> korma-form - (k/limit items) - (k/offset (* items (dec page))))) - -(defn- should-log-korma-form? [] - (and (config/config-bool :mb-db-logging) - (not qp/*disable-qp-logging*))) - -(defn pprint-korma-form - "Removing empty/`nil` kv pairs from KORMA-FORM and strip ns qualifiers (e.g. `(keyword (name :korma.sql.utils/func))` -> `:func`)." - [korma-form] - (u/pprint-to-str (walk/postwalk (fn [x] (cond - (keyword? x) (keyword (name x)) ; strip off ns qualifiers from keywords - (fn? x) (class x) - :else x)) - (into {} (for [[k v] (dissoc korma-form :db :ent :from :options :aliases :results :type :alias) - :when (or (not (sequential? v)) - (seq v))] - {k v}))))) + "Apply `page` clause to HONEYSQL-FORM. Default implementation of `apply-page` for SQL drivers." + [_ honeysql-form {{:keys [items page]} :page}] + (-> honeysql-form + (h/limit items) + (h/offset (* items (dec page))))) +;; TODO - not sure "pprint" is an appropriate name for this since this function doesn't print anything (defn pprint-sql "Add newlines to the SQL to make it more readable." [sql] @@ -255,87 +235,97 @@ (s/replace #"\sAND\s" "\n AND ") (s/replace #"\sOR\s" "\n OR ")))) -(defn log-korma-form - "Log a korma form and the SQL it corresponds to logging is enabled." - ([korma-form] - (when (should-log-korma-form?) - (log-korma-form korma-form (try (k/as-sql korma-form) - (catch Throwable e - (log/error (u/format-color 'red "Invalid korma form: %s" (.getMessage e)))))))) - - ([korma-form, ^String sql] - (when (should-log-korma-form?) 
- (log/debug (u/format-color 'green "\nKORMA FORM: 😋\n%s" (pprint-korma-form korma-form)) - (u/format-color 'blue "\nSQL: 😈\n%s\n" (pprint-sql sql)))))) - +;; TODO - make this a protocol method ? +(defn- apply-source-table [_ honeysql-form {{table-name :name, schema :schema} :source-table}] + {:pre [table-name]} + (h/from honeysql-form (hx/qualify-and-escape-dots schema table-name))) (def ^:private clause-handlers - {:aggregation #'sql/apply-aggregation ; use the vars rather than the functions themselves because them implementation - :breakout #'sql/apply-breakout ; will get swapped around and we'll be left with old version of the function that nobody implements - :fields #'sql/apply-fields - :filter #'sql/apply-filter - :join-tables #'sql/apply-join-tables - :limit #'sql/apply-limit - :order-by #'sql/apply-order-by - :page #'sql/apply-page}) + {:aggregation #'sql/apply-aggregation ; use the vars rather than the functions themselves because them implementation + :breakout #'sql/apply-breakout ; will get swapped around and we'll be left with old version of the function that nobody implements + :fields #'sql/apply-fields + :filter #'sql/apply-filter + :join-tables #'sql/apply-join-tables + :limit #'sql/apply-limit + :order-by #'sql/apply-order-by + :page #'sql/apply-page + :source-table apply-source-table}) (defn- apply-clauses "Loop through all the `clause->handler` entries; if the query contains a given clause, apply the handler fn." - [driver korma-form query] - (loop [korma-form korma-form, [[clause f] & more] (seq clause-handlers)] - (let [korma-form (if (clause query) - (f driver korma-form query) - korma-form)] + [driver honeysql-form query] + (loop [honeysql-form honeysql-form, [[clause f] & more] (seq clause-handlers)] + (let [honeysql-form (if (clause query) + (f driver honeysql-form query) + honeysql-form)] (if (seq more) - (recur korma-form more) - korma-form)))) + (recur honeysql-form more) + honeysql-form)))) -(defn build-korma-form - "Build the korma form we will call `k/exec` on." - [driver {inner-query :query :as outer-query} entity] - (binding [*query* outer-query] - (apply-clauses driver (k/select* entity) inner-query))) +(defn build-honeysql-form + "Build the HoneySQL form we will compile to SQL and execute." + [driverr {inner-query :query}] + {:pre [(map? inner-query)]} + (apply-clauses driverr {} inner-query)) (defn mbql->native "Transpile MBQL query into a native SQL statement." - [driver {{:keys [source-table]} :query, database :database, :as outer-query}] - (let [entity ((resolve 'metabase.driver.generic-sql/korma-entity) database source-table) - korma-form (build-korma-form driver outer-query entity) - form-with-sql (kengine/bind-query korma-form (kengine/->sql korma-form))] - {:query (:sql-str form-with-sql) - :params (:params form-with-sql)})) + [driver {inner-query :query, database :database, :as outer-query}] + (binding [*query* outer-query] + (let [honeysql-form (build-honeysql-form driver outer-query) + [sql & args] (sql/honeysql-form->sql+args driver honeysql-form)] + {:query (str "-- " (qp/query->remark outer-query) "\n" sql) + :params args}))) + + +(defn- maybe-set-timezone! + "Set the timezone if applicable, catching exceptions if it fails." 
+ [driver settings connection] + (when-let [timezone (:report-timezone settings)] + (log/debug (u/format-color 'green "%s" (sql/set-timezone-sql driver))) + (try (jdbc/db-do-prepared connection (sql/set-timezone-sql driver) [timezone]) + (catch Throwable e + (log/error (u/format-color 'red "Failed to set timezone: %s" (.getMessage e))))))) + +(defn- run-query + "Run the query itself." + [{sql :query, params :params} connection] + (let [sql (hx/unescape-dots sql) + statement (into [sql] params) + [columns & rows] (jdbc/query connection statement, :identifiers identity, :as-arrays? true)] + {:rows (or rows []) + :columns columns})) + +(defn- exception->nice-error-message ^String [^java.sql.SQLException e] + (or (->> (.getMessage e) ; error message comes back like 'Column "ZID" not found; SQL statement: ... [error-code]' sometimes + (re-find #"^(.*);") ; the user already knows the SQL, and error code is meaningless + second) ; so just return the part of the exception that is relevant + (.getMessage e))) + +(defn- do-with-try-catch {:style/indent 0} [f] + (try (f) + (catch java.sql.SQLException e + (throw (Exception. (exception->nice-error-message e)))))) + +(defn- do-with-auto-commit-disabled + "Disable auto-commit for this transaction, that way shady queries are unable to modify the database; execute F in a try-finally block. + In the `finally`, rollback any changes made during this transaction just to be extra-double-sure JDBC doesn't try to commit them automatically for us." + {:style/indent 1} + [{^java.sql.Connection connection :connection}, f] + (.setAutoCommit connection false) + (try (f) + (finally (.rollback connection)))) (defn execute-query "Process and run a native (raw SQL) QUERY." - [driver {:keys [database settings], {sql :query, params :params} :native}] - (try (let [db-conn (sql/db->jdbc-connection-spec database)] - (jdbc/with-db-transaction [t-conn db-conn] - (let [^java.sql.Connection jdbc-connection (:connection t-conn)] - ;; Disable auto-commit for this transaction, that way shady queries are unable to modify the database - (.setAutoCommit jdbc-connection false) - (try - ;; Set the timezone if applicable - (when-let [timezone (:report-timezone settings)] - (log/debug (u/format-color 'green "%s" (sql/set-timezone-sql driver))) - (try (jdbc/db-do-prepared t-conn (sql/set-timezone-sql driver) [timezone]) - (catch Throwable e - (log/error (u/format-color 'red "Failed to set timezone: %s" (.getMessage e)))))) - - ;; Now run the query itself - (let [statement (if params - (into [sql] params) - sql)] - (let [[columns & rows] (jdbc/query t-conn statement, :identifiers identity, :as-arrays? true)] - {:rows rows - :columns columns})) - - ;; Rollback any changes made during this transaction just to be extra-double-sure JDBC doesn't try to commit them automatically for us - (finally (.rollback jdbc-connection)))))) - (catch java.sql.SQLException e - (let [^String message (or (->> (.getMessage e) ; error message comes back like 'Column "ZID" not found; SQL statement: ... [error-code]' sometimes - (re-find #"^(.*);") ; the user already knows the SQL, and error code is meaningless - second) ; so just return the part of the exception that is relevant - (.getMessage e))] - (throw (Exception. 
message)))))) + [driver {:keys [database settings], query :native}] + (do-with-try-catch + (fn [] + (let [db-connection (sql/db->jdbc-connection-spec database)] + (jdbc/with-db-transaction [transaction-connection db-connection] + (do-with-auto-commit-disabled transaction-connection + (fn [] + (maybe-set-timezone! driver settings transaction-connection) + (run-query query transaction-connection)))))))) diff --git a/src/metabase/driver/h2.clj b/src/metabase/driver/h2.clj index 07979d304f2c5b9ef749a314ec03aa26a98f9ffe..e5ab944ffd27ba3617558e58911227488c538bc2 100644 --- a/src/metabase/driver/h2.clj +++ b/src/metabase/driver/h2.clj @@ -1,13 +1,13 @@ (ns metabase.driver.h2 + ;; TODO - This namespace should be reworked to use `u/drop-first-arg` like newer drivers (:require [clojure.string :as s] - (korma [core :as k] - [db :as kdb]) - [korma.sql.utils :as kutils] + [honeysql.core :as hsql] + [korma.db :as kdb] [metabase.db :as db] [metabase.driver :as driver] [metabase.driver.generic-sql :as sql] [metabase.util :as u] - [metabase.util.korma-extensions :as kx])) + [metabase.util.honeysql-extensions :as hx])) (defn- column->base-type [_ column-type] ({:ARRAY :UnknownField @@ -109,10 +109,12 @@ (defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds] - (kutils/func (format "TIMESTAMPADD('%s', %%s, TIMESTAMP '1970-01-01T00:00:00Z')" (case seconds-or-milliseconds - :seconds "SECOND" - :milliseconds "MILLISECOND")) - [expr])) + (hsql/call :timestampadd + (hx/literal (case seconds-or-milliseconds + :seconds "second" + :milliseconds "millisecond")) + expr + (hsql/raw "timestamp '1970-01-01T00:00:00Z'"))) (defn- process-query-in-context [_ qp] @@ -134,25 +136,25 @@ ;; H2 doesn't have date_trunc() we fake it by formatting a date to an appropriate string ;; and then converting back to a date. ;; Format strings are the same as those of SimpleDateFormat. -(defn- format-datetime [format-str expr] (k/sqlfn :FORMATDATETIME expr (kx/literal format-str))) -(defn- parse-datetime [format-str expr] (k/sqlfn :PARSEDATETIME expr (kx/literal format-str))) +(defn- format-datetime [format-str expr] (hsql/call :formatdatetime expr (hx/literal format-str))) +(defn- parse-datetime [format-str expr] (hsql/call :parsedatetime expr (hx/literal format-str))) (defn- trunc-with-format [format-str expr] (parse-datetime format-str (format-datetime format-str expr))) (defn- date [_ unit expr] (case unit :default expr :minute (trunc-with-format "yyyyMMddHHmm" expr) - :minute-of-hour (kx/minute expr) + :minute-of-hour (hx/minute expr) :hour (trunc-with-format "yyyyMMddHH" expr) - :hour-of-day (kx/hour expr) - :day (kx/->date expr) - :day-of-week (k/sqlfn :DAY_OF_WEEK expr) - :day-of-month (k/sqlfn :DAY_OF_MONTH expr) - :day-of-year (k/sqlfn :DAY_OF_YEAR expr) + :hour-of-day (hx/hour expr) + :day (hx/->date expr) + :day-of-week (hsql/call :day_of_week expr) + :day-of-month (hsql/call :day_of_month expr) + :day-of-year (hsql/call :day_of_year expr) :week (trunc-with-format "YYYYww" expr) ; Y = week year; w = week in year - :week-of-year (kx/week expr) + :week-of-year (hx/week expr) :month (trunc-with-format "yyyyMM" expr) - :month-of-year (kx/month expr) + :month-of-year (hx/month expr) ;; Rounding dates to quarters is a bit involved but still doable. Here's the plan: ;; * extract the year and quarter from the date; ;; * convert the quarter (1 - 4) to the corresponding starting month (1, 4, 7, or 10). 
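For the H2 rewrites above, the `hsql/call` / `hsql/raw` combination renders as an ordinary SQL function call with the raw text passed through verbatim. A minimal sketch of the new `unix-timestamp->timestamp` shape, with a made-up column name and `hsql/raw` standing in for `hx/literal` (the project's helper for single-quoted literals):

    (require '[honeysql.core :as hsql])

    (hsql/format
     (hsql/call :timestampadd
                (hsql/raw "'second'")
                :unix_ts
                (hsql/raw "timestamp '1970-01-01T00:00:00Z'")))
    ;; => ["timestampadd('second', unix_ts, timestamp '1970-01-01T00:00:00Z')"]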
@@ -163,19 +165,17 @@ ;; Postgres DATE_TRUNC('quarter', x) ;; becomes PARSEDATETIME(CONCAT(YEAR(x), ((QUARTER(x) * 3) - 2)), 'yyyyMM') :quarter (parse-datetime "yyyyMM" - (kx/concat (kx/year expr) (kx/- (kx/* (kx/quarter expr) + (hx/concat (hx/year expr) (hx/- (hx/* (hx/quarter expr) 3) 2))) - :quarter-of-year (kx/quarter expr) - :year (kx/year expr))) - -(def ^:private now (k/sqlfn :NOW)) + :quarter-of-year (hx/quarter expr) + :year (hx/year expr))) ;; TODO - maybe rename this relative-date ? (defn- date-interval [_ unit amount] (if (= unit :quarter) - (recur nil :month (kx/* amount 3)) - (k/sqlfn :DATEADD (kx/literal (s/upper-case (name unit))) amount now))) + (recur nil :month (hx/* amount 3)) + (hsql/call :dateadd (hx/literal unit) amount :%now))) (defn- humanize-connection-error-message [_ message] @@ -192,6 +192,10 @@ #".*" ; default message)) +(defn- string-length-fn [field-key] + (hsql/call :length field-key)) + + (defrecord H2Driver [] clojure.lang.Named (getName [_] "H2")) @@ -213,7 +217,7 @@ :column->base-type column->base-type :connection-details->spec connection-details->spec :date date - :string-length-fn (constantly :LENGTH) + :string-length-fn (u/drop-first-arg string-length-fn) :unix-timestamp->timestamp unix-timestamp->timestamp})) (driver/register-driver! :h2 (H2Driver.)) diff --git a/src/metabase/driver/mongo/query_processor.clj b/src/metabase/driver/mongo/query_processor.clj index cdbd52db2c702c55dc84ef11092b23e856b91e3f..a6431d97988684bd20d3c25777e7e5359a980605 100644 --- a/src/metabase/driver/mongo/query_processor.clj +++ b/src/metabase/driver/mongo/query_processor.clj @@ -326,7 +326,9 @@ ;;; # process + run -(defn- generate-aggregation-pipeline [query] +(defn- generate-aggregation-pipeline + "Generate the aggregation pipeline. Returns a sequence of maps representing each stage." + [query] (loop [pipeline [], [f & more] [add-initial-projection handle-filter handle-breakout+aggregation diff --git a/src/metabase/driver/mysql.clj b/src/metabase/driver/mysql.clj index 547a063ad04e1e02680aee3270f01b332bbb5858..a0538a0c4605aa2982d0c5b315fc7020606ca122 100644 --- a/src/metabase/driver/mysql.clj +++ b/src/metabase/driver/mysql.clj @@ -1,29 +1,12 @@ (ns metabase.driver.mysql (:require (clojure [set :as set] [string :as s]) - (korma [core :as k] - [db :as kdb] - mysql) - (korma.sql [engine :refer [sql-func]] - [utils :as kutils]) + [honeysql.core :as hsql] + [korma.db :as kdb] [metabase.driver :as driver] [metabase.driver.generic-sql :as sql] [metabase.util :as u] - [metabase.util.korma-extensions :as kx])) - -;;; # Korma 0.4.2 Bug Workaround -;; (Buggy code @ https://github.com/korma/Korma/blob/684178c386df529558bbf82097635df6e75fb339/src/korma/mysql.clj) -;; This looks like it's been fixed upstream but until a new release is available we'll have to hack the function here - -(defn- mysql-count [query v] - (sql-func "COUNT" (if (and (or (instance? clojure.lang.Named v) ; the issue was that name was being called on things that like maps when we tried to get COUNT(DISTINCT(...)) - (string? 
v)) ; which would barf since maps don't implement clojure.lang.Named - (= (name v) "*")) - (kutils/generated "*") - v))) - -(intern 'korma.mysql 'count mysql-count) - + [metabase.util.honeysql-extensions :as hx])) ;;; # IMPLEMENTATION @@ -76,15 +59,16 @@ (-> details (set/rename-keys {:dbname :db}) kdb/mysql - (update :subname (u/rpartial str connection-args-string)))) + (update :subname #(str % connection-args-string (when-not (:ssl details) + "&useSSL=false"))))) ; newer versions of MySQL will complain if you don't explicitly disable SSL (defn- unix-timestamp->timestamp [expr seconds-or-milliseconds] - (k/sqlfn :FROM_UNIXTIME (case seconds-or-milliseconds - :seconds expr - :milliseconds (kx// expr (k/raw 1000))))) + (hsql/call :from_unixtime (case seconds-or-milliseconds + :seconds expr + :milliseconds (hx// expr 1000)))) -(defn- date-format [format-str expr] (k/sqlfn :DATE_FORMAT expr (kx/literal format-str))) -(defn- str-to-date [format-str expr] (k/sqlfn :STR_TO_DATE expr (kx/literal format-str))) +(defn- date-format [format-str expr] (hsql/call :date_format expr (hx/literal format-str))) +(defn- str-to-date [format-str expr] (hsql/call :str_to_date expr (hx/literal format-str))) ;; Since MySQL doesn't have date_trunc() we fake it by formatting a date to an appropriate string and then converting back to a date. ;; See http://dev.mysql.com/doc/refman/5.6/en/date-and-time-functions.html#function_date-format for an explanation of format specifiers @@ -95,38 +79,40 @@ (case unit :default expr :minute (trunc-with-format "%Y-%m-%d %H:%i" expr) - :minute-of-hour (kx/minute expr) + :minute-of-hour (hx/minute expr) :hour (trunc-with-format "%Y-%m-%d %H" expr) - :hour-of-day (kx/hour expr) - :day (k/sqlfn :DATE expr) - :day-of-week (k/sqlfn :DAYOFWEEK expr) - :day-of-month (k/sqlfn :DAYOFMONTH expr) - :day-of-year (k/sqlfn :DAYOFYEAR expr) + :hour-of-day (hx/hour expr) + :day (hsql/call :date expr) + :day-of-week (hsql/call :dayofweek expr) + :day-of-month (hsql/call :dayofmonth expr) + :day-of-year (hsql/call :dayofyear expr) ;; To convert a YEARWEEK (e.g. 201530) back to a date you need tell MySQL which day of the week to use, ;; because otherwise as far as MySQL is concerned you could be talking about any of the days in that week :week (str-to-date "%X%V %W" - (kx/concat (k/sqlfn :YEARWEEK expr) - (kx/literal " Sunday"))) + (hx/concat (hsql/call :yearweek expr) + (hx/literal " Sunday"))) ;; mode 6: Sunday is first day of week, first week of year is the first one with 4+ days - :week-of-year (kx/inc (kx/week expr 6)) + :week-of-year (hx/inc (hx/week expr 6)) :month (str-to-date "%Y-%m-%d" - (kx/concat (date-format "%Y-%m" expr) - (kx/literal "-01"))) - :month-of-year (kx/month expr) + (hx/concat (date-format "%Y-%m" expr) + (hx/literal "-01"))) + :month-of-year (hx/month expr) ;; Truncating to a quarter is trickier since there aren't any format strings. ;; See the explanation in the H2 driver, which does the same thing but with slightly different syntax. 
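As with H2, truncation here is a round trip through a formatted string. A sketch of roughly what the `:month` branch above renders to, using a made-up column name and `hsql/raw` in place of `hx/literal`:

    (hsql/format
     (hsql/call :str_to_date
                (hsql/call :concat
                           (hsql/call :date_format :created_at (hsql/raw "'%Y-%m'"))
                           (hsql/raw "'-01'"))
                (hsql/raw "'%Y-%m-%d'")))
    ;; => ["str_to_date(concat(date_format(created_at, '%Y-%m'), '-01'), '%Y-%m-%d')"]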
:quarter (str-to-date "%Y-%m-%d" - (kx/concat (kx/year expr) - (kx/literal "-") - (kx/- (kx/* (kx/quarter expr) + (hx/concat (hx/year expr) + (hx/literal "-") + (hx/- (hx/* (hx/quarter expr) 3) 2) - (kx/literal "-01"))) - :quarter-of-year (kx/quarter expr) - :year (kx/year expr))) + (hx/literal "-01"))) + :quarter-of-year (hx/quarter expr) + :year (hx/year expr))) (defn- date-interval [unit amount] - (kutils/generated (format "DATE_ADD(NOW(), INTERVAL %d %s)" (int amount) (s/upper-case (name unit))))) + (hsql/call :date_add + :%now + (hsql/raw (format "INTERVAL %d %s" (int amount) (name unit))))) (defn- humanize-connection-error-message [message] (condp re-matches message @@ -145,6 +131,9 @@ #".*" ; default message)) +(defn- string-length-fn [field-key] + (hsql/call :char_length field-key)) + (defrecord MySQLDriver [] clojure.lang.Named @@ -182,7 +171,8 @@ :connection-details->spec (u/drop-first-arg connection-details->spec) :date (u/drop-first-arg date) :excluded-schemas (constantly #{"INFORMATION_SCHEMA"}) - :string-length-fn (constantly :CHAR_LENGTH) + :quote-style (constantly :mysql) + :string-length-fn (u/drop-first-arg string-length-fn) ;; If this fails you need to load the timezone definitions from your system into MySQL; ;; run the command `mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql -u root mysql` ;; See https://dev.mysql.com/doc/refman/5.7/en/time-zone-support.html for details diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj index 5e3f7d6b404a458ee5cae457a4264d4d98acdbeb..bb03b648fcc5c72443d3f956ca788d4381cef5d6 100644 --- a/src/metabase/driver/postgres.clj +++ b/src/metabase/driver/postgres.clj @@ -1,15 +1,15 @@ (ns metabase.driver.postgres + ;; TODO - rework this to be like newer-style namespaces that use `u/drop-first-arg` (:require [clojure.java.jdbc :as jdbc] (clojure [set :refer [rename-keys], :as set] [string :as s]) [clojure.tools.logging :as log] - (korma [core :as k] - [db :as kdb]) - [korma.sql.utils :as kutils] + [honeysql.core :as hsql] + [korma.db :as kdb] [metabase.driver :as driver] [metabase.driver.generic-sql :as sql] [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) ;; This is necessary for when NonValidatingFactory is passed in the sslfactory connection string argument, ;; e.x. when connecting to a Heroku Postgres database from outside of Heroku. 
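Two small HoneySQL notes that apply to the MySQL `date-interval` rewrite above and to several drivers below: keywords beginning with `%` are HoneySQL's shorthand for function calls (`:%now` -> `now()`, which is also why `:%getdate` / `:%getutcdate` replace the old `current-datetime-fn` values), and only the interval text needs `hsql/raw` now instead of splicing the whole expression as a string. A sketch assuming an amount of 7 days:

    (hsql/format (hsql/call :date_add :%now (hsql/raw "INTERVAL 7 day")))
    ;; => ["date_add(now(), INTERVAL 7 day)"]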
(:import org.postgresql.ssl.NonValidatingFactory)) @@ -107,16 +107,15 @@ (defn- unix-timestamp->timestamp [expr seconds-or-milliseconds] (case seconds-or-milliseconds - :seconds (k/sqlfn :TO_TIMESTAMP expr) - :milliseconds (recur (kx// expr 1000) :seconds))) + :seconds (hsql/call :to_timestamp expr) + :milliseconds (recur (hx// expr 1000) :seconds))) -(defn- date-trunc [unit expr] (k/sqlfn :DATE_TRUNC (kx/literal unit) expr)) -(defn- extract [unit expr] (kutils/func (format "EXTRACT(%s FROM %%s)" (name unit)) - [expr])) +(defn- date-trunc [unit expr] (hsql/call :date_trunc (hx/literal unit) expr)) +(defn- extract [unit expr] (hsql/call :extract unit expr)) -(def ^:private extract-integer (comp kx/->integer extract)) +(def ^:private extract-integer (comp hx/->integer extract)) -(def ^:private ^:const one-day (k/raw "INTERVAL '1 day'")) +(def ^:private ^:const one-day (hsql/raw "INTERVAL '1 day'")) (defn- date [unit expr] (case unit @@ -125,15 +124,15 @@ :minute-of-hour (extract-integer :minute expr) :hour (date-trunc :hour expr) :hour-of-day (extract-integer :hour expr) - :day (kx/->date expr) + :day (hx/->date expr) ;; Postgres DOW is 0 (Sun) - 6 (Sat); increment this to be consistent with Java, H2, MySQL, and Mongo (1-7) - :day-of-week (kx/inc (extract-integer :dow expr)) + :day-of-week (hx/inc (extract-integer :dow expr)) :day-of-month (extract-integer :day expr) :day-of-year (extract-integer :doy expr) ;; Postgres weeks start on Monday, so shift this date into the proper bucket and then decrement the resulting day - :week (kx/- (date-trunc :week (kx/+ expr one-day)) + :week (hx/- (date-trunc :week (hx/+ expr one-day)) one-day) - :week-of-year (extract-integer :week (kx/+ expr one-day)) + :week-of-year (extract-integer :week (hx/+ expr one-day)) :month (date-trunc :month expr) :month-of-year (extract-integer :month expr) :quarter (date-trunc :quarter expr) @@ -141,7 +140,7 @@ :year (extract-integer :year expr))) (defn- date-interval [unit amount] - (k/raw (format "(NOW() + INTERVAL '%d %s')" (int amount) (name unit)))) + (hsql/raw (format "(NOW() + INTERVAL '%d %s')" (int amount) (name unit)))) (defn- humanize-connection-error-message [message] (condp re-matches message @@ -177,10 +176,8 @@ "Fetch the Materialized Views for a Postgres DATABASE. These are returned as a set of maps, the same format as `:tables` returned by `describe-database`." [database] - (try (set (for [{:keys [schemaname matviewname]} (jdbc/query (sql/db->jdbc-connection-spec database) - ["SELECT schemaname, matviewname FROM pg_matviews;"])] - {:schema schemaname - :name matviewname})) + (try (set (jdbc/query (sql/db->jdbc-connection-spec database) + ["SELECT schemaname AS \"schema\", matviewname AS \"name\" FROM pg_matviews;"])) (catch Throwable e (log/error "Failed to fetch materialized views for this database:" (.getMessage e))))) @@ -191,6 +188,9 @@ [driver database] (update (sql/describe-database driver database) :tables (u/rpartial set/union (materialized-views database)))) +(defn- string-length-fn [field-key] + (hsql/call :char_length (hx/cast :VARCHAR field-key))) + (defrecord PostgresDriver [] clojure.lang.Named @@ -205,7 +205,7 @@ :date (u/drop-first-arg date) :prepare-value (u/drop-first-arg prepare-value) :set-timezone-sql (constantly "UPDATE pg_settings SET setting = ? 
WHERE name ILIKE 'timezone';") - :string-length-fn (constantly :CHAR_LENGTH) + :string-length-fn (u/drop-first-arg string-length-fn) :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)})) (u/strict-extend PostgresDriver diff --git a/src/metabase/driver/redshift.clj b/src/metabase/driver/redshift.clj index d11d7e383dacaf13090647fbf03c2b4032d08d0f..a08b3e412ca0cb5b7132e50183a548c7efbd4f5b 100644 --- a/src/metabase/driver/redshift.clj +++ b/src/metabase/driver/redshift.clj @@ -1,27 +1,27 @@ (ns metabase.driver.redshift "Amazon Redshift Driver." (:require [clojure.java.jdbc :as jdbc] - (korma [core :as k] - [db :as kdb]) + [honeysql.core :as hsql] + [korma.db :as kdb] (metabase [config :as config] [driver :as driver]) (metabase.driver [generic-sql :as sql] [postgres :as postgres]) [metabase.util :as u] - [metabase.util.korma-extensions :as kx])) + [metabase.util.honeysql-extensions :as hx])) (defn- connection-details->spec [details] (kdb/postgres (merge details postgres/ssl-params))) ; always connect to redshift over SSL (defn- date-interval [unit amount] - (k/raw (format "(GETDATE() + INTERVAL '%d %s')" (int amount) (name unit)))) + (hsql/call :+ :%getdate (hsql/raw (format "INTERVAL '%d %s'" (int amount) (name unit))))) (defn- unix-timestamp->timestamp [expr seconds-or-milliseconds] (case seconds-or-milliseconds - :seconds (kx/+ (k/raw "TIMESTAMP '1970-01-01T00:00:00Z'") - (kx/* expr - (k/raw "INTERVAL '1 second'"))) - :milliseconds (recur (kx// expr 1000) :seconds))) + :seconds (hx/+ (hsql/raw "TIMESTAMP '1970-01-01T00:00:00Z'") + (hx/* expr + (hsql/raw "INTERVAL '1 second'"))) + :milliseconds (recur (hx// expr 1000) :seconds))) ;; The Postgres JDBC .getImportedKeys method doesn't work for Redshift, and we're not allowed to access information_schema.constraint_column_usage, ;; so we'll have to use this custom query instead @@ -86,7 +86,7 @@ sql/ISQLDriver (merge postgres/PostgresISQLDriverMixin {:connection-details->spec (u/drop-first-arg connection-details->spec) - :current-datetime-fn (constantly (k/sqlfn* :GETDATE)) + :current-datetime-fn (constantly :%getdate) :set-timezone-sql (constantly nil) :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)} ;; HACK ! When we test against Redshift we use a session-unique schema so we can run simultaneous tests against a single remote host; diff --git a/src/metabase/driver/sqlite.clj b/src/metabase/driver/sqlite.clj index 63f8b8042a09d90850ddc5f193a1a78b16fafa46..f7c5a893e8faf0da9bb016bf3bdd7d891428ed5f 100644 --- a/src/metabase/driver/sqlite.clj +++ b/src/metabase/driver/sqlite.clj @@ -1,12 +1,13 @@ (ns metabase.driver.sqlite (:require [clojure.set :as set] - (korma [core :as k] - [db :as kdb]) + (honeysql [core :as hsql] + [format :as hformat]) + [korma.db :as kdb] [metabase.config :as config] [metabase.driver :as driver] [metabase.driver.generic-sql :as sql] [metabase.util :as u] - [metabase.util.korma-extensions :as kx])) + [metabase.util.honeysql-extensions :as hx])) ;; We'll do regex pattern matching here for determining Field types ;; because SQLite types can have optional lengths, e.g. 
NVARCHAR(100) or NUMERIC(10,5) @@ -28,11 +29,16 @@ [#"DATETIME" :DateTimeField] [#"DATE" :DateField]]) -(def ^:private ->date (partial k/sqlfn* :DATE)) -(def ^:private ->datetime (partial k/sqlfn* :DATETIME)) +;; register the SQLite concatnation operator `||` with HoneySQL as `sqlite-concat` +;; (hsql/format (hsql/call :sqlite-concat :a :b)) -> "(a || b)" +(defmethod hformat/fn-handler "sqlite-concat" [_ & args] + (str "(" (apply str (interpose " || " (map hformat/to-sql args))) ")")) + +(def ^:private ->date (partial hsql/call :date)) +(def ^:private ->datetime (partial hsql/call :datetime)) (defn- strftime [format-str expr] - (k/sqlfn :STRFTIME (kx/literal format-str) expr)) + (hsql/call :strftime (hx/literal format-str) expr)) (defn- date "Apply truncation / extraction to a date field or value for SQLite. @@ -40,26 +46,26 @@ [unit expr] ;; Convert Timestamps to ISO 8601 strings before passing to SQLite, otherwise they don't seem to work correctly (let [v (if (instance? java.sql.Timestamp expr) - (kx/literal (u/date->iso-8601 expr)) + (hx/literal (u/date->iso-8601 expr)) expr)] (case unit :default v :second (->datetime (strftime "%Y-%m-%d %H:%M:%S" v)) :minute (->datetime (strftime "%Y-%m-%d %H:%M" v)) - :minute-of-hour (kx/->integer (strftime "%M" v)) + :minute-of-hour (hx/->integer (strftime "%M" v)) :hour (->datetime (strftime "%Y-%m-%d %H:00" v)) - :hour-of-day (kx/->integer (strftime "%H" v)) + :hour-of-day (hx/->integer (strftime "%H" v)) :day (->date v) ;; SQLite day of week (%w) is Sunday = 0 <-> Saturday = 6. We want 1 - 7 so add 1 - :day-of-week (kx/->integer (kx/inc (strftime "%w" v))) - :day-of-month (kx/->integer (strftime "%d" v)) - :day-of-year (kx/->integer (strftime "%j" v)) + :day-of-week (hx/->integer (hx/inc (strftime "%w" v))) + :day-of-month (hx/->integer (strftime "%d" v)) + :day-of-year (hx/->integer (strftime "%j" v)) ;; Move back 6 days, then forward to the next Sunday - :week (->date v, (kx/literal "-6 days"), (kx/literal "weekday 0")) + :week (->date v, (hx/literal "-6 days"), (hx/literal "weekday 0")) ;; SQLite first week of year is 0, so add 1 - :week-of-year (kx/->integer (kx/inc (strftime "%W" v))) - :month (->date v, (kx/literal "start of month")) - :month-of-year (kx/->integer (strftime "%m" v)) + :week-of-year (hx/->integer (hx/inc (strftime "%W" v))) + :month (->date v, (hx/literal "start of month")) + :month-of-year (hx/->integer (strftime "%m" v)) ;; DATE(DATE(%s, 'start of month'), '-' || ((STRFTIME('%m', %s) - 1) % 3) || ' months') ;; -> DATE(DATE('2015-11-16', 'start of month'), '-' || ((STRFTIME('%m', '2015-11-16') - 1) % 3) || ' months') ;; -> DATE('2015-11-01', '-' || ((11 - 1) % 3) || ' months') @@ -67,17 +73,17 @@ ;; -> DATE('2015-11-01', '-1 months') ;; -> '2015-10-01' :quarter (->date - (->date v, (kx/literal "start of month")) - (kx/infix "||" - (kx/literal "-") - (kx/mod (kx/dec (strftime "%m" v)) - 3) - (kx/literal " months"))) + (->date v, (hx/literal "start of month")) + (hsql/call :sqlite-concat + (hx/literal "-") + (hx/mod (hx/dec (strftime "%m" v)) + 3) + (hx/literal " months"))) ;; q = (m + 2) / 3 - :quarter-of-year (kx// (kx/+ (strftime "%m" v) + :quarter-of-year (hx// (hx/+ (strftime "%m" v) 2) 3) - :year (kx/->integer (strftime "%Y" v))))) + :year (hx/->integer (strftime "%Y" v))))) (defn- date-interval [unit amount] (let [[multiplier sqlite-unit] (case unit @@ -94,13 +100,24 @@ ;; It's important to call `date` on 'now' to apply bucketing *before* adding/subtracting dates to handle certain edge cases as discussed in issue #2275 
(https://github.com/metabase/metabase/issues/2275). ;; Basically, March 30th minus one month becomes Feb 30th in SQLite, which becomes March 2nd. DATE(DATETIME('2016-03-30', '-1 month'), 'start of month') is thus March 1st. ;; The SQL we produce instead (for "last month") ends up looking something like: DATE(DATETIME(DATE('2015-03-30', 'start of month'), '-1 month'), 'start of month'). It's a little verbose, but gives us the correct answer (Feb 1st). - (->datetime (date unit (kx/literal "now")) - (kx/literal (format "%+d %s" (* amount multiplier) sqlite-unit))))) + (->datetime (date unit (hx/literal "now")) + (hx/literal (format "%+d %s" (* amount multiplier) sqlite-unit))))) (defn- unix-timestamp->timestamp [expr seconds-or-milliseconds] (case seconds-or-milliseconds - :seconds (->datetime expr (kx/literal "unixepoch")) - :milliseconds (recur (kx// expr 1000) :seconds))) + :seconds (->datetime expr (hx/literal "unixepoch")) + :milliseconds (recur (hx// expr 1000) :seconds))) + +;; SQLite doesn't support `TRUE`/`FALSE`; it uses `1`/`0`, respectively; convert these booleans to numbers. +(defn- prepare-value [{value :value}] + (cond + (true? value) 1 + (false? value) 0 + :else value)) + +(defn- string-length-fn [field-key] + (hsql/call :length field-key)) + (defrecord SQLiteDriver [] clojure.lang.Named @@ -126,11 +143,11 @@ (merge (sql/ISQLDriverDefaultsMixin) {:active-tables sql/post-filtered-active-tables :column->base-type (sql/pattern-based-column->base-type pattern->type) - :connection-details->spec (fn [_ details] - (kdb/sqlite3 details)) - :current-datetime-fn (constantly (k/raw "DATETIME('now')")) + :connection-details->spec (u/drop-first-arg kdb/sqlite3) + :current-datetime-fn (constantly (hsql/raw "datetime('now')")) :date (u/drop-first-arg date) - :string-length-fn (constantly :LENGTH) + :prepare-value (u/drop-first-arg prepare-value) + :string-length-fn (u/drop-first-arg string-length-fn) :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)})) (driver/register-driver! 
:sqlite (SQLiteDriver.)) diff --git a/src/metabase/driver/sqlserver.clj b/src/metabase/driver/sqlserver.clj index 2a847a5b002256a20c33eb9e8a7ed745e94fb7a7..81256b4a71671d3ed6521cf927713a5a7814cf64 100644 --- a/src/metabase/driver/sqlserver.clj +++ b/src/metabase/driver/sqlserver.clj @@ -1,11 +1,11 @@ (ns metabase.driver.sqlserver (:require [clojure.string :as s] - (korma [core :as k] - [db :as kdb]) + [honeysql.core :as hsql] + [korma.db :as kdb] [metabase.driver :as driver] [metabase.driver.generic-sql :as sql] [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) (:import net.sourceforge.jtds.jdbc.Driver)) ; need to import this in order to load JDBC driver (defn- column->base-type @@ -71,63 +71,74 @@ ssl (str ";ssl=require")))))) (defn- date-part [unit expr] - (k/sqlfn :DATEPART (k/raw (name unit)) expr)) + (hsql/call :datepart (hsql/raw (name unit)) expr)) (defn- date-add [unit & exprs] - (apply k/sqlfn* :DATEADD (k/raw (name unit)) exprs)) + (apply hsql/call :dateadd (hsql/raw (name unit)) exprs)) (defn- date "See also the [jTDS SQL <-> Java types table](http://jtds.sourceforge.net/typemap.html)" [unit expr] (case unit :default expr - :minute (kx/cast :SMALLDATETIME expr) + :minute (hx/cast :smalldatetime expr) :minute-of-hour (date-part :minute expr) - :hour (kx/->datetime (kx/format "yyyy-MM-dd HH:00:00" expr)) + :hour (hx/->datetime (hx/format "yyyy-MM-dd HH:00:00" expr)) :hour-of-day (date-part :hour expr) ;; jTDS is retarded; I sense an ongoing theme here. It returns DATEs as strings instead of as java.sql.Dates ;; like every other SQL DB we support. Work around that by casting to DATE for truncation then back to DATETIME so we get the type we want - :day (kx/->datetime (kx/->date expr)) + :day (hx/->datetime (hx/->date expr)) :day-of-week (date-part :weekday expr) :day-of-month (date-part :day expr) :day-of-year (date-part :dayofyear expr) ;; Subtract the number of days needed to bring us to the first day of the week, then convert to date ;; The equivalent SQL looks like: ;; CAST(DATEADD(day, 1 - DATEPART(weekday, %s), CAST(%s AS DATE)) AS DATETIME) - :week (kx/->datetime + :week (hx/->datetime (date-add :day - (kx/- 1 (date-part :weekday expr)) - (kx/->date expr))) + (hx/- 1 (date-part :weekday expr)) + (hx/->date expr))) :week-of-year (date-part :iso_week expr) - :month (kx/->datetime (kx/format "yyyy-MM-01" expr)) + :month (hx/->datetime (hx/format "yyyy-MM-01" expr)) :month-of-year (date-part :month expr) ;; Format date as yyyy-01-01 then add the appropriate number of quarter ;; Equivalent SQL: ;; DATEADD(quarter, DATEPART(quarter, %s) - 1, FORMAT(%s, 'yyyy-01-01')) :quarter (date-add :quarter - (kx/dec (date-part :quarter expr)) - (kx/format "yyyy-01-01" expr)) + (hx/dec (date-part :quarter expr)) + (hx/format "yyyy-01-01" expr)) :quarter-of-year (date-part :quarter expr) :year (date-part :year expr))) (defn- date-interval [unit amount] - (date-add unit amount (k/sqlfn :GETUTCDATE))) + (date-add unit amount :%getutcdate)) (defn- unix-timestamp->timestamp [expr seconds-or-milliseconds] (case seconds-or-milliseconds ;; The second argument to DATEADD() gets casted to a 32-bit integer. BIGINT is 64 bites, so we tend to run into ;; integer overflow errors (especially for millisecond timestamps). ;; Work around this by converting the timestamps to minutes instead before calling DATEADD(). 
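A sketch of the SQL shape the `:seconds` branch below ends up producing, with a made-up column name and `hsql/raw` standing in for the `hx//` division and `hx/literal` date the driver actually builds:

    (hsql/format
     (hsql/call :dateadd
                (hsql/raw "minute")
                (hsql/raw "unix_ts / 60")
                (hsql/raw "'1970-01-01'")))
    ;; => ["dateadd(minute, unix_ts / 60, '1970-01-01')"]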
- :seconds (date-add :minute (kx// expr 60) (kx/literal "1970-01-01")) - :milliseconds (recur (kx// expr 1000) :seconds))) + :seconds (date-add :minute (hx// expr 60) (hx/literal "1970-01-01")) + :milliseconds (recur (hx// expr 1000) :seconds))) -(defn- apply-limit [korma-query {value :limit}] - (k/modifier korma-query (format "TOP %d" value))) +(defn- apply-limit [honeysql-form {value :limit}] + (assoc honeysql-form :modifiers [(format "TOP %d" value)])) + +(defn- apply-page [honeysql-form {{:keys [items page]} :page}] + (assoc honeysql-form :offset (hsql/raw (format "%d ROWS FETCH NEXT %d ROWS ONLY" + (* items (dec page)) + items)))) + +;; SQLServer doesn't support `TRUE`/`FALSE`; it uses `1`/`0`, respectively; convert these booleans to numbers. +(defn- prepare-value [{value :value}] + (cond + (true? value) 1 + (false? value) 0 + :else value)) + +(defn- string-length-fn [field-key] + (hsql/call :len (hx/cast :VARCHAR field-key))) -(defn- apply-page [korma-query {{:keys [items page]} :page}] - (k/offset korma-query (format "%d ROWS FETCH NEXT %d ROWS ONLY" - (* items (dec page)) - items))) (defrecord SQLServerDriver [] clojure.lang.Named @@ -173,11 +184,13 @@ :apply-page (u/drop-first-arg apply-page) :column->base-type (u/drop-first-arg column->base-type) :connection-details->spec (u/drop-first-arg connection-details->spec) - :current-datetime-fn (constantly (k/sqlfn* :GETUTCDATE)) + :current-datetime-fn (constantly :%getutcdate) :date (u/drop-first-arg date) :excluded-schemas (constantly #{"sys" "INFORMATION_SCHEMA"}) - :stddev-fn (constantly :STDEV) - :string-length-fn (constantly :LEN) + :field-percent-urls sql/slow-field-percent-urls + :prepare-value (u/drop-first-arg prepare-value) + :stddev-fn (constantly :stdev) + :string-length-fn (u/drop-first-arg string-length-fn) :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)})) (driver/register-driver! 
:sqlserver (SQLServerDriver.)) diff --git a/src/metabase/events/activity_feed.clj b/src/metabase/events/activity_feed.clj index f04ca01d4b7a76aeaf7aa12ff647e235ab3c7b40..ab0e44c52312dceac6a227bc53918378f9585592 100644 --- a/src/metabase/events/activity_feed.clj +++ b/src/metabase/events/activity_feed.clj @@ -55,9 +55,9 @@ add-remove-card-details (fn [{:keys [dashcards] :as obj}] ;; we expect that the object has just a dashboard :id at the top level ;; plus a `:dashcards` attribute which is a vector of the cards added/removed - (-> (db/sel :one [Dashboard :description :name :public_perms], :id (events/object->model-id topic obj)) + (-> (db/select-one [Dashboard :description :name :public_perms], :id (events/object->model-id topic obj)) (assoc :dashcards (for [{:keys [id card_id]} dashcards] - (-> (db/sel :one [Card :name :description :public_perms], :id card_id) + (-> (db/select-one [Card :name :description :public_perms], :id card_id) (assoc :id id) (assoc :card_id card_id))))))] (activity/record-activity diff --git a/src/metabase/events/notifications.clj b/src/metabase/events/notifications.clj index 03c2c1090acfd0b5e3354e3c87e54011cb068781..43cd6ed75cbaec0e1e6c4b17743c0ee7f1f22785 100644 --- a/src/metabase/events/notifications.clj +++ b/src/metabase/events/notifications.clj @@ -1,5 +1,6 @@ (ns metabase.events.notifications (:require [clojure.core.async :as async] + [clojure.set :as set] [clojure.tools.logging :as log] [metabase.db :as db] [metabase.email.messages :as messages] @@ -42,20 +43,23 @@ ;; otherwise pull out dependent card ids and add dashboard/pulse dependencies (let [card-ids (mapv :model_id (get deps-by-model "Card"))] (assoc deps-by-model - "Dashboard" (->> (db/sel :many :fields [DashboardCard :dashboard_id] :card_id [in card-ids]) - (map #(clojure.set/rename-keys % {:dashboard_id :model_id}))) - "Pulse" (->> (db/sel :many :fields [PulseCard :pulse_id] :card_id [in card-ids]) - (map #(clojure.set/rename-keys % {:pulse_id :model_id}))))))) + "Dashboard" (for [dashcard (db/select [DashboardCard :dashboard_id], :card_id [:in card-ids])] + (set/rename-keys dashcard {:dashboard_id :model_id})) + "Pulse" (for [pulsecard (db/select [PulseCard :pulse_id], :card_id [:in card-ids])] + (set/rename-keys pulsecard {:pulse_id :model_id})))))) (defn- pull-dependencies [model model-id] - (when-let [deps (db/sel :many :fields [Dependency :model :model_id] :dependent_on_model model :dependent_on_id model-id)] + (when-let [deps (db/select [Dependency :model :model_id] + :dependent_on_model model + :dependent_on_id model-id)] (let [deps-by-model (-> (group-by :model deps) add-objects-dependent-on-cards) deps-with-details (for [model (keys deps-by-model) :let [ids (mapv :model_id (get deps-by-model model))]] ;; TODO: this is slightly dangerous because we assume :name and :creator_id are available - (->> (db/sel :many :fields [(model->entity (keyword model)) :id :name :creator_id] :id [in ids]) - (map #(assoc % :model model))))] + (for [object (db/select [(model->entity (keyword model)) :id :name :creator_id] + :id [:in ids])] + (assoc object :model model)))] ;; we end up with a list of lists, so flatten before returning (flatten deps-with-details)))) diff --git a/src/metabase/events/sync_database.clj b/src/metabase/events/sync_database.clj index a10d1ea564f00fe6048b29100d95cb857670862d..b90cdccbf0bee997c27ef53e43ca1d5215904cf7 100644 --- a/src/metabase/events/sync_database.clj +++ b/src/metabase/events/sync_database.clj @@ -26,7 +26,7 @@ ;; try/catch here to prevent individual topic processing 
exceptions from bubbling up. better to handle them here. (try (when-let [{topic :topic object :item} sync-database-event] - (when-let [database (db/sel :one Database :id (events/object->model-id topic object))] + (when-let [database (Database (events/object->model-id topic object))] ;; just kick off a sync on another thread (future (try (sync-database/sync-database! database) diff --git a/src/metabase/metabot.clj b/src/metabase/metabot.clj index f25b999f653fd90622e988138cfacb58ddb5ec14..abbd5da50a04d541cba801e78855c440fa8c1e94 100644 --- a/src/metabase/metabot.clj +++ b/src/metabase/metabot.clj @@ -6,7 +6,6 @@ [clojure.tools.logging :as log] [aleph.http :as aleph] [cheshire.core :as json] - [korma.core :as k] (manifold [bus :as bus] [deferred :as d] [stream :as s]) @@ -72,18 +71,18 @@ (defn ^:metabot list "Implementation of the `metabot list cards` command." [& _] - (let [cards (db/sel :many :fields ['Card :id :name] (k/order :id :DESC) (k/limit 20))] + (let [cards (db/select ['Card :id :name], {:order-by [[:id :desc]], :limit 20})] (str "Here's your " (count cards) " most recent cards:\n" (format-cards cards)))) (defn- card-with-name [card-name] - (first (u/prog1 (db/sel :many :fields ['Card :id :name], (k/where {(k/sqlfn :LOWER :name) [like (str \% (str/lower-case card-name) \%)]})) + (first (u/prog1 (db/select ['Card :id :name], :%lower.name [:like (str \% (str/lower-case card-name) \%)]) (when (> (count <>) 1) (throw (Exception. (str "Could you be a little more specific? I found these cards with names that matched:\n" (format-cards <>)))))))) (defn- id-or-name->card [card-id-or-name] (cond - (integer? card-id-or-name) (db/sel :one :fields ['Card :id :name], :id card-id-or-name) + (integer? card-id-or-name) (db/select-one ['Card :id :name], :id card-id-or-name) (or (string? card-id-or-name) (symbol? card-id-or-name)) (card-with-name card-id-or-name) :else (throw (Exception. (format "I don't know what Card `%s` is. Give me a Card ID or name."))))) diff --git a/src/metabase/middleware.clj b/src/metabase/middleware.clj index 969bfb31bc005e6833e1bf3e8a11ad1b487de726..15089b51212ff7db1e95034054f5290a78781cd7 100644 --- a/src/metabase/middleware.clj +++ b/src/metabase/middleware.clj @@ -3,7 +3,6 @@ (:require [clojure.tools.logging :as log] (cheshire factory [generate :refer [add-encoder encode-str encode-nil]]) - [korma.core :as k] [metabase.api.common :refer [*current-user* *current-user-id*]] [metabase.config :as config] [metabase.db :as db] @@ -60,12 +59,11 @@ (fn [{:keys [metabase-session-id] :as request}] ;; TODO - what kind of validations can we do on the sessionid to make sure it's safe to handle? str? alphanumeric? (handler (or (when (and metabase-session-id ((resolve 'metabase.core/initialized?))) - (when-let [session (first (k/select Session - ;; NOTE: we join with the User table and ensure user.is_active = true - (k/with User (k/where {:is_active true})) - (k/fields :created_at :user_id) - (k/where {:id metabase-session-id})))] - (let [session-age-ms (- (System/currentTimeMillis) (.getTime ^java.util.Date (get session :created_at (java.util.Date. 
0))))] + (when-let [session (db/select-one [Session :created_at :user_id] + (db/join [Session :user_id] [User :id]) + (db/qualify User :is_active) true + (db/qualify Session :id) metabase-session-id)] + (let [session-age-ms (- (System/currentTimeMillis) (.getTime ^java.util.Date (get session :created_at (u/->Date 0))))] ;; If the session exists and is not expired (max-session-age > session-age) then validation is good (when (and session (> (config/config-int :max-session-age) (quot session-age-ms 60000))) (assoc request :metabase-user-id (:user_id session)))))) @@ -89,7 +87,9 @@ (fn [request] (if-let [current-user-id (:metabase-user-id request)] (binding [*current-user-id* current-user-id - *current-user* (delay (db/sel :one `[User ~@(models/default-fields User) :is_active :is_staff], :id current-user-id))] + *current-user* (delay (db/select-one (vec (concat [User :is_active :is_staff] + (models/default-fields User))) + :id current-user-id))] (handler request)) (handler request)))) diff --git a/src/metabase/models/card.clj b/src/metabase/models/card.clj index 7e82b1954a2baac7ea40aca7001aec08837bcacd..c27cf9f5bc4298430da7c99d331b0d04d25f980a 100644 --- a/src/metabase/models/card.clj +++ b/src/metabase/models/card.clj @@ -1,6 +1,5 @@ (ns metabase.models.card - (:require [korma.core :as k] - [medley.core :as m] + (:require [medley.core :as m] [metabase.db :as db] (metabase.models [card-label :refer [CardLabel]] [dependency :as dependency] @@ -27,20 +26,15 @@ "Return the number of Dashboards this Card is in." {:hydrate :dashboard_count} [{:keys [id]}] - (-> (k/select @(ns-resolve 'metabase.models.dashboard-card 'DashboardCard) - (k/aggregate (count :*) :dashboards) - (k/where {:card_id id})) - first - :dashboards)) + (:count (db/select-one ['DashboardCard [:%count.* :count]] + :card_id id))) (defn labels "Return `Labels` for CARD." {:hydrate :labels} [{:keys [id]}] - (if-let [label-ids (seq (db/sel :many :field [CardLabel :label_id] :card_id id))] - (db/sel :many Label - (k/where {:id [in label-ids]}) - (k/order (k/sqlfn :LOWER :name))) + (if-let [label-ids (seq (db/select-field :label_id CardLabel, :card_id id))] + (db/select Label, :id [:in label-ids], {:order-by [:%lower.name]}) [])) (defn- pre-cascade-delete [{:keys [id]}] @@ -60,7 +54,7 @@ [_ _ instance] (->> (dissoc instance :created_at :updated_at) (into {}) ; if it's a record type like CardInstance we need to convert it to a regular map or filter-vals won't work - (m/filter-vals (complement delay?)))) + (m/filter-vals (complement delay?)))) ; TODO - I don't think this is necessary anymore ! ;;; ## ---------------------------------------- DEPENDENCIES ---------------------------------------- diff --git a/src/metabase/models/dashboard.clj b/src/metabase/models/dashboard.clj index 1bd277c8d50e933c8ecdee5f4fdc059c09cef2d4..ebe27facce0985433483380c0783d81562a260a4 100644 --- a/src/metabase/models/dashboard.clj +++ b/src/metabase/models/dashboard.clj @@ -1,6 +1,5 @@ (ns metabase.models.dashboard (:require [clojure.data :refer [diff]] - [korma.core :as k] [metabase.db :as db] (metabase.models [dashboard-card :refer [DashboardCard] :as dashboard-card] [interface :as i] @@ -13,7 +12,7 @@ "Return the `DashboardCards` associated with DASHBOARD, in the order they were created." 
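The `db/sel` -> `db/select` rewrites in these model namespaces all follow one pattern: a model (or a `[Model & fields]` vector), optional key-value conditions, and an optional trailing options map for `:order-by` / `:limit`; `%`-prefixed keywords are HoneySQL call syntax (`:%count.*` -> `count(*)`, `:%lower.name` -> `lower(name)`). A schematic recap assembled from call shapes that appear in this patch, not new code:

    ;; selected columns + conditions + trailing options map
    (db/select [DashboardCard :sizeX :sizeY :row :col :id :card_id]
      :dashboard_id dashboard-id
      {:order-by [[:created_at :asc]]})

    ;; aggregate columns via HoneySQL call syntax
    (db/select-one ['DashboardCard [:%count.* :count]] :card_id id)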
{:hydrate :ordered_cards, :arglists '([dashboard])} [{:keys [id]}] - (db/sel :many DashboardCard, :dashboard_id id, (k/order :created_at :asc))) + (db/select DashboardCard, :dashboard_id id, {:order-by [[:created_at :asc]]})) (defn- pre-cascade-delete [{:keys [id]}] (db/cascade-delete! 'Revision :model "Dashboard" :model_id id) @@ -52,7 +51,7 @@ ;; Now update the cards as needed (let [serialized-cards (:cards serialized-dashboard) id->serialized-card (zipmap (map :id serialized-cards) serialized-cards) - current-cards (db/sel :many :fields [DashboardCard :sizeX :sizeY :row :col :id :card_id], :dashboard_id dashboard-id) + current-cards (db/select [DashboardCard :sizeX :sizeY :row :col :id :card_id], :dashboard_id dashboard-id) id->current-card (zipmap (map :id current-cards) current-cards) all-dashcard-ids (concat (map :id serialized-cards) (map :id current-cards))] diff --git a/src/metabase/models/dashboard_card.clj b/src/metabase/models/dashboard_card.clj index 733d0e6f6c4c1a350e5edceaf51867409c4b2fd1..e849867be4afb954b6df6b083cab88e4f95b6036 100644 --- a/src/metabase/models/dashboard_card.clj +++ b/src/metabase/models/dashboard_card.clj @@ -1,6 +1,5 @@ (ns metabase.models.dashboard-card (:require [clojure.set :as set] - [korma.core :as k] [korma.db :as kdb] [metabase.db :as db] [metabase.events :as events] @@ -10,12 +9,7 @@ [metabase.models.interface :as i] [metabase.util :as u])) -(i/defentity DashboardCard :report_dashboardcard - ;; TODO - This can be removed once we finish the transition from korma -> HoneySQL. - ;; This needs to be here for the time being because things that go through regular `korma.core/select` don't get ran - ;; through `post-select` (like they would if they went through a `metabase.db/` function) and we don't want to have to fix - ;; naming manually everywhere - (k/transform (u/rpartial set/rename-keys {:sizex :sizeX, :sizey :sizeY}))) +(i/defentity DashboardCard :report_dashboardcard) (defn- pre-insert [dashcard] (let [defaults {:sizeX 2 @@ -41,18 +35,16 @@ "Return the `Dashboard` associated with the `DashboardCard`." [{:keys [dashboard_id]}] {:pre [(integer? dashboard_id)]} - (db/sel :one 'metabase.models.dashboard/Dashboard :id dashboard_id)) + (db/select-one 'Dashboard, :id dashboard_id)) (defn ^:hydrate series "Return the `Cards` associated as additional series on this `DashboardCard`." [{:keys [id]}] - (->> (k/select Card - (k/join DashboardCardSeries (= :dashboardcard_series.card_id :id)) - (k/fields :id :name :description :display :dataset_query :visualization_settings) - (k/where {:dashboardcard_series.dashboardcard_id id}) - (k/order :dashboardcard_series.position :asc)) - (map (partial i/do-post-select Card)))) + (db/select [Card :id :name :description :display :dataset_query :visualization_settings] + (db/join [Card :id] [DashboardCardSeries :card_id]) + (db/qualify DashboardCardSeries :dashboardcard_id) id + {:order-by [[(db/qualify DashboardCardSeries :position) :asc]]})) ;;; ## ---------------------------------------- CRUD FNS ---------------------------------------- @@ -62,7 +54,7 @@ "Fetch a single `DashboardCard` by its ID value." [id] {:pre [(integer? id)]} - (-> (db/sel :one DashboardCard :id id) + (-> (DashboardCard id) (hydrate :series))) (defn update-dashboard-card-series @@ -81,8 +73,10 @@ (db/cascade-delete! DashboardCardSeries :dashboardcard_id id) ;; now just insert all of the series that were given to us (when-not (empty? 
card-ids) - (let [cards (map-indexed (fn [idx itm] {:dashboardcard_id id :card_id itm :position idx}) card-ids)] - (k/insert DashboardCardSeries (k/values cards))))) + (let [cards (map-indexed (fn [i card-id] + {:dashboardcard_id id, :card_id card-id, :position i}) + card-ids)] + (db/insert-many! DashboardCardSeries cards)))) (defn update-dashboard-card "Update an existing `DashboardCard`, including all `DashboardCardSeries`. @@ -96,7 +90,7 @@ (when (and sizeX sizeY row col) (db/update! DashboardCard id, :sizeX sizeX, :sizeY sizeY, :row row, :col col)) ;; update series (only if they changed) - (when (not= series (db/sel :many :field [DashboardCardSeries :card_id] :dashboardcard_id id (k/order :position :asc))) + (when (not= series (map :card_id (db/select [DashboardCardSeries :card_id], :dashboardcard_id id, {:order-by [[:position :asc]]}))) (update-dashboard-card-series dashboard-card series)) ;; fetch the fully updated dashboard card then return it (and fire off an event) (->> (retrieve-dashboard-card id) diff --git a/src/metabase/models/database.clj b/src/metabase/models/database.clj index 0767a799f93346dc4525690c62ada4e51bb798f6..eea054b3d83273164cee0a3128833e562d0f1b7a 100644 --- a/src/metabase/models/database.clj +++ b/src/metabase/models/database.clj @@ -1,6 +1,5 @@ (ns metabase.models.database (:require [cheshire.generate :refer [add-encoder encode-map]] - [korma.core :as k] [metabase.api.common :refer [*current-user*]] [metabase.db :as db] [metabase.models.interface :as i] @@ -26,7 +25,7 @@ (defn ^:hydrate tables "Return the `Tables` associated with this `Database`." [{:keys [id]}] - (db/sel :many 'Table :db_id id, :active true, (k/order :display_name :ASC))) + (db/select 'Table, :db_id id, :active true, {:order-by [[:display_name :asc]]})) (u/strict-extend (class Database) i/IEntity diff --git a/src/metabase/models/dependency.clj b/src/metabase/models/dependency.clj index d57d14ae8970d3bbe19296551387112d18f034e2..da3346866555346935ad73f9b562e163978b0ea2 100644 --- a/src/metabase/models/dependency.clj +++ b/src/metabase/models/dependency.clj @@ -1,6 +1,5 @@ (ns metabase.models.dependency (:require [clojure.set :as set] - [korma.core :as k] [metabase.db :as db] [metabase.models.interface :as i] [metabase.util :as u])) @@ -30,7 +29,7 @@ [entity id] {:pre [(i/metabase-entity? entity) (integer? id)]} - (db/sel :many Dependency :model (:name entity) :model_id id)) + (db/select Dependency, :model (:name entity), :model_id id)) (defn update-dependencies "Update the set of `Dependency` objects for a given entity." @@ -44,7 +43,7 @@ (when (every? integer? (k deps)) (for [val (k deps)] {:dependent_on_model (name k), :dependent_on_id val}))) - dependencies-old (set (db/sel :many :fields [Dependency :dependent_on_model :dependent_on_id] :model entity-name :model_id id)) + dependencies-old (set (db/select [Dependency :dependent_on_model :dependent_on_id], :model entity-name, :model_id id)) dependencies-new (->> (mapv dependency-set (keys deps)) (filter identity) flatten @@ -53,11 +52,12 @@ dependencies- (set/difference dependencies-old dependencies-new)] (when (seq dependencies+) (let [vs (map #(merge % {:model entity-name, :model_id id, :created_at (u/new-sql-timestamp)}) dependencies+)] - (k/insert Dependency (k/values vs)))) + (db/insert-many! 
Dependency vs))) (when (seq dependencies-) (doseq [{:keys [dependent_on_model dependent_on_id]} dependencies-] ;; batch delete would be nice here, but it's tougher with multiple conditions - (k/delete Dependency (k/where {:model entity-name - :model_id id - :dependent_on_model dependent_on_model - :dependent_on_id dependent_on_id})))))) + (db/delete! Dependency + :model entity-name + :model_id id + :dependent_on_model dependent_on_model + :dependent_on_id dependent_on_id))))) diff --git a/src/metabase/models/field.clj b/src/metabase/models/field.clj index e4b1cf616112860c3bce5030077f0841aacc15fb..c48594d9d13d7a2ea4e0af6f66b6e52514f878ed 100644 --- a/src/metabase/models/field.clj +++ b/src/metabase/models/field.clj @@ -1,7 +1,6 @@ (ns metabase.models.field (:require [clojure.data :as d] [clojure.string :as s] - [korma.core :as k] [medley.core :as m] [metabase.db :as db] (metabase.models [common :as common] @@ -103,14 +102,14 @@ (defn ^:hydrate values "Return the `FieldValues` associated with this FIELD." [{:keys [id]}] - (db/sel :many [FieldValues :field_id :values], :field_id id)) + (db/select [FieldValues :field_id :values], :field_id id)) (defn qualified-name-components "Return the pieces that represent a path to FIELD, of the form `[table-name parent-fields-name* field-name]`." [{field-name :name, table-id :table_id, parent-id :parent_id}] (conj (if-let [parent (Field parent-id)] (qualified-name-components parent) - [(db/sel :one :field ['Table :name], :id table-id)]) + [(db/select-one-field :name 'Table, :id table-id)]) field-name)) (defn qualified-name @@ -122,7 +121,7 @@ "Return the `Table` associated with this `Field`." {:arglists '([field])} [{:keys [table_id]}] - (db/sel :one 'Table, :id table_id)) + (db/select-one 'Table, :id table_id)) (u/strict-extend (class Field) i/IEntity (merge i/IEntityDefaults diff --git a/src/metabase/models/field_values.clj b/src/metabase/models/field_values.clj index e09fca1483d3fc9d22dd9f214ea6949472d235b6..0acf075d33bd34d0152828f8e77ec9d17a97e91c 100644 --- a/src/metabase/models/field_values.clj +++ b/src/metabase/models/field_values.clj @@ -53,7 +53,7 @@ [{field-id :id, :as field}] {:pre [(integer? field-id) (field-should-have-field-values? field)]} - (if-let [field-values (db/sel :one FieldValues :field_id field-id)] + (if-let [field-values (FieldValues :field_id field-id)] (db/update! FieldValues (:id field-values) :values ((resolve 'metabase.db.metadata-queries/field-distinct-values) field)) (create-field-values field))) @@ -66,7 +66,7 @@ [{field-id :id :as field} & [human-readable-values]] {:pre [(integer? field-id)]} (when (field-should-have-field-values? field) - (or (db/sel :one FieldValues :field_id field-id) + (or (FieldValues :field_id field-id) (create-field-values field human-readable-values)))) (defn save-field-values @@ -74,7 +74,7 @@ [field-id values] {:pre [(integer? field-id) (coll? values)]} - (if-let [field-values (db/sel :one FieldValues :field_id field-id)] + (if-let [field-values (FieldValues :field_id field-id)] (db/update! FieldValues (:id field-values), :values values) (db/insert! 
FieldValues :field_id field-id, :values values))) diff --git a/src/metabase/models/hydrate.clj b/src/metabase/models/hydrate.clj index dc5dfc900c3184023bc60569dab6c37422c6f7ed..70ebe1acc361bf74a2473380bb862e334c6deffc 100644 --- a/src/metabase/models/hydrate.clj +++ b/src/metabase/models/hydrate.clj @@ -180,8 +180,8 @@ ids (set (for [result results :when (not (get result dest-key))] (source-key result))) - objs (into {} (for [obj (db/sel :many entity :id [in ids])] - {(:id obj) obj}))] + objs (when (seq ids) + (u/key-by :id (db/select entity, :id [:in ids])))] (for [{source-id source-key :as result} results] (if (get result dest-key) result @@ -191,7 +191,7 @@ ;; ### Helper Fns (def ^:private hydration-key->entity - "Delay that returns map of `hydration-key` -> korma entity. + "Delay that returns map of `hydration-key` -> model entity. e.g. `:user -> User`. This is built pulling the `hydration-keys` set from all of our entities." diff --git a/src/metabase/models/interface.clj b/src/metabase/models/interface.clj index 0c8b5bf01cd767367bb7c7a414c91b6c9fec0efb..d040b04e42506bb3c2f322421138b6965665cb61 100644 --- a/src/metabase/models/interface.clj +++ b/src/metabase/models/interface.clj @@ -49,11 +49,6 @@ (let [defaults {:version 1}] (merge defaults query))) ; set some default values") - (post-insert [this] - "Gets called by `insert!` after an object is inserted into the DB. (This object is fetched via `select`). - A good place to do asynchronous tasks such as creating related objects. - Implementations should return the newly created object.") - (pre-update [this] "Called by `update!` before DB operations happen. A good place to set updated values for fields like `updated_at`.") @@ -184,7 +179,6 @@ :can-read? (fn [this & _] (throw (UnsupportedOperationException. (format "No implementation of can-read? for %s; please provide one." (class this))))) :can-write? (fn [this & _] (throw (UnsupportedOperationException. (format "No implementation of can-write? for %s; please provide one." (class this))))) :pre-insert identity - :post-insert identity :pre-update identity :post-select identity :pre-cascade-delete (constantly nil)}) @@ -223,14 +217,13 @@ and have their own unique record type. `defentity` defines a backing record type following the format `<entity>Instance`. For example, the class associated with - `User` is `metabase.models.user/UserInstance`. This class is used for both the titular korma entity (e.g. `User`) and + `User` is `metabase.models.user/UserInstance`. This class is used for both the titular entity (e.g. `User`) and for objects that are fetched from the DB. This means they can share the `IEntity` protocol and simplifies the interface somewhat; functions like `types` work on either the entity or instances fetched from the DB. - (defentity User :metabase_user) ; creates class `UserInstance` and korma entity `User` + (defentity User :metabase_user) ; creates class `UserInstance` and DB entity `User` - (metabase.db/sel :one User, ...) ; use with `metabase.db` functions. All results are instances of `UserInstance` - (korma.core/select User ...) ; use with korma functions. Results will be regular maps + (metabase.db/select User, ...) ; use with `metabase.db` functions. 
All results are instances of `UserInstance` The record type automatically extends `IEntity` with `IEntityDefaults`, but you may call `extend` again if you need to override default behaviors: @@ -244,12 +237,12 @@ (Database) ; return a seq of *all* Databases (as instances of `DatabaseInstance`) (Database 1) ; return Database 1" - {:arglist '([entity table-name] [entity docstr? table-name & korma-forms]) + {:arglist '([entity table-name] [entity docstr? table-name]) :style/indent 2} [entity & args] - (let [[docstr [table-name & korma-forms]] (u/optional string? args) - instance (symbol (str entity "Instance")) - map->instance (symbol (str "map->" instance))] + (let [[docstr [table-name]] (u/optional string? args) + instance (symbol (str entity "Instance")) + map->instance (symbol (str "map->" instance))] `(do (defrecord ~instance [] clojure.lang.IFn @@ -305,6 +298,5 @@ (format "Korma entity for '%s' table; instance of %s." (name table-name) instance))) (-> (k/create-entity ~(name entity)) (k/table ~table-name) - ~@korma-forms (assoc ::entity true) ~map->instance))))) diff --git a/src/metabase/models/label.clj b/src/metabase/models/label.clj index 626470968172d2116f361560a5bba4c1abf52b6b..1b327f120b22f0d3d02051afb5a5c8f2991dbd31 100644 --- a/src/metabase/models/label.clj +++ b/src/metabase/models/label.clj @@ -1,6 +1,5 @@ (ns metabase.models.label - (:require [korma.core :as k] - [metabase.db :as db] + (:require [metabase.db :as db] [metabase.models.interface :as i] [metabase.util :as u])) diff --git a/src/metabase/models/metric.clj b/src/metabase/models/metric.clj index 74f769c70917d6d7be8bd712ff842ed7e993b128..6cef41cdbecd928002f377b4a6a259ab1e70eddd 100644 --- a/src/metabase/models/metric.clj +++ b/src/metabase/models/metric.clj @@ -1,6 +1,5 @@ (ns metabase.models.metric - (:require [korma.core :as k] - [medley.core :as m] + (:require [medley.core :as m] [metabase.db :as db] [metabase.events :as events] (metabase.models [dependency :as dependency] @@ -108,8 +107,8 @@ {:pre [(integer? table-id) (keyword? state)]} (-> (if (= :all state) - (db/sel :many Metric :table_id table-id, (k/order :name :ASC)) - (db/sel :many Metric :table_id table-id, :is_active (if (= :active state) true false), (k/order :name :ASC))) + (db/select Metric, :table_id table-id, {:order-by [[:name :asc]]}) + (db/select Metric, :table_id table-id, :is_active (= :active state), {:order-by [[:name :asc]]})) (hydrate :creator)))) (defn update-metric! diff --git a/src/metabase/models/pulse.clj b/src/metabase/models/pulse.clj index 7137f0ca890b97752e939ceeb147d6d6c124a5bf..e8c562194fa2960c304edb37ba246398504c9b67 100644 --- a/src/metabase/models/pulse.clj +++ b/src/metabase/models/pulse.clj @@ -1,6 +1,5 @@ (ns metabase.models.pulse - (:require (korma [core :as k] - [db :as kdb]) + (:require [korma.db :as kdb] [medley.core :as m] [metabase.db :as db] [metabase.events :as events] @@ -22,20 +21,19 @@ (defn ^:hydrate channels "Return the `PulseChannels` associated with this PULSE." [{:keys [id]}] - (db/sel :many PulseChannel, :pulse_id id)) + (db/select PulseChannel, :pulse_id id)) (defn- pre-cascade-delete [{:keys [id]}] (db/cascade-delete! PulseCard :pulse_id id) (db/cascade-delete! PulseChannel :pulse_id id)) (defn ^:hydrate cards - "Return the `Cards` assoicated with this PULSE." + "Return the `Cards` associated with this PULSE." 
[{:keys [id]}] - (k/select Card - (k/join PulseCard (= :pulse_card.card_id :id)) - (k/fields :id :name :description :display) - (k/where {:pulse_card.pulse_id id}) - (k/order :pulse_card.position :asc))) + (db/select [Card :id :name :description :display] + (db/join [Card :id] [PulseCard :card_id]) + (db/qualify PulseCard :pulse_id) id + {:order-by [[(db/qualify PulseCard :position) :asc]]})) (u/strict-extend (class Pulse) i/IEntity @@ -50,6 +48,7 @@ ;; ## Persistence Functions +;; TODO - this should be renamed `update-pulse-cards!` (defn update-pulse-cards "Update the `PulseCards` for a given PULSE. CARD-IDS should be a definitive collection of *all* IDs of cards for the pulse in the desired order. @@ -67,8 +66,9 @@ ;; now just insert all of the cards that were given to us (when-not (empty? card-ids) (let [cards (map-indexed (fn [idx itm] {:pulse_id id :card_id itm :position idx}) card-ids)] - (k/insert PulseCard (k/values cards))))) + (db/insert-many! PulseCard cards)))) +;; TODO - Rename to `create-update-delete-channel!` (defn- create-update-delete-channel "Utility function which determines how to properly update a single pulse channel." [pulse-id new-channel existing-channel] @@ -90,6 +90,7 @@ ;; 4. NOT in channels, NOT in db-channels = NO-OP :else nil))) +;; TODO - Rename to `update-pulse-channels!` (defn update-pulse-channels "Update the `PulseChannels` for a given PULSE. CHANNELS should be a definitive collection of *all* of the channels for the the pulse. @@ -103,7 +104,7 @@ (coll? channels) (every? map? channels)]} (let [new-channels (group-by (comp keyword :channel_type) channels) - old-channels (group-by (comp keyword :channel_type) (db/sel :many PulseChannel :pulse_id id)) + old-channels (group-by (comp keyword :channel_type) (db/select PulseChannel :pulse_id id)) handle-channel #(create-update-delete-channel id (first (get new-channels %)) (first (get old-channels %)))] (assert (= 0 (count (get new-channels nil))) "Cannot have channels without a :channel_type attribute") ;; for each of our possible channel types call our handler function @@ -113,17 +114,18 @@ "Fetch a single `Pulse` by its ID value." [id] {:pre [(integer? id)]} - (-> (db/sel :one Pulse :id id) + (-> (Pulse id) (hydrate :creator :cards [:channels :recipients]) (m/dissoc-in [:details :emails]))) (defn retrieve-pulses "Fetch all `Pulses`." [] - (for [pulse (-> (db/sel :many Pulse (k/order :name :ASC)) + (for [pulse (-> (db/select Pulse, {:order-by [[:name :asc]]}) (hydrate :creator :cards [:channels :recipients]))] (m/dissoc-in pulse [:details :emails]))) +;; TODO - rename to `update-pulse!` (defn update-pulse "Update an existing `Pulse`, including all associated data such as: `PulseCards`, `PulseChannels`, and `PulseChannelRecipients`. @@ -140,7 +142,7 @@ ;; update the pulse itself (db/update! Pulse id, :name name) ;; update cards (only if they changed) - (when (not= cards (db/sel :many :field [PulseCard :card_id] :pulse_id id (k/order :position :asc))) + (when (not= cards (map :card_id (db/select [PulseCard :card_id], :pulse_id id, {:order-by [[:position :asc]]}))) (update-pulse-cards pulse cards)) ;; update channels (update-pulse-channels pulse channels) @@ -148,6 +150,7 @@ (->> (retrieve-pulse id) (events/publish-event :pulse-update)))) +;; TODO - rename to `create-pulse!` (defn create-pulse "Create a new `Pulse` by inserting it into the database along with all associated pieces of data such as: `PulseCards`, `PulseChannels`, and `PulseChannelRecipients`. 
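The `cards` hydration above (like the DashboardCard `series` hydration earlier) uses the new join helpers: `db/join` declares the join keys, and `db/qualify` prefixes a column with the entity's table name, replacing the hard-coded `:pulse_card.pulse_id`-style keywords from the korma version. Roughly, using only call shapes that appear elsewhere in this patch (`pulse-id` is a stand-in for the hydrated Pulse's `id`):

    (db/select [Card :id :name :description :display]
      (db/join [Card :id] [PulseCard :card_id])
      (db/qualify PulseCard :pulse_id) pulse-id
      {:order-by [[(db/qualify PulseCard :position) :asc]]})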
diff --git a/src/metabase/models/pulse_channel.clj b/src/metabase/models/pulse_channel.clj index 347df96fd51dae3325e1467b6139e3abff17c63a..55bd2ae05faa9d373b70d454136f83bc3aa5b070 100644 --- a/src/metabase/models/pulse_channel.clj +++ b/src/metabase/models/pulse_channel.clj @@ -1,7 +1,6 @@ (ns metabase.models.pulse-channel (:require [clojure.set :as set] [cheshire.generate :refer [add-encoder encode-map]] - [korma.core :as k] [medley.core :as m] [metabase.db :as db] (metabase.models [pulse-channel-recipient :refer [PulseChannelRecipient]] @@ -114,8 +113,10 @@ "Return the `PulseChannelRecipients` associated with this PULSE-CHANNEL." [{:keys [id details]}] (into (mapv (partial array-map :email) (:emails details)) - (db/sel :many [User :id :email :first_name :last_name] - (k/where {:id [in (k/subselect PulseChannelRecipient (k/fields :user_id) (k/where {:pulse_channel_id id}))]})))) + (db/select [User :id :email :first_name :last_name] + :id [:in {:select [:user_id] + :from [(db/entity->table-name PulseChannelRecipient)] + :where [:= :pulse_channel_id id]}]))) (defn- pre-cascade-delete [{:keys [id]}] (db/cascade-delete! PulseChannelRecipient :pulse_channel_id id)) @@ -132,7 +133,6 @@ ;; ## Persistence Functions -;; TODO - fix docstring !! (defn retrieve-scheduled-channels "Fetch all `PulseChannels` that are scheduled to run at a given time described by HOUR, WEEKDAY, MONTHDAY, and MONTHWEEK. @@ -157,21 +157,20 @@ :else "invalid") monthly-schedule-day-or-nil (when (= :other monthday) weekday)] - (k/select PulseChannel - (k/fields :id :pulse_id :schedule_type :channel_type) - (k/where (and (= :enabled true) - (or (= :schedule_type "hourly") - (and (= :schedule_type "daily") - (= :schedule_hour hour)) - (and (= :schedule_type "weekly") - (= :schedule_hour hour) - (= :schedule_day weekday)) - (and (= :schedule_type "monthly") - (= :schedule_hour hour) - (= :schedule_frame schedule-frame) - (or (= :schedule_day weekday) - ;; this is here specifically to allow for cases where day doesn't have to match - (= :schedule_day monthly-schedule-day-or-nil))))))))) + (db/select [PulseChannel :id :pulse_id :schedule_type :channel_type] + {:where [:and [:= :enabled true] + [:or [:= :schedule_type "hourly"] + [:and [:= :schedule_type "daily"] + [:= :schedule_hour hour]] + [:and [:= :schedule_type "weekly"] + [:= :schedule_hour hour] + [:= :schedule_day weekday]] + [:and [:= :schedule_type "monthly"] + [:= :schedule_hour hour] + [:= :schedule_frame schedule-frame] + [:or [:= :schedule_day weekday] + ;; this is here specifically to allow for cases where day doesn't have to match + [:= :schedule_day monthly-schedule-day-or-nil]]]]]}))) (defn update-recipients! "Update the `PulseChannelRecipients` for PULSE-CHANNEL. @@ -183,16 +182,19 @@ {:pre [(integer? id) (coll? user-ids) (every? integer? user-ids)]} - (let [recipients-old (set (db/sel :many :field [PulseChannelRecipient :user_id] :pulse_channel_id id)) + (let [recipients-old (set (db/select-field :user_id PulseChannelRecipient, :pulse_channel_id id)) recipients-new (set user-ids) recipients+ (set/difference recipients-new recipients-old) recipients- (set/difference recipients-old recipients-new)] (when (seq recipients+) (let [vs (map #(assoc {:pulse_channel_id id} :user_id %) recipients+)] - (k/insert PulseChannelRecipient (k/values vs)))) + (db/insert-many! PulseChannelRecipient vs))) (when (seq recipients-) - (k/delete PulseChannelRecipient (k/where {:pulse_channel_id id :user_id [in recipients-]}))))) + (db/delete! 
PulseChannelRecipient + :pulse_channel_id id + :user_id [:in recipients-])))) +;; TODO - rename -> `update-pulse-channel!` (defn update-pulse-channel "Updates an existing `PulseChannel` along with all related data associated with the channel such as `PulseChannelRecipients`." [{:keys [id channel_type enabled details recipients schedule_type schedule_day schedule_hour schedule_frame] @@ -220,6 +222,7 @@ (when (supports-recipients? channel_type) (update-recipients! id (or (get recipients-by-type true) []))))) +;; TODO - rename -> `create-pulse-channel!` (defn create-pulse-channel "Create a new `PulseChannel` along with all related data associated with the channel such as `PulseChannelRecipients`." [{:keys [channel_type details pulse_id recipients schedule_type schedule_day schedule_hour schedule_frame] diff --git a/src/metabase/models/raw_table.clj b/src/metabase/models/raw_table.clj index 809ac2fa54196ba0f4d87a9efada84ff20880b84..f0a219b6af6a393ffeb0570a7606b1d41f121d1e 100644 --- a/src/metabase/models/raw_table.clj +++ b/src/metabase/models/raw_table.clj @@ -1,6 +1,5 @@ (ns metabase.models.raw-table - (:require [korma.core :as k] - [metabase.db :as db] + (:require [metabase.db :as db] [metabase.models.interface :as i] [metabase.models.raw-column :refer [RawColumn]] [metabase.util :as u])) @@ -30,14 +29,15 @@ (defn ^:hydrate columns "Return the `RawColumns` belonging to RAW-TABLE." [{:keys [id]}] - (db/sel :many RawColumn :raw_table_id id, (k/order :name :ASC))) + (db/select RawColumn, :raw_table_id id, {:order-by [[:name :asc]]})) (defn active-tables "Return the active `RawColumns` belonging to RAW-TABLE." [database-id] - (db/sel :many RawTable :database_id database-id, :active true, (k/order :schema :ASC), (k/order :name :ASC))) + (db/select RawTable, :database_id database-id, :active true, {:order-by [[:schema :asc] + [:name :asc]]})) (defn active-columns "Return the active `RawColumns` belonging to RAW-TABLE." [{:keys [id]}] - (db/sel :many RawColumn :raw_table_id id, :active true, (k/order :name :ASC))) + (db/select RawColumn, :raw_table_id id, :active true, {:order-by [[:name :asc]]})) diff --git a/src/metabase/models/revision.clj b/src/metabase/models/revision.clj index aae8986eff85658bb318ee72129a42446a980d75..7d75aaf05e544c9c49ea296324cb8ccaf67f7b29 100644 --- a/src/metabase/models/revision.clj +++ b/src/metabase/models/revision.clj @@ -1,13 +1,12 @@ (ns metabase.models.revision (:require [clojure.data :as data] - [korma.core :as k] + [korma.db :as kdb] [metabase.db :as db] (metabase.models [hydrate :refer [hydrate]] [interface :as i] [user :refer [User]]) [metabase.models.revision.diff :refer [diff-string]] - [metabase.util :as u] - [korma.db :as kdb])) + [metabase.util :as u])) (def ^:const max-revisions "Maximum number of revisions to keep for each individual object. After this limit is surpassed, the oldest revisions will be deleted." @@ -60,16 +59,12 @@ ;;; # Revision Entity -(defn- post-select [{:keys [message] :as revision}] - (assoc revision :message (u/jdbc-clob->str message))) - (i/defentity Revision :revision) (u/strict-extend (class Revision) i/IEntity (merge i/IEntityDefaults - {:types (constantly {:object :json}) - :post-select post-select + {:types (constantly {:object :json, :message :clob}) :pre-insert (u/rpartial assoc :timestamp (u/new-sql-timestamp)) :pre-update (fn [& _] (throw (Exception. "You cannot update a Revision!")))})) @@ -93,7 +88,7 @@ [entity id] {:pre [(i/metabase-entity? entity) (integer? 
id)]} - (db/sel :many Revision :model (:name entity), :model_id id, (k/order :id :DESC))) + (db/select Revision, :model (:name entity), :model_id id, {:order-by [[:id :desc]]})) (defn revisions+details "Fetch `revisions` for ENTITY with ID and add details." @@ -109,7 +104,7 @@ "Delete old revisions of ENTITY with ID when there are more than `max-revisions` in the DB." [entity id] {:pre [(i/metabase-entity? entity) (integer? id)]} - (when-let [old-revisions (seq (drop max-revisions (db/sel :many :id Revision, :model (:name entity), :model_id id, (k/order :timestamp :DESC))))] + (when-let [old-revisions (seq (drop max-revisions (map :id (db/select [Revision :id], :model (:name entity), :model_id id, {:order-by [[:timestamp :desc]]}))))] (db/cascade-delete! Revision :id [:in old-revisions]))) (defn push-revision @@ -149,12 +144,12 @@ (integer? user-id) (db/exists? User :id user-id) (integer? revision-id)]} - (let [serialized-instance (db/sel :one :field [Revision :object] :model (:name entity), :model_id id, :id revision-id)] + (let [serialized-instance (db/select-one-field :object Revision, :model (:name entity), :model_id id, :id revision-id)] (kdb/transaction ;; Do the reversion of the object (revert-to-revision entity id user-id serialized-instance) ;; Push a new revision to record this change - (let [last-revision (db/sel :one Revision, :model (:name entity), :model_id id, (k/order :id :DESC)) + (let [last-revision (Revision :model (:name entity), :model_id id, {:order-by [[:id :desc]]}) new-revision (db/insert! Revision :model (:name entity) :model_id id diff --git a/src/metabase/models/segment.clj b/src/metabase/models/segment.clj index 6e3a30065baaf626b33472deb6faeb1d251dfdfe..d79f792a36da340f982f969c41de0b2ec3b79b61 100644 --- a/src/metabase/models/segment.clj +++ b/src/metabase/models/segment.clj @@ -1,6 +1,5 @@ (ns metabase.models.segment - (:require [korma.core :as k] - [medley.core :as m] + (:require [medley.core :as m] [metabase.db :as db] [metabase.events :as events] (metabase.models [hydrate :refer [hydrate]] @@ -82,7 +81,7 @@ "Fetch a single `Segment` by its ID value." [id] {:pre [(integer? id)]} - (-> (db/sel :one Segment :id id) + (-> (Segment id) (hydrate :creator))) (defn retrieve-segments @@ -93,8 +92,8 @@ ([table-id state] {:pre [(integer? table-id) (keyword? state)]} (-> (if (= :all state) - (db/sel :many Segment :table_id table-id (k/order :name :ASC)) - (db/sel :many Segment :table_id table-id :is_active (if (= :active state) true false) (k/order :name :ASC))) + (db/select Segment, :table_id table-id, {:order-by [[:name :asc]]}) + (db/select Segment, :table_id table-id, :is_active (= :active state), {:order-by [[:name :asc]]})) (hydrate :creator)))) (defn update-segment diff --git a/src/metabase/models/session.clj b/src/metabase/models/session.clj index a30ef8958e5dd3b99044d52fdbaae61bea69374e..5ab0b19701a353e9ca4d5e97e14a56dbbf8f9fc8 100644 --- a/src/metabase/models/session.clj +++ b/src/metabase/models/session.clj @@ -1,12 +1,10 @@ (ns metabase.models.session - (:require [korma.core :as k] - [metabase.db :as db] + (:require [metabase.db :as db] (metabase.models [interface :as i] [user :refer [User]]) [metabase.util :as u])) -(i/defentity Session :core_session - (k/belongs-to User {:fk :user_id})) +(i/defentity Session :core_session) (defn- pre-insert [session] (assoc session :created_at (u/new-sql-timestamp))) @@ -20,6 +18,6 @@ (defn first-session-for-user "Retrieves the first Session `:id` for a given user (if available), or nil otherwise." 
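;; Note: db/select-one-id returns just the matching row's :id (here the session token string, hence
;; the ^String hint added below), replacing the old db/sel :one :field [Session :id] form, e.g. with
;; a placeholder user ID:
;;   (first-session-for-user 1) ;=> the first session id for user 1, or nil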
- [user-id] + ^String [user-id] {:pre [(integer? user-id)]} - (db/sel :one :field [Session :id], :user_id user-id, (k/order :created_at :ASC))) + (db/select-one-id Session, :user_id user-id, {:order-by [[:created_at :asc]]})) diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj index 3818c740526a27352fe21fcea441de1272397d54..1c287a82f7befcfd462dcdf8140e2b90cead6b0d 100644 --- a/src/metabase/models/setting.clj +++ b/src/metabase/models/setting.clj @@ -1,8 +1,8 @@ (ns metabase.models.setting (:refer-clojure :exclude [get set]) - (:require [clojure.string :as s] + (:require (clojure [string :as s] + [walk :as walk]) [environ.core :as env] - [korma.core :as k] [metabase.config :as config] [metabase.db :as db] [metabase.events :as events] @@ -71,7 +71,7 @@ (restore-cache-if-needed) (if (contains? @cached-setting->value k) (@cached-setting->value k) - (let [v (db/sel :one :field [Setting :value] :key (name k))] + (let [v (db/select-one-field :value Setting, :key (name k))] (swap! cached-setting->value assoc k v) v))) @@ -101,12 +101,11 @@ [k v] {:pre [(keyword? k) (string? v)]} (if (get k) - (k/update Setting - (k/set-fields {:value v}) - (k/where {:key (name k)})) - (k/insert Setting - (k/values {:key (name k) - :value v}))) + (db/update-where! Setting {:key (name k)} + :value v) + (db/insert! Setting + :key (name k) + :value v)) (restore-cache-if-needed) (swap! cached-setting->value assoc k v) v) @@ -255,8 +254,7 @@ (defn- restore-cache-if-needed [] (when-not @cached-setting->value (db/setup-db-if-needed) - (reset! cached-setting->value (into {} (for [{k :key, v :value} (db/sel :many Setting)] - {(keyword k) v}))))) + (reset! cached-setting->value (walk/keywordize-keys (db/select-field->field :key :value Setting))))) (def ^:private cached-setting->value "Map of setting name (keyword) -> string value, as they exist in the DB." diff --git a/src/metabase/models/table.clj b/src/metabase/models/table.clj index 0834b75d68ccdc474f96661d518e6bcb6a41024a..267e8a6c62fecf6a975c37bbd82fb86f205b9e1b 100644 --- a/src/metabase/models/table.clj +++ b/src/metabase/models/table.clj @@ -1,6 +1,5 @@ (ns metabase.models.table - (:require [korma.core :as k] - [metabase.db :as db] + (:require [metabase.db :as db] (metabase.models [common :as common] [database :refer [Database]] [field :refer [Field]] @@ -32,7 +31,7 @@ (defn ^:hydrate fields "Return the `FIELDS` belonging to TABLE." [{:keys [id]}] - (db/sel :many Field :table_id id, :visibility_type [not= "retired"], (k/order :position :ASC) (k/order :name :ASC))) + (db/select Field, :table_id id, :visibility_type [:not= "retired"], {:order-by [[:position :asc] [:name :asc]]})) (defn ^:hydrate metrics "Retrieve the metrics for TABLE." @@ -48,16 +47,17 @@ "Return the `FieldValues` for all `Fields` belonging to TABLE." {:hydrate :field_values, :arglists '([table])} [{:keys [id]}] - (let [field-ids (db/sel :many :id Field, :table_id id, :visibility_type "normal" - (k/order :position :asc) - (k/order :name :asc))] - (db/sel :many :field->field [FieldValues :field_id :values] :field_id [in field-ids]))) + (let [field-ids (db/select-ids Field + :table_id id + :visibility_type "normal" + {:order-by [[:position :asc] [:name :asc]]})] + (db/select-field->field :field_id :values FieldValues, :field_id [:in field-ids]))) (defn pk-field-id "Return the ID of the primary key `Field` for TABLE." 
{:hydrate :pk_field, :arglists '([table])} [{:keys [id]}] - (db/sel :one :id Field, :table_id id, :special_type "id", :visibility_type [not-in ["sensitive" "retired"]])) + (db/select-one-id Field, :table_id id, :special_type "id", :visibility_type [:not-in ["sensitive" "retired"]])) (def ^{:arglists '([table])} database "Return the `Database` associated with this `Table`." @@ -80,22 +80,20 @@ "Retrieve the `Database` ID for the given table-id." [table-id] {:pre [(integer? table-id)]} - (db/sel :one :field [Table :db_id] :id table-id)) + (db/select-one-field :db_id Table, :id table-id)) (defn retire-tables "Retire all `Tables` in the list of TABLE-IDs along with all of each tables `Fields`." [table-ids] - {:pre [(set? table-ids) - (every? integer? table-ids)]} - ;; retire the tables - (k/update Table - (k/where {:id [in table-ids]}) - (k/set-fields {:active false})) - ;; retire the fields of retired tables - (k/update Field - (k/where {:table_id [in table-ids]}) - (k/set-fields {:visibility_type "retired"}))) + {:pre [(set? table-ids) (every? integer? table-ids)]} + (when (seq table-ids) + ;; retire the tables + (db/update-where! Table {:id [:in table-ids]} + :active false) + ;; retire the fields of retired tables + (db/update-where! Field {:table_id [:in table-ids]} + :visibility_type "retired"))) (defn update-table "Update `Table` with the data from TABLE-DEF." diff --git a/src/metabase/models/user.clj b/src/metabase/models/user.clj index 402a87e484864af0dcf82d52fbdc621cbc4490fe..e19d633f27f62831ed29875733a47386d36ad9ff 100644 --- a/src/metabase/models/user.clj +++ b/src/metabase/models/user.clj @@ -1,7 +1,6 @@ (ns metabase.models.user (:require [clojure.string :as s] [cemerick.friend.credentials :as creds] - [korma.core :as k] [metabase.db :as db] [metabase.email.messages :as email] (metabase.models [interface :as i] @@ -120,4 +119,4 @@ (defn instance-created-at "The date the instance was created. We use the :date_joined of the first user to determine this." [] - (db/sel :one :field [User :date_joined] (k/order :date_joined :ASC) (k/limit 1))) + (db/select-one-field :date_joined User, {:order-by [[:date_joined :asc]]})) diff --git a/src/metabase/query_processor.clj b/src/metabase/query_processor.clj index 077309d08e69d01229b309eb20a4eec1a639fa28..a66860d3479c279acb4d98c214f0e1c35847a21f 100644 --- a/src/metabase/query_processor.clj +++ b/src/metabase/query_processor.clj @@ -2,7 +2,6 @@ "Preprocessor that does simple transformations to all incoming queries, simplifing the driver-specific implementations." (:require [clojure.walk :as walk] [clojure.tools.logging :as log] - [korma.core :as k] [medley.core :as m] schema.utils [swiss.arrows :refer [<<-]] @@ -146,7 +145,7 @@ etc, into their concrete details which are necessary for query formation by the executing driver." [qp] (fn [{database-id :database, :as query}] - (let [resolved-db (db/sel :one :fields [database/Database :name :id :engine :details] :id database-id) + (let [resolved-db (db/select-one [database/Database :name :id :engine :details], :id database-id) query (if-not (mbql-query? query) query ;; for MBQL queries we expand first, then resolve @@ -204,12 +203,12 @@ (defn- fields-for-source-table "Return the all fields for SOURCE-TABLE, for use as an implicit `:fields` clause." 
[{source-table-id :id, :as source-table}] - (for [field (db/sel :many :fields [Field :name :display_name :base_type :special_type :visibility_type :display_name :table_id :id :position :description] - :table_id source-table-id - :visibility_type [not-in ["sensitive" "retired"]] - :parent_id nil - (k/order :position :asc) - (k/order :id :desc))] + (for [field (db/select [Field :name :display_name :base_type :special_type :visibility_type :table_id :id :position :description] + :table_id source-table-id + :visibility_type [:not-in ["sensitive" "retired"]] + :parent_id nil + {:order-by [[:position :asc] + [:id :desc]]})] (let [field (resolve/resolve-table (map->Field (resolve/rename-mb-field-keys field)) {source-table-id source-table})] (if (datetime-field? field) @@ -429,6 +428,12 @@ (assert (sequential? (:columns <>))) (assert (every? u/string-or-keyword? (:columns <>)))))))) +(defn query->remark + "Genarate an approparite REMARK to be prepended to a query to give DBAs additional information about the query being executed. + See documentation for `mbql->native` and [issue #2386](https://github.com/metabase/metabase/issues/2386) for more information." + ^String [{{:keys [executed-by uuid query-hash query-type]} :info, :as info}] + {:pre [(map? info)]} + (format "Metabase:: userID: %s executionID: %s queryType: %s queryHash: %s" executed-by uuid query-type query-hash)) (defn- run-query "The end of the QP middleware which actually executes the query on the driver. @@ -496,23 +501,22 @@ ;; TODO: it probably makes sense to throw an error or return a failure response here if we can't get a driver (let [driver (driver/database-id->driver (:database query))] (binding [*driver* driver] - (let [driver-process-in-context (partial driver/process-query-in-context driver)] - ((<<- wrap-catch-exceptions - pre-add-settings - pre-expand-macros - pre-expand-resolve - driver-process-in-context - post-add-row-count-and-status - post-format-rows - pre-add-implicit-fields - pre-add-implicit-breakout-order-by - cumulative-sum - cumulative-count - limit - post-check-results-format - pre-log-query - guard-multiple-calls - run-query) (assoc query :driver driver)))))) + ((<<- wrap-catch-exceptions + pre-add-settings + pre-expand-macros + pre-expand-resolve + (driver/process-query-in-context driver) + post-add-row-count-and-status + post-format-rows + pre-add-implicit-fields + pre-add-implicit-breakout-order-by + cumulative-sum + cumulative-count + limit + post-check-results-format + pre-log-query + guard-multiple-calls + run-query) (assoc query :driver driver))))) ;;; +----------------------------------------------------------------------------------------------------+ @@ -521,6 +525,13 @@ (declare query-fail query-complete save-query-execution) +(defn- assert-valid-query-result [query-result] + (when-not (contains? query-result :status) + (throw (Exception. "invalid response from database driver. no :status provided"))) + (when (= :failed (:status query-result)) + (log/error (u/pprint-to-str 'red query-result)) + (throw (Exception. (str (get query-result :error "general error")))))) + (defn dataset-query "Process and run a json based dataset query and return results. @@ -534,11 +545,12 @@ Possible caller-options include: - :executed_by [int] (user_id of caller)" + :executed_by [int] (user_id of caller)" {:arglists '([query options])} [query {:keys [executed_by]}] {:pre [(integer? 
executed_by)]} - (let [query-execution {:uuid (.toString (java.util.UUID/randomUUID)) + (let [query-uuid (.toString (java.util.UUID/randomUUID)) + query-execution {:uuid query-uuid :executor_id executed_by :json_query query :query_id nil @@ -553,15 +565,15 @@ :result_data "{}" :raw_query "" :additional_info "" - :start_time_millis (System/currentTimeMillis)}] + :start_time_millis (System/currentTimeMillis)} + query (assoc query :info {:executed-by executed_by + :uuid query-uuid + :query-hash (hash query) + :query-type (if (mbql-query? query) "MBQL" "native")})] (try - (let [query-result (process-query query)] - (when-not (contains? query-result :status) - (throw (Exception. "invalid response from database driver. no :status provided"))) - (when (= :failed (:status query-result)) - (log/error (u/pprint-to-str 'red query-result)) - (throw (Exception. (str (get query-result :error "general error"))))) - (query-complete query-execution query-result)) + (let [result (process-query query)] + (assert-valid-query-result result) + (query-complete query-execution result)) (catch Throwable e (log/error (u/format-color 'red "Query failure: %s" (.getMessage e))) (query-fail query-execution (.getMessage e)))))) @@ -577,7 +589,7 @@ (-> query-execution (dissoc :start_time_millis) (merge updates) - (save-query-execution) + save-query-execution (dissoc :raw_query :result_rows :version) ;; this is just for the response for clien (assoc :error error-message @@ -597,7 +609,7 @@ (:start_time_millis query-execution)) :result_rows (get query-result :row_count 0)) (dissoc :start_time_millis) - (save-query-execution) + save-query-execution ;; at this point we've saved and we just need to massage things into our final response format (dissoc :error :raw_query :result_rows :version) (merge query-result))) diff --git a/src/metabase/query_processor/annotate.clj b/src/metabase/query_processor/annotate.clj index 1fe64aa2fa628d74f074be8300241caea209a41d..ca427982302a88b9a9b0eca3a317948381c5d847 100644 --- a/src/metabase/query_processor/annotate.clj +++ b/src/metabase/query_processor/annotate.clj @@ -213,14 +213,14 @@ ;; Fetch the ForeignKey objects whose origin is in the returned Fields, create a map of origin-field-id->destination-field-id ([fields fk-ids] (when (seq fk-ids) - (fk-field->dest-fn fields fk-ids (db/sel :many :field->field [Field :id :fk_target_field_id] - :id [in fk-ids] - :fk_target_field_id [not= nil])))) + (fk-field->dest-fn fields fk-ids (db/select-id->field :fk_target_field_id Field + :id [:in fk-ids] + :fk_target_field_id [:not= nil])))) ;; Fetch the destination Fields referenced by the ForeignKeys ([fields fk-ids id->dest-id] (when (seq id->dest-id) - (fk-field->dest-fn fields fk-ids id->dest-id (db/sel :many :id->fields [Field :id :name :display_name :table_id :description :base_type :special_type :visibility_type] - :id [in (vals id->dest-id)])))) + (fk-field->dest-fn fields fk-ids id->dest-id (u/key-by :id (db/select [Field :id :name :display_name :table_id :description :base_type :special_type :visibility_type] + :id [:in (vals id->dest-id)]))))) ;; Return a function that will return the corresponding destination Field for a given Field ([fields fk-ids id->dest-id dest-id->field] (fn [{:keys [id]}] @@ -233,8 +233,9 @@ (for [field fields] (let [{:keys [table_id], :as dest-field} (field->dest field)] (assoc field - :target dest-field - :extra_info (if table_id {:target_table_id table_id} {})))))) + :target (when dest-field + (into {} dest-field)) + :extra_info (if table_id {:target_table_id 
table_id} {})))))) (defn- resolve-sort-and-format-columns "Collect the Fields referenced in QUERY, sort them according to the rules at the top diff --git a/src/metabase/query_processor/macros.clj b/src/metabase/query_processor/macros.clj index eb742a8eb5cfaf3bf0e763d1e7923e7c364f1158..c1e6c337d02e8866bc4e0ff09dd58c8c56a08191 100644 --- a/src/metabase/query_processor/macros.clj +++ b/src/metabase/query_processor/macros.clj @@ -18,8 +18,7 @@ form# (throw (Exception. (format ~(format "%s failed: invalid clause: %%s" fn-name) form#))))))) (defparser segment-parse-filter-subclause - ["SEGMENT" (segment-id :guard integer?)] (-> (db/sel :one :field [metabase.models.segment/Segment :definition] :id segment-id) - :filter) + ["SEGMENT" (segment-id :guard integer?)] (:filter (db/select-one-field :definition 'Segment, :id segment-id)) subclause subclause) (defparser segment-parse-filter @@ -46,8 +45,8 @@ query-dict ;; we have an aggregation clause, so lets see if we are using a METRIC (if-let [metric-def (match (get-in query-dict [:query :aggregation]) - ["METRIC" (metric-id :guard integer?)] (db/sel :one :field [metabase.models.metric/Metric :definition] :id metric-id) - _ nil)] + ["METRIC" (metric-id :guard integer?)] (db/select-one-field :definition 'Metric, :id metric-id) + _ nil)] ;; we have a metric, so merge its definition into the existing query-dict (-> query-dict (assoc-in [:query :aggregation] (:aggregation metric-def)) diff --git a/src/metabase/query_processor/resolve.clj b/src/metabase/query_processor/resolve.clj index 1cdee6403f867e7497eb362800aac7a4022db99d..41441f6a4e9d07bf1568293f4b7bf8ea4007c313 100644 --- a/src/metabase/query_processor/resolve.clj +++ b/src/metabase/query_processor/resolve.clj @@ -25,14 +25,14 @@ (defn rename-mb-field-keys "Rename the keys in a Metabase `Field` to match the format of those in Query Expander `Fields`." [field] - (set/rename-keys field {:id :field-id - :name :field-name - :display_name :field-display-name - :special_type :special-type - :visibility_type :visibility-type - :base_type :base-type - :table_id :table-id - :parent_id :parent-id})) + (set/rename-keys (into {} field) {:id :field-id + :name :field-name + :display_name :field-display-name + :special_type :special-type + :visibility_type :visibility-type + :base_type :base-type + :table_id :table-id + :parent_id :parent-id})) ;;; # ------------------------------------------------------------ IRESOLVE PROTOCOL ------------------------------------------------------------ @@ -183,8 +183,8 @@ ;; If there are no more Field IDs to resolve we're done. expanded-query-dict ;; Otherwise fetch + resolve the Fields in question - (let [fields (->> (db/sel :many :id->fields [field/Field :name :display_name :base_type :special_type :visibility_type :table_id :parent_id :description] - :id [in field-ids]) + (let [fields (->> (u/key-by :id (db/select [field/Field :name :display_name :base_type :special_type :visibility_type :table_id :parent_id :description :id] + :id [:in field-ids])) (m/map-vals rename-mb-field-keys) (m/map-vals #(assoc % :parent (when-let [parent-id (:parent-id %)] (map->FieldPlaceholder {:field-id parent-id})))))] @@ -204,15 +204,19 @@ (when-not (seq fk-field-ids) (throw (Exception. 
"You must use the fk-> form to reference Fields that are not part of the source_table."))) (let [ ;; Build a map of source table FK field IDs -> field names - fk-field-id->field-name (db/sel :many :id->field [field/Field :name], :id [in fk-field-ids], :table_id source-table-id, :special_type "fk") + fk-field-id->field-name (db/select-id->field :name field/Field + :id [:in fk-field-ids] + :table_id source-table-id + :special_type "fk") ;; Build a map of join table PK field IDs -> source table FK field IDs - pk-field-id->fk-field-id (db/sel :many :field->field [field/Field :fk_target_field_id :id], - :id [in (set (keys fk-field-id->field-name))] - :fk_target_field_id [not= nil]) + pk-field-id->fk-field-id (db/select-field->id :fk_target_field_id field/Field + :id [:in (keys fk-field-id->field-name)] + :fk_target_field_id [:not= nil]) ;; Build a map of join table ID -> PK field info - join-table-id->pk-field (let [pk-fields (db/sel :many :fields [field/Field :id :table_id :name], :id [in (set (keys pk-field-id->fk-field-id))])] + join-table-id->pk-field (let [pk-fields (db/select [field/Field :id :table_id :name] + :id [:in (keys pk-field-id->fk-field-id)])] (zipmap (map :table_id pk-fields) pk-fields))] ;; Now build the :join-tables clause @@ -232,7 +236,8 @@ [{{source-table-id :source-table} :query, :keys [table-ids fk-field-ids], :as expanded-query-dict}] {:pre [(integer? source-table-id)]} (let [table-ids (conj table-ids source-table-id) - table-id->table (db/sel :many :id->fields [Table :schema :name :id], :id [in table-ids]) + table-id->table (u/key-by :id (db/select [Table :schema :name :id] + :id [:in table-ids])) join-tables (vals (dissoc table-id->table source-table-id))] (as-> expanded-query-dict <> (assoc-in <> [:query :source-table] (or (table-id->table source-table-id) diff --git a/src/metabase/sample_data.clj b/src/metabase/sample_data.clj index 135c4a383b321616ae2e8bf790c135b0405b0699..56abf6989ab3bee6fe1d97f53b680cc9f6f9fd92 100644 --- a/src/metabase/sample_data.clj +++ b/src/metabase/sample_data.clj @@ -11,8 +11,10 @@ (def ^:private ^:const sample-dataset-name "Sample Dataset") (def ^:private ^:const sample-dataset-filename "sample-dataset.db.mv.db") -(defn add-sample-dataset! [] - (when-not (db/sel :one Database :is_sample true) +(defn add-sample-dataset! + "Add the sample dataset as a Metabase DB if it doesn't already exist." + [] + (when-not (db/exists? Database :is_sample true) (try (log/info "Loading sample dataset...") (let [resource (io/resource sample-dataset-filename)] @@ -29,12 +31,14 @@ :is_sample true)] (sync-database/sync-database! db)))) (catch Throwable e - (log/error (u/format-color 'red "Failed to load sample dataset: %s" (.getMessage e))))))) + (log/error (u/format-color 'red "Failed to load sample dataset: %s\n%s" (.getMessage e) (u/pprint-to-str (u/filtered-stacktrace e)))))))) -(defn update-sample-dataset-if-needed! [] +(defn update-sample-dataset-if-needed! + "Re-sync the sample dataset DB if it exists." + [] ;; TODO - it would be a bit nicer if we skipped this when the data hasn't changed - (when-let [db (db/sel :one Database :is_sample true)] + (when-let [db (Database :is_sample true)] (try (sync-database/sync-database! 
db) (catch Throwable e - (log/error (u/format-color 'red "Failed to update sample dataset: %s" (.getMessage e))))))) + (log/error (u/format-color 'red "Failed to update sample dataset: %s\n%s" (.getMessage e) (u/pprint-to-str (u/filtered-stacktrace e)))))))) diff --git a/src/metabase/sync_database.clj b/src/metabase/sync_database.clj index 0617087e17dc29142860d45525b664a13b1457f4..ee9e0406b985388090f4d110a5f0d18ec86c4af4 100644 --- a/src/metabase/sync_database.clj +++ b/src/metabase/sync_database.clj @@ -85,7 +85,9 @@ (when full-sync? (analyze/analyze-data-shape-for-tables! driver database))) - (events/publish-event :database-sync-end {:database_id (:id database) :custom_id tracking-hash :running_time (int (/ (- (System/nanoTime) start-time) 1000000.0))}) ; convert to ms + (events/publish-event :database-sync-end {:database_id (:id database) + :custom_id tracking-hash + :running_time (int (/ (- (System/nanoTime) start-time) 1000000.0))}) ; convert to ms (log/info (u/format-color 'magenta "Finished syncing %s database '%s'. (%s)" (name driver) (:name database) (u/format-nanoseconds (- (System/nanoTime) start-time)))))) @@ -94,11 +96,11 @@ (let [start-time (System/nanoTime)] (log/info (u/format-color 'magenta "Syncing table '%s' from %s database '%s'..." (:display_name table) (name driver) (:name database))) - (binding [qp/*disable-qp-logging* true + (binding [qp/*disable-qp-logging* true db/*disable-db-logging* true] ;; if the Table has a RawTable backing it then do an introspection and sync - (when-let [raw-tbl (db/sel :one raw-table/RawTable :id (:raw_table_id table))] - (introspect/introspect-raw-table-and-update! driver database raw-tbl) + (when-let [raw-table (raw-table/RawTable (:raw_table_id table))] + (introspect/introspect-raw-table-and-update! driver database raw-table) (sync/update-data-models-for-table! table)) ;; if this table comes from a dynamic schema db then run that sync process now diff --git a/src/metabase/sync_database/analyze.clj b/src/metabase/sync_database/analyze.clj index 6903da2c3d599811ea2f12929984d7d9d75dacc1..a94d30c4f017782acbab7f14245ee03c76ebbe3b 100644 --- a/src/metabase/sync_database/analyze.clj +++ b/src/metabase/sync_database/analyze.clj @@ -4,7 +4,6 @@ [clojure.string :as s] [clojure.tools.logging :as log] [cheshire.core :as json] - [korma.core :as k] [schema.core :as schema] [metabase.db :as db] [metabase.db.metadata-queries :as queries] @@ -153,6 +152,7 @@ (defn make-analyze-table "Make a generic implementation of `analyze-table`." + {:style/indent 1} [driver & {:keys [field-avg-length-fn field-percent-urls-fn] :or {field-avg-length-fn (partial driver/default-field-avg-length driver) field-percent-urls-fn (partial driver/default-field-percent-urls driver)}}] @@ -175,7 +175,7 @@ (defn analyze-table-data-shape! "Analyze the data shape for a single `Table`." 
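;; Note: in the hunk below, db/select-ids replaces (set (db/sel :many :id ...)), so it yields the set
;; of matching Field IDs directly, using the same keyword-operator conditions, e.g. with a placeholder
;; table ID:
;;   (db/select-ids field/Field, :table_id 10, :visibility_type [:not= "retired"], :last_analyzed nil)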
[driver {table-id :id, :as table}] - (let [new-field-ids (set (db/sel :many :id field/Field, :table_id table-id, :visibility_type [not= "retired"], :last_analyzed nil))] + (let [new-field-ids (db/select-ids field/Field, :table_id table-id, :visibility_type [:not= "retired"], :last_analyzed nil)] ;; TODO: this call should include the database (when-let [table-stats (u/prog1 (driver/analyze-table driver table new-field-ids) (when <> @@ -198,10 +198,9 @@ (field-values/clear-field-values id)))) ;; update :last_analyzed for all fields in the table - (k/update field/Field - (k/set-fields {:last_analyzed (u/new-sql-timestamp)}) - (k/where {:table_id table-id - :visibility_type [not= "retired"]})))) + (db/update-where! field/Field {:table_id table-id + :visibility_type [:not= "retired"]} + :last_analyzed (u/new-sql-timestamp)))) (defn analyze-data-shape-for-tables! "Perform in-depth analysis on the data shape for all `Tables` in a given DATABASE. @@ -211,7 +210,7 @@ (log/info (u/format-color 'blue "Analyzing data in %s database '%s' (this may take a while) ..." (name driver) (:name database))) (let [start-time-ns (System/nanoTime) - tables (db/sel :many table/Table :db_id database-id, :active true) + tables (db/select table/Table, :db_id database-id, :active true) tables-count (count tables) finished-tables-count (atom 0)] (doseq [{table-name :name, :as table} tables] diff --git a/src/metabase/sync_database/introspect.clj b/src/metabase/sync_database/introspect.clj index fba9bcffd62045e69c8a85aef95ee9727c95cfd5..f05821924d9e40c355a24e621aec70579df3a0b9 100644 --- a/src/metabase/sync_database/introspect.clj +++ b/src/metabase/sync_database/introspect.clj @@ -2,8 +2,7 @@ "Functions which handle the raw sync process." (:require [clojure.set :as set] [clojure.tools.logging :as log] - (korma [core :as k] - [db :as kdb]) + [korma.db :as kdb] [schema.core :as schema] (metabase [db :as db] [driver :as driver]) @@ -24,16 +23,15 @@ [{table-id :id, database-id :database_id, :as table} fks] {:pre [(integer? table-id) (integer? database-id)]} (kdb/transaction - ;; start by simply resetting all fks and then we'll add them back as defined - (k/update RawColumn - (k/where {:raw_table_id table-id}) - (k/set-fields {:fk_target_column_id nil})) + ;; start by simply resetting all fks and then we'll add them back as defined + (db/update-where! RawColumn {:raw_table_id table-id} + :fk_target_column_id nil) ;; now lookup column-ids and set the fks on this table as needed (doseq [{:keys [fk-column-name dest-column-name dest-table]} fks] - (when-let [source-column-id (db/sel :one :field [RawColumn :id], :raw_table_id table-id, :name fk-column-name)] - (when-let [dest-table-id (db/sel :one :field [RawTable :id], :database_id database-id, :schema (:schema dest-table), :name (:name dest-table))] - (when-let [dest-column-id (db/sel :one :id RawColumn, :raw_table_id dest-table-id, :name dest-column-name)] + (when-let [source-column-id (db/select-one-id RawColumn, :raw_table_id table-id, :name fk-column-name)] + (when-let [dest-table-id (db/select-one-id RawTable, :database_id database-id, :schema (:schema dest-table), :name (:name dest-table))] + (when-let [dest-column-id (db/select-one-id RawColumn, :raw_table_id dest-table-id, :name dest-column-name)] (log/debug (u/format-color 'cyan "Marking foreign key '%s.%s' -> '%s.%s'." (named-table table) fk-column-name (named-table dest-table) dest-column-name)) (db/update! 
RawColumn source-column-id :fk_target_column_id dest-column-id))))))) @@ -44,11 +42,10 @@ [{:keys [id]} columns] {:pre [(integer? id) (coll? columns) (every? map? columns)]} (kdb/transaction - (let [existing-columns (into {} (for [{:keys [name] :as column} (db/sel :many :fields [RawColumn :id :name] :raw_table_id id)] - {name column}))] + (let [raw-column-name->id (db/select-field->id :name RawColumn, :raw_table_id id)] ;; deactivate any columns which were removed - (doseq [[column-name {column-id :id}] (sort-by :name existing-columns)] + (doseq [[column-name column-id] (sort-by first raw-column-name->id)] (when-not (some #(= column-name (:name %)) columns) (log/debug (u/format-color 'cyan "Marked column %s as inactive." column-name)) (db/update! RawColumn column-id, :active false))) @@ -59,7 +56,7 @@ {:base-type base-type} (when special-type {:special-type special-type})) is_pk (true? pk?)] - (if-let [{column-id :id} (get existing-columns column-name)] + (if-let [column-id (get raw-column-name->id column-name)] ;; column already exists, update it (db/update! RawColumn column-id :name column-name @@ -105,21 +102,20 @@ {:pre [(coll? table-ids) (every? integer? table-ids)]} (let [table-ids (filter identity table-ids)] (kdb/transaction - ;; disable the tables - (k/update RawTable - (k/where {:id [in table-ids]}) - (k/set-fields {:active false})) - ;; whenever a table is disabled we need to disable all of its fields too (and remove fk references) - (k/update RawColumn - (k/where {:raw_table_id [in table-ids]}) - (k/set-fields {:active false, :fk_target_column_id nil}))))) + ;; disable the tables + (db/update-where! RawTable {:id [:in table-ids]} + :active false) + ;; whenever a table is disabled we need to disable all of its fields too (and remove fk references) + (db/update-where! RawColumn {:raw_table_id [:in table-ids]} + :active false + :fk_target_column_id nil)))) (defn introspect-raw-table-and-update! "Introspect a single `RawTable` and persist the results as `RawTables` and `RawColumns`. Uses the various `describe-*` functions on the IDriver protocol to gather information." - [driver database raw-tbl] - (let [table-def (select-keys raw-tbl [:schema :name]) + [driver database raw-table] + (let [table-def (select-keys raw-table [:schema :name]) table-def (if (contains? (driver/features driver) :dynamic-schema) ;; dynamic schemas are handled differently, we'll handle them elsewhere (assoc table-def :fields []) @@ -129,16 +125,16 @@ ;; save the latest updates from the introspection (if table-def - (update-raw-table! raw-tbl table-def) + (update-raw-table! raw-table table-def) ;; if we didn't get back a table-def then this table must not exist anymore - (disable-raw-tables! [(:id raw-tbl)])) + (disable-raw-tables! [(:id raw-table)])) ;; if we support FKs then try updating those as well (when (and table-def (contains? (driver/features driver) :foreign-keys)) (when-let [table-fks (u/prog1 (driver/describe-table-fks driver database table-def) (schema/validate i/DescribeTableFKs <>))] - (save-all-table-fks! raw-tbl table-fks))))) + (save-all-table-fks! raw-table table-fks))))) ;;; ------------------------------------------------------------ INTROSPECT-DATABASE-AND-UPDATE-RAW-TABLES! 
------------------------------------------------------------ @@ -156,8 +152,8 @@ ;; static schema databases get introspected now (u/prog1 (driver/describe-table driver database table-def) (schema/validate i/DescribeTable <>)))] - (if-let [raw-tbl (get existing-tables (select-keys table-def [:schema :name]))] - (update-raw-table! raw-tbl table-def) + (if-let [raw-table (get existing-tables (select-keys table-def [:schema :name]))] + (update-raw-table! raw-table table-def) (create-raw-table! (:id database) table-def))) (catch Throwable t (log/error (u/format-color 'red "Unexpected error introspecting table schema: %s" (named-table table-schema table-name)) t)) @@ -183,8 +179,8 @@ (try (when-let [table-fks (u/prog1 (driver/describe-table-fks driver database table-def) (schema/validate i/DescribeTableFKs <>))] - (when-let [raw-tbl (db/sel :one RawTable :database_id (:id database), :schema table-schema, :name table-name)] - (save-all-table-fks! raw-tbl table-fks))) + (when-let [raw-table (RawTable :database_id (:id database), :schema table-schema, :name table-name)] + (save-all-table-fks! raw-table table-fks))) (catch Throwable t (log/error (u/format-color 'red "Unexpected error introspecting table fks: %s" (named-table table-schema table-name)) t)))))) @@ -194,8 +190,8 @@ ;; This is a protection against cases where the returned table-def has no :schema key (map (u/rpartial update :schema identity) tables))) -(defn- db->existing-tables [database] - (into {} (for [{:keys [name schema] :as table} (db/sel :many :fields [RawTable :id :schema :name] :database_id (:id database))] +(defn- db->name+schema->table [database] + (into {} (for [{:keys [name schema] :as table} (db/select [RawTable :id :schema :name], :database_id (:id database))] {{:name name, :schema schema} table}))) @@ -204,12 +200,12 @@ Uses the various `describe-*` functions on the IDriver protocol to gather information." [driver database] (log/info (u/format-color 'magenta "Introspecting schema on %s database '%s' ..." (name driver) (:name database))) - (let [start-time-ns (System/nanoTime) - tables (db->tables driver database) - existing-tables (db->existing-tables database)] + (let [start-time-ns (System/nanoTime) + tables (db->tables driver database) + name+schema->table (db->name+schema->table database)] - (introspect-tables! driver database tables existing-tables) - (disable-old-tables! tables existing-tables) + (introspect-tables! driver database tables name+schema->table) + (disable-old-tables! tables name+schema->table) (sync-fks! driver database tables) (log/info (u/format-color 'magenta "Introspection completed on %s database '%s' (%s)" (name driver) (:name database) (u/format-nanoseconds (- (System/nanoTime) start-time-ns)))))) diff --git a/src/metabase/sync_database/sync.clj b/src/metabase/sync_database/sync.clj index b96a70842774f8ad8fb010d11fad459c07b7aa85..cae65a3ec4972ebba7ce99b811408066a481686d 100644 --- a/src/metabase/sync_database/sync.clj +++ b/src/metabase/sync_database/sync.clj @@ -2,7 +2,6 @@ (:require (clojure [set :as set] [string :as s]) [clojure.tools.logging :as log] - [korma.core :as k] (metabase [db :as db] [driver :as driver]) (metabase.models [field :refer [Field], :as field] @@ -21,8 +20,8 @@ (every? map? 
fk-sources)]} (doseq [{fk-source-id :source-column, fk-target-id :target-column} fk-sources] ;; TODO: eventually limit this to just "core" schema tables - (when-let [source-field-id (db/sel :one :id Field, :raw_column_id fk-source-id, :visibility_type [not= "retired"])] - (when-let [target-field-id (db/sel :one :id Field, :raw_column_id fk-target-id, :visibility_type [not= "retired"])] + (when-let [source-field-id (db/select-one-id Field, :raw_column_id fk-source-id, :visibility_type [:not= "retired"])] + (when-let [target-field-id (db/select-one-id Field, :raw_column_id fk-target-id, :visibility_type [:not= "retired"])] (db/update! Field source-field-id :special_type :fk :fk_target_field_id target-field-id))))) @@ -47,17 +46,19 @@ (doseq [{:keys [keypath value]} (driver/table-rows-seq driver database _metabase_metadata)] ;; TODO: this does not support schemas in dbs :( (let [[_ table-name field-name k] (re-matches #"^([^.]+)\.(?:([^.]+)\.)?([^.]+)$" keypath)] - (try (when (not= 1 (if field-name - (k/update Field - (k/where {:name field-name, :table_id (k/subselect Table - (k/fields :id) - ;; TODO: this needs to support schemas - ;; TODO: eventually limit this to "core" schema tables - (k/where {:db_id (:id database), :name table-name, :active true}))}) - (k/set-fields {(keyword k) value})) - (k/update Table - (k/where {:name table-name, :db_id (:id database)}) - (k/set-fields {(keyword k) value})))) + (try (when-not (if field-name + (when-let [table-id (db/select-one-id Table + ;; TODO: this needs to support schemas + ;; TODO: eventually limit this to "core" schema tables + :db_id (:id database) + :name table-name + :active true)] + (db/update-where! Field {:name field-name + :table_id table-id} + (keyword k) value)) + (db/update-where! Table {:name table-name + :db_id (:id database)} + (keyword k) value)) (log/error (u/format-color "Error syncing _metabase_metadata: no matching keypath: %s" keypath))) (catch Throwable e (log/error (u/format-color 'red "Error in _metabase_metadata: %s" (.getMessage e)))))))) @@ -70,17 +71,15 @@ If there is a new raw column, then a new field is created. If a raw column has been updated, then we update the values for the field." [{table-id :id, raw-table-id :raw_table_id}] - (let [active-raw-columns (raw-table/active-columns {:id raw-table-id}) - active-column-ids (set (map :id active-raw-columns)) - existing-fields (into {} (for [{raw-column-id :raw_column_id, :as fld} (db/sel :many Field, :table_id table-id, :visibility_type [not= "retired"], :parent_id nil)] - {raw-column-id fld}))] + (let [active-raw-columns (raw-table/active-columns {:id raw-table-id}) + active-column-ids (set (map :id active-raw-columns)) + raw-column-id->field (u/key-by :raw_column_id (db/select Field, :table_id table-id, :visibility_type [:not= "retired"], :parent_id nil))] ;; retire any fields which were disabled in the schema (including child nested fields) - (doseq [[raw-column-id {field-id :id}] existing-fields] + (doseq [[raw-column-id {field-id :id}] raw-column-id->field] (when-not (contains? active-column-ids raw-column-id) - (k/update Field - (k/where (or {:id field-id} - {:parent_id field-id})) - (k/set-fields {:visibility_type "retired"})))) + (db/update! 
Field {:where [:or [:= :id field-id] + [:= :parent_id field-id]] + :set {:visibility_type "retired"}}))) ;; create/update the active columns (doseq [{raw-column-id :id, :keys [details], :as column} active-raw-columns] @@ -89,7 +88,7 @@ :is_pk :pk?}) (assoc :base-type (keyword (:base-type details)) :special-type (keyword (:special-type details))))] - (if-let [existing-field (get existing-fields raw-column-id)] + (if-let [existing-field (get raw-column-id->field raw-column-id)] ;; field already exists, so we UPDATE it (field/update-field! existing-field column) ;; looks like a new field, so we CREATE it @@ -102,23 +101,19 @@ [{database-id :id}] {:pre [(integer? database-id)]} ;; retire tables (and their fields) as needed - (let [tables-to-remove (set (map :id (k/select Table - (k/fields :id) - ;; NOTE: something really wrong happening with SQLKorma here which requires us - ;; to be explicit about :metabase_table.raw_table_id in the join condition - ;; without this it seems to want to join against metabase_field !? - (k/join RawTable (= :raw_table.id :metabase_table.raw_table_id)) - (k/where {:db_id database-id - :active true - :raw_table.active false}))))] - (table/retire-tables tables-to-remove))) + (when-let [table-ids-to-remove (db/select-ids Table + (db/join [Table :raw_table_id] [RawTable :id]) + :db_id database-id + (db/qualify Table :active) true + (db/qualify RawTable :active) false)] + (table/retire-tables table-ids-to-remove))) (defn update-data-models-for-table! "Update the working `Table` and `Field` metadata for a given `Table` based on the latest raw schema information. This function uses the data in `RawTable` and `RawColumn` to update the working data models as needed." [{raw-table-id :raw_table_id, table-id :id, :as existing-table}] - (when-let [{database-id :database_id, :as raw-table} (db/sel :one RawTable :id raw-table-id)] + (when-let [{database-id :database_id, :as raw-table} (RawTable raw-table-id)] (try (if-not (:active raw-table) ;; looks like the table has been deactivated, so lets retire this Table and its fields @@ -128,16 +123,11 @@ (save-table-fields! (table/update-table existing-table raw-table)) ;; handle setting any fk relationships - (when-let [table-fks (k/select RawColumn - (k/fields [:id :source-column] - [:fk_target_column_id :target-column]) - ;; NOTE: something really wrong happening with SQLKorma here which requires us - ;; to be explicit about :metabase_table.raw_table_id in the join condition - ;; without this it seems to want to join against metabase_field !? - (k/join RawTable (= :raw_table.id :raw_column.raw_table_id)) - (k/where {:raw_table.database_id database-id - :raw_table.id raw-table-id}) - (k/where (not= :raw_column.fk_target_column_id nil)))] + (when-let [table-fks (db/select [RawColumn [:id :source-column] [:fk_target_column_id :target-column]] + (db/join [RawColumn :raw_table_id] [RawTable :id]) + (db/qualify RawTable :database_id) database-id + (db/qualify RawTable :id) raw-table-id + (db/qualify RawColumn :fk_target_column_id) [:not= nil])] (save-fks! table-fks)))) (catch Throwable t @@ -197,15 +187,10 @@ (defn- set-fk-relationships! "Handle setting any FK relationships. This must be done after fully syncing the tables/fields because we need all tables/fields in place." 
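;; Note: the rewritten FK query below selects aliased columns ([:id :source-column],
;; [:fk_target_column_id :target-column]) and uses db/join with db/qualify in place of the Korma join
;; that previously needed the explicit join condition called out in the removed NOTE comments.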
[database] - (when-let [db-fks (k/select RawColumn - (k/fields [:id :source-column] - [:fk_target_column_id :target-column]) - ;; NOTE: something really wrong happening with SQLKorma here which requires us - ;; to be explicit about :metabase_table.raw_table_id in the join condition - ;; without this it seems to want to join against metabase_field !? - (k/join RawTable (= :raw_table.id :raw_column.raw_table_id)) - (k/where {:raw_table.database_id (:id database)}) - (k/where (not= :raw_column.fk_target_column_id nil)))] + (when-let [db-fks (db/select [RawColumn [:id :source-column] [:fk_target_column_id :target-column]] + (db/join [RawColumn :raw_table_id] [RawTable :id]) + (db/qualify RawTable :database_id) (:id database) + (db/qualify RawColumn :fk_target_column_id) [:not= nil])] (save-fks! db-fks))) (defn- maybe-sync-metabase-metadata-table! @@ -228,10 +213,8 @@ ;; retire any tables which were disabled (retire-tables! database) - (let [raw-tables (raw-table/active-tables database-id) - existing-tables (into {} (for [{raw-table-id :raw_table_id, :as table} (db/sel :many Table, :db_id database-id, :active true)] - {raw-table-id table}))] - - (create-and-update-tables! database existing-tables raw-tables) + (let [raw-tables (raw-table/active-tables database-id) + raw-table-id->table (u/key-by :raw_table_id (db/select Table, :db_id database-id, :active true))] + (create-and-update-tables! database raw-table-id->table raw-tables) (set-fk-relationships! database) (maybe-sync-metabase-metadata-table! database raw-tables))) diff --git a/src/metabase/sync_database/sync_dynamic.clj b/src/metabase/sync_database/sync_dynamic.clj index bc1037824a4f9131515e243fcb961d6487adf097..6d3a563911eff10f55b4d81b7e744e7673d3d529 100644 --- a/src/metabase/sync_database/sync_dynamic.clj +++ b/src/metabase/sync_database/sync_dynamic.clj @@ -19,8 +19,7 @@ All field-defs provided are assumed to be children of the given FIELD." [{parent-id :id, table-id :table_id, :as parent-field} nested-field-defs] ;; NOTE: remember that we never retire any fields in dynamic-schema tables - (let [existing-field-name->field (into {} (for [{table-name :name, :as tbl} (db/sel :many field/Field, :parent_id parent-id)] - {table-name tbl}))] + (let [existing-field-name->field (u/key-by :name (db/select field/Field, :parent_id parent-id))] (u/prog1 (set/difference (set (map :name nested-field-defs)) (set (keys existing-field-name->field))) (when (seq <>) (log/debug (u/format-color 'blue "Found new nested fields for field '%s': %s" (:name parent-field) <>)))) @@ -44,8 +43,7 @@ {:pre [(integer? table-id) (coll? field-defs) (every? map? field-defs)]} - (let [field-name->field (into {} (for [{field-name :name, :as fld} (db/sel :many field/Field, :table_id table-id, :parent_id nil)] - {field-name fld}))] + (let [field-name->field (u/key-by :name (db/select field/Field, :table_id table-id, :parent_id nil))] ;; NOTE: with dynamic schemas we never disable fields ;; create/update the fields (doseq [{field-name :name, :keys [nested-fields], :as field-def} field-defs] @@ -61,15 +59,15 @@ (defn scan-table-and-update-data-model! "Update the working `Table` and `Field` metadata for the given `Table`." 
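;; Note: u/key-by (added to metabase.util later in this patch) now builds the lookup maps used in
;; this namespace, replacing the hand-rolled (into {} (for ...)) versions, e.g.:
;;   (u/key-by :raw_table_id (db/select table/Table, :db_id database-id, :active true))
;;   ;=> map of raw_table_id -> Table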
[driver database {raw-table-id :raw_table_id, table-id :id, :as existing-table}] - (when-let [raw-tbl (db/sel :one raw-table/RawTable :id raw-table-id)] + (when-let [raw-table (raw-table/RawTable raw-table-id)] (try - (if-not (:active raw-tbl) + (if-not (:active raw-table) ;; looks like table was deactivated, so lets retire this Table (table/retire-tables #{table-id}) ;; otherwise we ask the driver for an updated table description and save that info (let [table-def (u/prog1 (driver/describe-table driver database (select-keys existing-table [:name :schema])) (schema/validate i/DescribeTable <>))] - (-> (table/update-table existing-table raw-tbl) + (-> (table/update-table existing-table raw-table) (save-table-fields! (:fields table-def))))) ;; NOTE: dynamic schemas don't have FKs (catch Throwable t @@ -88,16 +86,15 @@ ;; retire any tables which are no longer with us (sync/retire-tables! database) - (let [raw-tables (raw-table/active-tables database-id) - existing-tables (into {} (for [{raw-table-id :raw_table_id, :as table} (db/sel :many table/Table, :db_id database-id, :active true)] - {raw-table-id table}))] + (let [raw-tables (raw-table/active-tables database-id) + raw-table-id->table (u/key-by :raw_table_id (db/select table/Table, :db_id database-id, :active true))] ;; create/update tables (and their fields) ;; NOTE: we make sure to skip the _metabase_metadata table here. it's not a normal table. (doseq [{raw-table-id :id, :as raw-tbl} (filter #(not= "_metabase_metadata" (s/lower-case (:name %))) raw-tables)] (try (let [table-def (u/prog1 (driver/describe-table driver database (select-keys raw-tbl [:name :schema])) (schema/validate i/DescribeTable <>))] - (if-let [existing-table (get existing-tables raw-table-id)] + (if-let [existing-table (get raw-table-id->table raw-table-id)] ;; table already exists, update it (-> (table/update-table existing-table raw-tbl) (save-table-fields! (:fields table-def))) diff --git a/src/metabase/task/follow_up_emails.clj b/src/metabase/task/follow_up_emails.clj index 83e3f75aea9fd9b368471a47d11c4272d8b611d8..95110e3bede8978755cfc39f64d9c94c4dd18c0c 100644 --- a/src/metabase/task/follow_up_emails.clj +++ b/src/metabase/task/follow_up_emails.clj @@ -6,7 +6,6 @@ (clojurewerkz.quartzite [jobs :as jobs] [triggers :as triggers]) [clojurewerkz.quartzite.schedule.cron :as cron] - [korma.core :as k] [metabase.db :as db] [metabase.email :as email] [metabase.email.messages :as messages] @@ -104,7 +103,7 @@ (let [tracking? (setting/get :anon-tracking-enabled)] (or (nil? tracking?) (= "true" tracking?)))) ;; grab the oldest admins email address, that's who we'll send to - (when-let [admin (db/sel :one user/User :is_superuser true (k/order :date_joined))] + (when-let [admin (user/User :is_superuser true, {:order-by [:date_joined]})] (messages/send-follow-up-email (:email admin) "follow-up"))) (catch Throwable t (log/error "Problem sending follow-up email" t)) @@ -115,11 +114,11 @@ "Send an email to the instance admin about why Metabase usage has died down." 
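;; Note: calls like (user/User :is_superuser true, {:order-by [:date_joined]}) in this file invoke the
;; entity itself as a function (the defentity records implement clojure.lang.IFn) to fetch a single
;; matching row, the same idiom as (Pulse id), (Segment id), and (Database :is_sample true) elsewhere
;; in this patch.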
[] ;; grab the oldest admins email address, that's who we'll send to - (when-let [admin (db/sel :one user/User :is_superuser true (k/order :date_joined))] + (when-let [admin (user/User :is_superuser true, {:order-by [:date_joined]})] ;; inactive = no users created, no activity created, no dash/card views (past 7 days) - (let [last-user (c/from-sql-time (db/sel :one :field [user/User :date_joined] (k/order :date_joined :DESC) (k/limit 1))) - last-activity (c/from-sql-time (db/sel :one :field [activity/Activity :timestamp] (k/order :timestamp :DESC) (k/limit 1))) - last-view (c/from-sql-time (db/sel :one :field [view-log/ViewLog :timestamp] (k/order :timestamp :DESC) (k/limit 1))) + (let [last-user (c/from-sql-time (db/select-one-field :date_joined user/User, {:order-by [[:date_joined :desc]]})) + last-activity (c/from-sql-time (db/select-one-field :timestamp activity/Activity, {:order-by [[:timestamp :desc]]})) + last-view (c/from-sql-time (db/select-one-field :timestamp view-log/ViewLog, {:order-by [[:timestamp :desc]]})) two-weeks-ago (t/minus (t/now) (t/days 14))] (when (and (t/before? last-user two-weeks-ago) (t/before? last-activity two-weeks-ago) diff --git a/src/metabase/task/sync_databases.clj b/src/metabase/task/sync_databases.clj index 74535b335a7ba0e86eea86ec2b55ae841aaee2f8..1e7d1003ebef6fd6fce4dc4a487b1ebf8fd49755 100644 --- a/src/metabase/task/sync_databases.clj +++ b/src/metabase/task/sync_databases.clj @@ -17,20 +17,18 @@ (defonce ^:private sync-databases-trigger (atom nil)) ;; simple job which looks up all databases and runs a sync on them -(jobs/defjob SyncDatabases - [ctx] - (dorun - (for [database (db/sel :many Database :is_sample false)] ; skip Sample Dataset DB - (try - ;; NOTE: this happens synchronously for now to avoid excessive load if there are lots of databases - (if-not (and (= 0 (t/hour (t/now))) - (driver/driver-supports? (driver/engine->driver (:engine database)) :dynamic-schema)) - ;; most of the time we do a quick sync and avoid the lengthy analysis process - (sync-database/sync-database! database :full-sync? false) - ;; at midnight we run the full sync - (sync-database/sync-database! database :full-sync? true)) - (catch Throwable e - (log/error "Error syncing database: " (:id database) e)))))) +(jobs/defjob SyncDatabases [_] + (doseq [database (db/select Database, :is_sample false)] ; skip Sample Dataset DB + (try + ;; NOTE: this happens synchronously for now to avoid excessive load if there are lots of databases + (if-not (and (= 0 (t/hour (t/now))) + (driver/driver-supports? (driver/engine->driver (:engine database)) :dynamic-schema)) + ;; most of the time we do a quick sync and avoid the lengthy analysis process + (sync-database/sync-database! database :full-sync? false) + ;; at midnight we run the full sync + (sync-database/sync-database! database :full-sync? true)) + (catch Throwable e + (log/error "Error syncing database: " (:id database) e))))) (defn task-init "Automatically called during startup; start the job for syncing databases." diff --git a/src/metabase/util.clj b/src/metabase/util.clj index fe21083fd2913988f2c100f405fa44af54354e6c..1e0b454a702a151f818c7d8d469b77cf61792b2c 100644 --- a/src/metabase/util.clj +++ b/src/metabase/util.clj @@ -238,7 +238,7 @@ (defn format-nanoseconds "Format a time interval in nanoseconds to something more readable (µs/ms/etc.) 
Useful for logging elapsed time when using `(System/nanotime)`" - [nanoseconds] + ^String [nanoseconds] (loop [n nanoseconds, [[unit divisor] & more] [[:ns 1000] [:µs 1000] [:ms 1000] [:s 60] [:mins 60] [:hours Integer/MAX_VALUE]]] (if (and (> n divisor) (seq more)) @@ -611,7 +611,7 @@ "Execute F, a function that takes no arguments, and return the results. If F fails with an exception, retry F up to NUM-RETRIES times until it succeeds. - Consider using the `auto-retry` macro instead of calling this function directly." + Consider using the `auto-retry` macro instead of calling this function directly." {:style/indent 1} [num-retries f] (if (<= num-retries 0) @@ -634,3 +634,14 @@ [x] (or (string? x) (keyword? x))) + +(defn key-by + "Convert a sequential COLL to a map of `(f item)` -> `item`. + This is similar to `group-by`, but the resultant map's values are single items from COLL rather than sequences of items. + (Because only a single item is kept for each value of `f`, items producing duplicate values will be discarded). + + (key-by :id [{:id 1, :name :a} {:id 2, :name :b}]) -> {1 {:id 1, :name :a}, 2 {:id 2, :name :b}}" + {:style/indent 1} + [f coll] + (into {} (for [item coll] + {(f item) item}))) diff --git a/src/metabase/util/honeysql_extensions.clj b/src/metabase/util/honeysql_extensions.clj index 76c3ee73bbf238ef79c8edae84e7982e58b2b6ef..67dac762e6a98c2fc197f81d7c049aa0f83fd9de 100644 --- a/src/metabase/util/honeysql_extensions.clj +++ b/src/metabase/util/honeysql_extensions.clj @@ -1,10 +1,100 @@ (ns metabase.util.honeysql-extensions - "Tweaks an utils for HoneySQL." + (:refer-clojure :exclude [+ - / * mod inc dec cast concat format]) (:require [clojure.string :as s] - [honeysql.format :as hformat])) + (honeysql [core :as hsql] + [format :as hformat] + helpers)) + (:import honeysql.format.ToSql)) + +(alter-meta! #'honeysql.core/format assoc :style/indent 1) +(alter-meta! #'honeysql.core/call assoc :style/indent 1) +(alter-meta! #'honeysql.helpers/merge-left-join assoc :arglists '([m & clauses])) ; for some reason the metadata on these helper functions is wrong which causes Eastwood to fail, see https://github.com/jkk/honeysql/issues/123 + ;; Add an `:h2` quote style that uppercases the identifier (let [quote-fns @(resolve 'honeysql.format/quote-fns) ansi-quote-fn (:ansi quote-fns)] (intern 'honeysql.format 'quote-fns (assoc quote-fns :h2 (comp s/upper-case ansi-quote-fn)))) + +;; register the `extract` function with HoneySQL +;; (hsql/format (hsql/call :extract :a :b)) -> "extract(a from b)" +(defmethod hformat/fn-handler "extract" [_ unit expr] + (str "extract(" (name unit) " from " (hformat/to-sql expr) ")")) + +;; HoneySQL automatically assumes that dots within keywords are used to separate schema / table / field / etc. +;; To handle weird situations where people actually put dots *within* a single identifier we'll replace those dots with lozenges, +;; let HoneySQL do its thing, then switch them back at the last second +;; +;; TODO - Maybe instead of this lozengey hackiness it would make more sense just to add a new "identifier" record type that implements `ToSql` in a more intelligent way +(defn escape-dots + "Replace dots in a string with WHITE MEDIUM LOZENGES (⬨)." + ^clojure.lang.Keyword [s] + (s/replace (name s) #"\." "⬨")) + +(defn qualify-and-escape-dots + "Combine several NAME-COMPONENTS into a single Keyword, and escape dots in each name by replacing them with WHITE MEDIUM LOZENGES (⬨). 
+ + (qualify-and-escape-dots :ab.c :d) -> :ab⬨c.d" + ^clojure.lang.Keyword [& name-components] + (apply hsql/qualify (for [s name-components + :when s] + (escape-dots s)))) + +(defn unescape-dots + "Unescape lozenge-escaped names in a final SQL string (or vector including params). + Use this to undo escaping done by `qualify-and-escape-dots` after HoneySQL compiles a statement to SQL." + ^String [sql-string-or-vector] + (when sql-string-or-vector + (if (string? sql-string-or-vector) + (s/replace sql-string-or-vector #"⬨" ".") + (vec (cons (unescape-dots (first sql-string-or-vector)) + (rest sql-string-or-vector)))))) + + +(defrecord Literal [literal] + ToSql + (to-sql [_] + (str \' (name literal) \'))) + +(defn literal + "Wrap keyword or string S in single quotes and a HoneySQL `raw` form." + [s] + (Literal. s)) + + +(def ^{:arglists '([& exprs])} + "Math operator. Interpose `+` between EXPRS and wrap in parentheses." (partial hsql/call :+)) +(def ^{:arglists '([& exprs])} - "Math operator. Interpose `-` between EXPRS and wrap in parentheses." (partial hsql/call :-)) +(def ^{:arglists '([& exprs])} / "Math operator. Interpose `/` between EXPRS and wrap in parentheses." (partial hsql/call :/)) +(def ^{:arglists '([& exprs])} * "Math operator. Interpose `*` between EXPRS and wrap in parentheses." (partial hsql/call :*)) +(def ^{:arglists '([& exprs])} mod "Math operator. Interpose `%` between EXPRS and wrap in parentheses." (partial hsql/call :%)) + +(defn inc "Add 1 to X." [x] (+ x 1)) +(defn dec "Subtract 1 from X." [x] (- x 1)) + + +(defn cast + "Generate a statement like `cast(x AS c)`/" + [c x] + (hsql/call :cast x (hsql/raw (name c)))) + +(defn format + "SQL `FORMAT` function." + [format-str expr] + (hsql/call :format expr (literal format-str))) + +(defn ->date "CAST X to a `date`." [x] (cast :date x)) +(defn ->datetime "CAST X to a `datetime`." [x] (cast :datetime x)) +(defn ->timestamp "CAST X to a `timestamp`." [x] (cast :timestamp x)) +(defn ->timestamp-with-time-zone "CAST X to a `timestamp with time zone`." [x] (cast "timestamp with time zone" x)) +(defn ->integer "CAST X to a `integer`." [x] (cast :integer x)) + +;;; Random SQL fns. Not all DBs support all these! +(def ^{:arglists '([& exprs])} floor "SQL `floor` function." (partial hsql/call :floor)) +(def ^{:arglists '([& exprs])} hour "SQL `hour` function." (partial hsql/call :hour)) +(def ^{:arglists '([& exprs])} minute "SQL `minute` function." (partial hsql/call :minute)) +(def ^{:arglists '([& exprs])} week "SQL `week` function." (partial hsql/call :week)) +(def ^{:arglists '([& exprs])} month "SQL `month` function." (partial hsql/call :month)) +(def ^{:arglists '([& exprs])} quarter "SQL `quarter` function."(partial hsql/call :quarter)) +(def ^{:arglists '([& exprs])} year "SQL `year` function." (partial hsql/call :year)) +(def ^{:arglists '([& exprs])} concat "SQL `concat` function." (partial hsql/call :concat)) diff --git a/src/metabase/util/korma_extensions.clj b/src/metabase/util/korma_extensions.clj deleted file mode 100644 index 62ed31c58967c8f6cb41cbd99a26a4fda89236cf..0000000000000000000000000000000000000000 --- a/src/metabase/util/korma_extensions.clj +++ /dev/null @@ -1,120 +0,0 @@ -(ns metabase.util.korma-extensions - "Extensions and utility functions for [SQL Korma](http://www.sqlkorma.com/docs)." 
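;; A short usage sketch of the escaping and literal helpers defined above in
;; metabase.util.honeysql-extensions (aliased here as hx, with honeysql.format as hformat);
;; the rendered SQL shown is approximate:
(hx/qualify-and-escape-dots :objects.stuff :id)        ; -> :objects⬨stuff.id
(hx/escape-dots "objects.stuff")                       ; -> "objects⬨stuff"
(hx/unescape-dots "SELECT \"objects⬨stuff\".\"id\"")   ; -> "SELECT \"objects.stuff\".\"id\""
(hformat/to-sql (hx/literal :active))                  ; -> "'active'"
(hx/cast :date :created_at)                            ; renders as cast(created_at AS date)
(hx/->date (hx/+ :timestamp_field 1))                  ; roughly cast((timestamp_field + 1) AS date)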
- (:refer-clojure :exclude [+ - / * mod inc dec cast concat format]) - (:require [clojure.core.match :refer [match]] - [clojure.string :as s] - (korma [core :as k] - [db :as kdb]) - (korma.sql [engine :as kengine] - [utils :as kutils]) - [metabase.util :as u])) - -;;; Korma bugfixes - -;; `korma.sql.fns/pred-not=` doesn't take into account `false` values, and ends up generating SQL appropriate for `nil`, -;; such as "WHERE `field` IS NOT `false`". This is invalid and causes most of our DBs to explode. -;; Replace this wonky implementation with one that works properly -(defn- pred-not= [x y] - (match [x y] - [nil nil] nil - [ _ nil] (kengine/infix x "IS NOT" y) - [nil _] (kengine/infix y "IS NOT" x) - [ _ _] (kengine/infix x "<>" y))) - -(intern 'korma.sql.fns 'pred-not= pred-not=) - -;;; DB util fns - -;; Korma assumes dots are used to separate schemas/tables/fields, and stores names as a single string. -;; e.g. a Table name might be "public.bits", which becomes SQL like "public"."bits". -;; This works fine 99.9% of the time, but there are crazies who put dots in their Table names, e.g. "objects.stuff". -;; Since korma doesn't know how to handle this situation, we'll replace the dots *within* names with unicode -;; WHITE MEDIUM LOZENGE (⬨) and tell korma to switch the triangles back to dots when generating SQL. -;; Hopefully no one uses WHITE MEDIUM LOZENGE in their table/field names. -(def ^{:arglists '([s])} ^String escape-name "Replace dots in a string with WHITE MEDIUM LOZENGES (⬨)." (u/rpartial s/replace #"\." "⬨")) -(def ^{:arglists '([s])} ^String unescape-name "Replace WHITE MEDIUM LOZENGES (⬨) in a string with dots." (u/rpartial s/replace #"⬨" ".")) - -(defn create-db - "Like `korma.db/create-db`, but adds a fn to unescape escaped dots when generating SQL." - [spec] - (update-in (kdb/create-db spec) [:options :naming :fields] comp unescape-name)) - -(defn combine+escape-name-components - "Combine a sequence of keyword or string NAME-COMPONENTS into a single dot-separated korma string. - Since korma doesn't know how to handle dots inside names, they're replaced with unicode - WHITE MEDIUM LOZENGE (⬨), which are switched back to dots when the SQL is generated. - Blank strings in NAME-COMPONENTS are automatically skipped." - ^String [name-components] - (apply str (interpose "." (for [s name-components - :when (seq s)] - (escape-name (name s)))))) - -(def ^{:arglists '([name-components])} create-entity - "Like `korma.db/create-entity`, but takes a sequence of name components instead; escapes dots in names as well." - (comp k/create-entity combine+escape-name-components)) - -;;; util fns - -(defn wrap - "Wrap form X in parentheses." - [x] - (kutils/func "(%s)" [x])) - -(defn infix - "Interpose OPERATOR between ARGS and wrap the result in parentheses. - - (infix \"+\" :x :y :z) -> \"(x + y + z)\";" - [operator x y & more] - (let [x+y (kengine/infix x operator y)] - (if (seq more) - (apply infix operator x+y more) - (wrap x+y)))) - -(defn math-infix - "Interpose OPERATOR between ARGS and wrap the result in parentheses. - Integer literals in ARGS are automatically wrapped in a `k/raw` form." - [operator & args] - (apply infix operator (for [arg args] - (cond-> arg - (number? arg) k/raw)))) - -(def ^{:arglists '([& exprs])} + "Math operator. Interpose `+` between EXPRS and wrap in parentheses." (partial math-infix "+")) -(def ^{:arglists '([& exprs])} - "Math operator. Interpose `-` between EXPRS and wrap in parentheses." 
(partial math-infix "-")) -(def ^{:arglists '([& exprs])} / "Math operator. Interpose `/` between EXPRS and wrap in parentheses." (partial math-infix "/")) -(def ^{:arglists '([& exprs])} * "Math operator. Interpose `*` between EXPRS and wrap in parentheses." (partial math-infix "*")) -(def ^{:arglists '([& exprs])} mod "Math operator. Interpose `%` between EXPRS and wrap in parentheses." (partial math-infix "%")) - -(defn inc "Add 1 to X." [x] (+ x 1)) -(defn dec "Subtract 1 from X." [x] (- x 1)) - -(defn literal - "Wrap keyword or string S in single quotes and a korma `raw` form." - [s] - (k/raw (str \' (name s) \'))) - -(defn cast - "Generate a statement like `CAST(x AS c)`/" - [c x] - (kutils/func (clojure.core/format "CAST(%%s AS %s)" (name c)) - [x])) - -(defn format - "SQL `FORMAT` function." - [format-str expr] - (k/sqlfn :FORMAT expr (literal format-str))) - -(defn ->date "CAST X to a `DATE`." [x] (cast :DATE x)) -(defn ->datetime "CAST X to a `DATETIME`." [x] (cast :DATETIME x)) -(defn ->timestamp "CAST X to a `TIMESTAMP`." [x] (cast :TIMESTAMP x)) -(defn ->timestamp-with-time-zone "CAST X to a `TIMESTAMP WITH TIME ZONE`." [x] (cast "TIMESTAMP WITH TIME ZONE" x)) -(defn ->integer "CAST X to a `INTEGER`." [x] (cast :INTEGER x)) - -;;; Random SQL fns. Not all DBs support all these! -(def ^{:arglists '([& exprs])} floor "SQL `FLOOR` function." (partial k/sqlfn* :FLOOR)) -(def ^{:arglists '([& exprs])} hour "SQL `HOUR` function." (partial k/sqlfn* :HOUR)) -(def ^{:arglists '([& exprs])} minute "SQL `MINUTE` function." (partial k/sqlfn* :MINUTE)) -(def ^{:arglists '([& exprs])} week "SQL `WEEK` function." (partial k/sqlfn* :WEEK)) -(def ^{:arglists '([& exprs])} month "SQL `MONTH` function." (partial k/sqlfn* :MONTH)) -(def ^{:arglists '([& exprs])} quarter "SQL `QUARTER` function."(partial k/sqlfn* :QUARTER)) -(def ^{:arglists '([& exprs])} year "SQL `YEAR` function." (partial k/sqlfn* :YEAR)) -(def ^{:arglists '([& exprs])} concat "SQL `CONCAT` function." (partial k/sqlfn* :CONCAT)) diff --git a/test/metabase/api/activity_test.clj b/test/metabase/api/activity_test.clj index fb65b6b65f6d3a25e1e1c98d66a6e35ae2b9086f..a9cf8dc0be8d7797bc447a8dd429330cb882443d 100644 --- a/test/metabase/api/activity_test.clj +++ b/test/metabase/api/activity_test.clj @@ -1,7 +1,6 @@ (ns metabase.api.activity-test "Tests for /api/activity endpoints." (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.http-client :refer :all] (metabase.models [activity :refer [Activity]] @@ -20,7 +19,7 @@ ; 2. :user and :model_exists are hydrated ; NOTE: timestamp matching was being a real PITA so I cheated a bit. ideally we'd fix that -(expect-let [_ (k/delete Activity) +(expect-let [_ (db/cascade-delete! Activity) activity1 (db/insert! 
Activity :topic "install" :details {} @@ -40,7 +39,7 @@ :model "user" :details {} :timestamp (u/->Timestamp "2015-09-10T05:33:43.641Z"))] - [(match-$ (db/sel :one Activity :id (:id activity2)) + [(match-$ (Activity (:id activity2)) {:id $ :topic "dashboard-create" :user_id $ @@ -63,7 +62,7 @@ :table nil :custom_id nil :details $}) - (match-$ (db/sel :one Activity :id (:id activity3)) + (match-$ (Activity (:id activity3)) {:id $ :topic "user-joined" :user_id $ @@ -86,7 +85,7 @@ :table nil :custom_id nil :details $}) - (match-$ (db/sel :one Activity :id (:id activity1)) + (match-$ (Activity (:id activity1)) {:id $ :topic "install" :user_id nil diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj index cc1652bbb2fb740560e0cd94148c7baf68e22e09..6dfdb0c46c5667199d7862f9abf415c6daef42e8 100644 --- a/test/metabase/api/card_test.clj +++ b/test/metabase/api/card_test.clj @@ -207,9 +207,9 @@ (expect-with-temp [Card [{card-id :id, original-name :name}]] [original-name updated-name] - [(db/sel :one :field [Card :name] :id card-id) + [(db/select-one-field :name Card, :id card-id) (do ((user->client :rasta) :put 200 (str "card/" card-id) {:name updated-name}) - (db/sel :one :field [Card :name] :id card-id))])) + (db/select-one-field :name Card, :id card-id))])) (defmacro ^:private with-temp-card {:style/indent 1} [binding & body] diff --git a/test/metabase/api/dashboard_test.clj b/test/metabase/api/dashboard_test.clj index 2672aeda9207116bfe99954252d0c4560361ae1f..c75994f36744629d9b31d33df0ad8a964c96517c 100644 --- a/test/metabase/api/dashboard_test.clj +++ b/test/metabase/api/dashboard_test.clj @@ -192,7 +192,8 @@ (dissoc :id :dashboard_id :card_id) (update :created_at #(not (nil? %))) (update :updated_at #(not (nil? %)))) - (db/sel :many :fields [DashboardCard :sizeX :sizeY :col :row] :dashboard_id dashboard-id)])) + (map (partial into {}) + (db/select [DashboardCard :sizeX :sizeY :col :row], :dashboard_id dashboard-id))])) ;; new dashboard card w/ additional series (expect @@ -211,7 +212,7 @@ :sizeY 2 :col 4 :row 4}] - [0]] + #{0}] (tu/with-temp* [Dashboard [{dashboard-id :id}] Card [{card-id :id}] Card [{series-id-1 :id} {:name "Series Card"}]] @@ -220,8 +221,9 @@ :col 4 :series [{:id series-id-1}]})] [(remove-ids-and-boolean-timestamps dashboard-card) - (db/sel :many :fields [DashboardCard :sizeX :sizeY :col :row] :dashboard_id dashboard-id) - (db/sel :many :field [DashboardCardSeries :position] :dashboardcard_id (:id dashboard-card))]))) + (map (partial into {}) + (db/select [DashboardCard :sizeX :sizeY :col :row], :dashboard_id dashboard-id)) + (db/select-field :position DashboardCardSeries, :dashboardcard_id (:id dashboard-card))]))) ;; ## DELETE /api/dashboard/:id/cards @@ -237,9 +239,9 @@ DashboardCard [{dashcard-id :id} {:dashboard_id dashboard-id, :card_id card-id}] DashboardCardSeries [_ {:dashboardcard_id dashcard-id, :card_id series-id-1, :position 0}] DashboardCardSeries [_ {:dashboardcard_id dashcard-id, :card_id series-id-2, :position 1}]] - [(count (db/sel :many :id DashboardCard, :dashboard_id dashboard-id)) + [(count (db/select-ids DashboardCard, :dashboard_id dashboard-id)) ((user->client :rasta) :delete 200 (format "dashboard/%d/cards" dashboard-id) :dashcardId dashcard-id) - (count (db/sel :many :id DashboardCard, :dashboard_id dashboard-id))])) + (count (db/select-ids DashboardCard, :dashboard_id dashboard-id))])) ;; ## PUT /api/dashboard/:id/cards diff --git a/test/metabase/api/database_test.clj b/test/metabase/api/database_test.clj index 
3c58454cddae602b612c60e36205b11ffa34ba76..c317c1c006e82e5bf1e05617398ad696ca186110 100644 --- a/test/metabase/api/database_test.clj +++ b/test/metabase/api/database_test.clj @@ -1,6 +1,5 @@ (ns metabase.api.database-test (:require [expectations :refer :all] - [korma.core :as k] (metabase [db :as db] [driver :as driver]) (metabase.models [database :refer [Database]] @@ -79,7 +78,7 @@ ;; Check that we can create a Database (let [db-name (random-name)] (expect-eval-actual-first - (match-$ (db/sel :one Database :name db-name) + (match-$ (Database :name db-name) {:created_at $ :engine "postgres" ; string because it's coming back from API instead of DB :id $ @@ -97,7 +96,7 @@ ;; Check that we can delete a Database (expect-let [db-name (random-name) {db-id :id} (create-db db-name) - sel-db-name (fn [] (db/sel :one :field [Database :name] :id db-id))] + sel-db-name (fn [] (db/select-one-field :name Database, :id db-id))] [db-name nil] [(sel-db-name) @@ -107,8 +106,9 @@ ;; ## PUT /api/database/:id ;; Check that we can update fields in a Database (expect-let [[old-name new-name] (repeatedly 2 random-name) - {db-id :id} (create-db old-name) - sel-db (fn [] (db/sel :one :fields [Database :name :engine :details :is_full_sync] :id db-id))] + {db-id :id} (create-db old-name) + sel-db (fn [] (dissoc (into {} (db/select-one [Database :name :engine :details :is_full_sync], :id db-id)) + :features))] [{:details {:host "localhost", :port 5432, :dbname "fakedb", :user "cam", :ssl true} :engine :postgres :name old-name @@ -146,7 +146,7 @@ :description nil :features (mapv name (driver/features (driver/engine->driver engine)))}))) ;; (?) I don't remember why we have to do this for postgres but not any other of the bonus drivers - (match-$ (db/sel :one Database :name db-name) + (match-$ (Database :name db-name) {:created_at $ :engine "postgres" :id $ @@ -171,7 +171,7 @@ ;; GET /api/databases (include tables) (let [db-name (str "A" (random-name))] ; make sure this name comes before "test-data" (expect-eval-actual-first - (set (concat [(match-$ (db/sel :one Database :name db-name) + (set (concat [(match-$ (Database :name db-name) {:created_at $ :engine "postgres" :id $ @@ -196,7 +196,7 @@ :is_full_sync true :organization_id nil :description nil - :tables (->> (db/sel :many Table :db_id (:id database)) + :tables (->> (db/select Table, :db_id (:id database)) (mapv table-details) (sort-by :name)) :features (mapv name (driver/features (driver/engine->driver engine)))}))))))) diff --git a/test/metabase/api/dataset_test.clj b/test/metabase/api/dataset_test.clj index 389e5a13e0c34c244592fe571c9f1e42ffdbee51..6b5bc1036bc3e1d8ebe82acc9f06fa8a4cf60ae6 100644 --- a/test/metabase/api/dataset_test.clj +++ b/test/metabase/api/dataset_test.clj @@ -89,7 +89,7 @@ (query checkins (ql/aggregation (ql/count)))))] [(format-response result) - (format-response (db/sel :one QueryExecution :uuid (:uuid result)))])) + (format-response (QueryExecution :uuid (:uuid result)))])) ;; Even if a query fails we still expect a 200 response from the api (expect @@ -121,7 +121,7 @@ :type "native" :native {:query "foobar"}})] [(format-response result) - (format-response (db/sel :one QueryExecution :uuid (:uuid result)))])) + (format-response (QueryExecution :uuid (:uuid result)))])) ;; GET /api/dataset/card/:id @@ -186,4 +186,4 @@ [(-> result (update :card remove-ids-and-boolean-timestamps) (update :result format-response)) - (format-response (db/sel :one QueryExecution :uuid (get-in result [:result :uuid])))]))) + (format-response (QueryExecution :uuid 
(get-in result [:result :uuid])))]))) diff --git a/test/metabase/api/field_test.clj b/test/metabase/api/field_test.clj index c7c250dbb9817ea0347a4f3bbc625dd8e704d02d..84ab05681a11c24f1135c54254e4df204ec998bb 100644 --- a/test/metabase/api/field_test.clj +++ b/test/metabase/api/field_test.clj @@ -93,69 +93,69 @@ :description nil :special_type nil :visibility_type :sensitive}] - (tu/with-temp Database [{database-id :id} {:name "Field Test" - :engine :yeehaw - :details {} - :is_sample false}] - (tu/with-temp Table [{table-id :id} {:name "Field Test" - :db_id database-id - :active true}] - (tu/with-temp Field [{field-id :id} {:table_id table-id - :name "Field Test" - :base_type :TextField - :field_type :info - :special_type nil - :active true - :preview_display true - :position 1}] - (let [original-val (simple-field-details (db/sel :one Field :id field-id))] - ;; set it - ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:name "something else" - :display_name "yay" - :description "foobar" - :special_type :name - :visibility_type :sensitive}) - (let [updated-val (simple-field-details (db/sel :one Field :id field-id))] - ;; unset it - ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:description nil - :special_type nil}) - [original-val - updated-val - (simple-field-details (db/sel :one Field :id field-id))])))))) + (tu/with-temp* [Database [{database-id :id} {:name "Field Test" + :engine :yeehaw + :details {} + :is_sample false}] + Table [{table-id :id} {:name "Field Test" + :db_id database-id + :active true}] + Field [{field-id :id} {:table_id table-id + :name "Field Test" + :base_type :TextField + :field_type :info + :special_type nil + :active true + :preview_display true + :position 1}]] + (let [original-val (simple-field-details (Field field-id))] + ;; set it + ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:name "something else" + :display_name "yay" + :description "foobar" + :special_type :name + :visibility_type :sensitive}) + (let [updated-val (simple-field-details (Field field-id))] + ;; unset it + ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:description nil + :special_type nil}) + [original-val + updated-val + (simple-field-details (Field field-id))])))) ;; when we set the special-type from :fk to something else, make sure fk_target_field_id is set to nil (expect [true nil] - (tu/with-temp Database [{database-id :id} {:name "Field Test" - :engine :yeehaw - :details {} - :is_sample false}] - (tu/with-temp Table [{table-id :id} {:name "Field Test" - :db_id database-id - :active true}] - (tu/with-temp Field [{field-id1 :id} {:table_id table-id - :name "Target Field" - :base_type :TextField - :field_type :info - :special_type :id - :active true - :preview_display true - :position 1}] - (tu/with-temp Field [{field-id :id} {:table_id table-id - :name "Field Test" - :base_type :TextField - :field_type :info - :special_type :fk - :fk_target_field_id field-id1 - :active true - :preview_display true - :position 1}] - (let [original-val (boolean (db/sel :one :field [Field :fk_target_field_id] :id field-id))] - ;; unset the :fk special-type - ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:special_type :name}) - [original-val - (db/sel :one :field [Field :fk_target_field_id] :id field-id)])))))) + (tu/with-temp* [Database [{database-id :id} {:name "Field Test" + :engine :yeehaw + :details {} + :is_sample false}] + Table [{table-id :id} {:name "Field Test" + :db_id database-id + :active true}] + Field 
[{field-id1 :id} {:table_id table-id + :name "Target Field" + :base_type :TextField + :field_type :info + :special_type :id + :active true + :preview_display true + :position 1}] + Field [{field-id :id} {:table_id table-id + :name "Field Test" + :base_type :TextField + :field_type :info + :special_type :fk + :fk_target_field_id field-id1 + :active true + :preview_display true + :position 1}]] + (let [original-val (boolean (db/select-one-field :fk_target_field_id Field, :id field-id))] + ;; unset the :fk special-type + ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:special_type :name}) + [original-val + (db/select-one-field :fk_target_field_id Field, :id field-id)]))) ;; check that you can't set a field to :timestamp_seconds if it's not of a proper base_type (expect @@ -165,13 +165,13 @@ Table [{table-id :id} {:db_id database-id}] Field [{field-id :id} {:table_id table-id}]] [((user->client :crowberto) :put 400 (str "field/" field-id) {:special_type :timestamp_seconds}) - (db/sel :one :field [Field :special_type], :id field-id)])) + (db/select-one-field :special_type Field, :id field-id)])) (defn- field->field-values "Fetch the `FieldValues` object that corresponds to a given `Field`." [table-kw field-kw] - (db/sel :one FieldValues :field_id (id table-kw field-kw))) + (FieldValues :field_id (id table-kw field-kw))) ;; ## GET /api/field/:id/values ;; Should return something useful for a field that has special_type :category @@ -199,7 +199,7 @@ ;; Check that we can set values (tu/expect-eval-actual-first [{:status "success"} - (tu/match-$ (db/sel :one FieldValues :field_id (id :venues :price)) + (tu/match-$ (FieldValues :field_id (id :venues :price)) {:field_id (id :venues :price) :human_readable_values {:1 "$" :2 "$$" @@ -218,7 +218,7 @@ ;; Check that we can unset values (tu/expect-eval-actual-first [{:status "success"} - (tu/match-$ (db/sel :one FieldValues :field_id (id :venues :price)) + (tu/match-$ (FieldValues :field_id (id :venues :price)) {:field_id (id :venues :price) :human_readable_values {} :values [1 2 3 4] diff --git a/test/metabase/api/pulse_test.clj b/test/metabase/api/pulse_test.clj index 78937fed9346a6c5350d54200924be10a0dc1cba..380348fdfbbbe522a729be11fe31f6680547d98c 100644 --- a/test/metabase/api/pulse_test.clj +++ b/test/metabase/api/pulse_test.clj @@ -1,7 +1,6 @@ (ns metabase.api.pulse-test "Tests for /api/pulse endpoints." (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] (metabase [http-client :as http] [middleware :as middleware]) @@ -34,10 +33,6 @@ card-ids (filter identity (map :id cards))] (pulse/create-pulse name (user->id :crowberto) card-ids []))) -(defn- delete-existing-pulses! [] - (doseq [pulse-id (db/sel :many :id Pulse)] - (db/cascade-delete! Pulse :id pulse-id))) - (defn- user-details [user] (match-$ user {:id $ @@ -231,7 +226,7 @@ ;; ## GET /api/pulse -(expect-let [_ (delete-existing-pulses!) +(expect-let [_ (db/cascade-delete! Pulse) pulse1 (new-pulse! :name "ABC") pulse2 (new-pulse! 
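;; tu/with-temp* flattens what used to be nested tu/with-temp calls into one bindings vector
;; of Model [binding attributes] pairs; a trimmed sketch of the pattern used above, relying
;; on the same defaults these tests use for unspecified columns:
(tu/with-temp* [Database [{database-id :id} {:engine :yeehaw, :details {}, :is_sample false}]
                Table    [{table-id :id}    {:db_id database-id}]
                Field    [{field-id :id}    {:table_id table-id}]]
  (db/select-one-field :special_type Field, :id field-id))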
:name "DEF")] [(pulse-details pulse1) diff --git a/test/metabase/api/session_test.clj b/test/metabase/api/session_test.clj index 7ec0af7cb6291b98651f59a4ea3a2a49eedc407e..799c3b0b051eaa875c32ae6659be0570de30e30b 100644 --- a/test/metabase/api/session_test.clj +++ b/test/metabase/api/session_test.clj @@ -14,7 +14,7 @@ ;; ## POST /api/session ;; Test that we can login (expect-eval-actual-first - (db/sel :one :fields [Session :id] :user_id (user->id :rasta)) + (db/select-one [Session :id], :user_id (user->id :rasta)) (do (db/delete! Session, :user_id (user->id :rasta)) ; delete all other sessions for the bird first (client :post 200 "session" (user->credentials :rasta)))) @@ -63,7 +63,7 @@ (expect true (let [reset-fields-set? (fn [] - (let [{:keys [reset_token reset_triggered]} (db/sel :one :fields [User :reset_token :reset_triggered] :id (user->id :rasta))] + (let [{:keys [reset_token reset_triggered]} (db/select-one [User :reset_token :reset_triggered], :id (user->id :rasta))] (boolean (and reset_token reset_triggered))))] ;; make sure user is starting with no values (db/update! User (user->id :rasta), :reset_token nil, :reset_triggered nil) @@ -109,13 +109,13 @@ ;; New creds *should* work (client :post 200 "session" (:new creds)) ;; Double check that reset token was cleared - (db/sel :one :fields [User :reset_token :reset_triggered] :id id))) + (db/select-one [User :reset_token :reset_triggered], :id id))) ;; Check that password reset returns a valid session token (let [user-last-name (random-name)] (expect-eval-actual-first - (let [id (db/sel :one :id User, :last_name user-last-name) - session-id (db/sel :one :id Session, :user_id id)] + (let [id (db/select-one-id User, :last_name user-last-name) + session-id (db/select-one-id Session, :user_id id)] {:success true :session_id session-id}) (let [{:keys [email id]} (create-user :password "password", :last_name user-last-name, :reset_triggered (System/currentTimeMillis)) diff --git a/test/metabase/api/setup_test.clj b/test/metabase/api/setup_test.clj index 90bf31bfc0d850384701d37c3e7180b8658dcfa0..f17e42cceb1282687673bae868da87322d84a468 100644 --- a/test/metabase/api/setup_test.clj +++ b/test/metabase/api/setup_test.clj @@ -15,9 +15,7 @@ ;; Check that we can create a new superuser via setup-token (let [user-name (random-name)] (expect-eval-actual-first - [(match-$ (->> (db/sel :one User :email (str user-name "@metabase.com")) - (:id) - (db/sel :one Session :user_id)) + [(match-$ (Session :user_id (db/select-one-id User, :email (str user-name "@metabase.com"))) {:id $id}) (str user-name "@metabase.com")] (let [resp (http/client :post 200 "setup" {:token (setup/token-create) diff --git a/test/metabase/api/table_test.clj b/test/metabase/api/table_test.clj index 73d01405634a8419019701d6d0882188809d3fbe..d870115623638ab1a516de7ba311c69da36278b8 100644 --- a/test/metabase/api/table_test.clj +++ b/test/metabase/api/table_test.clj @@ -44,30 +44,30 @@ ;; ## GET /api/table ;; These should come back in alphabetical order and include relevant metadata (expect - #{{:name (format-name "categories") - :display_name "Categories" - :db_id (id) - :active true - :rows 75 - :id (id :categories)} - {:name (format-name "checkins") - :display_name "Checkins" - :db_id (id) - :active true - :rows 1000 - :id (id :checkins)} - {:name (format-name "users") - :display_name "Users" - :db_id (id) - :active true - :rows 15 - :id (id :users)} - {:name (format-name "venues") - :display_name "Venues" - :db_id (id) - :active true - :rows 100 - :id (id :venues)}} + #{{:name 
(format-name "categories") + :display_name "Categories" + :db_id (id) + :active true + :rows 75 + :id (id :categories)} + {:name (format-name "checkins") + :display_name "Checkins" + :db_id (id) + :active true + :rows 1000 + :id (id :checkins)} + {:name (format-name "users") + :display_name "Users" + :db_id (id) + :active true + :rows 15 + :id (id :users)} + {:name (format-name "venues") + :display_name "Venues" + :db_id (id) + :active true + :rows 100 + :id (id :venues)}} (->> ((user->client :rasta) :get 200 "table") (filter #(= (:db_id %) (id))) ; prevent stray tables from affecting unit test results (map #(dissoc % :raw_table_id :db :created_at :updated_at :schema :entity_name :description :entity_type :visibility_type)) @@ -220,7 +220,7 @@ ;;; GET api/table/:id/query_metadata?include_sensitive_fields ;;; Make sure that getting the User table *does* include info about the password field, but not actual values themselves (expect - (match-$ (db/sel :one Table :id (id :users)) + (match-$ (Table (id :users)) {:description nil :entity_type nil :visibility_type nil @@ -228,7 +228,7 @@ :schema "PUBLIC" :name "USERS" :display_name "Users" - :fields [(match-$ (db/sel :one Field :id (id :users :id)) + :fields [(match-$ (Field (id :users :id)) {:description nil :table_id (id :users) :special_type "id" @@ -248,7 +248,7 @@ :parent_id nil :raw_column_id $ :last_analyzed $}) - (match-$ (db/sel :one Field :id (id :users :last_login)) + (match-$ (Field (id :users :last_login)) {:description nil :table_id (id :users) :special_type nil @@ -268,7 +268,7 @@ :parent_id nil :raw_column_id $ :last_analyzed $}) - (match-$ (db/sel :one Field :id (id :users :name)) + (match-$ (Field (id :users :name)) {:description nil :table_id (id :users) :special_type "name" @@ -288,7 +288,7 @@ :parent_id nil :raw_column_id $ :last_analyzed $}) - (match-$ (db/sel :one Field :table_id (id :users) :name "PASSWORD") + (match-$ (Field :table_id (id :users), :name "PASSWORD") {:description nil :table_id (id :users) :special_type "category" @@ -478,8 +478,8 @@ ;; ## GET /api/table/:id/fks ;; We expect a single FK from CHECKINS.USER_ID -> USERS.ID (expect - (let [checkins-user-field (db/sel :one Field :table_id (id :checkins) :name "USER_ID") - users-id-field (db/sel :one Field :table_id (id :users) :name "ID")] + (let [checkins-user-field (Field :table_id (id :checkins), :name "USER_ID") + users-id-field (Field :table_id (id :users), :name "ID")] [{:origin_id (:id checkins-user-field) :destination_id (:id users-id-field) :relationship "Mt1" @@ -558,13 +558,13 @@ ;; ## POST /api/table/:id/reorder (expect-eval-actual-first {:result "success"} - (let [categories-id-field (db/sel :one Field :table_id (id :categories) :name "ID") - categories-name-field (db/sel :one Field :table_id (id :categories) :name "NAME") + (let [categories-id-field (Field :table_id (id :categories), :name "ID") + categories-name-field (Field :table_id (id :categories), :name "NAME") api-response ((user->client :crowberto) :post 200 (format "table/%d/reorder" (id :categories)) {:new_order [(:id categories-name-field) (:id categories-id-field)]})] ;; check the modified values (have to do it here because the api response tells us nothing) - (assert (= 0 (:position (db/sel :one :fields [Field :position] :id (:id categories-name-field))))) - (assert (= 1 (:position (db/sel :one :fields [Field :position] :id (:id categories-id-field))))) + (assert (= 0 (db/select-one-field :position Field, :id (:id categories-name-field)))) + (assert (= 1 (db/select-one-field :position 
Field, :id (:id categories-id-field)))) ;; put the values back to their previous state (db/update! Field (:id categories-name-field), :position 0) (db/update! Field (:id categories-id-field), :position 0) diff --git a/test/metabase/api/user_test.clj b/test/metabase/api/user_test.clj index 11e6596ae52c30c7fbb82de83262041a469270a9..96cd746929f48d4382256f02343ef18e99798038 100644 --- a/test/metabase/api/user_test.clj +++ b/test/metabase/api/user_test.clj @@ -1,7 +1,6 @@ (ns metabase.api.user-test "Tests for /api/user endpoints." (:require [expectations :refer :all] - [korma.core :as k] (metabase [db :as db] [http-client :as http] [middleware :as middleware]) @@ -70,7 +69,7 @@ ;; Test that we can create a new User (let [rand-name (random-name)] (expect-eval-actual-first - (match-$ (db/sel :one User :first_name rand-name) + (match-$ (User :first_name rand-name) {:id $ :email $ :first_name rand-name @@ -85,7 +84,7 @@ ;; Test that reactivating a disabled account works (let [rand-name (random-name)] (expect-eval-actual-first - (match-$ (db/sel :one User :first_name rand-name :is_active true) + (match-$ (User :first_name rand-name, :is_active true) {:id $ :email $ :first_name rand-name @@ -184,9 +183,10 @@ ;; ## PUT /api/user/:id ;; Test that we can edit a User (expect-let [{old-first :first_name, last-name :last_name, old-email :email, id :id, :as user} (create-user) - new-first (random-name) - new-email (.toLowerCase ^String (str new-first "@metabase.com")) - fetch-user (fn [] (db/sel :one :fields [User :first_name :last_name :is_superuser :email] :id id))] + new-first (random-name) + new-email (.toLowerCase ^String (str new-first "@metabase.com")) + fetch-user (fn [] (dissoc (into {} (db/select-one [User :first_name :last_name :is_superuser :email], :id id)) + :common_name))] [{:email old-email :is_superuser false :last_name last-name @@ -201,7 +201,7 @@ (fetch-user))]) ;; Test that a normal user cannot change the :is_superuser flag for themselves -(expect-let [fetch-user (fn [] (db/sel :one :fields [User :first_name :last_name :is_superuser :email] :id (user->id :rasta)))] +(expect-let [fetch-user (fn [] (db/select-one [User :first_name :last_name :is_superuser :email], :id (user->id :rasta)))] [(fetch-user)] [(do ((user->client :rasta) :put 200 (str "user/" (user->id :rasta)) (-> (fetch-user) (assoc :is_superuser true))) @@ -231,7 +231,7 @@ (metabase.http-client/client creds :put 200 (format "user/%d/password" id) {:password "abc123!!DEF" :old_password (:password creds)}) ;; now simply grab the lastest pass from the db and compare to the one we have from before reset - (not= password (db/sel :one :field [User :password] :email (:email creds))))) + (not= password (db/select-one-field :password User, :email (:email creds))))) ;; Check that a non-superuser CANNOT update someone else's password (expect "You don't have permissions to do that." 
@@ -260,7 +260,7 @@ (let [creds {:email "def@metabase.com" :password "def123"}] [(metabase.http-client/client creds :put 200 (format "user/%d/qbnewb" id)) - (db/sel :one :field [User :is_qbnewb] :id id)]))) + (db/select-one-field :is_qbnewb User, :id id)]))) ;; Check that a non-superuser CANNOT update someone else's password diff --git a/test/metabase/db/metadata_queries_test.clj b/test/metabase/db/metadata_queries_test.clj index f114f778735ecfbde46cf4ecab4edb4443061849..b50bf09fdce48e8a750e10fae52ca7dc5d4fc90c 100644 --- a/test/metabase/db/metadata_queries_test.clj +++ b/test/metabase/db/metadata_queries_test.clj @@ -6,7 +6,7 @@ [metabase.test.data :refer :all])) (defn- fetch-field [table-kw field-kw] - (db/sel :one Field :id (id table-kw field-kw))) + (Field :id (id table-kw field-kw))) ;; ### FIELD-DISTINCT-COUNT (expect 100 diff --git a/test/metabase/driver/generic_sql_test.clj b/test/metabase/driver/generic_sql_test.clj index c8dcbd2073896244b0cde0672061938a08fd59d1..6b1f89c9972726bc4959272be1432e75a2fe87b5 100644 --- a/test/metabase/driver/generic_sql_test.clj +++ b/test/metabase/driver/generic_sql_test.clj @@ -2,24 +2,27 @@ (:require [expectations :refer :all] (metabase [db :as db] [driver :as driver]) - [metabase.driver.generic-sql :refer :all] + (metabase.driver [generic-sql :refer :all] + h2) (metabase.models [field :refer [Field]] [table :refer [Table], :as table]) [metabase.test.data :refer :all] + (metabase.test.data [dataset-definitions :as defs] + [datasets :as datasets]) [metabase.test.util :refer [resolve-private-fns]]) (:import metabase.driver.h2.H2Driver)) -(def users-table - (delay (db/sel :one Table :name "USERS"))) +(def ^:private users-table (delay (Table :name "USERS"))) +(def ^:private venues-table (delay (Table (id :venues)))) +(def ^:private users-name-field (delay (Field (id :users :name)))) -(def venues-table - (delay (Table (id :venues)))) - -(def korma-users-table - (delay (korma-entity @users-table))) - -(def users-name-field - (delay (Field (id :users :name)))) +(def ^:private generic-sql-engines + (delay (set (for [engine datasets/all-valid-engines + :let [driver (driver/engine->driver engine)] + :when (not= engine :bigquery) ; bigquery doesn't use the generic sql implementations of things like `field-avg-length` + :when (extends? ISQLDriver (class driver))] + (do (require (symbol (str "metabase.test.data." (name engine))) :reload) ; otherwise it gets all snippy if you try to do `lein test metabase.driver.generic-sql-test` + engine))))) ;; DESCRIBE-DATABASE @@ -75,3 +78,41 @@ {:id (id :venues :name), :values nil} {:id (id :venues :price), :values [1 2 3 4]}]} (driver/analyze-table (H2Driver.) @venues-table (set (mapv :id (table/fields @venues-table))))) + +(resolve-private-fns metabase.driver.generic-sql field-avg-length field-values-lazy-seq table-rows-seq) + +;;; FIELD-AVG-LENGTH +(datasets/expect-with-engines @generic-sql-engines + ;; Not sure why some databases give different values for this but they're close enough that I'll allow them + (if (contains? 
#{:redshift :sqlserver} datasets/*engine*) + 15 + 16) + (field-avg-length datasets/*driver* (db/select-one 'Field :id (id :venues :name)))) + +;;; FIELD-VALUES-LAZY-SEQ +(datasets/expect-with-engines @generic-sql-engines + ["Red Medicine" + "Stout Burgers & Beers" + "The Apple Pan" + "Wurstküche" + "Brite Spot Family Restaurant"] + (take 5 (field-values-lazy-seq datasets/*driver* (db/select-one 'Field :id (id :venues :name))))) + + +;;; TABLE-ROWS-SEQ +(datasets/expect-with-engines @generic-sql-engines + [{:name "Red Medicine", :price 3, :category_id 4, :id 1} + {:name "Stout Burgers & Beers", :price 2, :category_id 11, :id 2} + {:name "The Apple Pan", :price 2, :category_id 11, :id 3} + {:name "Wurstküche", :price 2, :category_id 29, :id 4} + {:name "Brite Spot Family Restaurant", :price 2, :category_id 20, :id 5}] + (for [row (take 5 (table-rows-seq datasets/*driver* + (db/select-one 'Database :id (id)) + (db/select-one 'Table :id (id :venues))))] + (dissoc row :latitude :longitude))) ; different DBs use different precisions for these + +;;; FIELD-PERCENT-URLS +(datasets/expect-with-engines @generic-sql-engines + 0.5 + (dataset half-valid-urls + (field-percent-urls datasets/*driver* (db/select-one 'Field :id (id :urls :url))))) diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj index 0837f9cf1ec36314f99175ec19bd7b0834e95f11..6dfda34a4966be271e50e3e5996f595be723c06f 100644 --- a/test/metabase/driver/mongo_test.clj +++ b/test/metabase/driver/mongo_test.clj @@ -1,7 +1,6 @@ (ns metabase.driver.mongo-test "Tests for Mongo driver." (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.driver :as driver] (metabase.models [database :refer [Database]] @@ -138,7 +137,10 @@ {:rows 1000, :active true, :name "checkins"} {:rows 15, :active true, :name "users"} {:rows 100, :active true, :name "venues"}] - (db/sel :many :fields [Table :name :active :rows], :db_id (:id (mongo-db)), (k/order :name))) + (for [field (db/select [Table :name :active :rows] + :db_id (:id (mongo-db)) + {:order-by [:name]})] + (into {} field))) ;; Test that Fields got synced correctly, and types are correct (expect-when-testing-mongo @@ -159,5 +161,8 @@ {:special_type :name, :base_type :TextField, :name "name"} {:special_type :category, :base_type :IntegerField, :name "price"}]] (for [nm table-names] - (db/sel :many :fields [Field :name :base_type :special_type], :active true, :table_id (:id (table-name->table nm)) - (k/order :name)))) + (for [field (db/select [Field :name :base_type :special_type] + :active true + :table_id (:id (table-name->table nm)) + {:order-by [:name]})] + (into {} field)))) diff --git a/test/metabase/driver/postgres_test.clj b/test/metabase/driver/postgres_test.clj index 92467ae867e72900fff268bc4daae62fd6fe32a4..fddd981156a82a6c8447587dbca8c9a4fe256928 100644 --- a/test/metabase/driver/postgres_test.clj +++ b/test/metabase/driver/postgres_test.clj @@ -1,7 +1,7 @@ (ns metabase.driver.postgres-test (:require [clojure.java.jdbc :as jdbc] [expectations :refer :all] - [korma.core :as k] + [honeysql.core :as hsql] (metabase [db :as db] [driver :as driver]) [metabase.driver.generic-sql :as sql] @@ -54,8 +54,8 @@ (i/create-database-definition "Postgres with a JSON Field" ["venues" [{:field-name "address", :base-type {:native "json"}}] - [[(k/raw "to_json('{\"street\": \"431 Natoma\", \"city\": \"San Francisco\", \"state\": \"CA\", \"zip\": 94103}'::text)")]]])] - (db/sel :one :field [Field :special_type] :id (data/id :venues :address)))) + 
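;; Each expectation above runs once per engine in @generic-sql-engines, with datasets/*engine*
;; and datasets/*driver* bound for that run; the same shape, trimmed to two rows:
(datasets/expect-with-engines @generic-sql-engines
  ["Red Medicine" "Stout Burgers & Beers"]
  (take 2 (field-values-lazy-seq datasets/*driver* (db/select-one 'Field :id (id :venues :name)))))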
[[(hsql/raw "to_json('{\"street\": \"431 Natoma\", \"city\": \"San Francisco\", \"state\": \"CA\", \"zip\": 94103}'::text)")]]])] + (db/select-one-field :special_type Field, :id (data/id :venues :address)))) ;;; # UUID Support diff --git a/test/metabase/events/activity_feed_test.clj b/test/metabase/events/activity_feed_test.clj index 41abe531365abf20b81e2eb5d87b64a349e9c715..f0d0f9c5010cb810cb7c4d20809bfccfc9b18c75 100644 --- a/test/metabase/events/activity_feed_test.clj +++ b/test/metabase/events/activity_feed_test.clj @@ -1,6 +1,5 @@ (ns metabase.events.activity-feed-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.events.activity-feed :refer :all] (metabase.models [activity :refer [Activity]] @@ -98,10 +97,10 @@ :name (:name card) :public_perms (:public_perms card)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :card-create :item card}) - (-> (db/sel :one Activity :topic "card-create") + (-> (Activity :topic "card-create") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:card-update` event @@ -116,10 +115,10 @@ :name (:name card) :public_perms (:public_perms card)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :card-update :item card}) - (-> (db/sel :one Activity :topic "card-update") + (-> (Activity :topic "card-update") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:card-delete` event @@ -134,10 +133,10 @@ :name (:name card) :public_perms (:public_perms card)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :card-delete :item card}) - (-> (db/sel :one Activity :topic "card-delete") + (-> (Activity :topic "card-delete") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:dashboard-create` event @@ -150,10 +149,10 @@ :name (:name dashboard) :public_perms (:public_perms dashboard)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :dashboard-create :item dashboard}) - (-> (db/sel :one Activity :topic "dashboard-create") + (-> (Activity :topic "dashboard-create") (select-keys [:topic :user_id :model :model_id :details])))) ;; `:dashboard-delete` event @@ -166,10 +165,10 @@ :name (:name dashboard) :public_perms (:public_perms dashboard)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :dashboard-delete :item dashboard}) - (-> (db/sel :one Activity :topic "dashboard-delete") + (-> (Activity :topic "dashboard-delete") (select-keys [:topic :user_id :model :model_id :details])))) ;; `:dashboard-add-cards` event @@ -187,10 +186,10 @@ :id (:id dashcard) :card_id (:id card)}]}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :dashboard-add-cards :item {:id (:id dashboard) :actor_id (:id user) :dashcards [dashcard]}}) - (-> (db/sel :one Activity :topic "dashboard-add-cards") + (-> (Activity :topic "dashboard-add-cards") (select-keys [:topic :user_id :model :model_id :details])))) ;; `:dashboard-remove-cards` event @@ -208,10 +207,10 @@ :id (:id dashcard) :card_id (:id card)}]}} (do - (k/delete Activity) + (db/cascade-delete! 
Activity) (process-activity-event {:topic :dashboard-remove-cards :item {:id (:id dashboard) :actor_id (:id user) :dashcards [dashcard]}}) - (-> (db/sel :one Activity :topic "dashboard-remove-cards") + (-> (Activity :topic "dashboard-remove-cards") (select-keys [:topic :user_id :model :model_id :details])))) ;; `:database-sync-*` events @@ -239,14 +238,14 @@ ; :description (:description (db)) ; :engine (name (:engine (db)))}}] ; (do -; (k/delete Activity) +; (db/cascade-delete! Activity ; (let [_ (process-activity-event {:topic :database-sync-begin ; :item {:database_id (id) :custom_id "abc"}}) -; activity1 (-> (db/sel :one Activity :topic "database-sync") +; activity1 (-> (Activity :topic "database-sync") ; (select-keys [:topic :user_id :model :model_id :database_id :custom_id :details])) ; _ (process-activity-event {:topic :database-sync-end ; :item {:database_id (id) :custom_id "abc"}}) -; activity2 (-> (db/sel :one Activity :topic "database-sync") +; activity2 (-> (Activity :topic "database-sync") ; (select-keys [:topic :user_id :model :model_id :database_id :custom_id :details]) ; (assoc-in [:details :running_time] 0)) ; activity-cnt (:cnt (first (k/select Activity (k/aggregate (count :*) :cnt) (k/where {:topic "database-sync"}))))] @@ -262,10 +261,10 @@ :model_id nil :details {}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :install :item {}}) - (-> (db/sel :one Activity :topic "install") + (-> (Activity :topic "install") (select-keys [:topic :user_id :model :model_id :details])))) ;; `:metric-create` @@ -279,10 +278,10 @@ :details {:name (:name metric) :description (:description metric)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :metric-create :item metric}) - (-> (db/sel :one Activity :topic "metric-create") + (-> (Activity :topic "metric-create") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:metric-update` @@ -297,14 +296,14 @@ :description (:description metric) :revision_message "update this mofo"}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :metric-update :item (-> metric (assoc :actor_id (:id user) :revision_message "update this mofo") ;; doing this specifically to ensure :actor_id is utilized (dissoc :creator_id))}) - (-> (db/sel :one Activity :topic "metric-update") + (-> (Activity :topic "metric-update") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:metric-delete` @@ -319,11 +318,11 @@ :description (:description metric) :revision_message "deleted"}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :metric-delete :item (assoc metric :actor_id (:id user) :revision_message "deleted")}) - (-> (db/sel :one Activity :topic "metric-delete") + (-> (Activity :topic "metric-delete") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:pulse-create` event @@ -337,10 +336,10 @@ :details {:name (:name pulse) :public_perms (:public_perms pulse)}} (do - (k/delete Activity) + (db/cascade-delete! 
Activity) (process-activity-event {:topic :pulse-create :item pulse}) - (-> (db/sel :one Activity :topic "pulse-create") + (-> (Activity :topic "pulse-create") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:pulse-delete` event @@ -354,10 +353,10 @@ :details {:name (:name pulse) :public_perms (:public_perms pulse)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :pulse-delete :item pulse}) - (-> (db/sel :one Activity :topic "pulse-delete") + (-> (Activity :topic "pulse-delete") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:segment-create` @@ -371,10 +370,10 @@ :details {:name (:name segment) :description (:description segment)}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :segment-create :item segment}) - (-> (db/sel :one Activity :topic "segment-create") + (-> (Activity :topic "segment-create") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:segment-update` @@ -389,14 +388,14 @@ :description (:description segment) :revision_message "update this mofo"}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :segment-update :item (-> segment (assoc :actor_id (:id user) :revision_message "update this mofo") ;; doing this specifically to ensure :actor_id is utilized (dissoc :creator_id))}) - (-> (db/sel :one Activity :topic "segment-update") + (-> (Activity :topic "segment-update") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:segment-delete` @@ -411,11 +410,11 @@ :description (:description segment) :revision_message "deleted"}} (do - (k/delete Activity) + (db/cascade-delete! Activity) (process-activity-event {:topic :segment-delete :item (assoc segment :actor_id (:id user) :revision_message "deleted")}) - (-> (db/sel :one Activity :topic "segment-delete") + (-> (Activity :topic "segment-delete") (select-keys [:topic :user_id :model :model_id :database_id :table_id :details])))) ;; `:user-login` event @@ -426,10 +425,10 @@ :model_id user-id :details {}} (do - (k/delete Activity) + (db/cascade-delete! 
Activity) (process-activity-event {:topic :user-login :item {:user_id user-id :session_id session-id :first_login true}}) - (-> (db/sel :one Activity :topic "user-joined") + (-> (Activity :topic "user-joined") (select-keys [:topic :user_id :model :model_id :details])))) diff --git a/test/metabase/events/dependencies_test.clj b/test/metabase/events/dependencies_test.clj index c1053966f3e004e242460f6eb46734461dafbe46..2fa93e0b251b3ba8bb67a3601a47b358d67951f6 100644 --- a/test/metabase/events/dependencies_test.clj +++ b/test/metabase/events/dependencies_test.clj @@ -26,7 +26,8 @@ :filter ["AND" [">" 4 "2014-10-19"] ["=" 5 "yes"] ["SEGMENT" 2] ["SEGMENT" 3]]}}}] (process-dependencies-event {:topic :card-create :item card}) - (set (db/sel :many :fields [Dependency :dependent_on_model :dependent_on_id], :model "Card", :model_id (:id card))))) + (set (map (partial into {}) + (db/select [Dependency :dependent_on_model :dependent_on_id], :model "Card", :model_id (:id card)))))) ;; `:card-update` event (expect @@ -36,7 +37,7 @@ :query {:source_table (id :categories)}}}] (process-dependencies-event {:topic :card-create :item card}) - (db/sel :many :fields [Dependency :dependent_on_model :dependent_on_id], :model "Card", :model_id (:id card)))) + (db/select [Dependency :dependent_on_model :dependent_on_id], :model "Card", :model_id (:id card)))) ;; `:metric-create` event (expect @@ -51,7 +52,8 @@ :filter ["AND" ["SEGMENT" 18] ["SEGMENT" 35]]}}]] (process-dependencies-event {:topic :metric-create :item metric}) - (set (db/sel :many :fields [Dependency :dependent_on_model :dependent_on_id], :model "Metric", :model_id (:id metric))))) + (set (map (partial into {}) + (db/select [Dependency :dependent_on_model :dependent_on_id], :model "Metric", :model_id (:id metric)))))) ;; `:card-update` event (expect @@ -66,4 +68,5 @@ :filter ["AND" ["SEGMENT" 18] ["SEGMENT" 35]]}}]] (process-dependencies-event {:topic :metric-update :item metric}) - (set (db/sel :many :fields [Dependency :dependent_on_model :dependent_on_id], :model "Metric", :model_id (:id metric))))) + (set (map (partial into {}) + (db/select [Dependency :dependent_on_model :dependent_on_id], :model "Metric", :model_id (:id metric)))))) diff --git a/test/metabase/events/last_login_test.clj b/test/metabase/events/last_login_test.clj index 6df64ecb29175f7b0ee39b197bb5ae9cfb338c86..b1498dee152638f8c6578837eca8f81a92841c4b 100644 --- a/test/metabase/events/last_login_test.clj +++ b/test/metabase/events/last_login_test.clj @@ -25,6 +25,6 @@ (process-last-login-event {:topic :user-login :item {:user_id user-id :session_id "doesntmatter"}}) - (let [user (db/sel :one User :id user-id)] + (let [user (User :id user-id)] {:orig-last-login last-login :upd-last-login (nil? 
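;; The event tests above all share one reset-and-inspect pattern; a condensed sketch using
;; the same requires as activity_feed_test:
(db/cascade-delete! Activity)                           ; clear out any existing Activity rows
(process-activity-event {:topic :install, :item {}})    ; fire the handler under test
(-> (Activity :topic "install")                         ; fetch the single row it wrote
    (select-keys [:topic :user_id :model :model_id :details]))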
(:last_login user))}))) diff --git a/test/metabase/events/revision_test.clj b/test/metabase/events/revision_test.clj index bdf90983fb9daa774f647f75fa02ec047789e4d1..582e035c5d2f148f81f2fff014fb3289d447496e 100644 --- a/test/metabase/events/revision_test.clj +++ b/test/metabase/events/revision_test.clj @@ -1,6 +1,5 @@ (ns metabase.events.revision-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.events.revision :refer :all] (metabase.models [card :refer [Card]] @@ -72,7 +71,7 @@ (do (process-revision-event {:topic :card-create :item card}) - (-> (db/sel :one Revision :model "Card" :model_id card-id) + (-> (Revision :model "Card", :model_id card-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) ;; :card-update @@ -86,7 +85,7 @@ (do (process-revision-event {:topic :card-update :item card}) - (-> (db/sel :one Revision :model "Card" :model_id card-id) + (-> (Revision :model "Card", :model_id card-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) ;; :dashboard-create @@ -100,7 +99,7 @@ (do (process-revision-event {:topic :dashboard-create :item dashboard}) - (-> (db/sel :one Revision :model "Dashboard" :model_id dashboard-id) + (-> (Revision :model "Dashboard", :model_id dashboard-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) ;; :dashboard-update @@ -114,7 +113,7 @@ (do (process-revision-event {:topic :dashboard-update :item dashboard}) - (-> (db/sel :one Revision :model "Dashboard" :model_id dashboard-id) + (-> (Revision :model "Dashboard", :model_id dashboard-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) ;; :dashboard-add-cards @@ -132,7 +131,7 @@ :item {:id dashboard-id :actor_id (user->id :crowberto) :dashcards [dashcard]}}) - (-> (db/sel :one Revision :model "Dashboard" :model_id dashboard-id) + (-> (Revision :model "Dashboard", :model_id dashboard-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) ;; :dashboard-remove-cards @@ -151,7 +150,7 @@ :item {:id dashboard-id :actor_id (user->id :crowberto) :dashcards [dashcard]}}) - (-> (db/sel :one Revision :model "Dashboard" :model_id dashboard-id) + (-> (Revision :model "Dashboard", :model_id dashboard-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) ;; :dashboard-reposition-cards @@ -176,7 +175,7 @@ :item {:id dashboard-id :actor_id (user->id :crowberto) :dashcards [(assoc dashcard :sizeX 4)]}}) - (-> (db/sel :one Revision :model "Dashboard" :model_id dashboard-id) + (-> (Revision :model "Dashboard", :model_id dashboard-id) (select-keys [:model :model_id :user_id :object :is_reversion :is_creation])))) @@ -198,7 +197,7 @@ (process-revision-event {:topic :metric-create :item metric}) - (let [revision (db/sel :one :fields [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Metric", :model_id (:id metric))] + (let [revision (db/select-one [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Metric", :model_id (:id metric))] (assoc revision :object (dissoc (:object revision) :id :table_id))))) ;; :metric-update @@ -220,7 +219,7 @@ :item (assoc metric :actor_id (user->id :crowberto) :revision_message "updated")}) - (let [revision (db/sel :one :fields [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Metric", :model_id (:id metric))] + (let [revision (db/select-one [Revision :model :user_id :object :is_reversion 
:is_creation :message], :model "Metric", :model_id (:id metric))] (assoc revision :object (dissoc (:object revision) :id :table_id))))) ;; :metric-delete @@ -240,7 +239,7 @@ Metric [metric {:table_id id, :definition {:a "b"}, :is_active false}]] (process-revision-event {:topic :metric-delete :item metric}) - (let [revision (db/sel :one :fields [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Metric", :model_id (:id metric))] + (let [revision (db/select-one [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Metric", :model_id (:id metric))] (assoc revision :object (dissoc (:object revision) :id :table_id))))) @@ -262,7 +261,7 @@ :definition {:a "b"}}]] (process-revision-event {:topic :segment-create :item segment}) - (let [revision (-> (db/sel :one Revision :model "Segment" :model_id (:id segment)) + (let [revision (-> (Revision :model "Segment", :model_id (:id segment)) (select-keys [:model :user_id :object :is_reversion :is_creation :message]))] (assoc revision :object (dissoc (:object revision) :id :table_id))))) @@ -286,7 +285,7 @@ :item (assoc segment :actor_id (user->id :crowberto) :revision_message "updated")}) - (update (db/sel :one :fields [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Segment", :model_id (:id segment)) + (update (db/select-one [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Segment", :model_id (:id segment)) :object (u/rpartial dissoc :id :table_id)))) ;; :segment-delete @@ -308,5 +307,5 @@ :is_active false}]] (process-revision-event {:topic :segment-delete :item segment}) - (update (db/sel :one :fields [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Segment", :model_id (:id segment)) + (update (db/select-one [Revision :model :user_id :object :is_reversion :is_creation :message], :model "Segment", :model_id (:id segment)) :object (u/rpartial dissoc :id :table_id)))) diff --git a/test/metabase/events/view_log_test.clj b/test/metabase/events/view_log_test.clj index 26346122d7bccefdc921f5c7614eaf73f3230421..abaf5444c19666da8193406987aaaba7e871ffb8 100644 --- a/test/metabase/events/view_log_test.clj +++ b/test/metabase/events/view_log_test.clj @@ -1,6 +1,5 @@ (ns metabase.events.view-log-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.events.view-log :refer :all] (metabase.models [user :refer [User]] @@ -29,7 +28,7 @@ (do (process-view-count-event {:topic :card-create :item card}) - (-> (db/sel :one ViewLog :user_id user-id) + (-> (ViewLog :user_id user-id) (select-keys [:user_id :model :model_id])))) ;; `:card-read` event @@ -42,7 +41,7 @@ (do (process-view-count-event {:topic :card-read :item card}) - (-> (db/sel :one ViewLog :user_id user-id) + (-> (ViewLog :user_id user-id) (select-keys [:user_id :model :model_id])))) ;; `:dashboard-read` event @@ -55,5 +54,5 @@ (do (process-view-count-event {:topic :dashboard-read :item dashboard}) - (-> (db/sel :one ViewLog :user_id user-id) + (-> (ViewLog :user_id user-id) (select-keys [:user_id :model :model_id])))) diff --git a/test/metabase/middleware_test.clj b/test/metabase/middleware_test.clj index 6e9f77e1210760de0b87a2bac1202fcf7fd6d45a..76535003d14648144cc33d5d21863785c8f88715 100644 --- a/test/metabase/middleware_test.clj +++ b/test/metabase/middleware_test.clj @@ -1,14 +1,14 @@ (ns metabase.middleware-test (:require [cheshire.core :as json] [expectations :refer :all] - [korma.core :as k] + [ring.mock.request :as mock] 
[metabase.api.common :refer [*current-user-id* *current-user*]] + [metabase.db :as db] [metabase.middleware :refer :all] [metabase.models.session :refer [Session]] [metabase.test.data :refer :all] [metabase.test.data.users :refer :all] - [metabase.util :as u] - [ring.mock.request :as mock])) + [metabase.util :as u])) ;; =========================== TEST wrap-session-id middleware =========================== @@ -64,9 +64,10 @@ (.toString (java.util.UUID/randomUUID))) ;; valid session ID -(expect (user->id :rasta) +(expect + (user->id :rasta) (let [session-id (random-session-id)] - (k/insert Session (k/values {:id session-id, :user_id (user->id :rasta), :created_at (u/new-sql-timestamp)})) + (db/simple-insert! Session, :id session-id, :user_id (user->id :rasta), :created_at (u/new-sql-timestamp)) (-> (auth-enforced-handler (request-with-session-id session-id)) :metabase-user-id))) @@ -74,9 +75,10 @@ ;; expired session-id ;; create a new session (specifically created some time in the past so it's EXPIRED) ;; should fail due to session expiration -(expect response-unauthentic +(expect + response-unauthentic (let [session-id (random-session-id)] - (k/insert Session (k/values {:id session-id, :user_id (user->id :rasta), :created_at (java.sql.Timestamp. 0)})) + (db/simple-insert! Session, :id session-id, :user_id (user->id :rasta), :created_at (java.sql.Timestamp. 0)) (auth-enforced-handler (request-with-session-id session-id)))) @@ -86,7 +88,7 @@ ;; NOTE that :trashbird is our INACTIVE test user (expect response-unauthentic (let [session-id (random-session-id)] - (k/insert Session (k/values {:id session-id, :user_id (user->id :trashbird), :created_at (u/new-sql-timestamp)})) + (db/simple-insert! Session, :id session-id, :user_id (user->id :trashbird), :created_at (u/new-sql-timestamp)) (auth-enforced-handler (request-with-session-id session-id)))) diff --git a/test/metabase/models/common_test.clj b/test/metabase/models/common_test.clj index dae6a9038da241982191a77f57bfa8959eeccb44..5028c03d7ee1b6fa7e3224f08803988e3b11daa3 100644 --- a/test/metabase/models/common_test.clj +++ b/test/metabase/models/common_test.clj @@ -18,3 +18,4 @@ (expect "Foo Bar" (name->human-readable-name "foo bar")) (expect "Dashboard Card Subscription" (name->human-readable-name "dashboardcardsubscription")) (expect "Foo ID" (name->human-readable-name "foo_id")) +(expect "Receiver ID" (name->human-readable-name "receiver_id")) diff --git a/test/metabase/models/dashboard_card_test.clj b/test/metabase/models/dashboard_card_test.clj index 9e13556f66e794f1462d7758a2b2dc1318ea0bd2..cea3af4bbf1d2ba0adf93bf457516bcb7b31172d 100644 --- a/test/metabase/models/dashboard_card_test.clj +++ b/test/metabase/models/dashboard_card_test.clj @@ -67,11 +67,11 @@ ;; update-dashboard-card-series (expect - [[] - ["card1"] - ["card2"] - ["card2" "card1"] - ["card1" "card3"]] + [#{} + #{"card1"} + #{"card2"} + #{"card2" "card1"} + #{"card1" "card3"}] (tu/with-temp* [Dashboard [{dashboard-id :id} {:name "Test Dashboard" :public_perms 0 :creator_id (user->id :rasta)}] @@ -82,9 +82,8 @@ Card [{card-id3 :id} {:name "card3"}]] (let [upd-series (fn [series] (update-dashboard-card-series {:id dashcard-id} series) - (->> (db/sel :many :field [DashboardCardSeries :card_id] :dashboardcard_id dashcard-id) - (mapv (fn [card_id] - (db/sel :one :field [Card :name] :id card_id)))))] + (set (for [card-id (db/select-field :card_id DashboardCardSeries, :dashboardcard_id dashcard-id)] + (db/select-one-field :name Card, :id card-id))))] [(upd-series []) (upd-series 
[card-id-1]) (upd-series [card-id-2]) diff --git a/test/metabase/models/dependency_test.clj b/test/metabase/models/dependency_test.clj index db9a0547f1113f0cf31d29f14f523770fe5544c1..107a09da42332c08a65bd3e424d3500846212d17 100644 --- a/test/metabase/models/dependency_test.clj +++ b/test/metabase/models/dependency_test.clj @@ -1,6 +1,5 @@ (ns metabase.models.dependency-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] (metabase.models [dependency :refer :all] [interface :as i]) @@ -66,7 +65,7 @@ #{} (do (update-dependencies Mock 2 {:test ["a" "b" "c"]}) - (set (db/sel :many Dependency :model "Mock" :model_id 2)))) + (set (db/select Dependency, :model "Mock", :model_id 2)))) ;; valid working dependencies list (expect @@ -84,7 +83,7 @@ :dependent_on_id 3}} (do (update-dependencies Mock 7 {:test [1 2 3]}) - (format-dependencies (db/sel :many Dependency :model "Mock" :model_id 7)))) + (format-dependencies (db/select Dependency, :model "Mock", :model_id 7)))) ;; delete dependencies that are no longer in the list (expect @@ -104,4 +103,4 @@ :dependent_on_id 5 :created_at (u/new-sql-timestamp)) (update-dependencies Mock 1 {:test [1 2]}) - (format-dependencies (db/sel :many Dependency :model "Mock" :model_id 1)))) + (format-dependencies (db/select Dependency, :model "Mock", :model_id 1)))) diff --git a/test/metabase/models/field_values_test.clj b/test/metabase/models/field_values_test.clj index 8a173fab525fe79d0aea6ff28ba9ad53e4a91ca5..4ca72000be210f3c176dd366dcd139823c2b03b8 100644 --- a/test/metabase/models/field_values_test.clj +++ b/test/metabase/models/field_values_test.clj @@ -51,6 +51,6 @@ Table [{table-id :id} {:db_id database-id}] Field [{field-id :id} {:table_id table-id}] FieldValues [_ {:field_id field-id, :values "[1,2,3]"}]] - [(db/sel :one :field [FieldValues :values] :field_id field-id) + [(db/select-one-field :values FieldValues, :field_id field-id) (clear-field-values field-id) - (db/sel :one :field [FieldValues :values] :field_id field-id)])) + (db/select-one-field :values FieldValues, :field_id field-id)])) diff --git a/test/metabase/models/pulse_channel_test.clj b/test/metabase/models/pulse_channel_test.clj index 9521d65cb193c4a301542836985580171eb64043..6c39ca59db0e7e140241c0dd594545d5a5366351 100644 --- a/test/metabase/models/pulse_channel_test.clj +++ b/test/metabase/models/pulse_channel_test.clj @@ -104,7 +104,7 @@ (defn- create-channel-then-select! [channel] (when-let [new-channel-id (create-pulse-channel channel)] - (-> (db/sel :one PulseChannel :id new-channel-id) + (-> (PulseChannel new-channel-id) (hydrate :recipients) (update :recipients #(sort-by :email %)) (dissoc :id :pulse_id :created_at :updated_at) @@ -113,7 +113,7 @@ (defn- update-channel-then-select! [{:keys [id] :as channel}] (update-pulse-channel channel) - (-> (db/sel :one PulseChannel :id id) + (-> (PulseChannel id) (hydrate :recipients) (dissoc :id :pulse_id :created_at :updated_at) (m/dissoc-in [:details :emails]))) @@ -256,16 +256,16 @@ ;; update-recipients! (expect - [[] - [(user->id :rasta)] - [(user->id :crowberto)] - [(user->id :crowberto) (user->id :rasta)] - [(user->id :rasta) (user->id :trashbird)]] + [nil + #{(user->id :rasta)} + #{(user->id :crowberto)} + #{(user->id :crowberto) (user->id :rasta)} + #{(user->id :rasta) (user->id :trashbird)}] (tu/with-temp* [Pulse [{pulse-id :id}] PulseChannel [{channel-id :id} {:pulse_id pulse-id}]] (let [upd-recipients! (fn [recipients] (update-recipients! 
channel-id recipients) - (db/sel :many :field [PulseChannelRecipient :user_id] :pulse_channel_id channel-id))] + (db/select-field :user_id PulseChannelRecipient, :pulse_channel_id channel-id))] [(upd-recipients! []) (upd-recipients! [(user->id :rasta)]) (upd-recipients! [(user->id :crowberto)]) @@ -276,19 +276,19 @@ ;; retrieve-scheduled-channels ;; test a simple scenario with a single Pulse and 2 channels on hourly/daily schedules (expect - [[{:schedule_type "hourly", :channel_type "slack"}] - [{:schedule_type "hourly", :channel_type "slack"}] - [{:schedule_type "daily", :channel_type "email"} - {:schedule_type "hourly", :channel_type "slack"}] - [{:schedule_type "daily", :channel_type "email"} - {:schedule_type "hourly", :channel_type "slack"}]] + [[{:schedule_type :hourly, :channel_type :slack}] + [{:schedule_type :hourly, :channel_type :slack}] + [{:schedule_type :daily, :channel_type :email} + {:schedule_type :hourly, :channel_type :slack}] + [{:schedule_type :daily, :channel_type :email} + {:schedule_type :hourly, :channel_type :slack}]] (tu/with-temp* [Pulse [{pulse-id :id}] PulseChannel [_ {:pulse_id pulse-id}] PulseChannel [_ {:pulse_id pulse-id, :channel_type :slack, :schedule_type :hourly}] PulseChannel [_ {:pulse_id pulse-id, :channel_type :email, :schedule_type :hourly, :enabled false}]] (let [retrieve-channels (fn [hour day] (for [channel (retrieve-scheduled-channels hour day :other :other)] - (dissoc channel :id :pulse_id)))] + (dissoc (into {} channel) :id :pulse_id)))] [(retrieve-channels nil nil) (retrieve-channels 12 nil) (retrieve-channels 15 nil) @@ -296,13 +296,13 @@ ;; more complex scenario with 2 Pulses, including weekly scheduling (expect - [[{:schedule_type "hourly", :channel_type "slack"}] - [{:schedule_type "hourly", :channel_type "slack"} - {:schedule_type "daily", :channel_type "slack"}] - [{:schedule_type "daily", :channel_type "email"} - {:schedule_type "hourly", :channel_type "slack"}] - [{:schedule_type "hourly", :channel_type "slack"} - {:schedule_type "weekly", :channel_type "email"}]] + [[{:schedule_type :hourly, :channel_type :slack}] + [{:schedule_type :hourly, :channel_type :slack} + {:schedule_type :daily, :channel_type :slack}] + [{:schedule_type :daily, :channel_type :email} + {:schedule_type :hourly, :channel_type :slack}] + [{:schedule_type :hourly, :channel_type :slack} + {:schedule_type :weekly, :channel_type :email}]] (tu/with-temp* [Pulse [{pulse-1-id :id}] Pulse [{pulse-2-id :id}] PulseChannel [_ {:pulse_id pulse-1-id, :enabled true, :channel_type :email, :schedule_type :daily}] @@ -311,7 +311,7 @@ PulseChannel [_ {:pulse_id pulse-2-id, :enabled true, :channel_type :email, :schedule_type :weekly, :schedule_hour 8, :schedule_day "mon"}]] (let [retrieve-channels (fn [hour day] (for [channel (retrieve-scheduled-channels hour day :other :other)] - (dissoc channel :id :pulse_id)))] + (dissoc (into {} channel) :id :pulse_id)))] [(retrieve-channels nil nil) (retrieve-channels 10 nil) (retrieve-channels 15 nil) @@ -320,12 +320,12 @@ ;; specific test for various monthly scheduling permutations (expect [[] - [{:schedule_type "monthly", :channel_type "email"} - {:schedule_type "monthly", :channel_type "slack"}] - [{:schedule_type "monthly", :channel_type "slack"}] + [{:schedule_type :monthly, :channel_type :email} + {:schedule_type :monthly, :channel_type :slack}] + [{:schedule_type :monthly, :channel_type :slack}] [] - [{:schedule_type "monthly", :channel_type "slack"}] - [{:schedule_type "monthly", :channel_type "email"}]] + [{:schedule_type 
:monthly, :channel_type :slack}] + [{:schedule_type :monthly, :channel_type :email}]] (tu/with-temp* [Pulse [{pulse-1-id :id}] Pulse [{pulse-2-id :id}] PulseChannel [_ {:pulse_id pulse-1-id, :channel_type :email, :schedule_type :monthly, :schedule_hour 12, :schedule_frame :first}] @@ -334,7 +334,7 @@ PulseChannel [_ {:pulse_id pulse-2-id, :channel_type :email, :schedule_type :monthly, :schedule_hour 8, :schedule_day "fri", :schedule_frame :last}]] (let [retrieve-channels (fn [hour weekday monthday monthweek] (for [channel (retrieve-scheduled-channels hour weekday monthday monthweek)] - (dissoc channel :id :pulse_id)))] + (dissoc (into {} channel) :id :pulse_id)))] ;; simple starter which should be empty [(retrieve-channels nil nil :other :other) ;; this should capture BOTH first absolute day of month + first monday of month schedules diff --git a/test/metabase/models/pulse_test.clj b/test/metabase/models/pulse_test.clj index 506e6b0baba1f47f2ddae16e8e158e17a74b2d8a..5b192e4075ed8424a6185de1b98f75c82e564830 100644 --- a/test/metabase/models/pulse_test.clj +++ b/test/metabase/models/pulse_test.clj @@ -46,18 +46,18 @@ :creator (user-details :rasta) :name "Lodi Dodi" :public_perms 2 - :cards [{:name "Test Card", - :description nil, - :display "table"}] - :channels [{:enabled true - :schedule_type :daily, - :schedule_hour 15, - :schedule_frame nil, - :channel_type :email, - :details {:other "stuff"}, - :schedule_day nil, - :recipients [{:email "foo@bar.com"} - (dissoc (user-details :rasta) :is_superuser :is_qbnewb)]}]} + :cards [{:name "Test Card" + :description nil + :display :table}] + :channels [{:enabled true + :schedule_type :daily + :schedule_hour 15 + :schedule_frame nil + :channel_type :email + :details {:other "stuff"} + :schedule_day nil + :recipients [{:email "foo@bar.com"} + (dissoc (user-details :rasta) :is_superuser :is_qbnewb)]}]} (tu/with-temp* [Pulse [{pulse-id :id} {:name "Lodi Dodi"}] PulseChannel [{channel-id :id :as channel} {:pulse_id pulse-id :details {:other "stuff" @@ -76,19 +76,19 @@ ;; update-pulse-cards (expect - [[] - ["card1"] - ["card2"] - ["card2" "card1"] - ["card1" "card3"]] + [#{} + #{"card1"} + #{"card2"} + #{"card2" "card1"} + #{"card1" "card3"}] (tu/with-temp* [Pulse [{pulse-id :id}] Card [{card-id-1 :id} {:name "card1"}] Card [{card-id-2 :id} {:name "card2"}] Card [{card-id-3 :id} {:name "card3"}]] (let [upd-cards! (fn [cards] (update-pulse-cards {:id pulse-id} cards) - (doall (for [card-id (db/sel :many :field [PulseCard :card_id] :pulse_id pulse-id)] - (db/sel :one :field [Card :name] :id card-id))))] + (set (for [card-id (db/select-field :card_id PulseCard, :pulse_id pulse-id)] + (db/select-one-field :name Card, :id card-id))))] [(upd-cards! []) (upd-cards! [card-id-1]) (upd-cards! [card-id-2]) @@ -111,7 +111,7 @@ :schedule_type :daily :schedule_hour 4 :recipients [{:email "foo@bar.com"} {:id (user->id :rasta)}]}]) - (-> (db/sel :one PulseChannel :pulse_id id) + (-> (PulseChannel :pulse_id id) (hydrate :recipients) (dissoc :id :pulse_id :created_at :updated_at) (m/dissoc-in [:details :emails])))) @@ -121,21 +121,21 @@ (expect {:creator_id (user->id :rasta) :name "Booyah!" 
- :channels [{:enabled true - :schedule_type :daily - :schedule_hour 18 + :channels [{:enabled true + :schedule_type :daily + :schedule_hour 18 :schedule_frame nil - :channel_type :email - :recipients [{:email "foo@bar.com"}] - :schedule_day nil}] + :channel_type :email + :recipients [{:email "foo@bar.com"}] + :schedule_day nil}] :cards [{:name "Test Card" :description nil - :display "table"}]} + :display :table}]} (tu/with-temp Card [{:keys [id]} {:name "Test Card"}] (create-pulse-then-select! "Booyah!" (user->id :rasta) [id] [{:channel_type :email - :schedule_type :daily - :schedule_hour 18 - :recipients [{:email "foo@bar.com"}]}]))) + :schedule_type :daily + :schedule_hour 18 + :recipients [{:email "foo@bar.com"}]}]))) ;; update-pulse ;; basic update. we are testing several things here @@ -149,29 +149,29 @@ {:creator_id (user->id :rasta) :name "We like to party" :public_perms 2 - :cards [{:name "Bar Card", - :description nil, - :display "bar"} - {:name "Test Card", - :description nil, - :display "table"}] - :channels [{:enabled true - :schedule_type :daily, - :schedule_hour 18, - :schedule_frame nil, - :channel_type :email, - :schedule_day nil, - :recipients [{:email "foo@bar.com"} - (dissoc (user-details :crowberto) :is_superuser :is_qbnewb)]}]} + :cards [{:name "Bar Card" + :description nil + :display :bar} + {:name "Test Card" + :description nil + :display :table}] + :channels [{:enabled true + :schedule_type :daily + :schedule_hour 18 + :schedule_frame nil + :channel_type :email + :schedule_day nil + :recipients [{:email "foo@bar.com"} + (dissoc (user-details :crowberto) :is_superuser :is_qbnewb)]}]} (tu/with-temp* [Pulse [{pulse-id :id}] Card [{card-id-1 :id} {:name "Test Card"}] Card [{card-id-2 :id} {:name "Bar Card", :display :bar}]] (update-pulse-then-select! 
{:id pulse-id - :name "We like to party" - :creator_id (user->id :crowberto) - :cards [card-id-2 card-id-1] - :channels [{:channel_type :email - :schedule_type :daily - :schedule_hour 18 - :recipients [{:email "foo@bar.com"} - {:id (user->id :crowberto)}]}]}))) + :name "We like to party" + :creator_id (user->id :crowberto) + :cards [card-id-2 card-id-1] + :channels [{:channel_type :email + :schedule_type :daily + :schedule_hour 18 + :recipients [{:email "foo@bar.com"} + {:id (user->id :crowberto)}]}]}))) diff --git a/test/metabase/models/revision_test.clj b/test/metabase/models/revision_test.clj index 3563c549f16f7d6d29e7178db1dbc08bef884c2f..24a8d9727bcf328faa78de8551b591ca41e47c69 100644 --- a/test/metabase/models/revision_test.clj +++ b/test/metabase/models/revision_test.clj @@ -1,6 +1,5 @@ (ns metabase.models.revision-test (:require [expectations :refer :all] - [korma.core :refer [table]] [medley.core :as m] [metabase.db :as db] (metabase.models [card :refer [Card]] diff --git a/test/metabase/models/session_test.clj b/test/metabase/models/session_test.clj index 8519102ade08ca2be54394d96622200333aadc03..f576aa707bcc5a8f9156d129c704aced89b13a39 100644 --- a/test/metabase/models/session_test.clj +++ b/test/metabase/models/session_test.clj @@ -1,6 +1,6 @@ (ns metabase.models.session-test (:require [expectations :refer :all] - [korma.core :as k] + [metabase.db :as db] (metabase.models [session :refer :all] [user :refer [User]]) [metabase.test.util :refer :all] @@ -14,20 +14,20 @@ :last_name (random-name) :email (str (random-name) "@metabase.com") :password "nada"}] - (k/insert Session - (k/values [{:id "the-greatest-day-ever" - :user_id user-id - :created_at (u/->Timestamp "1980-10-19T05:05:05.000Z")} - {:id "even-more-greatness" - :user_id user-id - :created_at (u/->Timestamp "1980-10-19T05:08:05.000Z")} - {:id "the-world-of-bi-changes-forever" - :user_id user-id - :created_at (u/->Timestamp "2015-10-21")} - {:id "something-could-have-happened" - :user_id user-id - :created_at (u/->Timestamp "1999-12-31")} - {:id "now" - :user_id user-id - :created_at (u/new-sql-timestamp)}])) + (db/simple-insert-many! Session + [{:id "the-greatest-day-ever" + :user_id user-id + :created_at (u/->Timestamp "1980-10-19T05:05:05.000Z")} + {:id "even-more-greatness" + :user_id user-id + :created_at (u/->Timestamp "1980-10-19T05:08:05.000Z")} + {:id "the-world-of-bi-changes-forever" + :user_id user-id + :created_at (u/->Timestamp "2015-10-21")} + {:id "something-could-have-happened" + :user_id user-id + :created_at (u/->Timestamp "1999-12-31")} + {:id "now" + :user_id user-id + :created_at (u/new-sql-timestamp)}]) (first-session-for-user user-id))) diff --git a/test/metabase/models/setting_test.clj b/test/metabase/models/setting_test.clj index 7ea012f82b18270cd54fcf6a82e9268cb6a9ff14..2b642d5238b3a17133f505c7c55edb5b92d636bf 100644 --- a/test/metabase/models/setting_test.clj +++ b/test/metabase/models/setting_test.clj @@ -21,7 +21,7 @@ (defn db-fetch-setting "Fetch `Setting` value from the DB to verify things work as we expect." [setting-name] - (db/sel :one :field [Setting :value], :key (name setting-name))) + (db/select-one-field :value Setting, :key (name setting-name))) (defn setting-exists? 
[setting-name] (boolean (Setting :key (name setting-name)))) diff --git a/test/metabase/query_processor_test.clj b/test/metabase/query_processor_test.clj index ec23b694f4d68e4aa41667f0fcb72cfe1184cd72..83e9b8c2032872e2029cc02b23a350ebd34b2a3e 100644 --- a/test/metabase/query_processor_test.clj +++ b/test/metabase/query_processor_test.clj @@ -13,7 +13,7 @@ [interface :as qpi]) [metabase.test.data :refer :all] (metabase.test.data [dataset-definitions :as defs] - [datasets :as datasets :refer [*data-loader* *engine*]] + [datasets :as datasets :refer [*driver* *engine*]] [interface :refer [create-database-definition], :as i]) [metabase.test.util :as tu] [metabase.util :as u])) @@ -986,7 +986,7 @@ ;; There were 9 "sad toucan incidents" on 2015-06-02 (expect-with-non-timeseries-dbs - (if (i/has-questionable-timezone-support? *data-loader*) + (if (i/has-questionable-timezone-support? *driver*) 10 9) (count (rows (dataset sad-toucan-incidents @@ -1010,7 +1010,7 @@ ["2015-06-10" 9]] ;; SQL Server, Mongo, and Redshift don't have a concept of timezone so results are all grouped by UTC - (i/has-questionable-timezone-support? *data-loader*) + (i/has-questionable-timezone-support? *driver*) [["2015-06-01T00:00:00.000Z" 6] ["2015-06-02T00:00:00.000Z" 10] ["2015-06-03T00:00:00.000Z" 4] @@ -1413,7 +1413,7 @@ ["2015-06-02 08:20:00" 1] ["2015-06-02 11:11:00" 1]] - (i/has-questionable-timezone-support? *data-loader*) + (i/has-questionable-timezone-support? *driver*) [["2015-06-01T10:31:00.000Z" 1] ["2015-06-01T16:06:00.000Z" 1] ["2015-06-01T17:23:00.000Z" 1] @@ -1465,7 +1465,7 @@ ["2015-06-02 11:00:00" 1] ["2015-06-02 13:00:00" 1]] - (i/has-questionable-timezone-support? *data-loader*) + (i/has-questionable-timezone-support? *driver*) [["2015-06-01T10:00:00.000Z" 1] ["2015-06-01T16:00:00.000Z" 1] ["2015-06-01T17:00:00.000Z" 1] @@ -1491,7 +1491,7 @@ (sad-toucan-incidents-with-bucketing :hour)) (expect-with-non-timeseries-dbs - (if (i/has-questionable-timezone-support? *data-loader*) + (if (i/has-questionable-timezone-support? *driver*) [[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]] [[0 8] [1 9] [2 7] [3 10] [4 10] [5 9] [6 6] [7 5] [8 7] [9 7]]) (sad-toucan-incidents-with-bucketing :hour-of-day)) @@ -1510,7 +1510,7 @@ ["2015-06-09" 7] ["2015-06-10" 9]] - (i/has-questionable-timezone-support? *data-loader*) + (i/has-questionable-timezone-support? *driver*) [["2015-06-01T00:00:00.000Z" 6] ["2015-06-02T00:00:00.000Z" 10] ["2015-06-03T00:00:00.000Z" 4] @@ -1536,19 +1536,19 @@ (sad-toucan-incidents-with-bucketing :day)) (expect-with-non-timeseries-dbs - (if (i/has-questionable-timezone-support? *data-loader*) + (if (i/has-questionable-timezone-support? *driver*) [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]] [[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]]) (sad-toucan-incidents-with-bucketing :day-of-week)) (expect-with-non-timeseries-dbs - (if (i/has-questionable-timezone-support? *data-loader*) + (if (i/has-questionable-timezone-support? *driver*) [[1 6] [2 10] [3 4] [4 9] [5 9] [6 8] [7 8] [8 9] [9 7] [10 9]] [[1 8] [2 9] [3 9] [4 4] [5 11] [6 8] [7 6] [8 10] [9 6] [10 10]]) (sad-toucan-incidents-with-bucketing :day-of-month)) (expect-with-non-timeseries-dbs - (if (i/has-questionable-timezone-support? *data-loader*) + (if (i/has-questionable-timezone-support? 
*driver*) [[152 6] [153 10] [154 4] [155 9] [156 9] [157 8] [158 8] [159 9] [160 7] [161 9]] [[152 8] [153 9] [154 9] [155 4] [156 11] [157 8] [158 6] [159 10] [160 6] [161 10]]) (sad-toucan-incidents-with-bucketing :day-of-year)) @@ -1562,7 +1562,7 @@ ["2015-06-21" 60] ["2015-06-28" 7]] - (i/has-questionable-timezone-support? *data-loader*) + (i/has-questionable-timezone-support? *driver*) [["2015-05-31T00:00:00.000Z" 46] ["2015-06-07T00:00:00.000Z" 47] ["2015-06-14T00:00:00.000Z" 40] @@ -1619,7 +1619,7 @@ (vec (for [i (range -15 15)] ;; Create timestamps using relative dates (e.g. `DATEADD(second, -195, GETUTCDATE())` instead of generating `java.sql.Timestamps` here so ;; they'll be in the DB's native timezone. Some DBs refuse to use the same timezone we're running the tests from *cough* SQL Server *cough* - [(u/prog1 (driver/date-interval *data-loader* :second (* i interval-seconds)) + [(u/prog1 (driver/date-interval *driver* :second (* i interval-seconds)) (assert <>))]))])) (def ^:private checkins:4-per-minute (partial database-def-with-timestamps 15)) diff --git a/test/metabase/sync_database/introspect_test.clj b/test/metabase/sync_database/introspect_test.clj index 772ff96aa2fc9fc4091023ea1961f4f48574cf27..5a92c6e310fb650adec944eb41991fd8a26c6006 100644 --- a/test/metabase/sync_database/introspect_test.clj +++ b/test/metabase/sync_database/introspect_test.clj @@ -1,6 +1,5 @@ (ns metabase.sync-database.introspect-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.mock.moviedb :as moviedb] (metabase.models [database :refer [Database]] @@ -14,11 +13,11 @@ save-all-table-columns! save-all-table-fks! create-raw-table! update-raw-table! disable-raw-tables!) (defn get-tables [database-id] - (->> (hydrate/hydrate (db/sel :many RawTable :database_id database-id (k/order :id)) :columns) + (->> (hydrate/hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns) (mapv tu/boolean-ids-and-timestamps))) (defn get-table [table-id] - (->> (hydrate/hydrate (db/sel :one RawTable :raw_table_id table-id) :columns) + (->> (hydrate/hydrate (RawTable :raw_table_id table-id) :columns) (mapv tu/boolean-ids-and-timestamps))) ;; save-all-table-fks @@ -113,7 +112,7 @@ RawColumn [_ {:raw_table_id raw-table-id2, :name "user_id"}] RawTable [{raw-table-id3 :id, :as table2} {:database_id database-id, :schema nil, :name "users"}] RawColumn [_ {:raw_table_id raw-table-id3, :name "id"}]] - (let [get-columns #(->> (db/sel :many RawColumn :raw_table_id raw-table-id1 (k/order :id)) + (let [get-columns #(->> (db/select RawColumn, :raw_table_id raw-table-id1, {:order-by [:id]}) (mapv tu/boolean-ids-and-timestamps))] ;; original list should not have any fks [(get-columns) @@ -209,7 +208,7 @@ :updated_at true}]] (tu/with-temp* [Database [{database-id :id}] RawTable [{raw-table-id :id, :as table} {:database_id database-id}]] - (let [get-columns #(->> (db/sel :many RawColumn :raw_table_id raw-table-id (k/order :id)) + (let [get-columns #(->> (db/select RawColumn, :raw_table_id raw-table-id, {:order-by [:id]}) (mapv tu/boolean-ids-and-timestamps))] ;; original list should be empty [(get-columns) diff --git a/test/metabase/sync_database/sync_dynamic_test.clj b/test/metabase/sync_database/sync_dynamic_test.clj index cab50c21487bf0bdefe751d8502f2c30f063c4cb..adc611456bbeeba223308e8f7f357f81963bddc9 100644 --- a/test/metabase/sync_database/sync_dynamic_test.clj +++ b/test/metabase/sync_database/sync_dynamic_test.clj @@ -1,6 +1,5 @@ (ns 
metabase.sync-database.sync-dynamic-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.mock.toucanery :as toucanery] (metabase.models [database :refer [Database]] @@ -16,7 +15,7 @@ save-table-fields!) (defn- get-tables [database-id] - (->> (hydrate/hydrate (db/sel :many Table :db_id database-id (k/order :id)) :fields) + (->> (hydrate/hydrate (db/select Table, :db_id database-id, {:order-by [:id]}) :fields) (mapv tu/boolean-ids-and-timestamps))) @@ -254,7 +253,7 @@ RawTable [{raw-table-id :id, :as table} {:database_id database-id}] Table [{table-id :id, :as tbl} {:db_id database-id, :raw_table_id raw-table-id}]] (let [get-fields (fn [] - (for [field (db/sel :many Field, :table_id table-id, (k/order :id))] + (for [field (db/select Field, :table_id table-id, {:order-by [:id]})] (dissoc (tu/boolean-ids-and-timestamps field) :active :field_type :position :preview_display)))] ;; start with no fields @@ -293,7 +292,7 @@ ;; do a quick introspection to add the RawTables to the db (introspect/introspect-database-and-update-raw-tables! driver db) ;; stub out the Table we are going to sync for real below - (let [raw-table-id (db/sel :one :id RawTable, :database_id database-id, :name "transactions") + (let [raw-table-id (db/select-one-id RawTable, :database_id database-id, :name "transactions") tbl (db/insert! Table :db_id database-id :raw_table_id raw-table-id @@ -309,9 +308,9 @@ (get-tables database-id)) ;; one more time, but lets disable the table this time and ensure that's handled properly (do - (k/update RawTable - (k/set-fields {:active false}) - (k/where {:database_id database-id, :name "transactions"})) + (db/update-where! RawTable {:database_id database-id + :name "transactions"} + :active false) (scan-table-and-update-data-model! driver db tbl) (get-tables database-id))])))) @@ -331,7 +330,7 @@ (introspect/introspect-database-and-update-raw-tables! driver db) [;; first check that the raw tables stack up as expected, especially that fields were skipped because this is a :dynamic-schema db - (->> (hydrate/hydrate (db/sel :many RawTable :database_id database-id (k/order :id)) :columns) + (->> (hydrate/hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns) (mapv tu/boolean-ids-and-timestamps)) ;; now lets run a sync and check what we got (do @@ -343,8 +342,8 @@ (get-tables database-id)) ;; one more time, but lets disable a table this time and ensure that's handled properly (do - (k/update RawTable - (k/set-fields {:active false}) - (k/where {:database_id database-id, :name "transactions"})) + (db/update-where! RawTable {:database_id database-id + :name "transactions"} + :active false) (scan-database-and-update-data-model! driver db) (get-tables database-id))]))) diff --git a/test/metabase/sync_database/sync_test.clj b/test/metabase/sync_database/sync_test.clj index 8ee6a204086a3499391827d38abce5dcedd8b28c..7bb00aa5881e2d2da73b7dbdecde7bc10d2468d2 100644 --- a/test/metabase/sync_database/sync_test.clj +++ b/test/metabase/sync_database/sync_test.clj @@ -1,6 +1,5 @@ (ns metabase.sync-database.sync-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] (metabase.mock [moviedb :as moviedb] [schema-per-customer :as schema-per-customer]) @@ -20,7 +19,7 @@ save-fks! save-table-fields!) 
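;; For orientation, the read-path changes throughout these test files all follow the same mapping
;; from the old `db/sel` calling convention (plus korma's `(k/order ...)`) onto the newer
;; `metabase.db` helpers. A rough, non-authoritative sketch using models that already appear in
;; this patch; bindings like `table-id`, `field-id`, and `database-id` are placeholders:
(comment
  ;; old: (db/sel :one Table, :name "USERS")                         ; single row via keyword filters
  (Table :name "USERS")
  ;; old: (db/sel :one Table :id table-id)                           ; or look up directly by primary key
  (Table table-id)
  ;; old: (db/sel :many Table :db_id database-id (k/order :id))      ; many rows, ordered via an options map
  (db/select Table, :db_id database-id, {:order-by [:id]})
  ;; old: (db/sel :one :fields [Table :id :name] :id table-id)       ; restrict the returned columns
  (db/select-one [Table :id :name], :id table-id)
  ;; old: (db/sel :one :field [Field :special_type] :id field-id)    ; a single column of a single row
  (db/select-one-field :special_type Field, :id field-id)
  ;; old: (db/sel :many :field [Field :name] :table_id table-id)     ; a single column of many rows
  (db/select-field :name Field, :table_id table-id)
  ;; old: (db/sel :one :id Table, :db_id database-id, :name "movies")
  (db/select-one-id Table, :db_id database-id, :name "movies"))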
(defn- get-tables [database-id] - (->> (hydrate/hydrate (db/sel :many Table :db_id database-id (k/order :id)) :fields) + (->> (hydrate/hydrate (db/select Table, :db_id database-id, {:order-by [:id]}) :fields) (mapv tu/boolean-ids-and-timestamps))) @@ -42,7 +41,7 @@ Field [{target1 :id} {:table_id t2, :raw_column_id raw-target1, :name "target1"}] Field [{target2 :id} {:table_id t2, :raw_column_id raw-target2, :name "target2"}]] (let [get-fields (fn [table-id] - (->> (db/sel :many :fields [Field :name :special_type :fk_target_field_id] :table_id table-id) + (->> (db/select [Field :name :special_type :fk_target_field_id], :table_id table-id) (mapv tu/boolean-ids-and-timestamps)))] [ ;; original list should not have any fks (get-fields t1) @@ -76,13 +75,13 @@ (tu/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]] ;; setup a couple things we'll use in the test (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) db) - (let [raw-table-id (db/sel :one :id RawTable, :database_id database-id, :name "movies") + (let [raw-table-id (db/select-one-id RawTable, :database_id database-id, :name "movies") table (db/insert! Table :db_id database-id :raw_table_id raw-table-id :name "movies" :active true) - get-table #(-> (db/sel :one :fields [Table :id :name :description] :id (:id table)) + get-table #(-> (db/select-one [Table :id :name :description], :id (:id table)) (hydrate/hydrate :fields) (update :fields (fn [fields] (for [f fields @@ -265,7 +264,7 @@ RawColumn [{raw-column-id2 :id} {:raw_table_id raw-table-id, :name "Second", :details {:special-type :category, :base-type "TextField"}}] RawColumn [{raw-column-id3 :id} {:raw_table_id raw-table-id, :name "Third", :details {:base-type "BooleanField"}}] Table [{table-id :id, :as tbl} {:db_id database-id, :raw_table_id raw-table-id}]] - (let [get-fields #(->> (db/sel :many Field :table_id table-id (k/order :id)) + (let [get-fields #(->> (db/select Field, :table_id table-id, {:order-by [:id]}) (mapv tu/boolean-ids-and-timestamps) (mapv (fn [m] (dissoc m :active :field_type :position :preview_display)))) @@ -280,7 +279,7 @@ first-sync ;; now add another column and modify the first (do - (db/update! RawColumn, raw-column-id1 :is_pk false, :details {:base-type "DecimalField"}) + (db/update! RawColumn raw-column-id1, :is_pk false, :details {:base-type "DecimalField"}) (save-table-fields! tbl) (get-fields)) ;; now disable the first column @@ -294,25 +293,25 @@ ;; retire-tables! (expect (let [disabled-movies-table (fn [tbl] - (if-not (= "movies" (:name tbl)) - tbl - (assoc tbl :active false - :fields [])))] + (if-not (= "movies" (:name tbl)) + tbl + (assoc tbl :active false + :fields [])))] [moviedb/moviedb-tables-and-fields (mapv disabled-movies-table moviedb/moviedb-tables-and-fields)]) (tu/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]] ;; setup a couple things we'll use in the test (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) db) (update-data-models-from-raw-tables! db) - (let [get-tables #(->> (hydrate/hydrate (db/sel :many Table :db_id database-id (k/order :id)) :fields) + (let [get-tables #(->> (hydrate/hydrate (db/select Table, :db_id database-id, {:order-by [:id]}) :fields) (mapv tu/boolean-ids-and-timestamps))] ;; here we go [(get-tables) (do ;; disable the table - (k/update RawTable - (k/set-fields {:active false}) - (k/where {:database_id database-id, :name "movies"})) + (db/update-where! 
RawTable {:database_id database-id + :name "movies"} + :active false) ;; run our retires function (retire-tables! db) ;; now we should see the table and its fields disabled @@ -340,7 +339,7 @@ (introspect/introspect-database-and-update-raw-tables! driver db) ;; stub out the Table we are going to sync for real below - (let [raw-table-id (db/sel :one :id RawTable, :database_id database-id, :name "roles") + (let [raw-table-id (db/select-one-id RawTable, :database_id database-id, :name "roles") tbl (db/insert! Table :db_id database-id :raw_table_id raw-table-id @@ -356,9 +355,9 @@ (get-tables database-id)) ;; one more time, but lets disable the table this time and ensure that's handled properly (do - (k/update RawTable - (k/set-fields {:active false}) - (k/where {:database_id database-id, :name "roles"})) + (db/update-where! RawTable {:database_id database-id + :name "roles"} + :active false) (update-data-models-for-table! tbl) (get-tables database-id))])))) @@ -378,7 +377,7 @@ (introspect/introspect-database-and-update-raw-tables! driver db) [;; first check that the raw tables stack up as expected - (->> (hydrate/hydrate (db/sel :many RawTable :database_id database-id (k/order :id)) :columns) + (->> (hydrate/hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns) (mapv tu/boolean-ids-and-timestamps)) ;; now lets run a sync and check what we got (do @@ -390,9 +389,9 @@ (get-tables database-id)) ;; one more time, but lets disable a table this time and ensure that's handled properly (do - (k/update RawTable - (k/set-fields {:active false}) - (k/where {:database_id database-id, :name "roles"})) + (db/update-where! RawTable {:database_id database-id + :name "roles"} + :active false) (update-data-models-from-raw-tables! db) (get-tables database-id))]))) @@ -401,12 +400,12 @@ "Convert :fk_target_[column|field]_id into more testable information with table/schema names." [m] (let [resolve-raw-column (fn [column-id] - (when-let [{col-name :name, table :raw_table_id} (db/sel :one :fields [RawColumn :raw_table_id :name] :id column-id)] - (-> (db/sel :one :fields [RawTable :schema :name] :id table) + (when-let [{col-name :name, table :raw_table_id} (db/select-one [RawColumn :raw_table_id :name], :id column-id)] + (-> (db/select-one [RawTable :schema :name], :id table) (assoc :col-name col-name)))) resolve-field (fn [field-id] - (when-let [{col-name :name, table :table_id} (db/sel :one :fields [Field :table_id :name] :id field-id)] - (-> (db/sel :one :fields [Table :schema :name] :id table) + (when-let [{col-name :name, table :table_id} (db/select-one [Field :table_id :name], :id field-id)] + (-> (db/select-one [Table :schema :name], :id table) (assoc :col-name col-name)))) resolve-fk (fn [m] (cond @@ -427,14 +426,14 @@ schema-per-customer/schema-per-customer-tables-and-fields] (tu/with-temp* [Database [{database-id :id, :as db} {:engine :schema-per-customer}]] (let [driver (schema-per-customer/->SchemaPerCustomerDriver) - db-tables #(->> (hydrate/hydrate (db/sel :many Table :db_id % (k/order :id)) :fields) + db-tables #(->> (hydrate/hydrate (db/select Table, :db_id %, {:order-by [:id]}) :fields) (mapv resolve-fk-targets) (mapv tu/boolean-ids-and-timestamps))] ;; do a quick introspection to add the RawTables to the db (introspect/introspect-database-and-update-raw-tables! 
driver db) [;; first check that the raw tables stack up as expected - (->> (hydrate/hydrate (db/sel :many RawTable :database_id database-id (k/order :id)) :columns) + (->> (hydrate/hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns) (mapv resolve-fk-targets) (mapv tu/boolean-ids-and-timestamps)) ;; now lets run a sync and check what we got @@ -467,4 +466,5 @@ #{{:name "SOUTH_MIGRATIONHISTORY", :visibility_type :cruft} {:name "ACQUIRED_TOUCANS", :visibility_type nil}} (data/dataset metabase.sync-database.sync-test/db-with-some-cruft - (set (db/sel :many :fields [Table :name :visibility_type] :db_id (data/id))))) + (set (for [table (db/select [Table :name :visibility_type], :db_id (data/id))] + (into {} table))))) diff --git a/test/metabase/sync_database_test.clj b/test/metabase/sync_database_test.clj index 9b83c729931a88d2f7f64095915b82e063343c32..8fb3f002026d02aa5d072698ec582ce124e8fbc2 100644 --- a/test/metabase/sync_database_test.clj +++ b/test/metabase/sync_database_test.clj @@ -1,15 +1,14 @@ (ns metabase.sync-database-test (:require [expectations :refer :all] - [korma.core :as k] [metabase.db :as db] [metabase.driver :as driver] [metabase.driver.generic-sql :refer [korma-entity]] (metabase.models [database :refer [Database]] - [field :refer [Field] :as field] + [field :refer [Field]] [field-values :refer [FieldValues]] [hydrate :refer :all] - [raw-table :as raw-table] - [table :refer [Table] :as table]) + [raw-table :refer [RawTable]] + [table :refer [Table]]) [metabase.sync-database :refer :all] (metabase.test [data :refer :all] [util :refer [resolve-private-fns] :as tu]))) @@ -55,15 +54,15 @@ (driver/register-driver! :sync-test (SyncTestDriver.)) -(def ^:private users-table (delay (db/sel :one table/Table, :name "USERS"))) -(def ^:private venues-table (delay (table/Table (id :venues)))) +(def ^:private users-table (delay (Table, :name "USERS"))) +(def ^:private venues-table (delay (Table (id :venues)))) (def ^:private korma-users-table (delay (korma-entity @users-table))) -(def ^:private users-name-field (delay (field/Field (id :users :name)))) +(def ^:private users-name-field (delay (Field (id :users :name)))) (defn- table-details [table] (into {} (-> (dissoc table :db :pk_field :field_values) - (assoc :fields (for [field (db/sel :many Field, :table_id (:id table), (k/order :name))] + (assoc :fields (for [field (db/select Field, :table_id (:id table), {:order-by [:name]})] (into {} (dissoc field :table :db :children :qualified-name :qualified-name-components :values :target)))) tu/boolean-ids-and-timestamps))) @@ -192,7 +191,7 @@ ;; we are purposely running the sync twice to test for possible logic issues which only manifest ;; on resync of a database, such as adding tables that already exist or duplicating fields (sync-database! fake-db) - (mapv table-details (db/sel :many table/Table, :db_id (:id fake-db), (k/order :name))))) + (mapv table-details (db/select Table, :db_id (:id fake-db), {:order-by [:name]})))) ;; ## SYNC TABLE @@ -267,13 +266,13 @@ :updated_at true :last_analyzed true}]} (tu/with-temp* [Database [fake-db {:engine :sync-test}] - raw-table/RawTable [{raw-table-id :id} {:database_id (:id fake-db), :name "movie", :schema "default"}] + RawTable [{raw-table-id :id} {:database_id (:id fake-db), :name "movie", :schema "default"}] Table [fake-table {:raw_table_id raw-table-id :name "movie" :schema "default" :db_id (:id fake-db)}]] (sync-table! 
fake-table) - (table-details (db/sel :one Table, :id (:id fake-table))))) + (table-details (Table (:id fake-table))))) ;; test that we prevent running simultaneous syncs on the same database @@ -318,16 +317,16 @@ [[1 2 3] [1 2 3]] (tu/with-temp* [Database [fake-db {:engine :sync-test}] - raw-table/RawTable [fake-table {:database_id (:id fake-db), :name "movie", :schema "default"}]] + RawTable [fake-table {:database_id (:id fake-db), :name "movie", :schema "default"}]] (sync-database! fake-db) - (let [table-id (db/sel :one :id table/Table, :raw_table_id (:id fake-table)) - field-id (db/sel :one :id field/Field, :table_id table-id, :name "title")] + (let [table-id (db/select-one-id Table, :raw_table_id (:id fake-table)) + field-id (db/select-one-id Field, :table_id table-id, :name "title")] (tu/with-temp FieldValues [_ {:field_id field-id :values "[1,2,3]"}] - (let [initial-field-values (db/sel :one :field [FieldValues :values], :field_id field-id)] + (let [initial-field-values (db/select-one-field :values FieldValues, :field_id field-id)] (sync-database! fake-db) [initial-field-values - (db/sel :one :field [FieldValues :values], :field_id field-id)]))))) + (db/select-one-field :values FieldValues, :field_id field-id)]))))) ;; ## Individual Helper Fns @@ -338,20 +337,20 @@ :id :latitude :id] - (let [get-special-type (fn [] (db/sel :one :field [field/Field :special_type] :id (id :venues :id)))] + (let [get-special-type (fn [] (db/select-one-field :special_type Field, :id (id :venues :id)))] [;; Special type should be :id to begin with (get-special-type) ;; Clear out the special type - (do (db/update! field/Field (id :venues :id), :special_type nil) + (do (db/update! Field (id :venues :id), :special_type nil) (get-special-type)) ;; Calling sync-table! should set the special type again (do (sync-table! @venues-table) (get-special-type)) ;; sync-table! should *not* change the special type of fields that are marked with a different type - (do (db/update! field/Field (id :venues :id), :special_type :latitude) + (do (db/update! Field (id :venues :id), :special_type :latitude) (get-special-type)) ;; Make sure that sync-table runs set-table-pks-if-needed! - (do (db/update! field/Field (id :venues :id), :special_type nil) + (do (db/update! Field (id :venues :id), :special_type nil) (sync-table! @venues-table) (get-special-type))])) @@ -360,13 +359,13 @@ ;; Check that Foreign Key relationships were created on sync as we expect (expect (id :venues :id) - (db/sel :one :field [field/Field :fk_target_field_id] :id (id :checkins :venue_id))) + (db/select-one-field :fk_target_field_id Field, :id (id :checkins :venue_id))) (expect (id :users :id) - (db/sel :one :field [field/Field :fk_target_field_id] :id (id :checkins :user_id))) + (db/select-one-field :fk_target_field_id Field, :id (id :checkins :user_id))) (expect (id :categories :id) - (db/sel :one :field [field/Field :fk_target_field_id] :id (id :venues :category_id))) + (db/select-one-field :fk_target_field_id Field, :id (id :venues :category_id))) ;; Check that sync-table! causes FKs to be set like we'd expect (expect [{:special_type :fk, :fk_target_field_id true} @@ -374,23 +373,23 @@ {:special_type :fk, :fk_target_field_id true}] (let [field-id (id :checkins :user_id) get-special-type-and-fk-exists? (fn [] - (-> (db/sel :one :fields [field/Field :special_type :fk_target_field_id] :id field-id) - (update :fk_target_field_id #(db/exists? 
field/Field :id %))))] + (into {} (-> (db/select-one [Field :special_type :fk_target_field_id], :id field-id) + (update :fk_target_field_id #(db/exists? Field :id %)))))] [ ;; FK should exist to start with (get-special-type-and-fk-exists?) ;; Clear out FK / special_type - (do (db/update! field/Field field-id, :special_type nil, :fk_target_field_id nil) + (do (db/update! Field field-id, :special_type nil, :fk_target_field_id nil) (get-special-type-and-fk-exists?)) ;; Run sync-table and they should be set again - (let [table (table/Table (id :checkins))] + (let [table (Table (id :checkins))] (sync-table! table) (get-special-type-and-fk-exists?))])) ;;; ## FieldValues Syncing -(let [get-field-values (fn [] (db/sel :one :field [FieldValues :values] :field_id (id :venues :price))) - get-field-values-id (fn [] (db/sel :one :id FieldValues :field_id (id :venues :price)))] +(let [get-field-values (fn [] (db/select-one-field :values FieldValues, :field_id (id :venues :price))) + get-field-values-id (fn [] (db/select-one-id FieldValues, :field_id (id :venues :price)))] ;; Test that when we delete FieldValues syncing the Table again will cause them to be re-created (expect [[1 2 3 4] ; 1 diff --git a/test/metabase/test/data.clj b/test/metabase/test/data.clj index 8189ec25f26e2f5697f64202852efddf43b24f95..40b240c3d0e8518693a18509d4288100dc5e9e2c 100644 --- a/test/metabase/test/data.clj +++ b/test/metabase/test/data.clj @@ -13,7 +13,7 @@ [metabase.query-processor.expand :as ql] [metabase.query-processor.interface :as qi] [metabase.sync-database :as sync-database] - (metabase.test.data [datasets :refer [*data-loader*]] + (metabase.test.data [datasets :refer [*driver*]] [dataset-definitions :as defs] [h2 :as h2] [interface :as i]) @@ -29,8 +29,8 @@ ;; These functions offer a generic way to get bits of info like Table + Field IDs from any of our many driver/dataset combos. (defn get-or-create-test-data-db! - "Get or create the Test Data database for DATA-LOADER, which defaults to `*data-loader*`." - ([] (get-or-create-test-data-db! *data-loader*)) + "Get or create the Test Data database for DATA-LOADER, which defaults to `*driver*`." + ([] (get-or-create-test-data-db! *driver*)) ([data-loader] (get-or-create-database! data-loader defs/test-data))) (def ^:dynamic ^:private *get-db* get-or-create-test-data-db!) @@ -97,7 +97,7 @@ "Wrap inner QUERY with `:database` ID and other 'outer query' kvs. DB ID is fetched by looking up the Database for the query's `:source-table`." {:style/indent 0} [query :- qi/Query] - {:database (db/sel :one :field [Table :db_id], :id (:source-table query)) + {:database (db/select-one-field :db_id Table, :id (:source-table query)) :type :query :query query}) @@ -116,22 +116,22 @@ (defn format-name [nm] - (i/format-name *data-loader* (name nm))) + (i/format-name *driver* (name nm))) (defn- get-table-id-or-explode [db-id table-name] (let [table-name (format-name table-name)] - (or (db/sel :one :id Table, :db_id db-id, :name table-name) + (or (db/select-one-id Table, :db_id db-id, :name table-name) (throw (Exception. 
(format "No Table '%s' found for Database %d.\nFound: %s" table-name db-id - (u/pprint-to-str (db/sel :many :id->field [Table :name], :db_id db-id, :active true)))))))) + (u/pprint-to-str (db/select-id->field :name Table, :db_id db-id, :active true)))))))) (defn- get-field-id-or-explode [table-id field-name & {:keys [parent-id]}] (let [field-name (format-name field-name)] - (or (db/sel :one :id Field, :active true, :table_id table-id, :name field-name, :parent_id parent-id) + (or (db/select-one-id Field, :active true, :table_id table-id, :name field-name, :parent_id parent-id) (throw (Exception. (format "Couldn't find Field %s for Table %d.\nFound: %s" (str \' field-name \' (when parent-id (format " (parent: %d)" parent-id))) table-id - (u/pprint-to-str (db/sel :many :id->field [Field :name], :active true, :table_id table-id)))))))) + (u/pprint-to-str (db/select-id->field :name Field, :active true, :table_id table-id)))))))) (defn id "Get the ID of the current database or one of its `Tables` or `Fields`. @@ -153,13 +153,13 @@ (defn fks-supported? "Does the current engine support foreign keys?" [] - (contains? (driver/features *data-loader*) :foreign-keys)) + (contains? (driver/features *driver*) :foreign-keys)) -(defn default-schema [] (i/default-schema *data-loader*)) -(defn id-field-type [] (i/id-field-type *data-loader*)) +(defn default-schema [] (i/default-schema *driver*)) +(defn id-field-type [] (i/id-field-type *driver*)) (defn expected-base-type->actual [base-type] - (i/expected-base-type->actual *data-loader* base-type)) + (i/expected-base-type->actual *driver* base-type)) ;; ## Loading / Deleting Test Datasets @@ -167,9 +167,9 @@ (defn get-or-create-database! "Create DBMS database associated with DATABASE-DEFINITION, create corresponding Metabase `Databases`/`Tables`/`Fields`, and sync the `Database`. DATASET-LOADER should be an object that implements `IDatasetLoader`; it defaults to the value returned by the method `dataset-loader` for the - current dataset (`*data-loader*`), which is H2 by default." + current dataset (`*driver*`), which is H2 by default." ([^DatabaseDefinition database-definition] - (get-or-create-database! *data-loader* database-definition)) + (get-or-create-database! *driver* database-definition)) ([dataset-loader {:keys [database-name], :as ^DatabaseDefinition database-definition}] (let [engine (i/engine dataset-loader)] (or (i/metabase-instance database-definition engine) @@ -193,7 +193,7 @@ (throw (Exception. (format "Table '%s' not loaded from definiton:\n%s\nFound:\n%s" table-name (u/pprint-to-str (dissoc table-definition :rows)) - (u/pprint-to-str (db/sel :many :fields [Table :schema :name], :db_id (:id db))))))))] + (u/pprint-to-str (db/select [Table :schema :name], :db_id (:id db))))))))] (doseq [{:keys [field-name field-type visibility-type special-type], :as field-definition} (:field-definitions table-definition)] (let [field (delay (or (i/metabase-instance field-definition @table) (throw (Exception. (format "Field '%s' not loaded from definition:\n" @@ -213,9 +213,9 @@ (defn remove-database! "Delete Metabase `Database`, `Fields` and `Tables` associated with DATABASE-DEFINITION, then remove the physical database from the associated DBMS. DATASET-LOADER should be an object that implements `IDatasetLoader`; by default it is the value returned by the method `dataset-loader` for the - current dataset, bound to `*data-loader*`." + current dataset, bound to `*driver*`." ([^DatabaseDefinition database-definition] - (remove-database! 
*data-loader* database-definition)) + (remove-database! *driver* database-definition)) ([dataset-loader ^DatabaseDefinition database-definition] ;; Delete the Metabase Database and associated objects (db/cascade-delete! Database :id (:id (i/metabase-instance database-definition (i/engine dataset-loader)))) @@ -241,7 +241,7 @@ (defn do-with-temp-db [^DatabaseDefinition dbdef f] - (let [loader *data-loader* + (let [loader *driver* dbdef (i/map->DatabaseDefinition (assoc dbdef :short-lived? true))] (swap! loader->loaded-db-def conj [loader dbdef]) (binding [db/*disable-db-logging* true] diff --git a/test/metabase/test/data/bigquery.clj b/test/metabase/test/data/bigquery.clj index 7d3dbd077654a10242de33c462d2669fa0ceb937..1cfd90ff948e4ed49629fc17cc91e3af564cbf1a 100644 --- a/test/metabase/test/data/bigquery.clj +++ b/test/metabase/test/data/bigquery.clj @@ -95,35 +95,35 @@ (.setQuery (format "SELECT COUNT(*) FROM [%s.%s]" dataset-id table-id)))))))))) ;; This is a dirty HACK -(defn- ^DateTime timestamp-korma-form->GoogleDateTime - "Convert the korma form we normally use to wrap a `Timestamp` to a Google `DateTime`." - [{[{^String s :korma.sql.utils/generated}] :korma.sql.utils/args}] - {:pre [(seq s)]} +(defn- ^DateTime timestamp-honeysql-form->GoogleDateTime + "Convert the HoneySQL form we normally use to wrap a `Timestamp` to a Google `DateTime`." + [{[{s :literal}] :args}] + {:pre [(string? s) (seq s)]} (DateTime. (u/->Timestamp (s/replace s #"'" "")))) -(defn- insert-data! [^String dataset-id, ^String table-id, row-maps] - {:pre [(seq dataset-id) (seq table-id) (sequential? row-maps) (seq row-maps) (every? map? row-maps)]} - (execute (.insertAll (.tabledata bigquery) project-id dataset-id table-id - (doto (TableDataInsertAllRequest.) - (.setRows (for [row-map row-maps] - (let [data (TableRow.)] - (doseq [[k v] row-map - :let [v (if (:korma.sql.utils/func v) - (timestamp-korma-form->GoogleDateTime v) - v)]] - (.set data (name k) v)) - (doto (TableDataInsertAllRequest$Rows.) - (.setJson data)))))))) - ;; Wait up to 30 seconds for all the rows to be loaded and become available by BigQuery - (let [expected-row-count (count row-maps)] - (loop [seconds-to-wait-for-load 30] - (let [actual-row-count (table-row-count dataset-id table-id)] - (cond - (= expected-row-count actual-row-count) :ok - (> seconds-to-wait-for-load 0) (do (Thread/sleep 1000) - (recur (dec seconds-to-wait-for-load))) - :else (throw (Exception. (format "Failed to load table data for %s.%s: expected %d rows, loaded %d" - dataset-id table-id expected-row-count actual-row-count)))))))) + (defn- insert-data! [^String dataset-id, ^String table-id, row-maps] + {:pre [(seq dataset-id) (seq table-id) (sequential? row-maps) (seq row-maps) (every? map? row-maps)]} + (execute (.insertAll (.tabledata bigquery) project-id dataset-id table-id + (doto (TableDataInsertAllRequest.) + (.setRows (for [row-map row-maps] + (let [data (TableRow.)] + (doseq [[k v] row-map + :let [v (if (instance? honeysql.types.SqlCall v) + (timestamp-honeysql-form->GoogleDateTime v) + v)]] + (.set data (name k) v)) + (doto (TableDataInsertAllRequest$Rows.) 
+ (.setJson data)))))))) + ;; Wait up to 30 seconds for all the rows to be loaded and become available by BigQuery + (let [expected-row-count (count row-maps)] + (loop [seconds-to-wait-for-load 30] + (let [actual-row-count (table-row-count dataset-id table-id)] + (cond + (= expected-row-count actual-row-count) :ok + (> seconds-to-wait-for-load 0) (do (Thread/sleep 1000) + (recur (dec seconds-to-wait-for-load))) + :else (throw (Exception. (format "Failed to load table data for %s.%s: expected %d rows, loaded %d" + dataset-id table-id expected-row-count actual-row-count)))))))) (def ^:private ^:const base-type->bigquery-type {:BigIntegerField :INTEGER @@ -160,7 +160,7 @@ :id (inc i))))) (defn- load-tabledef! [dataset-name {:keys [table-name field-definitions], :as tabledef}] - (let [table-name (normalize-name table-name)] + (let [table-name (normalize-name table-name)] (create-table! dataset-name table-name (fielddefs->field-name->base-type field-definitions)) (insert-data! dataset-name table-name (tabledef->prepared-rows tabledef)))) diff --git a/test/metabase/test/data/crate.clj b/test/metabase/test/data/crate.clj index 299646bcfed62e74e8a05d5e889f148f290c583e..710a9554183b9bf93d53158d47c86b8b895bd11b 100644 --- a/test/metabase/test/data/crate.clj +++ b/test/metabase/test/data/crate.clj @@ -23,11 +23,11 @@ (defn- timestamp->CrateDateTime [value] - (if (instance? java.sql.Timestamp value) - (.getTime (u/->Timestamp value)) - (if (and (instance? clojure.lang.PersistentArrayMap value) (contains? value :korma.sql.utils/generated)) - (+ (read-string (s/replace (:korma.sql.utils/generated value) #"CURRENT_TIMESTAMP \+" "")) (.getTime (u/new-sql-timestamp))) - value))) + (cond + (instance? java.sql.Timestamp value) (.getTime ^java.sql.Timestamp value) + (instance? honeysql.types.SqlRaw value) (+ (Integer/parseInt (s/trim (s/replace (:s value) #"current_timestamp \+" ""))) + (System/currentTimeMillis)) + :else value)) (defn- escape-field-names "Escape the field-name keys in ROW-OR-ROWS." 
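;; The write path moves the same way: korma's `k/insert`/`k/values` and `k/update`/`k/set-fields`/`k/where`
;; combinations are replaced by `metabase.db` helpers, as seen in the middleware, session, and sync tests
;; above. A rough, non-authoritative sketch; `session-id`, `user-id`, `database-id`, and `field-id` are
;; placeholders:
(comment
  ;; old: (k/insert Session (k/values {:id session-id, :user_id user-id, :created_at (u/new-sql-timestamp)}))
  (db/simple-insert! Session, :id session-id, :user_id user-id, :created_at (u/new-sql-timestamp))
  ;; old: (k/insert Session (k/values [{...} {...}])) for inserting several rows at once
  (db/simple-insert-many! Session [{:id "a", :user_id user-id, :created_at (u/new-sql-timestamp)}
                                   {:id "b", :user_id user-id, :created_at (u/new-sql-timestamp)}])
  ;; old: (k/update RawTable (k/set-fields {:active false}) (k/where {:database_id database-id, :name "movies"}))
  (db/update-where! RawTable {:database_id database-id, :name "movies"}
    :active false)
  ;; updates addressed by primary key go through db/update!, as in the Field/RawColumn changes above
  (db/update! Field field-id, :special_type nil))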
diff --git a/test/metabase/test/data/dataset_definitions.clj b/test/metabase/test/data/dataset_definitions.clj index 462e56a6288fc2b6a11fe7cb34f9046577e45ec0..f4cf1903279bdc48150dd779bf545a7da50c5978 100644 --- a/test/metabase/test/data/dataset_definitions.clj +++ b/test/metabase/test/data/dataset_definitions.clj @@ -25,6 +25,9 @@ (def-database-definition-edn geographical-tips) +;; A tiny dataset where half the NON-NULL values are valid URLs +(def-database-definition-edn half-valid-urls) + ;; A very tiny dataset with a list of places and a booleans (def-database-definition-edn places-cam-likes) diff --git a/test/metabase/test/data/dataset_definitions/half-valid-urls.edn b/test/metabase/test/data/dataset_definitions/half-valid-urls.edn new file mode 100644 index 0000000000000000000000000000000000000000..44e9eb8ccce8ead151697fd75698f123e7b812a3 --- /dev/null +++ b/test/metabase/test/data/dataset_definitions/half-valid-urls.edn @@ -0,0 +1,11 @@ +[["urls" [{:field-name "url" + :base-type :TextField}] + [["http://www.camsaul.com"] + ["http://camsaul.com"] + ["https://en.wikipedia.org/wiki/Toucan"] + ["ABC"] + ["DEF"] + [nil] + ["https://en.wikipedia.org/wiki/Bird"] + ["EFG"] + [""]]]] diff --git a/test/metabase/test/data/datasets.clj b/test/metabase/test/data/datasets.clj index 315b404a2ce0745e83167b1afd3e359c3890c74e..c8e7e5e5737b87d0314aee4d5ca7a23da6f10c90 100644 --- a/test/metabase/test/data/datasets.clj +++ b/test/metabase/test/data/datasets.clj @@ -10,6 +10,7 @@ [metabase.test.data.interface :as i])) (driver/find-and-load-drivers!) + (def ^:const all-valid-engines (set (keys (driver/available-drivers)))) @@ -60,19 +61,19 @@ (require (symbol (str "metabase.test.data." (name engine))) :reload))) (driver/engine->driver engine)) -(def ^:dynamic *data-loader* - "The dataset we're currently testing against, bound by `with-engine`. +(def ^:dynamic *driver* + "The driver we're currently testing against, bound by `with-engine`. This is just a regular driver, e.g. `MySQLDriver`, with an extra promise keyed by `:dbpromise` that is used to store the `test-data` dataset when you call `load-data!`." (driver/engine->driver default-engine)) (defn do-with-engine [engine f] - (binding [*engine* engine - *data-loader* (engine->driver engine)] + (binding [*engine* engine + *driver* (engine->driver engine)] (f))) (defmacro with-engine - "Bind `*data-loader*` to the dataset with ENGINE and execute BODY." + "Bind `*driver*` to the dataset with ENGINE and execute BODY." [engine & body] `(do-with-engine ~engine (fn [] ~@body))) @@ -83,7 +84,7 @@ ~@body)) (defmacro with-engine-when-testing - "When testing ENGINE, binding `*data-loader*` and executes BODY." + "When testing ENGINE, binding `*driver*` and executes BODY." [engine & body] `(when-testing-engine ~engine (with-engine ~engine @@ -97,7 +98,7 @@ ~actual))) (defmacro expect-with-engine - "Generate a unit test that only runs if we're currently testing against ENGINE, and that binds `*data-loader*` to the current dataset." + "Generate a unit test that only runs if we're currently testing against ENGINE, and that binds `*driver*` to the current dataset." [engine expected actual] `(expect-when-testing-engine ~engine (with-engine ~engine ~expected) @@ -105,7 +106,7 @@ (defmacro expect-with-engines "Generate unit tests for all datasets in ENGINES; each test will only run if we're currently testing the corresponding dataset. - `*data-loader*` is bound to the current dataset inside each test." + `*driver*` is bound to the current dataset inside each test." 
[engines expected actual] ;; Make functions to get expected/actual so the code is only compiled one time instead of for every single driver ;; speeds up loading of metabase.driver.query-processor-test significantly @@ -120,7 +121,7 @@ (defmacro expect-with-all-engines "Generate unit tests for all valid datasets; each test will only run if we're currently testing the corresponding dataset. - `*data-loader*` is bound to the current dataset inside each test." + `*driver*` is bound to the current dataset inside each test." [expected actual] `(expect-with-engines all-valid-engines ~expected ~actual)) diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj index 98b7f51b4ebc01da90431d64d07a0d01cc3c7a69..e9bec764bb30f7b5921582ff9887fec14001cb56 100644 --- a/test/metabase/test/data/generic_sql.clj +++ b/test/metabase/test/data/generic_sql.clj @@ -3,6 +3,9 @@ (:require [clojure.java.jdbc :as jdbc] [clojure.string :as s] [clojure.tools.logging :as log] + (honeysql [core :as hsql] + [format :as hformat] + [helpers :as h]) (korma [core :as k] [db :as kdb]) [medley.core :as m] @@ -11,7 +14,7 @@ (metabase.test.data [datasets :as datasets] [interface :as i]) [metabase.util :as u] - [metabase.util.korma-extensions :as kx]) + [metabase.util.honeysql-extensions :as hx]) (:import clojure.lang.Keyword (metabase.test.data.interface DatabaseDefinition FieldDefinition @@ -51,6 +54,11 @@ (korma-entity [this, ^DatabaseDefinition dbdef, ^TableDefinition tabledef] "*Optional* Return a korma-entity for TABLEDEF.") + (prepare-identifier [this, ^String identifier] + "*OPTIONAL*. Prepare an identifier, such as a Table or Field name, when it is used in a SQL query. + This is used by drivers like H2 to transform names to upper-case. + The default implementation is `identity`.") + (pk-field-name ^String [this] "*Optional* Name of a PK field. Defaults to `\"id\"`.") @@ -80,7 +88,7 @@ Uses `sql/connection-details->spec` by default.") (load-data! [this, ^DatabaseDefinition dbdef, ^TableDefinition tabledef] - "*Optional*. Load the rows for a specific table into a DB.") + "*Optional*. Load the rows for a specific table into a DB. `load-data-chunked` is the default implementation.") (execute-sql! [driver ^Keyword context, ^DatabaseDefinition dbdef, ^String sql] "Execute a string of raw SQL. Context is either `:server` or `:db`.")) @@ -138,10 +146,8 @@ (str \" nm \")) (defn- quote+combine-names [driver names] - (->> names - (map (partial quote-name driver)) - (interpose \.) - (apply str))) + (apply str (interpose \. (for [n names] + (name (hx/qualify-and-escape-dots (quote-name driver n))))))) (defn- default-qualify+quote-name ([driver db-name] @@ -156,8 +162,8 @@ (assoc spec :make-pool? (not (:short-lived? spec))))) (defn default-korma-entity [driver {:keys [database-name], :as dbdef} {:keys [table-name]}] - (k/database (kx/create-entity (qualified-name-components driver database-name table-name)) - (kx/create-db (database->spec driver :db dbdef)))) + (k/database (sql/create-entity (qualified-name-components driver database-name table-name)) + (sql/create-db (database->spec driver :db dbdef)))) ;;; Loading Table Data ;; Since different DBs have constraints on how we can do this, the logic is broken out into a few different functions @@ -180,12 +186,8 @@ "Add IDs to each row, presumabily for doing a parallel insert. This arg should go before `load-data-chunked` or `load-data-one-at-a-time`." [insert!] (fn [rows] - (insert! 
(pmap (fn [[i row]] - (assoc row :id (inc i))) - (m/indexed rows))) - ;; (insert! (vec (for [[i row] (m/indexed rows)] - ;; (assoc row :id (inc i))))) - )) + (insert! (vec (for [[i row] (m/indexed rows)] + (assoc row :id (inc i))))))) (defn load-data-chunked "Insert rows in chunks, which default to 200 rows each." @@ -208,16 +210,29 @@ (into {} (for [[k v] row-or-rows] {(sql/escape-field-name k) v})))) -(defn load-data-with-debug-logging - "Add debug logging to the data loading fn. This should passed as the first arg to `make-load-data-fn`." - [insert!] - (fn [rows] - (println (u/format-color 'blue "Inserting %d rows like:\n%s" (count rows) (s/replace (k/sql-only (k/insert :some-table (k/values (escape-field-names (first rows))))) - #"\"SOME-TABLE\"" - "[table]"))) - (let [start-time (System/currentTimeMillis)] - (insert! rows) - (println (u/format-color 'green "Inserting %d rows took %d ms." (count rows) (- (System/currentTimeMillis) start-time)))))) +(defn- do-insert! + "Insert ROWS-OR-ROWS into TABLE-NAME for the DRIVER database defined by SPEC." + [driver spec table-name row-or-rows] + (let [prepare-key (comp keyword (partial prepare-identifier driver) name) + rows (if (sequential? row-or-rows) + row-or-rows + [row-or-rows]) + columns (keys (first rows)) + values (for [row rows] + (for [value (map row columns)] + (sql/prepare-value driver {:value value}))) + hsql-form (-> (apply h/columns (for [column columns] + (hx/qualify-and-escape-dots (prepare-key column)))) + (h/insert-into (prepare-key table-name)) + (h/values values)) + sql+args (hx/unescape-dots (binding [hformat/*subquery?* false] + (hsql/format hsql-form + :quoting (sql/quote-style driver) + :allow-dashed-names? true)))] + (try (jdbc/execute! spec sql+args) + (catch java.sql.SQLException e + (println (u/format-color 'red "INSERT FAILED: \n%s\n" sql+args)) + (jdbc/print-sql-exception-chain e))))) (defn make-load-data-fn "Create a `load-data!` function. This creates a function to actually insert a row or rows, wraps it with any WRAP-INSERT-FNS, @@ -225,13 +240,8 @@ [& wrap-insert-fns] (fn [driver dbdef tabledef] (let [entity (korma-entity driver dbdef tabledef) - ;; _ (assert (or (delay? (:pool (:db entity))) - ;; (println "Expected pooled connection:" (u/pprint-to-str 'cyan entity)))) - insert! ((apply comp wrap-insert-fns) (fn [row-or-rows] - ;; (let [id (:id row-or-rows)] - ;; (when (zero? (mod id 50)) - ;; (println id))) - (k/insert entity (k/values (escape-field-names row-or-rows))))) + spec (database->spec driver :db dbdef) + insert! ((apply comp wrap-insert-fns) (partial do-insert! driver spec (:table entity))) rows (load-data-get-rows driver dbdef tabledef)] (insert! rows)))) @@ -249,7 +259,6 @@ (not (s/blank? (s/replace sql #";" "")))) ;; Remove excess semicolons, otherwise snippy DBs like Oracle will barf (let [sql (s/replace sql #";+" ";")] - ;; (println (u/format-color 'blue "[SQL] <<<%s>>>" sql)) (try (jdbc/execute! (database->spec driver context dbdef) [sql] :transaction? false, :multi? true) (catch java.sql.SQLException e @@ -261,9 +270,7 @@ (println "Error executing SQL:" sql) (println (format "Caught Exception: %s %s\n%s" (class e) (.getMessage e) (with-out-str (.printStackTrace e)))) - (throw e))) - ;; (println (u/format-color 'blue "[OK]")) - )))) + (throw e))))))) (def DefaultsMixin @@ -278,6 +285,7 @@ :korma-entity default-korma-entity :load-data! load-data-chunked! 
:pk-field-name (constantly "id") + :prepare-identifier (u/drop-first-arg identity) :qualified-name-components default-qualified-name-components :qualify+quote-name default-qualify+quote-name :quote-name default-quote-name}) @@ -317,7 +325,7 @@ (swap! statements conj (add-fk-sql driver dbdef tabledef fielddef))))) ;; exec the combined statement - (execute-sql! driver :db dbdef (apply str (interpose ";\n" @statements)))) + (execute-sql! driver :db dbdef (apply str (interpose ";\n" (map hx/unescape-dots @statements))))) ;; Now load the data for each Table (doseq [tabledef table-definitions] @@ -338,8 +346,19 @@ (defn execute-when-testing! "Execute a prepared SQL-AND-ARGS against Database with spec returned by GET-CONNECTION-SPEC only when running tests against ENGINE. Useful for doing engine-specific setup or teardown." + {:style/indent 2} [engine get-connection-spec & sql-and-args] (datasets/when-testing-engine engine (println (u/format-color 'blue "[%s] %s" (name engine) (first sql-and-args))) (jdbc/execute! (get-connection-spec) sql-and-args) (println (u/format-color 'blue "[OK]")))) + +(defn query-when-testing! + "Execute a prepared SQL-AND-ARGS **query** against Database with spec returned by GET-CONNECTION-SPEC only when running tests against ENGINE. + Useful for doing engine-specific setup or teardown where `execute-when-testing!` won't work because the query returns results." + {:style/indent 2} + [engine get-connection-spec & sql-and-args] + (datasets/when-testing-engine engine + (println (u/format-color 'blue "[%s] %s" (name engine) (first sql-and-args))) + (u/prog1 (jdbc/query (get-connection-spec) sql-and-args) + (println (u/format-color 'blue "[OK] -> %s" (vec <>)))))) diff --git a/test/metabase/test/data/h2.clj b/test/metabase/test/data/h2.clj index c0ca1e530220b1d08b579e9e32c621a3c3c9bee4..d142c46e6f6935eb8280d23d86ded7a87587bd11 100644 --- a/test/metabase/test/data/h2.clj +++ b/test/metabase/test/data/h2.clj @@ -1,5 +1,6 @@ (ns metabase.test.data.h2 "Code for creating / destroying an H2 database from a `DatabaseDefinition`." + ;; TODO - rework this namespace to use `u/drop-first-arg` where appropriate (:require [clojure.core.reducers :as r] [clojure.string :as s] (korma [core :as k] @@ -44,7 +45,7 @@ :naming {:keys s/lower-case :fields s/upper-case})))))) -(defn create-db-sql [_ {:keys [short-lived?]}] +(def ^:private ^:const ^String create-db-sql (str ;; We don't need to actually do anything to create a database here. Just disable the undo ;; log (i.e., transactions) for this DB session because the bulk operations to load data don't need to be atomic @@ -54,10 +55,8 @@ "CREATE USER IF NOT EXISTS GUEST PASSWORD 'guest';\n" ;; Set DB_CLOSE_DELAY here because only admins are allowed to do it, so we can't set it via the connection string. - ;; Set it to to -1 (no automatic closing) if the DB isn't "short-lived", - ;; otherwise set it to 1 (close after idling for 1 sec) so things like inserting rows persist long enough for us to - ;; run queries without us needing to start a connection pool - (format "SET DB_CLOSE_DELAY %d;" (if short-lived? 
1 -1)))) + ;; Set it to to -1 (no automatic closing) + "SET DB_CLOSE_DELAY -1;")) (defn- create-table-sql [this dbdef {:keys [table-name], :as tabledef}] (str @@ -71,22 +70,20 @@ generic/IGenericSQLDatasetLoader (let [{:keys [execute-sql!], :as mixin} generic/DefaultsMixin] (merge mixin - {:create-db-sql create-db-sql + {:create-db-sql (constantly create-db-sql) :create-table-sql create-table-sql - :database->spec (fn [this context dbdef] - ;; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only - (kdb/h2 (i/database->connection-details this context dbdef))) + :database->spec (comp kdb/h2 i/database->connection-details) ; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only :drop-db-if-exists-sql (constantly nil) :execute-sql! (fn [this _ dbdef sql] ;; we always want to use 'server' context when execute-sql! is called ;; (never try connect as GUEST, since we're not giving them priviledges to create tables / etc) (execute-sql! this :server dbdef sql)) - :field-base-type->sql-type (fn [_ base-type] - (field-base-type->sql-type base-type)) + :field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type) :korma-entity korma-entity :load-data! generic/load-data-all-at-once! :pk-field-name (constantly "ID") :pk-sql-type (constantly "BIGINT AUTO_INCREMENT") + :prepare-identifier (u/drop-first-arg s/upper-case) :quote-name quote-name})) i/IDatasetLoader @@ -94,7 +91,6 @@ {:database->connection-details database->connection-details :default-schema (constantly "PUBLIC") :engine (constantly :h2) - :format-name (fn [_ table-or-field-name] - (s/upper-case table-or-field-name)) + :format-name (u/drop-first-arg s/upper-case) :has-questionable-timezone-support? (constantly true) :id-field-type (constantly :BigIntegerField)})) diff --git a/test/metabase/test/data/interface.clj b/test/metabase/test/data/interface.clj index 159b7c7d5451c7b995d9213c222df848d41642e2..dbf0ae9d6591c3bd63b10e4c49295550e33534f8 100644 --- a/test/metabase/test/data/interface.clj +++ b/test/metabase/test/data/interface.clj @@ -43,20 +43,18 @@ (extend-protocol IMetabaseInstance FieldDefinition (metabase-instance [this table] - (db/sel :one Field :table_id (:id table), :name [in #{(s/lower-case (:field-name this)) ; HACKY! - (s/upper-case (:field-name this))}])) + (Field :table_id (:id table), :%lower.name (s/lower-case (:field-name this)))) TableDefinition (metabase-instance [this database] - (db/sel :one Table :db_id (:id database), :name [in #{(s/lower-case (:table-name this)) - (s/upper-case (:table-name this))}])) + (Table :db_id (:id database), :%lower.name (s/lower-case (:table-name this)))) DatabaseDefinition (metabase-instance [{:keys [database-name]} engine-kw] (assert (string? database-name)) (assert (keyword? engine-kw)) (db/setup-db-if-needed :auto-migrate true) - (db/sel :one Database :name database-name, :engine (name engine-kw)))) + (Database :name database-name, :engine (name engine-kw)))) ;; ## IDatasetLoader diff --git a/test/metabase/test/data/postgres.clj b/test/metabase/test/data/postgres.clj index 71eda961ef4c28decd4537b9c771bc9e0e2ec824..44ff2aec2ecea6389e8723a201cf9d0c7e1030d6 100644 --- a/test/metabase/test/data/postgres.clj +++ b/test/metabase/test/data/postgres.clj @@ -1,7 +1,8 @@ (ns metabase.test.data.postgres "Code for creating / destroying a Postgres database from a `DatabaseDefinition`." 
(:require [environ.core :refer [env]] - metabase.driver.postgres + (metabase.driver [generic-sql :as sql] + postgres) (metabase.test.data [generic-sql :as generic] [interface :as i]) [metabase.util :as u]) @@ -44,3 +45,10 @@ :engine (constantly :postgres) ;; TODO: this is suspect, but it works :has-questionable-timezone-support? (constantly true)})) + +;; it's super obnoxious when testing locally to have tests fail because someone is already connected to the test-data DB (meaning we can't drop it), so close all connections to it beforehand +(defn- kill-connections-to-test-data-db! + {:expectations-options :before-run} + [] + (generic/query-when-testing! :postgres (fn [] (sql/connection-details->spec (PostgresDriver.) (database->connection-details :server {}))) + "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND pg_stat_activity.datname = 'test-data';")) diff --git a/test/metabase/test/data/sqlite.clj b/test/metabase/test/data/sqlite.clj index deda29e89cbabfe8491fc928e26e888901346a52..66e81d828cef9211d68bdbf69d6177f1eff0d5bb 100644 --- a/test/metabase/test/data/sqlite.clj +++ b/test/metabase/test/data/sqlite.clj @@ -1,14 +1,14 @@ (ns metabase.test.data.sqlite (:require [clojure.string :as s] - [korma.core :as k] + [honeysql.core :as hsql] metabase.driver.sqlite (metabase.test.data [generic-sql :as generic] [interface :as i]) - [metabase.util :as u]) + [metabase.util :as u] + [metabase.util.honeysql-extensions :as hx]) (:import metabase.driver.sqlite.SQLiteDriver)) -(defn- database->connection-details - [_ context {:keys [short-lived?], :as dbdef}] +(defn- database->connection-details [context {:keys [short-lived?], :as dbdef}] {:short-lived? short-lived? :db (str (i/escaped-name dbdef) ".sqlite")}) @@ -25,13 +25,14 @@ :TimeField "TIME"}) (defn- load-data-stringify-dates - "Our SQLite JDBC driver doesn't seem to handle Dates/Timestamps correctly so just convert them to string before INSERTing them into the Database." + "Our SQLite JDBC driver doesn't seem to like Dates/Timestamps so just convert them to strings before INSERTing them into the Database." [insert!] (fn [rows] (insert! (for [row rows] (into {} (for [[k v] row] - [k (u/cond-as-> v v - (instance? java.util.Date v) (k/raw (format "DATETIME('%s')" (u/date->iso-8601 v))))])))))) + [k (if-not (instance? java.util.Date v) + v + (hsql/call :datetime (hx/literal (u/date->iso-8601 v))))])))))) (u/strict-extend SQLiteDriver generic/IGenericSQLDatasetLoader @@ -42,9 +43,8 @@ :execute-sql! generic/sequentially-execute-sql! :load-data! (generic/make-load-data-fn load-data-stringify-dates generic/load-data-chunked) :pk-sql-type (constantly "INTEGER") - :field-base-type->sql-type (fn [_ base-type] - (field-base-type->sql-type base-type))}) + :field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type)}) i/IDatasetLoader (merge generic/IDatasetLoaderMixin - {:database->connection-details database->connection-details + {:database->connection-details (u/drop-first-arg database->connection-details) :engine (constantly :sqlite)})) diff --git a/test/metabase/test/data/sqlserver.clj b/test/metabase/test/data/sqlserver.clj index 8e916c4997bfe235db5dd5dc84e542c4852483a3..514ec0cfb3737de741baa4b57d9a6a4f2b6c213c 100644 --- a/test/metabase/test/data/sqlserver.clj +++ b/test/metabase/test/data/sqlserver.clj @@ -81,21 +81,23 @@ ([_ db-name table-name field-name] [(+suffix db-name) "dbo" table-name field-name])) +(defn- create-db! [driver dbdef] + (swap! db-name-counter inc) + (create-db! 
driver dbdef)) + (u/strict-extend SQLServerDriver generic/IGenericSQLDatasetLoader (merge generic/DefaultsMixin {:drop-db-if-exists-sql drop-db-if-exists-sql :drop-table-if-exists-sql drop-table-if-exists-sql - :field-base-type->sql-type (fn [_ base-type] (field-base-type->sql-type base-type)) + :field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type) :pk-sql-type (constantly "INT IDENTITY(1,1)") :qualified-name-components qualified-name-components}) i/IDatasetLoader (let [{:keys [create-db!], :as mixin} generic/IDatasetLoaderMixin] (merge mixin - {:create-db! (fn [this dbdef] - (swap! db-name-counter inc) - (create-db! this dbdef)) + {:create-db! create-db! :database->connection-details database->connection-details :default-schema (constantly "dbo") :engine (constantly :sqlserver)}))) diff --git a/test/metabase/test/data/users.clj b/test/metabase/test/data/users.clj index 12624f74f68707c9ceffb2b727400eca290f56ef..8d1c69ea78c741d1371442b8d09fceb082861762 100644 --- a/test/metabase/test/data/users.clj +++ b/test/metabase/test/data/users.clj @@ -124,7 +124,7 @@ :or {superuser false active true}}] {:pre [(string? email) (string? first) (string? last) (string? password) (m/boolean? superuser) (m/boolean? active)]} - (or (db/sel :one User :email email) + (or (User :email email) (db/insert! User :email email :first_name first diff --git a/test/metabase/timeseries_query_processor_test.clj b/test/metabase/timeseries_query_processor_test.clj index fbfc4e029c8730b3eba6534f8e96ae24f02c36e8..ac53354195aef5b87049e2725e306d0f0d34a531 100644 --- a/test/metabase/timeseries_query_processor_test.clj +++ b/test/metabase/timeseries_query_processor_test.clj @@ -25,7 +25,7 @@ [] (doseq [engine event-based-dbs] (datasets/with-engine-when-testing engine - (data/do-with-temp-db (flattened-db-def) (fn [& _]))))) + (data/do-with-temp-db (flattened-db-def) (constantly nil))))) (defmacro ^:private with-flattened-dbdef [& body] `(data/with-temp-db [~'_ (flattened-db-def)] @@ -679,11 +679,19 @@ ;;; MIN & MAX +;; tests for dimension columns +(expect-with-timeseries-dbs [4.0] (first-row (data/run-query checkins + (ql/aggregation (ql/max $venue_price))))) + (expect-with-timeseries-dbs [1.0] (first-row (data/run-query checkins - (ql/aggregation (ql/min $venue_price))))) + (ql/aggregation (ql/min $venue_price))))) -(expect-with-timeseries-dbs [4.0] (first-row (data/run-query checkins - (ql/aggregation (ql/max $venue_price))))) +;; tests for metric columns +(expect-with-timeseries-dbs [1.0] (first-row (data/run-query checkins + (ql/aggregation (ql/max $count))))) + +(expect-with-timeseries-dbs [1.0] (first-row (data/run-query checkins + (ql/aggregation (ql/min $count))))) (expect-with-timeseries-dbs [["1" 34.0071] ["2" 33.7701] ["3" 10.0646] ["4" 33.983]] ; some sort of weird quirk w/ druid where all columns in breakout get converted to strings diff --git a/test/metabase/util_test.clj b/test/metabase/util_test.clj index f62b3a0e7d48977005c32bf0ee6e045438af8941..f0ee9da16215c87b24efba1adfcc98da1ebe70c7 100644 --- a/test/metabase/util_test.clj +++ b/test/metabase/util_test.clj @@ -102,3 +102,11 @@ (cond-as-> 100 <> (even? <>) (inc <>) (odd? 
<>) (inc <>))) + + +;;; TESTS FOR key-by +(expect + {1 {:id 1, :name "Rasta"} + 2 {:id 2, :name "Lucky"}} + (key-by :id [{:id 1, :name "Rasta"} + {:id 2, :name "Lucky"}])) diff --git a/webpack.config.js b/webpack.config.js index 7718825f1bdbaf31994242b7932a6eb5c4b70107..92dbebb5775679e890cf6c57b79bfbed028120e1 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -7,8 +7,8 @@ var webpackPostcssTools = require('webpack-postcss-tools'); var CommonsChunkPlugin = webpack.optimize.CommonsChunkPlugin; var ExtractTextPlugin = require('extract-text-webpack-plugin'); var HtmlWebpackPlugin = require('html-webpack-plugin'); - var UnusedFilesWebpackPlugin = require("unused-files-webpack-plugin").default; +var FlowStatusWebpackPlugin = require('flow-status-webpack-plugin'); var _ = require('underscore'); var glob = require('glob'); @@ -19,7 +19,7 @@ function hasArg(arg) { return process.argv.filter(regex.test.bind(regex)).length > 0; } -var SRC_PATH = __dirname + '/frontend/src'; +var SRC_PATH = __dirname + '/frontend/src/metabase'; var BUILD_PATH = __dirname + '/resources/frontend_client'; @@ -161,7 +161,13 @@ var config = module.exports = { }, plugins: [ - new UnusedFilesWebpackPlugin(), + new UnusedFilesWebpackPlugin({ + globOptions: { + ignore: [ + "**/types/*.js" + ] + } + }), // Separates out modules common to multiple entry points into a single common file that should be loaded first. // Not currently useful but necessary for code-splitting new CommonsChunkPlugin({ @@ -229,6 +235,8 @@ if (NODE_ENV === "development" || NODE_ENV === "hot") { config.resolve.alias[name] = unminified; } } + + config.plugins.push(new FlowStatusWebpackPlugin()) } if (NODE_ENV === "hot" || isWatching) {