Commit 22be5068 authored by Allen Gilliland

Merge branch 'master' into newsletter_on_setup

parents 62afdf88 84be4678
Showing with 557 additions and 660 deletions
......@@ -23,18 +23,18 @@
(conda 0)
(context 2)
(create-database-definition 1)
(dataset-case 0)
(engine-case 0)
(execute-query 1)
(execute-sql! 2)
(expect 1)
(expect-eval-actual-first 1)
(expect-expansion 0)
(expect-let 1)
(expect-when-testing-dataset 1)
(expect-when-testing-engine 1)
(expect-when-testing-mongo 1)
(expect-with-all-drivers 1)
(expect-with-dataset 1)
(expect-with-datasets 1)
(expect-with-engine 1)
(expect-with-engines 1)
(format-color 2)
(if-questionable-timezone-support 0)
(if-sqlserver 0)
......@@ -58,12 +58,12 @@
(pre-insert 1)
(pre-update 1)
(project 1)
(qp-expect-with-all-datasets 1)
(qp-expect-with-datasets 1)
(qp-expect-with-all-engines 1)
(qp-expect-with-engines 1)
(query-with-temp-db 1)
(resolve-private-fns 1)
(symbol-macrolet 1)
(sync-in-context 2)
(upd 2)
(when-testing-dataset 1)
(when-testing-engine 1)
(with-credentials 1)))))))
......@@ -29,7 +29,7 @@ test:
# 4) Bikeshed linter
# 5) runs JS linter + JS test
# 6) runs lein uberjar. (We don't run bin/build because we're not really concerned about `npm install` (etc) in this test, which runs elsewhere)
- case $CIRCLE_NODE_INDEX in 0) MB_TEST_DATASETS=h2,mongo,mysql lein test ;; 1) MB_TEST_DATASETS=h2,sqlserver MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 2) MB_TEST_DATASETS=h2,postgres MB_DB_TYPE=mysql MB_DB_DBNAME=circle_test MB_DB_PORT=3306 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 3) lein eastwood ;; 4) lein bikeshed --max-line-length 240 ;; 5) npm install && npm run lint && npm run build && npm run test ;; 6) lein uberjar ;; esac:
- case $CIRCLE_NODE_INDEX in 0) ENGINES=h2,mongo,mysql lein test ;; 1) ENGINES=h2,sqlserver MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 2) ENGINES=h2,postgres MB_DB_TYPE=mysql MB_DB_DBNAME=circle_test MB_DB_PORT=3306 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 3) lein eastwood ;; 4) lein bikeshed --max-line-length 240 ;; 5) npm install && npm run lint && npm run build && npm run test ;; 6) lein uberjar ;; esac:
parallel: true
deployment:
master:
......
......@@ -124,9 +124,9 @@ or a specific test with
lein test metabase.api.session-test
By default, the tests only run against the `h2` dataset (built-in test database). You can specify which datasets/drivers to run tests against with the env var `MB_TEST_DATASETS`:
By default, the tests only run against the `h2` dataset (built-in test database). You can specify which datasets/drivers to run tests against with the env var `ENGINES`:
MB_TEST_DATASETS=h2,postgres,mysql,mongo lein test
ENGINES=h2,postgres,mysql,mongo lein test
At the time of this writing, the valid datasets are `h2`, `postgres`, `mysql`, and `mongo`.
......
var ActivityServices = angular.module('metabase.activity.services', ['ngResource', 'ngCookies']);
ActivityServices.factory('Activity', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/activity', {}, {
list: {
method: 'GET',
isArray: true
},
recent_views: {
url: '/api/activity/recent_views',
method: 'GET',
isArray: true
}
});
}]);
......@@ -5,7 +5,7 @@ import DatabaseList from "./components/DatabaseList.jsx";
import DatabaseEdit from "./components/DatabaseEdit.jsx";
var DatabasesControllers = angular.module('metabaseadmin.databases.controllers', ['metabase.metabase.services']);
var DatabasesControllers = angular.module('metabaseadmin.databases.controllers', ['metabase.services']);
DatabasesControllers.controller('DatabaseList', ['$scope', '$routeParams', 'Metabase', function($scope, $routeParams, Metabase) {
......@@ -102,42 +102,10 @@ DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$loc
};
var save = function(database, details) {
// validate_connection needs engine so add it to request body
details.engine = database.engine;
function handleError(error) {
$scope.$broadcast("form:api-error", error);
throw error;
}
// for an existing DB check that connection is valid before save
if ($routeParams.databaseId) {
return Metabase.validate_connection(details).$promise.catch(handleError).then(function() {
return update(database, details);
});
// for a new DB we want to infer SSL support. First try to connect w/ SSL. If that fails, disable SSL
return update(database, details);
} else {
const engineSupportsSSL = _.contains(_.map($scope.engines[database.engine]['details-fields'], 'name'),
'ssl');
function createDB() {
console.log('Successfully connected to database with SSL = ' + details.ssl + '.');
return create(database, details);
}
// if the engine supports SSL, try connecting with SSL first, and then without
if (engineSupportsSSL) {
details.ssl = true;
return Metabase.validate_connection(details).$promise.catch(function() {
console.log('Unable to connect with SSL. Trying with SSL = false.');
details.ssl = false;
return Metabase.validate_connection(details).$promise;
}).then(createDB).catch(handleError);
} else {
delete details.ssl;
return Metabase.validate_connection(details).$promise.catch(handleError).then(createDB);
}
return create(database, details);
}
};
......
import React, { Component, PropTypes } from "react";
import _ from "underscore";
import MetadataHeader from './MetadataHeader.jsx';
import MetadataTableList from './MetadataTableList.jsx';
......@@ -23,6 +24,7 @@ export default class MetadataEditor extends Component {
databaseId: PropTypes.number,
databases: PropTypes.array.isRequired,
selectDatabase: PropTypes.func.isRequired,
databaseMetadata: PropTypes.object,
tableId: PropTypes.number,
tables: PropTypes.object.isRequired,
selectTable: PropTypes.func.isRequired,
......@@ -63,7 +65,7 @@ export default class MetadataEditor extends Component {
}
render() {
var table = this.props.tables[this.props.tableId];
var table = (this.props.databaseMetadata) ? _.findWhere(this.props.databaseMetadata.tables, {id: this.props.tableId}) : null;
var content;
if (table) {
if (this.state.isShowingSchema) {
......@@ -100,7 +102,7 @@ export default class MetadataEditor extends Component {
<div className="MetadataEditor-main flex flex-row flex-full mt2">
<MetadataTableList
tableId={this.props.tableId}
tables={this.props.tables}
tables={(this.props.databaseMetadata) ? this.props.databaseMetadata.tables : []}
selectTable={this.props.selectTable}
/>
{content}
......
......@@ -51,8 +51,8 @@ function($scope, $route, $routeParams, $location, $q, $timeout, databases, Metab
$scope.tables = {};
if ($scope.databaseId != null) {
try {
await loadTableMetadata();
await loadIdFields();
await loadDatabaseMetadata();
$timeout(() => $scope.$digest());
} catch (error) {
console.warn("error loading tables", error)
......@@ -60,14 +60,10 @@ function($scope, $route, $routeParams, $location, $q, $timeout, databases, Metab
}
}, true);
async function loadTableMetadata() {
var tables = await Metabase.db_tables({ 'dbId': $scope.databaseId }).$promise;
await* tables.map(async function(table) {
$scope.tables[table.id] = await Metabase.table_query_metadata({
'tableId': table.id,
'include_sensitive_fields': true
}).$promise;
computeMetadataStrength($scope.tables[table.id]);
async function loadDatabaseMetadata() {
$scope.databaseMetadata = await Metabase.db_metadata({ 'dbId': $scope.databaseId }).$promise;
$scope.databaseMetadata.tables.map(function(table, index) {
table.metadataStrength = computeMetadataStrength(table);
});
}
......@@ -94,7 +90,7 @@ function($scope, $route, $routeParams, $location, $q, $timeout, databases, Metab
$scope.updateTable = function(table) {
return Metabase.table_update(table).$promise.then(function(result) {
_.each(result, (value, key) => { if (key.charAt(0) !== "$") { table[key] = value } });
computeMetadataStrength($scope.tables[table.id]);
table.metadataStrength = computeMetadataStrength(table);
$timeout(() => $scope.$digest());
});
};
......@@ -102,7 +98,8 @@ function($scope, $route, $routeParams, $location, $q, $timeout, databases, Metab
$scope.updateField = function(field) {
return Metabase.field_update(field).$promise.then(function(result) {
_.each(result, (value, key) => { if (key.charAt(0) !== "$") { field[key] = value } });
computeMetadataStrength($scope.tables[field.table_id]);
let table = _.findWhere($scope.databaseMetadata.tables, {id: field.table_id});
table.metadataStrength = computeMetadataStrength(table);
return loadIdFields();
}).then(function() {
$timeout(() => $scope.$digest());
......@@ -126,7 +123,7 @@ function($scope, $route, $routeParams, $location, $q, $timeout, databases, Metab
}
});
table.metadataStrength = completed / total;
return (completed / total);
}
$scope.updateFieldSpecialType = async function(field) {
......
......@@ -3,7 +3,6 @@ var Metabase = angular.module('metabase', [
'ngRoute',
'ngCookies',
'ui.bootstrap', // bootstrap LIKE widgets via angular directives
'metabase.activity.services',
'metabase.auth',
'metabase.filters',
'metabase.directives',
......
......@@ -5,84 +5,6 @@ import { normal, harmony } from 'metabase/lib/colors'
// Card Services
var CardServices = angular.module('metabase.card.services', ['ngResource', 'ngCookies']);
CardServices.factory('Card', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/card/:cardId', {}, {
list: {
url: '/api/card/?f=:filterMode',
method: 'GET',
isArray: true
},
create: {
url: '/api/card',
method: 'POST',
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
get: {
method: 'GET',
params: {
cardId: '@cardId'
}
},
update: {
method: 'PUT',
params: {
cardId: '@id'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
delete: {
method: 'DELETE',
params: {
cardId: '@cardId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
isfavorite: {
url: '/api/card/:cardId/favorite',
method: 'GET',
params: {
cardId: '@cardId'
}
},
favorite: {
url: '/api/card/:cardId/favorite',
method: 'POST',
params: {
cardId: '@cardId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
unfavorite: {
url: '/api/card/:cardId/favorite',
method: 'DELETE',
params: {
cardId: '@cardId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
}
});
}]);
CardServices.service('VisualizationUtils', [function() {
this.visualizationTypes = {
scalar: {
......
......@@ -3,7 +3,6 @@ var Dashboard = angular.module('metabase.dashboard', [
'ngRoute',
'metabase.directives',
'metabase.services',
'metabase.dashboard.services',
'metabase.dashboard.controllers',
'metabase.card.services'
]);
......
// Dashboard Services
var DashboardServices = angular.module('metabase.dashboard.services', ['ngResource', 'ngCookies']);
DashboardServices.factory('Dashboard', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/dashboard/:dashId', {}, {
list: {
url:'/api/dashboard?org=:orgId&f=:filterMode',
method:'GET',
isArray:true
},
create: {
url:'/api/dashboard',
method:'POST',
headers: {'X-CSRFToken': function() { return $cookies.csrftoken; }},
},
get: {
method:'GET',
params:{dashId:'@dashId'},
},
update: {
method:'PUT',
params:{dashId:'@id'},
headers: {'X-CSRFToken': function() { return $cookies.csrftoken; }},
},
delete: {
method:'DELETE',
params:{dashId:'@dashId'},
headers: {'X-CSRFToken': function() { return $cookies.csrftoken; }},
},
addcard: {
url:'/api/dashboard/:dashId/cards',
method:'POST',
params:{dashId:'@dashId'},
headers: {'X-CSRFToken': function() { return $cookies.csrftoken; }},
},
removecard: {
url:'/api/dashboard/:dashId/cards',
method:'DELETE',
params:{dashId:'@dashId'},
headers: {'X-CSRFToken': function() { return $cookies.csrftoken; }},
},
reposition_cards: {
url:'/api/dashboard/:dashId/reposition',
method:'POST',
params:{dashId:'@dashId'},
headers: {'X-CSRFToken': function() { return $cookies.csrftoken; }},
}
});
}]);
// Metabase Services
var MetabaseServices = angular.module('metabase.metabase.services', [
'ngResource',
'ngCookies',
'metabase.services'
]);
MetabaseServices.factory('Metabase', ['$resource', '$cookies', 'MetabaseCore', function($resource, $cookies, MetabaseCore) {
return $resource('/api/meta', {}, {
db_list: {
url: '/api/database/?org=:orgId',
method: 'GET',
isArray: true
},
db_create: {
url: '/api/database/',
method: 'POST',
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
validate_connection: {
url: '/api/database/validate/',
method: 'POST',
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
db_add_sample_dataset: {
url: '/api/database/sample_dataset',
method: 'POST'
},
db_get: {
url: '/api/database/:dbId',
method: 'GET',
params: {
dbId: '@dbId'
}
},
db_update: {
url: '/api/database/:dbId',
method: 'PUT',
params: {
dbId: '@id'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
db_delete: {
url: '/api/database/:dbId',
method: 'DELETE',
params: {
dbId: '@dbId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
db_metadata: {
url: '/api/database/:dbId/metadata',
method: 'GET',
params: {
dbId: '@dbId'
}
},
db_tables: {
url: '/api/database/:dbId/tables',
method: 'GET',
params: {
dbId: '@dbId'
},
isArray: true
},
db_idfields: {
url: '/api/database/:dbId/idfields',
method: 'GET',
params: {
dbId: '@dbId'
},
isArray: true
},
db_autocomplete_suggestions: {
url: '/api/database/:dbId/autocomplete_suggestions?prefix=:prefix',
method: 'GET',
params: {
dbId: '@dbId'
},
isArray: true
},
db_sync_metadata: {
url: '/api/database/:dbId/sync',
method: 'POST',
params: {
dbId: '@dbId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
table_list: {
url: '/api/table/',
method: 'GET',
params: {
tableId: '@tableId'
},
isArray: true
},
table_get: {
url: '/api/table/:tableId',
method: 'GET',
params: {
tableId: '@tableId'
}
},
table_update: {
url: '/api/table/:tableId',
method: 'PUT',
params: {
tableId: '@id'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
table_fields: {
url: '/api/table/:tableId/fields',
method: 'GET',
params: {
tableId: '@tableId'
},
isArray: true
},
table_fks: {
url: '/api/table/:tableId/fks',
method: 'GET',
params: {
tableId: '@tableId'
},
isArray: true
},
table_reorder_fields: {
url: '/api/table/:tableId/reorder',
method: 'POST',
params: {
tableId: '@tableId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
},
},
table_query_metadata: {
url: '/api/table/:tableId/query_metadata',
method: 'GET',
params: {
dbId: '@tableId'
}
},
table_sync_metadata: {
url: '/api/table/:tableId/sync',
method: 'POST',
params: {
tableId: '@tableId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
field_get: {
url: '/api/field/:fieldId',
method: 'GET',
params: {
fieldId: '@fieldId'
}
},
field_summary: {
url: '/api/field/:fieldId/summary',
method: 'GET',
params: {
fieldId: '@fieldId'
},
isArray: true
},
field_values: {
url: '/api/field/:fieldId/values',
method: 'GET',
params: {
fieldId: '@fieldId'
}
},
field_value_map_update: {
url: '/api/field/:fieldId/value_map_update',
method: 'POST',
params: {
fieldId: '@fieldId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
field_update: {
url: '/api/field/:fieldId',
method: 'PUT',
params: {
fieldId: '@id'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
field_foreignkeys: {
url: '/api/field/:fieldId/foreignkeys',
method: 'GET',
params: {
fieldId: '@fieldId'
},
isArray: true
},
field_addfk: {
url: '/api/field/:fieldId/foreignkeys',
method: 'POST',
params: {
fieldId: '@fieldId'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
},
dataset: {
url: '/api/dataset',
method: 'POST',
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
}
}
});
}]);
MetabaseServices.factory('ForeignKey', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/foreignkey/:fkID', {}, {
delete: {
method: 'DELETE',
params: {
fkID: '@fkID'
},
headers: {
'X-CSRFToken': function() {
return $cookies.csrftoken;
}
},
},
});
}]);
......@@ -259,9 +259,342 @@ MetabaseServices.service('MetabaseCore', ['User', function(User) {
}]);
// User Services
// API Services
var CoreServices = angular.module('metabase.core.services', ['ngResource', 'ngCookies']);
CoreServices.factory('Activity', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/activity', {}, {
list: {
method: 'GET',
isArray: true
},
recent_views: {
url: '/api/activity/recent_views',
method: 'GET',
isArray: true
}
});
}]);
CoreServices.factory('Card', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/card/:cardId', {}, {
list: {
url: '/api/card/?f=:filterMode',
method: 'GET',
isArray: true
},
create: {
url: '/api/card',
method: 'POST'
},
get: {
method: 'GET',
params: {
cardId: '@cardId'
}
},
update: {
method: 'PUT',
params: {
cardId: '@id'
}
},
delete: {
method: 'DELETE',
params: {
cardId: '@cardId'
}
},
isfavorite: {
url: '/api/card/:cardId/favorite',
method: 'GET',
params: {
cardId: '@cardId'
}
},
favorite: {
url: '/api/card/:cardId/favorite',
method: 'POST',
params: {
cardId: '@cardId'
}
},
unfavorite: {
url: '/api/card/:cardId/favorite',
method: 'DELETE',
params: {
cardId: '@cardId'
}
}
});
}]);
CoreServices.factory('Dashboard', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/dashboard/:dashId', {}, {
list: {
url:'/api/dashboard?org=:orgId&f=:filterMode',
method:'GET',
isArray:true
},
create: {
url:'/api/dashboard',
method:'POST'
},
get: {
method:'GET',
params:{dashId:'@dashId'},
},
update: {
method:'PUT',
params:{dashId:'@id'}
},
delete: {
method:'DELETE',
params:{dashId:'@dashId'}
},
addcard: {
url:'/api/dashboard/:dashId/cards',
method:'POST',
params:{dashId:'@dashId'}
},
removecard: {
url:'/api/dashboard/:dashId/cards',
method:'DELETE',
params:{dashId:'@dashId'}
},
reposition_cards: {
url:'/api/dashboard/:dashId/reposition',
method:'POST',
params:{dashId:'@dashId'}
}
});
}]);
CoreServices.factory('ForeignKey', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/foreignkey/:fkID', {}, {
delete: {
method: 'DELETE',
params: {
fkID: '@fkID'
}
}
});
}]);
CoreServices.factory('Metabase', ['$resource', '$cookies', 'MetabaseCore', function($resource, $cookies, MetabaseCore) {
return $resource('/api/meta', {}, {
db_list: {
url: '/api/database/?org=:orgId',
method: 'GET',
isArray: true
},
db_create: {
url: '/api/database/',
method: 'POST'
},
db_add_sample_dataset: {
url: '/api/database/sample_dataset',
method: 'POST'
},
db_get: {
url: '/api/database/:dbId',
method: 'GET',
params: {
dbId: '@dbId'
}
},
db_update: {
url: '/api/database/:dbId',
method: 'PUT',
params: {
dbId: '@id'
}
},
db_delete: {
url: '/api/database/:dbId',
method: 'DELETE',
params: {
dbId: '@dbId'
}
},
db_metadata: {
url: '/api/database/:dbId/metadata',
method: 'GET',
params: {
dbId: '@dbId'
}
},
db_tables: {
url: '/api/database/:dbId/tables',
method: 'GET',
params: {
dbId: '@dbId'
},
isArray: true
},
db_idfields: {
url: '/api/database/:dbId/idfields',
method: 'GET',
params: {
dbId: '@dbId'
},
isArray: true
},
db_autocomplete_suggestions: {
url: '/api/database/:dbId/autocomplete_suggestions?prefix=:prefix',
method: 'GET',
params: {
dbId: '@dbId'
},
isArray: true
},
db_sync_metadata: {
url: '/api/database/:dbId/sync',
method: 'POST',
params: {
dbId: '@dbId'
}
},
table_list: {
url: '/api/table/',
method: 'GET',
params: {
tableId: '@tableId'
},
isArray: true
},
table_get: {
url: '/api/table/:tableId',
method: 'GET',
params: {
tableId: '@tableId'
}
},
table_update: {
url: '/api/table/:tableId',
method: 'PUT',
params: {
tableId: '@id'
}
},
table_fields: {
url: '/api/table/:tableId/fields',
method: 'GET',
params: {
tableId: '@tableId'
},
isArray: true
},
table_fks: {
url: '/api/table/:tableId/fks',
method: 'GET',
params: {
tableId: '@tableId'
},
isArray: true
},
table_reorder_fields: {
url: '/api/table/:tableId/reorder',
method: 'POST',
params: {
tableId: '@tableId'
}
},
table_query_metadata: {
url: '/api/table/:tableId/query_metadata',
method: 'GET',
params: {
dbId: '@tableId'
}
},
table_sync_metadata: {
url: '/api/table/:tableId/sync',
method: 'POST',
params: {
tableId: '@tableId'
}
},
field_get: {
url: '/api/field/:fieldId',
method: 'GET',
params: {
fieldId: '@fieldId'
}
},
field_summary: {
url: '/api/field/:fieldId/summary',
method: 'GET',
params: {
fieldId: '@fieldId'
},
isArray: true
},
field_values: {
url: '/api/field/:fieldId/values',
method: 'GET',
params: {
fieldId: '@fieldId'
}
},
field_value_map_update: {
url: '/api/field/:fieldId/value_map_update',
method: 'POST',
params: {
fieldId: '@fieldId'
}
},
field_update: {
url: '/api/field/:fieldId',
method: 'PUT',
params: {
fieldId: '@id'
}
},
field_foreignkeys: {
url: '/api/field/:fieldId/foreignkeys',
method: 'GET',
params: {
fieldId: '@fieldId'
},
isArray: true
},
field_addfk: {
url: '/api/field/:fieldId/foreignkeys',
method: 'POST',
params: {
fieldId: '@fieldId'
}
},
dataset: {
url: '/api/dataset',
method: 'POST'
}
});
}]);
CoreServices.factory('Revision', ['$resource', function($resource) {
return $resource('/api/revision', {}, {
list: {
url: '/api/revision',
method: 'GET',
isArray: true,
params: {
'entity': '@entity',
'id': '@id'
}
},
revert: {
url: '/api/revision/revert',
method: 'POST',
params: {
'entity': '@entity',
'id': '@id',
'revision_id': '@revision_id'
}
}
});
}]);
CoreServices.factory('Session', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/session/', {}, {
create: {
......@@ -290,6 +623,43 @@ CoreServices.factory('Session', ['$resource', '$cookies', function($resource, $c
});
}]);
CoreServices.factory('Settings', ['$resource', function($resource) {
return $resource('/api/setting', {}, {
list: {
url: '/api/setting',
method: 'GET',
isArray: true,
},
// the PUT endpoint handles both create and update in this case
put: {
url: '/api/setting/:key',
method: 'PUT',
params: {
key: '@key'
}
},
delete: {
url: '/api/setting/:key',
method: 'DELETE',
params: {
key: '@key'
}
}
});
}]);
CoreServices.factory('Setup', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/setup/', {}, {
create: {
method: 'POST'
},
validate_db: {
url: '/api/setup/validate',
method: 'POST'
}
});
}]);
CoreServices.factory('User', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/user/:userId', {}, {
create: {
......@@ -343,57 +713,6 @@ CoreServices.factory('User', ['$resource', '$cookies', function($resource, $cook
});
}]);
CoreServices.factory('Settings', ['$resource', function($resource) {
return $resource('/api/setting', {}, {
list: {
url: '/api/setting',
method: 'GET',
isArray: true,
},
// the PUT endpoint handles both create and update in this case
put: {
url: '/api/setting/:key',
method: 'PUT',
params: {
key: '@key'
}
},
delete: {
url: '/api/setting/:key',
method: 'DELETE',
params: {
key: '@key'
}
}
});
}]);
CoreServices.factory('Revision', ['$resource', function($resource) {
return $resource('/api/revision', {}, {
list: {
url: '/api/revision',
method: 'GET',
isArray: true,
params: {
'entity': '@entity',
'id': '@id'
}
},
revert: {
url: '/api/revision/revert',
method: 'POST',
params: {
'entity': '@entity',
'id': '@id',
'revision_id': '@revision_id'
}
}
});
}]);
CoreServices.factory('Util', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/util/', {}, {
password_check: {
......
......@@ -16,7 +16,7 @@ const finalCreateStore = compose(
const reducer = combineReducers(reducers);
var SetupControllers = angular.module('metabase.setup.controllers', ['metabase.setup.services']);
var SetupControllers = angular.module('metabase.setup.controllers', ['metabase.services']);
SetupControllers.controller('SetupController', ['$scope', '$location', '$timeout', 'ipCookie', function($scope, $location, $timeout, ipCookie) {
$scope.Component = SetupApp;
$scope.props = {
......
var SetupServices = angular.module('metabase.setup.services', ['ngResource', 'ngCookies']);
SetupServices.factory('Setup', ['$resource', '$cookies', function($resource, $cookies) {
return $resource('/api/setup/', {}, {
create: {
method: 'POST'
},
validate_db: {
url: '/api/setup/validate',
method: 'POST'
}
});
}]);
import 'metabase/services';
import 'metabase/metabase/metabase.services';
describe('metabase.metabase.services', function() {
beforeEach(angular.mock.module('metabase.metabase.services'));
describe('metabase.services', function() {
beforeEach(angular.mock.module('metabase.services'));
describe('Metabase', function() {
it('should return empty list of databases', inject(function(Metabase, $httpBackend) {
......
......@@ -8,7 +8,7 @@
[metabase.events :as events]
(metabase.models common
[hydrate :refer [hydrate]]
[database :refer [Database]]
[database :refer [Database protected-password]]
[field :refer [Field]]
[table :refer [Table]])
[metabase.sample-data :as sample-data]
......@@ -19,6 +19,35 @@
[symb value :nillable]
(checkp-contains? (set (map name (keys @driver/available-drivers))) symb value))
(defn test-database-connection
"Try out the connection details for a database and useful error message if connection fails, returns `nil` if connection succeeds."
[engine {:keys [host port] :as details}]
(when (not (metabase.config/is-test?))
(let [engine (keyword engine)
details (assoc details :engine engine)
response-invalid (fn [field m] {:valid false
field m ; work with the new {:field error-message} format
:message m})] ; but be backwards-compatible with the UI as it exists right now
(try
(cond
(driver/can-connect-with-details? engine details :rethrow-exceptions) nil
(and host port (u/host-port-up? host port)) (response-invalid :dbname (format "Connection to '%s:%d' successful, but could not connect to DB." host port))
(and host (u/host-up? host)) (response-invalid :port (format "Connection to '%s' successful, but port %d is invalid." host port))
host (response-invalid :host (format "'%s' is not reachable" host))
:else (response-invalid :db "Unable to connect to database."))
(catch Throwable e
(response-invalid :dbname (.getMessage e)))))))
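;; Hedged usage sketch (hypothetical values, not part of this change): the helper returns nil when the
;; connection succeeds, otherwise a map like {:valid false, <field> <error-message>, :message <error-message>}
;; where <field> is the form field the UI should highlight (:host, :port, :dbname, or :db).
(comment
  (test-database-connection :postgres {:host "db.example.com", :port 5432, :dbname "test"})
  ;; => nil when the connection works, or e.g.
  ;; => {:valid false, :host "'db.example.com' is not reachable", :message "'db.example.com' is not reachable"}
  )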
(defn supports-ssl?
"Predicate function which determines if a given `engine` supports the `:ssl` setting."
[engine]
{:pre [(driver/is-engine? engine)]}
(let [driver-props (->> (driver/engine->driver engine)
:details-fields
(map :name)
set)]
(contains? driver-props "ssl")))
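;; Hedged example of the predicate above (the engine keywords and results are assumptions; they depend on
;; which :details-fields each installed driver declares):
(comment
  (supports-ssl? :postgres) ; => true if the driver lists an "ssl" details field
  (supports-ssl? :h2)       ; => false if it does not
  )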
(defendpoint GET "/"
"Fetch all `Databases`."
[]
......@@ -30,10 +59,26 @@
{name [Required NonEmptyString]
engine [Required DBEngine]
details [Required Dict]}
;; TODO - we should validate the contents of `details` here based on the engine
(check-superuser)
(let-500 [new-db (ins Database :name name :engine engine :details details)]
(events/publish-event :database-create new-db)))
;; `try-connection` tries connecting over SSL and then non-SSL to establish a connection;
;; if it succeeds it returns the `details` that worked, otherwise it returns an error map
(let [try-connection (fn [engine details]
(let [error (test-database-connection engine details)]
(if (and error
(true? (:ssl details)))
(recur engine (assoc details :ssl false))
(or error details))))
details (if (supports-ssl? engine)
(assoc details :ssl true)
details)
details-or-error (try-connection engine details)]
(if-not (false? (:valid details-or-error))
;; no error, proceed with creation
(let-500 [new-db (ins Database :name name :engine engine :details details-or-error)]
(events/publish-event :database-create new-db))
;; failed to connect, return error
{:status 400
:body details-or-error})))
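;; Hedged walk-through of the `try-connection` loop above for an SSL-capable engine (illustrative only):
;;   attempt 1: details with :ssl true  -> if `test-database-connection` returns an error, recur with :ssl false
;;   attempt 2: details with :ssl false -> return the working details map, or the error map from this attempt
;; The (false? (:valid details-or-error)) check then picks between creating the Database and a 400 response.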
(defendpoint POST "/sample_dataset"
"Add the sample dataset as a new `Database`."
......@@ -42,25 +87,6 @@
(sample-data/add-sample-dataset!)
(sel :one Database :is_sample true))
;; Stub function that will eventually validate a connection string
(defendpoint POST "/validate"
"Validate that we can connect to a `Database`."
[:as {{:keys [host port engine] :as details} :body}]
(let [engine (keyword engine)
details (assoc details :engine engine)
response-invalid (fn [field m] {:status 400 :body {:valid false
field m ; work with the new {:field error-message} format
:message m}})] ; but be backwards-compatible with the UI as it exists right now
(try
(cond
(driver/can-connect-with-details? engine details :rethrow-exceptions) {:valid true}
(and host port (u/host-port-up? host port)) (response-invalid :dbname (format "Connection to '%s:%d' successful, but could not connect to DB." host port))
(and host (u/host-up? host)) (response-invalid :port (format "Connection to '%s' successful, but port %d is invalid." host port))
host (response-invalid :host (format "'%s' is not reachable" host))
:else (response-invalid :db "Unable to connect to database."))
(catch Throwable e
(response-invalid :dbname (.getMessage e))))))
(defendpoint GET "/:id"
"Get `Database` with ID."
[id]
......@@ -69,13 +95,28 @@
(defendpoint PUT "/:id"
"Update a `Database`."
[id :as {{:keys [name engine details]} :body}]
{name NonEmptyString, details Dict} ; TODO - check that engine is a valid choice
(write-check Database id)
(check-500 (upd-non-nil-keys Database id
:name name
:engine engine
:details details))
(Database id))
{name [Required NonEmptyString]
engine [Required DBEngine]
details [Required Dict]}
(check-superuser)
(let-404 [database (Database id)]
(let [details (if-not (= protected-password (:password details))
details
(assoc details :password (get-in database [:details :password])))
conn-error (test-database-connection engine details)]
(if-not conn-error
;; no error, proceed with update
(do
;; TODO: is there really a reason to let someone change the engine on an existing database?
;; that seems like the kind of thing that will almost never work in any practical way
(check-500 (upd-non-nil-keys Database id
:name name
:engine engine
:details details))
(Database id))
;; failed to connect, return error
{:status 400
:body conn-error}))))
(defendpoint DELETE "/:id"
"Delete a `Database`."
......
......@@ -38,6 +38,13 @@
;; | QP INTERNAL IMPLEMENTATION |
;; +----------------------------------------------------------------------------------------------------+
(defn structured-query?
"Predicate function which returns `true` if the given query represents a structured style query, `false` otherwise."
[query]
(= :query (keyword (:type query))))
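;; Hedged example of the two query shapes this predicate distinguishes (the maps are illustrative):
(comment
  (structured-query? {:type "query",  :query  {:source-table 1}})     ; => true
  (structured-query? {:type "native", :native {:query "SELECT 1;"}})  ; => false
  )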
(defn- wrap-catch-exceptions [qp]
(fn [query]
(try (qp query)
......@@ -55,7 +62,9 @@
(defn- pre-expand [qp]
(fn [query]
(qp (resolve/resolve (expand/expand query)))))
(qp (if (structured-query? query)
(resolve/resolve (expand/expand query))
query))))
(defn- post-add-row-count-and-status
......@@ -80,40 +89,46 @@
"Add an implicit `fields` clause to queries with `rows` aggregations."
[qp]
(fn [{{:keys [source-table], {source-table-id :id} :source-table} :query, :as query}]
(qp (if-not (should-add-implicit-fields? query)
query
(let [fields (for [field (sel :many :fields [Field :name :display_name :base_type :special_type :preview_display :display_name :table_id :id :position :description]
:table_id source-table-id
:active true
:field_type [not= "sensitive"]
:parent_id nil
(k/order :position :asc) (k/order :id :desc))]
(let [field (-> (resolve/rename-mb-field-keys field)
map->Field
(resolve/resolve-table {source-table-id source-table}))]
(if (or (contains? #{:DateField :DateTimeField} (:base-type field))
(contains? #{:timestamp_seconds :timestamp_milliseconds} (:special-type field)))
(map->DateTimeField {:field field, :unit :day})
field)))]
(if-not (seq fields)
(do (log/warn (format "Table '%s' has no Fields associated with it." (:name source-table)))
query)
(-> query
(assoc-in [:query :fields-is-implicit] true)
(assoc-in [:query :fields] fields))))))))
(if (structured-query? query)
(qp (if-not (should-add-implicit-fields? query)
query
(let [fields (for [field (sel :many :fields [Field :name :display_name :base_type :special_type :preview_display :display_name :table_id :id :position :description]
:table_id source-table-id
:active true
:field_type [not= "sensitive"]
:parent_id nil
(k/order :position :asc) (k/order :id :desc))]
(let [field (-> (resolve/rename-mb-field-keys field)
map->Field
(resolve/resolve-table {source-table-id source-table}))]
(if (or (contains? #{:DateField :DateTimeField} (:base-type field))
(contains? #{:timestamp_seconds :timestamp_milliseconds} (:special-type field)))
(map->DateTimeField {:field field, :unit :day})
field)))]
(if-not (seq fields)
(do (log/warn (format "Table '%s' has no Fields associated with it." (:name source-table)))
query)
(-> query
(assoc-in [:query :fields-is-implicit] true)
(assoc-in [:query :fields] fields))))))
;; for non-structured queries we do nothing
(qp query))))
(defn- pre-add-implicit-breakout-order-by
"`Fields` specified in `breakout` should add an implicit ascending `order-by` subclause *unless* that field is *explicitly* referenced in `order-by`."
[qp]
(fn [{{breakout-fields :breakout, order-by :order-by} :query, :as query}]
(let [order-by-fields (set (map :field order-by))
implicit-breakout-order-by-fields (filter (partial (complement contains?) order-by-fields)
breakout-fields)]
(qp (cond-> query
(seq implicit-breakout-order-by-fields) (update-in [:query :order-by] concat (for [field implicit-breakout-order-by-fields]
(map->OrderBySubclause {:field field
:direction :ascending}))))))))
(if (structured-query? query)
(let [order-by-fields (set (map :field order-by))
implicit-breakout-order-by-fields (filter (partial (complement contains?) order-by-fields)
breakout-fields)]
(qp (cond-> query
(seq implicit-breakout-order-by-fields) (update-in [:query :order-by] concat (for [field implicit-breakout-order-by-fields]
(map->OrderBySubclause {:field field
:direction :ascending}))))))
;; for non-structured queries we do nothing
(qp query))))
(defn- pre-cumulative-sum
......@@ -173,9 +188,12 @@
(defn- cumulative-sum [qp]
(fn [query]
(let [[cumulative-sum-field query] (pre-cumulative-sum query)]
(cond->> (qp query)
cumulative-sum-field (post-cumulative-sum cumulative-sum-field)))))
(if (structured-query? query)
(let [[cumulative-sum-field query] (pre-cumulative-sum query)]
(cond->> (qp query)
cumulative-sum-field (post-cumulative-sum cumulative-sum-field)))
;; for non-structured queries we do nothing
(qp query))))
(defn- limit
......@@ -193,7 +211,8 @@
(defn- pre-log-query [qp]
(fn [query]
(when-not *disable-qp-logging*
(when (and (structured-query? query)
(not *disable-qp-logging*))
(log/debug (u/format-color 'magenta "\n\nPREPROCESSED/EXPANDED: 😻\n%s"
(u/pprint-to-str
;; Remove empty kv pairs because otherwise expanded query is HUGE
......@@ -208,6 +227,16 @@
(qp query)))
(defn- wrap-guard-multiple-calls
"Throw an exception if a QP function accidentally calls (QP QUERY) more than once."
[qp]
(let [called? (atom false)]
(fn [query]
(assert (not @called?) "(QP QUERY) IS BEING CALLED MORE THAN ONCE!")
(reset! called? true)
(qp query))))
;; +------------------------------------------------------------------------------------------------------------------------+
;; | QUERY PROCESSOR |
;; +------------------------------------------------------------------------------------------------------------------------+
......@@ -237,18 +266,14 @@
;; Pre-processing happens from top-to-bottom, i.e. the QUERY passed to the function returned by POST-ADD-ROW-COUNT-AND-STATUS is the
;; query as modified by PRE-EXPAND.
;;
;; Pre-processing then happens in order from bottom-to-top; i.e. POST-ANNOTATE gets to modify the results, then LIMIT, then CUMULATIVE-SUM, etc.
;; Post-processing then happens in order from bottom-to-top; i.e. POST-ANNOTATE gets to modify the results, then LIMIT, then CUMULATIVE-SUM, etc.
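;; Toy sketch of that ordering (hypothetical middleware, not part of this change): composing
;; (wrap-a (wrap-b qp)) runs A's pre-processing, then B's, then the driver, then B's post-processing, then A's.
(comment
  (defn wrap-a [qp] (fn [query] (println "A pre") (let [results (qp query)] (println "A post") results)))
  (defn wrap-b [qp] (fn [query] (println "B pre") (let [results (qp query)] (println "B post") results)))
  ((wrap-a (wrap-b identity)) {:type "query"})
  ;; prints: A pre, B pre, B post, A post
  )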
(defn- wrap-guard-multiple-calls
"Throw an exception if a QP function accidentally calls (QP QUERY) more than once."
[qp]
(let [called? (atom false)]
(fn [query]
(assert (not @called?) "(QP QUERY) IS BEING CALLED MORE THAN ONCE!")
(reset! called? true)
(qp query))))
(defn- process-structured [{:keys [driver], :as query}]
(defn process
"Process a QUERY and return the results."
[driver query]
(when-not *disable-qp-logging*
(log/debug (u/format-color 'blue "\nQUERY: 😎\n%s" (u/pprint-to-str query))))
(let [driver-process-query (:process-query driver)
driver-wrap-process-query (or (:process-query-in-context driver)
(fn [qp] qp))]
......@@ -263,26 +288,5 @@
annotate/post-annotate
pre-log-query
wrap-guard-multiple-calls
driver-process-query) query)))
(defn- process-native [{:keys [driver], :as query}]
(let [driver-process-query (:process-query driver)
driver-wrap-process-query (or (:process-query-in-context driver)
(fn [qp] qp))]
((<<- wrap-catch-exceptions
driver-wrap-process-query
post-add-row-count-and-status
limit
wrap-guard-multiple-calls
driver-process-query) query)))
(defn process
"Process a QUERY and return the results."
[driver query]
(when-not *disable-qp-logging*
(log/debug (u/format-color 'blue "\nQUERY: 😎\n%s" (u/pprint-to-str query))))
((case (keyword (:type query))
:native process-native
:query process-structured)
(assoc query
:driver driver)))
driver-process-query) (assoc query
:driver driver))))
......@@ -248,12 +248,15 @@
expected by the frontend."
[qp]
(fn [query]
(let [results (qp query)
result-keys (set (keys (first results)))
cols (resolve-sort-and-format-columns (:query query) result-keys)
columns (mapv :name cols)]
{:cols (vec (for [col cols]
(update col :name name)))
:columns (mapv name columns)
:rows (for [row results]
(mapv row columns))})))
(if (= :query (keyword (:type query)))
(let [results (qp query)
result-keys (set (keys (first results)))
cols (resolve-sort-and-format-columns (:query query) result-keys)
columns (mapv :name cols)]
{:cols (vec (for [col cols]
(update col :name name)))
:columns (mapv name columns)
:rows (for [row results]
(mapv row columns))})
;; for non-structured queries we do nothing
(qp query))))
......@@ -5,6 +5,9 @@
[metabase.models.interface :refer :all]
[metabase.util :as u]))
(def ^:const protected-password
"**MetabasePass**")
(defrecord DatabaseInstance []
;; preserve normal IFn behavior so things like ((sel :one Database) :id) work correctly
clojure.lang.IFn
......@@ -15,6 +18,7 @@
(api-serialize [this]
;; If the current user isn't an admin, strip out DB details, which may include things like the password
(cond-> this
(get-in this [:details :password]) (assoc-in [:details :password] protected-password)
(not (:is_superuser @*current-user*)) (dissoc :details))))
(extend-ICanReadWrite DatabaseInstance :read :always, :write :superuser)
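;; Hedged sketch of the serialization above (values illustrative): for admins the password inside :details
;; is replaced with `protected-password`; for non-admins :details is dropped entirely.
(comment
  (api-serialize (map->DatabaseInstance {:name "My DB", :details {:password "hunter2"}}))
  ;; => {:name "My DB", :details {:password "**MetabasePass**"}}  ; when *current-user* is a superuser
  ;; => {:name "My DB"}                                           ; otherwise
  )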
......