diff --git a/.dir-locals.el b/.dir-locals.el index ea242e3242033a5c34b99df5d89d5f0bc2b0d8d8..9619e4c333449fa95c3d874d5f29d2cf64f10570 100644 --- a/.dir-locals.el +++ b/.dir-locals.el @@ -18,6 +18,7 @@ (catch-api-exceptions 0) (check 1) (checkp 1) + (cond-as-> 2) (cond-let 0) (conda 0) (context 2) @@ -35,6 +36,7 @@ (expect-with-dataset 1) (expect-with-datasets 1) (format-color 2) + (if-sqlserver 0) (ins 1) (let-400 1) (let-404 1) diff --git a/.gitmodules b/.gitmodules index d271c26dcc42be6d77d819ebd062bbb63a34ae67..08742d1385c072ada76dac65db61788fd0968f30 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,4 @@ +# don't use private/SSH remotes (e.g. git@github.com) because they break Heroku deploys [submodule "OSX/Vendor/INAppStoreWindow"] path = OSX/Vendor/INAppStoreWindow - url = git@github.com:indragiek/INAppStoreWindow.git + url = https://github.com/indragiek/INAppStoreWindow.git diff --git a/README.md b/README.md index 9f2b15911e12317fcff0a129e190f3f73517e01a..d6312923d839586a45e838f3d2dba81dbddf0ba4 100644 --- a/README.md +++ b/README.md @@ -32,9 +32,9 @@ To run the jar you will need to have a Java Runtime installed. As a quick check If you see something like - java version "1.8.0_31" - Java(TM) SE Runtime Environment (build 1.8.0_31-b13) - Java HotSpot(TM) 64-Bit Server VM (build 25.31-b07, mixed mode) + java version "1.8.0_51" + Java(TM) SE Runtime Environment (build 1.8.0_51-b16) + Java HotSpot(TM) 64-Bit Server VM (build 25.51-b03, mixed mode) you are good to go. Otherwise, download the Java Runtime Environment at http://java.com/ diff --git a/bin/release/aws-eb/.ebextensions/01_metabase.config b/bin/release/aws-eb/.ebextensions/01_metabase.config new file mode 100644 index 0000000000000000000000000000000000000000..01b1b7dad053dd8a5fead6d617d85032b3b0c9f4 --- /dev/null +++ b/bin/release/aws-eb/.ebextensions/01_metabase.config @@ -0,0 +1,50 @@ +###### +# Metabase Report server Elastic Beanstalk configuration +# Modify the environment variables below to customize your installation +# Comment out a variable to disable a feature +##### +container_commands: + #customize_env: + #env: + #NGINX_SERVER_NAME: metabase.example.com + #NGINX_FORCE_SSL: 1 + #PAPERTRAIL_HOSTNAME: $HOSTNAME + #PAPERTRAIL_HOST: foobar.papertrailapp.com + #PAPERTRAIL_PORT: 12345 + #PAPERTRAIL_FILES: /var/log/nginx/access.log /var/log/nginx/error.log + #command: true + #ignoreErrors: false + + 01_server-name: + command: ".ebextensions/metabase_config/metabase-setup.sh server_name" + test: test $NGINX_SERVER_NAME + ignoreErrors: true + + 02_server_https: + command: ".ebextensions/metabase_config/metabase-setup.sh server_https" + test: test $NGINX_FORCE_SSL + ignoreErrors: true + + 03_log_x_real_ip: + command: ".ebextensions/metabase_config/metabase-setup.sh log_x_real_ip" + ignoreErrors: true + + 04_install_papertrail: + command: ".ebextensions/metabase_config/metabase-setup.sh install_papertrail" + test: test $PAPERTRAIL_HOST + ignoreErrors: true + + 05_try_papertrail: + command: "/sbin/service remote_syslog restart" + test: test -e /etc/log_files.yml + ignoreErrors: true + + 06_try_nginx: + command: "/sbin/service nginx restart" + test: nginx -t + ignoreErrors: false + +option_settings: + - namespace: aws:elasticbeanstalk:command + option_name: Timeout + value: 600 diff --git a/bin/release/aws-eb/.ebextensions/extend_timeout.config b/bin/release/aws-eb/.ebextensions/extend_timeout.config deleted file mode 100644 index f81a14bf2db8ec8d8510555e9740e77132601388..0000000000000000000000000000000000000000 --- 
a/bin/release/aws-eb/.ebextensions/extend_timeout.config +++ /dev/null @@ -1,4 +0,0 @@ -option_settings: - - namespace: aws:elasticbeanstalk:command - option_name: Timeout - value: 600 diff --git a/bin/release/aws-eb/.ebextensions/metabase_config/metabase-setup.sh b/bin/release/aws-eb/.ebextensions/metabase_config/metabase-setup.sh new file mode 100755 index 0000000000000000000000000000000000000000..2f6a9eb1f14a93491269cec9318f4c778f336b5f --- /dev/null +++ b/bin/release/aws-eb/.ebextensions/metabase_config/metabase-setup.sh @@ -0,0 +1,103 @@ +#!/bin/bash +#### +# Metabase Report server Elastic Beanstalk metabase-setup.sh +# Modify the environment variables to customize your installation +# Unset a variable to disable a feature +#### + +# add files to papertrail +pt_files () { + sed -i '/ - .*/d' /etc/log_files.yml + set -f + for file in $PAPERTRAIL_FILES; do + sed -i 's|files:|files:\n - '$file'|' /etc/log_files.yml + done + set +f +} + +# papertrail remote host +pt_remote_host () { + sed -i "s/.*host:.*/ host: $PAPERTRAIL_HOST/" /etc/log_files.yml +} + +# papertrail remote port +pt_port () { + sed -i "s/.*port:.*/ port: $PAPERTRAIL_PORT/" /etc/log_files.yml +} + +# papertrail local host +pt_local_host () { + eval export PAPERTRAIL_HOSTNAME=$PAPERTRAIL_HOSTNAME # expand vars like $HOSTNAME + sed -i "s/.*hostname:.*/hostname: $PAPERTRAIL_HOSTNAME/" /etc/log_files.yml +} + +# nginx server name +server_name () { + [[ "$NGINX_SERVER_NAME" ]] && cp_default_server + cd /etc/nginx/sites-available/ + if [[ "$NGINX_SERVER_NAME" ]] ; then + if ! grep -q server_name elasticbeanstalk-nginx-docker-proxy.conf ; then + sed -i "s|listen 80\;|listen 80\;\n server_name $NGINX_SERVER_NAME \*\.$NGINX_SERVER_NAME\;\n|" elasticbeanstalk-nginx-docker-proxy.conf + fi + else + # no hostname passed, disable default_server + sed -i '/server_name/d' elasticbeanstalk-nginx-docker-proxy.conf + [[ -e /etc/nginx/sites-enabled/default_server ]] && rm /etc/nginx/sites-enabled/default_server + fi +} + +# enable https redirect +server_https () { + cd /etc/nginx/sites-available/ + if [[ "$NGINX_FORCE_SSL" ]] && ! grep -q https elasticbeanstalk-nginx-docker-proxy.conf ; then + sed -i 's|location \/ {|location \/ {\n\n if ($http_x_forwarded_proto != "https") {\n rewrite ^ https:\/\/$host$request_uri? permanent;\n }\n|' elasticbeanstalk-nginx-docker-proxy.conf + fi +} + +# download, install and configure papertrail +install_papertrail () { + cp .ebextensions/metabase_config/papertrail/log_files.yml /etc/log_files.yml && chmod 644 /etc/log_files.yml + cp .ebextensions/metabase_config/papertrail/remote_syslog /etc/init.d/remote_syslog && chmod 555 /etc/init.d/remote_syslog + cd /tmp/ + wget -q "https://github.com/papertrail/remote_syslog2/releases/download/v0.14/remote_syslog_linux_amd64.tar.gz" && + tar xzf remote_syslog_linux_amd64.tar.gz + /sbin/service remote_syslog stop + mv /tmp/remote_syslog/remote_syslog /usr/local/bin/ + rm -rf remote_syslog_linux_amd64.tar.gz remote_syslog + # Setup Papertrail + [[ "$PAPERTRAIL_HOST" ]] && pt_remote_host + [[ "$PAPERTRAIL_PORT" ]] && pt_port + [[ "$PAPERTRAIL_FILES" ]] && pt_files + [[ "$PAPERTRAIL_HOSTNAME" ]] && pt_local_host +} + +# enable default_server to drop requests from DNS poisoning +cp_default_server () { + cp .ebextensions/metabase_config/nginx/default_server /etc/nginx/sites-available/default_server + [[ ! 
-e /etc/nginx/sites-enabled/default_server ]] && + ln -s /etc/nginx/sites-available/default_server /etc/nginx/sites-enabled/default_server +} + +# update nginx logging to include x_real_ip +log_x_real_ip () { + cp .ebextensions/metabase_config/nginx/log_x_real_ip.conf /etc/nginx/conf.d/log_x_real_ip.conf + cd /etc/nginx/sites-available + if ! grep -q access_log *-proxy.conf ; then + sed -i 's|location \/ {|location \/ {\n\n access_log \/var\/log\/nginx\/access.log log_x_real_ip;\n|' *-proxy.conf + fi +} + +case $1 in +server_name) + server_name + ;; +server_https) + server_https + ;; +install_papertrail) + install_papertrail + ;; +log_x_real_ip) + log_x_real_ip + ;; +esac diff --git a/bin/release/aws-eb/.ebextensions/metabase_config/nginx/default_server b/bin/release/aws-eb/.ebextensions/metabase_config/nginx/default_server new file mode 100644 index 0000000000000000000000000000000000000000..49d14b428c447151e9fd9600c4d1669ddf8a50d8 --- /dev/null +++ b/bin/release/aws-eb/.ebextensions/metabase_config/nginx/default_server @@ -0,0 +1,19 @@ +# /etc/nginx/sites-available/default_server +server { + listen 80 default_server; + + location /api/health { + access_log off; + proxy_pass http://docker; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location / { + return 444; + } +} diff --git a/bin/release/aws-eb/.ebextensions/metabase_config/nginx/log_x_real_ip.conf b/bin/release/aws-eb/.ebextensions/metabase_config/nginx/log_x_real_ip.conf new file mode 100644 index 0000000000000000000000000000000000000000..a6cbe070a52ec63cb19d028d905a5b5d417252ae --- /dev/null +++ b/bin/release/aws-eb/.ebextensions/metabase_config/nginx/log_x_real_ip.conf @@ -0,0 +1,4 @@ +# /etc/nginx/conf.d/log_x_real_ip.conf +log_format log_x_real_ip '$proxy_add_x_forwarded_for - [$time_local] ' + '$request $status $body_bytes_sent ' + '$http_referer $http_user_agent'; diff --git a/bin/release/aws-eb/.ebextensions/metabase_config/papertrail/log_files.yml b/bin/release/aws-eb/.ebextensions/metabase_config/papertrail/log_files.yml new file mode 100644 index 0000000000000000000000000000000000000000..519dbaa5255cae289bcb5771499120781f4ab075 --- /dev/null +++ b/bin/release/aws-eb/.ebextensions/metabase_config/papertrail/log_files.yml @@ -0,0 +1,9 @@ +# /etc/log_files.yml - Papertrail +files: + - /var/log/nginx/access.log +hostname: +destination: + host: + port: + protocol: tls +new_file_check_interval: "10" # Check every 10 seconds diff --git a/bin/release/aws-eb/.ebextensions/metabase_config/papertrail/remote_syslog b/bin/release/aws-eb/.ebextensions/metabase_config/papertrail/remote_syslog new file mode 100644 index 0000000000000000000000000000000000000000..e5ec6aa5ba7d23abb3b525f994a66ce74b65c166 --- /dev/null +++ b/bin/release/aws-eb/.ebextensions/metabase_config/papertrail/remote_syslog @@ -0,0 +1,127 @@ +#!/bin/bash +# /etc/init.d/remote_syslog +# +# remote_syslog This shell script takes care of starting and stopping +# remote_syslog daemon +# +# chkconfig: - 58 74 +# description: papertrail/remote_syslog \ +# https://github.com/papertrail/remote_syslog2/blob/master/examples/remote_syslog.init.d + +### BEGIN INIT INFO +# Provides: remote_syslog +# Required-Start: $network $local_fs $remote_fs +# Required-Stop: $network $local_fs $remote_fs +# Should-Start: $syslog $named ntpdate +# Should-Stop: $syslog $named +# 
Short-Description: start and stop remote_syslog +# Description: papertrail/remote_syslog2 +# https://github.com/papertrail/remote_syslog2/blob/master/examples/remote_syslog.init.d +### END INIT INFO + +# Source function library. +. /etc/init.d/functions + +# Source networking configuration. +. /etc/sysconfig/network + +prog="/usr/local/bin/remote_syslog" +config="/etc/log_files.yml" +pid_dir="/var/run" + +EXTRAOPTIONS="" + +pid_file="$pid_dir/remote_syslog.pid" + +PATH=/sbin:/bin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin + +RETVAL=0 + +is_running(){ + # Do we have PID-file? + if [ -f "$pid_file" ]; then + # Check if proc is running + pid=`cat "$pid_file" 2> /dev/null` + if [[ $pid != "" ]]; then + exepath=`readlink /proc/"$pid"/exe 2> /dev/null` + exe=`basename "$exepath"` + if [[ $exe == "remote_syslog" ]]; then + # Process is running + return 0 + fi + fi + fi + return 1 +} + +start(){ + echo -n $"Starting $prog: " + unset HOME MAIL USER USERNAME + $prog -c $config --pid-file=$pid_file $EXTRAOPTIONS + RETVAL=$? + echo + return $RETVAL +} + +stop(){ + echo -n $"Stopping $prog: " + if (is_running); then + kill `cat $pid_file` + RETVAL=$? + echo + return $RETVAL + else + echo "$pid_file not found" + fi +} + +status(){ + echo -n $"Checking for $pid_file: " + + if (is_running); then + echo "found" + else + echo "not found" + fi +} + +reload(){ + restart +} + +restart(){ + stop + start +} + +condrestart(){ + is_running && restart + return 0 +} + +# See how we were called. +case "$1" in + start) + start + ;; + stop) + stop + ;; + status) + status + ;; + restart) + restart + ;; + reload) + reload + ;; + condrestart) + condrestart + ;; + *) + echo $"Usage: $0 {start|stop|status|restart|condrestart|reload}" + RETVAL=1 esac + +exit $RETVAL diff --git a/bin/start b/bin/start index 479f5c4ab90cdac9599b4d08059a5b307344d54f..da0a16451a534a140a7403d017a53598679dcf4b 100755 --- a/bin/start +++ b/bin/start @@ -8,10 +8,12 @@ if [ "$PORT" ]; then export MB_JETTY_PORT="$PORT" fi +# Heroku Postgres if [ "$DATABASE_URL" ]; then export MB_DB_CONNECTION_URI="$DATABASE_URL" fi +# Mailgun (Heroku) if [ "$MAILGUN_SMTP_LOGIN" ]; then export MB_EMAIL_SMTP_HOST="$MAILGUN_SMTP_SERVER" export MB_EMAIL_SMTP_PORT="$MAILGUN_SMTP_PORT" @@ -19,6 +21,42 @@ if [ "$MAILGUN_SMTP_LOGIN" ]; then export MB_EMAIL_SMTP_PASSWORD="$MAILGUN_SMTP_PASSWORD" fi +# SendGrid (Heroku) +if [ "$SENDGRID_USERNAME" ]; then + export MB_EMAIL_SMTP_HOST="smtp.sendgrid.net" + export MB_EMAIL_SMTP_PORT="587" + export MB_EMAIL_SMTP_USERNAME="$SENDGRID_USERNAME" + export MB_EMAIL_SMTP_PASSWORD="$SENDGRID_PASSWORD" + export MB_EMAIL_SMTP_SECURITY="tls" +fi + +# Mandrill (Heroku) +if [ "$MANDRILL_USERNAME" ]; then + export MB_EMAIL_SMTP_HOST="smtp.mandrillapp.com" + export MB_EMAIL_SMTP_PORT="587" + export MB_EMAIL_SMTP_USERNAME="$MANDRILL_USERNAME" + export MB_EMAIL_SMTP_PASSWORD="$MANDRILL_APIKEY" +fi + +# Postmark (Heroku) +# NOTE: requires configuring sender signature for "from" address +if [ "$POSTMARK_API_TOKEN" ]; then + export MB_EMAIL_SMTP_HOST="$POSTMARK_SMTP_SERVER" + export MB_EMAIL_SMTP_PORT="25" + export MB_EMAIL_SMTP_USERNAME="$POSTMARK_API_TOKEN" + export MB_EMAIL_SMTP_PASSWORD="$POSTMARK_API_TOKEN" + export MB_EMAIL_SMTP_SECURITY="tls" +fi + +# SparkPost (Heroku) +# NOTE: requires additional configuration +if [ "$SPARKPOST_SMTP_USERNAME" ]; then + export MB_EMAIL_SMTP_HOST="$SPARKPOST_SMTP_HOST" + export MB_EMAIL_SMTP_PORT="$SPARKPOST_SMTP_PORT" + export MB_EMAIL_SMTP_USERNAME="$SPARKPOST_SMTP_USERNAME" + export 
MB_EMAIL_SMTP_PASSWORD="$SPARKPOST_SMTP_PASSWORD" +fi + # AWS Elastic Beanstalk w/ RDS if [ ! -z "$RDS_HOSTNAME" ]; then # EEK: this is a bit fragile. if user picks a non-standard port for their db we are screwed :( diff --git a/circle.yml b/circle.yml index 80fe9f10a6b841f1157fe4fb7fc7b7542cc54540..1d764718a5669f6dd1e5afe5d612606a03b860b8 100644 --- a/circle.yml +++ b/circle.yml @@ -16,13 +16,14 @@ database: - mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql -u ubuntu mysql test: override: - # 0) runs unit tests w/ H2 local DB. Runs against Mongo, H2, Postgres - # 1) runs unit tests w/ Postgres local DB. Runs against H2, MySQL - # 2) runs Eastwood linter - # 3) Bikeshed linter - # 4) runs JS linter + JS test - # 5) runs lein uberjar. (We don't run bin/build because we're not really concerned about `npm install` (etc) in this test, which runs elsewhere) - - case $CIRCLE_NODE_INDEX in 0) MB_TEST_DATASETS=h2,mongo,postgres lein test ;; 1) MB_TEST_DATASETS=h2,mysql MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 2) lein eastwood ;; 3) lein bikeshed --max-line-length 240 ;; 4) npm install && npm run lint && npm run build && npm run test ;; 5) lein uberjar ;; esac: + # 0) runs unit tests w/ H2 local DB. Runs against H2, Mongo, MySQL + # 1) runs unit tests w/ Postgres local DB. Runs against H2, SQL Server + # 2) runs unit tests w/ MySQL local DB. Runs against H2, Postgres + # 3) runs Eastwood linter + # 4) Bikeshed linter + # 5) runs JS linter + JS test + # 6) runs lein uberjar. (We don't run bin/build because we're not really concerned about `npm install` (etc) in this test, which runs elsewhere) + - case $CIRCLE_NODE_INDEX in 0) MB_TEST_DATASETS=h2,mongo,mysql lein test ;; 1) MB_TEST_DATASETS=h2,sqlserver MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 2) MB_TEST_DATASETS=h2,postgres MB_DB_TYPE=mysql MB_DB_DBNAME=circle_test MB_DB_PORT=3306 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 3) lein eastwood ;; 4) lein bikeshed --max-line-length 240 ;; 5) npm install && npm run lint && npm run build && npm run test ;; 6) lein uberjar ;; esac: parallel: true deployment: master: diff --git a/docs/administration-guide/02-setting-up-email.md b/docs/administration-guide/02-setting-up-email.md index 30d7f5c9a543f74e14c3712e60136004c6fdb9a9..69e415d9bc12834d0ca8adfb05eef808550cf583 100644 --- a/docs/administration-guide/02-setting-up-email.md +++ b/docs/administration-guide/02-setting-up-email.md @@ -16,7 +16,7 @@ You should see this form: **If you use Google Apps:** * In the **SMTP host** field, enter smtp.gmail.com * Fill in 465 for the **SMTP port** field -* For the **SMTP Security** field, enter **TLS** +* For the **SMTP Security** field, enter **SSL** * In the **SMTP username** field, enter your Google Apps email address (e.g. hello@yourdomain.com) * Enter your Google Apps password in the **SMTP password** field * Enter the email address you would like to be used as the sender of system notifications in the **From Address* field. diff --git a/docs/developers-guide.md b/docs/developers-guide.md index 411b6c01c0f44708f575490591d1af3ba556a1c9..62121fb8503815f758a65cdaeded5f1b542feeab 100644 --- a/docs/developers-guide.md +++ b/docs/developers-guide.md @@ -189,7 +189,7 @@ Will give you a list of out-of-date dependencies. 
Once's this repo is made public, this Clojars badge will work and show the status as well: -[](http://jarkeeper.com/metabase/metabase) +[](https://jarkeeper.com/metabase/metabase) ## Documentation diff --git a/frontend/src/admin/databases/components/DatabaseEditForms.jsx b/frontend/src/admin/databases/components/DatabaseEditForms.jsx index 9a732ee53382896799707cc621a917a798640c8d..4dceb38284f745baa75e4092da369161230f297c 100644 --- a/frontend/src/admin/databases/components/DatabaseEditForms.jsx +++ b/frontend/src/admin/databases/components/DatabaseEditForms.jsx @@ -17,6 +17,7 @@ export default class DatabaseEditForms extends Component { static propTypes = { database: PropTypes.object, details: PropTypes.object, + engines: PropTypes.object.isRequired, hiddenFields: PropTypes.object, save: PropTypes.func.isRequired }; @@ -34,7 +35,7 @@ export default class DatabaseEditForms extends Component { } render() { - let { database, details, hiddenFields, ENGINES } = this.props; + let { database, details, hiddenFields, engines } = this.props; let { formError, formSuccess } = this.state; let errors = {}; @@ -47,24 +48,25 @@ export default class DatabaseEditForms extends Component { <label className="Select Form-offset mt1"> <select className="Select" value={database.engine} onChange={(e) => this.props.selectEngine(e.target.value)}> <option value="" disabled>Select a database type</option> - {Object.keys(ENGINES).map(engine => - <option value={engine}>{ENGINES[engine].name}</option> - )} + {Object.keys(engines).map(engine => + <option value={engine}>{engines[engine]['driver-name']}</option> + )} </select> </label> </div> { database.engine ? - <DatabaseDetailsForm - details={{ ...details, name: database.name }} - engine={database.engine} - formError={formError} - formSuccess={formSuccess} - hiddenFields={hiddenFields} - submitFn={this.detailsCaptured.bind(this)} - submitButtonText={'Save'}> - </DatabaseDetailsForm> - : null } + <DatabaseDetailsForm + details={{ ...details, name: database.name }} + engine={database.engine} + engines={engines} + formError={formError} + formSuccess={formSuccess} + hiddenFields={hiddenFields} + submitFn={this.detailsCaptured.bind(this)} + submitButtonText={'Save'}> + </DatabaseDetailsForm> + : null } </div> } </LoadingAndErrorWrapper> diff --git a/frontend/src/admin/databases/components/DatabaseList.jsx b/frontend/src/admin/databases/components/DatabaseList.jsx index 8e35e33f7ce402d2af1a1b1c314251d83a5eda19..cbbc8b8461bb561c1b4e2f8159166909349841a9 100644 --- a/frontend/src/admin/databases/components/DatabaseList.jsx +++ b/frontend/src/admin/databases/components/DatabaseList.jsx @@ -12,11 +12,11 @@ export default class DatabaseList extends Component { static propTypes = { databases: PropTypes.array, hasSampleDataset: PropTypes.bool, - ENGINES: PropTypes.object + engines: PropTypes.object }; render() { - let { databases, hasSampleDataset, created, ENGINES } = this.props; + let { databases, hasSampleDataset, created, engines } = this.props; return ( <div className="wrapper"> @@ -41,17 +41,17 @@ export default class DatabaseList extends Component { <a className="text-bold link" href={"/admin/databases/"+database.id}>{database.name}</a> </td> <td> - {ENGINES[database.engine].name} + {engines && engines[database.engine] ? 
engines[database.engine]['driver-name'] : database.engine} </td> <td className="Table-actions"> <ModalWithTrigger - ref="deleteDatabaseModal" + ref={"deleteDatabaseModal_"+database.id} triggerClasses="Button Button--danger" triggerElement="Delete" > <DeleteDatabaseModal database={database} - onClose={() => this.refs.deleteDatabaseModal.toggle()} + onClose={() => this.refs["deleteDatabaseModal_"+database.id].close()} onDelete={() => this.props.delete(database.id)} /> </ModalWithTrigger> diff --git a/frontend/src/admin/databases/databases.controllers.js b/frontend/src/admin/databases/databases.controllers.js index 02032d8c99e3d515917bce7aafd22db719b7bd93..5a35125996139a060f0f01d1ca19bbbf6b8abfa7 100644 --- a/frontend/src/admin/databases/databases.controllers.js +++ b/frontend/src/admin/databases/databases.controllers.js @@ -6,10 +6,9 @@ import _ from "underscore"; var DatabasesControllers = angular.module('metabaseadmin.databases.controllers', ['metabase.metabase.services']); -DatabasesControllers.controller('DatabaseList', ['$scope', '$routeParams', 'Metabase', 'MetabaseCore', function($scope, $routeParams, Metabase, MetabaseCore) { +DatabasesControllers.controller('DatabaseList', ['$scope', '$routeParams', 'Metabase', function($scope, $routeParams, Metabase) { $scope.DatabaseList = DatabaseList; - $scope.ENGINES = MetabaseCore.ENGINES; $scope.databases = []; $scope.hasSampleDataset = false; @@ -50,6 +49,15 @@ DatabasesControllers.controller('DatabaseList', ['$scope', '$routeParams', 'Meta } }; + // load engine info from form_input endpoint. We need this to convert DB engine keys (e.g. 'postgres') to display names (e.g. 'PostgreSQL') + Metabase.db_form_input(function(formInput){ + $scope.engines = formInput.engines; + console.log('ENGINES: ', $scope.engines); + }, function(error) { + console.log('Error loading database form input: ', error); + }); + + // fetch DBs from the backend Metabase.db_list(function(databases) { $scope.databases = databases; $scope.hasSampleDataset = hasSampleDataset(databases); @@ -58,12 +66,10 @@ DatabasesControllers.controller('DatabaseList', ['$scope', '$routeParams', 'Meta }); }]); -DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$location', 'Metabase', 'MetabaseCore', - function($scope, $routeParams, $location, Metabase, MetabaseCore) { +DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$location', 'Metabase', + function($scope, $routeParams, $location, Metabase) { $scope.DatabaseEdit = DatabaseEdit; - $scope.ENGINES = MetabaseCore.ENGINES; - // if we're adding a new database then hide the SSL field; we'll determine it automatically <3 $scope.hiddenFields = { ssl: true @@ -71,7 +77,7 @@ DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$loc $scope.selectEngine = function(engine) { $scope.details.engine = $scope.database.engine = engine; - } + }; // update an existing Database var update = function(database, details) { @@ -104,30 +110,39 @@ DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$loc // validate_connection needs engine so add it to request body details.engine = database.engine; + function handleError(error) { + $scope.$broadcast("form:api-error", error); + throw error; + } + // for an existing DB check that connection is valid before save if ($routeParams.databaseId) { - return Metabase.validate_connection(details).$promise.catch(function(error) { - $scope.$broadcast("form:api-error", error); - throw error; - }).then(function() { + return 
Metabase.validate_connection(details).$promise.catch(handleError).then(function() { return update(database, details); }); // for a new DB we want to infer SSL support. First try to connect w/ SSL. If that fails, disable SSL } else { - details.ssl = true; + const engineSupportsSSL = _.contains(_.map($scope.engines[database.engine]['details-fields'], 'name'), + 'ssl'); - return Metabase.validate_connection(details).$promise.catch(function() { - console.log('Unable to connect with SSL. Trying with SSL = false.'); - details.ssl = false; - return Metabase.validate_connection(details).$promise; - }).then(function() { + function createDB() { console.log('Successfully connected to database with SSL = ' + details.ssl + '.'); return create(database, details); - }).catch(function(error) { - $scope.$broadcast("form:api-error", error); - throw error; - }); + } + + // if the engine supports SSL, try connecting with SSL first, and then without + if (engineSupportsSSL) { + details.ssl = true; + return Metabase.validate_connection(details).$promise.catch(function() { + console.log('Unable to connect with SSL. Trying with SSL = false.'); + details.ssl = false; + return Metabase.validate_connection(details).$promise; + }).then(createDB).catch(handleError); + } else { + delete details.ssl; + return Metabase.validate_connection(details).$promise.catch(handleError).then(createDB); + } } }; @@ -153,16 +168,9 @@ DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$loc $scope.redirectToDatabases = function() { $scope.$apply(() => $location.path('/admin/databases/')); - } - - // load our form input data - Metabase.db_form_input(function(form_input) { - $scope.form_input = form_input; - }, function(error) { - console.log('error getting database form_input', error); - }); + }; - if ($routeParams.databaseId) { + function loadExistingDB() { // load existing database for editing Metabase.db_get({ 'dbId': $routeParams.databaseId @@ -176,15 +184,27 @@ DatabasesControllers.controller('DatabaseEdit', ['$scope', '$routeParams', '$loc $location.path('/admin/databases/'); } }); - } else { + } + + function prepareEmptyDB() { // prepare an empty database for creation $scope.database = { name: '', - engine: Object.keys(MetabaseCore.ENGINES)[0], + engine: Object.keys($scope.engines)[0], details: {}, created: false }; $scope.details = {}; } + + // Ok, now load the engines from the form_input API endpoint + Metabase.db_form_input(function(formInput){ + $scope.engines = formInput.engines; + + if ($routeParams.databaseId) loadExistingDB(); + else prepareEmptyDB(); + }, function(error){ + console.log('Error loading database form input: ', error); + }); } ]); diff --git a/frontend/src/admin/settings/settings.controllers.js b/frontend/src/admin/settings/settings.controllers.js index 362a90f890b438a9e0240db76b2181b8cc75a013..ad213d0510f9fc30e4a63311b2f734b5fdaf5a29 100644 --- a/frontend/src/admin/settings/settings.controllers.js +++ b/frontend/src/admin/settings/settings.controllers.js @@ -25,8 +25,9 @@ var TIMEZONES = [ var EXTRA_SETTINGS_METADATA = { "site-name": { display_name: "Site Name", section: "General", index: 0, type: "string" }, "-site-url": { display_name: "Site URL", section: "General", index: 1, type: "string" }, - "report-timezone": { display_name: "Report Timezone", section: "General", index: 2, type: "select", options: TIMEZONES, placeholder: "Select a timezone" }, - "anon-tracking-enabled":{ display_name: "Anonymous Tracking", section: "General", index: 3, type: "boolean" }, + "admin-email": { 
display_name: "Email Address for Help Requests", section: "General", index: 2, type: "string" }, + "report-timezone": { display_name: "Report Timezone", section: "General", index: 3, type: "select", options: TIMEZONES, placeholder: "Select a timezone" }, + "anon-tracking-enabled":{ display_name: "Anonymous Tracking", section: "General", index: 4, type: "boolean" }, "email-smtp-host": { display_name: "SMTP Host", section: "Email", index: 0, type: "string" }, "email-smtp-port": { display_name: "SMTP Port", section: "Email", index: 1, type: "string" }, "email-smtp-security": { display_name: "SMTP Security", section: "Email", index: 2, type: "radio", options: { none: "None", tls: "TLS", ssl: "SSL" } }, diff --git a/frontend/src/card/card.controllers.js b/frontend/src/card/card.controllers.js index e9dea044e01ff409609d6ac74aac99cd74b8a654..66e7ac310d762f665d73d203904b68ba929b2006 100644 --- a/frontend/src/card/card.controllers.js +++ b/frontend/src/card/card.controllers.js @@ -254,7 +254,7 @@ CardControllers.controller('CardDetail', [ if (!coldef || !coldef.special_type) return false; - if (coldef.special_type === 'id' || (coldef.special_type === 'fk' && coldef.target)) { + if (coldef.table_id != null && coldef.special_type === 'id' || (coldef.special_type === 'fk' && coldef.target)) { return true; } else { return false; diff --git a/frontend/src/card/card.util.js b/frontend/src/card/card.util.js index a9b553da61fe1c5f181174d61cc8b915b7a9e448..5668b117ae08834311f3a177d19682e7a669dc7c 100644 --- a/frontend/src/card/card.util.js +++ b/frontend/src/card/card.util.js @@ -12,11 +12,20 @@ export function serializeCardForUrl(card) { display: card.display, visualization_settings: card.visualization_settings }; - return btoa(JSON.stringify(cardCopy)); + return utf8_to_b64(JSON.stringify(cardCopy)); } export function deserializeCardFromUrl(serialized) { - return JSON.parse(atob(serialized)); + return JSON.parse(b64_to_utf8(serialized)); +} + +// escaping before base64 encoding is necessary for non-ASCII characters +// https://developer.mozilla.org/en-US/docs/Web/API/WindowBase64/btoa +function utf8_to_b64(str) { + return window.btoa(unescape(encodeURIComponent(str))); +} +function b64_to_utf8(str) { + return decodeURIComponent(escape(window.atob(str))); } export function urlForCardState(state, dirty) { diff --git a/frontend/src/components/DashboardsDropdown.jsx b/frontend/src/components/DashboardsDropdown.jsx index 95da003a144bb4c383061f120b41caef48612a80..4c786fd09102c1cd54fae70068f77874fc3f2de7 100644 --- a/frontend/src/components/DashboardsDropdown.jsx +++ b/frontend/src/components/DashboardsDropdown.jsx @@ -29,7 +29,7 @@ export default class DashboardsDropdown extends Component { _.bindAll(this, "toggleDropdown", "closeDropdown", "toggleModal", "closeModal"); } - propTypes = { + static propTypes = { createDashboardFn: PropTypes.func.isRequired, dashboards: PropTypes.array.isRequired }; diff --git a/frontend/src/components/Header.jsx b/frontend/src/components/Header.jsx index 347cf93bda0b35f6f686e92d9a5e32407bee3290..6974ef1d36fcaf7b8ff2068010e6d4247d992df3 100644 --- a/frontend/src/components/Header.jsx +++ b/frontend/src/components/Header.jsx @@ -22,7 +22,7 @@ export default class Header extends Component { <span className="EditHeader-title">{this.props.editingTitle}</span> <span className="EditHeader-subtitle mx1">{this.props.editingSubtitle}</span> <span className="flex-align-right"> - {this.props.editingButtons.map((button, buttonIndex) => <span key={buttonIndex}>{button}</span>)} + 
{this.props.editingButtons} </span> </div> ); @@ -65,11 +65,9 @@ export default class Header extends Component { } var headerButtons = this.props.headerButtons.map((section, sectionIndex) => { - return ( + return section && section.length > 0 && ( <span key={sectionIndex} className="Header-buttonSection"> - {section.map((button, buttonIndex) => { - return <span key={buttonIndex}>{button}</span>; - })} + {section} </span> ); }); diff --git a/frontend/src/components/HistoryModal.jsx b/frontend/src/components/HistoryModal.jsx index ddb10568bb39387f8f2d13d80c38b881fe2921e9..9cd0795f48e13568c27a025e85b856639409e597 100644 --- a/frontend/src/components/HistoryModal.jsx +++ b/frontend/src/components/HistoryModal.jsx @@ -36,13 +36,6 @@ export default class HistoryModal extends Component { async componentDidMount() { let { entityType, entityId } = this.props; - // HACK: close if opened with null entityId to work around issue with modals mysteriously opening - // https://github.com/metabase/metabase-init/issues/917 - if (entityId == null) { - this.props.onClose(); - return; - } - try { await this.props.onFetchRevisions({ entity: entityType, id: entityId }); } catch (error) { diff --git a/frontend/src/components/database/DatabaseDetailsForm.jsx b/frontend/src/components/database/DatabaseDetailsForm.jsx index 700c52d8dde28db78a6989a871665a5505ec1a7c..d7b7bc0faba8600f5ecead99075e40956cdd5a4c 100644 --- a/frontend/src/components/database/DatabaseDetailsForm.jsx +++ b/frontend/src/components/database/DatabaseDetailsForm.jsx @@ -1,7 +1,6 @@ import React, { Component, PropTypes } from "react"; import cx from "classnames"; -import MetabaseCore from "metabase/lib/core"; import FormField from "metabase/components/form/FormField.jsx"; import FormLabel from "metabase/components/form/FormLabel.jsx"; import FormMessage from "metabase/components/form/FormMessage.jsx"; @@ -30,6 +29,7 @@ export default class DatabaseDetailsForm extends Component { static propTypes = { details: PropTypes.object, engine: PropTypes.string.isRequired, + engines: PropTypes.object.isRequired, formError: PropTypes.object, hiddenFields: PropTypes.object, submitButtonText: PropTypes.string.isRequired, @@ -37,7 +37,7 @@ export default class DatabaseDetailsForm extends Component { }; validateForm() { - let { engine } = this.props; + let { engine, engines } = this.props; let { details } = this.state; let valid = true; @@ -48,8 +48,8 @@ export default class DatabaseDetailsForm extends Component { } // go over individual fields - for (let field of MetabaseCore.ENGINES[engine].fields) { - if (field.required && isEmpty(details[field.fieldName])) { + for (let field of engines[engine]['details-fields']) { + if (field.required && isEmpty(details[field.name])) { valid = false; break; } @@ -75,7 +75,7 @@ export default class DatabaseDetailsForm extends Component { formSubmitted(e) { e.preventDefault(); - let { engine, submitFn } = this.props; + let { engine, engines, submitFn } = this.props; let { details } = this.state; let request = { @@ -84,15 +84,13 @@ export default class DatabaseDetailsForm extends Component { details: {} }; - for (let field of MetabaseCore.ENGINES[engine].fields) { - let val = details[field.fieldName] === "" ? null : details[field.fieldName]; - if (val == null && field.placeholderIsDefault) { - val = field.placeholder; - } - if (field.transform) { - val = field.transform(val); - } - request.details[field.fieldName] = val; + for (let field of engines[engine]['details-fields']) { + let val = details[field.name] === "" ? 
null : details[field.name]; + + if (val && field.type === 'integer') val = parseInt(val); + if (val == null && field.default) val = field.default; + + request.details[field.name] = val; } submitFn(request); @@ -100,54 +98,49 @@ export default class DatabaseDetailsForm extends Component { renderFieldInput(field, fieldIndex) { let { details } = this.state; - let value = details && details[field.fieldName] || ""; + let value = details && details[field.name] || ""; switch(field.type) { - case 'select': + case 'boolean': return ( <div className="Form-input Form-offset full Button-group"> - {field.choices.map(choice => - <div - className={cx("Button", details[field.fieldName] == choice.value ? "Button--" + choice.selectionAccent : null)} - onClick={(e) => { this.onChange(field.fieldName, choice.value)}} - > - {choice.name} - </div> - )} + <div className={cx('Button', details[field.name] === true ? 'Button--active' : null)} onClick={(e) => { this.onChange(field.name, true) }}> + Yes + </div> + <div className={cx('Button', details[field.name] === false ? 'Button--danger' : null)} onClick={(e) => { this.onChange(field.name, false) }}> + No + </div> </div> ); - case 'text': - case 'password': + default: return ( <input - type={field.type} + type={field.type === 'password' ? 'password' : 'text'} className="Form-input Form-offset full" - ref={field.fieldName} - name={field.fieldName} + ref={field.name} + name={field.name} value={value} - placeholder={field.placeholder} - onChange={(e) => this.onChange(field.fieldName, e.target.value)} + placeholder={field.default || field.placeholder} + onChange={(e) => this.onChange(field.name, e.target.value)} required={field.required} autoFocus={fieldIndex === 0} /> - ); + ); } } render() { - let { engine, formError, formSuccess, hiddenFields, submitButtonText } = this.props; + let { engine, engines, formError, formSuccess, hiddenFields, submitButtonText } = this.props; let { valid } = this.state; let fields = [ { - displayName: "Name", - fieldName: "name", - type: "text", + name: 'name', + 'display-name': 'Name', placeholder: "How would you like to refer to this database?", - placeholderIsDefault: false, required: true }, - ...MetabaseCore.ENGINES[engine].fields + ...engines[engine]['details-fields'] ]; hiddenFields = hiddenFields || {}; @@ -155,13 +148,13 @@ export default class DatabaseDetailsForm extends Component { return ( <form onSubmit={this.formSubmitted.bind(this)} noValidate> <div className="FormInputGroup"> - { fields.filter(field => !hiddenFields[field.fieldName]).map((field, fieldIndex) => - <FormField key={field.fieldName} fieldName={field.fieldName}> - <FormLabel title={field.displayName} fieldName={field.fieldName}></FormLabel> + { fields.filter(field => !hiddenFields[field.name]).map((field, fieldIndex) => + <FormField key={field.name} fieldName={field.name}> + <FormLabel title={field['display-name']} fieldName={field.name}></FormLabel> {this.renderFieldInput(field, fieldIndex)} <span className="Form-charm"></span> </FormField> - )} + )} </div> <div className="Form-actions"> diff --git a/frontend/src/dashboard/components/DashboardHeader.jsx b/frontend/src/dashboard/components/DashboardHeader.jsx index ea31d36581c5e857084241b77d70c0013e5d24a4..df90738b43617e121efc83bae3475e81272a79de 100644 --- a/frontend/src/dashboard/components/DashboardHeader.jsx +++ b/frontend/src/dashboard/components/DashboardHeader.jsx @@ -80,22 +80,21 @@ export default class DashboardHeader extends Component { } getEditingButtons() { - var editingButtons = []; - 
editingButtons.push( + return [ <ActionButton + key="save" actionFn={() => this.onSave()} className="Button Button--small Button--primary text-uppercase" normalText="Save" activeText="Saving…" failedText="Save failed" successText="Saved" - /> - ); - editingButtons.push( - <a className="Button Button--small text-uppercase" href="#" onClick={() => this.onCancel()}>Cancel</a> - ); - editingButtons.push( + />, + <a key="cancel" className="Button Button--small text-uppercase" onClick={() => this.onCancel()}> + Cancel + </a>, <ModalWithTrigger + key="delete" ref="deleteDashboardModal" triggerClasses="Button Button--small text-uppercase" triggerElement="Delete" @@ -107,18 +106,18 @@ export default class DashboardHeader extends Component { onDelete={() => this.onDelete()} /> </ModalWithTrigger> - ); - return editingButtons; + ]; } getHeaderButtons() { - var buttonSections = []; + var buttonSections = [[],[]]; var { dashboard } = this.props; if (this.props.isEditing) { - buttonSections.push([ + buttonSections[0].push( <ModalWithTrigger + key="history" ref="dashboardHistory" triggerElement={<Icon className="text-brand-hover" name="history" width="16px" height="16px" />} > @@ -133,26 +132,21 @@ export default class DashboardHeader extends Component { onReverted={() => this.onRevertedRevision()} /> </ModalWithTrigger> - ]); + ); } if (dashboard && dashboard.can_write && !this.props.isEditing) { - buttonSections.push([ - <a title="Edit Dashboard Layout" className="text-brand-hover cursor-pointer" onClick={() => this.onEdit()}> + buttonSections[0].push( + <a key="edit" title="Edit Dashboard Layout" className="text-brand-hover cursor-pointer" onClick={() => this.onEdit()}> <Icon name="pencil" width="16px" height="16px" /> </a> - ]); + ); } - // buttonSections.push([ - // <a title="Add Question to Dashboard" className="text-brand-hover" onClick={() => this.addQuestion()}> - // <Icon name="add" width="16px" height="16px" /> - // </a> - // ]); - var isEmpty = dashboard.ordered_cards.length === 0; - buttonSections.push([ + buttonSections[1].push( <ModalWithTrigger + key="add" ref="addQuestionModal" triggerElement={ <a title="Add a question to this dashboard"> @@ -167,7 +161,7 @@ export default class DashboardHeader extends Component { onClose={() => this.refs.addQuestionModal.toggle()} /> </ModalWithTrigger> - ]); + ); return buttonSections; } diff --git a/frontend/src/lib/core.js b/frontend/src/lib/core.js index 9ef84097f918e7f129529c0dd354e93d03a413d7..ae682f522f0b837aa86640216da9d782fd800b2e 100644 --- a/frontend/src/lib/core.js +++ b/frontend/src/lib/core.js @@ -263,178 +263,4 @@ import _ from "underscore"; }); }; - // The various DB engines we support <3 - // TODO - this should probably come back from the API, no? - // - // NOTE: - // A database's connection details is stored in a JSON map in the field database.details. - // - // ENGINE DICT FORMAT: - // * name - human-facing name to use for this DB engine - // * fields - array of available fields to display when a user adds/edits a DB of this type. Each field should be a dict of the format below: - // - // FIELD DICT FORMAT: - // * displayName - user-facing name for the Field - // * fieldName - name used for the field in a database details dict - // * transform - function to apply to this value before passing to the API, such as 'parseInt'. 
(default: none) - // * placeholder - placeholder value that should be used in text input for this field (default: none) - // * placeholderIsDefault - if true, use the value of 'placeholder' as the default value of this field if none is specified (default: false) - // (if you set this, don't set 'required', or user will still have to add a value for the field) - // * required - require the user to enter a value for this field? (default: false) - // * choices - array of possible values for this field. If provided, display a button toggle instead of a text input. - // Each choice should be a dict of the format below: (optional) - // - // CHOICE DICT FORMAT: - // * name - User-facing name for the choice. - // * value - Value to use for the choice in the database connection details dict. - // * selectionAccent - What accent type should be applied to the field when its value is chosen? Either 'active' (currently green), or 'danger' (currently red). - this.ENGINES = { - postgres: { - name: 'PostgreSQL', - fields: [{ - displayName: "Host", - fieldName: "host", - type: "text", - placeholder: "localhost", - placeholderIsDefault: true - }, { - displayName: "Port", - fieldName: "port", - type: "text", - transform: parseInt, - placeholder: "5432", - placeholderIsDefault: true - }, { - displayName: "Database name", - fieldName: "dbname", - type: "text", - placeholder: "birds_of_the_world", - required: true - }, { - displayName: "Database username", - fieldName: "user", - type: "text", - placeholder: "What username do you use to login to the database?", - required: true - }, { - displayName: "Database password", - fieldName: "password", - type: "password", - placeholder: "*******" - }, { - displayName: "Use a secure connection (SSL)?", - fieldName: "ssl", - type: "select", - choices: [{ - name: 'Yes', - value: true, - selectionAccent: 'active' - }, { - name: 'No', - value: false, - selectionAccent: 'danger' - }] - }] - }, - mysql: { - name: 'MySQL', - fields: [{ - displayName: "Host", - fieldName: "host", - type: "text", - placeholder: "localhost", - placeholderIsDefault: true - }, { - displayName: "Port", - fieldName: "port", - type: "text", - transform: parseInt, - placeholder: "3306", - placeholderIsDefault: true - }, { - displayName: "Database name", - fieldName: "dbname", - type: "text", - placeholder: "birds_of_the_world", - required: true - }, { - displayName: "Database username", - fieldName: "user", - type: "text", - placeholder: "What username do you use to login to the database?", - required: true - }, { - displayName: "Database password", - fieldName: "password", - type: "password", - placeholder: "*******" - }] - }, - h2: { - name: 'H2', - fields: [{ - displayName: "Connection String", - fieldName: "db", - type: "text", - placeholder: "file:/Users/camsaul/bird_sightings/toucans;AUTO_SERVER=TRUE" - }] - }, - mongo: { - name: 'MongoDB', - fields: [{ - displayName: "Host", - fieldName: "host", - type: "text", - placeholder: "localhost", - placeholderIsDefault: true - }, { - displayName: "Port", - fieldName: "port", - type: "text", - transform: parseInt, - placeholder: "27017", - placeholderIsDefault: true - }, { - displayName: "Database name", - fieldName: "dbname", - type: "text", - placeholder: "carrierPigeonDeliveries", - required: true - }, { - displayName: "Database username", - fieldName: "user", - type: "text", - placeholder: "What username do you use to login to the database?" 
- }, { - displayName: "Database password", - fieldName: "pass", - type: "password", - placeholder: "******" - }] - } - }; - - // Prepare database details before being sent to the API. - // This includes applying 'transform' functions and adding default values where applicable. - this.prepareDatabaseDetails = function(details) { - if (!details.engine) throw "Missing key 'engine' in database request details; please add this as API expects it in the request body."; - - // iterate over each field definition - this.ENGINES[details.engine].fields.forEach(function(field) { - var fieldName = field.fieldName; - - // set default value if applicable - if (!details[fieldName] && field.placeholderIsDefault) { - details[fieldName] = field.placeholder; - } - - // apply transformation function if applicable - if (details[fieldName] && field.transform) { - details[fieldName] = field.transform(details[fieldName]); - } - }); - - return details; - }; - }).apply(exports); diff --git a/frontend/src/lib/schema_metadata.js b/frontend/src/lib/schema_metadata.js index b0fab5bd65e15c6195e3e39bdb7370b4f583fc8b..7d9464384a8c611c1e3eb0f435379729695d1160 100644 --- a/frontend/src/lib/schema_metadata.js +++ b/frontend/src/lib/schema_metadata.js @@ -46,7 +46,8 @@ const TYPES = { }, [SUMMABLE]: { - include: [NUMBER] + include: [NUMBER], + exclude: [ENTITY, LOCATION, DATE_TIME] }, [CATEGORY]: { base: ["BooleanField"], @@ -67,7 +68,15 @@ export function isFieldType(type, field) { return true; } } - // recursively check to see if it's another field th: + // recursively check to see if it's NOT another field type: + if (def.exclude) { + for (let excludeType of def.exclude) { + if (isFieldType(excludeType, field)) { + return false; + } + } + } + // recursively check to see if it's another field type: if (def.include) { for (let includeType of def.include) { if (isFieldType(includeType, field)) { @@ -143,7 +152,8 @@ function equivalentArgument(field, table) { if (isCategory(field)) { if (field.id in table.field_values && table.field_values[field.id].length > 0) { let validValues = table.field_values[field.id]; - validValues.sort(); + // this sort function works for both numbers and strings: + validValues.sort((a, b) => a === b ? 0 : (a < b ? -1 : 1)); return { type: "select", values: validValues @@ -418,3 +428,21 @@ export function hasLatitudeAndLongitudeColumns(columnDefs) { } return hasLatitude && hasLongitude; } + +export function foreignKeyCountsByOriginTable(fks) { + if (fks === null || !Array.isArray(fks)) { + return null; + } + + return fks.map(function(fk) { + return ('origin' in fk) ? 
fk.origin.table.id : null; + }).reduce(function(prev, curr, idx, array) { + if (curr in prev) { + prev[curr]++; + } else { + prev[curr] = 1; + } + + return prev; + }, {}); +} diff --git a/frontend/src/metabase/metabase.services.js b/frontend/src/metabase/metabase.services.js index feb5cab970b5ac9f9f24e4111552c71719f155cd..601604dd1ee5d17715e851577d011f7d27105bbd 100644 --- a/frontend/src/metabase/metabase.services.js +++ b/frontend/src/metabase/metabase.services.js @@ -23,10 +23,6 @@ MetabaseServices.factory('Metabase', ['$resource', '$cookies', 'MetabaseCore', f 'X-CSRFToken': function() { return $cookies.csrftoken; } - }, - transformRequest: function(data) { - data = MetabaseCore.prepareDatabaseDetails(data); - return angular.toJson(data); } }, validate_connection: { @@ -36,10 +32,6 @@ MetabaseServices.factory('Metabase', ['$resource', '$cookies', 'MetabaseCore', f 'X-CSRFToken': function() { return $cookies.csrftoken; } - }, - transformRequest: function(data) { - data = MetabaseCore.prepareDatabaseDetails(data); - return angular.toJson(data); } }, db_add_sample_dataset: { @@ -290,4 +282,3 @@ MetabaseServices.factory('ForeignKey', ['$resource', '$cookies', function($resou }); }]); - diff --git a/frontend/src/query_builder/DataReferenceTable.jsx b/frontend/src/query_builder/DataReferenceTable.jsx index 977627579c413587fffd69d77846d9cb25515b8f..f979447201dafa9b0c06a229850d1748fbcad2af 100644 --- a/frontend/src/query_builder/DataReferenceTable.jsx +++ b/frontend/src/query_builder/DataReferenceTable.jsx @@ -1,7 +1,7 @@ import React, { Component, PropTypes } from "react"; import DataReferenceQueryButton from './DataReferenceQueryButton.jsx'; - +import { foreignKeyCountsByOriginTable } from 'metabase/lib/schema_metadata'; import inflection from 'inflection'; import cx from "classnames"; @@ -90,10 +90,16 @@ export default class DataReferenceTable extends Component { }); pane = <ul>{fields}</ul>; } else if (this.state.pane === "connections") { - var connections = this.state.tableForeignKeys.map((fk, index) => { + const fkCountsByTable = foreignKeyCountsByOriginTable(this.state.tableForeignKeys); + + var connections = this.state.tableForeignKeys.sort(function(a, b) { + return a.origin.table.display_name.localeCompare(b.origin.table.display_name); + }).map((fk, index) => { + const via = (fkCountsByTable[fk.origin.table.id] > 1) ? 
(<span className="text-grey-3 text-light h5"> via {fk.origin.display_name}</span>) : null; + return ( <li key={fk.id} className="p1 border-row-divider"> - <a className="text-brand text-brand-darken-hover no-decoration" href="#" onClick={this.props.showField.bind(null, fk.origin)}>{fk.origin.table.display_name}</a> + <a className="text-brand text-brand-darken-hover no-decoration" href="#" onClick={this.props.showField.bind(null, fk.origin)}>{fk.origin.table.display_name}{via}</a> </li> ); }); diff --git a/frontend/src/query_builder/DataSelector.jsx b/frontend/src/query_builder/DataSelector.jsx index 36d3e47d6d92741d5d44bcf937d7bc5f16f34b5c..a2d60d1c25eaa0d1be0634a6e731448a360d21b0 100644 --- a/frontend/src/query_builder/DataSelector.jsx +++ b/frontend/src/query_builder/DataSelector.jsx @@ -109,8 +109,11 @@ export default class DataSelector extends Component { name: table.display_name, database: database, table: table - })) - })) + })).sort(function(a, b) { + return a.name.localeCompare(b.name); + }) + })); + } else { sections = [{ items: this.props.databases.map(database => ({ diff --git a/frontend/src/query_builder/QueryHeader.jsx b/frontend/src/query_builder/QueryHeader.jsx index 02df16924f95d9118184c2f3fd70c5d53c519dd9..401db29f1f20bb4c4d422e3ba0954c7f1a055d01 100644 --- a/frontend/src/query_builder/QueryHeader.jsx +++ b/frontend/src/query_builder/QueryHeader.jsx @@ -124,11 +124,12 @@ export default React.createClass({ }, getHeaderButtons: function() { - var buttons = []; + var buttonSections = [[],[]]; if (this.props.cardIsNewFn() && this.props.cardIsDirtyFn()) { - buttons.push( + buttonSections[0].push( <ModalWithTrigger + key="save" ref="saveModal" triggerClasses="h4 px1 text-grey-4 text-brand-hover text-uppercase" triggerElement="Save" @@ -144,8 +145,9 @@ export default React.createClass({ } if (!this.props.cardIsNewFn()) { - buttons.push( + buttonSections[0].push( <ModalWithTrigger + key="history" ref="cardHistory" triggerElement={<Icon name="history" width="16px" height="16px" />} > @@ -163,47 +165,45 @@ export default React.createClass({ } if (this.props.cardIsNewFn() && !this.props.cardIsDirtyFn()) { - buttons.push( + buttonSections[0].push( <QueryModeToggle + key="queryModeToggle" currentQueryMode={this.props.card.dataset_query.type} setQueryModeFn={this.setQueryMode} /> ); } - var dataReferenceButtonClasses = cx({ - 'mx1': true, - 'transition-color': true, + var dataReferenceButtonClasses = cx('mx1 transition-color', { 'text-grey-4': !this.props.isShowingDataReference, 'text-brand': this.props.isShowingDataReference, 'text-brand-hover': !this.state.isShowingDataReference }); - var dataReferenceButton = ( - <a href="#" className={dataReferenceButtonClasses} title="Get help on what data means"> + buttonSections[1].push( + <a key="dataReference" href="#" className={dataReferenceButtonClasses} title="Get help on what data means"> <Icon name='reference' width="16px" height="16px" onClick={this.toggleDataReference}></Icon> </a> ); - return [buttons, [dataReferenceButton]]; + return buttonSections; }, getEditingButtons: function() { - var editingButtons = []; - editingButtons.push( + return [ <ActionButton + key="save" actionFn={() => this.onSave()} className="Button Button--small Button--primary text-uppercase" normalText="Save" activeText="Saving…" failedText="Save failed" successText="Saved" - /> - ); - editingButtons.push( - <a className="Button Button--small text-uppercase" href="#" onClick={() => this.onCancel()}>Cancel</a> - ); - editingButtons.push( + />, + <a key="cancel" 
className="Button Button--small text-uppercase" onClick={() => this.onCancel()}> + Cancel + </a>, <ModalWithTrigger + key="delete" ref="deleteModal" triggerClasses="Button Button--small text-uppercase" triggerElement="Delete" @@ -214,8 +214,7 @@ export default React.createClass({ closeFn={() => this.refs.deleteModal.toggle()} /> </ModalWithTrigger> - ); - return editingButtons; + ]; }, render: function() { diff --git a/frontend/src/query_builder/QueryVisualizationObjectDetailTable.jsx b/frontend/src/query_builder/QueryVisualizationObjectDetailTable.jsx index 7d776afff900929f7213e1c7f2072f153f03c136..71933278f21bf3117bdffbd130527c182b62993e 100644 --- a/frontend/src/query_builder/QueryVisualizationObjectDetailTable.jsx +++ b/frontend/src/query_builder/QueryVisualizationObjectDetailTable.jsx @@ -4,6 +4,7 @@ import ExpandableString from './ExpandableString.jsx'; import Icon from 'metabase/components/Icon.jsx'; import IconBorder from 'metabase/components/IconBorder.jsx'; import LoadingSpinner from 'metabase/components/LoadingSpinner.jsx'; +import { foreignKeyCountsByOriginTable } from 'metabase/lib/schema_metadata'; import { singularize, inflect } from 'inflection'; import cx from "classnames"; @@ -102,8 +103,12 @@ export default class QueryVisualizationObjectDetailTable extends Component { return (<p className="my4 text-centered">No relationships found.</p>); } + const fkCountsByTable = foreignKeyCountsByOriginTable(this.props.tableForeignKeys); + var component = this; - var relationships = this.props.tableForeignKeys.map(function(fk) { + var relationships = this.props.tableForeignKeys.sort(function(a, b) { + return a.origin.table.display_name.localeCompare(b.origin.table.display_name); + }).map(function(fk) { var fkCount = ( <LoadingSpinner width="25px" height="25px" /> @@ -128,11 +133,12 @@ export default class QueryVisualizationObjectDetailTable extends Component { ); var relationName = inflect(fk.origin.table.display_name, fkCountValue); + const via = (fkCountsByTable[fk.origin.table.id] > 1) ? 
(<span className="text-grey-3 text-normal"> via {fk.origin.display_name}</span>) : null; var info = ( <div> <h2>{fkCount}</h2> - <h5 className="block">{relationName}</h5> + <h5 className="block">{relationName}{via}</h5> </div> ); var fkReference; diff --git a/frontend/src/query_builder/QueryVisualizationTable.jsx b/frontend/src/query_builder/QueryVisualizationTable.jsx index ea19b54e9a669fadb0fcc1e3996be73bca0e1026..64879b19e834d20bb25ebbe0b02d17794146fdd4 100644 --- a/frontend/src/query_builder/QueryVisualizationTable.jsx +++ b/frontend/src/query_builder/QueryVisualizationTable.jsx @@ -11,6 +11,13 @@ import { formatValue, capitalize } from "metabase/lib/formatting"; import _ from "underscore"; import cx from "classnames"; +const QUICK_FILTERS = [ + { name: "<", value: "<" }, + { name: "=", value: "=" }, + { name: "≠", value: "!=" }, + { name: ">", value: ">" } +]; + export default class QueryVisualizationTable extends Component { constructor(props, context) { super(props, context); @@ -185,8 +192,8 @@ export default class QueryVisualizationTable extends Component { > <div className="bg-white bordered shadowed p1"> <ul className="h1 flex align-center"> - { ["<", "=", "≠", ">"].map(operator => - <li key={operator} className="p2 text-brand-hover" onClick={this.popoverFilterClicked.bind(this, rowIndex, cellDataKey, operator)}>{operator}</li> + { QUICK_FILTERS.map(({ name, value }) => + <li key={value} className="p2 text-brand-hover" onClick={this.popoverFilterClicked.bind(this, rowIndex, cellDataKey, value)}>{name}</li> )} </ul> </div> diff --git a/frontend/src/setup/components/DatabaseStep.jsx b/frontend/src/setup/components/DatabaseStep.jsx index b57abbbd506c6800f5d005f93a1a2ee166bd7939..e50236d04392c510a267c28b1bbc4841fea5eab0 100644 --- a/frontend/src/setup/components/DatabaseStep.jsx +++ b/frontend/src/setup/components/DatabaseStep.jsx @@ -4,7 +4,7 @@ import _ from "underscore"; import DatabaseDetailsForm from "metabase/components/database/DatabaseDetailsForm.jsx"; import FormField from "metabase/components/form/FormField.jsx"; import MetabaseAnalytics from "metabase/lib/analytics"; -import MetabaseCore from "metabase/lib/core"; +import MetabaseSettings from "metabase/lib/settings"; import StepTitle from './StepTitle.jsx' import CollapsedStep from "./CollapsedStep.jsx"; @@ -92,18 +92,15 @@ export default class DatabaseStep extends Component { } renderEngineSelect() { + let engines = MetabaseSettings.get('engines'); let { engine } = this.state, - engines = _.keys(MetabaseCore.ENGINES).sort(); - - let options = [(<option value="">Select the type of Database you use</option>)]; - engines.forEach(function(opt) { - options.push((<option key={opt} value={opt}>{MetabaseCore.ENGINES[opt].name}</option>)) - }); + engineNames = _.keys(engines).sort(); return ( <label className="Select Form-offset mt1"> <select ref="engine" defaultValue={engine} onChange={this.chooseDatabaseEngine.bind(this)}> - {options} + <option value="">Select the type of Database you use</option> + {engineNames.map(opt => <option key={opt} value={opt}>{engines[opt]['driver-name']}</option>)} </select> </label> ); @@ -112,6 +109,7 @@ export default class DatabaseStep extends Component { render() { let { activeStep, databaseDetails, dispatch, stepNumber } = this.props; let { engine, formError } = this.state; + let engines = MetabaseSettings.get('engines'); let stepText = 'Add your data'; if (activeStep > stepNumber) { @@ -134,19 +132,20 @@ export default class DatabaseStep extends Component { </FormField> { engine !== "" ? 
- <DatabaseDetailsForm - details={(databaseDetails && 'details' in databaseDetails) ? databaseDetails.details : null} - engine={engine} - formError={formError} - hiddenFields={{ ssl: true }} - submitFn={this.detailsCaptured.bind(this)} - submitButtonText={'Next'}> - </DatabaseDetailsForm> - : null } - - <div className="Form-field Form-offset"> - <a className="link" href="#" onClick={this.skipDatabase.bind(this)}>I'll add my data later</a> - </div> + <DatabaseDetailsForm + details={(databaseDetails && 'details' in databaseDetails) ? databaseDetails.details : null} + engine={engine} + engines={engines} + formError={formError} + hiddenFields={{ ssl: true }} + submitFn={this.detailsCaptured.bind(this)} + submitButtonText={'Next'}> + </DatabaseDetailsForm> + : null } + + <div className="Form-field Form-offset"> + <a className="link" href="#" onClick={this.skipDatabase.bind(this)}>I'll add my data later</a> + </div> </div> </section> ); diff --git a/frontend/src/setup/reducers.js b/frontend/src/setup/reducers.js index 96f0abb9c70fea7521be6fde726e80cb3a746373..7ce670c4587bef5d4137eee55975e01455b34e45 100644 --- a/frontend/src/setup/reducers.js +++ b/frontend/src/setup/reducers.js @@ -29,7 +29,7 @@ export const allowTracking = handleActions({ }, true); export const setupError = handleActions({ - [SUBMIT_SETUP]: { next: (state, { payload }) => payload} + [SUBMIT_SETUP]: { next: (state, { payload }) => payload } }, null); export const setupComplete = handleActions({ diff --git a/frontend/test/unit/lib/schema_metadata.spec.js b/frontend/test/unit/lib/schema_metadata.spec.js index 74547d87513acbc48360e44b192142b3689f0bf6..e33605e3f023b0aba8348e41381fc5c71d81ccce 100644 --- a/frontend/test/unit/lib/schema_metadata.spec.js +++ b/frontend/test/unit/lib/schema_metadata.spec.js @@ -7,7 +7,8 @@ import { NUMBER, BOOLEAN, LOCATION, - COORDINATE + COORDINATE, + foreignKeyCountsByOriginTable } from 'metabase/lib/schema_metadata'; describe('schema_metadata', () => { @@ -44,4 +45,16 @@ describe('schema_metadata', () => { expect(getFieldType({ base_type: 'DERP DERP DERP' })).toEqual(undefined) }); }); + + describe('foreignKeyCountsByOriginTable', () => { + it('should work with null input', () => { + expect(foreignKeyCountsByOriginTable(null)).toEqual(null) + }); + it('should require an array as input', () => { + expect(foreignKeyCountsByOriginTable({})).toEqual(null) + }); + it('should count occurrences by origin.table.id', () => { + expect(foreignKeyCountsByOriginTable([{ origin: {table: {id: 123}} }, { origin: {table: {id: 123}} }, { origin: {table: {id: 123}} }, { origin: {table: {id: 456}} }])).toEqual({123: 3, 456: 1}) + }); + }); }); diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json index a8c25a2e0fe7272217cf34d3c39eb36900e3bc15..71784865e06e7f9449675b65dfd6711d872dfcd1 100644 --- a/npm-shrinkwrap.json +++ b/npm-shrinkwrap.json @@ -4,7 +4,7 @@ "dependencies": { "ace-builds": { "version": "1.2.0", - "from": "git://github.com/ajaxorg/ace-builds.git#0982db4853e3c967756b83b70638b9761d7f801d", + "from": "git://github.com/ajaxorg/ace-builds.git#v1.2.0", "resolved": "git://github.com/ajaxorg/ace-builds.git#0982db4853e3c967756b83b70638b9761d7f801d" }, "angular": { @@ -14,7 +14,7 @@ }, "angular-cookie": { "version": "1.0.0", - "from": "git://github.com/ivpusic/angular-cookie.git#3440b84102bf97d83d344ab6db726b4f8cadd09a", + "from": "git://github.com/ivpusic/angular-cookie.git#v4.0.6", "resolved": "git://github.com/ivpusic/angular-cookie.git#3440b84102bf97d83d344ab6db726b4f8cadd09a" }, "angular-cookies": { @@ 
-1179,7 +1179,7 @@ "dependencies": { "minimist": { "version": "0.0.8", - "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "from": "minimist@0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" } } @@ -5863,124 +5863,124 @@ "dependencies": { "bluebird": { "version": "2.10.2", - "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", + "from": "bluebird@>=2.9.34 <3.0.0", "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" }, "blueimp-tmpl": { "version": "2.5.5", - "from": "https://registry.npmjs.org/blueimp-tmpl/-/blueimp-tmpl-2.5.5.tgz", + "from": "blueimp-tmpl@>=2.5.4 <3.0.0", "resolved": "https://registry.npmjs.org/blueimp-tmpl/-/blueimp-tmpl-2.5.5.tgz" }, "html-minifier": { "version": "0.7.2", - "from": "https://registry.npmjs.org/html-minifier/-/html-minifier-0.7.2.tgz", + "from": "html-minifier@>=0.7.2 <0.8.0", "resolved": "https://registry.npmjs.org/html-minifier/-/html-minifier-0.7.2.tgz", "dependencies": { "change-case": { "version": "2.3.0", - "from": "https://registry.npmjs.org/change-case/-/change-case-2.3.0.tgz", + "from": "change-case@>=2.3.0 <2.4.0", "resolved": "https://registry.npmjs.org/change-case/-/change-case-2.3.0.tgz", "dependencies": { "camel-case": { - "version": "1.1.2", - "from": "https://registry.npmjs.org/camel-case/-/camel-case-1.1.2.tgz", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-1.1.2.tgz" + "version": "1.2.0", + "from": "camel-case@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-1.2.0.tgz" }, "constant-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/constant-case/-/constant-case-1.1.1.tgz", + "from": "constant-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-1.1.1.tgz" }, "dot-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/dot-case/-/dot-case-1.1.1.tgz", + "from": "dot-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-1.1.1.tgz" }, "is-lower-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/is-lower-case/-/is-lower-case-1.1.1.tgz", + "from": "is-lower-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/is-lower-case/-/is-lower-case-1.1.1.tgz" }, "is-upper-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/is-upper-case/-/is-upper-case-1.1.1.tgz", + "from": "is-upper-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/is-upper-case/-/is-upper-case-1.1.1.tgz" }, "lower-case": { "version": "1.1.2", - "from": "https://registry.npmjs.org/lower-case/-/lower-case-1.1.2.tgz", + "from": "lower-case@>=1.1.1 <2.0.0", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-1.1.2.tgz" }, "lower-case-first": { "version": "1.0.0", - "from": "https://registry.npmjs.org/lower-case-first/-/lower-case-first-1.0.0.tgz", + "from": "lower-case-first@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/lower-case-first/-/lower-case-first-1.0.0.tgz" }, "param-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/param-case/-/param-case-1.1.1.tgz", + "from": "param-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/param-case/-/param-case-1.1.1.tgz" }, "pascal-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/pascal-case/-/pascal-case-1.1.1.tgz", + "from": "pascal-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-1.1.1.tgz" }, "path-case": { "version": "1.1.1", - "from": 
"https://registry.npmjs.org/path-case/-/path-case-1.1.1.tgz", + "from": "path-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/path-case/-/path-case-1.1.1.tgz" }, "sentence-case": { "version": "1.1.2", - "from": "https://registry.npmjs.org/sentence-case/-/sentence-case-1.1.2.tgz", + "from": "sentence-case@>=1.1.1 <2.0.0", "resolved": "https://registry.npmjs.org/sentence-case/-/sentence-case-1.1.2.tgz" }, "snake-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/snake-case/-/snake-case-1.1.1.tgz", + "from": "snake-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-1.1.1.tgz" }, "swap-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/swap-case/-/swap-case-1.1.1.tgz", + "from": "swap-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/swap-case/-/swap-case-1.1.1.tgz" }, "title-case": { "version": "1.1.1", - "from": "https://registry.npmjs.org/title-case/-/title-case-1.1.1.tgz", + "from": "title-case@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/title-case/-/title-case-1.1.1.tgz" }, "upper-case": { "version": "1.1.2", - "from": "https://registry.npmjs.org/upper-case/-/upper-case-1.1.2.tgz", + "from": "upper-case@>=1.1.1 <2.0.0", "resolved": "https://registry.npmjs.org/upper-case/-/upper-case-1.1.2.tgz" }, "upper-case-first": { "version": "1.1.1", - "from": "https://registry.npmjs.org/upper-case-first/-/upper-case-first-1.1.1.tgz", + "from": "upper-case-first@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/upper-case-first/-/upper-case-first-1.1.1.tgz" } } }, "clean-css": { "version": "3.1.9", - "from": "https://registry.npmjs.org/clean-css/-/clean-css-3.1.9.tgz", + "from": "clean-css@>=3.1.0 <3.2.0", "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-3.1.9.tgz", "dependencies": { "commander": { "version": "2.6.0", - "from": "https://registry.npmjs.org/commander/-/commander-2.6.0.tgz", + "from": "commander@>=2.6.0 <2.7.0", "resolved": "https://registry.npmjs.org/commander/-/commander-2.6.0.tgz" }, "source-map": { "version": "0.1.43", - "from": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", + "from": "source-map@>=0.1.43 <0.2.0", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", "dependencies": { "amdefine": { "version": "1.0.0", - "from": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz", + "from": "amdefine@>=0.0.4", "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz" } } @@ -5989,32 +5989,32 @@ }, "cli": { "version": "0.6.6", - "from": "https://registry.npmjs.org/cli/-/cli-0.6.6.tgz", + "from": "cli@>=0.6.0 <0.7.0", "resolved": "https://registry.npmjs.org/cli/-/cli-0.6.6.tgz", "dependencies": { "glob": { "version": "3.2.11", - "from": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "from": "glob@>=3.2.1 <3.3.0", "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", "dependencies": { "inherits": { "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "from": "inherits@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" }, "minimatch": { "version": "0.3.0", - "from": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "from": "minimatch@>=0.3.0 <0.4.0", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", "dependencies": { "lru-cache": { "version": "2.7.0", - "from": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.0.tgz", + "from": "lru-cache@>=2.0.0 <3.0.0", "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.0.tgz" }, "sigmund": { "version": "1.0.1", - "from": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "from": "sigmund@>=1.0.0 <1.1.0", "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" } } @@ -6023,44 +6023,44 @@ }, "exit": { "version": "0.1.2", - "from": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "from": "exit@0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" } } }, "concat-stream": { "version": "1.4.10", - "from": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.4.10.tgz", + "from": "concat-stream@>=1.4.0 <1.5.0", "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.4.10.tgz", "dependencies": { "inherits": { "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "from": "inherits@>=2.0.1 <2.1.0", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" }, "typedarray": { "version": "0.0.6", - "from": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "from": "typedarray@>=0.0.5 <0.1.0", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" }, "readable-stream": { "version": "1.1.13", - "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz", + "from": "readable-stream@>=1.1.9 <1.2.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz", "dependencies": { "core-util-is": { "version": "1.0.1", - "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz", + "from": "core-util-is@>=1.0.0 <1.1.0", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" }, "isarray": { "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "from": "isarray@0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" }, "string_decoder": { "version": "0.10.31", - "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "from": "string_decoder@>=0.10.0 <0.11.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" } } @@ -6069,54 +6069,54 @@ }, "uglify-js": { "version": "2.4.24", - "from": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.4.24.tgz", + "from": "uglify-js@>=2.4.0 <2.5.0", "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.4.24.tgz", "dependencies": { "async": { "version": "0.2.10", - "from": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "from": "async@>=0.2.6 <0.3.0", "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" }, "source-map": { "version": "0.1.34", - "from": "https://registry.npmjs.org/source-map/-/source-map-0.1.34.tgz", + "from": "source-map@0.1.34", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.34.tgz", "dependencies": { "amdefine": { "version": "1.0.0", - "from": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz", + "from": "amdefine@>=0.0.4", "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz" } } }, "uglify-to-browserify": { "version": "1.0.2", - "from": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", + "from": "uglify-to-browserify@>=1.0.0 <1.1.0", "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz" }, "yargs": { "version": "3.5.4", - "from": "https://registry.npmjs.org/yargs/-/yargs-3.5.4.tgz", + "from": "yargs@>=3.5.4 <3.6.0", "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-3.5.4.tgz", "dependencies": { "camelcase": { "version": "1.2.1", - "from": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", + "from": "camelcase@>=1.0.2 <2.0.0", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz" }, "decamelize": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/decamelize/-/decamelize-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.0.0.tgz" + "version": "1.1.1", + "from": "decamelize@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.1.1.tgz" }, "window-size": { "version": "0.1.0", - "from": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", + "from": "window-size@0.1.0", "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz" }, "wordwrap": { "version": "0.0.2", - "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", + "from": "wordwrap@0.0.2", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz" } } @@ -6125,14 +6125,14 @@ }, "relateurl": { "version": "0.2.6", - "from": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.6.tgz", + "from": "relateurl@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.6.tgz" } } }, "lodash": { "version": "3.10.1", - "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "from": "lodash@>=3.10.0 <4.0.0", "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" } } @@ -8071,40 +8071,6 @@ "version": "1.0.2", "from": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" - }, - "bufferutil": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } - }, - "utf-8-validate": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } } } } @@ -8289,40 +8255,6 @@ "version": "1.0.2", "from": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" - }, - "bufferutil": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": 
"https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } - }, - "utf-8-validate": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } } } }, @@ -9091,787 +9023,1874 @@ "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" } } - } - } - }, - "moment": { - "version": "2.10.6", - "from": "https://registry.npmjs.org/moment/-/moment-2.10.6.tgz", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.10.6.tgz" - }, - "node-libs-browser": { - "version": "0.5.3", - "from": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-0.5.3.tgz", - "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-0.5.3.tgz", - "dependencies": { - "assert": { - "version": "1.3.0", - "from": "https://registry.npmjs.org/assert/-/assert-1.3.0.tgz", - "resolved": "https://registry.npmjs.org/assert/-/assert-1.3.0.tgz" - }, - "browserify-zlib": { - "version": "0.1.4", - "from": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", - "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", - "dependencies": { - "pako": { - "version": "0.2.8", - "from": "https://registry.npmjs.org/pako/-/pako-0.2.8.tgz", - "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.8.tgz" - } - } }, - "buffer": { - "version": "3.5.0", - "from": "https://registry.npmjs.org/buffer/-/buffer-3.5.0.tgz", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-3.5.0.tgz", + "webpack": { + "version": "1.12.4", + "from": "webpack@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-1.12.4.tgz", "dependencies": { - "base64-js": { - "version": "0.0.8", - "from": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.8.tgz", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.8.tgz" + "async": { + "version": "1.5.0", + "from": "async@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.0.tgz" }, - "ieee754": { - "version": "1.1.6", - "from": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.6.tgz", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.6.tgz" + "clone": { + "version": "1.0.2", + "from": "clone@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.2.tgz" }, - "is-array": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/is-array/-/is-array-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/is-array/-/is-array-1.0.1.tgz" - } - } - }, - "console-browserify": { - "version": "1.1.0", - "from": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", - "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", - "dependencies": { - "date-now": { - "version": "0.1.4", - "from": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz", - "resolved": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz" - } - } - }, - "constants-browserify": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-0.0.1.tgz", - "resolved": 
"https://registry.npmjs.org/constants-browserify/-/constants-browserify-0.0.1.tgz" - }, - "crypto-browserify": { - "version": "3.2.8", - "from": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.2.8.tgz", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.2.8.tgz", - "dependencies": { - "pbkdf2-compat": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/pbkdf2-compat/-/pbkdf2-compat-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/pbkdf2-compat/-/pbkdf2-compat-2.0.1.tgz" + "enhanced-resolve": { + "version": "0.9.1", + "from": "enhanced-resolve@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-0.9.1.tgz", + "dependencies": { + "graceful-fs": { + "version": "4.1.2", + "from": "graceful-fs@>=4.1.2 <5.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz" + } + } }, - "ripemd160": { + "esprima": { + "version": "2.7.0", + "from": "esprima@>=2.5.0 <3.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-2.7.0.tgz" + }, + "interpret": { + "version": "0.6.6", + "from": "interpret@>=0.6.4 <0.7.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-0.6.6.tgz" + }, + "memory-fs": { "version": "0.2.0", - "from": "https://registry.npmjs.org/ripemd160/-/ripemd160-0.2.0.tgz", - "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-0.2.0.tgz" + "from": "memory-fs@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.2.0.tgz" }, - "sha.js": { - "version": "2.2.6", - "from": "https://registry.npmjs.org/sha.js/-/sha.js-2.2.6.tgz", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.2.6.tgz" - } - } - }, - "domain-browser": { - "version": "1.1.4", - "from": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.1.4.tgz", - "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.1.4.tgz" - }, - "events": { - "version": "1.1.0", - "from": "https://registry.npmjs.org/events/-/events-1.1.0.tgz", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.0.tgz" - }, - "http-browserify": { - "version": "1.7.0", - "from": "https://registry.npmjs.org/http-browserify/-/http-browserify-1.7.0.tgz", - "resolved": "https://registry.npmjs.org/http-browserify/-/http-browserify-1.7.0.tgz", - "dependencies": { - "Base64": { - "version": "0.2.1", - "from": "https://registry.npmjs.org/Base64/-/Base64-0.2.1.tgz", - "resolved": "https://registry.npmjs.org/Base64/-/Base64-0.2.1.tgz" + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "dependencies": { + "minimist": { + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + } }, - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" - } - } - }, - "https-browserify": { - "version": "0.0.0", - "from": "https://registry.npmjs.org/https-browserify/-/https-browserify-0.0.0.tgz", - "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-0.0.0.tgz" - }, - "os-browserify": { - "version": "0.1.2", - "from": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.1.2.tgz", - "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.1.2.tgz" - }, - "path-browserify": { - "version": "0.0.0", - "from": 
"https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz", - "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz" - }, - "process": { - "version": "0.11.2", - "from": "https://registry.npmjs.org/process/-/process-0.11.2.tgz", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.2.tgz" - }, - "punycode": { - "version": "1.3.2", - "from": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" - }, - "querystring-es3": { - "version": "0.2.1", - "from": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", - "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz" - }, - "readable-stream": { - "version": "1.1.13", - "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz", - "dependencies": { - "core-util-is": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + "optimist": { + "version": "0.6.1", + "from": "optimist@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "wordwrap@>=0.0.2 <0.1.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + }, + "minimist": { + "version": "0.0.10", + "from": "minimist@>=0.0.1 <0.1.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + } + } }, - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "supports-color": { + "version": "3.1.2", + "from": "supports-color@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.2.tgz", + "dependencies": { + "has-flag": { + "version": "1.0.0", + "from": "has-flag@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" + } + } }, - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" - } - } - }, - "stream-browserify": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-1.0.0.tgz", - "dependencies": { - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" - } - } - }, - "string_decoder": { - "version": "0.10.31", - "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - }, - "timers-browserify": { - "version": "1.4.1", - "from": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-1.4.1.tgz", - "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-1.4.1.tgz" - }, - "tty-browserify": { - "version": "0.0.0", - "from": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", - "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz" - }, - "url": { - "version": "0.10.3", - 
"from": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "dependencies": { - "querystring": { - "version": "0.2.0", - "from": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" - } - } - }, - "util": { - "version": "0.10.3", - "from": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", - "dependencies": { - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" - } - } - }, - "vm-browserify": { - "version": "0.0.4", - "from": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", - "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", - "dependencies": { - "indexof": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz" - } - } - } - } - }, - "normalizr": { - "version": "0.1.3", - "from": "https://registry.npmjs.org/normalizr/-/normalizr-0.1.3.tgz", - "resolved": "https://registry.npmjs.org/normalizr/-/normalizr-0.1.3.tgz", - "dependencies": { - "lodash": { - "version": "3.10.1", - "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" - } - } - }, - "password-generator": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/password-generator/-/password-generator-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/password-generator/-/password-generator-2.0.1.tgz", - "dependencies": { - "optimist": { - "version": "0.6.1", - "from": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "dependencies": { - "wordwrap": { - "version": "0.0.3", - "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + "tapable": { + "version": "0.1.9", + "from": "tapable@>=0.1.8 <0.2.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-0.1.9.tgz" }, - "minimist": { - "version": "0.0.10", - "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" - } - } - } - } - }, - "protractor": { - "version": "2.4.0", - "from": "https://registry.npmjs.org/protractor/-/protractor-2.4.0.tgz", - "resolved": "https://registry.npmjs.org/protractor/-/protractor-2.4.0.tgz", - "dependencies": { - "request": { - "version": "2.57.0", - "from": "https://registry.npmjs.org/request/-/request-2.57.0.tgz", - "resolved": "https://registry.npmjs.org/request/-/request-2.57.0.tgz", - "dependencies": { - "bl": { - "version": "0.9.4", - "from": "https://registry.npmjs.org/bl/-/bl-0.9.4.tgz", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.9.4.tgz", + "uglify-js": { + "version": "2.5.0", + "from": "uglify-js@>=2.5.0 <2.6.0", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.5.0.tgz", "dependencies": { - "readable-stream": { - "version": "1.0.33", - "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz", + "async": { + "version": "0.2.10", + "from": "async@>=0.2.6 
<0.3.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + }, + "source-map": { + "version": "0.5.3", + "from": "source-map@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.3.tgz" + }, + "uglify-to-browserify": { + "version": "1.0.2", + "from": "uglify-to-browserify@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz" + }, + "yargs": { + "version": "3.5.4", + "from": "yargs@>=3.5.4 <3.6.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.5.4.tgz", "dependencies": { - "core-util-is": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + "camelcase": { + "version": "1.2.1", + "from": "camelcase@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz" }, - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "decamelize": { + "version": "1.1.1", + "from": "decamelize@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.1.1.tgz" }, - "string_decoder": { - "version": "0.10.31", - "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + "window-size": { + "version": "0.1.0", + "from": "window-size@0.1.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz" }, - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + "wordwrap": { + "version": "0.0.2", + "from": "wordwrap@0.0.2", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz" } } } } }, - "caseless": { - "version": "0.10.0", - "from": "https://registry.npmjs.org/caseless/-/caseless-0.10.0.tgz", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.10.0.tgz" - }, - "forever-agent": { - "version": "0.6.1", - "from": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" - }, - "form-data": { - "version": "0.2.0", - "from": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz", + "watchpack": { + "version": "0.2.9", + "from": "watchpack@>=0.2.1 <0.3.0", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-0.2.9.tgz", "dependencies": { "async": { "version": "0.9.2", - "from": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "from": "async@>=0.9.0 <0.10.0", "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" }, - "combined-stream": { - "version": "0.0.7", - "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "chokidar": { + "version": "1.2.0", + "from": "chokidar@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-1.2.0.tgz", "dependencies": { - "delayed-stream": { - "version": "0.0.5", - "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" - } - } - } 
- } - }, - "json-stringify-safe": { - "version": "5.0.1", - "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" - }, - "mime-types": { - "version": "2.0.14", - "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz", - "dependencies": { - "mime-db": { - "version": "1.12.0", - "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz" - } - } - }, - "node-uuid": { - "version": "1.4.3", - "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.3.tgz", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.3.tgz" - }, - "qs": { - "version": "3.1.0", - "from": "https://registry.npmjs.org/qs/-/qs-3.1.0.tgz", - "resolved": "https://registry.npmjs.org/qs/-/qs-3.1.0.tgz" - }, - "tunnel-agent": { - "version": "0.4.1", - "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz" - }, - "tough-cookie": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.0.0.tgz" - }, - "http-signature": { - "version": "0.11.0", - "from": "https://registry.npmjs.org/http-signature/-/http-signature-0.11.0.tgz", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.11.0.tgz", - "dependencies": { - "assert-plus": { - "version": "0.1.5", - "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz" - }, - "asn1": { - "version": "0.1.11", - "from": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" - }, - "ctype": { - "version": "0.5.3", - "from": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz", - "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz" - } - } - }, - "oauth-sign": { - "version": "0.8.0", - "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz" - }, - "hawk": { - "version": "2.3.1", - "from": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz", - "dependencies": { - "hoek": { - "version": "2.16.3", - "from": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" - }, - "boom": { - "version": "2.9.0", - "from": "https://registry.npmjs.org/boom/-/boom-2.9.0.tgz", - "resolved": "https://registry.npmjs.org/boom/-/boom-2.9.0.tgz" - }, - "cryptiles": { - "version": "2.0.5", - "from": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" - }, - "sntp": { - "version": "1.0.9", - "from": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" - } - } - }, - "aws-sign2": { - "version": "0.5.0", - "from": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz" - }, - "stringstream": { - "version": "0.0.4", - "from": 
"https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz" - }, - "combined-stream": { - "version": "1.0.5", - "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", - "dependencies": { - "delayed-stream": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" - } - } - }, - "isstream": { - "version": "0.1.2", - "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" - }, - "har-validator": { - "version": "1.8.0", - "from": "https://registry.npmjs.org/har-validator/-/har-validator-1.8.0.tgz", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-1.8.0.tgz", - "dependencies": { - "bluebird": { - "version": "2.10.2", - "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" - }, - "chalk": { - "version": "1.1.1", - "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz", - "dependencies": { - "ansi-styles": { - "version": "2.1.0", - "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz" + "anymatch": { + "version": "1.3.0", + "from": "anymatch@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-1.3.0.tgz", + "dependencies": { + "micromatch": { + "version": "2.3.2", + "from": "micromatch@>=2.1.5 <3.0.0", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.2.tgz", + "dependencies": { + "arr-diff": { + "version": "1.1.0", + "from": "arr-diff@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-1.1.0.tgz", + "dependencies": { + "arr-flatten": { + "version": "1.0.1", + "from": "arr-flatten@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.0.1.tgz" + }, + "array-slice": { + "version": "0.2.3", + "from": "array-slice@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/array-slice/-/array-slice-0.2.3.tgz" + } + } + }, + "array-unique": { + "version": "0.2.1", + "from": "array-unique@>=0.2.1 <0.3.0", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz" + }, + "braces": { + "version": "1.8.2", + "from": "braces@>=1.8.1 <2.0.0", + "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.2.tgz", + "dependencies": { + "expand-range": { + "version": "1.8.1", + "from": "expand-range@>=1.8.1 <2.0.0", + "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.1.tgz", + "dependencies": { + "fill-range": { + "version": "2.2.2", + "from": "fill-range@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-2.2.2.tgz", + "dependencies": { + "is-number": { + "version": "1.1.2", + "from": "is-number@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-1.1.2.tgz" + }, + "isobject": { + "version": "1.0.2", + "from": "isobject@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-1.0.2.tgz" + }, + "randomatic": { + "version": "1.1.1", + "from": "randomatic@>=1.1.0 <2.0.0", + "resolved": 
"https://registry.npmjs.org/randomatic/-/randomatic-1.1.1.tgz", + "dependencies": { + "is-number": { + "version": "2.0.2", + "from": "is-number@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.0.2.tgz", + "dependencies": { + "kind-of": { + "version": "1.1.0", + "from": "kind-of@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-1.1.0.tgz" + } + } + } + } + }, + "repeat-string": { + "version": "1.5.2", + "from": "repeat-string@>=1.5.2 <2.0.0", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.5.2.tgz" + } + } + } + } + }, + "preserve": { + "version": "0.2.0", + "from": "preserve@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/preserve/-/preserve-0.2.0.tgz" + }, + "repeat-element": { + "version": "1.1.2", + "from": "repeat-element@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.2.tgz" + } + } + }, + "expand-brackets": { + "version": "0.1.4", + "from": "expand-brackets@>=0.1.4 <0.2.0", + "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.4.tgz" + }, + "extglob": { + "version": "0.3.1", + "from": "extglob@>=0.3.1 <0.4.0", + "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.1.tgz", + "dependencies": { + "ansi-green": { + "version": "0.1.1", + "from": "ansi-green@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/ansi-green/-/ansi-green-0.1.1.tgz", + "dependencies": { + "ansi-wrap": { + "version": "0.1.0", + "from": "ansi-wrap@0.1.0", + "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz" + } + } + }, + "success-symbol": { + "version": "0.1.0", + "from": "success-symbol@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/success-symbol/-/success-symbol-0.1.0.tgz" + } + } + }, + "filename-regex": { + "version": "2.0.0", + "from": "filename-regex@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/filename-regex/-/filename-regex-2.0.0.tgz" + }, + "is-extglob": { + "version": "1.0.0", + "from": "is-extglob@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz" + }, + "kind-of": { + "version": "2.0.1", + "from": "kind-of@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-2.0.1.tgz", + "dependencies": { + "is-buffer": { + "version": "1.1.0", + "from": "is-buffer@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.0.tgz" + } + } + }, + "lazy-cache": { + "version": "0.2.4", + "from": "lazy-cache@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-0.2.4.tgz" + }, + "normalize-path": { + "version": "2.0.0", + "from": "normalize-path@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.0.tgz" + }, + "object.omit": { + "version": "2.0.0", + "from": "object.omit@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/object.omit/-/object.omit-2.0.0.tgz", + "dependencies": { + "for-own": { + "version": "0.1.3", + "from": "for-own@>=0.1.3 <0.2.0", + "resolved": "https://registry.npmjs.org/for-own/-/for-own-0.1.3.tgz", + "dependencies": { + "for-in": { + "version": "0.1.4", + "from": "for-in@>=0.1.4 <0.2.0", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-0.1.4.tgz" + } + } + }, + "is-extendable": { + "version": "0.1.1", + "from": "is-extendable@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" + } + } + }, + "parse-glob": { + "version": "3.0.4", + "from": 
"parse-glob@>=3.0.4 <4.0.0", + "resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz", + "dependencies": { + "glob-base": { + "version": "0.3.0", + "from": "glob-base@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz" + }, + "is-dotfile": { + "version": "1.0.2", + "from": "is-dotfile@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.2.tgz" + } + } + }, + "regex-cache": { + "version": "0.4.2", + "from": "regex-cache@>=0.4.2 <0.5.0", + "resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.2.tgz", + "dependencies": { + "is-equal-shallow": { + "version": "0.1.3", + "from": "is-equal-shallow@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz" + }, + "is-primitive": { + "version": "2.0.0", + "from": "is-primitive@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-2.0.0.tgz" + } + } + } + } + } + } }, - "escape-string-regexp": { - "version": "1.0.3", - "from": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz" + "arrify": { + "version": "1.0.0", + "from": "arrify@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.0.tgz" }, - "has-ansi": { + "async-each": { + "version": "0.1.6", + "from": "async-each@>=0.1.5 <0.2.0", + "resolved": "https://registry.npmjs.org/async-each/-/async-each-0.1.6.tgz" + }, + "glob-parent": { "version": "2.0.0", - "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "from": "glob-parent@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz" + }, + "is-binary-path": { + "version": "1.0.1", + "from": "is-binary-path@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", "dependencies": { - "ansi-regex": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + "binary-extensions": { + "version": "1.3.1", + "from": "binary-extensions@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.3.1.tgz" } } }, - "strip-ansi": { - "version": "3.0.0", - "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", + "is-glob": { + "version": "2.0.1", + "from": "is-glob@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", "dependencies": { - "ansi-regex": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + "is-extglob": { + "version": "1.0.0", + "from": "is-extglob@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz" } } }, - "supports-color": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" - } - } - }, - "commander": { - "version": "2.8.1", - "from": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", - "resolved": 
"https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", - "dependencies": { - "graceful-readlink": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" - } - } - }, - "is-my-json-valid": { - "version": "2.12.2", - "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.2.tgz", - "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.2.tgz", - "dependencies": { - "generate-function": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" - }, - "generate-object-property": { - "version": "1.2.0", - "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", - "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "lodash.flatten": { + "version": "3.0.2", + "from": "lodash.flatten@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-3.0.2.tgz", "dependencies": { - "is-property": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + "lodash._baseflatten": { + "version": "3.1.4", + "from": "lodash._baseflatten@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash._baseflatten/-/lodash._baseflatten-3.1.4.tgz", + "dependencies": { + "lodash.isarguments": { + "version": "3.0.4", + "from": "lodash.isarguments@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.0.4.tgz" + }, + "lodash.isarray": { + "version": "3.0.4", + "from": "lodash.isarray@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" + } + } + }, + "lodash._isiterateecall": { + "version": "3.0.9", + "from": "lodash._isiterateecall@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" } } }, - "jsonpointer": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz" + "path-is-absolute": { + "version": "1.0.0", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" }, - "xtend": { - "version": "4.0.0", - "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.0.tgz", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.0.tgz" - } - } - } - } - } - } - }, - "selenium-webdriver": { - "version": "2.47.0", - "from": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-2.47.0.tgz", - "resolved": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-2.47.0.tgz", - "dependencies": { - "rimraf": { - "version": "2.4.3", - "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.3.tgz", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.3.tgz", - "dependencies": { - "glob": { - "version": "5.0.15", - "from": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", - "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", - "dependencies": { - "inflight": { - "version": "1.0.4", - "from": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.4.tgz", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.4.tgz", - "dependencies": { - "wrappy": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz" - } - } - }, - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" - }, - "minimatch": { - "version": "3.0.0", - "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.0.tgz", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.0.tgz", + "readdirp": { + "version": "2.0.0", + "from": "readdirp@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.0.0.tgz", "dependencies": { - "brace-expansion": { - "version": "1.1.1", - "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz", + "minimatch": { + "version": "2.0.10", + "from": "minimatch@>=2.0.10 <3.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz", "dependencies": { - "balanced-match": { - "version": "0.2.0", - "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.0.tgz", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.0.tgz" + "brace-expansion": { + "version": "1.1.1", + "from": "brace-expansion@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz", + "dependencies": { + "balanced-match": { + "version": "0.2.1", + "from": "balanced-match@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.1.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "readable-stream": { + "version": "2.0.4", + "from": "readable-stream@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.4.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" }, - "concat-map": { + "inherits": { + "version": "2.0.1", + "from": "inherits@>=2.0.1 <2.1.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "isarray": { "version": "0.0.1", - "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "process-nextick-args": { + "version": "1.0.3", + "from": "process-nextick-args@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.3.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@>=0.10.0 <0.11.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "util-deprecate@>=1.0.1 <1.1.0", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" } } } } }, - "once": { - "version": "1.3.2", - "from": "https://registry.npmjs.org/once/-/once-1.3.2.tgz", - "resolved": 
"https://registry.npmjs.org/once/-/once-1.3.2.tgz", + "fsevents": { + "version": "1.0.5", + "from": "fsevents@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.0.5.tgz", "dependencies": { - "wrappy": { + "nan": { + "version": "2.1.0", + "from": "nan@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.1.0.tgz" + }, + "node-pre-gyp": { + "version": "0.6.15", + "from": "node-pre-gyp@latest", + "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.15.tgz", + "dependencies": { + "nopt": { + "version": "3.0.4", + "from": "nopt@>=3.0.1 <3.1.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.4.tgz" + } + } + }, + "abbrev": { + "version": "1.0.7", + "from": "abbrev@1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.7.tgz" + }, + "ansi-regex": { + "version": "2.0.0", + "from": "ansi-regex@^2.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + }, + "ansi": { + "version": "0.3.0", + "from": "ansi@~0.3.0", + "resolved": "https://registry.npmjs.org/ansi/-/ansi-0.3.0.tgz" + }, + "ansi-styles": { + "version": "2.1.0", + "from": "ansi-styles@^2.1.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz" + }, + "are-we-there-yet": { + "version": "1.0.4", + "from": "are-we-there-yet@~1.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.0.4.tgz" + }, + "asn1": { + "version": "0.1.11", + "from": "asn1@0.1.11", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + }, + "assert-plus": { + "version": "0.1.5", + "from": "assert-plus@^0.1.5", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz" + }, + "aws-sign2": { + "version": "0.6.0", + "from": "aws-sign2@~0.6.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "async": { + "version": "1.5.0", + "from": "async@^1.4.0", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.0.tgz" + }, + "balanced-match": { + "version": "0.2.1", + "from": "balanced-match@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.1.tgz" + }, + "block-stream": { + "version": "0.0.8", + "from": "block-stream@*", + "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.8.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "boom@^2.8.x", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "caseless": { + "version": "0.11.0", + "from": "caseless@~0.11.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "brace-expansion": { + "version": "1.1.1", + "from": "brace-expansion@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz" + }, + "chalk": { + "version": "1.1.1", + "from": "chalk@^1.1.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "combined-stream@~1.0.5", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz" + }, + "commander": { + "version": "2.9.0", + "from": "commander@^2.8.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + }, + "core-util-is": { "version": "1.0.1", - "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz", - "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz" - } - } - }, - "path-is-absolute": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" - } - } - } - } - }, - "tmp": { - "version": "0.0.24", - "from": "https://registry.npmjs.org/tmp/-/tmp-0.0.24.tgz", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.24.tgz" - }, - "ws": { - "version": "0.8.0", - "from": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", - "resolved": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", - "dependencies": { - "options": { - "version": "0.0.6", - "from": "https://registry.npmjs.org/options/-/options-0.0.6.tgz", - "resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz" - }, - "ultron": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" - }, - "bufferutil": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } - }, - "utf-8-validate": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } - } - } - }, - "xml2js": { - "version": "0.4.4", - "from": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.4.tgz", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.4.tgz", - "dependencies": { - "sax": { - "version": "0.6.1", - "from": "https://registry.npmjs.org/sax/-/sax-0.6.1.tgz", - "resolved": "https://registry.npmjs.org/sax/-/sax-0.6.1.tgz" - }, - "xmlbuilder": { - "version": "3.1.0", - "from": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-3.1.0.tgz", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-3.1.0.tgz", - "dependencies": { - "lodash": { - "version": "3.10.1", - "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" - } - } - } - } - } - } - }, - "minijasminenode": { - "version": "1.1.1", - "from": "https://registry.npmjs.org/minijasminenode/-/minijasminenode-1.1.1.tgz", - "resolved": "https://registry.npmjs.org/minijasminenode/-/minijasminenode-1.1.1.tgz" - }, - "jasminewd": { - "version": "1.1.0", - "from": "https://registry.npmjs.org/jasminewd/-/jasminewd-1.1.0.tgz", - "resolved": "https://registry.npmjs.org/jasminewd/-/jasminewd-1.1.0.tgz" - }, - "jasminewd2": { - "version": "0.0.6", - "from": "https://registry.npmjs.org/jasminewd2/-/jasminewd2-0.0.6.tgz", - "resolved": "https://registry.npmjs.org/jasminewd2/-/jasminewd2-0.0.6.tgz" - }, - "jasmine": 
{ - "version": "2.3.2", - "from": "https://registry.npmjs.org/jasmine/-/jasmine-2.3.2.tgz", - "resolved": "https://registry.npmjs.org/jasmine/-/jasmine-2.3.2.tgz", - "dependencies": { - "exit": { - "version": "0.1.2", - "from": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" - } - } - }, - "saucelabs": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/saucelabs/-/saucelabs-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/saucelabs/-/saucelabs-1.0.1.tgz", - "dependencies": { - "https-proxy-agent": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-1.0.0.tgz", - "dependencies": { - "agent-base": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/agent-base/-/agent-base-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-2.0.1.tgz", - "dependencies": { - "semver": { - "version": "5.0.3", - "from": "https://registry.npmjs.org/semver/-/semver-5.0.3.tgz", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.0.3.tgz" - } - } - }, + "from": "core-util-is@~1.0.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "cryptiles@2.x.x", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "ctype": { + "version": "0.5.3", + "from": "ctype@0.5.3", + "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz" + }, + "debug": { + "version": "0.7.4", + "from": "debug@~0.7.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" + }, + "deep-extend": { + "version": "0.2.11", + "from": "deep-extend@~0.2.5", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz" + }, + "delayed-stream": { + "version": "1.0.0", + "from": "delayed-stream@~1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + }, + "delegates": { + "version": "0.1.0", + "from": "delegates@^0.1.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-0.1.0.tgz" + }, + "escape-string-regexp": { + "version": "1.0.3", + "from": "escape-string-regexp@^1.0.2", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz" + }, + "extend": { + "version": "3.0.0", + "from": "extend@~3.0.0", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "forever-agent@~0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "1.0.0-rc3", + "from": "form-data@~1.0.0-rc3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.0-rc3.tgz" + }, + "fstream": { + "version": "1.0.8", + "from": "fstream@^1.0.2", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.8.tgz" + }, + "gauge": { + "version": "1.2.2", + "from": "gauge@~1.2.0", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-1.2.2.tgz" + }, + "generate-function": { + "version": "2.0.0", + "from": "generate-function@^2.0.0", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "generate-object-property@^1.1.0", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz" + }, + "graceful-fs": 
{ + "version": "4.1.2", + "from": "graceful-fs@4.1", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz" + }, + "graceful-readlink": { + "version": "1.0.1", + "from": "graceful-readlink@>= 1.0.0", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + }, + "har-validator": { + "version": "2.0.2", + "from": "har-validator@~2.0.2", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.2.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "has-ansi@^2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz" + }, + "has-unicode": { + "version": "1.0.1", + "from": "has-unicode@^1.0.0", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-1.0.1.tgz" + }, + "hawk": { + "version": "3.1.0", + "from": "hawk@~3.1.0", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.0.tgz" + }, + "hoek": { + "version": "2.16.3", + "from": "hoek@2.x.x", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "http-signature": { + "version": "0.11.0", + "from": "http-signature@~0.11.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.11.0.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@*", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "ini": { + "version": "1.3.4", + "from": "ini@~1.3.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.4.tgz" + }, + "is-my-json-valid": { + "version": "2.12.2", + "from": "is-my-json-valid@^2.12.2", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.2.tgz" + }, + "is-property": { + "version": "1.0.2", + "from": "is-property@^1.0.0", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "isstream@~0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "json-stringify-safe@~5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "jsonpointer": { + "version": "2.0.0", + "from": "jsonpointer@2.0.0", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz" + }, + "lodash._basetostring": { + "version": "3.0.1", + "from": "lodash._basetostring@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash._basetostring/-/lodash._basetostring-3.0.1.tgz" + }, + "lodash._createpadding": { + "version": "3.6.1", + "from": "lodash._createpadding@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash._createpadding/-/lodash._createpadding-3.6.1.tgz" + }, + "lodash.pad": { + "version": "3.1.1", + "from": "lodash.pad@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.pad/-/lodash.pad-3.1.1.tgz" + }, + "lodash.padleft": { + "version": "3.1.1", + "from": "lodash.padleft@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.padleft/-/lodash.padleft-3.1.1.tgz" + }, + "lodash.padright": { + "version": "3.1.1", + "from": "lodash.padright@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.padright/-/lodash.padright-3.1.1.tgz" + }, + "lodash.repeat": { + "version": "3.0.1", + "from": "lodash.repeat@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-3.0.1.tgz" + }, + "mime-db": { + "version": "1.19.0", + "from": 
"mime-db@~1.19.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.19.0.tgz" + }, + "mime-types": { + "version": "2.1.7", + "from": "mime-types@~2.1.7", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.7.tgz" + }, + "minimist": { + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + }, + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.3.0 <0.4.0||>=0.4.0 <0.5.0||>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + }, + "node-uuid": { + "version": "1.4.3", + "from": "node-uuid@~1.4.3", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.3.tgz" + }, + "npmlog": { + "version": "1.2.1", + "from": "npmlog@~1.2.0", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-1.2.1.tgz" + }, + "oauth-sign": { + "version": "0.8.0", + "from": "oauth-sign@~0.8.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz" + }, + "once": { + "version": "1.1.1", + "from": "once@~1.1.1", + "resolved": "https://registry.npmjs.org/once/-/once-1.1.1.tgz" + }, + "path-is-absolute": { + "version": "1.0.0", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + }, + "pinkie": { + "version": "1.0.0", + "from": "pinkie@^1.0.0", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-1.0.0.tgz" + }, + "pinkie-promise": { + "version": "1.0.0", + "from": "pinkie-promise@^1.0.0", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-1.0.0.tgz" + }, + "qs": { + "version": "5.2.0", + "from": "qs@~5.2.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-5.2.0.tgz" + }, + "readable-stream": { + "version": "1.1.13", + "from": "readable-stream@^1.1.13", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz" + }, + "request": { + "version": "2.65.0", + "from": "request@2.x", + "resolved": "https://registry.npmjs.org/request/-/request-2.65.0.tgz" + }, + "semver": { + "version": "5.0.3", + "from": "semver@~5.0.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.0.3.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "sntp@1.x.x", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@~0.10.x", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "stringstream@~0.0.4", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "strip-ansi": { + "version": "3.0.0", + "from": "strip-ansi@^3.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz" + }, + "strip-json-comments": { + "version": "0.1.3", + "from": "strip-json-comments@0.1.x", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-0.1.3.tgz" + }, + "tar": { + "version": "2.2.1", + "from": "tar@~2.2.0", + "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz" + }, + "supports-color": { + "version": "2.0.0", + "from": "supports-color@^2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + }, + "tough-cookie": { + "version": "2.2.0", + "from": "tough-cookie@~2.2.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.0.tgz" + }, + "tunnel-agent": { + "version": "0.4.1", + "from": "tunnel-agent@~0.4.1", + "resolved": 
"https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz" + }, + "uid-number": { + "version": "0.0.3", + "from": "uid-number@0.0.3", + "resolved": "https://registry.npmjs.org/uid-number/-/uid-number-0.0.3.tgz" + }, + "wrappy": { + "version": "1.0.1", + "from": "wrappy@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "xtend@^4.0.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + }, + "fstream-ignore": { + "version": "1.0.3", + "from": "fstream-ignore@~1.0.3", + "resolved": "https://registry.npmjs.org/fstream-ignore/-/fstream-ignore-1.0.3.tgz", + "dependencies": { + "minimatch": { + "version": "3.0.0", + "from": "minimatch@>=3.0.0 <4.0.0" + } + } + }, + "inflight": { + "version": "1.0.4", + "from": "inflight@>=1.0.4 <2.0.0", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.4.tgz", + "dependencies": { + "once": { + "version": "1.3.2", + "from": "once@>=1.3.0 <2.0.0" + } + } + }, + "rc": { + "version": "1.1.2", + "from": "rc@~1.1.0", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.1.2.tgz", + "dependencies": { + "minimist": { + "version": "1.2.0", + "from": "minimist@^1.1.2", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz" + } + } + }, + "rimraf": { + "version": "2.4.3", + "from": "rimraf@~2.4.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.3.tgz", + "dependencies": { + "glob": { + "version": "5.0.15", + "from": "glob@>=5.0.14 <6.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz" + }, + "minimatch": { + "version": "3.0.0", + "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.0.tgz" + }, + "once": { + "version": "1.3.2", + "from": "once@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.3.2.tgz" + } + } + }, + "bl": { + "version": "1.0.0", + "from": "bl@~1.0.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.0.0.tgz", + "dependencies": { + "readable-stream": { + "version": "2.0.4", + "from": "readable-stream@~2.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.4.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@~1.0.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@~2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "process-nextick-args": { + "version": "1.0.3", + "from": "process-nextick-args@~1.0.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.3.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@~0.10.x", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "util-deprecate@~1.0.1", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } + }, + "tar-pack": { + "version": "3.1.0", + "from": "tar-pack@~3.1.0", + "resolved": "https://registry.npmjs.org/tar-pack/-/tar-pack-3.1.0.tgz", + "dependencies": { + "readable-stream": { + "version": "1.0.33", + "from": "readable-stream@~1.0.2", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@~1.0.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@~2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@~0.10.x", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + } + } + }, + "rimraf": { + "version": "2.2.8", + "from": "rimraf@~2.2.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + } + } + } + } + } + } + }, + "graceful-fs": { + "version": "4.1.2", + "from": "graceful-fs@>=4.1.2 <5.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz" + } + } + }, + "webpack-core": { + "version": "0.6.8", + "from": "webpack-core@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/webpack-core/-/webpack-core-0.6.8.tgz", + "dependencies": { + "source-map": { + "version": "0.4.4", + "from": "source-map@>=0.4.1 <0.5.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", + "dependencies": { + "amdefine": { + "version": "1.0.0", + "from": "amdefine@>=0.0.4", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz" + } + } + }, + "source-list-map": { + "version": "0.1.5", + "from": "source-list-map@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-0.1.5.tgz" + } + } + } + } + } + } + }, + "moment": { + "version": "2.10.6", + "from": "https://registry.npmjs.org/moment/-/moment-2.10.6.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.10.6.tgz" + }, + "node-libs-browser": { + "version": "0.5.3", + "from": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-0.5.3.tgz", + "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-0.5.3.tgz", + "dependencies": { + "assert": { + "version": "1.3.0", + "from": "https://registry.npmjs.org/assert/-/assert-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/assert/-/assert-1.3.0.tgz" + }, + "browserify-zlib": { + "version": "0.1.4", + "from": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", + "dependencies": { + "pako": { + "version": "0.2.8", + "from": "https://registry.npmjs.org/pako/-/pako-0.2.8.tgz", + "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.8.tgz" + } + } + }, + "buffer": { + "version": "3.5.0", + "from": "https://registry.npmjs.org/buffer/-/buffer-3.5.0.tgz", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-3.5.0.tgz", + "dependencies": { + "base64-js": { + "version": "0.0.8", + "from": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.8.tgz", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.8.tgz" + }, + "ieee754": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.6.tgz" + }, + "is-array": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/is-array/-/is-array-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/is-array/-/is-array-1.0.1.tgz" + } + } + }, + 
"console-browserify": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", + "dependencies": { + "date-now": { + "version": "0.1.4", + "from": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz", + "resolved": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz" + } + } + }, + "constants-browserify": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-0.0.1.tgz" + }, + "crypto-browserify": { + "version": "3.2.8", + "from": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.2.8.tgz", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.2.8.tgz", + "dependencies": { + "pbkdf2-compat": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pbkdf2-compat/-/pbkdf2-compat-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pbkdf2-compat/-/pbkdf2-compat-2.0.1.tgz" + }, + "ripemd160": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/ripemd160/-/ripemd160-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-0.2.0.tgz" + }, + "sha.js": { + "version": "2.2.6", + "from": "https://registry.npmjs.org/sha.js/-/sha.js-2.2.6.tgz", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.2.6.tgz" + } + } + }, + "domain-browser": { + "version": "1.1.4", + "from": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.1.4.tgz", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.1.4.tgz" + }, + "events": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/events/-/events-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.0.tgz" + }, + "http-browserify": { + "version": "1.7.0", + "from": "https://registry.npmjs.org/http-browserify/-/http-browserify-1.7.0.tgz", + "resolved": "https://registry.npmjs.org/http-browserify/-/http-browserify-1.7.0.tgz", + "dependencies": { + "Base64": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/Base64/-/Base64-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/Base64/-/Base64-0.2.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + } + } + }, + "https-browserify": { + "version": "0.0.0", + "from": "https://registry.npmjs.org/https-browserify/-/https-browserify-0.0.0.tgz", + "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-0.0.0.tgz" + }, + "os-browserify": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.1.2.tgz" + }, + "path-browserify": { + "version": "0.0.0", + "from": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz" + }, + "process": { + "version": "0.11.2", + "from": "https://registry.npmjs.org/process/-/process-0.11.2.tgz", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.2.tgz" + }, + "punycode": { + "version": "1.3.2", + "from": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + }, + 
"querystring-es3": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz" + }, + "readable-stream": { + "version": "1.1.13", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + } + } + }, + "stream-browserify": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-1.0.0.tgz", + "dependencies": { + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + } + } + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "timers-browserify": { + "version": "1.4.1", + "from": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-1.4.1.tgz", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-1.4.1.tgz" + }, + "tty-browserify": { + "version": "0.0.0", + "from": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz" + }, + "url": { + "version": "0.10.3", + "from": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "dependencies": { + "querystring": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + } + } + }, + "util": { + "version": "0.10.3", + "from": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", + "dependencies": { + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + } + } + }, + "vm-browserify": { + "version": "0.0.4", + "from": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", + "dependencies": { + "indexof": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz" + } + } + } + } + }, + "normalizr": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/normalizr/-/normalizr-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/normalizr/-/normalizr-0.1.3.tgz", + "dependencies": { + 
"lodash": { + "version": "3.10.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" + } + } + }, + "password-generator": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/password-generator/-/password-generator-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/password-generator/-/password-generator-2.0.1.tgz", + "dependencies": { + "optimist": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + }, + "minimist": { + "version": "0.0.10", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + } + } + } + } + }, + "protractor": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/protractor/-/protractor-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/protractor/-/protractor-2.4.0.tgz", + "dependencies": { + "request": { + "version": "2.57.0", + "from": "https://registry.npmjs.org/request/-/request-2.57.0.tgz", + "resolved": "https://registry.npmjs.org/request/-/request-2.57.0.tgz", + "dependencies": { + "bl": { + "version": "0.9.4", + "from": "https://registry.npmjs.org/bl/-/bl-0.9.4.tgz", + "resolved": "https://registry.npmjs.org/bl/-/bl-0.9.4.tgz", + "dependencies": { + "readable-stream": { + "version": "1.0.33", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + } + } + } + } + }, + "caseless": { + "version": "0.10.0", + "from": "https://registry.npmjs.org/caseless/-/caseless-0.10.0.tgz", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.10.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz", + "dependencies": { + "async": { + "version": "0.9.2", + "from": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" + }, + "combined-stream": { + "version": "0.0.7", + "from": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "dependencies": { + "delayed-stream": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + } + } + } + } + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "mime-types": { + "version": "2.0.14", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz", + "dependencies": { + "mime-db": { + "version": "1.12.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.3", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.3.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.3.tgz" + }, + "qs": { + "version": "3.1.0", + "from": "https://registry.npmjs.org/qs/-/qs-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-3.1.0.tgz" + }, + "tunnel-agent": { + "version": "0.4.1", + "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz" + }, + "tough-cookie": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.0.0.tgz" + }, + "http-signature": { + "version": "0.11.0", + "from": "https://registry.npmjs.org/http-signature/-/http-signature-0.11.0.tgz", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.11.0.tgz", + "dependencies": { + "assert-plus": { + "version": "0.1.5", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz" + }, + "asn1": { + "version": "0.1.11", + "from": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + }, + "ctype": { + "version": "0.5.3", + "from": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz", + "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz" + } + } + }, + "oauth-sign": { + "version": "0.8.0", + "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz" + }, + "hawk": { + "version": "2.3.1", + "from": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz", + "dependencies": { + "hoek": { + "version": "2.16.3", + "from": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "boom": { + "version": "2.9.0", + "from": "https://registry.npmjs.org/boom/-/boom-2.9.0.tgz", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.9.0.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": 
"https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + } + } + }, + "aws-sign2": { + "version": "0.5.0", + "from": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz" + }, + "stringstream": { + "version": "0.0.4", + "from": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + } + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "har-validator": { + "version": "1.8.0", + "from": "https://registry.npmjs.org/har-validator/-/har-validator-1.8.0.tgz", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-1.8.0.tgz", + "dependencies": { + "bluebird": { + "version": "2.10.2", + "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" + }, + "chalk": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz", + "dependencies": { + "ansi-styles": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz" + }, + "escape-string-regexp": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "strip-ansi": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + } + } + }, + "commander": { + "version": "2.8.1", + "from": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1", + "from": 
"https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + }, + "is-my-json-valid": { + "version": "2.12.2", + "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.2.tgz", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.2.tgz", + "dependencies": { + "generate-function": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "dependencies": { + "is-property": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + } + } + }, + "jsonpointer": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz" + }, + "xtend": { + "version": "4.0.0", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.0.tgz" + } + } + } + } + } + } + }, + "selenium-webdriver": { + "version": "2.47.0", + "from": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-2.47.0.tgz", + "resolved": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-2.47.0.tgz", + "dependencies": { + "rimraf": { + "version": "2.4.3", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.3.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.3.tgz", + "dependencies": { + "glob": { + "version": "5.0.15", + "from": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", + "dependencies": { + "inflight": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.4.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz" + } + } + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "minimatch": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.0.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz", + "dependencies": { + "balanced-match": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.0.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.3.2", + "from": "https://registry.npmjs.org/once/-/once-1.3.2.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.3.2.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + } + } + } + } + }, + "tmp": { + "version": "0.0.24", + "from": "https://registry.npmjs.org/tmp/-/tmp-0.0.24.tgz", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.24.tgz" + }, + "ws": { + "version": "0.8.0", + "from": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", + "resolved": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", + "dependencies": { + "options": { + "version": "0.0.6", + "from": "https://registry.npmjs.org/options/-/options-0.0.6.tgz", + "resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz" + }, + "ultron": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" + } + } + }, + "xml2js": { + "version": "0.4.4", + "from": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.4.tgz", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.4.tgz", + "dependencies": { + "sax": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/sax/-/sax-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/sax/-/sax-0.6.1.tgz" + }, + "xmlbuilder": { + "version": "3.1.0", + "from": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-3.1.0.tgz", + "dependencies": { + "lodash": { + "version": "3.10.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" + } + } + } + } + } + } + }, + "minijasminenode": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/minijasminenode/-/minijasminenode-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/minijasminenode/-/minijasminenode-1.1.1.tgz" + }, + "jasminewd": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/jasminewd/-/jasminewd-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/jasminewd/-/jasminewd-1.1.0.tgz" + }, + "jasminewd2": { + "version": "0.0.6", + "from": "https://registry.npmjs.org/jasminewd2/-/jasminewd2-0.0.6.tgz", + "resolved": "https://registry.npmjs.org/jasminewd2/-/jasminewd2-0.0.6.tgz" + }, + "jasmine": { + "version": "2.3.2", + "from": "https://registry.npmjs.org/jasmine/-/jasmine-2.3.2.tgz", + "resolved": "https://registry.npmjs.org/jasmine/-/jasmine-2.3.2.tgz", + "dependencies": { + "exit": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" + } + } + }, + "saucelabs": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/saucelabs/-/saucelabs-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/saucelabs/-/saucelabs-1.0.1.tgz", + "dependencies": { + "https-proxy-agent": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-1.0.0.tgz", + "resolved": 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-1.0.0.tgz", + "dependencies": { + "agent-base": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/agent-base/-/agent-base-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-2.0.1.tgz", + "dependencies": { + "semver": { + "version": "5.0.3", + "from": "https://registry.npmjs.org/semver/-/semver-5.0.3.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.0.3.tgz" + } + } + }, "debug": { "version": "2.2.0", "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", @@ -10035,33 +11054,33 @@ }, "react-grid-layout": { "version": "0.8.5", - "from": "https://registry.npmjs.org/react-grid-layout/-/react-grid-layout-0.8.5.tgz", + "from": "react-grid-layout@>=0.8.5 <0.9.0", "resolved": "https://registry.npmjs.org/react-grid-layout/-/react-grid-layout-0.8.5.tgz", "dependencies": { "deep-equal": { "version": "1.0.1", - "from": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", + "from": "deep-equal@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz" }, "object-assign": { "version": "2.1.1", - "from": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", + "from": "object-assign@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz" }, "react-draggable": { - "version": "0.3.1", - "from": "git://github.com/strml/react-draggable.git#b7f2b30d74c1390eff32ec6e04b3925b691c5358", - "resolved": "git://github.com/strml/react-draggable.git#b7f2b30d74c1390eff32ec6e04b3925b691c5358" + "version": "0.3.3", + "from": "strml/react-draggable#v0.3.3", + "resolved": "git://github.com/strml/react-draggable.git#6302ffa23e5ac0b98e888b6ff277f53e5f82d67c" }, "react-resizable": { "version": "0.3.3", - "from": "https://registry.npmjs.org/react-resizable/-/react-resizable-0.3.3.tgz", + "from": "react-resizable@>=0.3.2 <0.4.0", "resolved": "https://registry.npmjs.org/react-resizable/-/react-resizable-0.3.3.tgz", "dependencies": { "react-draggable": { "version": "0.3.1", - "from": "git+https://github.com/strml/react-draggable.git#7a455326100add5809017fafe7a7b58eb26a8b16", - "resolved": "git+https://github.com/strml/react-draggable.git#7a455326100add5809017fafe7a7b58eb26a8b16" + "from": "strml/react-draggable#v0.3.1", + "resolved": "git://github.com/strml/react-draggable.git#7a455326100add5809017fafe7a7b58eb26a8b16" } } } @@ -10220,6 +11239,100 @@ "resolved": "https://registry.npmjs.org/smart-mixin/-/smart-mixin-1.2.1.tgz" } } + }, + "react": { + "version": "0.14.2", + "from": "react@>=0.13.3||>=0.14.0-beta3 <0.15.0", + "resolved": "https://registry.npmjs.org/react/-/react-0.14.2.tgz", + "dependencies": { + "envify": { + "version": "3.4.0", + "from": "envify@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/envify/-/envify-3.4.0.tgz", + "dependencies": { + "through": { + "version": "2.3.8", + "from": "through@>=2.3.4 <2.4.0", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz" + }, + "jstransform": { + "version": "10.1.0", + "from": "jstransform@>=10.0.1 <11.0.0", + "resolved": "https://registry.npmjs.org/jstransform/-/jstransform-10.1.0.tgz", + "dependencies": { + "base62": { + "version": "0.1.1", + "from": "base62@0.1.1", + "resolved": "https://registry.npmjs.org/base62/-/base62-0.1.1.tgz" + }, + "esprima-fb": { + "version": "13001.1001.0-dev-harmony-fb", + "from": "esprima-fb@13001.1001.0-dev-harmony-fb", + "resolved": 
"https://registry.npmjs.org/esprima-fb/-/esprima-fb-13001.1001.0-dev-harmony-fb.tgz" + }, + "source-map": { + "version": "0.1.31", + "from": "source-map@0.1.31", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.31.tgz", + "dependencies": { + "amdefine": { + "version": "1.0.0", + "from": "amdefine@>=0.0.4", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz" + } + } + } + } + } + } + }, + "fbjs": { + "version": "0.3.2", + "from": "fbjs@>=0.3.1 <0.4.0", + "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-0.3.2.tgz", + "dependencies": { + "core-js": { + "version": "1.2.6", + "from": "core-js@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.6.tgz" + }, + "loose-envify": { + "version": "1.1.0", + "from": "loose-envify@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.1.0.tgz", + "dependencies": { + "js-tokens": { + "version": "1.0.2", + "from": "js-tokens@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-1.0.2.tgz" + } + } + }, + "promise": { + "version": "7.0.4", + "from": "promise@>=7.0.3 <8.0.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.0.4.tgz", + "dependencies": { + "asap": { + "version": "2.0.3", + "from": "asap@>=2.0.3 <2.1.0", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.3.tgz" + } + } + }, + "ua-parser-js": { + "version": "0.7.9", + "from": "ua-parser-js@>=0.7.9 <0.8.0", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.9.tgz" + }, + "whatwg-fetch": { + "version": "0.9.0", + "from": "whatwg-fetch@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-0.9.0.tgz" + } + } + } + } } } }, @@ -11963,83 +13076,292 @@ } } }, - "batch": { - "version": "0.5.2", - "from": "https://registry.npmjs.org/batch/-/batch-0.5.2.tgz", - "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.2.tgz" - }, - "debug": { - "version": "2.2.0", - "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "batch": { + "version": "0.5.2", + "from": "https://registry.npmjs.org/batch/-/batch-0.5.2.tgz", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.2.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "dependencies": { + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + } + } + }, + "escape-html": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.2.tgz" + }, + "http-errors": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/http-errors/-/http-errors-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.3.1.tgz", + "dependencies": { + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "statuses": { + "version": "1.2.1", + "from": "https://registry.npmjs.org/statuses/-/statuses-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.2.1.tgz" + } + } + }, + "mime-types": { + "version": "2.1.7", + "from": 
"https://registry.npmjs.org/mime-types/-/mime-types-2.1.7.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.7.tgz", + "dependencies": { + "mime-db": { + "version": "1.19.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.19.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.19.0.tgz" + } + } + }, + "parseurl": { + "version": "1.3.0", + "from": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.0.tgz" + } + } + }, + "socket.io": { + "version": "1.3.7", + "from": "https://registry.npmjs.org/socket.io/-/socket.io-1.3.7.tgz", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-1.3.7.tgz", + "dependencies": { + "engine.io": { + "version": "1.5.4", + "from": "https://registry.npmjs.org/engine.io/-/engine.io-1.5.4.tgz", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-1.5.4.tgz", + "dependencies": { + "base64id": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/base64id/-/base64id-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/base64id/-/base64id-0.1.0.tgz" + }, + "debug": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/debug/-/debug-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.3.tgz", + "dependencies": { + "ms": { + "version": "0.6.2", + "from": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + } + } + }, + "engine.io-parser": { + "version": "1.2.2", + "from": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-1.2.2.tgz", + "dependencies": { + "after": { + "version": "0.8.1", + "from": "https://registry.npmjs.org/after/-/after-0.8.1.tgz", + "resolved": "https://registry.npmjs.org/after/-/after-0.8.1.tgz" + }, + "arraybuffer.slice": { + "version": "0.0.6", + "from": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz", + "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz" + }, + "base64-arraybuffer": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.2.tgz" + }, + "blob": { + "version": "0.0.4", + "from": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz", + "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz" + }, + "has-binary": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + } + } + }, + "utf8": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/utf8/-/utf8-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/utf8/-/utf8-2.1.0.tgz" + } + } + }, + "ws": { + "version": "0.8.0", + "from": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", + "resolved": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", + "dependencies": { + "options": { + "version": "0.0.6", + "from": "https://registry.npmjs.org/options/-/options-0.0.6.tgz", + "resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz" + }, + "ultron": { + "version": "1.0.2", + "from": 
"https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" + } + } + } + } + }, + "socket.io-parser": { + "version": "2.2.4", + "from": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.4.tgz", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.4.tgz", "dependencies": { - "ms": { - "version": "0.7.1", - "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + "debug": { + "version": "0.7.4", + "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" + }, + "json3": { + "version": "3.2.6", + "from": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz", + "resolved": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz" + }, + "component-emitter": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "benchmark": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz" } } }, - "escape-html": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.2.tgz" - }, - "http-errors": { - "version": "1.3.1", - "from": "https://registry.npmjs.org/http-errors/-/http-errors-1.3.1.tgz", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.3.1.tgz", + "socket.io-adapter": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-0.3.1.tgz", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-0.3.1.tgz", "dependencies": { - "inherits": { - "version": "2.0.1", - "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + "debug": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "dependencies": { + "ms": { + "version": "0.6.2", + "from": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + } + } }, - "statuses": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/statuses/-/statuses-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.2.1.tgz" + "socket.io-parser": { + "version": "2.2.2", + "from": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.2.tgz", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.2.tgz", + "dependencies": { + "debug": { + "version": "0.7.4", + "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" + }, + "json3": { + "version": "3.2.6", + "from": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz", + "resolved": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz" + }, + "component-emitter": { + "version": "1.1.2", + "from": 
"https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "benchmark": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz" + } + } + }, + "object-keys": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.1.tgz" } } }, - "mime-types": { - "version": "2.1.7", - "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.7.tgz", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.7.tgz", + "has-binary-data": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/has-binary-data/-/has-binary-data-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/has-binary-data/-/has-binary-data-0.1.3.tgz", "dependencies": { - "mime-db": { - "version": "1.19.0", - "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.19.0.tgz", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.19.0.tgz" + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" } } }, - "parseurl": { - "version": "1.3.0", - "from": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.0.tgz", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.0.tgz" + "debug": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.1.0.tgz", + "dependencies": { + "ms": { + "version": "0.6.2", + "from": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + } + } } } }, - "socket.io": { + "socket.io-client": { "version": "1.3.7", - "from": "https://registry.npmjs.org/socket.io/-/socket.io-1.3.7.tgz", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-1.3.7.tgz", + "from": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-1.3.7.tgz", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-1.3.7.tgz", "dependencies": { - "engine.io": { + "debug": { + "version": "0.7.4", + "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" + }, + "engine.io-client": { "version": "1.5.4", - "from": "https://registry.npmjs.org/engine.io/-/engine.io-1.5.4.tgz", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-1.5.4.tgz", + "from": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-1.5.4.tgz", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-1.5.4.tgz", "dependencies": { - "base64id": { - "version": "0.1.0", - "from": "https://registry.npmjs.org/base64id/-/base64id-0.1.0.tgz", - "resolved": "https://registry.npmjs.org/base64id/-/base64id-0.1.0.tgz" + "component-inherit": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz" }, "debug": { - "version": "1.0.3", - "from": 
"https://registry.npmjs.org/debug/-/debug-1.0.3.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.3.tgz", + "version": "1.0.4", + "from": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", "dependencies": { "ms": { "version": "0.6.2", @@ -12073,18 +13395,6 @@ "from": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz", "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz" }, - "has-binary": { - "version": "0.1.6", - "from": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", - "resolved": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", - "dependencies": { - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - } - } - }, "utf8": { "version": "2.1.0", "from": "https://registry.npmjs.org/utf8/-/utf8-2.1.0.tgz", @@ -12092,6 +13402,75 @@ } } }, + "has-cors": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/has-cors/-/has-cors-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.0.3.tgz", + "dependencies": { + "global": { + "version": "2.0.1", + "from": "https://github.com/component/global/archive/v2.0.1.tar.gz", + "resolved": "https://github.com/component/global/archive/v2.0.1.tar.gz" + } + } + }, + "parsejson": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/parsejson/-/parsejson-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/parsejson/-/parsejson-0.0.1.tgz", + "dependencies": { + "better-assert": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "dependencies": { + "callsite": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + } + } + } + } + }, + "parseqs": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.2.tgz", + "dependencies": { + "better-assert": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "dependencies": { + "callsite": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + } + } + } + } + }, + "parseuri": { + "version": "0.0.4", + "from": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.4.tgz", + "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.4.tgz", + "dependencies": { + "better-assert": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "dependencies": { + "callsite": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + } + } + } + } + }, "ws": { "version": "0.8.0", "from": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", @@ -12106,65 +13485,41 @@ "version": "1.0.2", "from": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" - }, - 
"bufferutil": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } - }, - "utf-8-validate": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" - }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } } } + }, + "xmlhttprequest": { + "version": "1.5.0", + "from": "https://github.com/rase-/node-XMLHttpRequest/archive/a6b6f2.tar.gz", + "resolved": "https://github.com/rase-/node-XMLHttpRequest/archive/a6b6f2.tar.gz" } } }, + "component-bind": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz" + }, + "component-emitter": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz" + }, + "object-component": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz" + }, "socket.io-parser": { "version": "2.2.4", "from": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.4.tgz", "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.4.tgz", "dependencies": { - "debug": { - "version": "0.7.4", - "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" - }, "json3": { "version": "3.2.6", "from": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz", "resolved": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz" }, - "component-emitter": { - "version": "1.1.2", - "from": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz" - }, "isarray": { "version": "0.0.1", "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", @@ -12177,407 +13532,1206 @@ } } }, - "socket.io-adapter": { - "version": "0.3.1", - "from": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-0.3.1.tgz", - "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-0.3.1.tgz", + "has-binary": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", "dependencies": { - "debug": { + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + } + } + }, + "indexof": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz" + }, + "parseuri": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.2.tgz", + "dependencies": { + "better-assert": { "version": "1.0.2", - "from": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "dependencies": { - "ms": { - "version": "0.6.2", - "from": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" - } - } - }, - "socket.io-parser": { - "version": "2.2.2", - "from": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.2.tgz", - "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.2.tgz", + "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", "dependencies": { - "debug": { - "version": "0.7.4", - "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" - }, - "json3": { - "version": "3.2.6", - "from": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz", - "resolved": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz" - }, - "component-emitter": { - "version": "1.1.2", - "from": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz" - }, - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - }, - "benchmark": { + "callsite": { "version": "1.0.0", - "from": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz" + "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" } } - }, - "object-keys": { - "version": "1.0.1", - "from": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.1.tgz", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.1.tgz" } } }, - "has-binary-data": { + "to-array": { "version": "0.1.3", - "from": "https://registry.npmjs.org/has-binary-data/-/has-binary-data-0.1.3.tgz", - "resolved": "https://registry.npmjs.org/has-binary-data/-/has-binary-data-0.1.3.tgz", + "from": "https://registry.npmjs.org/to-array/-/to-array-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.3.tgz" + }, + "backo2": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz" + } + } + }, + "stream-cache": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/stream-cache/-/stream-cache-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/stream-cache/-/stream-cache-0.0.2.tgz" + }, + "strip-ansi": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "3.1.1", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.1.tgz", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.1.tgz", + "dependencies": { + "has-flag": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" + } + } + }, + "webpack-dev-middleware": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-1.2.0.tgz", + "dependencies": { + "memory-fs": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.2.0.tgz" + }, + "mime": { + "version": "1.3.4", + "from": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" + } + } + }, + "webpack": { + "version": "1.12.4", + "from": "webpack@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-1.12.4.tgz", + "dependencies": { + "async": { + "version": "1.5.0", + "from": "async@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.0.tgz" + }, + "clone": { + "version": "1.0.2", + "from": "clone@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.2.tgz" + }, + "enhanced-resolve": { + "version": "0.9.1", + "from": "enhanced-resolve@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-0.9.1.tgz", "dependencies": { - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "graceful-fs": { + "version": "4.1.2", + "from": "graceful-fs@>=4.1.2 <5.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz" } } }, - "debug": { - "version": "2.1.0", - "from": "https://registry.npmjs.org/debug/-/debug-2.1.0.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.1.0.tgz", + "esprima": { + "version": "2.7.0", + "from": "esprima@>=2.5.0 <3.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-2.7.0.tgz" + }, + "interpret": { + "version": "0.6.6", + "from": "interpret@>=0.6.4 <0.7.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-0.6.6.tgz" + }, + "loader-utils": { + "version": "0.2.11", + "from": "loader-utils@>=0.2.11 <0.3.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-0.2.11.tgz", "dependencies": { - "ms": { - "version": "0.6.2", - "from": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + "big.js": { + "version": "3.1.3", + "from": "big.js@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-3.1.3.tgz" + }, + "json5": { + "version": "0.4.0", + "from": "json5@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-0.4.0.tgz" } } - } - } - }, - "socket.io-client": { - "version": "1.3.7", - "from": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-1.3.7.tgz", - "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-1.3.7.tgz", - 
"dependencies": { - "debug": { - "version": "0.7.4", - "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" }, - "engine.io-client": { - "version": "1.5.4", - "from": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-1.5.4.tgz", - "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-1.5.4.tgz", + "memory-fs": { + "version": "0.2.0", + "from": "memory-fs@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.2.0.tgz" + }, + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "dependencies": { - "component-inherit": { - "version": "0.0.3", - "from": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz", - "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz" + "minimist": { + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + } + }, + "tapable": { + "version": "0.1.9", + "from": "tapable@>=0.1.8 <0.2.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-0.1.9.tgz" + }, + "uglify-js": { + "version": "2.5.0", + "from": "uglify-js@>=2.5.0 <2.6.0", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.5.0.tgz", + "dependencies": { + "async": { + "version": "0.2.10", + "from": "async@>=0.2.6 <0.3.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + }, + "source-map": { + "version": "0.5.3", + "from": "source-map@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.3.tgz" + }, + "uglify-to-browserify": { + "version": "1.0.2", + "from": "uglify-to-browserify@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz" }, - "debug": { - "version": "1.0.4", - "from": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", + "yargs": { + "version": "3.5.4", + "from": "yargs@>=3.5.4 <3.6.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.5.4.tgz", "dependencies": { - "ms": { - "version": "0.6.2", - "from": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + "camelcase": { + "version": "1.2.1", + "from": "camelcase@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz" + }, + "decamelize": { + "version": "1.1.1", + "from": "decamelize@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.1.1.tgz" + }, + "window-size": { + "version": "0.1.0", + "from": "window-size@0.1.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz" + }, + "wordwrap": { + "version": "0.0.2", + "from": "wordwrap@0.0.2", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz" } } + } + } + }, + "watchpack": { + "version": "0.2.9", + "from": "watchpack@>=0.2.1 <0.3.0", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-0.2.9.tgz", + "dependencies": { + "async": { + "version": "0.9.2", + "from": "async@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" }, - "engine.io-parser": { - "version": "1.2.2", - "from": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-1.2.2.tgz", - "resolved": 
"https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-1.2.2.tgz", + "chokidar": { + "version": "1.2.0", + "from": "chokidar@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-1.2.0.tgz", "dependencies": { - "after": { - "version": "0.8.1", - "from": "https://registry.npmjs.org/after/-/after-0.8.1.tgz", - "resolved": "https://registry.npmjs.org/after/-/after-0.8.1.tgz" + "anymatch": { + "version": "1.3.0", + "from": "anymatch@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-1.3.0.tgz", + "dependencies": { + "micromatch": { + "version": "2.3.2", + "from": "micromatch@>=2.1.5 <3.0.0", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.2.tgz", + "dependencies": { + "arr-diff": { + "version": "1.1.0", + "from": "arr-diff@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-1.1.0.tgz", + "dependencies": { + "arr-flatten": { + "version": "1.0.1", + "from": "arr-flatten@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.0.1.tgz" + }, + "array-slice": { + "version": "0.2.3", + "from": "array-slice@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/array-slice/-/array-slice-0.2.3.tgz" + } + } + }, + "array-unique": { + "version": "0.2.1", + "from": "array-unique@>=0.2.1 <0.3.0", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz" + }, + "braces": { + "version": "1.8.2", + "from": "braces@>=1.8.1 <2.0.0", + "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.2.tgz", + "dependencies": { + "expand-range": { + "version": "1.8.1", + "from": "expand-range@>=1.8.1 <2.0.0", + "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.1.tgz", + "dependencies": { + "fill-range": { + "version": "2.2.2", + "from": "fill-range@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-2.2.2.tgz", + "dependencies": { + "is-number": { + "version": "1.1.2", + "from": "is-number@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-1.1.2.tgz" + }, + "isobject": { + "version": "1.0.2", + "from": "isobject@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-1.0.2.tgz" + }, + "randomatic": { + "version": "1.1.1", + "from": "randomatic@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/randomatic/-/randomatic-1.1.1.tgz", + "dependencies": { + "is-number": { + "version": "2.0.2", + "from": "is-number@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.0.2.tgz", + "dependencies": { + "kind-of": { + "version": "1.1.0", + "from": "kind-of@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-1.1.0.tgz" + } + } + } + } + }, + "repeat-string": { + "version": "1.5.2", + "from": "repeat-string@>=1.5.2 <2.0.0", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.5.2.tgz" + } + } + } + } + }, + "preserve": { + "version": "0.2.0", + "from": "preserve@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/preserve/-/preserve-0.2.0.tgz" + }, + "repeat-element": { + "version": "1.1.2", + "from": "repeat-element@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.2.tgz" + } + } + }, + "expand-brackets": { + "version": "0.1.4", + "from": "expand-brackets@>=0.1.4 <0.2.0", + "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.4.tgz" + }, + "extglob": { + "version": "0.3.1", + "from": "extglob@>=0.3.1 
<0.4.0", + "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.1.tgz", + "dependencies": { + "ansi-green": { + "version": "0.1.1", + "from": "ansi-green@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/ansi-green/-/ansi-green-0.1.1.tgz", + "dependencies": { + "ansi-wrap": { + "version": "0.1.0", + "from": "ansi-wrap@0.1.0", + "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz" + } + } + }, + "success-symbol": { + "version": "0.1.0", + "from": "success-symbol@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/success-symbol/-/success-symbol-0.1.0.tgz" + } + } + }, + "filename-regex": { + "version": "2.0.0", + "from": "filename-regex@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/filename-regex/-/filename-regex-2.0.0.tgz" + }, + "is-extglob": { + "version": "1.0.0", + "from": "is-extglob@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz" + }, + "kind-of": { + "version": "2.0.1", + "from": "kind-of@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-2.0.1.tgz", + "dependencies": { + "is-buffer": { + "version": "1.1.0", + "from": "is-buffer@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.0.tgz" + } + } + }, + "lazy-cache": { + "version": "0.2.4", + "from": "lazy-cache@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-0.2.4.tgz" + }, + "normalize-path": { + "version": "2.0.0", + "from": "normalize-path@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.0.tgz" + }, + "object.omit": { + "version": "2.0.0", + "from": "object.omit@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/object.omit/-/object.omit-2.0.0.tgz", + "dependencies": { + "for-own": { + "version": "0.1.3", + "from": "for-own@>=0.1.3 <0.2.0", + "resolved": "https://registry.npmjs.org/for-own/-/for-own-0.1.3.tgz", + "dependencies": { + "for-in": { + "version": "0.1.4", + "from": "for-in@>=0.1.4 <0.2.0", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-0.1.4.tgz" + } + } + }, + "is-extendable": { + "version": "0.1.1", + "from": "is-extendable@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" + } + } + }, + "parse-glob": { + "version": "3.0.4", + "from": "parse-glob@>=3.0.4 <4.0.0", + "resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz", + "dependencies": { + "glob-base": { + "version": "0.3.0", + "from": "glob-base@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz" + }, + "is-dotfile": { + "version": "1.0.2", + "from": "is-dotfile@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.2.tgz" + } + } + }, + "regex-cache": { + "version": "0.4.2", + "from": "regex-cache@>=0.4.2 <0.5.0", + "resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.2.tgz", + "dependencies": { + "is-equal-shallow": { + "version": "0.1.3", + "from": "is-equal-shallow@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz" + }, + "is-primitive": { + "version": "2.0.0", + "from": "is-primitive@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-2.0.0.tgz" + } + } + } + } + } + } }, - "arraybuffer.slice": { - "version": "0.0.6", - "from": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz", - "resolved": 
"https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz" + "arrify": { + "version": "1.0.0", + "from": "arrify@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.0.tgz" }, - "base64-arraybuffer": { - "version": "0.1.2", - "from": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.2.tgz", - "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.2.tgz" + "async-each": { + "version": "0.1.6", + "from": "async-each@>=0.1.5 <0.2.0", + "resolved": "https://registry.npmjs.org/async-each/-/async-each-0.1.6.tgz" }, - "blob": { - "version": "0.0.4", - "from": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz", - "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz" + "glob-parent": { + "version": "2.0.0", + "from": "glob-parent@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz" }, - "utf8": { - "version": "2.1.0", - "from": "https://registry.npmjs.org/utf8/-/utf8-2.1.0.tgz", - "resolved": "https://registry.npmjs.org/utf8/-/utf8-2.1.0.tgz" - } - } - }, - "has-cors": { - "version": "1.0.3", - "from": "https://registry.npmjs.org/has-cors/-/has-cors-1.0.3.tgz", - "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.0.3.tgz", - "dependencies": { - "global": { - "version": "2.0.1", - "from": "https://github.com/component/global/archive/v2.0.1.tar.gz", - "resolved": "https://github.com/component/global/archive/v2.0.1.tar.gz" - } - } - }, - "parsejson": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/parsejson/-/parsejson-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/parsejson/-/parsejson-0.0.1.tgz", - "dependencies": { - "better-assert": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "is-binary-path": { + "version": "1.0.1", + "from": "is-binary-path@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", "dependencies": { - "callsite": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + "binary-extensions": { + "version": "1.3.1", + "from": "binary-extensions@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.3.1.tgz" } } - } - } - }, - "parseqs": { - "version": "0.0.2", - "from": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.2.tgz", - "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.2.tgz", - "dependencies": { - "better-assert": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + }, + "is-glob": { + "version": "2.0.1", + "from": "is-glob@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", "dependencies": { - "callsite": { + "is-extglob": { "version": "1.0.0", - "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + "from": "is-extglob@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz" } } - } - } - }, - "parseuri": { - "version": "0.0.4", - "from": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.4.tgz", - "resolved": 
"https://registry.npmjs.org/parseuri/-/parseuri-0.0.4.tgz", - "dependencies": { - "better-assert": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + }, + "lodash.flatten": { + "version": "3.0.2", + "from": "lodash.flatten@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-3.0.2.tgz", "dependencies": { - "callsite": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + "lodash._baseflatten": { + "version": "3.1.4", + "from": "lodash._baseflatten@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash._baseflatten/-/lodash._baseflatten-3.1.4.tgz", + "dependencies": { + "lodash.isarguments": { + "version": "3.0.4", + "from": "lodash.isarguments@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.0.4.tgz" + }, + "lodash.isarray": { + "version": "3.0.4", + "from": "lodash.isarray@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" + } + } + }, + "lodash._isiterateecall": { + "version": "3.0.9", + "from": "lodash._isiterateecall@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" } } - } - } - }, - "ws": { - "version": "0.8.0", - "from": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", - "resolved": "https://registry.npmjs.org/ws/-/ws-0.8.0.tgz", - "dependencies": { - "options": { - "version": "0.0.6", - "from": "https://registry.npmjs.org/options/-/options-0.0.6.tgz", - "resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz" }, - "ultron": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz" + "path-is-absolute": { + "version": "1.0.0", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + }, + "readdirp": { + "version": "2.0.0", + "from": "readdirp@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.0.0.tgz", + "dependencies": { + "minimatch": { + "version": "2.0.10", + "from": "minimatch@>=2.0.10 <3.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.1", + "from": "brace-expansion@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz", + "dependencies": { + "balanced-match": { + "version": "0.2.1", + "from": "balanced-match@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.1.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "readable-stream": { + "version": "2.0.4", + "from": "readable-stream@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.4.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@>=2.0.1 <2.1.0", + "resolved": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "process-nextick-args": { + "version": "1.0.3", + "from": "process-nextick-args@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.3.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@>=0.10.0 <0.11.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "util-deprecate@>=1.0.1 <1.1.0", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } }, - "bufferutil": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-1.2.1.tgz", + "fsevents": { + "version": "1.0.5", + "from": "fsevents@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.0.5.tgz", "dependencies": { - "bindings": { + "nan": { + "version": "2.1.0", + "from": "nan@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.1.0.tgz" + }, + "node-pre-gyp": { + "version": "0.6.15", + "from": "node-pre-gyp@latest", + "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.15.tgz", + "dependencies": { + "nopt": { + "version": "3.0.4", + "from": "nopt@>=3.0.1 <3.1.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.4.tgz" + } + } + }, + "abbrev": { + "version": "1.0.7", + "from": "abbrev@1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.7.tgz" + }, + "ansi": { + "version": "0.3.0", + "from": "ansi@~0.3.0", + "resolved": "https://registry.npmjs.org/ansi/-/ansi-0.3.0.tgz" + }, + "ansi-regex": { + "version": "2.0.0", + "from": "ansi-regex@^2.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + }, + "ansi-styles": { + "version": "2.1.0", + "from": "ansi-styles@^2.1.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz" + }, + "are-we-there-yet": { + "version": "1.0.4", + "from": "are-we-there-yet@~1.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.0.4.tgz" + }, + "asn1": { + "version": "0.1.11", + "from": "asn1@0.1.11", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + }, + "assert-plus": { + "version": "0.1.5", + "from": "assert-plus@^0.1.5", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz" + }, + "async": { + "version": "1.5.0", + "from": "async@^1.4.0", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.0.tgz" + }, + "aws-sign2": { + "version": "0.6.0", + "from": "aws-sign2@~0.6.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "balanced-match": { + "version": "0.2.1", + "from": "balanced-match@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.2.1.tgz" + }, + "block-stream": { + "version": "0.0.8", + "from": "block-stream@*", + "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.8.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "boom@^2.8.x", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "brace-expansion": { + "version": "1.1.1", + "from": "brace-expansion@>=1.0.0 <2.0.0", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.1.tgz" + }, + "caseless": { + "version": "0.11.0", + "from": "caseless@~0.11.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "chalk": { + "version": "1.1.1", + "from": "chalk@^1.1.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "combined-stream@~1.0.5", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz" + }, + "commander": { + "version": "2.9.0", + "from": "commander@^2.8.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + }, + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@~1.0.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "cryptiles@2.x.x", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "ctype": { + "version": "0.5.3", + "from": "ctype@0.5.3", + "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz" + }, + "deep-extend": { + "version": "0.2.11", + "from": "deep-extend@~0.2.5", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz" + }, + "delayed-stream": { + "version": "1.0.0", + "from": "delayed-stream@~1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + }, + "delegates": { + "version": "0.1.0", + "from": "delegates@^0.1.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-0.1.0.tgz" + }, + "debug": { + "version": "0.7.4", + "from": "debug@~0.7.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" + }, + "escape-string-regexp": { + "version": "1.0.3", + "from": "escape-string-regexp@^1.0.2", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz" + }, + "extend": { + "version": "3.0.0", + "from": "extend@~3.0.0", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "forever-agent@~0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "1.0.0-rc3", + "from": "form-data@~1.0.0-rc3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.0-rc3.tgz" + }, + "fstream": { + "version": "1.0.8", + "from": "fstream@^1.0.2", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.8.tgz" + }, + "gauge": { + "version": "1.2.2", + "from": "gauge@~1.2.0", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-1.2.2.tgz" + }, + "generate-function": { + "version": "2.0.0", + "from": "generate-function@^2.0.0", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "generate-object-property@^1.1.0", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz" + }, + "graceful-fs": { + "version": "4.1.2", + "from": "graceful-fs@4.1", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz" + }, + "graceful-readlink": { + "version": "1.0.1", + "from": "graceful-readlink@>= 1.0.0", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + }, + 
"har-validator": { + "version": "2.0.2", + "from": "har-validator@~2.0.2", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.2.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "has-ansi@^2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz" + }, + "has-unicode": { + "version": "1.0.1", + "from": "has-unicode@^1.0.0", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-1.0.1.tgz" + }, + "hoek": { + "version": "2.16.3", + "from": "hoek@2.x.x", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "hawk": { + "version": "3.1.0", + "from": "hawk@~3.1.0", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.0.tgz" + }, + "http-signature": { + "version": "0.11.0", + "from": "http-signature@~0.11.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.11.0.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@*", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "ini": { + "version": "1.3.4", + "from": "ini@~1.3.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.4.tgz" + }, + "is-my-json-valid": { + "version": "2.12.2", + "from": "is-my-json-valid@^2.12.2", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.2.tgz" + }, + "is-property": { + "version": "1.0.2", + "from": "is-property@^1.0.0", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "isstream@~0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "json-stringify-safe@~5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "jsonpointer": { + "version": "2.0.0", + "from": "jsonpointer@2.0.0", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz" + }, + "lodash._basetostring": { + "version": "3.0.1", + "from": "lodash._basetostring@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash._basetostring/-/lodash._basetostring-3.0.1.tgz" + }, + "lodash._createpadding": { + "version": "3.6.1", + "from": "lodash._createpadding@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash._createpadding/-/lodash._createpadding-3.6.1.tgz" + }, + "lodash.pad": { + "version": "3.1.1", + "from": "lodash.pad@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.pad/-/lodash.pad-3.1.1.tgz" + }, + "lodash.padleft": { + "version": "3.1.1", + "from": "lodash.padleft@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.padleft/-/lodash.padleft-3.1.1.tgz" + }, + "lodash.padright": { + "version": "3.1.1", + "from": "lodash.padright@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.padright/-/lodash.padright-3.1.1.tgz" + }, + "lodash.repeat": { + "version": "3.0.1", + "from": "lodash.repeat@^3.0.0", + "resolved": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-3.0.1.tgz" + }, + "mime-db": { + "version": "1.19.0", + "from": "mime-db@~1.19.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.19.0.tgz" + }, + "mime-types": { + "version": "2.1.7", + "from": "mime-types@~2.1.7", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.7.tgz" + }, + "minimist": { + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + }, + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.3.0 <0.4.0||>=0.4.0 <0.5.0||>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + }, + "node-uuid": { + "version": "1.4.3", + "from": "node-uuid@~1.4.3", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.3.tgz" + }, + "npmlog": { "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" + "from": "npmlog@~1.2.0", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-1.2.1.tgz" + }, + "oauth-sign": { + "version": "0.8.0", + "from": "oauth-sign@~0.8.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz" + }, + "once": { + "version": "1.1.1", + "from": "once@~1.1.1", + "resolved": "https://registry.npmjs.org/once/-/once-1.1.1.tgz" + }, + "path-is-absolute": { + "version": "1.0.0", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + }, + "pinkie": { + "version": "1.0.0", + "from": "pinkie@^1.0.0", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-1.0.0.tgz" + }, + "pinkie-promise": { + "version": "1.0.0", + "from": "pinkie-promise@^1.0.0", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-1.0.0.tgz" + }, + "qs": { + "version": "5.2.0", + "from": "qs@~5.2.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-5.2.0.tgz" + }, + "readable-stream": { + "version": "1.1.13", + "from": "readable-stream@^1.1.13", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz" + }, + "request": { + "version": "2.65.0", + "from": "request@2.x", + "resolved": "https://registry.npmjs.org/request/-/request-2.65.0.tgz" + }, + "semver": { + "version": "5.0.3", + "from": "semver@~5.0.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.0.3.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "sntp@1.x.x", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@~0.10.x", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "stringstream@~0.0.4", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "strip-ansi": { + "version": "3.0.0", + "from": "strip-ansi@^3.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz" + }, + "strip-json-comments": { + "version": "0.1.3", + "from": "strip-json-comments@0.1.x", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-0.1.3.tgz" + }, + "supports-color": { + "version": "2.0.0", + "from": "supports-color@^2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + }, + "tar": { + "version": "2.2.1", + "from": "tar@~2.2.0", + "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz" + }, + "tough-cookie": { + "version": "2.2.0", + "from": "tough-cookie@~2.2.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.0.tgz" + }, + "tunnel-agent": { + "version": "0.4.1", + "from": "tunnel-agent@~0.4.1", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz" + }, + "uid-number": { + "version": "0.0.3", + "from": "uid-number@0.0.3", + "resolved": 
"https://registry.npmjs.org/uid-number/-/uid-number-0.0.3.tgz" + }, + "wrappy": { + "version": "1.0.1", + "from": "wrappy@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "xtend@^4.0.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + }, + "fstream-ignore": { + "version": "1.0.3", + "from": "fstream-ignore@~1.0.3", + "resolved": "https://registry.npmjs.org/fstream-ignore/-/fstream-ignore-1.0.3.tgz", + "dependencies": { + "minimatch": { + "version": "3.0.0", + "from": "minimatch@>=3.0.0 <4.0.0" + } + } }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" - } - } - }, - "utf-8-validate": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-1.2.1.tgz", - "dependencies": { - "bindings": { - "version": "1.2.1", - "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" + "inflight": { + "version": "1.0.4", + "from": "inflight@>=1.0.4 <2.0.0", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.4.tgz", + "dependencies": { + "once": { + "version": "1.3.2", + "from": "once@>=1.3.0 <2.0.0" + } + } }, - "nan": { - "version": "2.0.9", - "from": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.0.9.tgz" + "rc": { + "version": "1.1.2", + "from": "rc@~1.1.0", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.1.2.tgz", + "dependencies": { + "minimist": { + "version": "1.2.0", + "from": "minimist@^1.1.2", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz" + } + } + }, + "rimraf": { + "version": "2.4.3", + "from": "rimraf@~2.4.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.3.tgz", + "dependencies": { + "glob": { + "version": "5.0.15", + "from": "glob@>=5.0.14 <6.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz" + }, + "minimatch": { + "version": "3.0.0", + "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.0.tgz" + }, + "once": { + "version": "1.3.2", + "from": "once@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.3.2.tgz" + } + } + }, + "bl": { + "version": "1.0.0", + "from": "bl@~1.0.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.0.0.tgz", + "dependencies": { + "readable-stream": { + "version": "2.0.4", + "from": "readable-stream@~2.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.4.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@~1.0.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@~2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "process-nextick-args": { + "version": "1.0.3", + "from": "process-nextick-args@~1.0.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.3.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@~0.10.x", + 
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "util-deprecate@~1.0.1", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } + }, + "tar-pack": { + "version": "3.1.0", + "from": "tar-pack@~3.1.0", + "resolved": "https://registry.npmjs.org/tar-pack/-/tar-pack-3.1.0.tgz", + "dependencies": { + "readable-stream": { + "version": "1.0.33", + "from": "readable-stream@~1.0.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.1", + "from": "core-util-is@~1.0.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz" + }, + "inherits": { + "version": "2.0.1", + "from": "inherits@~2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@~0.10.x", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + } + } + }, + "rimraf": { + "version": "2.2.8", + "from": "rimraf@~2.2.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + } + } } } } } }, - "xmlhttprequest": { - "version": "1.5.0", - "from": "https://github.com/rase-/node-XMLHttpRequest/archive/a6b6f2.tar.gz", - "resolved": "https://github.com/rase-/node-XMLHttpRequest/archive/a6b6f2.tar.gz" - } - } - }, - "component-bind": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz" - }, - "component-emitter": { - "version": "1.1.2", - "from": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.1.2.tgz" - }, - "object-component": { - "version": "0.0.3", - "from": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz", - "resolved": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz" - }, - "socket.io-parser": { - "version": "2.2.4", - "from": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.4.tgz", - "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-2.2.4.tgz", - "dependencies": { - "json3": { - "version": "3.2.6", - "from": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz", - "resolved": "https://registry.npmjs.org/json3/-/json3-3.2.6.tgz" - }, - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - }, - "benchmark": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-1.0.0.tgz" - } - } - }, - "has-binary": { - "version": "0.1.6", - "from": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", - "resolved": "https://registry.npmjs.org/has-binary/-/has-binary-0.1.6.tgz", - "dependencies": { - "isarray": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "graceful-fs": { + "version": "4.1.2", + "from": "graceful-fs@>=4.1.2 
<5.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz" } } }, - "indexof": { - "version": "0.0.1", - "from": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", - "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz" - }, - "parseuri": { - "version": "0.0.2", - "from": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.2.tgz", - "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.2.tgz", + "webpack-core": { + "version": "0.6.8", + "from": "webpack-core@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/webpack-core/-/webpack-core-0.6.8.tgz", "dependencies": { - "better-assert": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "source-map": { + "version": "0.4.4", + "from": "source-map@>=0.4.1 <0.5.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", "dependencies": { - "callsite": { + "amdefine": { "version": "1.0.0", - "from": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz" + "from": "amdefine@>=0.0.4", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz" } } + }, + "source-list-map": { + "version": "0.1.5", + "from": "source-list-map@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-0.1.5.tgz" } } - }, - "to-array": { - "version": "0.1.3", - "from": "https://registry.npmjs.org/to-array/-/to-array-0.1.3.tgz", - "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.3.tgz" - }, - "backo2": { - "version": "1.0.2", - "from": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", - "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz" - } - } - }, - "stream-cache": { - "version": "0.0.2", - "from": "https://registry.npmjs.org/stream-cache/-/stream-cache-0.0.2.tgz", - "resolved": "https://registry.npmjs.org/stream-cache/-/stream-cache-0.0.2.tgz" - }, - "strip-ansi": { - "version": "3.0.0", - "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz", - "dependencies": { - "ansi-regex": { - "version": "2.0.0", - "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" - } - } - }, - "supports-color": { - "version": "3.1.1", - "from": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.1.tgz", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.1.tgz", - "dependencies": { - "has-flag": { - "version": "1.0.0", - "from": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" - } - } - }, - "webpack-dev-middleware": { - "version": "1.2.0", - "from": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-1.2.0.tgz", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-1.2.0.tgz", - "dependencies": { - "memory-fs": { - "version": "0.2.0", - "from": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.2.0.tgz", - "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.2.0.tgz" - }, - "mime": { - "version": "1.3.4", - "from": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", - "resolved": 
"https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" } } } diff --git a/package.json b/package.json index 46778efcbea115ca4d8a0290b41fe6efdfe8f28f..84982211a4666a0273041702f365e17a509c5599 100644 --- a/package.json +++ b/package.json @@ -84,6 +84,7 @@ "build": "./node_modules/webpack/bin/webpack.js", "build-watch": "./node_modules/webpack/bin/webpack.js --watch", "build-hot": "NODE_ENV=hot ./node_modules/webpack/bin/webpack.js && NODE_ENV=hot node_modules/webpack-dev-server/bin/webpack-dev-server.js --hot --inline --content-base frontend", + "shrinkwrap": "npm shrinkwrap --dev", "start": "npm run build && lein ring server" } } diff --git a/project.clj b/project.clj index 211823d7540efbf1e3cf48c981f5ed093e19f629..61cae437805ce044bee4d0657ae2ad5bec4eb3bf 100644 --- a/project.clj +++ b/project.clj @@ -9,7 +9,6 @@ "generate-sample-dataset" ["with-profile" "+generate-sample-dataset" "run"]} :dependencies [[org.clojure/clojure "1.7.0"] [org.clojure/core.async "0.1.346.0-17112a-alpha"] - [org.clojure/core.logic "0.8.10"] [org.clojure/core.match "0.3.0-alpha4"] ; optimized pattern matching library for Clojure [org.clojure/core.memoize "0.5.7"] ; needed by core.match; has useful FIFO, LRU, etc. caching mechanisms [org.clojure/data.csv "0.1.3"] ; CSV parsing / generation @@ -17,7 +16,6 @@ [org.clojure/java.jdbc "0.4.2"] ; basic jdbc access from clojure [org.clojure/math.numeric-tower "0.0.4"] ; math functions like `ceil` [org.clojure/tools.logging "0.3.1"] ; logging framework - [org.clojure/tools.macro "0.1.5"] ; tools for writing macros [org.clojure/tools.namespace "0.2.10"] [amalloy/ring-gzip-middleware "0.1.3"] ; Ring middleware to GZIP responses if client can handle it [cheshire "5.5.0"] ; fast JSON encoding (used by Ring JSON middleware) @@ -41,6 +39,7 @@ com.sun.jmx/jmxri]] [medley "0.7.0"] ; lightweight lib of useful functions [mysql/mysql-connector-java "5.1.37"] ; MySQL JDBC driver + [net.sourceforge.jtds/jtds "1.3.1"] ; Open Source SQL Server driver [org.liquibase/liquibase-core "3.4.1"] ; migration management (Java lib) [org.slf4j/slf4j-log4j12 "1.7.12"] [org.yaml/snakeyaml "1.16"] ; YAML parser (required by liquibase) diff --git a/resources/migrations/018_add_data_migrations_table.yaml b/resources/migrations/018_add_data_migrations_table.yaml new file mode 100644 index 0000000000000000000000000000000000000000..34cf546455dbd07022575394b755afbdf16a0ace --- /dev/null +++ b/resources/migrations/018_add_data_migrations_table.yaml @@ -0,0 +1,27 @@ +databaseChangeLog: + - changeSet: + id: 18 + author: camsaul + validCheckSum: 7:07d501a6e52c14691f7f895d137e565f + validCheckSum: 7:329d897d44ba9893fdafc9ce7e876d73 + changes: + - createTable: + tableName: data_migrations + columns: + - column: + name: id + type: VARCHAR(254) + constraints: + primaryKey: true + nullable: false + - column: + name: timestamp + type: DATETIME + constraints: + nullable: false + - createIndex: + tableName: data_migrations + indexName: idx_data_migrations_id + columns: + column: + name: id diff --git a/resources/migrations/019_add_schema_column_to_table.yaml b/resources/migrations/019_add_schema_column_to_table.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f8324856dee356f2c72e1aac4d30987793d0e465 --- /dev/null +++ b/resources/migrations/019_add_schema_column_to_table.yaml @@ -0,0 +1,11 @@ +databaseChangeLog: + - changeSet: + id: 19 + author: camsaul + changes: + - addColumn: + tableName: metabase_table + columns: + - column: + name: schema + type: VARCHAR(256) diff --git 
a/resources/migrations/liquibase.json b/resources/migrations/liquibase.json index 815d1513dc9eed314ee400bf161496b6d2afd20f..dd40971e4b708fa0a2b6aec31ac289f10fb2cb61 100644 --- a/resources/migrations/liquibase.json +++ b/resources/migrations/liquibase.json @@ -15,6 +15,8 @@ {"include": {"file": "migrations/014_add_view_log_table.yaml"}}, {"include": {"file": "migrations/015_add_revision_is_creation_field.yaml"}}, {"include": {"file": "migrations/016_user_last_login_allow_null.yaml"}}, - {"include": {"file": "migrations/017_add_database_is_sample_field.yaml"}} + {"include": {"file": "migrations/017_add_database_is_sample_field.yaml"}}, + {"include": {"file": "migrations/018_add_data_migrations_table.yaml"}}, + {"include": {"file": "migrations/019_add_schema_column_to_table.yaml"}} ] } diff --git a/src/metabase/api/database.clj b/src/metabase/api/database.clj index 7fa4e2a8f06d841a0489868a5d9bab011571f127..1eaf29f7457694fb3c0a61aa0f409a2b27dc11c4 100644 --- a/src/metabase/api/database.clj +++ b/src/metabase/api/database.clj @@ -17,7 +17,7 @@ (defannotation DBEngine "Param must be a valid database engine type, e.g. `h2` or `postgres`." [symb value :nillable] - (checkp-contains? (set (map name (keys driver/available-drivers))) symb value)) + (checkp-contains? (set (map name (keys @driver/available-drivers))) symb value)) (defendpoint GET "/" "Fetch all `Databases`." @@ -42,12 +42,6 @@ (sample-data/add-sample-dataset!) (sel :one Database :is_sample true)) -(defendpoint GET "/form_input" - "Values of options for the create/edit `Database` UI." - [] - {:timezones metabase.models.common/timezones - :engines driver/available-drivers}) - ;; Stub function that will eventually validate a connection string (defendpoint POST "/validate" "Validate that we can connect to a `Database`." diff --git a/src/metabase/api/routes.clj b/src/metabase/api/routes.clj index e13507ad311d9fbfb622ff952aae235640fdc8b8..1a8e50d05bbad112916351dce85c7be2fe201bc6 100644 --- a/src/metabase/api/routes.clj +++ b/src/metabase/api/routes.clj @@ -36,8 +36,8 @@ (context "/field" [] (+auth field/routes)) (context "/foreignkey" [] (+auth fk/routes)) (GET "/health" [] (if ((resolve 'metabase.core/initialized?)) - {:status 200 :body {:status "ok"}} - {:status 503 :body {:status "initializing" :progress ((resolve 'metabase.core/initialization-progress))}})) + {:status 200 :body {:status "ok"}} + {:status 503 :body {:status "initializing" :progress ((resolve 'metabase.core/initialization-progress))}})) (context "/notify" [] (+apikey notify/routes)) (context "/revision" [] (+auth revision/routes)) (context "/session" [] session/routes) diff --git a/src/metabase/api/setup.clj b/src/metabase/api/setup.clj index 42c1d1d5259203c7e7e9157f12ab662abadf8ad4..2e9f4659da9194805e4623e4ed1e73d782c17fe4 100644 --- a/src/metabase/api/setup.clj +++ b/src/metabase/api/setup.clj @@ -1,6 +1,7 @@ (ns metabase.api.setup (:require [compojure.core :refer [defroutes POST]] - [metabase.api.common :refer :all] + (metabase.api [common :refer :all] + [database :refer [annotation:DBEngine]]) [metabase.db :refer :all] [metabase.driver :as driver] [metabase.events :as events] @@ -11,11 +12,6 @@ [metabase.setup :as setup] [metabase.util :as u])) -(defannotation DBEngine - "Param must be a valid database engine type, e.g. `h2` or `postgres`." - [symb value :nillable] - (checkp-contains? (set (map name (keys driver/available-drivers))) symb value)) - (defannotation SetupToken "Check that param matches setup token or throw a 403." 
[symb value] @@ -46,6 +42,7 @@ (set-user-password (:id new-user) password) ;; set a couple preferences (setting/set :site-name site_name) + (setting/set :admin-email email) (setting/set :anon-tracking-enabled (or allow_tracking "true")) ;; setup database (if needed) (when (driver/is-engine? engine) diff --git a/src/metabase/config.clj b/src/metabase/config.clj index 8b15bcaf6fc3f395f9c44bec536b93bd29193adf..dd1aea33c32543a0934eb373fa83f91bdd18d8e2 100644 --- a/src/metabase/config.clj +++ b/src/metabase/config.clj @@ -23,7 +23,7 @@ ;; check here for all available options: ;; https://github.com/ring-clojure/ring/blob/master/ring-jetty-adapter/src/ring/adapter/jetty.clj :mb-jetty-port "3000" - :mb-jetty-join "false" + :mb-jetty-join "true" ;; Other Application Settings :mb-password-complexity "normal" ;:mb-password-length "8" diff --git a/src/metabase/core.clj b/src/metabase/core.clj index 7a5ab066e7e2e991e0e0d9de650f4e5e19885cd9..8b794fb0220fa44b7e3000cedc137a19880a0ab9 100644 --- a/src/metabase/core.clj +++ b/src/metabase/core.clj @@ -32,6 +32,8 @@ (defsetting -site-url "The base URL of this Metabase instance, e.g. \"http://metabase.my-company.com\"") +(defsetting admin-email "The email address users should be referred to if they encounter a problem.") + (defsetting anon-tracking-enabled "Enable the collection of anonymous usage data in order to help Metabase improve." "true") (defn site-url @@ -71,7 +73,7 @@ ;;; ## ---------------------------------------- LIFECYCLE ---------------------------------------- -(def ^:private metabase-initialization-progress +(defonce ^:private metabase-initialization-progress (atom 0)) (defn initialized? @@ -166,16 +168,23 @@ "Start the embedded Jetty web server." [] (when-not @jetty-instance - (let [jetty-config (cond-> (m/filter-vals identity {:port (config/config-int :mb-jetty-port) - :host (config/config-str :mb-jetty-host) - :max-threads (config/config-int :mb-jetty-maxthreads) - :min-threads (config/config-int :mb-jetty-minthreads) - :max-queued (config/config-int :mb-jetty-maxqueued) - :max-idle-time (config/config-int :mb-jetty-maxidletime)}) - (config/config-str :mb-jetty-join) (assoc :join? (config/config-bool :mb-jetty-join)) - (config/config-str :mb-jetty-daemon) (assoc :daemon? (config/config-bool :mb-jetty-daemon)))] - (log/info "Launching Embedded Jetty Webserver with config:\n" (with-out-str (clojure.pprint/pprint jetty-config))) - (->> (ring-jetty/run-jetty app jetty-config) + (let [jetty-ssl-config (m/filter-vals identity {:ssl-port (config/config-int :mb-jetty-ssl-port) + :keystore (config/config-str :mb-jetty-ssl-keystore) + :key-password (config/config-str :mb-jetty-ssl-keystore-password) + :truststore (config/config-str :mb-jetty-ssl-truststore) + :trust-password (config/config-str :mb-jetty-ssl-truststore-password)}) + jetty-config (cond-> (m/filter-vals identity {:port (config/config-int :mb-jetty-port) + :host (config/config-str :mb-jetty-host) + :max-threads (config/config-int :mb-jetty-maxthreads) + :min-threads (config/config-int :mb-jetty-minthreads) + :max-queued (config/config-int :mb-jetty-maxqueued) + :max-idle-time (config/config-int :mb-jetty-maxidletime)}) + (config/config-str :mb-jetty-daemon) (assoc :daemon? (config/config-bool :mb-jetty-daemon)) + (config/config-str :mb-jetty-ssl) (-> (assoc :ssl? 
true) + (merge jetty-ssl-config)))] + (log/info "Launching Embedded Jetty Webserver with config:\n" (with-out-str (clojure.pprint/pprint (m/filter-keys (fn [k] (not (re-matches #".*password.*" (str k)))) jetty-config)))) + ;; NOTE: we always start jetty w/ join=false so we can start the server first then do init in the background + (->> (ring-jetty/run-jetty app (assoc jetty-config :join? false)) (reset! jetty-instance))))) (defn stop-jetty @@ -198,14 +207,15 @@ ;; run our initialization process (init) ;; Ok, now block forever while Jetty does its thing - (.join ^org.eclipse.jetty.server.Server @jetty-instance) + (when (config/config-bool :mb-jetty-join) + (.join ^org.eclipse.jetty.server.Server @jetty-instance)) (catch Exception e (.printStackTrace e) (log/error "Metabase Initialization FAILED: " (.getMessage e))))) (defn- run-cmd [cmd & args] (let [cmd->fn {:migrate (fn [direction] - (db/migrate (keyword direction)))}] + (db/migrate @db/db-connection-details (keyword direction)))}] (if-let [f (cmd->fn cmd)] (do (apply f args) (println "Success.") diff --git a/src/metabase/db.clj b/src/metabase/db.clj index 02566adc2d08eeb831954f761052a5fffe1d3d57..a96c00c0e76c20e041f8516317e0498610bd4d87 100644 --- a/src/metabase/db.clj +++ b/src/metabase/db.clj @@ -54,7 +54,7 @@ codec/form-decode walk/keywordize-keys)))) -(def ^:private db-connection-details +(def db-connection-details "Connection details that can be used when pretending the Metabase DB is itself a `Database` (e.g., to use the Generic SQL driver functions on the Metabase DB itself)." (delay (or (when-let [uri (config/config-str :mb-db-connection-uri)] @@ -75,14 +75,16 @@ :user (config/config-str :mb-db-user) :password (config/config-str :mb-db-pass)})))) -(def ^:private jdbc-connection-details - "Connection details for Korma / JDBC." - (delay (let [details @db-connection-details] - (case (:type details) - :h2 (kdb/h2 (assoc details :naming {:keys s/lower-case - :fields s/upper-case})) - :mysql (kdb/mysql (assoc details :db (:dbname details))) - :postgres (kdb/postgres (assoc details :db (:dbname details))))))) +(defn jdbc-details + "Takes our own MB details map and formats them properly for connection details for Korma / JDBC." + [db-details] + {:pre [(map? db-details)]} + ;; TODO: it's probably a good idea to put some more validation here and be really strict about what's in `db-details` + (case (:type db-details) + :h2 (kdb/h2 (assoc db-details :naming {:keys s/lower-case + :fields s/upper-case})) + :mysql (kdb/mysql (assoc db-details :db (:dbname db-details))) + :postgres (kdb/postgres (assoc db-details :db (:dbname db-details))))) ;; ## MIGRATE @@ -95,27 +97,27 @@ * `:up` - Migrate up * `:down` - Rollback *all* migrations + * `:down-one` - Rollback a single migration * `:print` - Just print the SQL for running the migrations, don't actually run them. * `:release-locks` - Manually release migration locks left by an earlier failed migration. (This shouldn't be necessary now that we run migrations inside a transaction, but is available just in case)." - ([direction] - (migrate @jdbc-connection-details direction)) - ([jdbc-connection-details direction] - (try - (jdbc/with-db-transaction [conn jdbc-connection-details] - (let [^Database database (-> (DatabaseFactory/getInstance) - (.findCorrectDatabaseImplementation (JdbcConnection. (jdbc/get-connection conn)))) - ^Liquibase liquibase (Liquibase. changelog-file (ClassLoaderResourceAccessor.) 
database)] - (case direction - :up (.update liquibase "") - :down (.rollback liquibase 10000 "") - :print (let [writer (StringWriter.)] - (.update liquibase "" writer) - (.toString writer)) - :release-locks (.forceReleaseLocks liquibase)))) - (catch Throwable e - (throw (DatabaseException. e)))))) + [db-details direction] + (try + (jdbc/with-db-transaction [conn (jdbc-details db-details)] + (let [^Database database (-> (DatabaseFactory/getInstance) + (.findCorrectDatabaseImplementation (JdbcConnection. (jdbc/get-connection conn)))) + ^Liquibase liquibase (Liquibase. changelog-file (ClassLoaderResourceAccessor.) database)] + (case direction + :up (.update liquibase "") + :down (.rollback liquibase 10000 "") + :down-one (.rollback liquibase 1 "") + :print (let [writer (StringWriter.)] + (.update liquibase "" writer) + (.toString writer)) + :release-locks (.forceReleaseLocks liquibase)))) + (catch Throwable e + (throw (DatabaseException. e))))) ;; ## SETUP-DB @@ -146,23 +148,24 @@ (defn setup-db "Do general perparation of database by validating that we can connect. Caller can specify if we should run any pending database migrations." - [& {:keys [auto-migrate] - :or {auto-migrate true}}] + [& {:keys [db-details auto-migrate] + :or {db-details @db-connection-details + auto-migrate true}}] (reset! setup-db-has-been-called? true) ;; Test DB connection and throw exception if we have any troubles connecting (log/info "Verifying Database Connection ...") - (assert (db-can-connect? {:engine (:type @db-connection-details) - :details @db-connection-details}) + (assert (db-can-connect? {:engine (:type db-details) + :details db-details}) "Unable to connect to Metabase DB.") (log/info "Verify Database Connection ... CHECK") ;; Run through our DB migration process and make sure DB is fully prepared (if auto-migrate - (migrate :up) + (migrate db-details :up) ;; if we are not doing auto migrations then print out migration sql for user to run manually ;; then throw an exception to short circuit the setup process and make it clear we can't proceed - (let [sql (migrate :print)] + (let [sql (migrate db-details :print)] (log/info (str "Database Upgrade Required\n\n" "NOTICE: Your database requires updates to work with this version of Metabase. " "Please execute the following sql commands on your database before proceeding.\n\n" @@ -173,7 +176,7 @@ (log/info "Database Migrations Current ... CHECK") ;; Establish our 'default' Korma DB Connection - (kdb/default-connection (kdb/create-db @jdbc-connection-details)) + (kdb/default-connection (kdb/create-db (jdbc-details db-details))) ;; Do any custom code-based migrations now that the db structure is up to date ;; NOTE: we use dynamic resolution to prevent circular dependencies diff --git a/src/metabase/db/migrations.clj b/src/metabase/db/migrations.clj index 792098c84c88fbdc2f8e8b5ac1cae9e50b997bc7..a4460fc3035eefd4ddc3794a9cbfbe5e3a56fef0 100644 --- a/src/metabase/db/migrations.clj +++ b/src/metabase/db/migrations.clj @@ -1,28 +1,89 @@ (ns metabase.db.migrations + "Clojure-land data migration definitions and fns for running them." 
(:require [clojure.tools.logging :as log] [korma.core :as k] [metabase.db :as db] (metabase.models [card :refer [Card]] [database :refer [Database]] + [table :refer [Table]] [setting :as setting]) - [metabase.sample-data :as sample-data])) + [metabase.sample-data :as sample-data] + [metabase.util :as u])) -(defn- set-card-database-and-table-ids - "Upgrade for the `Card` model when `:database_id`, `:table_id`, and `:query_type` were added and needed populating. +;;; # Migration Helpers - This reads through all saved cards, extracts the JSON from the `:dataset_query`, and tries to populate - the values for `:database_id`, `:table_id`, and `:query_type` if possible." +(defn- migration-ran? [migration-name] + (-> (k/select :data_migrations + (k/aggregate (count :*) :count) + (k/where {:id (name migration-name)})) + first :count (> 0))) + +(defn- run-migration-if-needed + "Run migration defined by MIGRATION-VAR if needed. + + (run-migration-if-needed #'set-card-database-and-table-ids)" + [migration-var] + (let [migration-name (name (:name (meta migration-var)))] + (when-not (migration-ran? migration-name) + (log/info (format "Running data migration '%s'..." migration-name)) + (@migration-var) + (k/insert "data_migrations" + (k/values {:id migration-name + :timestamp (u/new-sql-timestamp)})) + (log/info "[ok]")))) + +(def ^:private data-migrations (atom [])) + +(defmacro ^:private defmigration + "Define a new data migration. This is just a simple wrapper around `defn-` that adds the resulting var to that `data-migrations` atom." + [migration-name & body] + `(do (defn- ~migration-name [] ~@body) + (swap! data-migrations conj #'~migration-name))) + +(defn run-all + "Run all data migrations defined by `defmigration`." [] + (dorun (map run-migration-if-needed @data-migrations))) + + +;;; # Migration Definitions + +;; Upgrade for the `Card` model when `:database_id`, `:table_id`, and `:query_type` were added and needed populating. +;; +;; This reads through all saved cards, extracts the JSON from the `:dataset_query`, and tries to populate +;; the values for `:database_id`, `:table_id`, and `:query_type` if possible. +(defmigration set-card-database-and-table-ids ;; only execute when `:database_id` column on all cards is `nil` (when (= 0 (:cnt (first (k/select Card (k/aggregate (count :*) :cnt) (k/where (not= :database_id nil)))))) - (log/info "Data migration: Setting database/table/type fields on all Cards.") (doseq [{id :id {:keys [type] :as dataset-query} :dataset_query} (db/sel :many [Card :id :dataset_query])] (when type ;; simply resave the card with the dataset query which will automatically set the database, table, and type (db/upd Card id :dataset_query dataset-query))))) -(defn run-all - "Run all coded data migrations." - [] - ;; Append to the bottom of this list so that these run in chronological order - (set-card-database-and-table-ids)) + +;; Set the `:ssl` key in `details` to `false` for all existing MongoDB `Databases`. +;; UI was automatically setting `:ssl` to `true` for every database added as part of the auto-SSL detection. +;; Since Mongo did *not* support SSL, all existing Mongo DBs should actually have this key set to `false`. 
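Before the Mongo SSL migration that follows, a brief illustration of how the helpers above fit together may help: anything defined with `defmigration` gets registered in the `data-migrations` atom, and `run-all` runs it at most once, recording a row in the `data_migrations` table added by migration 018. The sketch below is hypothetical (the migration name and body are invented) and assumes it sits inside this same `metabase.db.migrations` namespace, since `defmigration` is private:

    ;; Hypothetical example -- not part of this patch. Written as it would appear
    ;; inside metabase.db.migrations, where the private `defmigration` macro is in scope.
    (defmigration example-log-only-migration
      ;; The body runs once; `run-migration-if-needed` then inserts a row keyed by the
      ;; migration name into `data_migrations`, so later calls to `run-all` skip it.
      (log/info "Nothing to migrate in this example."))

    ;; `run-all` is what executes any still-pending migrations, e.g. once `setup-db`
    ;; has brought the schema up to date:
    (comment
      (run-all))
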
+(defmigration set-mongodb-databases-ssl-false + (doseq [{:keys [id details]} (db/sel :many :fields [Database :id :details] :engine "mongo")] + (db/upd Database id, :details (assoc details :ssl false)))) + + +;; Set default values for :schema in existing tables now that we've added the column +;; That way sync won't get confused next time around +(defmigration set-default-schemas + (doseq [[engine default-schema] [["postgres" "public"] + ["h2" "PUBLIC"]]] + (k/update Table + (k/set-fields {:schema default-schema}) + (k/where {:schema nil + :db_id [in (k/subselect Database + (k/fields :id) + (k/where {:engine engine}))]})))) + + +;; Populate the initial value for the `:admin-email` setting for anyone who hasn't done it yet +(defmigration set-admin-email + (when-not (setting/get :admin-email) + (when-let [email (db/sel :one :field ['User :email] (k/where {:is_superuser true :is_active true}))] + (setting/set :admin-email email)))) diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj index 8c9d689facada85c5b4c736330b279b20ca632bf..255036bc1035524be6c4ad8492b95caf91ad13be 100644 --- a/src/metabase/driver.clj +++ b/src/metabase/driver.clj @@ -1,11 +1,11 @@ (ns metabase.driver - (:require clojure.java.classpath + (:require [clojure.java.classpath :as classpath] [clojure.string :as s] [clojure.tools.logging :as log] + [clojure.tools.namespace.find :as ns-find] [medley.core :as m] [metabase.db :refer [ins sel upd]] - (metabase.driver [interface :as i] - [query-processor :as qp]) + [metabase.driver.query-processor :as qp] (metabase.models [database :refer [Database]] [query-execution :refer [QueryExecution]]) [metabase.models.setting :refer [defsetting]] @@ -13,47 +13,294 @@ (declare -dataset-query query-fail query-complete save-query-execution) -;; ## CONFIG +;;; ## INTERFACE + CONSTANTS + +(def ^:const max-sync-lazy-seq-results + "The maximum number of values we should return when using `field-values-lazy-seq`. + This many is probably fine for inferring special types and what-not; we don't want + to scan millions of values at any rate." + 10000) + +(def ^:const connection-error-messages + "Generic error messages that drivers should return in their implementation of `humanize-connection-error-message`." + {:cannot-connect-check-host-and-port "Hmm, we couldn't connect to the database. Make sure your host and port settings are correct." + :database-name-incorrect "Looks like the database name is incorrect." + :invalid-hostname "It looks like your host is invalid. Please double-check it and try again." + :password-incorrect "Looks like your password is incorrect." + :password-required "Looks like you forgot to enter your password." + :username-incorrect "Looks like your username is incorrect." + :username-or-password-incorrect "Looks like the username or password is incorrect."}) + +(def ^:private ^:const feature->required-fns + "Map of optional driver features (as keywords) to a set of functions drivers that support that feature must define." + {:foreign-keys #{:table-fks} + :nested-fields #{:active-nested-field-name->type} + :set-timezone nil + :standard-deviation-aggregations nil + :unix-timestamp-special-type-fields nil}) + +(def ^:private ^:const optional-features + (set (keys feature->required-fns))) + +(def ^:private ^:const required-fns + #{:can-connect? 
+ :active-tables + :active-column-names->type + :table-pks + :field-values-lazy-seq + :process-query}) + +(def ^:private ^:const optional-fns + #{:humanize-connection-error-message + :sync-in-context + :process-query-in-context + :table-rows-seq + :field-avg-length + :field-percent-urls + :driver-specific-sync-field!}) + +(defn verify-driver + "Verify that a Metabase DB driver contains the expected properties and that they are the correct type." + [{:keys [driver-name details-fields features], :as driver}] + ;; Check :driver-name is a string + (assert driver-name + "Missing property :driver-name.") + (assert (string? driver-name) + ":driver-name must be a string.") + + ;; Check the :details-fields + (assert details-fields + "Driver is missing property :details-fields.") + (assert (vector? details-fields) + ":details-fields should be a vector.") + (doseq [f details-fields] + (assert (map? f) + (format "Details fields must be maps: %s" f)) + (assert (:name f) + (format "Details field %s is missing a :name property." f)) + (assert (:display-name f) + (format "Details field %s is missing a :display-name property." f)) + (when (:type f) + (assert (contains? #{:string :integer :password :boolean} (:type f)) + (format "Invalid type %s in details field %s." (:type f) f))) + (when (:default f) + (assert (not (:placeholder f)) + (format "Fields should not define both :default and :placeholder: %s" f)) + (assert (not (:required f)) + (format "Fields that define a :default cannot be :required: %s" f)))) -(defsetting report-timezone "Connection timezone to use when executing queries. Defaults to system timezone.") + ;; Check that all required functions are defined + (doseq [f required-fns] + (assert (f driver) + (format "Missing fn: %s" f)) + (assert (fn? (f driver)) + (format "Not a fn: %s" (f driver)))) + + ;; Check that all features declared are valid + (when features + (assert (and (set? features) + (every? keyword? features)) + ":features must be a set of keywords.") + (doseq [feature features] + (assert (contains? optional-features feature) + (format "Not a valid feature: %s" feature)) + (doseq [f (feature->required-fns feature)] + (assert (f driver) + (format "Drivers that support feature %s must have fn %s." feature f)) + (assert (fn? (f driver)) + (format "Not a fn: %s" f))))) + + ;; Check that the optional fns, if included, are actually fns + (doseq [f optional-fns] + (when (f driver) + (assert (fn? (f driver)) + (format "Not a fn: %s" f))))) + +(defmacro defdriver + "Define and validate a new Metabase DB driver. + + All drivers must include the following keys: + +#### PROPERTIES + +* `:driver-name` + + A human-readable string naming the DB this driver works with, e.g. `\"PostgreSQL\"`. + +* `:details-fields` + + A vector of maps that contain information about connection properties that should + be exposed to the user for databases that will use this driver. This information is used to build the UI for editing + a `Database` `details` map, and for validating it on the Backend. It should include things like `host`, + `port`, and other driver-specific parameters. Each field information map should have the following properties: + + * `:name` + + The key that should be used to store this property in the `details` map. + + * `:display-name` + + Human-readable name that should be displayed to the User in UI for editing this field. + + * `:type` *(OPTIONAL)* + + `:string`, `:integer`, `:boolean`, or `:password`. Defaults to `:string`. 
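To ground the field-map properties so far (and the `:default`, `:placeholder`, and `:required` options described next), here is a hedged, hypothetical `:details-fields` value that satisfies the assertions in `verify-driver` above; the field names are illustrative assumptions, not Metabase's own:

    ;; Illustrative sketch only -- every entry has :name and :display-name, any :type is
    ;; one of the four allowed keywords, and :default is never combined with :placeholder
    ;; or :required.
    (def example-details-fields
      [{:name         "host"
        :display-name "Host"
        :default      "localhost"}   ; a :default excludes :placeholder and :required
       {:name         "port"
        :display-name "Port"
        :type         :integer       ; :type must be :string, :integer, :password, or :boolean
        :default      5432}
       {:name         "password"
        :display-name "Password"
        :type         :password
        :placeholder  "*******"}])
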
+ + * `:default` *(OPTIONAL)* + + A default value for this field if the user hasn't set an explicit value. This is shown in the UI as a placeholder. + + * `:placeholder` *(OPTIONAL)* + + Placeholder value to show in the UI if user hasn't set an explicit value. Similar to `:default`, but this value is + *not* saved to `:details` if no explicit value is set. Since `:default` values are also shown as placeholders, you + cannot specify both `:default` and `:placeholder`. + + * `:required` *(OPTIONAL)* + + Is this property required? Defaults to `false`. + +* `:features` *(OPTIONAL)* + + A set of keyword names of optional features supported by this driver, such as `:foreign-keys`. + +#### FUNCTIONS + +* `(can-connect? [details-map])` + + Check whether we can connect to a `Database` with DETAILS-MAP and perform a simple query. For example, a SQL database might + try running a query like `SELECT 1;`. This function should return `true` or `false`. + +* `(active-tables [database])` + + Return a set of maps containing information about the active tables/views, collections, or equivalent that currently exist in DATABASE. + Each map should contain the key `:name`, which is the string name of the table. For databases that have a concept of schemas, + this map should also include the string name of the table's `:schema`. + +* `(active-column-names->type [table])` + + Return a map of string names of active columns (or equivalent) -> `Field` `base_type` for TABLE (or equivalent). + +* `(table-pks [table])` + + Return a set of string names of active Fields that are primary keys for TABLE (or equivalent). +* `(field-values-lazy-seq [field])` -;; ## Constants + Return a lazy sequence of all values of FIELD. + This is used to implement `mark-json-field!`, and fallback implentations of `mark-no-preview-display-field!` and `mark-url-field!` + if drivers *don't* implement `field-avg-length` and `field-percent-urls`, respectively. -(def ^:const available-drivers - "Available DB drivers." - {:h2 {:id "h2" - :name "H2"} - :postgres {:id "postgres" - :name "Postgres"} - :mongo {:id "mongo" - :name "MongoDB"} - :mysql {:id "mysql" - :name "MySQL"}}) +* `(process-query [query])` + + Process a native or structured QUERY. This function is called by `metabase.driver/process-query` after performing various driver-unspecific + steps like Query Expansion and other preprocessing. + +* `(table-fks [table])` *(REQUIRED FOR DRIVERS THAT SUPPORT `:foreign-keys`)* + + Return a set of maps containing info about FK columns for TABLE. + Each map should contain the following keys: + + * `fk-column-name` + * `dest-table-name` + * `dest-column-name` + +* `(active-nested-field-name->type [field])` *(REQUIRED FOR DRIVERS THAT SUPPORT `:nested-fields`)* + + Return a map of string names of active child `Fields` of FIELD -> `Field.base_type`. + +* `(humanize-connection-error-message [message])` *(OPTIONAL)* + + Return a humanized (user-facing) version of an connection error message string. + Generic error messages are provided in the constant `connection-error-messages`; return one of these whenever possible. + +* `(sync-in-context [database f])` *(OPTIONAL)* + + Drivers may provide this function if they need to do special setup before a sync operation such as `sync-database!`. The sync + operation itself is encapsulated as the lambda F, which must be called with no arguments. 
+ + (defn sync-in-context [database f] + (with-jdbc-metadata [_ database] + (f))) + +* `(process-query-in-context [f])` *(OPTIONAL)* + + Similar to `sync-in-context`, but for running queries rather than syncing. This should be used to do things like open DB connections + that need to remain open for the duration of post-processing. This function follows a middleware pattern and is injected into the QP + middleware stack immediately after the Query Expander; in other words, it will receive the expanded query. + See the Mongo and H2 drivers for examples of how this is intended to be used. + +* `(table-rows-seq [database table-name])` *(OPTIONAL)* + + Return a sequence of all the rows in a table with a given TABLE-NAME. + Currently, this is only used for iterating over the values in a `_metabase_metadata` table. As such, the results are not expected to be returned lazily. + +* `(field-avg-length [field])` *(OPTIONAL)* + + If possible, provide an efficient DB-level function to calculate the average length of non-nil values of textual FIELD, which is used to determine whether a `Field` + should be marked as a `:category`. If this function is not provided, a fallback implementation that iterates over results in Clojure-land is used instead. + +* `(field-percent-urls [field])` *(OPTIONAL)* + + If possible, provide an efficient DB-level function to calculate what percentage of non-nil values of textual FIELD are valid URLs, which is used to determine + whether a `Field` should be marked as a `:url`. If this function is not provided, a fallback implementation that iterates over results in Clojure-land is used instead. + +* `(driver-specific-sync-field! [field])` *(OPTIONAL)* + + This is a chance for drivers to do custom `Field` syncing specific to their database. + For example, the Postgres driver can mark Postgres JSON fields as `special_type = json`. + As with the other Field syncing functions in `metabase.driver.sync`, this method should return the modified FIELD, if any, or `nil`." + [driver-name driver-map] + `(def ~(vary-meta driver-name assoc :metabase.driver/driver (keyword driver-name)) + (let [m# ~driver-map] + (verify-driver m#) + m#))) + + +;;; ## CONFIG + +(defsetting report-timezone "Connection timezone to use when executing queries. Defaults to system timezone.") + +(defn- -available-drivers [] + (->> (for [namespace (->> (ns-find/find-namespaces (classpath/classpath)) + (filter (fn [ns-symb] + (re-matches #"^metabase\.driver\.[a-z0-9_]+$" (name ns-symb)))))] + (do (require namespace) + (->> (ns-publics namespace) + (map (fn [[symb varr]] + (when (::driver (meta varr)) + {(keyword symb) (select-keys @varr [:details-fields + :driver-name + :features])}))) + (into {})))) + (into {}))) + +(def available-drivers + "Delay to a map of info about available drivers." + (delay (-available-drivers))) (defn is-engine? - "Predicate function which validates if the given argument represents a valid driver identifier." + "Is ENGINE a valid driver name?" [engine] - (if (not (nil? engine)) - (contains? (set (map name (keys available-drivers))) (name engine)) - false)) + (when engine + (contains? (set (keys @available-drivers)) (keyword engine)))) (defn class->base-type "Return the `Field.base_type` that corresponds to a given class returned by the DB."
[klass] - (or ({Boolean :BooleanField - Double :FloatField - Float :FloatField - Integer :IntegerField - Long :IntegerField - String :TextField - java.math.BigDecimal :DecimalField - java.math.BigInteger :BigIntegerField - java.sql.Date :DateField - java.sql.Timestamp :DateTimeField - java.util.Date :DateField - java.util.UUID :TextField - org.postgresql.util.PGobject :UnknownField} klass) + (or ({Boolean :BooleanField + Double :FloatField + Float :FloatField + Integer :IntegerField + Long :IntegerField + String :TextField + java.math.BigDecimal :DecimalField + java.math.BigInteger :BigIntegerField + java.sql.Date :DateField + java.sql.Timestamp :DateTimeField + java.util.Date :DateField + java.util.UUID :TextField + org.postgresql.util.PGobject :UnknownField} klass) (cond (isa? klass clojure.lang.IPersistentMap) :DictionaryField) (do (log/warn (format "Don't know how to map class '%s' to a Field base_type, falling back to :UnknownField." klass)) @@ -65,13 +312,12 @@ "Return the driver instance that should be used for given ENGINE. This loads the corresponding driver if needed; it is expected that it resides in a var named - metabase.driver.<engine>/driver" + metabase.driver.<engine>/<engine>" [engine] - {:pre [(keyword? engine) - (contains? (set (keys available-drivers)) engine)]} + {:pre [engine]} (let [nmspc (symbol (format "metabase.driver.%s" (name engine)))] (require nmspc) - @(ns-resolve nmspc 'driver))) + @(ns-resolve nmspc (symbol (name engine))))) ;; Can the type of a DB change? @@ -89,15 +335,21 @@ ;; ## Implementation-Agnostic Driver API +(def ^:private ^:const can-connect-timeout-ms + "Consider `can-connect?`/`can-connect-with-details?` to have failed after this many milliseconds." + 5000) + (defn can-connect? "Check whether we can connect to DATABASE and perform a basic query (such as `SELECT 1`)." [database] {:pre [(map? database)]} - (try - (i/can-connect? (engine->driver (:engine database)) database) - (catch Throwable e - (log/error "Failed to connect to database:" (.getMessage e)) - false))) + (let [driver (engine->driver (:engine database))] + (try + (u/with-timeout can-connect-timeout-ms + ((:can-connect? driver) (:details database))) + (catch Throwable e + (log/error "Failed to connect to database:" (.getMessage e)) + false)))) (defn can-connect-with-details? "Check whether we can connect to a database with ENGINE and DETAILS-MAP and perform a basic query. @@ -107,15 +359,18 @@ (can-connect-with-details? :postgres {:host \"localhost\", :port 5432, ...})" [engine details-map & [rethrow-exceptions]] {:pre [(keyword? engine) - (contains? (set (keys available-drivers)) engine) + (is-engine? engine) (map? details-map)]} - (let [driver (engine->driver engine)] + (let [{:keys [humanize-connection-error-message], :as driver} (engine->driver engine)] (try - (i/can-connect-with-details? driver details-map) + (u/with-timeout can-connect-timeout-ms + ((:can-connect? driver) details-map)) (catch Throwable e (log/error "Failed to connect to database:" (.getMessage e)) (when rethrow-exceptions - (let [message (i/humanize-connection-error-message driver (.getMessage e))] + (let [^String message ((or humanize-connection-error-message + identity) + (.getMessage e))] (throw (Exception. message)))) false)))) @@ -159,31 +414,32 @@ [query {:keys [executed_by] :as options}] {:pre [(integer? 
executed_by)]} - (let [query-execution {:uuid (.toString (java.util.UUID/randomUUID)) - :executor_id executed_by - :json_query query - :query_id nil - :version 0 - :status :starting - :error "" - :started_at (u/new-sql-timestamp) - :finished_at (u/new-sql-timestamp) - :running_time 0 - :result_rows 0 - :result_file "" - :result_data "{}" - :raw_query "" - :additional_info ""}] - (let [query-execution (assoc query-execution :start_time_millis (System/currentTimeMillis))] - (try - (let [query-result (process-query query)] - (when-not (contains? query-result :status) - (throw (Exception. "invalid response from database driver. no :status provided"))) - (when (= :failed (:status query-result)) - (throw (Exception. ^String (get query-result :error "general error")))) - (query-complete query-execution query-result)) - (catch Exception e - (query-fail query-execution (.getMessage e))))))) + (let [query-execution {:uuid (.toString (java.util.UUID/randomUUID)) + :executor_id executed_by + :json_query query + :query_id nil + :version 0 + :status :starting + :error "" + :started_at (u/new-sql-timestamp) + :finished_at (u/new-sql-timestamp) + :running_time 0 + :result_rows 0 + :result_file "" + :result_data "{}" + :raw_query "" + :additional_info "" + :start_time_millis (System/currentTimeMillis)}] + (try + (let [query-result (process-query query)] + (when-not (contains? query-result :status) + (throw (Exception. "invalid response from database driver. no :status provided"))) + (when (= :failed (:status query-result)) + (throw (Exception. ^String (get query-result :error "general error")))) + (query-complete query-execution query-result)) + (catch Exception e + (log/error (u/format-color 'red "Query failure: %s" (.getMessage e))) + (query-fail query-execution (.getMessage e)))))) (defn query-fail "Save QueryExecution state and construct a failed query response" diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj index de9df5ca5cc7e507526c03032a681b9aad684cc8..46db081be22e043df1fee1fb961fbb3ba7e8f977 100644 --- a/src/metabase/driver/generic_sql.clj +++ b/src/metabase/driver/generic_sql.clj @@ -3,20 +3,12 @@ [clojure.tools.logging :as log] [korma.core :as k] [korma.sql.utils :as utils] - [metabase.driver :as driver] - (metabase.driver [interface :refer [max-sync-lazy-seq-results IDriver ISyncDriverTableFKs ISyncDriverFieldAvgLength ISyncDriverFieldPercentUrls]] - [sync :as driver-sync]) - (metabase.driver.generic-sql [interface :as i] - [query-processor :as qp] + [metabase.driver :refer [max-sync-lazy-seq-results defdriver]] + (metabase.driver.generic-sql [query-processor :as qp] [util :refer :all]) + [metabase.models.field :as field] [metabase.util :as u])) -(def ^:const features - "Features supported by *all* Generic SQL drivers." - #{:foreign-keys - :standard-deviation-aggregations - :unix-timestamp-special-type-fields}) - (def ^:private ^:const field-values-lazy-seq-chunk-size "How many Field values should we fetch at a time for `field-values-lazy-seq`?" ;; Hopefully this is a good balance between @@ -25,53 +17,43 @@ ;; 3. Not fetching too many results for things like mark-json-field! which will fail after the first result that isn't valid JSON 500) -(defn- can-connect-with-details? [driver details] - (let [connection (i/connection-details->connection-spec driver details)] +(defn- can-connect? 
[connection-details->spec details] + (let [connection (connection-details->spec details)] (= 1 (-> (k/exec-raw connection "SELECT 1" :results) first vals first)))) -(defn- can-connect? [driver database] - (can-connect-with-details? driver (i/database->connection-details driver database))) - -(defn- wrap-process-query-middleware [_ qp] - (fn [query] - (qp query))) - -(defn- process-query [_ query] +(defn- process-query [query] (qp/process-and-run query)) -(defn- sync-in-context [_ database do-sync-fn] +(defn- sync-in-context [database do-sync-fn] (with-jdbc-metadata [_ database] (do-sync-fn))) -(defn- active-table-names [_ database] +(defn- active-tables [excluded-schemas database] (with-jdbc-metadata [^java.sql.DatabaseMetaData md database] - (->> (.getTables md nil nil nil (into-array String ["TABLE", "VIEW"])) - jdbc/result-set-seq - (map :table_name) - set))) + (set (for [table (filter #(not (contains? excluded-schemas (:table_schem %))) + (jdbc/result-set-seq (.getTables md nil nil nil (into-array String ["TABLE", "VIEW"]))))] + {:name (:table_name table) + :schema (:table_schem table)})))) -(defn- active-column-names->type [{:keys [column->base-type]} table] +(defn- active-column-names->type [column->base-type table] {:pre [(map? column->base-type)]} (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db table)] - (->> (.getColumns md nil nil (:name table) nil) - jdbc/result-set-seq - (filter #(not= (:table_schem %) "INFORMATION_SCHEMA")) ; filter out internal tables - (map (fn [{:keys [column_name type_name]}] - {column_name (or (column->base-type (keyword type_name)) - :UnknownField)})) - (into {})))) + (into {} (for [{:keys [column_name type_name]} (jdbc/result-set-seq (.getColumns md nil (:schema table) (:name table) nil))] + {column_name (or (column->base-type (keyword type_name)) + (do (log/warn (format "Don't know how to map column type '%s' to a Field base_type, falling back to :UnknownField." type_name)) + :UnknownField))})))) -(defn- table-pks [_ table] +(defn- table-pks [table] (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db table)] (->> (.getPrimaryKeys md nil nil (:name table)) jdbc/result-set-seq (map :column_name) set))) -(defn- field-values-lazy-seq [_ {:keys [qualified-name-components table], :as field}] +(defn- field-values-lazy-seq [{:keys [qualified-name-components table], :as field}] (assert (and (map? field) (delay? qualified-name-components) (delay? table)) @@ -96,12 +78,12 @@ (fetch-chunk 0 field-values-lazy-seq-chunk-size max-sync-lazy-seq-results))) -(defn- table-rows-seq [_ database table-name] +(defn- table-rows-seq [database table-name] (k/select (-> (k/create-entity table-name) (k/database (db->korma-db database))))) -(defn- table-fks [_ table] +(defn- table-fks [table] (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db table)] (->> (.getImportedKeys md nil nil (:name table)) jdbc/result-set-seq @@ -111,10 +93,9 @@ :dest-column-name (:pkcolumn_name result)})) set))) -(defn- field-avg-length [{:keys [sql-string-length-fn], :as driver} field] - {:pre [(keyword? 
sql-string-length-fn)]} +(defn- field-avg-length [string-length-fn field] (or (some-> (korma-entity @(:table field)) - (k/select (k/aggregate (avg (k/sqlfn* sql-string-length-fn + (k/select (k/aggregate (avg (k/sqlfn* string-length-fn (utils/func "CAST(%s AS CHAR)" [(keyword (:name field))]))) :len)) @@ -123,43 +104,130 @@ int) 0)) -(defn- field-percent-urls [_ field] +(defn- field-percent-urls [field] (or (let [korma-table (korma-entity @(:table field))] (when-let [total-non-null-count (:count (first (k/select korma-table - (k/aggregate (count :*) :count) + (k/aggregate (count (k/raw "*")) :count) (k/where {(keyword (:name field)) [not= nil]}))))] (when (> total-non-null-count 0) (when-let [url-count (:count (first (k/select korma-table - (k/aggregate (count :*) :count) + (k/aggregate (count (k/raw "*")) :count) (k/where {(keyword (:name field)) [like "http%://_%.__%"]}))))] (float (/ url-count total-non-null-count)))))) 0.0)) -(def ^:const GenericSQLIDriverMixin - "Generic SQL implementation of the `IDriver` protocol. - - (extend H2Driver - IDriver - GenericSQLIDriverMixin)" - {:can-connect? can-connect? - :can-connect-with-details? can-connect-with-details? - :wrap-process-query-middleware wrap-process-query-middleware - :process-query process-query - :sync-in-context sync-in-context - :active-table-names active-table-names - :active-column-names->type active-column-names->type - :table-pks table-pks - :field-values-lazy-seq field-values-lazy-seq - :table-rows-seq table-rows-seq}) - -(def ^:const GenericSQLISyncDriverTableFKsMixin - "Generic SQL implementation of the `ISyncDriverTableFKs` protocol." - {:table-fks table-fks}) - -(def ^:const GenericSQLISyncDriverFieldAvgLengthMixin - "Generic SQL implementation of the `ISyncDriverFieldAvgLengthMixin` protocol." - {:field-avg-length field-avg-length}) - -(def ^:const GenericSQLISyncDriverFieldPercentUrlsMixin - "Generic SQL implementation of the `ISyncDriverFieldPercentUrls` protocol." - {:field-percent-urls field-percent-urls}) +(def ^:private ^:const required-fns + "Functions that concrete SQL drivers must define." + #{:connection-details->spec + :unix-timestamp->timestamp + :date + :date-interval}) + +(defn- verify-sql-driver [{:keys [column->base-type string-length-fn], :as driver}] + ;; Check the :column->base-type map + (assert column->base-type + "SQL drivers must define :column->base-type.") + (assert (map? column->base-type) + ":column->base-type should be a map") + (doseq [[k v] column->base-type] + (assert (keyword? k) + (format "Not a keyword: %s" k)) + (assert (contains? field/base-types v) + (format "Invalid field base-type: %s" v))) + + ;; Check :string-length-fn + (assert string-length-fn + "SQL drivers must define :string-length-fn.") + (assert (keyword? string-length-fn) + ":string-length-fn must be a keyword.") + + ;; Check required fns + (doseq [f required-fns] + (assert (f driver) + (format "SQL drivers must define %s." f)) + (assert (fn? (f driver)) + (format "%s must be a fn." f)))) + +(defn sql-driver + "Create a Metabase DB driver using the Generic SQL functions. + + A SQL driver must define the following properties / functions: + + * `column->base-type` + + A map of native DB column types (as keywords) to the `Field` `base-types` they map to. + + * `string-length-fn` + + Keyword name of the SQL function that should be used to get the length of a string, e.g. `:LENGTH`. + + * `stddev-fn` *(OPTIONAL)* + + Keyword name of the SQL function that should be used to get the length of a string. Defaults to `:STDDEV`. 
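For orientation before the remaining options below, the sketch that follows shows roughly what a concrete driver built on `sql-driver` could look like. It is a hypothetical example, not a driver shipped in this patch: the namespace, engine name, and JDBC spec values are assumptions, and the `:date`, `:date-interval`, and `:unix-timestamp->timestamp` functions are placeholder stubs. Note that `:stddev-fn` (defaulting to `:STDDEV`) names the SQL standard-deviation function used for `:stddev` aggregations in the generic SQL query processor.

    ;; Hypothetical driver sketch -- illustrates the shape expected by
    ;; verify-sql-driver/defdriver, not a real Metabase driver.
    (ns metabase.driver.exampledb                     ; assumed namespace / engine name
      (:require [korma.core :as k]
                [metabase.driver :refer [defdriver]]
                [metabase.driver.generic-sql :refer [sql-driver]]))

    (defdriver exampledb
      (sql-driver
       {:driver-name               "ExampleDB"
        :details-fields            [{:name "host",   :display-name "Host", :default "localhost"}
                                    {:name "dbname", :display-name "Database name", :required true}]
        :column->base-type         {:INT     :IntegerField
                                    :VARCHAR :TextField}
        :string-length-fn          :LENGTH
        ;; JDBC spec values below are invented for the example
        :connection-details->spec  (fn [{:keys [host dbname]}]
                                     {:classname   "org.example.jdbc.Driver"
                                      :subprotocol "exampledb"
                                      :subname     (str "//" host "/" dbname)})
        ;; Placeholder stubs: a real driver returns korma forms that emit
        ;; DB-specific SQL for date bucketing and Unix-timestamp conversion.
        :date                      (fn [_unit field-or-value] field-or-value)
        :date-interval             (fn [unit amount]
                                     (k/raw (format "(NOW() + INTERVAL '%d %s')" amount (name unit))))
        :unix-timestamp->timestamp (fn [field-or-value _resolution] field-or-value)}))
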
+ + * `current-timestamp-fn` *(OPTIONAL)* + + Keyword name of the SQL function that should be used to get the current `DATETIME` (or equivalent). Defaults to `:NOW`. + + * `(connection-details->spec [details-map])` + + Given a `Database` DETAILS-MAP, return a JDBC connection spec. + + * `(unix-timestamp->timestamp [seconds-or-milliseconds field-or-value])` + + Return a korma form appropriate for converting a Unix timestamp integer field or value to an proper SQL `Timestamp`. + SECONDS-OR-MILLISECONDS refers to the resolution of the int in question and with be either `:seconds` or `:milliseconds`. + + * `set-timezone-sql` *(OPTIONAL)* + + This should be a prepared JDBC SQL statement string to be used to set the timezone for the current transaction. + + \"SET @@session.timezone = ?;\" + + * `(date [this ^Keyword unit field-or-value])` + + Return a korma form for truncating a date or timestamp field or value to a given resolution, or extracting a + date component. + + * `(date-interval [unit amount])` + + Return a korma form for a date relative to NOW(), e.g. on that would produce SQL like `(NOW() + INTERVAL '1 month')`. + + * `excluded-schemas` *(OPTIONAL)* + + Set of string names of schemas to skip syncing tables from. + + * `qp-clause->handler` *(OPTIONAL)* + + A map of query processor clause keywords to functions of the form `(fn [korma-query query-map])` that are used apply them. + By default, its value is `metabase.driver.generic-sql.query-processor/clause->handler`. These functions are exposed in this way so drivers + can override default clause application behavior where appropriate -- for example, SQL Server needs to override the function used to apply the + `:limit` clause, since T-SQL uses `TOP` rather than `LIMIT`." + [driver] + ;; Verify the driver + (verify-sql-driver driver) + (merge + {:features (set (cond-> [:foreign-keys + :standard-deviation-aggregations + :unix-timestamp-special-type-fields] + (:set-timezone-sql driver) (conj :set-timezone))) + :qp-clause->handler qp/clause->handler + :can-connect? (partial can-connect? (:connection-details->spec driver)) + :process-query process-query + :sync-in-context sync-in-context + :active-tables (partial active-tables (:excluded-schemas driver)) + :active-column-names->type (partial active-column-names->type (:column->base-type driver)) + :table-pks table-pks + :field-values-lazy-seq field-values-lazy-seq + :table-rows-seq table-rows-seq + :table-fks table-fks + :field-avg-length (partial field-avg-length (:string-length-fn driver)) + :field-percent-urls field-percent-urls + :date-interval (let [date-interval (:date-interval driver)] + (fn [unit amount] + ;; Add some extra param validation + {:pre [(contains? #{:second :minute :hour :day :week :month :quarter :year} unit)]} + (date-interval unit amount))) + :stddev-fn :STDDEV + :current-datetime-fn :NOW} + driver)) diff --git a/src/metabase/driver/generic_sql/interface.clj b/src/metabase/driver/generic_sql/interface.clj deleted file mode 100644 index cf7913c2506dd865c05674b6d5d346006fd6c509..0000000000000000000000000000000000000000 --- a/src/metabase/driver/generic_sql/interface.clj +++ /dev/null @@ -1,26 +0,0 @@ -(ns metabase.driver.generic-sql.interface - (:import clojure.lang.Keyword)) - -(defprotocol ISqlDriverDatabaseSpecific - "Methods a DB-specific concrete SQL driver should implement. 
- They should also have the following properties: - - * `column->base-type` - * `sql-string-length-fn`" - - (connection-details->connection-spec [this connection-details]) - (database->connection-details [this database]) - - (unix-timestamp->timestamp [this ^Keyword seconds-or-milliseconds field-or-value] - "Return a korma form appropriate for converting a Unix timestamp integer field or value to an proper SQL `Timestamp`. - SECONDS-OR-MILLISECONDS refers to the resolution of the int in question and with be either `:seconds` or `:milliseconds`.") - - (timezone->set-timezone-sql [this timezone] - "Return a string that represents the SQL statement that should be used to set the timezone - for the current transaction.") - - (date [this ^Keyword unit field-or-value] - "Return a korma form for truncating a date or timestamp field or value to a given resolution, or extracting a date component.") - - (date-interval [this ^Keyword unit ^Integer amount] - "Return a korma form for a date relative to NOW(), e.g. on that would produce SQL like `(NOW() + INTERVAL '1 month')`.")) diff --git a/src/metabase/driver/generic_sql/native.clj b/src/metabase/driver/generic_sql/native.clj index 28a95110ffd2f71640dfa480adacd66d9823036f..4a91b9d2bc0a6dfa129c320321d08b96437c4c5a 100644 --- a/src/metabase/driver/generic_sql/native.clj +++ b/src/metabase/driver/generic_sql/native.clj @@ -6,10 +6,9 @@ db) [metabase.db :refer [sel]] [metabase.driver :as driver] - [metabase.driver.interface :refer [supports?]] - (metabase.driver.generic-sql [interface :as i] - [util :refer :all]) - [metabase.models.database :refer [Database]])) + [metabase.driver.generic-sql.util :refer :all] + [metabase.models.database :refer [Database]] + [metabase.util :as u])) (defn- value->base-type "Attempt to match a value we get back from the DB with the corresponding base-type`." @@ -18,37 +17,41 @@ (defn process-and-run "Process and run a native (raw SQL) QUERY." - {:arglists '([query])} [{{sql :query} :native, database-id :database, :as query}] - {:pre [(string? sql) - (integer? database-id)]} - (log/debug "QUERY: \n" - (with-out-str (clojure.pprint/pprint (update query :driver class)))) - (try (let [database (sel :one [Database :engine :details] :id database-id) - db (-> database - db->korma-db - korma.db/get-connection) - [columns & [first-row :as rows]] (jdbc/with-db-transaction [conn db :read-only? true] - ;; If timezone is specified in the Query and the driver supports setting the timezone - ;; then execute SQL to set it - (when-let [timezone (or (-> query :native :timezone) - (driver/report-timezone))] - (when (seq timezone) - (let [driver (driver/engine->driver (:engine database))] - (when (supports? driver :set-timezone) - (log/debug "Setting timezone to:" timezone) - (jdbc/db-do-prepared conn (i/timezone->set-timezone-sql driver timezone)))))) - (jdbc/query conn sql :as-arrays? true))] - ;; TODO - Why don't we just use annotate? 
- {:rows rows - :columns columns - :cols (map (fn [column first-value] - {:name column - :base_type (value->base-type first-value)}) - columns first-row)}) + (try (let [database (sel :one :fields [Database :engine :details] :id database-id) + db-conn (-> database + db->korma-db + korma.db/get-connection) + {:keys [features set-timezone-sql]} (driver/engine->driver (:engine database))] + + (jdbc/with-db-transaction [t-conn db-conn] + (let [^java.sql.Connection jdbc-connection (:connection t-conn)] + ;; Disable auto-commit for this transaction, that way shady queries are unable to modify the database + (.setAutoCommit jdbc-connection false) + (try + ;; Set the timezone if applicable + (when-let [timezone (driver/report-timezone)] + (when (and (seq timezone) + (contains? features :set-timezone)) + (log/debug (u/format-color 'green "%s" set-timezone-sql)) + (try (jdbc/db-do-prepared t-conn set-timezone-sql [timezone]) + (catch Throwable e + (log/error (u/format-color 'red "Failed to set timezone: %s" (.getMessage e))))))) + + ;; Now run the query itself + (log/debug (u/format-color 'green "%s" sql)) + (let [[columns & [first-row :as rows]] (jdbc/query t-conn sql, :as-arrays? true)] + {:rows rows + :columns columns + :cols (for [[column first-value] (zipmap columns first-row)] + {:name column + :base_type (value->base-type first-value)})}) + + ;; Rollback any changes made during this transaction just to be extra-double-sure JDBC doesn't try to commit them automatically for us + (finally (.rollback jdbc-connection)))))) (catch java.sql.SQLException e - (let [^String message (or (->> (.getMessage e) ; error message comes back like 'Column "ZID" not found; SQL statement: ... [error-code]' sometimes + (let [^String message (or (->> (.getMessage e) ; error message comes back like 'Column "ZID" not found; SQL statement: ... [error-code]' sometimes (re-find #"^(.*);") ; the user already knows the SQL, and error code is meaningless - second) ; so just return the part of the exception that is relevant + second) ; so just return the part of the exception that is relevant (.getMessage e))] (throw (Exception. message)))))) diff --git a/src/metabase/driver/generic_sql/query_processor.clj b/src/metabase/driver/generic_sql/query_processor.clj index bfe3b9e4f2cb25261ed03cb7c71453d76724608c..b0993f867d713226c5181f23a55f5a86a7f4347b 100644 --- a/src/metabase/driver/generic_sql/query_processor.clj +++ b/src/metabase/driver/generic_sql/query_processor.clj @@ -1,18 +1,18 @@ (ns metabase.driver.generic-sql.query-processor "The Query Processor is responsible for translating the Metabase Query Language into korma SQL forms." 
(:require [clojure.core.match :refer [match]] - [clojure.tools.logging :as log] + [clojure.java.jdbc :as jdbc] [clojure.string :as s] - [clojure.walk :as walk] - [korma.core :refer :all, :exclude [update]] - [korma.sql.utils :as utils] + [clojure.tools.logging :as log] + (korma [core :as k] + [db :as kdb]) + (korma.sql [fns :as kfns] + [utils :as utils]) [metabase.config :as config] [metabase.driver :as driver] - (metabase.driver [interface :refer [supports?]] - [query-processor :as qp]) - (metabase.driver.generic-sql [interface :as i] - [native :as native] + (metabase.driver.generic-sql [native :as native] [util :refer :all]) + [metabase.driver.query-processor :as qp] [metabase.util :as u]) (:import java.sql.Timestamp java.util.Date @@ -23,73 +23,9 @@ RelativeDateTimeValue Value))) -(declare apply-form - log-korma-form) - -;; # INTERFACE - - -(def ^:dynamic ^:private *query* nil) - -(defn process-structured - "Convert QUERY into a korma `select` form, execute it, and annotate the results." - [{{:keys [source-table]} :query, database :database, :as query}] - (binding [*query* query] - (try - ;; Process the expanded query and generate a korma form - (let [korma-select-form `(select ~'entity ~@(->> (map apply-form (:query query)) - (filter identity) - (mapcat #(if (vector? %) % [%])))) - set-timezone-sql (when-let [timezone (driver/report-timezone)] - (when (seq timezone) - (let [driver (:driver *query*)] - (when (supports? driver :set-timezone) - `(exec-raw ~(i/timezone->set-timezone-sql driver timezone)))))) - korma-form `(let [~'entity (korma-entity ~database ~source-table)] - ~(if set-timezone-sql `(korma.db/with-db (:db ~'entity) - (korma.db/transaction - ~set-timezone-sql - ~korma-select-form)) - korma-select-form))] - - ;; Log generated korma form - (when (config/config-bool :mb-db-logging) - (log-korma-form korma-form)) - - (eval korma-form)) - - (catch java.sql.SQLException e - (let [^String message (or (->> (.getMessage e) ; error message comes back like "Error message ... [status-code]" sometimes - (re-find #"(?s)(^.*)\s+\[[\d-]+\]$") ; status code isn't useful and makes unit tests hard to write so strip it off - second) ; (?s) = Pattern.DOTALL - tell regex `.` to match newline characters as well - (.getMessage e))] - (throw (Exception. message))))))) - -(defn process-and-run - "Process and run a query and return results." - [{:keys [type] :as query}] - (case (keyword type) - :native (native/process-and-run query) - :query (process-structured query))) - - -;; # IMPLEMENTATION - -;; ## Query Clause Processors - -(defmulti apply-form - "Given a Query clause like - - {:aggregation [\"count\"]} - - call the matching implementation which should either return `nil` or translate it into a korma clause like - - (aggregate (count :*) :count) - - An implementation of `apply-form` may optionally return a vector of several forms to insert into the generated korma `select` form." - (fn [[clause-name _]] clause-name)) +(def ^:private ^:dynamic *query* nil) -(defmethod apply-form :default [form]) ;; nothing +;;; ## Formatting (defprotocol IGenericSQLFormattable (formatted [this] [this include-as?])) @@ -99,9 +35,9 @@ (formatted ([this] (formatted this false)) - ([{:keys [table-name special-type field-name], :as field} include-as?] - (let [->timestamp (partial i/unix-timestamp->timestamp (:driver *query*)) - field (cond-> (keyword (str table-name \. field-name)) + ([{:keys [schema-name table-name special-type field-name], :as field} include-as?] 
+ (let [->timestamp (:unix-timestamp->timestamp (:driver *query*)) + field (cond-> (keyword (str (when schema-name (str schema-name \.)) table-name \. field-name)) (= special-type :timestamp_seconds) (->timestamp :seconds) (= special-type :timestamp_milliseconds) (->timestamp :milliseconds))] (if include-as? [field (keyword field-name)] @@ -112,7 +48,7 @@ ([this] (formatted this false)) ([{unit :unit, {:keys [field-name base-type special-type], :as field} :field} include-as?] - (let [field (i/date (:driver *query*) unit (formatted field))] + (let [field ((:date (:driver *query*)) unit (formatted field))] (if include-as? [field (keyword field-name)] field)))) @@ -145,48 +81,48 @@ (formatted this false)) ([{value :value, {unit :unit} :field} _] ;; prevent Clojure from converting this to #inst literal, which is a util.date - (i/date (:driver *query*) unit `(Timestamp/valueOf ~(.toString value))))) + ((:date (:driver *query*)) unit value))) RelativeDateTimeValue (formatted ([this] (formatted this false)) ([{:keys [amount unit], {field-unit :unit} :field} _] - (let [driver (:driver *query*)] - (i/date driver field-unit (if (zero? amount) - (sqlfn :NOW) - (i/date-interval driver unit amount))))))) + (let [{:keys [date date-interval]} (:driver *query*)] + (date field-unit (if (zero? amount) + (k/sqlfn* (-> *query* :driver :current-datetime-fn)) + (date-interval unit amount))))))) -(defmethod apply-form :aggregation [[_ {:keys [aggregation-type field]}]] +;;; ## Clause Handlers + +(defn- apply-aggregation [korma-query {{:keys [aggregation-type field]} :aggregation}] (if-not field ;; aggregation clauses w/o a Field (case aggregation-type - :rows nil ; don't need to do anything special for `rows` - `select` selects all rows by default - :count `(aggregate (~'count :*) :count)) + :rows korma-query ; don't need to do anything special for `rows` - `select` selects all rows by default + :count (k/aggregate korma-query (count (k/raw "*")) :count)) ;; aggregation clauses with a Field (let [field (formatted field)] (case aggregation-type - :avg `(aggregate (~'avg ~field) :avg) - :count `(aggregate (~'count ~field) :count) - :distinct `(aggregate (~'count (sqlfn :DISTINCT ~field)) :count) - :stddev `(fields [(sqlfn :stddev ~field) :stddev]) - :sum `(aggregate (~'sum ~field) :sum))))) - - -(defmethod apply-form :breakout [[_ fields]] - `[ ;; Group by all the breakout fields - (group ~@(map formatted fields)) - - ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or korma will barf - (fields ~@(->> fields - (filter (partial (complement contains?) (set (:fields (:query *query*))))) - (map (u/rpartial formatted :include-as))))]) - - -(defmethod apply-form :fields [[_ fields]] - `(fields ~@(map (u/rpartial formatted :include-as) fields))) - + :avg (k/aggregate korma-query (avg field) :avg) + :count (k/aggregate korma-query (count field) :count) + :distinct (k/aggregate korma-query (count (k/sqlfn :DISTINCT field)) :count) + :stddev (k/fields korma-query [(k/sqlfn* (-> *query* :driver :stddev-fn) field) :stddev]) + :sum (k/aggregate korma-query (sum field) :sum))))) + +(defn- apply-breakout [korma-query {fields :breakout}] + (-> korma-query + ;; Group by all the breakout fields + ((partial apply k/group) (map formatted fields)) + ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or korma will barf + ((partial apply k/fields) (->> fields + (filter (partial (complement contains?) 
(set (:fields (:query *query*))))) + (map (u/rpartial formatted :include-as)))))) + +(defn- apply-fields [korma-query {fields :fields}] + (apply k/fields korma-query (for [field fields] + (formatted field :include-as)))) (defn- filter-subclause->predicate "Given a filter SUBCLAUSE, return a Korma filter predicate form for use in korma `where`." @@ -194,10 +130,8 @@ (if (= filter-type :inside) ;; INSIDE filter subclause (let [{:keys [lat lon]} filter] - (list 'and {(formatted (:field lat)) ['< (formatted (:max lat))]} - {(formatted (:field lat)) ['> (formatted (:min lat))]} - {(formatted (:field lon)) ['< (formatted (:max lon))]} - {(formatted (:field lon)) ['> (formatted (:min lon))]})) + (kfns/pred-and {(formatted (:field lat)) ['between [(formatted (:min lat)) (formatted (:max lat))]]} + {(formatted (:field lon)) ['between [(formatted (:min lon)) (formatted (:max lon))]]})) ;; all other filter subclauses (let [field (formatted (:field filter)) @@ -218,62 +152,117 @@ (defn- filter-clause->predicate [{:keys [compound-type subclauses], :as clause}] (case compound-type - :and `(~'and ~@(map filter-clause->predicate subclauses)) - :or `(~'or ~@(map filter-clause->predicate subclauses)) + :and (apply kfns/pred-and (map filter-clause->predicate subclauses)) + :or (apply kfns/pred-or (map filter-clause->predicate subclauses)) nil (filter-subclause->predicate clause))) -(defmethod apply-form :filter [[_ clause]] - `(where ~(filter-clause->predicate clause))) +(defn- apply-filter [korma-query {clause :filter}] + (k/where korma-query (filter-clause->predicate clause))) + +(defn- apply-join-tables [korma-query {join-tables :join-tables, {source-table-name :name} :source-table}] + (loop [korma-query korma-query, [{:keys [table-name pk-field source-field]} & more] join-tables] + (let [korma-query (k/join korma-query table-name + (= (keyword (format "%s.%s" source-table-name (:field-name source-field))) + (keyword (format "%s.%s" table-name (:field-name pk-field)))))] + (if (seq more) + (recur korma-query more) + korma-query)))) + +(defn- apply-limit [korma-query {value :limit}] + (k/limit korma-query value)) + +(defn- apply-order-by [korma-query {subclauses :order-by}] + (loop [korma-query korma-query, [{:keys [field direction]} & more] subclauses] + (let [korma-query (k/order korma-query (formatted field) (case direction + :ascending :ASC + :descending :DESC))] + (if (seq more) + (recur korma-query more) + korma-query)))) + +(defn- apply-page [korma-query {{:keys [items page]} :page}] + (-> korma-query + (k/limit items) + (k/offset (* items (dec page))))) +(defn- log-korma-form + [korma-form] + (when (config/config-bool :mb-db-logging) + (when-not qp/*disable-qp-logging* + (log/debug + (u/format-color 'green "\nKORMA FORM: 😋\n%s" (u/pprint-to-str (dissoc korma-form :db :ent :from :options :aliases :results :type :alias)))) + (try + (log/debug + (u/format-color 'blue "\nSQL: 😈\n%s\n" (-> (k/as-sql korma-form) + (s/replace #"\sFROM" "\nFROM") ; add newlines to the SQL to make it more readable + (s/replace #"\sLEFT JOIN" "\nLEFT JOIN") + (s/replace #"\sWHERE" "\nWHERE") + (s/replace #"\sGROUP BY" "\nGROUP BY") + (s/replace #"\sORDER BY" "\nORDER BY") + (s/replace #"\sLIMIT" "\nLIMIT") + (s/replace #"\sAND" "\n AND") + (s/replace #"\sOR" "\n OR")))) + ;; (k/as-sql korma-form) will barf if the korma form is invalid + (catch Throwable e + (log/error (u/format-color 'red "Invalid korma form: %s" (.getMessage e)))))))) + +(def ^:const clause->handler + "A map of QL clauses to fns that handle them. 
Each function is called like + + (fn [korma-query query]) + + and should return an appropriately modified KORMA-QUERY. SQL drivers contain a copy of this map keyed by `:qp-clause->handler`. + Most drivers can use the default implementations for all clauses, but some may need to override one or more (e.g. SQL Server needs to + override the behavior of `apply-limit`, since T-SQL uses `TOP` instead of `LIMIT`)." + {:aggregation apply-aggregation + :breakout apply-breakout + :fields apply-fields + :filter apply-filter + :join-tables apply-join-tables + :limit apply-limit + :order-by apply-order-by + :page apply-page}) -(defmethod apply-form :join-tables [[_ join-tables]] - (vec (for [{:keys [table-name pk-field source-field]} join-tables] - `(join ~table-name - (~'= ~(keyword (format "%s.%s" (:name (:source-table (:query *query*))) (:field-name source-field))) - ~(keyword (format "%s.%s" table-name (:field-name pk-field)))))))) - - -(defmethod apply-form :limit [[_ value]] - `(limit ~value)) - - -(defmethod apply-form :order-by [[_ subclauses]] - (vec (for [{:keys [field direction]} subclauses] - `(order ~(formatted field) - ~(case direction - :ascending :ASC - :descending :DESC))))) - -;; TODO - page can be preprocessed away -- converted to a :limit clause and an :offset clause -;; implement this at some point. -(defmethod apply-form :page [[_ {:keys [items page]}]] - {:pre [(integer? items) - (> items 0) - (integer? page) - (> page 0)]} - `[(limit ~items) - (offset ~(* items (- page 1)))]) - +(defn process-structured + "Convert QUERY into a korma `select` form, execute it, and annotate the results." + [{{:keys [source-table] :as query} :query, driver :driver, database :database, :as outer-query}] + (binding [*query* outer-query] + (try + (let [entity (korma-entity database source-table) + timezone (driver/report-timezone) + ;; Loop through all the :qp-clause->handler entries in the current driver. If the query contains a given clause, apply its handler fn. + korma-query (loop [korma-query (k/select* entity), [[clause f] & more] (seq (:qp-clause->handler driver))] + (let [korma-query (if (clause query) + (f korma-query query) + korma-query)] + (if (seq more) + (recur korma-query more) + korma-query)))] + + (log-korma-form korma-query) + + (kdb/with-db (:db entity) + (if (and (seq timezone) + (contains? (:features driver) :set-timezone)) + (try (kdb/transaction (k/exec-raw [(:set-timezone-sql driver) [timezone]]) + (k/exec korma-query)) + (catch Throwable e + (log/error (u/format-color 'red "Failed to set timezone:\n%s" + (with-out-str (jdbc/print-sql-exception-chain e)))) + (k/exec korma-query))) + (k/exec korma-query)))) -;; ## Debugging Functions (Internal) + (catch java.sql.SQLException e + (jdbc/print-sql-exception-chain e) + (let [^String message (or (->> (.getMessage e) ; error message comes back like "Error message ... [status-code]" sometimes + (re-find #"(?s)(^.*)\s+\[[\d-]+\]$") ; status code isn't useful and makes unit tests hard to write so strip it off + second) ; (?s) = Pattern.DOTALL - tell regex `.` to match newline characters as well + (.getMessage e))] + (throw (Exception. message))))))) -(defn- log-korma-form - [korma-form] - (when-not qp/*disable-qp-logging* - (log/debug - (u/format-color 'green "\n\nKORMA FORM: ðŸ˜\n%s" (->> (nth korma-form 2) ; korma form is wrapped in a let clause. Discard it - (walk/prewalk (fn [form] ; strip korma.core/ qualifications from symbols in the form - (cond ; to remove some of the clutter - (symbol? form) (symbol (name form)) - (keyword? 
form) (keyword (name form)) - :else form))) - (u/pprint-to-str))) - (u/format-color 'blue "\nSQL: 😈\n%s\n" (-> (eval (let [[let-form binding-form & body] korma-form] ; wrap the (select ...) form in a sql-only clause - `(~let-form ~binding-form ; has to go there to work correctly - (sql-only ~@body)))) - (s/replace #"\sFROM" "\nFROM") ; add newlines to the SQL to make it more readable - (s/replace #"\sLEFT JOIN" "\nLEFT JOIN") - (s/replace #"\sWHERE" "\nWHERE") - (s/replace #"\sGROUP BY" "\nGROUP BY") - (s/replace #"\sORDER BY" "\nORDER BY") - (s/replace #"\sLIMIT" "\nLIMIT")))))) +(defn process-and-run + "Process and run a query and return results." + [{:keys [type] :as query}] + (case (keyword type) + :native (native/process-and-run query) + :query (process-structured query))) diff --git a/src/metabase/driver/generic_sql/util.clj b/src/metabase/driver/generic_sql/util.clj index fd4a1da838c05606650abb5071cb86dc18b35845..7c19cd3112f10c813c845a9b698ad0eba1a390cf 100644 --- a/src/metabase/driver/generic_sql/util.clj +++ b/src/metabase/driver/generic_sql/util.clj @@ -7,19 +7,16 @@ [db :as kdb]) [korma.sql.utils :as utils] [metabase.driver :as driver] - [metabase.driver.query-processor :as qp] - [metabase.driver.generic-sql.interface :as i])) + [metabase.driver.query-processor :as qp])) (defn- db->connection-spec "Return a JDBC connection spec for a Metabase `Database`." [{{:keys [short-lived?]} :details, :as database}] - (let [driver (driver/engine->driver (:engine database)) - database->connection-details (partial i/database->connection-details driver) - connection-details->connection-spec (partial i/connection-details->connection-spec driver)] - (merge (-> database database->connection-details connection-details->connection-spec) + (let [{:keys [connection-details->spec]} (driver/engine->driver (:engine database))] + (assoc (connection-details->spec (:details database)) ;; unless this is a temp DB, we need to make a pool or the connection will be closed before we get a chance to unCLOB-er the results during JSON serialization ;; TODO - what will we do once we have CLOBS in temp DBs? - {:make-pool? (not short-lived?)}))) + :make-pool? (not short-lived?)))) (def ^{:arglists '([database])} db->korma-db @@ -79,9 +76,11 @@ ([{db-delay :db, :as table}] {:pre [(delay? db-delay)]} (korma-entity @db-delay table)) - ([db {table-name :name}] + ([db {schema :schema, table-name :name}] {:pre [(map? db)]} - {:table table-name + {:table (if (seq schema) + (str schema \. 
table-name) + table-name) :pk :id :db (db->korma-db db)})) diff --git a/src/metabase/driver/h2.clj b/src/metabase/driver/h2.clj index 166b0aa7e7e19a5be8631b0103d3d780e61ae409..39e4b86a7e9abbc8f2a1ab133bd1f9c843f12baa 100644 --- a/src/metabase/driver/h2.clj +++ b/src/metabase/driver/h2.clj @@ -3,12 +3,9 @@ [korma.db :as kdb] [korma.sql.utils :as utils] [metabase.db :as db] - [metabase.driver :as driver] - (metabase.driver [generic-sql :as generic-sql, :refer [GenericSQLIDriverMixin GenericSQLISyncDriverTableFKsMixin - GenericSQLISyncDriverFieldAvgLengthMixin GenericSQLISyncDriverFieldPercentUrlsMixin]] - [interface :as i, :refer [IDriver ISyncDriverTableFKs ISyncDriverFieldAvgLength ISyncDriverFieldPercentUrls]]) - (metabase.driver.generic-sql [interface :refer [ISqlDriverDatabaseSpecific]] - [util :refer [funcs]]) + [metabase.driver :as driver, :refer [defdriver]] + [metabase.driver.generic-sql :refer [sql-driver]] + [metabase.driver.generic-sql.util :refer [funcs]] [metabase.models.database :refer [Database]])) (def ^:private ^:const column->base-type @@ -104,20 +101,17 @@ (file+options->connection-string file (merge options {"IFEXISTS" "TRUE" "ACCESS_MODE_DATA" "r"})))) -(defn- connection-details->connection-spec [_ details] +(defn- connection-details->spec [details] (kdb/h2 (if db/*allow-potentailly-unsafe-connections* details (update details :db connection-string-set-safe-options)))) -(defn- database->connection-details [_ {:keys [details]}] - details) - -(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds] +(defn- unix-timestamp->timestamp [field-or-value seconds-or-milliseconds] (utils/func (format "TIMESTAMPADD('%s', %%s, TIMESTAMP '1970-01-01T00:00:00Z')" (case seconds-or-milliseconds :seconds "SECOND" :milliseconds "MILLISECOND")) [field-or-value])) -(defn- wrap-process-query-middleware [_ qp] +(defn- process-query-in-context [qp] (fn [{query-type :type, :as query}] {:pre [query-type]} ;; For :native queries check to make sure the DB in question has a (non-default) NAME property specified in the connection string. @@ -153,7 +147,7 @@ ["YEAR(%s)" field-or-value] ["((QUARTER(%s) * 3) - 2)" field-or-value]])) -(defn- date [_ unit field-or-value] +(defn- date [unit field-or-value] (if (= unit :quarter) (trunc-to-quarter field-or-value) (utils/func (case unit @@ -175,49 +169,36 @@ [field-or-value]))) ;; TODO - maybe rename this relative-date ? 
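;; A minimal usage sketch, assuming only the `date-interval` rewrite shown just below: the
;; consolidated H2 helper drops the driver argument and expands straight to a DATEADD fragment.
(comment
  (date-interval :week 2)    ; => (utils/generated "DATEADD('WEEK', 2, NOW())")
  (date-interval :quarter 1) ; => (utils/generated "DATEADD('MONTH', (1 * 3), NOW())"), the :quarter special case
  )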
-(defn- date-interval [_ unit amount] - (utils/generated (format (case unit - :minute "DATEADD('MINUTE', %d, NOW())" - :hour "DATEADD('HOUR', %d, NOW())" - :day "DATEADD('DAY', %d, NOW())" - :week "DATEADD('WEEK', %d, NOW())" - :month "DATEADD('MONTH', %d, NOW())" - :quarter "DATEADD('MONTH', (%d * 3), NOW())" - :year "DATEADD('YEAR', %d, NOW())") - amount))) - -(defn- humanize-connection-error-message [_ message] +(defn- date-interval [unit amount] + (utils/generated (if (= unit :quarter) + (format "DATEADD('MONTH', (%d * 3), NOW())" amount) + (format "DATEADD('%s', %d, NOW())" (s/upper-case (name unit)) amount)))) + +(defn- humanize-connection-error-message [message] (condp re-matches message #"^A file path that is implicitly relative to the current working directory is not allowed in the database URL .*$" - (i/connection-error-messages :cannot-connect-check-host-and-port) + (driver/connection-error-messages :cannot-connect-check-host-and-port) #"^Database .* not found .*$" - (i/connection-error-messages :cannot-connect-check-host-and-port) + (driver/connection-error-messages :cannot-connect-check-host-and-port) #"^Wrong user name or password .*$" - (i/connection-error-messages :username-or-password-incorrect) + (driver/connection-error-messages :username-or-password-incorrect) #".*" ; default message)) - -(defrecord H2Driver []) - -(extend H2Driver - ISqlDriverDatabaseSpecific {:connection-details->connection-spec connection-details->connection-spec - :database->connection-details database->connection-details - :date date - :date-interval date-interval - :unix-timestamp->timestamp unix-timestamp->timestamp} - ;; Override the generic SQL implementation of wrap-process-query-middleware so we can block unsafe native queries (see above) - IDriver (assoc GenericSQLIDriverMixin - :humanize-connection-error-message humanize-connection-error-message - :wrap-process-query-middleware wrap-process-query-middleware) - ISyncDriverTableFKs GenericSQLISyncDriverTableFKsMixin - ISyncDriverFieldAvgLength GenericSQLISyncDriverFieldAvgLengthMixin - ISyncDriverFieldPercentUrls GenericSQLISyncDriverFieldPercentUrlsMixin) - -(def ^:const driver - (map->H2Driver {:column->base-type column->base-type - :features generic-sql/features - :sql-string-length-fn :LENGTH})) +(defdriver h2 + (sql-driver {:driver-name "H2" + :details-fields [{:name "db" + :display-name "Connection String" + :placeholder "file:/Users/camsaul/bird_sightings/toucans;AUTO_SERVER=TRUE" + :required true}] + :column->base-type column->base-type + :string-length-fn :LENGTH + :connection-details->spec connection-details->spec + :date date + :date-interval date-interval + :unix-timestamp->timestamp unix-timestamp->timestamp + :humanize-connection-error-message humanize-connection-error-message + :process-query-in-context process-query-in-context})) diff --git a/src/metabase/driver/interface.clj b/src/metabase/driver/interface.clj deleted file mode 100644 index 43d67bd7229ee3d625cce0665cd8bac29447eb1a..0000000000000000000000000000000000000000 --- a/src/metabase/driver/interface.clj +++ /dev/null @@ -1,160 +0,0 @@ -(ns metabase.driver.interface - "Protocols that DB drivers implement. Thus, the interface such drivers provide." - (:import (clojure.lang Keyword))) - -(def ^:const driver-optional-features - "A set on optional features (as keywords) that may or may not be supported by individual drivers." - #{:foreign-keys - :nested-fields ; are nested Fields (i.e., Mongo-style nested keys) supported? 
- :set-timezone - :standard-deviation-aggregations - :unix-timestamp-special-type-fields}) - -(def ^:const max-sync-lazy-seq-results - "The maximum number of values we should return when using `field-values-lazy-seq`. - This many is probably fine for inferring special types and what-not; we don't want - to scan millions of values at any rate." - 10000) - -(def ^:const connection-error-messages - "Generic error messages that drivers should return in their implementation of `humanize-connection-error-message`." - {:cannot-connect-check-host-and-port "Hmm, we couldn't connect to the database. Make sure your host and port settings are correct." - :database-name-incorrect "Looks like the database name is incorrect." - :invalid-hostname "It looks like your host is invalid. Please double-check it and try again." - :password-incorrect "Looks like your password is incorrect." - :password-required "Looks like you forgot to enter your password." - :username-incorrect "Looks like your username is incorrect." - :username-or-password-incorrect "Looks like the username or password is incorrect."}) - -;; ## IDriver Protocol - -(defprotocol IDriver - "Methods all drivers must implement. - They should also include the following properties: - - * `features` (optional) - A set containing one or more `driver-optional-features`" - - ;; Connection - (can-connect? [this database] - "Check whether we can connect to DATABASE and perform a simple query. - (To check whether we can connect to a database given only its details, use `can-connect-with-details?` instead). - - (can-connect? driver (sel :one Database :id 1))") - (can-connect-with-details? [this details-map] - "Check whether we can connect to a database and performa a simple query. - Returns true if we can, otherwise returns `false` or throws an `Exception`. - - (can-connect-with-details? driver {:engine :postgres, :dbname \"book\", ...})") - - (humanize-connection-error-message ^String [this ^String message] - "Return a humanized (user-facing) version of an connection error message string. - Generic error messages are provided in the constant `connection-error-messages`; return one of these whenever possible.") - - ;; Syncing - (sync-in-context [this database do-sync-fn] - "This function is basically around-advice for `sync-database!` and `sync-table!` operations. - Implementers can setup any context necessary for syncing, then need to call DO-SYNC-FN, - which takes no args. - - (sync-in-context [_ database do-sync-fn] - (with-jdbc-metadata [_ database] - (do-sync-fn)))") - (active-table-names [this database] - "Return a set of string names of tables, collections, or equivalent that currently exist in DATABASE.") - (active-column-names->type [this table] - "Return a map of string names of active columns (or equivalent) -> `Field` `base_type` for TABLE (or equivalent).") - (table-pks [this table] - "Return a set of string names of active Fields that are primary keys for TABLE (or equivalent).") - (field-values-lazy-seq [this field] - "Return a lazy sequence of all values of Field. - This is used to implement `mark-json-field!`, and fallback implentations of `mark-no-preview-display-field!` and `mark-url-field!` - if drivers *don't* implement `ISyncDriverFieldAvgLength` or `ISyncDriverFieldPercentUrls`, respectively.") - (table-rows-seq [this database table-name] - "Return a sequence of all the rows in a table with a given TABLE-NAME. - Currently, this is only used for iterating over the values in a `_metabase_metadata` table. 
As such, the results are not expected to be returned lazily. - - (table-rows-seq driver (Database 2) \"_metabase_metadata\") - -> [{:keypath \"people.description\" - :value \"...\"} - ...]") - - ;; Query Processing - (process-query [this query] - "Process a native or structured query. - (Don't use this directly; instead, use `metabase.driver/process-query`, - which does things like preprocessing before calling the appropriate implementation.)") - (wrap-process-query-middleware [this qp-fn] - "Custom QP middleware for this driver. - Like `sync-in-context`, but for running queries rather than syncing. This is basically around-advice for the QP pre and post-processing stages. - This should be used to do things like open DB connections that need to remain open for the duration of post-processing. - This middleware is injected into the QP middleware stack immediately after the Query Expander; in other words, it will receive the expanded query. - See the Mongo driver for and example of how this is intended to be used.")) - - -;; ## ISyncDriverTableFKs Protocol (Optional) - -(defprotocol ISyncDriverTableFKs - "Optional protocol to provide FK information for a TABLE. - If a sync driver implements it, Table FKs will be synced; otherwise, the step will be skipped." - (table-fks [this table] - "Return a set of maps containing info about FK columns for TABLE. - Each map should contain the following keys: - - * fk-column-name - * dest-table-name - * dest-column-name")) - - -(defprotocol ISyncDriverFieldNestedFields - "Optional protocol that should provide information about the subfields of a FIELD when applicable. - Drivers that declare support for `:nested-fields` should implement this protocol." - (active-nested-field-name->type [this field] - "Return a map of string names of active child `Fields` of FIELD -> `Field.base_type`.")) - - -;; ## ISyncDriverField Protocols (Optional) - -;; These are optional protocol that drivers can implement to be used instead of falling back to field-values-lazy-seq for certain Field -;; syncing operations, which involves iterating over a few thousand values of the Field in Clojure-land. Since that's slower, it's -;; preferable to provide implementations of ISyncDriverFieldAvgLength/ISyncDriverFieldPercentUrls when possible. - -(defprotocol ISyncDriverFieldAvgLength - "Optional. If this isn't provided, a fallback implementation that calculates average length in Clojure-land will be used instead." - (field-avg-length [this field] - "Return the average length of all non-nil values of textual FIELD.")) - -(defprotocol ISyncDriverFieldPercentUrls - "Optional. If this isn't provided, a fallback implementation that calculates URL percentage in Clojure-land will be used instead." - (field-percent-urls [this field] - "Return the percentage of non-nil values of textual FIELD that are valid URLs.")) - - -;;; ## ISyncDriverSpecificSyncField (Optional) - -(defprotocol ISyncDriverSpecificSyncField - "Optional. Do driver-specific syncing for a FIELD." - (driver-specific-sync-field! [this field] - "This is a chance for drivers to do custom Field syncing specific to their database. - For example, the Postgres driver can mark Postgres JSON fields as `special_type = json`. - As with the other Field syncing functions in `metabase.driver.sync`, this method should return the modified - FIELD, if any, or `nil`.")) - - -;; ## Helper Functions - -(def ^:private valid-feature? (partial contains? driver-optional-features)) - -(defn supports? - "Does DRIVER support FEATURE?" 
- [{:keys [features]} ^Keyword feature] - {:pre [(set? features) - (every? valid-feature? features) - (valid-feature? feature)]} - (contains? features feature)) - -(defn assert-driver-supports - "Helper fn. Assert that DRIVER supports FEATURE." - [driver ^Keyword feature] - (when-not (supports? driver feature) - (throw (Exception. (format "%s is not supported by this driver." (name feature)))))) diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj index a83778fc3066886147d479557fb1846a80e3c94d..bb8a0f28f81f5f99e178298aa8d4a7c2e059a1ec 100644 --- a/src/metabase/driver/mongo.clj +++ b/src/metabase/driver/mongo.clj @@ -11,8 +11,7 @@ [core :as mg] [db :as mdb] [query :as mq]) - [metabase.driver :as driver] - [metabase.driver.interface :as i, :refer [IDriver ISyncDriverFieldNestedFields]] + [metabase.driver :as driver, :refer [defdriver]] (metabase.driver.mongo [query-processor :as qp] [util :refer [*mongo-connection* with-mongo-connection values->base-type]]) [metabase.util :as u])) @@ -27,7 +26,7 @@ [table] (with-mongo-connection [^com.mongodb.DB conn @(:db table)] (->> (mc/find-maps conn (:name table)) - (take i/max-sync-lazy-seq-results) + (take driver/max-sync-lazy-seq-results) (map keys) (map set) (reduce set/union)))) @@ -38,54 +37,51 @@ {:pre [(map? field)] :post [(keyword? %)]} (with-mongo-connection [_ @(:db @(:table field))] - (values->base-type (field-values-lazy-seq driver field)))) + (values->base-type (field-values-lazy-seq field)))) ;;; ## MongoDriver -(defn- can-connect? [_ database] - (with-mongo-connection [^com.mongodb.DB conn database] +(defn- can-connect? [details] + (with-mongo-connection [^com.mongodb.DB conn details] (= (-> (cmd/db-stats conn) (conv/from-db-object :keywordize) :ok) 1.0))) -(defn- can-connect-with-details? [this details] - (can-connect? 
this {:details details})) - -(defn- humanize-connection-error-message [_ message] +(defn- humanize-connection-error-message [message] (condp re-matches message #"^Timed out after \d+ ms while waiting for a server .*$" - (i/connection-error-messages :cannot-connect-check-host-and-port) + (driver/connection-error-messages :cannot-connect-check-host-and-port) #"^host and port should be specified in host:port format$" - (i/connection-error-messages :invalid-hostname) + (driver/connection-error-messages :invalid-hostname) #"^Password can not be null when the authentication mechanism is unspecified$" - (i/connection-error-messages :password-required) + (driver/connection-error-messages :password-required) - #".*" ; default + #".*" ; default message)) -(defn- wrap-process-query-middleware [_ qp] +(defn- process-query-in-context [qp] (fn [query] (with-mongo-connection [^com.mongodb.DB conn (:database query)] (qp query)))) -(defn- process-query [_ query] +(defn- process-query [query] (qp/process-and-run query)) ;;; ### Syncing -(defn- sync-in-context [_ database do-sync-fn] +(defn- sync-in-context [database do-sync-fn] (with-mongo-connection [_ database] (do-sync-fn))) -(defn- active-table-names [_ database] +(defn- active-tables [database] (with-mongo-connection [^com.mongodb.DB conn database] - (-> (mdb/get-collection-names conn) - (set/difference #{"system.indexes"})))) + (set (for [collection (set/difference (mdb/get-collection-names conn) #{"system.indexes"})] + {:name collection})))) -(defn- active-column-names->type [_ table] +(defn- active-column-names->type [table] (with-mongo-connection [_ @(:db table)] (into {} (for [column-name (table->column-names table)] {(name column-name) @@ -93,7 +89,7 @@ :table (delay table) :qualified-name-components (delay [(:name table) (name column-name)])})})))) -(defn- field-values-lazy-seq [_ {:keys [qualified-name-components table], :as field}] +(defn- field-values-lazy-seq [{:keys [qualified-name-components table], :as field}] (assert (and (map? field) (delay? qualified-name-components) (delay? table)) @@ -108,11 +104,11 @@ (mq/with-collection *mongo-connection* (:name table) (mq/fields [(apply str (interpose "." name-components))])))))) -(defn- active-nested-field-name->type [this field] +(defn- active-nested-field-name->type [field] ;; Build a map of nested-field-key -> type -> count ;; TODO - using an atom isn't the *fastest* thing in the world (but is the easiest); consider alternate implementation (let [field->type->count (atom {})] - (doseq [val (take i/max-sync-lazy-seq-results (field-values-lazy-seq this field))] + (doseq [val (take driver/max-sync-lazy-seq-results (field-values-lazy-seq field))] (when (map? val) (doseq [[k v] val] (swap! field->type->count update-in [k (type v)] #(if % (inc %) 1))))) @@ -125,22 +121,38 @@ first ; keep just the type driver/class->base-type)))))) - -(defrecord MongoDriver []) - -(extend MongoDriver - IDriver {:can-connect? can-connect? - :can-connect-with-details? can-connect-with-details? - :humanize-connection-error-message humanize-connection-error-message - :wrap-process-query-middleware wrap-process-query-middleware - :process-query process-query - :sync-in-context sync-in-context - :active-table-names active-table-names - :active-column-names->type active-column-names->type - :table-pks (constantly #{"_id"}) - :field-values-lazy-seq field-values-lazy-seq} - ISyncDriverFieldNestedFields {:active-nested-field-name->type active-nested-field-name->type}) - -(def driver - "Concrete instance of the MongoDB driver." 
- (map->MongoDriver {:features #{:nested-fields}})) +(defdriver mongo + {:driver-name "MongoDB" + :details-fields [{:name "host" + :display-name "Host" + :default "localhost"} + {:name "port" + :display-name "Port" + :type :integer + :default 27017} + {:name "dbname" + :display-name "Database name" + :placeholder "carrierPigeonDeliveries" + :required true} + {:name "user" + :display-name "Database username" + :placeholder "What username do you use to login to the database?"} + {:name "pass" + :display-name "Database password" + :type :password + :placeholder "******"} + {:name "ssl" + :display-name "Use a secure connection (SSL)?" + :type :boolean + :default false}] + :features #{:nested-fields} + :can-connect? can-connect? + :active-tables active-tables + :field-values-lazy-seq field-values-lazy-seq + :active-column-names->type active-column-names->type + :table-pks (constantly #{"_id"}) + :process-query process-query + :process-query-in-context process-query-in-context + :sync-in-context sync-in-context + :humanize-connection-error-message humanize-connection-error-message + :active-nested-field-name->type active-nested-field-name->type}) diff --git a/src/metabase/driver/mongo/query_processor.clj b/src/metabase/driver/mongo/query_processor.clj index 7a76f7e48901088c29eb42201ae93403f34af20c..e26addf607cbb36aa153333956cf178383720598 100644 --- a/src/metabase/driver/mongo/query_processor.clj +++ b/src/metabase/driver/mongo/query_processor.clj @@ -12,9 +12,7 @@ [operators :refer :all] [query :refer :all]) [metabase.db :refer :all] - [metabase.driver :as driver] - (metabase.driver [interface :as i] - [query-processor :as qp]) + [metabase.driver.query-processor :as qp] [metabase.driver.query-processor.interface :refer [qualified-name-components]] [metabase.driver.mongo.util :refer [with-mongo-connection *mongo-connection* values->base-type]] [metabase.models.field :as field] @@ -162,7 +160,7 @@ (constantly true)) field-id (or (:field-id field) ; Field (:field-id (:field field)))] ; DateTimeField - (->> (i/field-values-lazy-seq @(ns-resolve 'metabase.driver.mongo 'driver) (sel :one field/Field :id field-id)) ; resolve driver at runtime to avoid circular deps + (->> (@(resolve 'metabase.driver.mongo/field-values-lazy-seq) (sel :one field/Field :id field-id)) ; resolve driver at runtime to avoid circular deps (filter identity) (map hash) (map #(conj! values %)) diff --git a/src/metabase/driver/mongo/util.clj b/src/metabase/driver/mongo/util.clj index d7093db5f81d01e7af9ba55fbb6f238f436ccc7a..94dcb818d5561bce80e5f9e91879d6276f966268 100644 --- a/src/metabase/driver/mongo/util.clj +++ b/src/metabase/driver/mongo/util.clj @@ -22,12 +22,16 @@ Bound by top-level `with-mongo-connection` so it may be reused within its body." nil) -(def ^:private mongo-connection-options - ;; Have to use the Java builder directly since monger's wrapper method doesn't support .serverSelectionTimeout :unamused: +(defn- build-connection-options + "Build connection options for Mongo. + We have to use `MongoClientOptions.Builder` directly to configure our Mongo connection + since Monger's wrapper method doesn't support `.serverSelectionTimeout` or `.sslEnabled`." + [& {:keys [ssl?]}] (-> (com.mongodb.MongoClientOptions$Builder.) (.connectTimeout connection-timeout-ms) (.serverSelectionTimeout connection-timeout-ms) - (.build))) + (.sslEnabled ssl?) 
+ .build)) ;; The arglists metadata for mg/connect are actually *WRONG* -- the function additionally supports a 3-arg airity where you can pass ;; options and credentials, as we'd like to do. We need to go in and alter the metadata of this function ourselves because otherwise @@ -40,12 +44,12 @@ "Run F with a new connection (bound to `*mongo-connection*`) to DATABASE. Don't use this directly; use `with-mongo-connection`." [f database] - (let [{:keys [dbname host port user pass] - :or {port 27017, pass ""}} (cond - (string? database) {:dbname database} - (:dbname (:details database)) (:details database) ; entire Database obj - (:dbname database) database ; connection details map only - :else (throw (Exception. (str "with-mongo-connection failed: bad connection details:" (:details database))))) + (let [{:keys [dbname host port user pass ssl] + :or {port 27017, pass "", ssl false}} (cond + (string? database) {:dbname database} + (:dbname (:details database)) (:details database) ; entire Database obj + (:dbname database) database ; connection details map only + :else (throw (Exception. (str "with-mongo-connection failed: bad connection details:" (:details database))))) user (when (seq user) ; ignore empty :user and :pass strings user) pass (when (seq pass) @@ -53,7 +57,7 @@ server-address (mg/server-address host port) credentials (when user (mcred/create user dbname pass)) - connect (partial mg/connect server-address mongo-connection-options) + connect (partial mg/connect server-address (build-connection-options :ssl? ssl)) conn (if credentials (connect credentials) (connect)) diff --git a/src/metabase/driver/mysql.clj b/src/metabase/driver/mysql.clj index 627ec5aa6a5c9733aa7afa57ac9b40627bc79afa..0fae70b429b5348a9d60622efe42524e08e2a677 100644 --- a/src/metabase/driver/mysql.clj +++ b/src/metabase/driver/mysql.clj @@ -6,12 +6,9 @@ mysql) (korma.sql [engine :refer [sql-func]] [utils :as utils]) - (metabase.driver [generic-sql :as generic-sql, :refer [GenericSQLIDriverMixin GenericSQLISyncDriverTableFKsMixin - GenericSQLISyncDriverFieldAvgLengthMixin GenericSQLISyncDriverFieldPercentUrlsMixin]] - [interface :as i, :refer [IDriver ISyncDriverTableFKs ISyncDriverFieldAvgLength ISyncDriverFieldPercentUrls - ISyncDriverSpecificSyncField driver-specific-sync-field!]]) - (metabase.driver.generic-sql [interface :refer [ISqlDriverDatabaseSpecific]] - [util :refer [funcs]]))) + [metabase.driver :as driver, :refer [defdriver]] + [metabase.driver.generic-sql :refer [sql-driver]] + [metabase.driver.generic-sql.util :refer [funcs]])) ;;; # Korma 0.4.2 Bug Workaround ;; (Buggy code @ https://github.com/korma/Korma/blob/684178c386df529558bbf82097635df6e75fb339/src/korma/mysql.clj) @@ -61,26 +58,17 @@ :VARCHAR :TextField :YEAR :IntegerField}) -(defn- connection-details->connection-spec [_ details] +(defn- connection-details->spec [details] (-> details (set/rename-keys {:dbname :db}) kdb/mysql)) -(defn- database->connection-details [_ {:keys [details]}] - details) - -(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds] +(defn- unix-timestamp->timestamp [field-or-value seconds-or-milliseconds] (utils/func (case seconds-or-milliseconds :seconds "FROM_UNIXTIME(%s)" :milliseconds "FROM_UNIXTIME(%s / 1000)") [field-or-value])) -(defn- timezone->set-timezone-sql [_ timezone] - ;; If this fails you need to load the timezone definitions from your system into MySQL; - ;; run the command `mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql -u root mysql` - ;; See 
https://dev.mysql.com/doc/refman/5.7/en/time-zone-support.html for details - (format "SET @@session.time_zone = '%s';" timezone)) - ;; Since MySQL doesn't have date_trunc() we fake it by formatting a date to an appropriate string and then converting back to a date. ;; See http://dev.mysql.com/doc/refman/5.6/en/date-and-time-functions.html#function_date-format for an explanation of format specifiers (defn- trunc-with-format [format-str] @@ -97,7 +85,7 @@ ["((QUARTER(%s) * 3) - 2)" field-or-value] (k/raw "'-01'")])) -(defn- date [_ unit field-or-value] +(defn- date [unit field-or-value] (if (= unit :quarter) (trunc-to-quarter field-or-value) (utils/func (case unit @@ -121,50 +109,57 @@ :year "YEAR(%s)") [field-or-value]))) -(defn- date-interval [_ unit amount] - (utils/generated (format (case unit - :minute "DATE_ADD(NOW(), INTERVAL %d MINUTE)" - :hour "DATE_ADD(NOW(), INTERVAL %d HOUR)" - :day "DATE_ADD(NOW(), INTERVAL %d DAY)" - :week "DATE_ADD(NOW(), INTERVAL %d WEEK)" - :month "DATE_ADD(NOW(), INTERVAL %d MONTH)" - :quarter "DATE_ADD(NOW(), INTERVAL %d QUARTER)" - :year "DATE_ADD(NOW(), INTERVAL %d YEAR)") - amount))) - -(defn- humanize-connection-error-message [_ message] +(defn- date-interval [unit amount] + (utils/generated (format "DATE_ADD(NOW(), INTERVAL %d %s)" amount (s/upper-case (name unit))))) + +(defn- humanize-connection-error-message [message] (condp re-matches message #"^Communications link failure\s+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.$" - (i/connection-error-messages :cannot-connect-check-host-and-port) + (driver/connection-error-messages :cannot-connect-check-host-and-port) #"^Unknown database .*$" - (i/connection-error-messages :database-name-incorrect) + (driver/connection-error-messages :database-name-incorrect) #"Access denied for user.*$" - (i/connection-error-messages :username-or-password-incorrect) + (driver/connection-error-messages :username-or-password-incorrect) #"Must specify port after ':' in connection string" - (i/connection-error-messages :invalid-hostname) + (driver/connection-error-messages :invalid-hostname) #".*" ; default message)) -(defrecord MySQLDriver []) - -(extend MySQLDriver - ISqlDriverDatabaseSpecific {:connection-details->connection-spec connection-details->connection-spec - :database->connection-details database->connection-details - :unix-timestamp->timestamp unix-timestamp->timestamp - :date date - :date-interval date-interval - :timezone->set-timezone-sql timezone->set-timezone-sql} - IDriver (assoc GenericSQLIDriverMixin - :humanize-connection-error-message humanize-connection-error-message) - ISyncDriverTableFKs GenericSQLISyncDriverTableFKsMixin - ISyncDriverFieldAvgLength GenericSQLISyncDriverFieldAvgLengthMixin - ISyncDriverFieldPercentUrls GenericSQLISyncDriverFieldPercentUrlsMixin) - -(def ^:const driver - (map->MySQLDriver {:column->base-type column->base-type - :features (conj generic-sql/features :set-timezone) - :sql-string-length-fn :CHAR_LENGTH})) +(defdriver mysql + (sql-driver + {:driver-name "MySQL" + :details-fields [{:name "host" + :display-name "Host" + :default "localhost"} + {:name "port" + :display-name "Port" + :type :integer + :default 3306} + {:name "dbname" + :display-name "Database name" + :placeholder "birds_of_the_word" + :required true} + {:name "user" + :display-name "Database username" + :placeholder "What username do you use to login to the database?" 
+ :required true} + {:name "password" + :display-name "Database password" + :type :password + :placeholder "*******"}] + :column->base-type column->base-type + :string-length-fn :CHAR_LENGTH + :excluded-schemas #{"INFORMATION_SCHEMA"} + :connection-details->spec connection-details->spec + :unix-timestamp->timestamp unix-timestamp->timestamp + :date date + :date-interval date-interval + ;; If this fails you need to load the timezone definitions from your system into MySQL; + ;; run the command `mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql -u root mysql` + ;; See https://dev.mysql.com/doc/refman/5.7/en/time-zone-support.html for details + :set-timezone-sql "SET @@session.time_zone = ?;" + :humanize-connection-error-message humanize-connection-error-message})) diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj index f21085a947b6530ae827fb259f0da8c7f12b3910..80ea4af89303fbfbab64fe932447f075101d1afa 100644 --- a/src/metabase/driver/postgres.clj +++ b/src/metabase/driver/postgres.clj @@ -9,14 +9,9 @@ [swiss.arrows :refer :all] [metabase.db :refer [upd]] [metabase.models.field :refer [Field]] - [metabase.driver :as driver] - (metabase.driver [generic-sql :as generic-sql, :refer [GenericSQLIDriverMixin GenericSQLISyncDriverTableFKsMixin - GenericSQLISyncDriverFieldAvgLengthMixin GenericSQLISyncDriverFieldPercentUrlsMixin]] - [interface :as i, :refer [IDriver ISyncDriverTableFKs ISyncDriverFieldAvgLength ISyncDriverFieldPercentUrls - ISyncDriverSpecificSyncField]]) - [metabase.driver.generic-sql :as generic-sql] - (metabase.driver.generic-sql [interface :refer [ISqlDriverDatabaseSpecific]] - [util :refer [with-jdbc-metadata]])) + [metabase.driver :as driver, :refer [defdriver]] + [metabase.driver.generic-sql :refer [sql-driver]] + [metabase.driver.generic-sql.util :refer [with-jdbc-metadata]]) ;; This is necessary for when NonValidatingFactory is passed in the sslfactory connection string argument, ;; e.x. when connecting to a Heroku Postgres database from outside of Heroku. (:import org.postgresql.ssl.NonValidatingFactory)) @@ -88,34 +83,24 @@ :sslmode "require" :sslfactory "org.postgresql.ssl.NonValidatingFactory"}) ; HACK Why enable SSL if we disable certificate validation? -(defn- connection-details->connection-spec [_ {:keys [ssl] :as details-map}] +(defn- connection-details->spec [{:keys [ssl] :as details-map}] (-> details-map + (update :port (fn [port] + (if (string? port) (Integer/parseInt port) + port))) (dissoc :ssl) ; remove :ssl in case it's false; DB will still try (& fail) to connect if the key is there (merge (when ssl ; merging ssl-params will add :ssl back in if desirable ssl-params)) (rename-keys {:dbname :db}) kdb/postgres)) -(defn- database->connection-details [_ {:keys [details]}] - (let [{:keys [host port]} details] - (-> details - (assoc :host host - :ssl (:ssl details) - :port (if (string? port) (Integer/parseInt port) - port)) - (rename-keys {:dbname :db})))) - -(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds] +(defn- unix-timestamp->timestamp [field-or-value seconds-or-milliseconds] (utils/func (case seconds-or-milliseconds :seconds "TO_TIMESTAMP(%s)" :milliseconds "TO_TIMESTAMP(%s / 1000)") [field-or-value])) -(defn- timezone->set-timezone-sql [_ timezone] - (format "SET LOCAL timezone TO '%s';" timezone)) - - -(defn- driver-specific-sync-field! [_ {:keys [table], :as field}] +(defn- driver-specific-sync-field! 
[{:keys [table], :as field}] (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db @table)] (let [[{:keys [type_name]}] (->> (.getColumns md nil nil (:name @table) (:name field)) jdbc/result-set-seq)] @@ -123,7 +108,7 @@ (upd Field (:id field) :special_type :json) (assoc field :special_type :json))))) -(defn- date [_ unit field-or-value] +(defn- date [unit field-or-value] (utils/func (case unit :default "CAST(%s AS TIMESTAMP)" :minute "DATE_TRUNC('minute', %s)" @@ -145,33 +130,25 @@ :year "CAST(EXTRACT(YEAR FROM %s) AS INTEGER)") [field-or-value])) -(defn- date-interval [_ unit amount] - (utils/generated (format (case unit - :minute "(NOW() + INTERVAL '%d minute')" - :hour "(NOW() + INTERVAL '%d hour')" - :day "(NOW() + INTERVAL '%d day')" - :week "(NOW() + INTERVAL '%d week')" - :month "(NOW() + INTERVAL '%d month')" - :quarter "(NOW() + INTERVAL '%d quarter')" - :year "(NOW() + INTERVAL '%d year')") - amount))) +(defn- date-interval [unit amount] + (utils/generated (format "(NOW() + INTERVAL '%d %s')" amount (name unit)))) -(defn- humanize-connection-error-message [_ message] +(defn- humanize-connection-error-message [message] (condp re-matches message #"^FATAL: database \".*\" does not exist$" - (i/connection-error-messages :database-name-incorrect) + (driver/connection-error-messages :database-name-incorrect) #"^No suitable driver found for.*$" - (i/connection-error-messages :invalid-hostname) + (driver/connection-error-messages :invalid-hostname) #"^Connection refused. Check that the hostname and port are correct and that the postmaster is accepting TCP/IP connections.$" - (i/connection-error-messages :cannot-connect-check-host-and-port) + (driver/connection-error-messages :cannot-connect-check-host-and-port) #"^FATAL: role \".*\" does not exist$" - (i/connection-error-messages :username-incorrect) + (driver/connection-error-messages :username-incorrect) #"^FATAL: password authentication failed for user.*$" - (i/connection-error-messages :password-incorrect) + (driver/connection-error-messages :password-incorrect) #"^FATAL: .*$" ; all other FATAL messages: strip off the 'FATAL' part, capitalize, and add a period (let [[_ message] (re-matches #"^FATAL: (.*$)" message)] @@ -180,23 +157,38 @@ #".*" ; default message)) -(defrecord PostgresDriver []) - -(extend PostgresDriver - ISqlDriverDatabaseSpecific {:connection-details->connection-spec connection-details->connection-spec - :database->connection-details database->connection-details - :unix-timestamp->timestamp unix-timestamp->timestamp - :date date - :date-interval date-interval - :timezone->set-timezone-sql timezone->set-timezone-sql} - ISyncDriverSpecificSyncField {:driver-specific-sync-field! 
driver-specific-sync-field!} - IDriver (assoc GenericSQLIDriverMixin - :humanize-connection-error-message humanize-connection-error-message) - ISyncDriverTableFKs GenericSQLISyncDriverTableFKsMixin - ISyncDriverFieldAvgLength GenericSQLISyncDriverFieldAvgLengthMixin - ISyncDriverFieldPercentUrls GenericSQLISyncDriverFieldPercentUrlsMixin) - -(def ^:const driver - (map->PostgresDriver {:column->base-type column->base-type - :features (conj generic-sql/features :set-timezone) - :sql-string-length-fn :CHAR_LENGTH})) +(defdriver postgres + (sql-driver + {:driver-name "PostgreSQL" + :details-fields [{:name "host" + :display-name "Host" + :default "localhost"} + {:name "port" + :display-name "Port" + :type :integer + :default 5432} + {:name "dbname" + :display-name "Database name" + :placeholder "birds_of_the_word" + :required true} + {:name "user" + :display-name "Database username" + :placeholder "What username do you use to login to the database?" + :required true} + {:name "password" + :display-name "Database password" + :type :password + :placeholder "*******"} + {:name "ssl" + :display-name "Use a secure connection (SSL)?" + :type :boolean + :default false}] + :string-length-fn :CHAR_LENGTH + :column->base-type column->base-type + :connection-details->spec connection-details->spec + :unix-timestamp->timestamp unix-timestamp->timestamp + :date date + :date-interval date-interval + :set-timezone-sql "UPDATE pg_settings SET setting = ? WHERE name ILIKE 'timezone';" + :driver-specific-sync-field! driver-specific-sync-field! + :humanize-connection-error-message humanize-connection-error-message})) diff --git a/src/metabase/driver/query_processor.clj b/src/metabase/driver/query_processor.clj index aa4863ffd0faf95159752a222e75b1e5c588cc66..e652ba3c1b35448a88081a8a31d215e4babaec02 100644 --- a/src/metabase/driver/query_processor.clj +++ b/src/metabase/driver/query_processor.clj @@ -8,7 +8,6 @@ [medley.core :as m] [swiss.arrows :refer [<<-]] [metabase.db :refer :all] - [metabase.driver.interface :as i] (metabase.driver.query-processor [annotate :as annotate] [expand :as expand] [interface :refer :all] @@ -44,6 +43,7 @@ (try (qp query) (catch Throwable e {:status :failed + :class (class e) :error (or (.getMessage e) (str e)) :stacktrace (u/filtered-stacktrace e) :query (dissoc query :database :driver) @@ -193,10 +193,10 @@ (fn [f] (if-not (map? f) f (m/filter-vals identity (into {} f)))) - ;; obscure DB details when logging. Just log the class of driver because we don't care about its properties + ;; obscure DB details when logging. 
Just log the name of driver because we don't care about its properties (-> query (assoc-in [:database :details] "😋 ") ; :yum: - (update :driver class))))))) + (update :driver :driver-name))))))) (qp query))) @@ -241,8 +241,9 @@ (qp query)))) (defn- process-structured [{:keys [driver], :as query}] - (let [driver-process-query (partial i/process-query driver) - driver-wrap-process-query (partial i/wrap-process-query-middleware driver)] + (let [driver-process-query (:process-query driver) + driver-wrap-process-query (or (:process-query-in-context driver) + (fn [qp] qp))] ((<<- wrap-catch-exceptions pre-expand driver-wrap-process-query @@ -257,8 +258,9 @@ driver-process-query) query))) (defn- process-native [{:keys [driver], :as query}] - (let [driver-process-query (partial i/process-query driver) - driver-wrap-process-query (partial i/wrap-process-query-middleware driver)] + (let [driver-process-query (:process-query driver) + driver-wrap-process-query (or (:process-query-in-context driver) + (fn [qp] qp))] ((<<- wrap-catch-exceptions driver-wrap-process-query post-add-row-count-and-status diff --git a/src/metabase/driver/query_processor/annotate.clj b/src/metabase/driver/query_processor/annotate.clj index b054789617c1c924caa9a61b80a0914335ab1442..e0e55ea2a2a684108987e06cd40919bc34ef9746 100644 --- a/src/metabase/driver/query_processor/annotate.clj +++ b/src/metabase/driver/query_processor/annotate.clj @@ -1,17 +1,14 @@ (ns metabase.driver.query-processor.annotate (:refer-clojure :exclude [==]) - (:require [clojure.core.logic :refer :all] - (clojure.core.logic [arithmetic :as ar] - [fd :as fd]) - [clojure.tools.macro :refer [macrolet]] - (clojure [set :as set] + (:require (clojure [set :as set] [string :as s]) + [clojure.tools.logging :as log] + [medley.core :as m] [metabase.db :refer [sel]] [metabase.driver.query-processor.interface :as i] (metabase.models [field :refer [Field], :as field] [foreign-key :refer [ForeignKey]]) - [metabase.util :as u] - [metabase.util.logic :refer :all])) + [metabase.util :as u])) ;; Fields should be returned in the following order: ;; 1. Breakout Fields @@ -35,245 +32,219 @@ ;; When two Fields have the same :position and :special_type "group", fall back to sorting Fields alphabetically by name. ;; This is arbitrary, but it makes the QP deterministic by keeping the results in a consistent order, which makes it testable. -;;; # ---------------------------------------- FIELD COLLECTION ---------------------------------------- - -;; Walk the expanded query and collect the fields found therein. Associate some additional info to each that we'll pass to core.logic so it knows -;; how to order the results - -(defn- field-qualify-name [field] - (assoc field :field-name (keyword (apply str (->> (rest (i/qualified-name-components field)) - (interpose ".")))))) - -(defn- flatten-collect-fields [form] - (let [fields (transient [])] - (clojure.walk/prewalk (fn [f] - (cond - (= (type f) metabase.driver.query_processor.interface.Field) - (do - (conj! fields f) - ;; HACK !!! - ;; Nested Mongo fields come back inside of their parent when you specify them in the fields clause - ;; e.g. (Q fields venue...name) will return rows like {:venue {:name "Kyle's Low-Carb Grill"}} - ;; Until we fix this the right way we'll just include the parent Field in the :query-fields list so the pattern - ;; matching works correctly. - ;; (This hack was part of the old annotation code too, it just sticks out better because it's no longer hidden amongst the others) - (when (:parent f) - (conj! 
fields (:parent f)))) - - ;; For a DateTimeField we'll flatten it back into regular Field but include the :unit info for the frontend - ;; Recurse so this fn will handle the resulting Field normally - (= (type f) metabase.driver.query_processor.interface.DateTimeField) - (recur (assoc (:field f) - :unit (:unit f))) - - :else f)) - form) - (->> (persistent! fields) - distinct - (map field-qualify-name) - (mapv (u/rpartial dissoc :parent :parent-id :table-name))))) - -(defn- flatten-collect-ids-domain [form] - (apply fd/domain (sort (map :field-id (flatten-collect-fields form))))) - - -;;; # ---------------------------------------- COLUMN RESOLUTION & ORDERING (CORE.LOGIC) ---------------------------------------- - -;; Use core.logic to determine the appropriate ordering / result Fields - -(defn- field-name° [field field-name] - (featurec field {:field-name field-name})) - -(defn- make-field-in° [items] - (if-not (seq items) - (constantly fail) - (let [ids-domain (flatten-collect-ids-domain items)] - (fn [field] - (fresh [id] - (featurec field {:field-id id}) - (fd/in id ids-domain)))))) - -(defn- breakout-field° [{:keys [breakout]}] - (make-field-in° breakout)) - -(defn- explicit-fields-field° [{:keys [fields-is-implicit fields], :as query}] - (if fields-is-implicit (constantly fail) - (make-field-in° fields))) - -(defn- aggregate-field° [{{ag-type :aggregation-type, ag-field :field} :aggregation}] - (if-not (contains? #{:avg :count :distinct :stddev :sum} ag-type) - (constantly fail) - (let [ag-field (if (contains? #{:count :distinct} ag-type) - {:base-type :IntegerField - :field-name :count - :field-display-name "count" - :special-type :number} - (-> ag-field - (select-keys [:base-type :special-type]) - (assoc :field-name (if (= ag-type :distinct) :count - ag-type)) - (assoc :field-display-name (if (= ag-type :distinct) "count" - (name ag-type)))))] - (fn [out] - (trace-lvars "*" out) - (== out ag-field))))) - -(defn- unknown-field° [field-name out] - (all - (== out {:base-type :UnknownField - :special-type nil - :field-name field-name - :field-display-name field-name}) - (trace-lvars "UNKNOWN FIELD - NOT PRESENT IN EXPANDED QUERY (!)" out))) - -(defn- field° [query] - (let [ag-field° (aggregate-field° query) - normal-field° (let [field-name->field (let [fields (flatten-collect-fields query)] - (zipmap (map :field-name fields) fields))] - (fn [field-name out] - (if-let [field (field-name->field field-name)] - (== out field) - fail)))] - (fn [field-name field] - (conda - ((normal-field° field-name field)) - ((ag-field° field)))))) - -(def ^:const ^:private field-groups - {:breakout 0 - :aggregation 1 - :explicit-fields 2 - :other 3}) - -(defn- field-group° [query] - (let [breakout° (breakout-field° query) - agg° (aggregate-field° query) - xfields° (explicit-fields-field° query)] - (fn [field out] - (conda - ((breakout° field) (== out (field-groups :breakout))) - ((agg° field) (== out (field-groups :aggregation))) - ((xfields° field) (== out (field-groups :explicit-fields))) - (s# (== out (field-groups :other))))))) - -(defn- field-position° [field out] - (featurec field {:position out})) - -(def ^:const ^:private special-type-groups - {:id 0 - :name 1 - :other 2}) - -(defn- special-type-group° [field out] - (conda - ((featurec field {:special-type :id}) (== out (special-type-groups :id))) - ((featurec field {:special-type :name}) (== out (special-type-groups :name))) - (s# (== out (special-type-groups :other))))) - -(defn- field-name< [query] - (fn [f1 f2] - (fresh [name-1 name-2] - 
(field-name° f1 name-1) - (field-name° f2 name-2) - (matches-seq-order° name-1 name-2 (:result-keys query))))) - -(defn- clause-position< [query] - (let [group° (field-group° query) - breakout-fields (flatten-collect-fields (:breakout query)) - fields-fields (flatten-collect-fields (:fields query))] - (fn [f1 f2] - (conda - ((group° f1 (field-groups :breakout)) (matches-seq-order° f1 f2 breakout-fields)) - ((group° f1 (field-groups :explicit-fields)) (matches-seq-order° f1 f2 fields-fields)))))) - -(defn- fields-sorted° [query] - (let [group° (field-group° query) - name< (field-name< query) - clause-pos< (clause-position< query)] - (fn [f1 f2] - (macrolet [(<-or-== [f & ==-clauses] `(conda - ((fresh [v#] - (~f ~'f1 v#) - (~f ~'f2 v#)) ~@==-clauses) - ((fresh [v1# v2#] - (~f ~'f1 v1#) - (~f ~'f2 v2#) - (ar/< v1# v2#)) ~'s#)))] - (<-or-== group° - (<-or-== field-position° - (conda - ((group° f1 (field-groups :other)) (<-or-== special-type-group° - (name< f1 f2))) - ((clause-pos< f1 f2))))))))) - -(defn- resolve+order-cols [{:keys [result-keys], :as query}] - (when (seq result-keys) - (first (let [fields (vec (lvars (count result-keys))) - known-field° (field° query)] - (run 1 [q] - (everyg (fn [[result-key field]] - (conda - ((known-field° result-key field)) - ((unknown-field° result-key field)))) - (zipmap result-keys fields)) - (sorted-permutation° (fields-sorted° query) fields q)))))) - - -;;; # ---------------------------------------- COLUMN DETAILS ---------------------------------------- - -;; Format the results in the way the front-end expects. - -(defn- format-col [col] - (merge {:description nil - :id nil - :table_id nil} - (-> col - (set/rename-keys {:base-type :base_type - :field-id :id - :field-name :name - :field-display-name :display_name - :special-type :special_type - :preview-display :preview_display - :table-id :table_id}) - (dissoc :position)))) - -(defn- add-fields-extra-info +;;; ## Field Resolution + +(defn- collect-fields + "Return a sequence of all the `Fields` inside THIS, recursing as needed for collections. + For maps, add or `conj` to property `:path`, recording the keypath used to reach each `Field.` + + (collect-fields {:name \"id\", ...}) -> [{:name \"id\", ...}] + (collect-fields [{:name \"id\", ...}]) -> [{:name \"id\", ...}] + (collect-fields {:a {:name \"id\", ...}) -> [{:name \"id\", :path [:a], ...}]" + [this] + {:post [(every? (partial instance? metabase.driver.query_processor.interface.Field) %)]} + (condp instance? this + ;; For a DateTimeField we'll flatten it back into regular Field but include the :unit info for the frontend. + ;; Recurse so it is otherwise handled normally + metabase.driver.query_processor.interface.DateTimeField + (let [{:keys [field unit]} this] + (collect-fields (assoc field :unit unit))) + + metabase.driver.query_processor.interface.Field + (if-let [parent (:parent this)] + ;; Nested Mongo fields come back inside of their parent when you specify them in the fields clause + ;; e.g. (Q fields venue...name) will return rows like {:venue {:name "Kyle's Low-Carb Grill"}} + ;; Until we fix this the right way we'll just include the parent Field in the :query-fields list so the pattern + ;; matching works correctly. 
+ [this parent] + [this]) + + clojure.lang.IPersistentMap + (for [[k v] (seq this) + field (collect-fields v) + :when field] + (assoc field :source k)) + + clojure.lang.Sequential + (for [[i field] (m/indexed (mapcat collect-fields this))] + (assoc field :clause-position i)) + + nil)) + +(defn- qualify-field-name + "Update the `field-name` to reflect the name we expect to see coming back from the query. + (This is for handling Mongo nested Fields, I think (?))" + [field] + {:post [(keyword? (:field-name %))]} + (assoc field :field-name (->> (rest (i/qualified-name-components field)) + (interpose ".") + (apply str) + keyword))) + +(defn- add-aggregate-field-if-needed + "Add a Field containing information about an aggregate column such as `:count` or `:distinct` if needed." + [{{ag-type :aggregation-type, ag-field :field, :as ag} :aggregation} fields] + (if (or (not ag-type) + (= ag-type :rows)) + fields + (conj fields (merge {:source :aggregation} + (if (contains? #{:count :distinct} ag-type) + {:base-type :IntegerField + :field-name :count + :field-display-name :count + :special-type :number} + (merge (select-keys ag-field [:base-type :special-type]) + {:field-name ag-type + :field-display-name ag-type})))))) + +(defn- add-unknown-fields-if-needed + "Create info maps for any fields we didn't expect to come back from the query. + Ideally, this should never happen, but on the off chance it does we still want to return them in the results." + [actual-keys fields] + {:pre [(set? actual-keys) + (every? keyword? actual-keys)]} + (let [expected-keys (set (map :field-name fields)) + _ (assert (every? keyword? expected-keys)) + missing-keys (set/difference actual-keys expected-keys)] + (when (seq missing-keys) + (log/error (u/format-color 'red "Unknown fields - returned by results but not present in expanded query: %s\nExpected: %s\nActual: %s" + missing-keys expected-keys actual-keys))) + (concat fields (for [k missing-keys] + {:base-type :UnknownField + :special-type nil + :field-name k + :field-display-name k})))) + + +;;; ## Field Sorting + +;; We sort Fields with an "importance" vector like [source-importance position special-type-importance name] + +(defn- source-importance-fn + "Create a function to return an importance for FIELD based on its source clause in the query. + e.g. if a Field comes from a `:breakout` clause, we should return that column first in the results." + [{:keys [fields-is-implicit]}] + (fn [{:keys [source]}] + (or (when (= source :breakout) + :0-breakout) + (when (= source :aggregation) + :1-aggregation) + (when-not fields-is-implicit + (when (= source :fields) + :2-fields)) + :3-other))) + +(defn- special-type-importance + "Return an importance for FIELD based on the relative importance of its `:special-type`. + i.e. a Field with special type `:id` should be sorted ahead of all other Fields in the results." + [{:keys [special-type]}] + (condp = special-type + :id :0-id + :name :1-name + :2-other)) + +(defn- field-importance-fn + "Create a function to return an \"importance\" vector for use in sorting FIELD." + [query] + (let [source-importance (source-importance-fn query)] + (fn [{:keys [position clause-position field-name source], :as field}] + [(source-importance field) + (or position + (when (= source :fields) + clause-position) + Integer/MAX_VALUE) + (special-type-importance field) + field-name]))) + +(defn- sort-fields + "Sort FIELDS by their \"importance\" vectors."
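;; A rough sketch (not part of the change itself) of how the "importance" vectors above are meant to sort.
;; The field maps here are hypothetical and only carry the keys field-importance-fn looks at.
(comment
  (sort-by (field-importance-fn {:fields-is-implicit false})
           [{:field-name :count, :source :aggregation}
            {:field-name :name,  :source :fields, :clause-position 1, :special-type :name}
            {:field-name :id,    :source :fields, :clause-position 0, :special-type :id}
            {:field-name :price, :source :breakout, :position 5}])
  ;; => price (breakout) first, then count (aggregation), then id and name in their :fields clause order
  )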
+ [query fields] + (let [field-importance (field-importance-fn query)] + (log/debug (u/format-color 'yellow "Sorted fields:\n%s" (u/pprint-to-str (sort (map field-importance fields))))) + (sort-by field-importance fields))) + +(defn- convert-field-to-expected-format + "Rename keys, provide default values, etc. for FIELD so it is in the format expected by the frontend." + [field] + {:pre [field] + :post [(keyword? (:name %))]} + (let [defaults {:description nil + :id nil + :table_id nil}] + (-> (merge defaults field) + (update :field-display-name name) + (set/rename-keys {:base-type :base_type + :field-id :id + :field-name :name + :field-display-name :display_name + :schema-name :schema_name + :special-type :special_type + :preview-display :preview_display + :table-id :table_id}) + (dissoc :position :clause-position :source :parent :parent-id :table-name)))) + +(defn- fk-field->dest-fn + "Fetch fk info and return a function that returns the destination Field of a given Field." + ([fields] + (or (fk-field->dest-fn fields (for [{:keys [special_type id]} fields + :when (= special_type :fk)] + id)) + (constantly nil))) + ;; Fetch the ForeignKey objects whose origin is in the returned Fields, create a map of origin-field-id->destination-field-id + ([fields fk-ids] + (when (seq fk-ids) + (fk-field->dest-fn fields fk-ids (sel :many :field->field [ForeignKey :origin_id :destination_id] + :origin_id [in fk-ids] + :destination_id [not= nil])))) + ;; Fetch the destination Fields referenced by the ForeignKeys + ([fields fk-ids id->dest-id] + (when (seq (vals id->dest-id)) + (fk-field->dest-fn fields fk-ids id->dest-id (sel :many :id->fields [Field :id :name :display_name :table_id :description :base_type :special_type :preview_display] + :id [in (vals id->dest-id)])))) + ;; Return a function that will return the corresponding destination Field for a given Field + ([fields fk-ids id->dest-id dest-id->field] + (fn [{:keys [id]}] + (some-> id id->dest-id dest-id->field)))) + +(defn- add-extra-info-to-fk-fields "Add `:extra_info` about `ForeignKeys` to `Fields` whose `special_type` is `:fk`." [fields] - ;; Get a sequence of add Field IDs that have a :special_type of FK - (let [fk-field-ids (->> fields - (filter #(= (:special_type %) :fk)) - (map :id) - (filter identity)) - ;; Look up the Foreign keys info if applicable. - ;; Build a map of FK Field IDs -> Destination Field IDs - field-id->dest-field-id (when (seq fk-field-ids) - (sel :many :field->field [ForeignKey :origin_id :destination_id], :origin_id [in fk-field-ids], :destination_id [not= nil])) - - ;; Build a map of Destination Field IDs -> Destination Fields - dest-field-id->field (when (and (seq fk-field-ids) - (seq (vals field-id->dest-field-id))) - (sel :many :id->fields [Field :id :name :display_name :table_id :description :base_type :special_type :preview_display], :id [in (vals field-id->dest-field-id)]))] - - ;; Add the :extra_info + :target to every Field. For non-FK Fields, these are just {} and nil, respectively. 
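;; A rough sketch (not part of the change itself) of the output shape add-extra-info-to-fk-fields is expected
;; to produce; the ids and the destination Field shown are hypothetical, and the FK lookup normally hits the app DB.
(comment
  (add-extra-info-to-fk-fields
   [{:id 10, :name "USER_ID", :special_type :fk}
    {:id 11, :name "PRICE",   :special_type nil}])
  ;; => ({:id 10, :name "USER_ID", :special_type :fk,
  ;;      :target {:id 7, :name "ID", :table_id 2, ...},
  ;;      :extra_info {:target_table_id 2}}
  ;;     {:id 11, :name "PRICE", :special_type nil, :target nil, :extra_info {}})
  )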
- (vec (for [{field-id :id, :as field} fields] - (let [dest-field (when (seq fk-field-ids) - (some->> field-id - field-id->dest-field-id - dest-field-id->field))] - (assoc field - :target dest-field - :extra_info (if-not dest-field {} - {:target_table_id (:table_id dest-field)}))))))) - -(defn post-annotate [qp] + (let [field->dest (fk-field->dest-fn fields)] + (for [field fields] + (let [{:keys [table_id], :as dest-field} (field->dest field)] + (assoc field + :target dest-field + :extra_info (if table_id {:target_table_id table_id} {})))))) + +(defn- resolve-sort-and-format-columns + "Collect the Fields referenced in QUERY, sort them according to the rules at the top + of this page, format them as expected by the frontend, and return the results." + [query result-keys] + {:pre [(set? result-keys)]} + (when (seq result-keys) + (->> (collect-fields query) + (map qualify-field-name) + (add-aggregate-field-if-needed query) + (map (u/rpartial update :field-name keyword)) + (add-unknown-fields-if-needed result-keys) + (sort-fields query) + (map convert-field-to-expected-format) + (filter (comp (partial contains? result-keys) :name)) + (m/distinct-by :name) + add-extra-info-to-fk-fields))) + +(defn post-annotate + "QP middleware that runs directly after the the query is ran. This stage: + + 1. Sorts the results according to the rules at the top of this page + 2. Resolves the Fields returned in the results and adds information like `:columns` and `:cols` + expected by the frontend." + [qp] (fn [query] - (let [results (qp query) - cols (->> (assoc (:query query) :result-keys (vec (sort (keys (first results))))) - resolve+order-cols - (map format-col) - add-fields-extra-info) - columns (map :name cols)] + (let [results (qp query) + result-keys (set (keys (first results))) + cols (resolve-sort-and-format-columns (:query query) result-keys) + columns (mapv :name cols)] {:cols (vec (for [col cols] (update col :name name))) :columns (mapv name columns) diff --git a/src/metabase/driver/query_processor/expand.clj b/src/metabase/driver/query_processor/expand.clj index aee9f083c9d7c3c7652f14b9407cd606dd4ff7d4..1ea9947becbba0a1a5602cc45922350e062ee670 100644 --- a/src/metabase/driver/query_processor/expand.clj +++ b/src/metabase/driver/query_processor/expand.clj @@ -9,7 +9,6 @@ [korma.core :as k] [swiss.arrows :refer [-<>]] [metabase.db :refer [sel]] - [metabase.driver.interface :as driver] [metabase.driver.query-processor.interface :refer :all] [metabase.util :as u]) (:import (clojure.lang Keyword))) @@ -24,15 +23,14 @@ ;; ## -------------------- Expansion - Impl -------------------- - - (def ^:private ^:dynamic *original-query-dict* "The entire original Query dict being expanded." nil) (defn- assert-driver-supports [^Keyword feature] {:pre [(:driver *original-query-dict*)]} - (driver/assert-driver-supports (:driver *original-query-dict*) feature)) + (when-not (contains? (:features (:driver *original-query-dict*)) feature) + (throw (Exception. (format "%s is not supported by this driver." (name feature)))))) (defn- non-empty-clause? 
[clause] (and clause diff --git a/src/metabase/driver/query_processor/interface.clj b/src/metabase/driver/query_processor/interface.clj index 05906e8da9a056d3f2a982ea0f61879cee4512da..75ecb94fc734ee133de247b9ad567ee4d2328928 100644 --- a/src/metabase/driver/query_processor/interface.clj +++ b/src/metabase/driver/query_processor/interface.clj @@ -40,6 +40,7 @@ ^Keyword base-type ^Keyword special-type ^Integer table-id + ^String schema-name ^String table-name ^Integer position ^String description diff --git a/src/metabase/driver/query_processor/resolve.clj b/src/metabase/driver/query_processor/resolve.clj index 31e249e993dfddb05ce2bcd33e2657ea81c7977a..5333c15de20f785f576a33b1c74fda143c9a8976 100644 --- a/src/metabase/driver/query_processor/resolve.clj +++ b/src/metabase/driver/query_processor/resolve.clj @@ -79,8 +79,11 @@ :else this)) (defmethod resolve-table Field [{:keys [table-id], :as this} table-id->table] - (assoc this :table-name (:name (or (table-id->table table-id) - (throw (Exception. (format "Query expansion failed: could not find table %d." table-id))))))) + (let [table (or (table-id->table table-id) + (throw (Exception. (format "Query expansion failed: could not find table %d." table-id))))] + (assoc this + :table-name (:name table) + :schema-name (:schema table)))) ;; ## FieldPlaceholder @@ -194,7 +197,7 @@ [{{source-table-id :source-table} :query, database-id :database, :keys [table-ids fk-field-ids], :as expanded-query-dict}] {:pre [(integer? source-table-id)]} (let [table-ids (conj table-ids source-table-id) - table-id->table (sel :many :id->fields [Table :name :id] :id [in table-ids]) + table-id->table (sel :many :id->fields [Table :schema :name :id] :id [in table-ids]) join-tables (vals (dissoc table-id->table source-table-id))] (-<> expanded-query-dict diff --git a/src/metabase/driver/sqlserver.clj b/src/metabase/driver/sqlserver.clj new file mode 100644 index 0000000000000000000000000000000000000000..75bb8492850eebb19b740f898e21eeb3ba6e7fcb --- /dev/null +++ b/src/metabase/driver/sqlserver.clj @@ -0,0 +1,144 @@ +(ns metabase.driver.sqlserver + (:require [clojure.string :as s] + (korma [core :as k] + [db :as kdb]) + [korma.sql.utils :as utils] + [metabase.driver :refer [defdriver]] + [metabase.driver.generic-sql :refer [sql-driver]] + [metabase.driver.generic-sql.util :refer [funcs]]) + (:import net.sourceforge.jtds.jdbc.Driver)) ; need to import this in order to load JDBC driver + +(def ^:private ^:const column->base-type + "See [this page](https://msdn.microsoft.com/en-us/library/ms187752.aspx) for details." + {:bigint :BigIntegerField + :binary :UnknownField + :bit :BooleanField ; actually this is 1 / 0 instead of true / false ... 
+ :char :CharField + :cursor :UnknownField + :date :DateField + :datetime :DateTimeField + :datetime2 :DateTimeField + :datetimeoffset :DateTimeField + :decimal :DecimalField + :float :FloatField + :geography :UnknownField + :geometry :UnknownField + :hierarchyid :UnknownField + :image :UnknownField + :int :IntegerField + :money :DecimalField + :nchar :CharField + :ntext :TextField + :numeric :DecimalField + :nvarchar :TextField + :real :FloatField + :smalldatetime :DateTimeField + :smallint :IntegerField + :smallmoney :DecimalField + :sql_variant :UnknownField + :table :UnknownField + :text :TextField + :time :TimeField + :timestamp :UnknownField ; not a standard SQL timestamp, see https://msdn.microsoft.com/en-us/library/ms182776.aspx + :tinyint :IntegerField + :uniqueidentifier :UUIDField + :varbinary :UnknownField + :varchar :TextField + :xml :UnknownField + (keyword "int identity") :IntegerField}) ; auto-incrementing integer (ie pk) field + +(defn- connection-details->spec [details] + (-> (kdb/mssql details) + ;; swap out Microsoft Driver details for jTDS ones + (assoc :classname "net.sourceforge.jtds.jdbc.Driver" + :subprotocol "jtds:sqlserver") + ;; adjust the connection URL to match up with the jTDS format (see http://jtds.sourceforge.net/faq.html#urlFormat) + (update :subname #(s/replace % #";database=" "/")))) + +;; See also the [jTDS SQL <-> Java types table](http://jtds.sourceforge.net/typemap.html) +(defn- date [unit field-or-value] + (case unit + :default (utils/func "CAST(%s AS DATETIME)" [field-or-value]) + :minute (utils/func "CAST(%s AS SMALLDATETIME)" [field-or-value]) + :minute-of-hour (utils/func "DATEPART(minute, %s)" [field-or-value]) + :hour (utils/func "CAST(FORMAT(%s, 'yyyy-MM-dd HH:00:00') AS DATETIME)" [field-or-value]) + :hour-of-day (utils/func "DATEPART(hour, %s)" [field-or-value]) + ;; jTDS is retarded; I sense an ongoing theme here. It returns DATEs as strings instead of as java.sql.Dates + ;; like every other SQL DB we support. 
Work around that by casting to DATE for truncation then back to DATETIME so we get the type we want + :day (utils/func "CAST(CAST(%s AS DATE) AS DATETIME)" [field-or-value]) + :day-of-week (utils/func "DATEPART(weekday, %s)" [field-or-value]) + :day-of-month (utils/func "DATEPART(day, %s)" [field-or-value]) + :day-of-year (utils/func "DATEPART(dayofyear, %s)" [field-or-value]) + ;; Subtract the number of days needed to bring us to the first day of the week, then convert to date + ;; The equivalent SQL looks like: + ;; CAST(DATEADD(day, 1 - DATEPART(weekday, %s), CAST(%s AS DATE)) AS DATETIME) + ;; But we have to use this ridiculous 'funcs' function in order to generate the korma form we want (AFAIK) + ;; utils/func only handles multiple arguments if they are comma separated and injected into a single `%s` format placeholder + :week (funcs "CAST(%s AS DATETIME)" + ["DATEADD(day, %s)" + ["1 - DATEPART(weekday, %s)" field-or-value] + ["CAST(%s AS DATE)" field-or-value]]) + :week-of-year (utils/func "DATEPART(iso_week, %s)" [field-or-value]) + :month (utils/func "CAST(FORMAT(%s, 'yyyy-MM-01') AS DATETIME)" [field-or-value]) + :month-of-year (utils/func "DATEPART(month, %s)" [field-or-value]) + ;; Format date as yyyy-01-01 then add the appropriate number of quarters + ;; Equivalent SQL: + ;; DATEADD(quarter, DATEPART(quarter, %s) - 1, FORMAT(%s, 'yyyy-01-01')) + :quarter (funcs "DATEADD(quarter, %s)" + ["DATEPART(quarter, %s) - 1" field-or-value] + ["FORMAT(%s, 'yyyy-01-01')" field-or-value]) + :quarter-of-year (utils/func "DATEPART(quarter, %s)" [field-or-value]) + :year (utils/func "DATEPART(year, %s)" [field-or-value]))) + +(defn- date-interval [unit amount] + (utils/generated (format "DATEADD(%s, %d, GETUTCDATE())" (name unit) amount))) + +(defn- unix-timestamp->timestamp [field-or-value seconds-or-milliseconds] + (utils/func (case seconds-or-milliseconds + ;; The second argument to DATEADD() is cast to a 32-bit integer. BIGINT is 64 bits, so we tend to run into + ;; integer overflow errors (especially for millisecond timestamps). + ;; Work around this by converting the timestamps to minutes instead before calling DATEADD(). + :seconds "DATEADD(minute, (%s / 60), '1970-01-01')" + :milliseconds "DATEADD(minute, (%s / 60000), '1970-01-01')") + [field-or-value])) + +(defn- apply-limit [korma-query {value :limit}] + (k/modifier korma-query (format "TOP %d" value))) + +(defn- apply-page [korma-query {{:keys [items page]} :page}] + (k/offset korma-query (format "%d ROWS FETCH NEXT %d ROWS ONLY" + (* items (dec page)) + items))) + +(defdriver sqlserver + (-> (sql-driver {:driver-name "SQL Server" + :details-fields [{:name "host" + :display-name "Host" + :default "localhost"} + {:name "port" + :display-name "Port" + :type :integer + :default 1433} + {:name "dbname" + :display-name "Database name" + :placeholder "birds_of_the_word" + :required true} + {:name "user" + :display-name "Database username" + :placeholder "What username do you use to login to the database?"
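;; A rough sketch (not part of the change itself) of the SQL these helpers aim to produce, using
;; hypothetical values; exact identifier quoting is left to korma:
;;   (unix-timestamp->timestamp col :milliseconds)  -> DATEADD(minute, (col / 60000), '1970-01-01')
;;   (apply-limit q {:limit 10})                    -> SELECT TOP 10 ...
;;   (apply-page  q {:page {:items 20, :page 3}})   -> ... OFFSET 40 ROWS FETCH NEXT 20 ROWS ONLY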
+ :required true} + {:name "password" + :display-name "Database password" + :type :password + :placeholder "*******"}] + :string-length-fn :LEN + :stddev-fn :STDEV + :current-datetime-fn :GETUTCDATE + :excluded-schemas #{"sys" "INFORMATION_SCHEMA"} + :column->base-type column->base-type + :connection-details->spec connection-details->spec + :date date + :date-interval date-interval + :unix-timestamp->timestamp unix-timestamp->timestamp}) + (update :qp-clause->handler merge {:limit apply-limit + :page apply-page}))) diff --git a/src/metabase/driver/sync.clj b/src/metabase/driver/sync.clj index 624faf49cbc4cce0f14d20a338629eb2c7113e22..e2144f8bdada4ff7cf24110e0a7ad245a2233115 100644 --- a/src/metabase/driver/sync.clj +++ b/src/metabase/driver/sync.clj @@ -8,8 +8,8 @@ [korma.core :as k] [medley.core :as m] [metabase.db :refer :all] - (metabase.driver [interface :refer :all] - [query-processor :as qp]) + [metabase.driver :refer [max-sync-lazy-seq-results]] + [metabase.driver.query-processor :as qp] [metabase.driver.sync.queries :as queries] [metabase.events :as events] (metabase.models [common :as common] @@ -38,56 +38,80 @@ ;; ## sync-database! and sync-table! +(defn- validate-active-tables [results] + (when-not (and (set? results) + (every? map? results) + (every? :name results)) + (throw (Exception. "Invalid results returned by active-tables. Results should be a set of maps like {:name \"table_name\", :schema \"schema_name_or_nil\"}.")))) + +(defn- mark-inactive-tables! + "Mark any `Tables` that are no longer active as such. These are ones that exist in the DB but didn't come back from `active-tables`." + [database active-tables existing-table->id] + (doseq [[[{table :name, schema :schema, :as table}] table-id] existing-table->id] + (when-not (contains? active-tables table) + (upd Table table-id :active false) + (log/info (u/format-color 'cyan "Marked table %s.%s%s as inactive." (:name database) (if schema (str schema \.) "") table)) + + ;; We need to mark driver Table's Fields as inactive so we don't expose them in UI such as FK selector (etc.) + (k/update Field + (k/where {:table_id table-id}) + (k/set-fields {:active false}))))) + +(defn- create-new-tables! + "Create new `Tables` as needed. These are ones that came back from `active-tables` but don't already exist in the DB." + [database active-tables existing-table->id] + (let [existing-tables (set (keys existing-table->id)) + new-tables (set/difference active-tables existing-tables)] + (when (seq new-tables) + (log/debug (u/format-color 'blue "Found new tables: %s" (vec (for [{table :name, schema :schema} new-tables] + (if schema + (str schema \. table) + table))))) + (doseq [{table :name, schema :schema} new-tables] + ;; If it's a _metabase_metadata table then we'll handle later once everything else has been synced + (when-not (= (s/lower-case table) "_metabase_metadata") + (ins Table :db_id (:id database), :active true, :schema schema, :name table)))))) + +(defn- fetch-and-sync-database-active-tables! [driver database] + (sync-database-active-tables! driver (for [table (sel :many Table, :db_id (:id database) :active true)] + ;; replace default delays with ones that reuse database (and don't require a DB call) + (assoc table :db (delay database))))) + +(defn- -sync-database! 
[{:keys [active-tables], :as driver} database] + (let [active-tables (active-tables database) + existing-table->id (into {} (for [{:keys [name schema id]} (sel :many :fields [Table :name :schema :id], :db_id (:id database), :active true)] + {{:name name, :schema schema} id}))] + (validate-active-tables active-tables) + + (mark-inactive-tables! database active-tables existing-table->id) + (create-new-tables! database active-tables existing-table->id)) + + (fetch-and-sync-database-active-tables! driver database) + + ;; Ok, now if we had a _metabase_metadata table from earlier we can go ahead and sync from it + (sync-metabase-metadata-table! driver database)) + +(defn- -sync-database-with-tracking! [driver database] + (let [start-time (System/currentTimeMillis) + tracking-hash (str (java.util.UUID/randomUUID))] + (log/info (u/format-color 'magenta "Syncing %s database '%s'..." (name (:engine database)) (:name database))) + (events/publish-event :database-sync-begin {:database_id (:id database) :custom_id tracking-hash}) + + (-sync-database! driver database) + + (events/publish-event :database-sync-end {:database_id (:id database) :custom_id tracking-hash :running_time (- (System/currentTimeMillis) start-time)}) + (log/info (u/format-color 'magenta "Finished syncing %s database %s. (%d ms)" (name (:engine database)) (:name database) + (- (System/currentTimeMillis) start-time))))) + (defn sync-database! "Sync DATABASE and all its Tables and Fields." - [driver database] + [{:keys [sync-in-context], :as driver} database] (binding [qp/*disable-qp-logging* true *sel-disable-logging* true] - (sync-in-context driver database - (fn [] - (let [start-time (System/currentTimeMillis) - tracking-hash (str (java.util.UUID/randomUUID))] - (log/info (u/format-color 'magenta "Syncing %s database '%s'..." (name (:engine database)) (:name database))) - (events/publish-event :database-sync-begin {:database_id (:id database) :custom_id tracking-hash}) - - (let [active-table-names (active-table-names driver database) - table-name->id (sel :many :field->id [Table :name] :db_id (:id database) :active true)] - (assert (set? active-table-names) "active-table-names should return a set.") - (assert (every? string? active-table-names) "active-table-names should return the names of Tables as *strings*.") - - ;; First, let's mark any Tables that are no longer active as such. - ;; These are ones that exist in table-name->id but not in active-table-names. - (doseq [[table-name table-id] table-name->id] - (when-not (contains? active-table-names table-name) - (upd Table table-id :active false) - (log/info (u/format-color 'cyan "Marked table %s.%s as inactive." (:name database) table-name)) - - ;; We need to mark driver Table's Fields as inactive so we don't expose them in UI such as FK selector (etc.) 
- (k/update Field - (k/where {:table_id table-id}) - (k/set-fields {:active false})))) - - ;; Next, we'll create new Tables (ones that came back in active-table-names but *not* in table-name->id) - (let [existing-table-names (set (keys table-name->id)) - new-table-names (set/difference active-table-names existing-table-names)] - (when (seq new-table-names) - (log/debug (u/format-color 'blue "Found new tables: %s" new-table-names)) - (doseq [new-table-name new-table-names] - ;; If it's a _metabase_metadata table then we'll handle later once everything else has been synced - (when-not (= (s/lower-case new-table-name) "_metabase_metadata") - (ins Table :db_id (:id database), :active true, :name new-table-name)))))) - - ;; Now sync the active tables - (->> (sel :many Table :db_id (:id database) :active true) - (map #(assoc % :db (delay database))) ; replace default delays with ones that reuse database (and don't require a DB call) - (sync-database-active-tables! driver)) - - ;; Ok, now if we had a _metabase_metadata table from earlier we can go ahead and sync from it - (sync-metabase-metadata-table! driver database) - - (events/publish-event :database-sync-end {:database_id (:id database) :custom_id tracking-hash :running_time (- (System/currentTimeMillis) start-time)}) - (log/info (u/format-color 'magenta "Finished syncing %s database %s. (%d ms)" (name (:engine database)) (:name database) - (- (System/currentTimeMillis) start-time)))))))) + (let [f (partial -sync-database-with-tracking! driver database)] + (if sync-in-context + (sync-in-context database f) + (f))))) (defn- sync-metabase-metadata-table! "Databases may include a table named `_metabase_metadata` (case-insentive) which includes descriptions or other metadata about the `Tables` and `Fields` @@ -103,35 +127,38 @@ `keypath` is of the form `table-name.key` or `table-name.field-name.key`, where `key` is the name of some property of `Table` or `Field`. - This functionality is currently only used by the Sample Dataset." - [driver database] - (doseq [table-name (active-table-names driver database)] - (when (= (s/lower-case table-name) "_metabase_metadata") - (doseq [{:keys [keypath value]} (table-rows-seq driver database table-name)] - (let [[_ table-name field-name k] (re-matches #"^([^.]+)\.(?:([^.]+)\.)?([^.]+)$" keypath)] - (try (when (not= 1 (if field-name - (k/update Field - (k/where {:name field-name, :table_id (k/subselect Table - (k/fields :id) - (k/where {:db_id (:id database), :name table-name}))}) - (k/set-fields {(keyword k) value})) - (k/update Table - (k/where {:name table-name, :db_id (:id database)}) - (k/set-fields {(keyword k) value})))) - (log/error (u/format-color "Error syncing _metabase_metadata: no matching keypath: %s" keypath))) - (catch Throwable e - (log/error (u/format-color 'red "Error in _metabase_metadata: %s" (.getMessage e)))))))))) + This functionality is currently only used by the Sample Dataset. In order to use this functionality, drivers must implement optional fn `:table-rows-seq`." 
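;; A rough sketch (not part of the change itself) of the driver-as-map convention this sync code assumes:
;; required fns live under plain keys, optional fns (:sync-in-context, :table-rows-seq, ...) may simply be
;; absent, and callers fall back when the key is nil. The toy driver below is hypothetical.
(comment
  (let [toy-driver {:features      #{:foreign-keys}
                    :active-tables (fn [_database]
                                     ;; a set of maps like {:name "table_name", :schema "schema_name_or_nil"}
                                     #{{:name "USERS", :schema "PUBLIC"}})}
        {:keys [sync-in-context]} toy-driver
        sync!                     (fn [] ::synced)]
    (if sync-in-context
      (sync-in-context {:name "my-db"} sync!) ; driver wants to wrap syncing in its own context
      (sync!))))                              ; no :sync-in-context -> just run it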
+ [{:keys [table-rows-seq active-tables]} database] + (when table-rows-seq + (doseq [{table-name :name} (active-tables database)] + (when (= (s/lower-case table-name) "_metabase_metadata") + (doseq [{:keys [keypath value]} (table-rows-seq database table-name)] + (let [[_ table-name field-name k] (re-matches #"^([^.]+)\.(?:([^.]+)\.)?([^.]+)$" keypath)] + (try (when (not= 1 (if field-name + (k/update Field + (k/where {:name field-name, :table_id (k/subselect Table + (k/fields :id) + (k/where {:db_id (:id database), :name table-name}))}) + (k/set-fields {(keyword k) value})) + (k/update Table + (k/where {:name table-name, :db_id (:id database)}) + (k/set-fields {(keyword k) value})))) + (log/error (u/format-color "Error syncing _metabase_metadata: no matching keypath: %s" keypath))) + (catch Throwable e + (log/error (u/format-color 'red "Error in _metabase_metadata: %s" (.getMessage e))))))))))) (defn sync-table! "Sync a *single* TABLE by running all the sync steps for it. This is used *instead* of `sync-database!` when syncing just one Table is desirable." - [driver table] - (let [database @(:db table)] + [{:keys [sync-in-context], :as driver} table] + (let [database @(:db table) + f (fn [] + (sync-database-active-tables! driver [table]) + (events/publish-event :table-sync {:table_id (:id table)}))] (binding [qp/*disable-qp-logging* true] - (sync-in-context driver database - (fn [] - (sync-database-active-tables! driver [table]) - (events/publish-event :table-sync {:table_id (:id table)})))))) + (if sync-in-context + (sync-in-context database f) + (f))))) ;; ### sync-database-active-tables! -- runs the sync-table steps over sequence of Tables @@ -174,13 +201,13 @@ [driver active-tables] (let [active-tables (sort-by :name active-tables)] ;; First, create all the Fields / PKs for all of the Tables - (u/pdoseq [table active-tables] + (doseq [table active-tables] (u/try-apply sync-table-active-fields-and-pks! driver table)) ;; After that, we can do all the other syncing for the Tables (let [tables-count (count active-tables) finished-tables-count (atom 0)] - (u/pdoseq [table active-tables] + (doseq [table active-tables] ;; make sure table has :display_name (u/try-apply update-table-display-name! table) @@ -237,10 +264,10 @@ (defn- sync-table-active-fields-and-pks! "Create new Fields (and mark old ones as inactive) for TABLE, and update PK fields." - [driver table] + [{:keys [active-column-names->type table-pks], :as driver} table] (let [database @(:db table)] ;; Now do the syncing for Table's Fields - (let [active-column-names->type (active-column-names->type driver table) + (let [active-column-names->type (active-column-names->type table) existing-field-name->field (sel :many :field->fields [Field :name :base_type :id], :table_id (:id table), :active true, :parent_id nil)] (assert (map? active-column-names->type) "active-column-names->type should return a map.") @@ -274,7 +301,7 @@ ;; TODO - we need to add functionality to update nested Field base types as well! ;; Now mark PK fields as such if needed - (let [pk-fields (table-pks driver table)] + (let [pk-fields (table-pks table)] (u/try-apply update-table-pks! table pk-fields))))) @@ -292,9 +319,9 @@ (if (= field-count field-distinct-count) :1t1 :Mt1))) -(defn- sync-table-fks! [driver table] - (when (extends? ISyncDriverTableFKs (type driver)) - (let [fks (table-fks driver table)] +(defn- sync-table-fks! [{:keys [features table-fks]} table] + (when (contains? features :foreign-keys) + (let [fks (table-fks table)] (assert (and (set? 
fks) (every? map? fks) (every? :fk-column-name fks) @@ -364,9 +391,9 @@ (defn- maybe-driver-specific-sync-field! "If driver implements `ISyncDriverSpecificSyncField`, call `driver-specific-sync-field!`." - [driver field] - (when (satisfies? ISyncDriverSpecificSyncField driver) - (driver-specific-sync-field! driver field))) + [{:keys [driver-specific-sync-field!]} field] + (when driver-specific-sync-field! + (driver-specific-sync-field! field))) ;; ### set-field-display-name-if-needed! @@ -399,27 +426,29 @@ (inc non-nil-count) more))))) -(extend-protocol ISyncDriverFieldPercentUrls ; Default implementation - Object - (field-percent-urls [this field] - (let [field-values (->> (field-values-lazy-seq this field) - (filter identity) - (take max-sync-lazy-seq-results))] - (percent-valid-urls field-values)))) +(defn- default-field-percent-urls + "Default implementation for optional driver fn `:field-percent-urls` that calculates percentage in Clojure-land." + [{:keys [field-values-lazy-seq]} field] + (->> (field-values-lazy-seq field) + (filter identity) + (take max-sync-lazy-seq-results) + percent-valid-urls)) (defn- mark-url-field! "If FIELD is texual, doesn't have a `special_type`, and its non-nil values are primarily URLs, mark it as `special_type` `url`." - [driver field] + [{:keys [field-percent-urls], :as driver} field] (when (and (not (:special_type field)) (contains? #{:CharField :TextField} (:base_type field))) - (when-let [percent-urls (field-percent-urls driver field)] - (assert (float? percent-urls)) - (assert (>= percent-urls 0.0)) - (assert (<= percent-urls 100.0)) - (when (> percent-urls percent-valid-url-threshold) - (log/debug (u/format-color 'green "Field '%s' is %d%% URLs. Marking it as a URL." @(:qualified-name field) (int (math/round (* 100 percent-urls))))) - (upd Field (:id field) :special_type :url) - (assoc field :special_type :url))))) + (let [field-percent-urls (or field-percent-urls + (partial default-field-percent-urls driver))] + (when-let [percent-urls (field-percent-urls field)] + (assert (float? percent-urls)) + (assert (>= percent-urls 0.0)) + (assert (<= percent-urls 100.0)) + (when (> percent-urls percent-valid-url-threshold) + (log/debug (u/format-color 'green "Field '%s' is %d%% URLs. Marking it as a URL." @(:qualified-name field) (int (math/round (* 100 percent-urls))))) + (upd Field (:id field) :special_type :url) + (assoc field :special_type :url)))))) ;; ### mark-category-field-or-update-field-values! @@ -455,26 +484,26 @@ "Fields whose values' average length is greater than this amount should be marked as `preview_display = false`." 50) -(extend-protocol ISyncDriverFieldAvgLength ; Default implementation - Object - (field-avg-length [this field] - (let [field-values (->> (field-values-lazy-seq this field) - (filter identity) - (take max-sync-lazy-seq-results)) ; as with field-percent-urls it's probably fine to consider the first 10,000 values rather than potentially millions - field-values-count (count field-values)] - (if (= field-values-count 0) 0 - (int (math/round (/ (->> field-values - (map str) - (map count) - (reduce +)) - field-values-count))))))) +(defn- default-field-avg-length [{:keys [field-values-lazy-seq]} field] + (let [field-values (->> (field-values-lazy-seq field) + (filter identity) + (take max-sync-lazy-seq-results)) + field-values-count (count field-values)] + (if (= field-values-count 0) 0 + (int (math/round (/ (->> field-values + (map str) + (map count) + (reduce +)) + field-values-count)))))) (defn- mark-no-preview-display-field! 
"If FIELD's is textual and its average length is too great, mark it so it isn't displayed in the UI." - [driver field] + [{:keys [field-avg-length], :as driver} field] (when (and (:preview_display field) (contains? #{:CharField :TextField} (:base_type field))) - (let [avg-len (field-avg-length driver field)] + (let [field-avg-length (or field-avg-length + (partial default-field-avg-length driver)) + avg-len (field-avg-length field)] (assert (integer? avg-len) "field-avg-length should return an integer.") (when (> avg-len average-length-no-preview-threshold) (log/debug (u/format-color 'green "Field '%s' has an average length of %d. Not displaying it in previews." @(:qualified-name field) avg-len)) @@ -506,10 +535,10 @@ (defn- mark-json-field! "Mark FIELD as `:json` if it's textual, doesn't already have a special type, the majority of it's values are non-nil, and all of its non-nil values are valid serialized JSON dictionaries or arrays." - [driver field] + [{:keys [field-values-lazy-seq]} field] (when (and (not (:special_type field)) (contains? #{:CharField :TextField} (:base_type field)) - (values-are-valid-json? (->> (field-values-lazy-seq driver field) + (values-are-valid-json? (->> (field-values-lazy-seq field) (take max-sync-lazy-seq-results)))) (log/debug (u/format-color 'green "Field '%s' looks like it contains valid JSON objects. Setting special_type to :json." @(:qualified-name field))) (upd Field (:id field) :special_type :json, :preview_display false) @@ -593,11 +622,10 @@ (assoc field :special_type special-type)))) -(defn- sync-field-nested-fields! [driver field] +(defn- sync-field-nested-fields! [{:keys [features active-nested-field-name->type], :as driver} field] (when (and (= (:base_type field) :DictionaryField) - (supports? driver :nested-fields) ; if one of these is true - (satisfies? ISyncDriverFieldNestedFields driver)) ; the other should be :wink: - (let [nested-field-name->type (active-nested-field-name->type driver field)] + (contains? features :nested-fields)) + (let [nested-field-name->type (active-nested-field-name->type field)] ;; fetch existing nested fields (let [existing-nested-field-name->id (sel :many :field->id [Field :name], :table_id (:table_id field), :active true, :parent_id (:id field))] diff --git a/src/metabase/email.clj b/src/metabase/email.clj index 2556f6ed6c4b1b41f04b2936d3e4689f5be64ab5..e6fb08497b448c5406dfd9bc5a8e5438780ab7bd 100644 --- a/src/metabase/email.clj +++ b/src/metabase/email.clj @@ -7,7 +7,7 @@ ;; ## CONFIG (defsetting email-from-address "Email address used as the sender of system notifications." "notifications@metabase.com") -(defsetting email-smtp-host "SMTP host." "smtp.mandrillapp.com") +(defsetting email-smtp-host "SMTP host.") (defsetting email-smtp-username "SMTP username.") (defsetting email-smtp-password "SMTP password.") (defsetting email-smtp-port "SMTP port." "587") @@ -39,10 +39,8 @@ (string? message)]} (try ;; Check to make sure all valid settings are set! - (when-not (email-smtp-username) - (throw (Exception. "SMTP username is not set."))) - (when-not (email-smtp-password) - (throw (Exception. "SMTP password is not set."))) + (when-not (email-smtp-host) + (throw (Exception. 
"SMTP host is not set."))) ;; Now send the email (let [{error :error error-message :message} (*send-email-fn* (-> {:host (email-smtp-host) :user (email-smtp-username) diff --git a/src/metabase/middleware.clj b/src/metabase/middleware.clj index 6b205a86bbb9beb14005f03dd288370ba02047da..48f9eff17b007dc7accb26b835c6ba548d7ad239 100644 --- a/src/metabase/middleware.clj +++ b/src/metabase/middleware.clj @@ -4,7 +4,7 @@ [clojure.tools.logging :as log] [clojure.walk :as walk] (cheshire factory - [generate :refer [add-encoder encode-str]]) + [generate :refer [add-encoder encode-str encode-nil]]) [korma.core :as k] [medley.core :refer [filter-vals map-vals]] [metabase.api.common :refer [*current-user* *current-user-id*]] @@ -147,9 +147,11 @@ ;; Encode BSON IDs like strings (add-encoder org.bson.types.ObjectId encode-str) +;; Encode BSON undefined like nil +(add-encoder org.bson.BsonUndefined encode-nil) + ;; serialize sql dates (i.e., QueryProcessor results) like YYYY-MM-DD instead of as a full-blown timestamp -(add-encoder java.sql.Date (fn [^java.sql.Date date ^com.fasterxml.jackson.core.JsonGenerator json-generator] - (.writeString json-generator (.toString date)))) +(add-encoder java.sql.Date encode-str) (defn- remove-fns-and-delays "Remove values that are fns or delays from map M." diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj index d8657687cc487cac5f5574efe7551522cdeb8b74..222767315eedc31f0d10f1dd3e15eed880c15f46 100644 --- a/src/metabase/models/setting.clj +++ b/src/metabase/models/setting.clj @@ -155,7 +155,8 @@ (defn public-settings "Return a simple map of key/value pairs which represent the public settings for the front-end application." [] - {:ga_code "UA-60817802-1" + {:engines (deref @(ns-resolve 'metabase.driver 'available-drivers)) + :ga_code "UA-60817802-1" :password_complexity (password/active-password-complexity) :setup_token (setup/token-value) :timezones common/timezones @@ -164,8 +165,8 @@ :anon_tracking_enabled (let [tracking? (get :anon-tracking-enabled)] (or (nil? tracking?) (= "true" tracking?))) :site_name (get :site-name) - :email_configured (not (s/blank? (get :email-smtp-host))) - :admin_email (sel :one :field ['User :email] (k/where {:is_superuser true :is_active true}))}) + :email_configured (not (s/blank? (or (get :email-smtp-host) (get-from-env-var :email-smtp-host)))) + :admin_email (get :admin-email)}) ;; # IMPLEMENTATION diff --git a/src/metabase/util.clj b/src/metabase/util.clj index 9f890c7d6f51e1840ca35aa53e943982a9315d88..74f4ebcf9bc71be8a93ff8c307b128805d095c41 100644 --- a/src/metabase/util.clj +++ b/src/metabase/util.clj @@ -1,11 +1,12 @@ (ns metabase.util "Common utility functions useful throughout the codebase." - (:require [clojure.pprint :refer [pprint]] + (:require [clojure.java.jdbc :as jdbc] + [clojure.pprint :refer [pprint]] [clojure.tools.logging :as log] - [colorize.core :as color] - [medley.core :as m] + [clj-time.coerce :as coerce] [clj-time.format :as time] - [clj-time.coerce :as coerce]) + [colorize.core :as color] + [medley.core :as m]) (:import (java.net Socket InetSocketAddress InetAddress) @@ -222,16 +223,6 @@ ~@body) ~collection))) -(defmacro try-apply - "Call F with PARAMS inside a try-catch block and log exceptions caught." - [f & params] - `(try - (~f ~@params) - (catch Throwable e# - (log/error (color/red ~(format "Caught exception in %s:" f) - (or (.getMessage e#) e#) - (with-out-str (.printStackTrace e#))))))) - (defn indecies-satisfying "Return a set of indencies in COLL that satisfy PRED. 
@@ -246,10 +237,13 @@ (defn format-color "Like `format`, but uses a function in `colorize.core` to colorize the output. - COLOR-SYMB should be a symbol like `green`. + COLOR-SYMB should be a quoted symbol like `green`, `red`, `yellow`, `blue`, + `cyan`, `magenta`, etc. See the entire list of avaliable colors + [here](https://github.com/ibdknox/colorize/blob/master/src/colorize/core.clj). (format-color 'red \"Fatal error: %s\" error-message)" [color-symb format-string & args] + {:pre [(symbol? color-symb)]} ((ns-resolve 'colorize.core color-symb) (apply format format-string args))) (defn pprint-to-str @@ -276,7 +270,100 @@ [^Throwable e] (when e (when-let [stacktrace (.getStackTrace e)] - (->> (map str (.getStackTrace e)) - (filterv (partial re-find #"metabase")))))) + (filterv (partial re-find #"metabase") + (map str (.getStackTrace e)))))) + +(defn wrap-try-catch + "Returns a new function that wraps F in a `try-catch`. When an exception is caught, it is logged + with `log/error` and returns `nil`." + ([f] + (wrap-try-catch f nil)) + ([f f-name] + (let [exception-message (if f-name + (format "Caught exception in %s: " f-name) + "Caught exception: ")] + (fn [& args] + (try + (apply f args) + (catch java.sql.SQLException e + (log/error (color/red exception-message "\n" + (with-out-str (jdbc/print-sql-exception-chain e)) "\n" + (pprint-to-str (filtered-stacktrace e))))) + (catch Throwable e + (log/error (color/red exception-message (or (.getMessage e) e) "\n" + (pprint-to-str (filtered-stacktrace e)))))))))) + +(defn try-apply + "Like `apply`, but wraps F inside a `try-catch` block and logs exceptions caught." + [^clojure.lang.IFn f & args] + (apply (wrap-try-catch f) args)) + +(defn wrap-try-catch! + "Re-intern FN-SYMB as a new fn that wraps the original with a `try-catch`. Intended for debugging. + + (defn z [] (throw (Exception. \"!\"))) + (z) ; -> exception + + (wrap-try-catch! 'z) + (z) ; -> nil; exception logged with log/error" + [fn-symb] + {:pre [(symbol? fn-symb) + (fn? @(resolve fn-symb))]} + (let [varr (resolve fn-symb) + {nmspc :ns, symb :name} (meta varr)] + (println (format "wrap-try-catch! %s/%s" nmspc symb)) + (intern nmspc symb (wrap-try-catch @varr fn-symb)))) + +(defn ns-wrap-try-catch! + "Re-intern all functions in NAMESPACE as ones that wrap the originals with a `try-catch`. + Defaults to the current namespace. You may optionally exclude a set of symbols using the kwarg `:exclude`. + + (ns-wrap-try-catch!) + (ns-wrap-try-catch! 'metabase.driver) + (ns-wrap-try-catch! 'metabase.driver :exclude 'query-complete) + + Intended for debugging." + {:arglists '([namespace? :exclude & excluded-symbs])} + [& args] + (let [[nmspc args] (optional #(try-apply the-ns [%]) args *ns*) + excluded (when (= (first args) :exclude) + (set (rest args)))] + (doseq [[symb varr] (ns-interns nmspc)] + (when (fn? @varr) + (when-not (contains? excluded symb) + (wrap-try-catch! (symbol (str (ns-name nmspc) \/ symb)))))))) + +(defn deref-with-timeout + "Call `deref` on a FUTURE and throw an exception if it takes more than TIMEOUT-MS." + [futur timeout-ms] + (let [result (deref futur timeout-ms ::timeout)] + (when (= result ::timeout) + (throw (Exception. (format "Timed out after %d milliseconds." timeout-ms)))) + result)) + +(defmacro with-timeout + "Run BODY in a `future` and throw an exception if it fails to complete after TIMEOUT-MS." + [timeout-ms & body] + `(deref-with-timeout (future ~@body) ~timeout-ms)) + +(defmacro cond-as-> + "Anaphoric version of `cond->`. 
Binds EXPR to NAME through a series + of pairs of TEST and FORM. NAME is successively bound to the value + of each FORM whose TEST succeeds. + + (defn maybe-wrap-fn [before after f] + (as-> f <> + (fn? before) (fn [] (before) (<>)) + (fn? after) (fn [] (try (<>) + (finally (after))))))" + {:arglists '([expr nm tst form & more])} + [expr nm & clauses] + {:pre [(even? (count clauses))]} + `(let [~nm ~expr + ~@(apply concat (for [[tst form] (partition 2 clauses)] + [nm `(if ~tst + ~form + ~nm)]))] + ~nm)) (require-dox-in-this-namespace) diff --git a/src/metabase/util/logic.clj b/src/metabase/util/logic.clj deleted file mode 100644 index 1acdb153a9c394e4405b8715ee79747db21563eb..0000000000000000000000000000000000000000 --- a/src/metabase/util/logic.clj +++ /dev/null @@ -1,61 +0,0 @@ -(ns metabase.util.logic - "Useful relations for `core.logic`." - (:refer-clojure :exclude [==]) - (:require [clojure.core.logic :refer :all])) - -(defna butlast° - "A relation such that BUSTLASTV is all items but the last item LASTV of list L." - [butlastv lastv l] - ([[] ?x [?x]]) - ([_ _ [?x . ?more]] (fresh [more-butlast] - (butlast° more-butlast lastv ?more) - (conso ?x more-butlast butlastv)))) - -(defna split° - "A relation such that HALF1 and HALF2 are even divisions of list L. - If L has an odd number of items, HALF1 will have one more item than HALF2." - [half1 half2 l] - ([[] [] []]) - ([[?x] [] [?x]]) - ([[?x] [?y] [?x ?y]]) - ([[?x ?y . ?more-half1-butlast] [?more-half1-last . ?more-half2] [?x ?y . ?more]] - (fresh [more-half1] - (split° more-half1 ?more-half2 ?more) - (butlast° ?more-half1-butlast ?more-half1-last more-half1)))) - -(defn sorted-into° - "A relation such that OUT is the list L with V sorted into it doing comparisons with PRED-F." - [pred-f l v out] - (matche [l] - ([[]] (== out [v])) - ([[?x . ?more]] (conda - ((pred-f v ?x) (conso v (lcons ?x ?more) out)) - (s# (fresh [more] - (sorted-into° pred-f ?more v more) - (conso ?x more out))))))) - -(defna sorted-permutation° - "A relation such that OUT is a permutation of L where all items are sorted by PRED-F." - [pred-f l out] - ([_ [] []]) - ([_ [?x . ?more] _] (fresh [more] - (sorted-permutation° pred-f ?more more) - (sorted-into° pred-f more ?x out)))) - -(defn matches-seq-order° - "A relation such that V1 is present and comes before V2 in list L." 
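;; A rough sketch (not part of the change itself) of how the new util helpers above are meant to be used;
;; the values are hypothetical.
(comment
  ;; cond-as->: each TEST that succeeds rebinds the name to its FORM; failing tests are skipped
  (cond-as-> {:a 1} m
    true             (assoc m :b 2)
    (contains? m :z) (assoc m :never 3))
  ;; => {:a 1, :b 2}

  ;; with-timeout: runs the body in a future and throws if it takes longer than the timeout
  (with-timeout 1000
    (Thread/sleep 100)
    ::done)
  ;; => ::done
  )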
- [v1 v2 l] - (conda - ;; This is just an optimization for cases where L isn't a logic var; it's much faster <3 - ((nonlvaro l) ((fn -ordered° [[item & more]] - (conda - ((== v1 item) s#) - ((== v2 item) fail) - ((when (seq more) s#) (-ordered° more)))) - l)) - (s# (conda - ((firsto l v1)) - ((firsto l v2) fail) - ((fresh [more] - (resto l more) - (matches-seq-order° v1 v2 more))))))) diff --git a/test/metabase/api/activity_test.clj b/test/metabase/api/activity_test.clj index 5bdb44088d86730ff5543aa54cadbc0d05bb9b97..cdf13149ce48f9cf53efdfef42bb8dbffbee32d6 100644 --- a/test/metabase/api/activity_test.clj +++ b/test/metabase/api/activity_test.clj @@ -158,8 +158,10 @@ :model model :model_id model-id :timestamp (u/new-sql-timestamp)) - ;; we sleep a few milliseconds to ensure no events have the same timestamp - (Thread/sleep 5))] + ;; we sleep a bit to ensure no events have the same timestamp + ;; sadly, MySQL doesn't support milliseconds so we have to wait a second + ;; otherwise our records are out of order and this test fails :( + (Thread/sleep 1000))] (do (create-view (user->id :crowberto) "card" (:id card2)) (create-view (user->id :crowberto) "dashboard" (:id dash1)) diff --git a/test/metabase/api/database_test.clj b/test/metabase/api/database_test.clj index 9f7a6196a83d802f393b18458a42321431e32a6a..7ff3e80e47d2bf8fb9d67ad857489d9100433310 100644 --- a/test/metabase/api/database_test.clj +++ b/test/metabase/api/database_test.clj @@ -21,23 +21,6 @@ :dbname "fakedb" :user "cam"}})) -;; # FORM INPUT - -;; ## GET /api/database/form_input -(expect - {:engines driver/available-drivers - :timezones ["GMT" - "UTC" - "US/Alaska" - "US/Arizona" - "US/Central" - "US/Eastern" - "US/Hawaii" - "US/Mountain" - "US/Pacific" - "America/Costa_Rica"]} - ((user->client :crowberto) :get 200 "database/form_input")) - ;; # DB LIFECYCLE ENDPOINTS ;; ## GET /api/database/:id @@ -162,66 +145,67 @@ ;; ## GET /api/meta/table/:id/query_metadata ;; TODO - add in example with Field :values (expect - (match-$ (db) - {:created_at $ - :engine "h2" - :id $ - :updated_at $ - :name "Test Database" - :is_sample false - :organization_id nil - :description nil - :tables [(match-$ (Table (id :categories)) - {:description nil - :entity_type nil - :visibility_type nil - :name "CATEGORIES" - :display_name "Categories" - :fields [(match-$ (Field (id :categories :id)) - {:description nil - :table_id (id :categories) - :special_type "id" - :name "ID" - :display_name "Id" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "BigIntegerField" - :parent_id nil - :parent nil - :values []}) - (match-$ (Field (id :categories :name)) - {:description nil - :table_id (id :categories) - :special_type "name" - :name "NAME" - :display_name "Name" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "TextField" - :parent_id nil - :parent nil - :values []})] - :rows 75 - :updated_at $ - :entity_name nil - :active true - :id (id :categories) - :db_id (db-id) - :created_at $})]}) - (let [resp ((user->client :rasta) :get 200 (format "database/%d/metadata" (db-id)))] - (assoc resp :tables (filter #(= "CATEGORIES" (:name %)) (:tables resp))))) + (match-$ (db) + {:created_at $ + :engine "h2" + :id $ + :updated_at $ + :name "Test Database" + :is_sample false + :organization_id nil + :description nil + :tables [(match-$ (Table (id :categories)) + {:description nil + :entity_type 
nil + :visibility_type nil + :schema "PUBLIC" + :name "CATEGORIES" + :display_name "Categories" + :fields [(match-$ (Field (id :categories :id)) + {:description nil + :table_id (id :categories) + :special_type "id" + :name "ID" + :display_name "Id" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "BigIntegerField" + :parent_id nil + :parent nil + :values []}) + (match-$ (Field (id :categories :name)) + {:description nil + :table_id (id :categories) + :special_type "name" + :name "NAME" + :display_name "Name" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "TextField" + :parent_id nil + :parent nil + :values []})] + :rows 75 + :updated_at $ + :entity_name nil + :active true + :id (id :categories) + :db_id (db-id) + :created_at $})]}) + (let [resp ((user->client :rasta) :get 200 (format "database/%d/metadata" (db-id)))] + (assoc resp :tables (filter #(= "CATEGORIES" (:name %)) (:tables resp))))) ;; # DB TABLES ENDPOINTS @@ -231,11 +215,11 @@ (expect (let [db-id (db-id)] [(match-$ (Table (id :categories)) - {:description nil, :entity_type nil, :visibility_type nil, :name "CATEGORIES", :rows 75, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Categories"}) + {:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "CATEGORIES", :rows 75, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Categories"}) (match-$ (Table (id :checkins)) - {:description nil, :entity_type nil, :visibility_type nil, :name "CHECKINS", :rows 1000, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Checkins"}) + {:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "CHECKINS", :rows 1000, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Checkins"}) (match-$ (Table (id :users)) - {:description nil, :entity_type nil, :visibility_type nil, :name "USERS", :rows 15, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Users"}) + {:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "USERS", :rows 15, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Users"}) (match-$ (Table (id :venues)) - {:description nil, :entity_type nil, :visibility_type nil, :name "VENUES", :rows 100, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Venues"})]) + {:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "VENUES", :rows 100, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Venues"})]) ((user->client :rasta) :get 200 (format "database/%d/tables" (db-id)))) diff --git a/test/metabase/api/field_test.clj b/test/metabase/api/field_test.clj index 14b9bba345d4abe4aa571d791914f5dc04bdd26d..fffc0ef87e18236229aa70e042284904632473ed 100644 --- a/test/metabase/api/field_test.clj +++ b/test/metabase/api/field_test.clj @@ -13,44 +13,45 @@ ;; ## GET /api/field/:id (expect (match-$ (Field (id :users :name)) - {:description nil - :table_id (id :users) - :table (match-$ (Table (id :users)) - {:description nil - :entity_type nil - :visibility_type nil - :db (match-$ (db) - {:created_at $ - 
:engine "h2" - :id $ - :updated_at $ - :name "Test Database" - :is_sample false - :organization_id nil - :description nil}) - :name "USERS" - :display_name "Users" - :rows 15 - :updated_at $ - :entity_name nil - :active true - :id (id :users) - :db_id (db-id) - :created_at $}) - :special_type "category" ; metabase.driver.generic-sql.sync/check-for-low-cardinality should have marked this as such because it had no other special_type - :name "NAME" - :display_name "Name" - :updated_at $ - :active true - :id (id :users :name) - :field_type "info" - :position 0 + {:description nil + :table_id (id :users) + :table (match-$ (Table (id :users)) + {:description nil + :entity_type nil + :visibility_type nil + :db (match-$ (db) + {:created_at $ + :engine "h2" + :id $ + :updated_at $ + :name "Test Database" + :is_sample false + :organization_id nil + :description nil}) + :schema "PUBLIC" + :name "USERS" + :display_name "Users" + :rows 15 + :updated_at $ + :entity_name nil + :active true + :id (id :users) + :db_id (db-id) + :created_at $}) + :special_type "category" ; metabase.driver.generic-sql.sync/check-for-low-cardinality should have marked this as such because it had no other special_type + :name "NAME" + :display_name "Name" + :updated_at $ + :active true + :id (id :users :name) + :field_type "info" + :position 0 :preview_display true :created_at $ :base_type "TextField" :parent_id nil :parent nil}) - ((user->client :rasta) :get 200 (format "field/%d" (id :users :name)))) + ((user->client :rasta) :get 200 (format "field/%d" (id :users :name)))) ;; ## GET /api/field/:id/summary diff --git a/test/metabase/api/setup_test.clj b/test/metabase/api/setup_test.clj index 9daf4c723af79bceba21440654c7e25b5a3f77d9..804d3a70e6e1a7650e6c8a4064d7348acd59fb3c 100644 --- a/test/metabase/api/setup_test.clj +++ b/test/metabase/api/setup_test.clj @@ -4,6 +4,7 @@ [metabase.db :refer :all] [metabase.http-client :as http] (metabase.models [session :refer [Session]] + [setting :as setting] [user :refer [User]]) [metabase.setup :as setup] (metabase.test [data :refer :all] @@ -14,10 +15,11 @@ ;; Check that we can create a new superuser via setup-token (let [user-name (random-name)] (expect-eval-actual-first - (match-$ (->> (sel :one User :email (str user-name "@metabase.com")) - (:id) - (sel :one Session :user_id)) + [(match-$ (->> (sel :one User :email (str user-name "@metabase.com")) + (:id) + (sel :one Session :user_id)) {:id $id}) + (str user-name "@metabase.com")] (let [resp (http/client :post 200 "setup" {:token (setup/token-create) :prefs {:site_name "Metabase Test"} :user {:first_name user-name @@ -27,7 +29,7 @@ ;; reset our setup token (setup/token-create) ;; return api response - resp))) + [resp (setting/get :admin-email)]))) ;; Test input validations diff --git a/test/metabase/api/table_test.clj b/test/metabase/api/table_test.clj index 39bb606a814450cb2013c32b8b798924ad645862..784a7a8b4d45c702f17f8e08e08ce8022aea1dcb 100644 --- a/test/metabase/api/table_test.clj +++ b/test/metabase/api/table_test.clj @@ -52,35 +52,36 @@ :rows 100 :id (id :venues)}])))) (->> ((user->client :rasta) :get 200 "table") - (map #(dissoc % :db :created_at :updated_at :entity_name :description :entity_type :visibility_type)) + (map #(dissoc % :db :created_at :updated_at :schema :entity_name :description :entity_type :visibility_type)) set)) ;; ## GET /api/table/:id (expect (match-$ (Table (id :venues)) - {:description nil - :entity_type nil + {:description nil + :entity_type nil :visibility_type nil - :db (match-$ (db) - {:created_at $ 
- :engine "h2" - :id $ - :updated_at $ - :name "Test Database" - :is_sample false - :organization_id nil - :description nil}) - :name "VENUES" - :display_name "Venues" - :rows 100 - :updated_at $ - :entity_name nil - :active true - :pk_field (deref $pk_field) - :id (id :venues) - :db_id (db-id) - :created_at $}) - ((user->client :rasta) :get 200 (format "table/%d" (id :venues)))) + :db (match-$ (db) + {:created_at $ + :engine "h2" + :id $ + :updated_at $ + :name "Test Database" + :is_sample false + :organization_id nil + :description nil}) + :schema "PUBLIC" + :name "VENUES" + :display_name "Venues" + :rows 100 + :updated_at $ + :entity_name nil + :active true + :pk_field (deref $pk_field) + :id (id :venues) + :db_id (db-id) + :created_at $}) + ((user->client :rasta) :get 200 (format "table/%d" (id :venues)))) ;; ## GET /api/table/:id/fields (expect [(match-$ (Field (id :categories :id)) @@ -120,63 +121,64 @@ ;; ## GET /api/table/:id/query_metadata (expect (match-$ (Table (id :categories)) - {:description nil - :entity_type nil + {:description nil + :entity_type nil :visibility_type nil - :db (match-$ (db) - {:created_at $ - :engine "h2" - :id $ - :updated_at $ - :name "Test Database" - :is_sample false - :organization_id nil - :description nil}) - :name "CATEGORIES" - :display_name "Categories" - :fields [(match-$ (Field (id :categories :id)) - {:description nil - :table_id (id :categories) - :special_type "id" - :name "ID" - :display_name "Id" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "BigIntegerField" - :parent_id nil - :parent nil}) - (match-$ (Field (id :categories :name)) - {:description nil - :table_id (id :categories) - :special_type "name" - :name "NAME" - :display_name "Name" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "TextField" - :parent_id nil - :parent nil})] - :field_values {} - :rows 75 - :updated_at $ - :entity_name nil - :active true - :id (id :categories) - :db_id (db-id) - :created_at $}) - ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (id :categories)))) + :db (match-$ (db) + {:created_at $ + :engine "h2" + :id $ + :updated_at $ + :name "Test Database" + :is_sample false + :organization_id nil + :description nil}) + :schema "PUBLIC" + :name "CATEGORIES" + :display_name "Categories" + :fields [(match-$ (Field (id :categories :id)) + {:description nil + :table_id (id :categories) + :special_type "id" + :name "ID" + :display_name "Id" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "BigIntegerField" + :parent_id nil + :parent nil}) + (match-$ (Field (id :categories :name)) + {:description nil + :table_id (id :categories) + :special_type "name" + :name "NAME" + :display_name "Name" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "TextField" + :parent_id nil + :parent nil})] + :field_values {} + :rows 75 + :updated_at $ + :entity_name nil + :active true + :id (id :categories) + :db_id (db-id) + :created_at $}) + ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (id :categories)))) (def ^:private user-last-login-date-strs @@ -201,212 +203,214 @@ ;;; Make sure that getting the User table *does* include info about the password field, but not actual values themselves 
(expect (match-$ (sel :one Table :id (id :users)) - {:description nil - :entity_type nil + {:description nil + :entity_type nil :visibility_type nil - :db (match-$ (db) - {:created_at $ - :engine "h2" - :id $ - :updated_at $ - :name "Test Database" - :is_sample false - :organization_id nil - :description nil}) - :name "USERS" - :display_name "Users" - :fields [(match-$ (sel :one Field :id (id :users :id)) - {:description nil - :table_id (id :users) - :special_type "id" - :name "ID" - :display_name "Id" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "BigIntegerField" - :parent_id nil - :parent nil}) - (match-$ (sel :one Field :id (id :users :last_login)) - {:description nil - :table_id (id :users) - :special_type "category" - :name "LAST_LOGIN" - :display_name "Last Login" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "DateTimeField" - :parent_id nil - :parent nil}) - (match-$ (sel :one Field :id (id :users :name)) - {:description nil - :table_id (id :users) - :special_type "category" - :name "NAME" - :display_name "Name" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "TextField" - :parent_id nil - :parent nil}) - (match-$ (sel :one Field :table_id (id :users) :name "PASSWORD") - {:description nil - :table_id (id :users) - :special_type "category" - :name "PASSWORD" - :display_name "Password" - :updated_at $ - :active true - :id $ - :field_type "sensitive" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "TextField" - :parent_id nil - :parent nil})] - :rows 15 - :updated_at $ - :entity_name nil - :active true - :id (id :users) - :db_id (db-id) - :field_values {(keyword (str (id :users :last_login))) - user-last-login-date-strs + :db (match-$ (db) + {:created_at $ + :engine "h2" + :id $ + :updated_at $ + :name "Test Database" + :is_sample false + :organization_id nil + :description nil}) + :schema "PUBLIC" + :name "USERS" + :display_name "Users" + :fields [(match-$ (sel :one Field :id (id :users :id)) + {:description nil + :table_id (id :users) + :special_type "id" + :name "ID" + :display_name "Id" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "BigIntegerField" + :parent_id nil + :parent nil}) + (match-$ (sel :one Field :id (id :users :last_login)) + {:description nil + :table_id (id :users) + :special_type "category" + :name "LAST_LOGIN" + :display_name "Last Login" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "DateTimeField" + :parent_id nil + :parent nil}) + (match-$ (sel :one Field :id (id :users :name)) + {:description nil + :table_id (id :users) + :special_type "category" + :name "NAME" + :display_name "Name" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "TextField" + :parent_id nil + :parent nil}) + (match-$ (sel :one Field :table_id (id :users) :name "PASSWORD") + {:description nil + :table_id (id :users) + :special_type "category" + :name "PASSWORD" + :display_name "Password" + :updated_at $ + :active true + :id $ + :field_type "sensitive" + :position 0 + :target nil + :preview_display true + 
:created_at $ + :base_type "TextField" + :parent_id nil + :parent nil})] + :rows 15 + :updated_at $ + :entity_name nil + :active true + :id (id :users) + :db_id (db-id) + :field_values {(keyword (str (id :users :last_login))) + user-last-login-date-strs - (keyword (str (id :users :name))) - ["Broen Olujimi" - "Conchúr Tihomir" - "Dwight Gresham" - "Felipinho Asklepios" - "Frans Hevel" - "Kaneonuskatew Eiran" - "Kfir Caj" - "Nils Gotam" - "Plato Yeshua" - "Quentin Sören" - "Rüstem Hebel" - "Shad Ferdynand" - "Simcha Yan" - "Spiros Teofil" - "Szymon Theutrich"]} - :created_at $}) - ((user->client :rasta) :get 200 (format "table/%d/query_metadata?include_sensitive_fields=true" (id :users)))) + (keyword (str (id :users :name))) + ["Broen Olujimi" + "Conchúr Tihomir" + "Dwight Gresham" + "Felipinho Asklepios" + "Frans Hevel" + "Kaneonuskatew Eiran" + "Kfir Caj" + "Nils Gotam" + "Plato Yeshua" + "Quentin Sören" + "Rüstem Hebel" + "Shad Ferdynand" + "Simcha Yan" + "Spiros Teofil" + "Szymon Theutrich"]} + :created_at $}) + ((user->client :rasta) :get 200 (format "table/%d/query_metadata?include_sensitive_fields=true" (id :users)))) ;;; GET api/table/:id/query_metadata ;;; Make sure that getting the User table does *not* include password info (expect (match-$ (Table (id :users)) - {:description nil - :entity_type nil + {:description nil + :entity_type nil :visibility_type nil - :db (match-$ (db) - {:created_at $ - :engine "h2" - :id $ - :updated_at $ - :name "Test Database" - :is_sample false - :organization_id nil - :description nil}) - :name "USERS" - :display_name "Users" - :fields [(match-$ (Field (id :users :id)) - {:description nil - :table_id (id :users) - :special_type "id" - :name "ID" - :display_name "Id" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "BigIntegerField" - :parent_id nil - :parent nil}) - (match-$ (Field (id :users :last_login)) - {:description nil - :table_id (id :users) - :special_type "category" - :name "LAST_LOGIN" - :display_name "Last Login" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "DateTimeField" - :parent_id nil - :parent nil}) - (match-$ (Field (id :users :name)) - {:description nil - :table_id (id :users) - :special_type "category" - :name "NAME" - :display_name "Name" - :updated_at $ - :active true - :id $ - :field_type "info" - :position 0 - :target nil - :preview_display true - :created_at $ - :base_type "TextField" - :parent_id nil - :parent nil})] - :rows 15 - :updated_at $ - :entity_name nil - :active true - :id (id :users) - :db_id (db-id) - :field_values {(keyword (str (id :users :last_login))) - user-last-login-date-strs + :db (match-$ (db) + {:created_at $ + :engine "h2" + :id $ + :updated_at $ + :name "Test Database" + :is_sample false + :organization_id nil + :description nil}) + :schema "PUBLIC" + :name "USERS" + :display_name "Users" + :fields [(match-$ (Field (id :users :id)) + {:description nil + :table_id (id :users) + :special_type "id" + :name "ID" + :display_name "Id" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "BigIntegerField" + :parent_id nil + :parent nil}) + (match-$ (Field (id :users :last_login)) + {:description nil + :table_id (id :users) + :special_type "category" + :name "LAST_LOGIN" + :display_name "Last Login" + :updated_at $ + :active true + :id 
$ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "DateTimeField" + :parent_id nil + :parent nil}) + (match-$ (Field (id :users :name)) + {:description nil + :table_id (id :users) + :special_type "category" + :name "NAME" + :display_name "Name" + :updated_at $ + :active true + :id $ + :field_type "info" + :position 0 + :target nil + :preview_display true + :created_at $ + :base_type "TextField" + :parent_id nil + :parent nil})] + :rows 15 + :updated_at $ + :entity_name nil + :active true + :id (id :users) + :db_id (db-id) + :field_values {(keyword (str (id :users :last_login))) + user-last-login-date-strs - (keyword (str (id :users :name))) - ["Broen Olujimi" - "Conchúr Tihomir" - "Dwight Gresham" - "Felipinho Asklepios" - "Frans Hevel" - "Kaneonuskatew Eiran" - "Kfir Caj" - "Nils Gotam" - "Plato Yeshua" - "Quentin Sören" - "Rüstem Hebel" - "Shad Ferdynand" - "Simcha Yan" - "Spiros Teofil" - "Szymon Theutrich"]} - :created_at $}) - ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (id :users)))) + (keyword (str (id :users :name))) + ["Broen Olujimi" + "Conchúr Tihomir" + "Dwight Gresham" + "Felipinho Asklepios" + "Frans Hevel" + "Kaneonuskatew Eiran" + "Kfir Caj" + "Nils Gotam" + "Plato Yeshua" + "Quentin Sören" + "Rüstem Hebel" + "Shad Ferdynand" + "Simcha Yan" + "Spiros Teofil" + "Szymon Theutrich"]} + :created_at $}) + ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (id :users)))) ;; ## PUT /api/table/:id @@ -415,34 +419,35 @@ ;; reset Table back to its original state (upd Table (id :users) :display_name "Users" :entity_type nil :visibility_type nil :description nil) table) - {:description "What a nice table!" - :entity_type "person" + {:description "What a nice table!" 
+ :entity_type "person" :visibility_type "hidden" - :db (match-$ (db) - {:description nil - :organization_id $ - :name "Test Database" - :is_sample false - :updated_at $ - :details $ - :id $ - :engine "h2" - :created_at $}) - :name "USERS" - :rows 15 - :updated_at $ - :entity_name nil - :display_name "Userz" - :active true - :pk_field (deref $pk_field) - :id $ - :db_id (db-id) - :created_at $}) - (do ((user->client :crowberto) :put 200 (format "table/%d" (id :users)) {:display_name "Userz" - :entity_type "person" - :visibility_type "hidden" - :description "What a nice table!"}) - ((user->client :crowberto) :get 200 (format "table/%d" (id :users))))) + :db (match-$ (db) + {:description nil + :organization_id $ + :name "Test Database" + :is_sample false + :updated_at $ + :details $ + :id $ + :engine "h2" + :created_at $}) + :schema "PUBLIC" + :name "USERS" + :rows 15 + :updated_at $ + :entity_name nil + :display_name "Userz" + :active true + :pk_field (deref $pk_field) + :id $ + :db_id (db-id) + :created_at $}) + (do ((user->client :crowberto) :put 200 (format "table/%d" (id :users)) {:display_name "Userz" + :entity_type "person" + :visibility_type "hidden" + :description "What a nice table!"}) + ((user->client :crowberto) :get 200 (format "table/%d" (id :users))))) ;; ## GET /api/table/:id/fks @@ -473,27 +478,28 @@ :created_at $ :updated_at $ :table (match-$ (Table (id :checkins)) - {:description nil - :entity_type nil + {:description nil + :entity_type nil :visibility_type nil - :name "CHECKINS" - :display_name "Checkins" - :rows 1000 - :updated_at $ - :entity_name nil - :active true - :id $ - :db_id $ - :created_at $ - :db (match-$ (db) - {:description nil, - :organization_id nil, - :name "Test Database", - :is_sample false, - :updated_at $, - :id $, - :engine "h2", - :created_at $})})}) + :schema "PUBLIC" + :name "CHECKINS" + :display_name "Checkins" + :rows 1000 + :updated_at $ + :entity_name nil + :active true + :id $ + :db_id $ + :created_at $ + :db (match-$ (db) + {:description nil, + :organization_id nil, + :name "Test Database", + :is_sample false, + :updated_at $, + :id $, + :engine "h2", + :created_at $})})}) :destination (match-$ users-id-field {:id $ :table_id $ @@ -511,18 +517,19 @@ :created_at $ :updated_at $ :table (match-$ (Table (id :users)) - {:description nil - :entity_type nil + {:description nil + :entity_type nil :visibility_type nil - :name "USERS" - :display_name "Users" - :rows 15 - :updated_at $ - :entity_name nil - :active true - :id $ - :db_id $ - :created_at $})})})] + :schema "PUBLIC" + :name "USERS" + :display_name "Users" + :rows 15 + :updated_at $ + :entity_name nil + :active true + :id $ + :db_id $ + :created_at $})})})] ((user->client :rasta) :get 200 (format "table/%d/fks" (id :users)))) diff --git a/test/metabase/api/user_test.clj b/test/metabase/api/user_test.clj index 3d7ca755df13cc0c8564ae0ab3ee7906e322717d..885f56b345148248d05bb039f4ee7308898dee54 100644 --- a/test/metabase/api/user_test.clj +++ b/test/metabase/api/user_test.clj @@ -214,27 +214,20 @@ ;; ## PUT /api/user/:id/password ;; Test that a User can change their password -(let [user-last-name (random-name)] - (expect-eval-actual-first - (let [{user-id :id} (sel :one User :last_name user-last-name)] - (sel :one :fields [Session :id] :user_id user-id (k/order :created_at :desc))) ; get the latest Session for this User - (let [password {:old "password" - :new "whateverUP12!!"} - {:keys [email id] :as user} (create-user :password (:old password) :last_name user-last-name) - creds {:old {:password 
(:old password) - :email email} - :new {:password (:new password) - :email email}}] - ;; Check that creds work - (metabase.http-client/client :post 200 "session" (:old creds)) - ;; Change the PW - (metabase.http-client/client (:old creds) :put 200 (format "user/%d/password" id) {:password (:new password) - :old_password (:old password)}) - ;; Old creds should no longer work - (assert (= (metabase.http-client/client :post 400 "session" (:old creds)) - {:errors {:password "did not match stored password"}})) - ;; New creds *should* work - (metabase.http-client/client :post 200 "session" (:new creds))))) +(expect-let [creds {:email "abc@metabase.com" + :password "def"} + {:keys [id password]} (ins User + :first_name "test" + :last_name "user" + :email "abc@metabase.com" + :password "def")] + true + (do + ;; use API to reset the users password + (metabase.http-client/client creds :put 200 (format "user/%d/password" id) {:password "abc123!!DEF" + :old_password (:password creds)}) + ;; now simply grab the lastest pass from the db and compare to the one we have from before reset + (not= password (sel :one :field [User :password] :email (:email creds))))) ;; Check that a non-superuser CANNOT update someone else's password (expect "You don't have permissions to do that." diff --git a/test/metabase/driver/generic_sql/native_test.clj b/test/metabase/driver/generic_sql/native_test.clj index 07245c805aad00f3add0d608578b7fae908da092..3c1c9c26e4d78ef9b31b0c7e262e87f0a6794d22 100644 --- a/test/metabase/driver/generic_sql/native_test.clj +++ b/test/metabase/driver/generic_sql/native_test.clj @@ -34,7 +34,8 @@ ;; Check that we get proper error responses for malformed SQL (expect {:status :failed - :error "Column \"ZID\" not found"} + :class java.lang.Exception + :error "Column \"ZID\" not found"} (dissoc (driver/process-query {:native {:query "SELECT ZID FROM CHECKINS LIMIT 2;"} ; make sure people know it's to be expected :type :native :database (db-id)}) diff --git a/test/metabase/driver/generic_sql_test.clj b/test/metabase/driver/generic_sql_test.clj index f9026bc67a747701f1ad86928875e2a50d70533e..d6dd9daf03a038db1fa84672500fba68a0108a4c 100644 --- a/test/metabase/driver/generic_sql_test.clj +++ b/test/metabase/driver/generic_sql_test.clj @@ -1,9 +1,7 @@ (ns metabase.driver.generic-sql-test (:require [expectations :refer :all] [metabase.db :refer :all] - [metabase.driver :as driver] - (metabase.driver [h2 :as h2] - [interface :as i]) + [metabase.driver.h2 :refer [h2]] [metabase.driver.generic-sql.util :refer [korma-entity]] (metabase.models [field :refer [Field]] [foreign-key :refer [ForeignKey]] @@ -15,7 +13,7 @@ (delay (sel :one Table :name "USERS"))) (def venues-table - (delay (sel :one Table :name "VENUES"))) + (delay (Table (id :venues)))) (def korma-users-table (delay (korma-entity @users-table))) @@ -23,10 +21,13 @@ (def users-name-field (delay (Field (id :users :name)))) -;; ACTIVE-TABLE-NAMES +;; ACTIVE-TABLES (expect - #{"CATEGORIES" "VENUES" "CHECKINS" "USERS"} - (i/active-table-names h2/driver (db))) + #{{:name "CATEGORIES", :schema "PUBLIC"} + {:name "VENUES", :schema "PUBLIC"} + {:name "CHECKINS", :schema "PUBLIC"} + {:name "USERS", :schema "PUBLIC"}} + ((:active-tables h2) (db))) ;; ACTIVE-COLUMN-NAMES->TYPE (expect @@ -36,15 +37,15 @@ "PRICE" :IntegerField "CATEGORY_ID" :IntegerField "ID" :BigIntegerField} - (i/active-column-names->type h2/driver @venues-table)) + ((:active-column-names->type h2) @venues-table)) ;; ## TEST TABLE-PK-NAMES ;; Pretty straightforward (expect #{"ID"} - 
(i/table-pks h2/driver @venues-table)) + ((:table-pks h2) @venues-table)) ;; ## TEST FIELD-AVG-LENGTH (expect 13 - (i/field-avg-length h2/driver @users-name-field)) + ((:field-avg-length h2) @users-name-field)) diff --git a/test/metabase/driver/h2_test.clj b/test/metabase/driver/h2_test.clj index 50b1d06cc6329693a224dc88220b3276ab376a53..013f6db7bb164cd14ef7df34045bbb8a96f5d926 100644 --- a/test/metabase/driver/h2_test.clj +++ b/test/metabase/driver/h2_test.clj @@ -1,19 +1,11 @@ (ns metabase.driver.h2-test (:require [expectations :refer :all] [metabase.db :as db] - (metabase.driver [h2 :refer :all] - [interface :refer [can-connect?]]) - [metabase.driver.generic-sql.interface :as i] + [metabase.driver.h2 :refer :all] [metabase.test.util :refer [resolve-private-fns]])) (resolve-private-fns metabase.driver.h2 connection-string->file+options file+options->connection-string connection-string-set-safe-options) -;; # Check that database->connection-details works -(expect {:db - "file:/Users/cam/birdly/bird_sightings.db;AUTO_SERVER=TRUE;DB_CLOSE_DELAY=-1"} - (i/database->connection-details driver {:details {:db "file:/Users/cam/birdly/bird_sightings.db;AUTO_SERVER=TRUE;DB_CLOSE_DELAY=-1"}})) - - ;; Check that the functions for exploding a connection string's options work as expected (expect ["file:my-file" {"OPTION_1" "TRUE", "OPTION_2" "100", "LOOK_I_INCLUDED_AN_EXTRA_SEMICOLON" "NICE_TRY"}] @@ -34,8 +26,7 @@ ;; Make sure we *cannot* connect to a non-existent database (expect :exception-thrown - (try (can-connect? driver {:engine :h2 - :details {:db (str (System/getProperty "user.dir") "/toucan_sightings")}}) + (try ((:can-connect? h2) {:db (str (System/getProperty "user.dir") "/toucan_sightings")}) (catch org.h2.jdbc.JdbcSQLException e (and (re-matches #"Database .+ not found .+" (.getMessage e)) :exception-thrown)))) @@ -43,5 +34,4 @@ ;; Check that we can connect to a non-existent Database when we enable potentailly unsafe connections (e.g. to the Metabase database) (expect true (binding [db/*allow-potentailly-unsafe-connections* true] - (can-connect? driver {:engine :h2 - :details {:db (str (System/getProperty "user.dir") "/pigeon_sightings")}}))) + ((:can-connect? h2) {:db (str (System/getProperty "user.dir") "/pigeon_sightings")}))) diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj index 7f460990eea55f8f5922fc8cc993bb38f1ff9a59..966fcfd2d6403bd2d8ed021d867795385d69285e 100644 --- a/test/metabase/driver/mongo_test.clj +++ b/test/metabase/driver/mongo_test.clj @@ -4,8 +4,7 @@ [korma.core :as k] [metabase.db :refer :all] [metabase.driver :as driver] - (metabase.driver [interface :as i] - [mongo :as mongo]) + [metabase.driver.mongo :refer [mongo]] [metabase.driver.mongo.test-data :refer :all] (metabase.models [field :refer [Field]] [table :refer [Table]]) @@ -51,7 +50,7 @@ "Return an object that can be passed like a `Table` to driver sync functions." [table-name] {:pre [(keyword? 
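;; (A rough sketch of the driver-access change running through these generic-sql / H2 / Mongo /
;;  Postgres test hunks; names are taken from the hunks themselves, not the drivers' full API.)
;; The old `interface`-protocol calls such as
;;
;;   (i/active-table-names h2/driver (db))
;;   (i/table-pks          h2/driver @venues-table)
;;
;; become keyword lookups on the driver map itself, followed by an ordinary function call:
;;
;;   ((:active-tables h2) (db))
;;   ((:table-pks     h2) @venues-table)
;;
;; (and `:active-tables` now returns a set of {:name ... :schema ...} maps rather than bare
;;  table-name strings, as the expectations in these hunks show).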
table-name)]} - {:db mongo-test-db + {:db mongo-test-db :name (name table-name)}) (defn- field-name->fake-field @@ -110,10 +109,13 @@ (resolve-private-fns metabase.driver.mongo field->base-type table->column-names) -;; ### active-table-names +;; ### active-tables (expect-when-testing-mongo - #{"checkins" "categories" "users" "venues"} - (i/active-table-names mongo/driver @mongo-test-db)) + #{{:name "checkins"} + {:name "categories"} + {:name "users"} + {:name "venues"}} + ((:active-tables mongo) @mongo-test-db)) ;; ### table->column-names (expect-when-testing-mongo @@ -154,14 +156,14 @@ {"_id" :IntegerField, "name" :TextField, "longitude" :FloatField, "latitude" :FloatField, "price" :IntegerField, "category_id" :IntegerField}] ; venues (->> table-names (map table-name->fake-table) - (mapv (partial i/active-column-names->type mongo/driver)))) + (mapv (:active-column-names->type mongo)))) ;; ### table-pks (expect-when-testing-mongo [#{"_id"} #{"_id"} #{"_id"} #{"_id"}] ; _id for every table (->> table-names (map table-name->fake-table) - (mapv (partial i/table-pks mongo/driver)))) + (mapv (:table-pks mongo)))) ;; ## Big-picture tests for the way data should look post-sync diff --git a/test/metabase/driver/postgres_test.clj b/test/metabase/driver/postgres_test.clj index 1f595f25e907edb865935d004e2e956f2d1452c6..a14ee96a8fd3a16e09744ccdd70d5657afe61ebd 100644 --- a/test/metabase/driver/postgres_test.clj +++ b/test/metabase/driver/postgres_test.clj @@ -1,25 +1,10 @@ (ns metabase.driver.postgres-test (:require [expectations :refer :all] - [metabase.driver.generic-sql.interface :as i] - [metabase.driver.postgres :refer :all] + [metabase.driver.postgres :refer [postgres]] (metabase.test.data [datasets :refer [expect-with-dataset]] [interface :refer [def-database-definition]]) [metabase.test.util.q :refer [Q]])) -;; # Check that database->connection details still works whether we're dealing with new-style or legacy details -;; ## new-style -(expect {:db "bird_sightings" - :ssl false - :port 5432 - :host "localhost" - :user "camsaul"} - (i/database->connection-details driver {:details {:ssl false - :host "localhost" - :port 5432 - :dbname "bird_sightings" - :user "camsaul"}})) - - ;; # Check that SSL params get added the connection details in the way we'd like ;; ## no SSL -- this should *not* include the key :ssl (regardless of its value) since that will cause the PG driver to use SSL anyway (expect @@ -28,11 +13,11 @@ :subprotocol "postgresql" :subname "//localhost:5432/bird_sightings" :make-pool? 
true} - (i/connection-details->connection-spec driver {:ssl false - :host "localhost" - :port 5432 - :dbname "bird_sightings" - :user "camsaul"})) + ((:connection-details->spec postgres) {:ssl false + :host "localhost" + :port 5432 + :dbname "bird_sightings" + :user "camsaul"})) ;; ## ssl - check that expected params get added (expect @@ -44,11 +29,11 @@ :user "camsaul" :sslfactory "org.postgresql.ssl.NonValidatingFactory" :subname "//localhost:5432/bird_sightings"} - (i/connection-details->connection-spec driver {:ssl true - :host "localhost" - :port 5432 - :dbname "bird_sightings" - :user "camsaul"})) + ((:connection-details->spec postgres) {:ssl true + :host "localhost" + :port 5432 + :dbname "bird_sightings" + :user "camsaul"})) ;;; # UUID Support (def-database-definition ^:private with-uuid ["users" @@ -61,8 +46,8 @@ ;; Check that we can load a Postgres Database with a :UUIDField (expect-with-dataset :postgres - {:cols [{:description nil, :base_type :IntegerField, :name "id", :display_name "Id", :preview_display true, :special_type :id, :target nil, :extra_info {}} - {:description nil, :base_type :UUIDField, :name "user_id", :display_name "User Id", :preview_display true, :special_type :category, :target nil, :extra_info {}}], + {:cols [{:description nil, :base_type :IntegerField, :schema_name "public", :name "id", :display_name "Id", :preview_display true, :special_type :id, :target nil, :extra_info {}} + {:description nil, :base_type :UUIDField, :schema_name "public", :name "user_id", :display_name "User Id", :preview_display true, :special_type :category, :target nil, :extra_info {}}], :columns ["id" "user_id"], :rows [[1 #uuid "4f01dcfd-13f7-430c-8e6f-e505c0851027"] [2 #uuid "4652b2e7-d940-4d55-a971-7e484566663e"] diff --git a/test/metabase/driver/query_processor_test.clj b/test/metabase/driver/query_processor_test.clj index d676c6f5f454f7c18717728b5c52c94293dada68..6136414b384f194a4a7de5875094f80ee4b2c20d 100644 --- a/test/metabase/driver/query_processor_test.clj +++ b/test/metabase/driver/query_processor_test.clj @@ -1,16 +1,17 @@ (ns metabase.driver.query-processor-test "Query processing tests that can be ran between any of the available drivers, and should give the same results." - (:require [clojure.walk :as walk] + (:require [clojure.math.numeric-tower :as math] [expectations :refer :all] + [korma.core :as k] [metabase.db :refer :all] [metabase.driver :as driver] [metabase.driver.query-processor :refer :all] (metabase.models [field :refer [Field]] [table :refer [Table]]) - [metabase.test.data :refer :all] (metabase.test.data [dataset-definitions :as defs] - [datasets :as datasets :refer [*dataset*]] + [datasets :as datasets :refer [*dataset* *engine*]] [interface :refer [create-database-definition]]) + [metabase.test.data :refer :all] [metabase.test.util.q :refer [Q]] [metabase.util :as u])) @@ -19,6 +20,9 @@ ;; ### Helper Fns + Macros +(def ^:private ^:const sql-engines + #{:h2 :postgres :mysql :sqlserver}) + (defmacro ^:private qp-expect-with-all-datasets [data q-form & post-process-fns] `(datasets/expect-with-all-datasets {:status :completed @@ -46,50 +50,52 @@ ;; These are meant for inclusion in the expected output of the QP tests, to save us from writing the same results several times ;; #### categories + +(defn- col-defaults [] + {:extra_info {} + :target nil + :description nil + :preview_display true + :schema_name (default-schema)}) + (defn- categories-col "Return column information for the `categories` column named by keyword COL." 
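;; (Hedged sketch of what the new `merge`-based column helpers evaluate to; keys and values
;;  are taken from the `col-defaults` hunk above and the `categories-col` case below, with
;;  the H2 dataset's "PUBLIC" default schema assumed.)
;;
;;   (categories-col :name)
;;   ;; => {:extra_info {}, :target nil, :description nil, :preview_display true,
;;   ;;     :schema_name "PUBLIC"                          ; from (default-schema)
;;   ;;     :table_id (id :categories), :id (id :categories :name)
;;   ;;     :special_type :name, :base_type :TextField
;;   ;;     :name (format-name "name"), :display_name "Name"}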
[col] - (case col - :id {:extra_info {} :target nil :special_type :id, :base_type (id-field-type), :description nil, - :name (format-name "id") :display_name "Id" :preview_display true :table_id (id :categories), :id (id :categories :id)} - :name {:extra_info {} :target nil :special_type :name, :base_type :TextField, :description nil, - :name (format-name "name") :display_name "Name" :preview_display true :table_id (id :categories), :id (id :categories :name)})) + (merge + (col-defaults) + {:table_id (id :categories) + :id (id :categories col)} + (case col + :id {:special_type :id + :base_type (id-field-type) + :name (format-name "id") + :display_name "Id"} + :name {:special_type :name + :base_type :TextField + :name (format-name "name") + :display_name "Name"}))) ;; #### users (defn- users-col "Return column information for the `users` column named by keyword COL." [col] - (case col - :id {:extra_info {} - :target nil - :special_type :id - :base_type (id-field-type) - :description nil - :name (format-name "id") - :display_name "Id" - :preview_display true - :table_id (id :users) - :id (id :users :id)} - :name {:extra_info {} - :target nil - :special_type :category - :base_type :TextField - :description nil - :name (format-name "name") - :display_name "Name" - :preview_display true - :table_id (id :users) - :id (id :users :name)} - :last_login {:extra_info {} - :target nil - :special_type :category - :base_type (timestamp-field-type) - :description nil - :name (format-name "last_login") - :display_name "Last Login" - :preview_display true - :table_id (id :users) - :id (id :users :last_login)})) + (merge + (col-defaults) + {:table_id (id :users) + :id (id :users col)} + (case col + :id {:special_type :id + :base_type (id-field-type) + :name (format-name "id") + :display_name "Id"} + :name {:special_type :category + :base_type :TextField + :name (format-name "name") + :display_name "Name"} + :last_login {:special_type :category + :base_type (timestamp-field-type) + :name (format-name "last_login") + :display_name "Last Login"}))) ;; #### venues (defn- venues-columns @@ -100,71 +106,41 @@ (defn- venues-col "Return column information for the `venues` column named by keyword COL." [col] - (case col - :id {:extra_info {} - :target nil - :special_type :id - :base_type (id-field-type) - :description nil - :name (format-name "id") - :display_name "Id" - :preview_display true - :table_id (id :venues) - :id (id :venues :id)} - :category_id {:extra_info (if (fks-supported?) {:target_table_id (id :categories)} - {}) - :target (if (fks-supported?) (-> (categories-col :id) - (dissoc :target :extra_info)) - nil) - :special_type (if (fks-supported?) 
:fk - :category) - :base_type :IntegerField - :description nil - :name (format-name "category_id") - :display_name "Category Id" - :preview_display true - :table_id (id :venues) - :id (id :venues :category_id)} - :price {:extra_info {} - :target nil - :special_type :category - :base_type :IntegerField - :description nil - :name (format-name "price") - :display_name "Price" - :preview_display true - :table_id (id :venues) - :id (id :venues :price)} - :longitude {:extra_info {} - :target nil - :special_type :longitude, - :base_type :FloatField, - :description nil - :name (format-name "longitude") - :display_name "Longitude" - :preview_display true - :table_id (id :venues) - :id (id :venues :longitude)} - :latitude {:extra_info {} - :target nil - :special_type :latitude - :base_type :FloatField - :description nil - :name (format-name "latitude") - :display_name "Latitude" - :preview_display true - :table_id (id :venues) - :id (id :venues :latitude)} - :name {:extra_info {} - :target nil - :special_type :name - :base_type :TextField - :description nil - :name (format-name "name") - :display_name "Name" - :preview_display true - :table_id (id :venues) - :id (id :venues :name)})) + (merge + (col-defaults) + {:table_id (id :venues) + :id (id :venues col)} + (case col + :id {:special_type :id + :base_type (id-field-type) + :name (format-name "id") + :display_name "Id"} + :category_id {:extra_info (if (fks-supported?) {:target_table_id (id :categories)} + {}) + :target (if (fks-supported?) (-> (categories-col :id) + (dissoc :target :extra_info :schema_name)) + nil) + :special_type (if (fks-supported?) :fk + :category) + :base_type :IntegerField + :name (format-name "category_id") + :display_name "Category Id"} + :price {:special_type :category + :base_type :IntegerField + :name (format-name "price") + :display_name "Price"} + :longitude {:special_type :longitude, + :base_type :FloatField, + :name (format-name "longitude") + :display_name "Longitude"} + :latitude {:special_type :latitude + :base_type :FloatField + :name (format-name "latitude") + :display_name "Latitude"} + :name {:special_type :name + :base_type :TextField + :name (format-name "name") + :display_name "Name"}))) (defn- venues-cols "`cols` information for all the columns in `venues`." @@ -175,45 +151,35 @@ (defn- checkins-col "Return column information for the `checkins` column named by keyword COL." [col] - (case col - :id {:extra_info {} - :target nil - :special_type :id - :base_type (id-field-type) - :description nil - :name (format-name "id") - :display_name "Id" - :preview_display true - :table_id (id :checkins) - :id (id :checkins :id)} - :venue_id {:extra_info (if (fks-supported?) {:target_table_id (id :venues)} - {}) - :target (if (fks-supported?) (-> (venues-col :id) - (dissoc :target :extra_info)) - nil) - :special_type (when (fks-supported?) - :fk) - :base_type :IntegerField - :description nil - :name (format-name "venue_id") - :display_name "Venue Id" - :preview_display true - :table_id (id :checkins) - :id (id :checkins :venue_id)} - :user_id {:extra_info (if (fks-supported?) {:target_table_id (id :users)} - {}) - :target (if (fks-supported?) (-> (users-col :id) - (dissoc :target :extra_info)) - nil) - :special_type (if (fks-supported?) 
:fk - :category) - :base_type :IntegerField - :description nil - :name (format-name "user_id") - :display_name "User Id" - :preview_display true - :table_id (id :checkins) - :id (id :checkins :user_id)})) + (merge + (col-defaults) + {:table_id (id :checkins) + :id (id :checkins col)} + (case col + :id {:special_type :id + :base_type (id-field-type) + :name (format-name "id") + :display_name "Id"} + :venue_id {:extra_info (if (fks-supported?) {:target_table_id (id :venues)} + {}) + :target (if (fks-supported?) (-> (venues-col :id) + (dissoc :target :extra_info :schema_name)) + nil) + :special_type (when (fks-supported?) + :fk) + :base_type :IntegerField + :name (format-name "venue_id") + :display_name "Venue Id"} + :user_id {:extra_info (if (fks-supported?) {:target_table_id (id :users)} + {}) + :target (if (fks-supported?) (-> (users-col :id) + (dissoc :target :extra_info :schema_name)) + nil) + :special_type (if (fks-supported?) :fk + :category) + :base_type :IntegerField + :name (format-name "user_id") + :display_name "User Id"}))) ;;; #### aggregate columns @@ -236,8 +202,7 @@ :extra_info {} :target nil})) ([ag-col-kw {:keys [base_type special_type]}] - {:pre [base_type - special_type]} + {:pre [base_type special_type]} {:base_type base_type :special_type special_type :id nil @@ -575,13 +540,14 @@ ;;; SQL-Only for the time being ;; ## "STDDEV" AGGREGATION -(qp-expect-with-datasets #{:h2 :postgres :mysql} +(qp-expect-with-datasets sql-engines {:columns ["stddev"] :cols [(aggregate-col :stddev (venues-col :latitude))] :rows [[(datasets/dataset-case - :h2 3.43467255295115 ; annoying :/ - :postgres 3.4346725529512736 - :mysql 3.417456040761316)]]} + :h2 3.43467255295115 ; annoying :/ + :postgres 3.4346725529512736 + :mysql 3.417456040761316 + :sqlserver 3.43467255295126)]]} (Q aggregate stddev latitude of venues)) ;; Make sure standard deviation fails for the Mongo driver since its not supported @@ -594,7 +560,7 @@ ;;; ## order_by aggregate fields (SQL-only for the time being) ;;; ### order_by aggregate ["count"] -(qp-expect-with-datasets #{:h2 :postgres :mysql} +(qp-expect-with-datasets sql-engines {:columns [(format-name "price") "count"] :rows [[4 6] @@ -609,7 +575,7 @@ ;;; ### order_by aggregate ["sum" field-id] -(qp-expect-with-datasets #{:h2 :postgres :mysql} +(qp-expect-with-datasets sql-engines {:columns [(format-name "price") "sum"] :rows [[2 (->sum-type 2855)] @@ -624,7 +590,7 @@ ;;; ### order_by aggregate ["distinct" field-id] -(qp-expect-with-datasets #{:h2 :postgres :mysql} +(qp-expect-with-datasets sql-engines {:columns [(format-name "price") "count"] :rows [[4 6] @@ -639,13 +605,29 @@ ;;; ### order_by aggregate ["avg" field-id] -(datasets/expect-with-dataset :h2 +(datasets/expect-with-datasets sql-engines {:columns [(format-name "price") "avg"] - :rows [[3 22] - [2 28] - [1 32] - [4 53]] + :rows [[3 (datasets/dataset-case + :h2 22 + :postgres 22.0000000000000000M + :mysql 22.0000M + :sqlserver 22)] + [2 (datasets/dataset-case + :h2 28 + :postgres 28.2881355932203390M + :mysql 28.2881M + :sqlserver 28)] + [1 (datasets/dataset-case + :h2 32 + :postgres 32.8181818181818182M + :mysql 32.8182M + :sqlserver 32)] + [4 (datasets/dataset-case + :h2 53 + :postgres 53.5000000000000000M + :mysql 53.5000M + :sqlserver 53)]] :cols [(venues-col :price) (aggregate-col :avg (venues-col :category_id))]} (Q return :data @@ -654,53 +636,25 @@ breakout price order ag.0+)) -;; Values are slightly different for Postgres -(datasets/expect-with-dataset :postgres - {:rows [[3 22.0000000000000000M] - 
[2 28.2881355932203390M] - [1 32.8181818181818182M] - [4 53.5000000000000000M]] - :columns [(format-name "price") - "avg"] - :cols [(venues-col :price) - (aggregate-col :avg (venues-col :category_id))]} - (Q return :data - of venues - aggregate avg category_id - breakout price - order ag.0+)) - ;;; ### order_by aggregate ["stddev" field-id] -(datasets/expect-with-dataset :h2 - {:columns [(format-name "price") - "stddev"] - :rows [[3 26.19160170741759] - [1 24.112111881665186] - [2 21.418692164795292] - [4 14.788509052639485]] - :cols [(venues-col :price) - (aggregate-col :stddev (venues-col :category_id))]} - (Q return :data - of venues - aggregate stddev category_id - breakout price - order ag.0-)) - -(datasets/expect-with-dataset :postgres +;; MySQL has a nasty tendency to return different results on different systems so just round everything to the nearest int. +;; It also seems to give slightly different results than less-sucky DBs as evidenced below +(datasets/expect-with-datasets sql-engines {:columns [(format-name "price") "stddev"] - :rows [[3 26.1916017074175897M] - [1 24.1121118816651851M] - [2 21.4186921647952867M] - [4 14.7885090526394851M]] + :rows [[3 (datasets/dataset-case :h2 26, :postgres 26, :mysql 25, :sqlserver 26)] + [1 24] + [2 21] + [4 (datasets/dataset-case :h2 15, :postgres 15, :mysql 14, :sqlserver 15)]] :cols [(venues-col :price) (aggregate-col :stddev (venues-col :category_id))]} - (Q return :data - of venues - aggregate stddev category_id - breakout price - order ag.0-)) - + (-> (Q return :data + of venues + aggregate stddev category_id + breakout price + order ag.0-) + (update :rows (partial mapv (fn [[x y]] + [x (int (math/round y))]))))) ;;; ### make sure that rows where preview_display = false are included and properly marked up (datasets/expect-with-all-datasets @@ -749,16 +703,27 @@ ;; Filter out the timestamps from the results since they're hard to test :/ (-> (Q aggregate rows of users order id+) - (update-in [:data :rows] (partial mapv (partial filterv #(not (isa? (type %) java.util.Date))))))) + (update-in [:data :rows] (partial mapv (fn [[id last-login name]] + [id name]))))) ;; +------------------------------------------------------------------------------------------------------------------------+ ;; | UNIX TIMESTAMP SPECIAL_TYPE FIELDS | ;; +------------------------------------------------------------------------------------------------------------------------+ +(defmacro if-sqlserver + "SQLServer lacks timezone support; the groupings in sad-toucan-incidents happen in UTC rather than US/Pacfic time. This + macro is provided as a convenience for specifying the *slightly* different expected results in the multi-driver unit tests below." 
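;; (Hedged usage sketch, borrowing the sad-toucan-incidents count expectation from the hunk
;;  just below: when *engine* is :sqlserver the first form supplies the expected value,
;;  otherwise the shared H2/Postgres/MySQL value is used.)
;;
;;   (datasets/expect-with-datasets sql-engines
;;     (if-sqlserver
;;       10   ; SQL Server buckets the timestamps in UTC
;;       9)   ; H2/Postgres/MySQL bucket them in the DB timezone (US/Pacific here)
;;     (Q dataset sad-toucan-incidents
;;        of incidents
;;        ...))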
+ [then else] + `(if (= *engine* :sqlserver) + ~then + ~else)) + ;; There were 9 "sad toucan incidents" on 2015-06-02 -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - 9 +(datasets/expect-with-datasets sql-engines + (if-sqlserver + 10 + 9) (Q dataset sad-toucan-incidents of incidents filter and > timestamp "2015-06-01" @@ -768,24 +733,36 @@ ;;; Unix timestamp breakouts -- SQL only -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [["2015-06-01" 8] - ["2015-06-02" 9] - ["2015-06-03" 9] - ["2015-06-04" 4] - ["2015-06-05" 11] - ["2015-06-06" 8] - ["2015-06-07" 6] - ["2015-06-08" 10] - ["2015-06-09" 6] - ["2015-06-10" 10]] - (->> (Q dataset sad-toucan-incidents - aggregate count of incidents - breakout timestamp - limit 10 - return rows) - (map (fn [[^java.util.Date date count]] - [(.toString date) (int count)])))) +(datasets/expect-with-datasets sql-engines + (if-sqlserver + ;; SQL Server doesn't have a concept of timezone so results are all grouped by UTC + ;; This is technically correct but the results differ from less-wack DBs + [[#inst "2015-06-01T07" 6] + [#inst "2015-06-02T07" 10] + [#inst "2015-06-03T07" 4] + [#inst "2015-06-04T07" 9] + [#inst "2015-06-05T07" 9] + [#inst "2015-06-06T07" 8] + [#inst "2015-06-07T07" 8] + [#inst "2015-06-08T07" 9] + [#inst "2015-06-09T07" 7] + [#inst "2015-06-10T07" 9]] + ;; Postgres, MySQL, and H2 -- grouped by DB timezone, US/Pacific in this case + [[#inst "2015-06-01T07" 8] + [#inst "2015-06-02T07" 9] + [#inst "2015-06-03T07" 9] + [#inst "2015-06-04T07" 4] + [#inst "2015-06-05T07" 11] + [#inst "2015-06-06T07" 8] + [#inst "2015-06-07T07" 6] + [#inst "2015-06-08T07" 10] + [#inst "2015-06-09T07" 6] + [#inst "2015-06-10T07" 10]]) + (Q dataset sad-toucan-incidents + aggregate count of incidents + breakout timestamp + limit 10 + return rows)) ;; +------------------------------------------------------------------------------------------------------------------------+ @@ -794,7 +771,7 @@ ;; The top 10 cities by number of Tupac sightings ;; Test that we can breakout on an FK field (Note how the FK Field is returned in the results) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [["Arlington" 16] ["Albany" 15] ["Portland" 14] @@ -817,7 +794,7 @@ ;; Number of Tupac sightings in the Expa office ;; (he was spotted here 60 times) ;; Test that we can filter on an FK field -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines 60 (Q dataset tupac-sightings return first-row first @@ -828,7 +805,7 @@ ;; THE 10 MOST RECENT TUPAC SIGHTINGS (!) ;; (What he was doing when we saw him, sighting ID) ;; Check that we can include an FK field in the :fields clause -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[772 "In the Park"] [894 "Working at a Pet Store"] [684 "At the Airport"] @@ -851,7 +828,7 @@ ;; (this query targets sightings and orders by cities.name and categories.name) ;; 2. 
Check that we can join MULTIPLE tables in a single query ;; (this query joins both cities and categories) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines ;; CITY_ID, CATEGORY_ID, ID ;; Cities are already alphabetized in the source data which is why CITY_ID is sorted [[1 12 6] @@ -1091,33 +1068,57 @@ limit 10 return rows))) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[#inst "2015-06-01T10:31" 1] - [#inst "2015-06-01T16:06" 1] - [#inst "2015-06-01T17:23" 1] - [#inst "2015-06-01T18:55" 1] - [#inst "2015-06-01T21:04" 1] - [#inst "2015-06-01T21:19" 1] - [#inst "2015-06-02T02:13" 1] - [#inst "2015-06-02T05:37" 1] - [#inst "2015-06-02T08:20" 1] - [#inst "2015-06-02T11:11" 1]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[#inst "2015-06-01T17:31" 1] + [#inst "2015-06-01T23:06" 1] + [#inst "2015-06-02T00:23" 1] + [#inst "2015-06-02T01:55" 1] + [#inst "2015-06-02T04:04" 1] + [#inst "2015-06-02T04:19" 1] + [#inst "2015-06-02T09:13" 1] + [#inst "2015-06-02T12:37" 1] + [#inst "2015-06-02T15:20" 1] + [#inst "2015-06-02T18:11" 1]] + + [[#inst "2015-06-01T10:31" 1] + [#inst "2015-06-01T16:06" 1] + [#inst "2015-06-01T17:23" 1] + [#inst "2015-06-01T18:55" 1] + [#inst "2015-06-01T21:04" 1] + [#inst "2015-06-01T21:19" 1] + [#inst "2015-06-02T02:13" 1] + [#inst "2015-06-02T05:37" 1] + [#inst "2015-06-02T08:20" 1] + [#inst "2015-06-02T11:11" 1]]) (sad-toucan-incidents-with-bucketing :default)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[#inst "2015-06-01T10:31" 1] - [#inst "2015-06-01T16:06" 1] - [#inst "2015-06-01T17:23" 1] - [#inst "2015-06-01T18:55" 1] - [#inst "2015-06-01T21:04" 1] - [#inst "2015-06-01T21:19" 1] - [#inst "2015-06-02T02:13" 1] - [#inst "2015-06-02T05:37" 1] - [#inst "2015-06-02T08:20" 1] - [#inst "2015-06-02T11:11" 1]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[#inst "2015-06-01T17:31" 1] + [#inst "2015-06-01T23:06" 1] + [#inst "2015-06-02T00:23" 1] + [#inst "2015-06-02T01:55" 1] + [#inst "2015-06-02T04:04" 1] + [#inst "2015-06-02T04:19" 1] + [#inst "2015-06-02T09:13" 1] + [#inst "2015-06-02T12:37" 1] + [#inst "2015-06-02T15:20" 1] + [#inst "2015-06-02T18:11" 1]] + + [[#inst "2015-06-01T10:31" 1] + [#inst "2015-06-01T16:06" 1] + [#inst "2015-06-01T17:23" 1] + [#inst "2015-06-01T18:55" 1] + [#inst "2015-06-01T21:04" 1] + [#inst "2015-06-01T21:19" 1] + [#inst "2015-06-02T02:13" 1] + [#inst "2015-06-02T05:37" 1] + [#inst "2015-06-02T08:20" 1] + [#inst "2015-06-02T11:11" 1]]) (sad-toucan-incidents-with-bucketing :minute)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[0 5] [1 4] [2 2] @@ -1130,129 +1131,136 @@ [9 1]] (sad-toucan-incidents-with-bucketing :minute-of-hour)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[#inst "2015-06-01T10" 1] - [#inst "2015-06-01T16" 1] - [#inst "2015-06-01T17" 1] - [#inst "2015-06-01T18" 1] - [#inst "2015-06-01T21" 2] - [#inst "2015-06-02T02" 1] - [#inst "2015-06-02T05" 1] - [#inst "2015-06-02T08" 1] - [#inst "2015-06-02T11" 1] - [#inst "2015-06-02T13" 1]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[#inst "2015-06-01T17" 1] + [#inst "2015-06-01T23" 1] + [#inst "2015-06-02T00" 1] + [#inst "2015-06-02T01" 1] + [#inst "2015-06-02T04" 2] + [#inst "2015-06-02T09" 1] + [#inst "2015-06-02T12" 1] + [#inst "2015-06-02T15" 1] + [#inst "2015-06-02T18" 1] + [#inst "2015-06-02T20" 1]] + + [[#inst "2015-06-01T10" 1] + [#inst "2015-06-01T16" 1] + [#inst "2015-06-01T17" 
1] + [#inst "2015-06-01T18" 1] + [#inst "2015-06-01T21" 2] + [#inst "2015-06-02T02" 1] + [#inst "2015-06-02T05" 1] + [#inst "2015-06-02T08" 1] + [#inst "2015-06-02T11" 1] + [#inst "2015-06-02T13" 1]]) (sad-toucan-incidents-with-bucketing :hour)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[0 8] - [1 9] - [2 7] - [3 10] - [4 10] - [5 9] - [6 6] - [7 5] - [8 7] - [9 7]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]] + [[0 8] [1 9] [2 7] [3 10] [4 10] [5 9] [6 6] [7 5] [8 7] [9 7]]) (sad-toucan-incidents-with-bucketing :hour-of-day)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[#inst "2015-06-01T07" 8] - [#inst "2015-06-02T07" 9] - [#inst "2015-06-03T07" 9] - [#inst "2015-06-04T07" 4] - [#inst "2015-06-05T07" 11] - [#inst "2015-06-06T07" 8] - [#inst "2015-06-07T07" 6] - [#inst "2015-06-08T07" 10] - [#inst "2015-06-09T07" 6] - [#inst "2015-06-10T07" 10]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[#inst "2015-06-01T07" 6] + [#inst "2015-06-02T07" 10] + [#inst "2015-06-03T07" 4] + [#inst "2015-06-04T07" 9] + [#inst "2015-06-05T07" 9] + [#inst "2015-06-06T07" 8] + [#inst "2015-06-07T07" 8] + [#inst "2015-06-08T07" 9] + [#inst "2015-06-09T07" 7] + [#inst "2015-06-10T07" 9]] + + [[#inst "2015-06-01T07" 8] + [#inst "2015-06-02T07" 9] + [#inst "2015-06-03T07" 9] + [#inst "2015-06-04T07" 4] + [#inst "2015-06-05T07" 11] + [#inst "2015-06-06T07" 8] + [#inst "2015-06-07T07" 6] + [#inst "2015-06-08T07" 10] + [#inst "2015-06-09T07" 6] + [#inst "2015-06-10T07" 10]]) (sad-toucan-incidents-with-bucketing :day)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[1 29] - [2 36] - [3 33] - [4 29] - [5 13] - [6 38] - [7 22]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]] + [[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]]) (sad-toucan-incidents-with-bucketing :day-of-week)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[1 8] - [2 9] - [3 9] - [4 4] - [5 11] - [6 8] - [7 6] - [8 10] - [9 6] - [10 10]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[1 6] [2 10] [3 4] [4 9] [5 9] [6 8] [7 8] [8 9] [9 7] [10 9]] + [[1 8] [2 9] [3 9] [4 4] [5 11] [6 8] [7 6] [8 10] [9 6] [10 10]]) (sad-toucan-incidents-with-bucketing :day-of-month)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[152 8] - [153 9] - [154 9] - [155 4] - [156 11] - [157 8] - [158 6] - [159 10] - [160 6] - [161 10]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[152 6] [153 10] [154 4] [155 9] [156 9] [157 8] [158 8] [159 9] [160 7] [161 9]] + [[152 8] [153 9] [154 9] [155 4] [156 11] [157 8] [158 6] [159 10] [160 6] [161 10]]) (sad-toucan-incidents-with-bucketing :day-of-year)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[#inst "2015-05-31T07" 49] - [#inst "2015-06-07T07" 47] - [#inst "2015-06-14T07" 39] - [#inst "2015-06-21T07" 58] - [#inst "2015-06-28T07" 7]] +(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[#inst "2015-05-31T07" 46] + [#inst "2015-06-07T07" 47] + [#inst "2015-06-14T07" 40] + [#inst "2015-06-21T07" 60] + [#inst "2015-06-28T07" 7]] + + [[#inst "2015-05-31T07" 49] + [#inst "2015-06-07T07" 47] + [#inst "2015-06-14T07" 39] + [#inst "2015-06-21T07" 58] + [#inst "2015-06-28T07" 7]]) (sad-toucan-incidents-with-bucketing :week)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} - [[23 49] - [24 47] - [25 39] - [26 58] - [27 7]] 
+(datasets/expect-with-datasets sql-engines + (if-sqlserver + [[23 54] [24 46] [25 39] [26 61]] + [[23 49] [24 47] [25 39] [26 58] [27 7]]) (sad-toucan-incidents-with-bucketing :week-of-year)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[#inst "2015-06-01T07" 200]] (sad-toucan-incidents-with-bucketing :month)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[6 200]] (sad-toucan-incidents-with-bucketing :month-of-year)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[#inst "2015-04-01T07" 200]] (sad-toucan-incidents-with-bucketing :quarter)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[2 200]] (sad-toucan-incidents-with-bucketing :quarter-of-year)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines [[2015 200]] (sad-toucan-incidents-with-bucketing :year)) ;; RELATIVE DATES (defn- database-def-with-timestamps [interval-seconds] - (create-database-definition "DB" - ["checkins" - [{:field-name "timestamp" - :base-type :DateTimeField}] - (vec (for [i (range -15 15)] - [(java.sql.Timestamp. (+ (System/currentTimeMillis) (* i 1000 interval-seconds)))]))])) - -(def ^:private checkins:4-per-minute (database-def-with-timestamps 15)) -(def ^:private checkins:4-per-hour (database-def-with-timestamps (* 60 15))) -(def ^:private checkins:1-per-day (database-def-with-timestamps (* 60 60 24))) + (let [{:keys [date-interval]} (driver/engine->driver *engine*)] + (create-database-definition "DB" + ["checkins" + [{:field-name "timestamp" + :base-type :DateTimeField}] + (vec (for [i (range -15 15)] + ;; Create timestamps using relative dates (e.g. `DATEADD(second, -195, GETUTCDATE())` instead of generating `java.sql.Timestamps` here so + ;; they'll be in the DB's native timezone. 
Some DBs refuse to use the same timezone we're running the tests from *cough* SQL Server *cough* + [(date-interval :second (* i interval-seconds))]))]))) + +(def ^:private checkins:4-per-minute (partial database-def-with-timestamps 15)) +(def ^:private checkins:4-per-hour (partial database-def-with-timestamps (* 60 15))) +(def ^:private checkins:1-per-day (partial database-def-with-timestamps (* 60 60 24))) (defn- count-of-grouping [db field-grouping & relative-datetime-args] (with-temp-db [_ db] @@ -1260,24 +1268,24 @@ filter = ["datetime_field" (id :checkins :timestamp) "as" (name field-grouping)] (apply vector "relative_datetime" relative-datetime-args) return first-row first))) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-minute :minute "current")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-minute :minute -1 "minute")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-minute :minute 1 "minute")) +(datasets/expect-with-datasets sql-engines 4 (count-of-grouping (checkins:4-per-minute) :minute "current")) +(datasets/expect-with-datasets sql-engines 4 (count-of-grouping (checkins:4-per-minute) :minute -1 "minute")) +(datasets/expect-with-datasets sql-engines 4 (count-of-grouping (checkins:4-per-minute) :minute 1 "minute")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-hour :hour "current")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-hour :hour -1 "hour")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-hour :hour 1 "hour")) +(datasets/expect-with-datasets sql-engines 4 (count-of-grouping (checkins:4-per-hour) :hour "current")) +(datasets/expect-with-datasets sql-engines 4 (count-of-grouping (checkins:4-per-hour) :hour -1 "hour")) +(datasets/expect-with-datasets sql-engines 4 (count-of-grouping (checkins:4-per-hour) :hour 1 "hour")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 1 (count-of-grouping checkins:1-per-day :day "current")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 1 (count-of-grouping checkins:1-per-day :day -1 "day")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 1 (count-of-grouping checkins:1-per-day :day 1 "day")) +(datasets/expect-with-datasets sql-engines 1 (count-of-grouping (checkins:1-per-day) :day "current")) +(datasets/expect-with-datasets sql-engines 1 (count-of-grouping (checkins:1-per-day) :day -1 "day")) +(datasets/expect-with-datasets sql-engines 1 (count-of-grouping (checkins:1-per-day) :day 1 "day")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 7 (count-of-grouping checkins:1-per-day :week "current")) +(datasets/expect-with-datasets sql-engines 7 (count-of-grouping (checkins:1-per-day) :week "current")) ;; SYNTACTIC SUGAR -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines 1 - (with-temp-db [_ checkins:1-per-day] + (with-temp-db [_ (checkins:1-per-day)] (-> (driver/process-query {:database (db-id) :type :query @@ -1286,9 +1294,9 @@ :filter ["TIME_INTERVAL" (id :checkins :timestamp) "current" "day"]}}) :data :rows first first))) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-datasets sql-engines 7 - (with-temp-db [_ checkins:1-per-day] + (with-temp-db [_ (checkins:1-per-day)] (-> (driver/process-query {:database (db-id) :type :query @@ -1296,3 +1304,39 @@ :aggregation 
["count"] :filter ["TIME_INTERVAL" (id :checkins :timestamp) "last" "week"]}}) :data :rows first first))) + +;; Make sure that when referencing the same field multiple times with different units we return the one +;; that actually reflects the units the results are in. +;; eg when we breakout by one unit and filter by another, make sure the results and the col info +;; use the unit used by breakout +(defn- date-bucketing-unit-when-you [& {:keys [breakout-by filter-by]}] + (with-temp-db [_ (checkins:1-per-day)] + (let [results (driver/process-query + {:database (db-id) + :type :query + :query {:source_table (id :checkins) + :aggregation ["count"] + :breakout [["datetime_field" (id :checkins :timestamp) "as" breakout-by]] + :filter ["TIME_INTERVAL" (id :checkins :timestamp) "current" filter-by]}})] + {:rows (-> results :row_count) + :unit (-> results :data :cols first :unit)}))) + +(datasets/expect-with-datasets sql-engines + {:rows 1, :unit :day} + (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day")) + +(datasets/expect-with-datasets sql-engines + {:rows 7, :unit :day} + (date-bucketing-unit-when-you :breakout-by "day", :filter-by "week")) + +(datasets/expect-with-datasets sql-engines + {:rows 1, :unit :week} + (date-bucketing-unit-when-you :breakout-by "week", :filter-by "day")) + +(datasets/expect-with-datasets sql-engines + {:rows 1, :unit :quarter} + (date-bucketing-unit-when-you :breakout-by "quarter", :filter-by "day")) + +(datasets/expect-with-datasets sql-engines + {:rows 1, :unit :hour} + (date-bucketing-unit-when-you :breakout-by "hour", :filter-by "day")) diff --git a/test/metabase/driver/sync_test.clj b/test/metabase/driver/sync_test.clj index 8307fa680798e9488e15406b4ba7f6cb74c1f78b..1f02ea22fc07a6957c3776873f302edd60770e13 100644 --- a/test/metabase/driver/sync_test.clj +++ b/test/metabase/driver/sync_test.clj @@ -4,7 +4,6 @@ [metabase.db :refer :all] [metabase.driver :as driver] (metabase.driver [h2 :as h2] - [interface :as i] [sync :as sync]) [metabase.driver.generic-sql.util :refer [korma-entity]] (metabase.models [field :refer [Field]] @@ -21,7 +20,7 @@ (delay (sel :one Table :name "USERS"))) (def venues-table - (delay (sel :one Table :name "VENUES"))) + (delay (Table (id :venues)))) (def korma-users-table (delay (korma-entity @users-table))) diff --git a/test/metabase/email/messages_test.clj b/test/metabase/email/messages_test.clj index 72d5862554c00e9f374b12954b347d588f23bc97..d2247cdca82cd6570af03f99d91ded8215ac3b5e 100644 --- a/test/metabase/email/messages_test.clj +++ b/test/metabase/email/messages_test.clj @@ -27,13 +27,10 @@ `(binding [email/*send-email-fn* fake-inbox-email-fn] (reset-inbox!) 
;; Push some fake settings for SMTP username + password, and restore originals when done - (let [orig-username# (email/email-smtp-username) - orig-password# (email/email-smtp-password)] - (email/email-smtp-username "fake_smtp_username") - (email/email-smtp-password "ABCD1234!!") + (let [orig-hostname# (email/email-smtp-host)] + (email/email-smtp-host "fake_smtp_host") (try ~@body - (finally (email/email-smtp-username orig-username#) - (email/email-smtp-password orig-password#)))))) + (finally (email/email-smtp-host orig-hostname#)))))) ;; new user email ;; NOTE: we are not validating the content of the email body namely because it's got randomized elements and thus diff --git a/test/metabase/test/data.clj b/test/metabase/test/data.clj index a6fc14085940468a702eaa539f38304aa9ef18de..ae884c8c900decd3306b746f08210422223eb8b7 100644 --- a/test/metabase/test/data.clj +++ b/test/metabase/test/data.clj @@ -56,6 +56,7 @@ (:id (db))) (defn fks-supported? [] (datasets/fks-supported? *dataset*)) +(defn default-schema [] (datasets/default-schema *dataset*)) (defn format-name [name] (datasets/format-name *dataset* name)) (defn id-field-type [] (datasets/id-field-type *dataset*)) (defn sum-field-type [] (datasets/sum-field-type *dataset*)) @@ -76,17 +77,13 @@ (or (metabase-instance database-definition engine) (do ;; Create the database - (create-physical-db! dataset-loader database-definition) - - ;; Load data - (doseq [^TableDefinition table-definition (:table-definitions database-definition)] - (load-table-data! dataset-loader database-definition table-definition)) + (create-db! dataset-loader database-definition) ;; Add DB object to Metabase DB (let [db (ins Database :name database-name :engine (name engine) - :details (database->connection-details dataset-loader database-definition))] + :details (database->connection-details dataset-loader :db database-definition))] ;; Sync the database (driver/sync-database! db) @@ -94,13 +91,16 @@ ;; Add extra metadata like Field field-type, base-type, etc. (doseq [^TableDefinition table-definition (:table-definitions database-definition)] (let [table-name (:table-name table-definition) - table (delay (let [table (metabase-instance table-definition db)] - (assert table) - table))] + table (delay (or (metabase-instance table-definition db) + (throw (Exception. (format "Table '%s' not loaded from definition:\n%s\nFound:\n%s" + table-name + (u/pprint-to-str (dissoc table-definition :rows)) + (u/pprint-to-str (sel :many :fields [Table :schema :name], :db_id (:id db))))))))] (doseq [{:keys [field-name field-type special-type], :as field-definition} (:field-definitions table-definition)] - (let [field (delay (let [field (metabase-instance field-definition @table)] - (assert field) - field))] + (let [field (delay (or (metabase-instance field-definition @table) + (throw (Exception. (format "Field '%s' not loaded from definition:\n%s" + field-name + (u/pprint-to-str field-definition))))))] (when field-type (log/debug (format "SET FIELD TYPE %s.%s -> %s" table-name field-name field-type)) (upd Field (:id @field) :field_type (name field-type))) @@ -120,7 +120,7 @@ (cascade-delete Database :id (:id (metabase-instance database-definition (engine dataset-loader)))) ;; now delete the DBMS database - (drop-physical-db! dataset-loader database-definition))) + (destroy-db! dataset-loader database-definition))) ;; ## Temporary Dataset Macros @@ -183,7 +183,6 @@ dbdef (map->DatabaseDefinition (assoc dbdef :short-lived?
true))] (try (binding [*sel-disable-logging* true] - (remove-database! loader dbdef) (let [db (-> (get-or-create-database! loader dbdef) temp-db-add-getter-delay)] (assert db) diff --git a/test/metabase/test/data/datasets.clj b/test/metabase/test/data/datasets.clj index 0311c108dd0f3a8c5dc532b6827fac8985871a9c..42a3a55dc71bf5708cd6b171d1b099c16cfc4151 100644 --- a/test/metabase/test/data/datasets.clj +++ b/test/metabase/test/data/datasets.clj @@ -6,6 +6,7 @@ [environ.core :refer [env]] [expectations :refer :all] [metabase.db :refer :all] + [metabase.driver :as driver] [metabase.driver.mongo.test-data :as mongo-data] (metabase.models [field :refer [Field]] [table :refer [Table]]) @@ -13,7 +14,8 @@ [h2 :as h2] [mongo :as mongo] [mysql :as mysql] - [postgres :as postgres]) + [postgres :as postgres] + [sqlserver :as sqlserver]) [metabase.util :as u])) ;; # IDataset @@ -34,6 +36,8 @@ "Given keyword TABLE-NAME and FIELD-NAME, return the corresponding `Field` ID.") (fks-supported? [this] "Does this driver support Foreign Keys?") + (default-schema [this] + "Return the default schema name that tables for this DB should be expected to have.") (format-name [this table-or-field-name] "Transform a lowercase string `Table` or `Field` name in a way appropriate for this dataset (e.g., `h2` would want to upcase these names; `mongo` would want to use `\"_id\"` in place of `\"id\"`.") @@ -75,6 +79,7 @@ table-or-field-name)) (fks-supported? [_] false) + (default-schema [_] nil) (id-field-type [_] :IntegerField) (sum-field-type [_] :IntegerField) (timestamp-field-type [_] :DateField)) @@ -133,6 +138,7 @@ (Table (table-name->id this (s/upper-case (name table-name))))) :field-name->id (fn [this table-name field-name] (memoized-field-name->id (:id (db this)) (s/upper-case (name table-name)) (s/upper-case (name field-name)))) + :default-schema (constantly "PUBLIC") :format-name (fn [_ table-or-field-name] (clojure.string/upper-case table-or-field-name)) :id-field-type (constantly :BigIntegerField) @@ -148,6 +154,7 @@ (merge GenericSQLIDatasetMixin {:dataset-loader (fn [_] (postgres/dataset-loader)) + :default-schema (constantly "public") :sum-field-type (constantly :IntegerField)})) @@ -160,17 +167,32 @@ (merge GenericSQLIDatasetMixin {:dataset-loader (fn [_] (mysql/dataset-loader)) + :default-schema (constantly nil) :sum-field-type (constantly :BigIntegerField)})) +;;; ### SQLServer + +(defrecord SQLServerDriverData [dbpromise]) + +(extend SQLServerDriverData + IDataset + (merge GenericSQLIDatasetMixin + {:dataset-loader (fn [_] + (sqlserver/dataset-loader)) + :default-schema (constantly "dbo") + :sum-field-type (constantly :IntegerField)})) + + ;; # Concrete Instances (def dataset-name->dataset "Map of dataset keyword name -> dataset instance (i.e., an object that implements `IDataset`)." - {:mongo (MongoDriverData.) - :h2 (H2DriverData. (promise)) - :postgres (PostgresDriverData. (promise)) - :mysql (MySQLDriverData. (promise))}) + {:mongo (MongoDriverData.) + :h2 (H2DriverData. (promise)) + :postgres (PostgresDriverData. (promise)) + :mysql (MySQLDriverData. (promise)) + :sqlserver (SQLServerDriverData. (promise))}) (def ^:const all-valid-dataset-names "Set of names of all valid datasets." @@ -216,15 +238,21 @@ (def ^:dynamic *dataset* "The dataset we're currently testing against, bound by `with-dataset`. - Defaults to `:h2`." + Defaults to `(dataset-name->dataset :h2)`." (dataset-name->dataset (if (contains? 
test-dataset-names :h2) :h2 (first test-dataset-names)))) +(def ^:dynamic *engine* + "Keyword name of the engine that we're currently testing against. Defaults to `:h2`." + :h2) + (defmacro with-dataset "Bind `*dataset*` to the dataset with DATASET-NAME and execute BODY." [dataset-name & body] - `(binding [*dataset* (dataset-name->dataset ~dataset-name)] - ~@body)) + `(let [engine# ~dataset-name] + (binding [*engine* engine# + *dataset* (dataset-name->dataset engine#)] + ~@body))) (defmacro when-testing-dataset "Execute BODY only if we're currently testing against DATASET-NAME." @@ -261,9 +289,8 @@ "Generate unit tests for all datasets in DATASET-NAMES; each test will only run if we're currently testing the corresponding dataset. `*dataset*` is bound to the current dataset inside each test." [dataset-names expected actual] - `(do ~@(map (fn [dataset-name] - `(expect-with-dataset ~dataset-name ~expected ~actual)) - dataset-names))) + `(do ~@(for [dataset-name (eval dataset-names)] + `(expect-with-dataset ~dataset-name ~expected ~actual)))) (defmacro expect-with-all-datasets "Generate unit tests for all valid datasets; each test will only run if we're currently testing the corresponding dataset. @@ -280,6 +307,6 @@ [& pairs] `(cond ~@(mapcat (fn [[dataset then]] (assert (contains? all-valid-dataset-names dataset)) - [`(= *dataset* (dataset-name->dataset ~dataset)) + [`(= *engine* ~dataset) then]) (partition 2 pairs)))) diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj index 6262a713ee8fcdcf0f941c1fbf1bd01d0f1529d2..613914957e4f9c4f853325287fa9fbdb82cb26bb 100644 --- a/test/metabase/test/data/generic_sql.clj +++ b/test/metabase/test/data/generic_sql.clj @@ -1,87 +1,228 @@ (ns metabase.test.data.generic-sql "Common functionality for various Generic SQL dataset loaders." - (:require [clojure.tools.logging :as log] - [korma.core :as k] - [metabase.test.data.interface :as i]) - (:import (metabase.test.data.interface DatabaseDefinition + (:require [clojure.java.jdbc :as jdbc] + [clojure.tools.logging :as log] + (korma [core :as k] + [db :as kdb]) + [metabase.driver :as driver] + [metabase.test.data.interface :as i] + [metabase.util :as u]) + (:import clojure.lang.Keyword + (metabase.test.data.interface DatabaseDefinition + FieldDefinition TableDefinition))) -(defn- quote-name [{:keys [quote-character], :or {quote-character \"}} nm] - (str quote-character nm quote-character)) +;;; ## ------------------------------------------------------------ IGenericDatasetLoader + default impls ------------------------------------------------------------ (defprotocol IGenericSQLDatasetLoader - "Methods that generic SQL dataset loaders should implement so they can use the shared functions in `metabase.test.data.generic-sql`. - - (Optional) Properies: - - * `quote-character`: Character to use to quote table & field names in raw SQL. Defaults to double-quote." - (execute-sql! [this ^DatabaseDefinition database-definition ^String raw-sql] - "Execute RAW-SQL against database defined by DATABASE-DEFINITION.") - - (korma-entity [this ^DatabaseDefinition database-definition ^TableDefinition table-definition] - "Return a Korma entity (e.g., one that can be passed to `select` or `sel` for the table - defined by TABLE-DEFINITION in the database defined by DATABASE-DEFINITION.") + "Methods for loading `DatabaseDefinition` in a SQL database. + A type that implements `IGenericSQLDatasetLoader` can be made to implement most of `IDatasetLoader` + by using the `IDatasetLoaderMixin`. 
+ Methods marked *Optional* below have a default implementation specified in `DefaultsMixin`." + (field-base-type->sql-type [this ^Keyword base-type] + "Return a native SQL type that should be used for fields of BASE-TYPE.") (pk-sql-type ^String [this] - "SQL that should be used for creating the PK Table ID, e.g. `SERIAL` or `BIGINT AUTOINCREMENT`.") - + "SQL type of a primary key field.") + + ;; *Optional* SQL Statements + (create-db-sql ^String [this ^DatabaseDefinition dbdef] + "*Optional* Return a `CREATE DATABASE` statement.") + (drop-db-if-exists-sql ^String [this ^DatabaseDefinition dbdef] + "*Optional* Return a `DROP DATABASE` statement.") + (create-table-sql ^String [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef] + "*Optional* Return a `CREATE TABLE` statement.") + (drop-table-if-exists-sql ^String [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef] + "*Optional* Return a `DROP TABLE IF EXISTS` statement.") + (add-fk-sql ^String [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef, ^FieldDefinition fielddef] + "*Optional* Return a `ALTER TABLE ADD CONSTRAINT FOREIGN KEY` statement.") + + ;; Other optional methods + (korma-entity [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef] + "*Optional* Return a korma-entity for TABLEDEF.") (pk-field-name ^String [this] - "e.g. `id` or `ID`.") - - (field-base-type->sql-type ^String [this base-type] - "Given a `Field.base_type`, return the SQL type we should use for that column when creating a DB.")) - - -(defn create-physical-table! [dataset-loader database-definition {:keys [table-name field-definitions], :as table-definition}] - ;; Drop the table if it already exists - (i/drop-physical-table! dataset-loader database-definition table-definition) - - ;; Now create the new table - (execute-sql! dataset-loader database-definition - (let [quot (partial quote-name dataset-loader) - pk-field-name (quot (pk-field-name dataset-loader))] - (format "CREATE TABLE %s (%s, %s %s, PRIMARY KEY (%s));" - (quot table-name) - (->> field-definitions - (map (fn [{:keys [field-name base-type]}] - (format "%s %s" (quot field-name) (field-base-type->sql-type dataset-loader base-type)))) - (interpose ", ") - (apply str)) - pk-field-name (pk-sql-type dataset-loader) - pk-field-name)))) - - -(defn drop-physical-table! [dataset-loader database-definition {:keys [table-name]}] - (execute-sql! dataset-loader database-definition - (format "DROP TABLE IF EXISTS %s;" (quote-name dataset-loader table-name)))) - - -(defn create-physical-db! [dataset-loader {:keys [table-definitions], :as database-definition}] - (let [quot (partial quote-name dataset-loader)] - ;; Create all the Tables - (doseq [^TableDefinition table-definition table-definitions] - (i/create-physical-table! dataset-loader database-definition table-definition)) - - ;; Now add the foreign key constraints - (doseq [{:keys [table-name field-definitions]} table-definitions] - (doseq [{dest-table-name :fk, field-name :field-name} field-definitions] - (when dest-table-name - (let [dest-table-name (name dest-table-name)] - (execute-sql! dataset-loader database-definition - (format "ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s);" - (quot table-name) - (quot (format "FK_%s_%s_%s" table-name field-name dest-table-name)) - (quot field-name) - (quot dest-table-name) - (quot (pk-field-name dataset-loader)))))))))) - - -(defn load-table-data! 
[dataset-loader database-definition table-definition] - (let [rows (:rows table-definition) - fields-for-insert (map :field-name (:field-definitions table-definition))] - (-> (korma-entity dataset-loader database-definition table-definition) - (k/insert (k/values (->> (for [row rows] - (for [v row] - (if (instance? java.util.Date v) (java.sql.Timestamp. (.getTime ^java.util.Date v)) - v))) - (map (partial zipmap fields-for-insert)))))))) + "*Optional* Name of a PK field. Defaults to `\"id\"`.") + (qualified-name-components [this ^String database-name] + [this ^String database-name, ^String table-name] + [this ^String database-name, ^String table-name, ^String field-name] + "*Optional*. Return a vector of String names that can be used to refer to a database, table, or field. + This is provided so loaders have the opportunity to inject things like schema names or even modify the names themselves. + + (qualified-name-components [loader \"my-db\" \"my-table\"]) -> [\"my-db\" \"dbo\" \"my-table\"] + + By default, this qualifies field names with their table name, but otherwise does no other specific qualification.") + (quote-name ^String [this ^String nm] + "*Optional*. Quote a name. Defaults to using double quotes.") + (qualify+quote-name ^String [this ^String database-name] + ^String [this ^String database-name, ^String table-name] + ^String [this ^String database-name, ^String table-name, ^String field-name] + "*Optional*. Qualify names and combine into a single, quoted name. By default, this combines the results of `qualified-name-components` + and `quote-name`. + + (qualify+quote-name [loader \"my-db\" \"my-table\"]) -> \"my-db\".\"dbo\".\"my-table\"") + (driver [this] + "*Optional*. Return the driver associated with this dataset loader.") + (database->spec [this ^Keyword context, ^DatabaseDefinition dbdef] + "*Optional*. Return a JDBC spec that should be used to connect to DBDEF. + Uses `connection-details->spec` from `driver` by default.") + (load-table-data! [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef] + "*Optional*. Load the rows for a specific table into a DB.") + (execute-sql! [loader ^Keyword context, ^DatabaseDefinition dbdef, ^String sql] + "Execute a string of raw SQL. Context is either `:server` or `:db`.")) + + +(defn- default-create-db-sql [loader {:keys [database-name]}] + (format "CREATE DATABASE %s;" (qualify+quote-name loader database-name))) + +(defn default-drop-db-if-exists-sql [loader {:keys [database-name]}] + (format "DROP DATABASE IF EXISTS %s;" (qualify+quote-name loader database-name))) + +(defn default-create-table-sql [loader {:keys [database-name], :as dbdef} {:keys [table-name field-definitions]}] + (let [quot (partial quote-name loader) + pk-field-name (quot (pk-field-name loader))] + (format "CREATE TABLE %s (%s, %s %s, PRIMARY KEY (%s));" + (qualify+quote-name loader database-name table-name) + (->> field-definitions + (map (fn [{:keys [field-name base-type]}] + (format "%s %s" (quot field-name) (if (map? 
base-type) + (:native base-type) + (field-base-type->sql-type loader base-type))))) + (interpose ", ") + (apply str)) + pk-field-name (pk-sql-type loader) + pk-field-name))) + +(defn- default-drop-table-if-exists-sql [loader {:keys [database-name]} {:keys [table-name]}] + (format "DROP TABLE IF EXISTS %s;" (qualify+quote-name loader database-name table-name))) + +(defn default-add-fk-sql [loader {:keys [database-name]} {:keys [table-name]} {dest-table-name :fk, field-name :field-name}] + (let [quot (partial quote-name loader) + dest-table-name (name dest-table-name)] + (format "ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s);" + (qualify+quote-name loader database-name table-name) + (quot (format "FK_%s_%s_%s" table-name field-name dest-table-name)) + (quot field-name) + (qualify+quote-name loader database-name dest-table-name) + (quot (pk-field-name loader))))) + +(defn- default-qualified-name-components + ([_ db-name] + [db-name]) + ([_ db-name table-name] + [table-name]) + ([_ db-name table-name field-name] + [table-name field-name])) + +(defn- default-quote-name [_ nm] + (str \" nm \")) + +(defn- quote+combine-names [loader names] + (->> names + (map (partial quote-name loader)) + (interpose \.) + (apply str))) + +(defn- default-qualify+quote-name + ([loader db-name] + (quote+combine-names loader (qualified-name-components loader db-name))) + ([loader db-name table-name] + (quote+combine-names loader (qualified-name-components loader db-name table-name))) + ([loader db-name table-name field-name] + (quote+combine-names loader (qualified-name-components loader db-name table-name field-name)))) + +(defn- default-driver [loader] + (driver/engine->driver (i/engine loader))) + +(defn- default-database->spec [loader context {:keys [short-lived?], :as dbdef}] + (assoc ((:connection-details->spec (driver loader)) (i/database->connection-details loader context dbdef)) + :short-lived? short-lived? + :make-pool? false)) + +(defn default-korma-entity [loader {:keys [database-name], :as dbdef} {:keys [table-name]}] + (-> (k/create-entity (->> (qualified-name-components loader database-name table-name) + (interpose \.) ; we just want a table name like "table-name" or "db-name.dbo.table-name" here + (apply str))) ; korma will split on the periods and re-qualify the individual parts for us + (k/database (kdb/create-db (database->spec loader :db dbdef))))) + +(defn- default-load-table-data! [loader dbdef tabledef] + (let [rows (:rows tabledef) + fields-for-insert (mapv :field-name (:field-definitions tabledef)) + entity (korma-entity loader dbdef tabledef)] + ;; Insert groups of 200 rows at a time + ;; otherwise SQL Server will be *very* snippy if we try to run queries with too many parameters in them + (doseq [group (partition-all 200 rows)] + (k/insert entity (k/values (mapv (partial zipmap fields-for-insert) + (for [row group] + (for [v row] + (if (instance? java.util.Date v) (java.sql.Timestamp. (.getTime ^java.util.Date v)) + v))))))))) + +(defn default-execute-sql! [loader context dbdef sql] + (when (seq sql) + (try + (jdbc/execute! (database->spec loader context dbdef) [sql] :transaction? false, :multi?
true) + (catch java.sql.SQLException e + (println "Error executing SQL:" sql) + (println (format "Caught SQLException:\n%s" + (with-out-str (jdbc/print-sql-exception-chain e)))) + (throw e)) + (catch Throwable e + (println "Error executing SQL:" sql) + (println (format "Caught Exception: %s %s\n%s" (class e) (.getMessage e) + (with-out-str (.printStackTrace e)))) + (throw e))))) + + +(def DefaultsMixin + "Default implementations for methods marked *Optional* in `IGenericSQLDatasetLoader`." + {:add-fk-sql default-add-fk-sql + :create-db-sql default-create-db-sql + :create-table-sql default-create-table-sql + :database->spec default-database->spec + :driver default-driver + :drop-db-if-exists-sql default-drop-db-if-exists-sql + :drop-table-if-exists-sql default-drop-table-if-exists-sql + :execute-sql! default-execute-sql! + :korma-entity default-korma-entity + :load-table-data! default-load-table-data! + :pk-field-name (constantly "id") + :qualified-name-components default-qualified-name-components + :qualify+quote-name default-qualify+quote-name + :quote-name default-quote-name}) + + +;; ## ------------------------------------------------------------ IDatasetLoader impl ------------------------------------------------------------ + +(defn- create-db! [loader {:keys [table-definitions], :as dbdef}] + ;; Exec SQL for creating the DB + (execute-sql! loader :server dbdef (str (drop-db-if-exists-sql loader dbdef) ";\n" + (create-db-sql loader dbdef))) + + ;; Build combined statement for creating tables + FKs + (let [statements (atom [])] + + ;; Add the SQL for creating each Table + (doseq [tabledef table-definitions] + (swap! statements conj (drop-table-if-exists-sql loader dbdef tabledef)) + (swap! statements conj (create-table-sql loader dbdef tabledef))) + + ;; Add the SQL for adding FK constraints + (doseq [{:keys [field-definitions], :as tabledef} table-definitions] + (doseq [{:keys [fk], :as fielddef} field-definitions] + (when fk + (swap! statements conj (add-fk-sql loader dbdef tabledef fielddef))))) + + ;; exec the combined statement + (execute-sql! loader :db dbdef (apply str (interpose ";\n" @statements)))) + + ;; Now load the data for each Table + (doseq [tabledef table-definitions] + (load-table-data! loader dbdef tabledef))) + +(defn- destroy-db! [loader dbdef] + (execute-sql! loader :server dbdef (drop-db-if-exists-sql loader dbdef))) + +(def ^:const IDatasetLoaderMixin + "Mixin for `IGenericSQLDatasetLoader` types to implement `create-db!` and `destroy-db!` from `IDatasetLoader`." + {:create-db! create-db! + :destroy-db! destroy-db!}) diff --git a/test/metabase/test/data/h2.clj b/test/metabase/test/data/h2.clj index 9b3aa789bf20c74e04fe162bc22d11902ea09e81..c717cfd64ea955107e78af1a8d773f7f7fb41384 100644 --- a/test/metabase/test/data/h2.clj +++ b/test/metabase/test/data/h2.clj @@ -1,15 +1,11 @@ (ns metabase.test.data.h2 "Code for creating / destroying an H2 database from a `DatabaseDefinition`."
- (:require [clojure.java.io :as io] - [clojure.tools.logging :as log] + (:require [clojure.core.reducers :as r] [clojure.string :as s] (korma [core :as k] [db :as kdb]) (metabase.test.data [generic-sql :as generic] - [interface :refer :all])) - (:import (metabase.test.data.interface DatabaseDefinition - FieldDefinition - TableDefinition))) + [interface :as i]))) (def ^:private ^:const field-base-type->sql-type {:BigIntegerField "BIGINT" @@ -25,91 +21,76 @@ ;; ## DatabaseDefinition helper functions -(defn- connection-details - "Return a Metabase `Database.details` for H2 database defined by DATABASE-DEFINITION." - [^DatabaseDefinition {:keys [short-lived?], :as database-definition}] - {:db (format "mem:%s" (escaped-name database-definition)) - :short-lived? short-lived?}) +(def ^:private ^:dynamic *dbdef* + nil) -(defn- korma-connection-pool - "Return an H2 korma connection pool to H2 database defined by DATABASE-DEFINITION." - [^DatabaseDefinition database-definition] - (kdb/create-db (kdb/h2 (assoc (connection-details database-definition) - :naming {:keys s/lower-case - :fields s/upper-case})))) +(defn- database->connection-details + [_ context {:keys [short-lived?], :as dbdef}] + {:short-lived? short-lived? + :db (str "mem:" (i/escaped-name dbdef) (when (= context :db) + ;; Return details with the GUEST user added so SQL queries are allowed. + ";USER=GUEST;PASSWORD=guest"))}) -;; ## Implementation -(defn- format-for-h2 [obj] - (cond - (:database-name obj) (update-in obj [:table-definitions] (partial map format-for-h2)) - (:table-name obj) (-> obj - (update-in [:table-name] s/upper-case) - (update-in [:field-definitions] (partial map format-for-h2))) - (:field-name obj) (cond-> (update-in obj [:field-name] s/upper-case) - (:fk obj) (update-in [:fk] (comp s/upper-case name))))) +(defn quote-name [_ nm] + (str \" (s/upper-case nm) \")) +(defn- korma-entity [_ dbdef {:keys [table-name]}] + (-> (k/create-entity table-name) + (k/database (kdb/create-db (kdb/h2 (assoc (database->connection-details nil :db dbdef) + :naming {:keys s/lower-case + :fields s/upper-case})))))) -;; ## Public Concrete DatasetLoader instance - -;; For some reason this doesn't seem to work if we define IDatasetLoader methods inline, but does work when we explicitly use extend-protocol -(defrecord H2DatasetLoader [] - generic/IGenericSQLDatasetLoader - (generic/execute-sql! [_ database-definition raw-sql] - (log/debug raw-sql) - (k/exec-raw (korma-connection-pool database-definition) raw-sql)) - - (generic/korma-entity [_ database-definition table-definition] - (-> (k/create-entity (:table-name table-definition)) - (k/database (korma-connection-pool database-definition)))) - - (generic/pk-sql-type [_] "BIGINT AUTO_INCREMENT") - (generic/pk-field-name [_] "ID") - - (generic/field-base-type->sql-type [_ field-type] - (field-base-type->sql-type field-type))) +(defn create-db-sql [_ {:keys [short-lived?]}] + (str + ;; We don't need to actually do anything to create a database here. Just disable the undo + ;; log (i.e., transactions) for this DB session because the bulk operations to load data don't need to be atomic + "SET UNDO_LOG = 0;\n" + ;; Create a non-admin account 'GUEST' which will be used from here on out + "CREATE USER IF NOT EXISTS GUEST PASSWORD 'guest';\n" -(extend-protocol IDatasetLoader - H2DatasetLoader - (engine [_] - :h2) + ;; Set DB_CLOSE_DELAY here because only admins are allowed to do it, so we can't set it via the connection string. 
+ ;; Set it to -1 (no automatic closing) if the DB isn't "short-lived", + ;; otherwise set it to 1 (close after idling for 1 sec) so things like inserting rows persist long enough for us to + ;; run queries without us needing to start a connection pool + (format "SET DB_CLOSE_DELAY %d;" (if short-lived? 1 -1)))) - (database->connection-details [_ database-definition] - ;; Return details with the GUEST user added so SQL queries are allowed. - (let [details (connection-details database-definition)] - (update details :db str ";USER=GUEST;PASSWORD=guest"))) +(defn- create-table-sql [this dbdef {:keys [table-name], :as tabledef}] + (str + (generic/default-create-table-sql this dbdef tabledef) ";\n" - (drop-physical-db! [_ database-definition] - ;; Nothing to do here - there are no physical dbs <3 - ) + ;; Grant the GUEST account r/w permissions for this table + (format "GRANT ALL ON %s TO GUEST;" (quote-name this table-name)))) - (create-physical-table! [this database-definition table-definition] - (generic/create-physical-table! this database-definition (format-for-h2 table-definition))) - (create-physical-db! [this database-definition] - ;; Disable the undo log (i.e., transactions) for this DB session because the bulk operations to load data don't need to be atomic - (generic/execute-sql! this database-definition "SET UNDO_LOG = 0;") +(defrecord H2DatasetLoader []) - ;; Create the "physical" database which in this case actually just means creating the schema - (generic/create-physical-db! this (format-for-h2 database-definition)) - - ;; Now create a non-admin account 'GUEST' which will be used from here on out - (generic/execute-sql! this database-definition "CREATE USER IF NOT EXISTS GUEST PASSWORD 'guest';") - ;; Grant the GUEST account SELECT permissions for all the Tables in this DB - (doseq [{:keys [table-name]} (:table-definitions database-definition)] - (generic/execute-sql! this database-definition (format "GRANT SELECT ON %s TO GUEST;" table-name))) - - ;; If this isn't a "short-lived" database we need to set DB_CLOSE_DELAY to -1 here because only admins are allowed to do it - ;; so we can't set it via the connection string :/ - (when-not (:short-lived? database-definition) - (generic/execute-sql! this database-definition "SET DB_CLOSE_DELAY -1;"))) - - (load-table-data! [this database-definition table-definition] - (generic/load-table-data! this database-definition table-definition)) - - (drop-physical-table! [this database-definition table-definition] - (generic/drop-physical-table! this database-definition (format-for-h2 table-definition)))) +(extend H2DatasetLoader + generic/IGenericSQLDatasetLoader + (let [{:keys [execute-sql!], :as mixin} generic/DefaultsMixin] + (merge mixin + {:create-db-sql create-db-sql + :create-table-sql create-table-sql + :drop-db-if-exists-sql (constantly nil) + :korma-entity korma-entity + :pk-field-name (constantly "ID") + :pk-sql-type (constantly "BIGINT AUTO_INCREMENT") + :quote-name quote-name + :database->spec (fn [this context dbdef] + ;; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only + (kdb/h2 (i/database->connection-details this context dbdef))) + :execute-sql! (fn [this _ dbdef sql] + ;; we always want to use 'server' context when execute-sql! is called + ;; (never try to connect as GUEST, since we're not giving them privileges to create tables / etc) + (execute-sql!
this :server dbdef sql)) + :field-base-type->sql-type (fn [_ base-type] + (field-base-type->sql-type base-type))})) + + i/IDatasetLoader + (merge generic/IDatasetLoaderMixin + {:database->connection-details database->connection-details + :engine (constantly :h2)})) (defn dataset-loader [] (->H2DatasetLoader)) diff --git a/test/metabase/test/data/interface.clj b/test/metabase/test/data/interface.clj index ed844828cdbf8938f4c083e621d1e0f279745415..7d7ac5f97721d08d32854f0a1cc331b25ae9b72f 100644 --- a/test/metabase/test/data/interface.clj +++ b/test/metabase/test/data/interface.clj @@ -64,28 +64,22 @@ (engine [this] "Return the engine keyword associated with this database, e.g. `:h2` or `:mongo`.") - (database->connection-details [this ^DatabaseDefinition database-definition] - "Return the connection details map that should be used to connect to this database.") + (database->connection-details [this ^Keyword context, ^DatabaseDefinition database-definition] + "Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database` details map) + CONTEXT is one of: - ;; create-physical-database, etc. - (create-physical-db! [this ^DatabaseDefinition database-definition] - "Create a new database from DATABASE-DEFINITION, including adding tables, fields, and foreign key constraints. - This refers to the actual *DBMS* database itself, *not* a Metabase `Database` object. - This method should *not* add data to the database, create any metabase objects (such as `Database`), or trigger syncing.") + * `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for creating/destroying databases) + * `:db` - Return details for connecting specifically to the DB.") - (drop-physical-db! [this ^DatabaseDefinition database-definition] + (create-db! [this ^DatabaseDefinition database-definition] + "Create a new database from DATABASE-DEFINITION, including adding tables, fields, and foreign key constraints, + and add the appropriate data. This method should drop existing databases with the same name if applicable. + (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)") + + (destroy-db! [this ^DatabaseDefinition database-definition] "Destroy database, if any, associated with DATABASE-DEFINITION. This refers to destroying a *DBMS* database -- removing an H2 file, dropping a Postgres database, etc. - This does not need to remove corresponding Metabase definitions -- this is handled by `DatasetLoader`.") - - (create-physical-table! [this ^DatabaseDefinition database-definition, ^TableDefinition table-definition] - "Create a new DBMS table/collection/etc for TABLE-DEFINITION. Don't load any data.") - - (load-table-data! [this ^DatabaseDefinition database-definition, ^TableDefinition table-definition] - "Load data for the DMBS table/collection/etc. corresponding to TABLE-DEFINITION.") - - (drop-physical-table! [this ^DatabaseDefinition database-definition, ^TableDefinition table-definition] - "Drop the DBMS table/collection/etc. 
associated with TABLE-DEFINITION.")) + This does not need to remove corresponding Metabase definitions -- this is handled by `DatasetLoader`.")) ;; ## Helper Functions for Creating New Definitions diff --git a/test/metabase/test/data/mongo.clj b/test/metabase/test/data/mongo.clj index 8e21f47110cffbdcd53d3d0783ac1d923624557a..745fda03462d31e474623be4158e57f12429cbf0 100644 --- a/test/metabase/test/data/mongo.clj +++ b/test/metabase/test/data/mongo.clj @@ -3,37 +3,23 @@ (:require (monger [collection :as mc] [core :as mg]) [metabase.driver.mongo.util :refer [with-mongo-connection]] - [metabase.test.data.interface :refer :all]) - (:import (metabase.test.data.interface DatabaseDefinition - FieldDefinition - TableDefinition))) - -(defrecord MongoDatasetLoader []) -(extend-protocol IDatasetLoader - MongoDatasetLoader - (engine [_] - :mongo) - - (database->connection-details [_ database-definition] - {:dbname (escaped-name database-definition) - :host "localhost"}) - - ;; Nothing to do here ! DB created when we connect to it - (create-physical-db! [_ _]) - - (drop-physical-db! [this database-definition] - (with-open [mongo-connection (mg/connect (database->connection-details this database-definition))] - (mg/drop-db mongo-connection (escaped-name database-definition)))) - - ;; Nothing to do here, collection is created when we add documents to it - (create-physical-table! [_ _ _]) - - (drop-physical-table! [this database-definition {:keys [table-name]}] - (with-mongo-connection [^com.mongodb.DB mongo-db (database->connection-details this database-definition)] - (mc/drop mongo-db (name table-name)))) - - (load-table-data! [this database-definition {:keys [field-definitions table-name rows]}] - (with-mongo-connection [^com.mongodb.DB mongo-db (database->connection-details this database-definition)] + [metabase.test.data.interface :as i])) + +(defn- database->connection-details + ([dbdef] + {:dbname (i/escaped-name dbdef) + :host "localhost"}) + ([_ _ dbdef] + (database->connection-details dbdef))) + +(defn- destroy-db! [_ dbdef] + (with-open [mongo-connection (mg/connect (database->connection-details dbdef))] + (mg/drop-db mongo-connection (i/escaped-name dbdef)))) + +(defn- create-db! [this {:keys [table-definitions], :as dbdef}] + (destroy-db! this dbdef) + (with-mongo-connection [^com.mongodb.DB mongo-db (database->connection-details dbdef)] + (doseq [{:keys [field-definitions table-name rows]} table-definitions] (let [field-names (->> field-definitions (map :field-name) (map keyword))] @@ -52,5 +38,15 @@ ;; If row already exists then nothing to do (catch com.mongodb.MongoException _)))))))) + +(defrecord MongoDatasetLoader []) + +(extend MongoDatasetLoader + i/IDatasetLoader + {:create-db! create-db! + :destroy-db! destroy-db! + :database->connection-details database->connection-details + :engine (constantly :mongo)}) + (defn ^MongoDatasetLoader dataset-loader [] (->MongoDatasetLoader)) diff --git a/test/metabase/test/data/mysql.clj b/test/metabase/test/data/mysql.clj index e6cf15679eae8fcbd23998bb86a59cceab5e9ea8..8e38ba533e9c87c6bfc1b875d8065401bb07204a 100644 --- a/test/metabase/test/data/mysql.clj +++ b/test/metabase/test/data/mysql.clj @@ -1,15 +1,9 @@ (ns metabase.test.data.mysql "Code for creating / destroying a MySQL database from a `DatabaseDefinition`." 
- (:require [clojure.java.jdbc :as jdbc] - [clojure.tools.logging :as log] + (:require [clojure.string :as s] [environ.core :refer [env]] - (korma [core :as k] - [db :as kdb]) (metabase.test.data [generic-sql :as generic] - [interface :refer :all])) - (:import (metabase.test.data.interface DatabaseDefinition - FieldDefinition - TableDefinition))) + [interface :as i]))) (def ^:private ^:const field-base-type->sql-type {:BigIntegerField "BIGINT" @@ -23,78 +17,41 @@ :TextField "TEXT" :TimeField "TIME"}) -(defn- mysql-connection-details [^DatabaseDefinition {:keys [short-lived?]}] - {:host "localhost" - :port 3306 - :short-lived? short-lived? - :user (if (env :circleci) "ubuntu" - "root")}) - -(defn- db-connection-details [^DatabaseDefinition database-definition] - (assoc (mysql-connection-details database-definition) - :db (:database-name database-definition))) - -(defn- execute! [scope ^DatabaseDefinition database-definition & format-strings] - (jdbc/execute! (-> ((case scope - :mysql mysql-connection-details - :db db-connection-details) database-definition) - kdb/mysql - (assoc :make-pool? false)) - [(apply format format-strings)] - :transaction? false)) - - -(defrecord MySQLDatasetLoader [] +(defn- database->connection-details [_ context {:keys [database-name short-lived?]}] + (merge {:host "localhost" + :port 3306 + :timezone :America/Los_Angeles + :short-lived? short-lived? + :user (if (env :circleci) "ubuntu" + "root")} + (when (= context :db) + {:db database-name}))) + +(defn- quote-name [_ nm] + (str \` nm \`)) + +;; MySQL's JDBC driver doesn't support executing multiple SQL statements at once +;; so split them up and execute them one-at-a-time +(defn- execute-sql! [loader context dbdef sql] + (doseq [statement (map s/trim (s/split sql #";+"))] + (when (seq statement) + (generic/default-execute-sql! loader context dbdef statement)))) + +(defrecord MySQLDatasetLoader []) + +(extend MySQLDatasetLoader generic/IGenericSQLDatasetLoader - (generic/execute-sql! [_ database-definition raw-sql] - (log/debug raw-sql) - (execute! :db database-definition raw-sql)) - - (generic/korma-entity [_ database-definition table-definition] - (-> (k/create-entity (:table-name table-definition)) - (k/database (-> (db-connection-details database-definition) - kdb/mysql - (assoc :make-pool? false) - kdb/create-db)))) - - (generic/pk-sql-type [_] "INTEGER NOT NULL AUTO_INCREMENT") - (generic/pk-field-name [_] "id") - - (generic/field-base-type->sql-type [_ field-type] - (if (map? field-type) (:native field-type) - (field-base-type->sql-type field-type)))) - -(extend-protocol IDatasetLoader - MySQLDatasetLoader - (engine [_] - :mysql) - - (database->connection-details [_ database-definition] - (assoc (db-connection-details database-definition) - :timezone :America/Los_Angeles)) - - (drop-physical-db! [_ database-definition] - (execute! :mysql database-definition "DROP DATABASE IF EXISTS `%s`;" (:database-name database-definition))) - - (drop-physical-table! [this database-definition table-definition] - (generic/drop-physical-table! this database-definition table-definition)) - - (create-physical-table! [this database-definition table-definition] - (generic/create-physical-table! this database-definition table-definition)) - - (create-physical-db! [this database-definition] - (drop-physical-db! this database-definition) - (execute! 
:mysql database-definition "CREATE DATABASE `%s`;" (:database-name database-definition)) - - ;; double check that we can connect to the newly created DB - (metabase.driver/can-connect-with-details? :mysql (db-connection-details database-definition) :rethrow-exceptions) - - ;; call the generic implementation to create Tables + FKs - (generic/create-physical-db! this database-definition)) - - (load-table-data! [this database-definition table-definition] - (generic/load-table-data! this database-definition table-definition))) + (merge generic/DefaultsMixin + {:execute-sql! execute-sql! + :pk-sql-type (constantly "INTEGER NOT NULL AUTO_INCREMENT") + :quote-name quote-name + :field-base-type->sql-type (fn [_ base-type] + (field-base-type->sql-type base-type))}) + i/IDatasetLoader + (merge generic/IDatasetLoaderMixin + {:database->connection-details database->connection-details + :engine (constantly :mysql)})) (defn dataset-loader [] - (map->MySQLDatasetLoader {:quote-character \`})) + (->MySQLDatasetLoader)) diff --git a/test/metabase/test/data/postgres.clj b/test/metabase/test/data/postgres.clj index eb93d87a52aa1c18af38c81b0ba94953cf4c2c9a..f34f7e007fac4bb56cee29dd80f946fbf1a52c90 100644 --- a/test/metabase/test/data/postgres.clj +++ b/test/metabase/test/data/postgres.clj @@ -1,15 +1,8 @@ (ns metabase.test.data.postgres "Code for creating / destroying a Postgres database from a `DatabaseDefinition`." - (:require [clojure.java.jdbc :as jdbc] - [clojure.tools.logging :as log] - [environ.core :refer [env]] - (korma [core :as k] - [db :as kdb]) + (:require [environ.core :refer [env]] (metabase.test.data [generic-sql :as generic] - [interface :refer :all])) - (:import (metabase.test.data.interface DatabaseDefinition - FieldDefinition - TableDefinition))) + [interface :as i]))) (def ^:private ^:const field-base-type->sql-type {:BigIntegerField "BIGINT" @@ -24,79 +17,33 @@ :TimeField "TIME" :UUIDField "UUID"}) -(defn- pg-connection-details [^DatabaseDefinition {:keys [short-lived?]}] +(defn- database->connection-details [_ context {:keys [database-name short-lived?]}] (merge {:host "localhost" :port 5432 + :timezone :America/Los_Angeles :short-lived? short-lived?} - ;; HACK (when (env :circleci) - {:user "ubuntu"}))) + {:user "ubuntu"}) + (when (= context :db) + {:db database-name}))) -(defn- db-connection-details [^DatabaseDefinition database-definition] - (assoc (pg-connection-details database-definition) - :db (:database-name database-definition))) +(defn- drop-table-if-exists-sql [_ _ {:keys [table-name]}] + (format "DROP TABLE IF EXISTS \"%s\" CASCADE;" table-name)) -(defn- execute! [scope ^DatabaseDefinition database-definition & format-strings] - (jdbc/execute! (-> ((case scope - :pg pg-connection-details - :db db-connection-details) database-definition) - kdb/postgres - (assoc :make-pool? false)) - [(apply format format-strings)] - :transaction? false)) +(defrecord PostgresDatasetLoader []) -(defrecord PostgresDatasetLoader [] +(extend PostgresDatasetLoader generic/IGenericSQLDatasetLoader - (generic/execute-sql! [_ database-definition raw-sql] - (log/debug raw-sql) - (execute! :db database-definition raw-sql)) - - (generic/korma-entity [_ database-definition table-definition] - (-> (k/create-entity (:table-name table-definition)) - (k/database (-> (db-connection-details database-definition) - kdb/postgres - (assoc :make-pool? false) - kdb/create-db)))) - - (generic/pk-sql-type [_] "SERIAL") - (generic/pk-field-name [_] "id") - - (generic/field-base-type->sql-type [_ field-type] - (if (map? 
field-type) (:native field-type) - (field-base-type->sql-type field-type)))) - -(extend-protocol IDatasetLoader - PostgresDatasetLoader - (engine [_] - :postgres) - - (database->connection-details [_ database-definition] - (assoc (db-connection-details database-definition) - :timezone :America/Los_Angeles)) - - (drop-physical-db! [_ database-definition] - (execute! :pg database-definition "DROP DATABASE IF EXISTS \"%s\";" (:database-name database-definition))) - - (drop-physical-table! [this database-definition table-definition] - (generic/drop-physical-table! this database-definition table-definition)) - - (create-physical-table! [this database-definition table-definition] - (generic/create-physical-table! this database-definition table-definition)) - - (create-physical-db! [this {:keys [database-name], :as database-definition}] - (drop-physical-db! this database-definition) - (execute! :pg database-definition "CREATE DATABASE \"%s\";" database-name) - - ;; double check that we can connect to the newly created DB - (metabase.driver/can-connect-with-details? :postgres (db-connection-details database-definition) :rethrow-exceptions) - - ;; call the generic implementation to create Tables + FKs - (generic/create-physical-db! this database-definition)) - - (load-table-data! [this database-definition table-definition] - (generic/load-table-data! this database-definition table-definition))) - + (merge generic/DefaultsMixin + {:drop-table-if-exists-sql drop-table-if-exists-sql + :pk-sql-type (constantly "SERIAL") + :field-base-type->sql-type (fn [_ base-type] + (field-base-type->sql-type base-type))}) + i/IDatasetLoader + (merge generic/IDatasetLoaderMixin + {:database->connection-details database->connection-details + :engine (constantly :postgres)})) (defn dataset-loader [] (->PostgresDatasetLoader)) diff --git a/test/metabase/test/data/sqlserver.clj b/test/metabase/test/data/sqlserver.clj new file mode 100644 index 0000000000000000000000000000000000000000..677b945f67ff6c6b9f7b678b622b2595a03a5d77 --- /dev/null +++ b/test/metabase/test/data/sqlserver.clj @@ -0,0 +1,122 @@ +(ns metabase.test.data.sqlserver + "Code for creating / destroying a SQLServer database from a `DatabaseDefinition`." + (:require [clojure.java.jdbc :as jdbc] + [clojure.string :as s] + [environ.core :refer [env]] + [metabase.driver.sqlserver :refer [sqlserver]] + (metabase.test.data [generic-sql :as generic] + [interface :as i]))) + +(def ^:private ^:const field-base-type->sql-type + {:BigIntegerField "BIGINT" + :BooleanField "BIT" + :CharField "VARCHAR(254)" + :DateField "DATE" + :DateTimeField "DATETIME" + :DecimalField "DECIMAL" + :FloatField "FLOAT" + :IntegerField "INTEGER" + :TextField "TEXT" + :TimeField "TIME"}) + +(def ^:private db-name-counter + "We destroy and create the same temporary databases several times when running our query processor tests. + + To kick other users off of the database when we destroy it, we `ALTER DATABASE SET SINGLE_USER ROLLBACK IMMEDIATE`. + This has the side effect of preventing any other connections to the database. If our tests barf for any reason, + we're left with a database that can't be connected to until the hanging connection gets killed at some indeterminate point in the future. + In other cases, JDBC will attempt to reuse connections to the same database, which fail once it's in SINGLE_USER mode. + + To prevent our tests from failing for silly reasons, we'll instead generate database names like `sad-toucan-incidents_100`.
We'll pick + a random number to start with, and for each subsequent database we create during the test run we'll increment this counter. Thus, + we'll create `sad-toucan-incidents_101`, then `tupac-sightings_102`, and so forth." + (atom (rand-int 10000))) + +(defn- +suffix [db-name] + (str db-name \_ @db-name-counter)) + +(defn- get-db-env-var + "Since we run our tests on non-Windows machines, we need to connect to a remote server for running tests. + Look up the relevant env var or throw an exception if it's not set. + + (get-db-env-var :user) ; Look up `MB_SQL_SERVER_USER`" + [env-var & [default]] + (or (env (keyword (format "mb-sql-server-%s" (name env-var)))) + default + (throw (Exception. (format "In order to test SQL Server, you must specify the env var MB_SQL_SERVER_%s." + (s/upper-case (name env-var))))))) + +(defn- database->connection-details [_ context {:keys [database-name short-lived?]}] + {:host (get-db-env-var :host) + :port (Integer/parseInt (get-db-env-var :port "1433")) + :user (get-db-env-var :user) + :password (get-db-env-var :password) + :db (when (= context :db) + (+suffix database-name)) + :short-lived? short-lived?}) + + +(defn- drop-db-if-exists-sql [_ {:keys [database-name]}] + ;; Kill all open connections to the DB & drop it + (apply format "IF EXISTS (SELECT name FROM master.dbo.sysdatabases WHERE name = N'%s') + BEGIN + ALTER DATABASE \"%s\" SET SINGLE_USER WITH ROLLBACK IMMEDIATE; + DROP DATABASE \"%s\"; + END;" + (repeat 3 (+suffix database-name)))) + +(defn- drop-table-if-exists-sql [_ {:keys [database-name]} {:keys [table-name]}] + (let [db-name (+suffix database-name)] + (format "IF object_id('%s.dbo.%s') IS NOT NULL DROP TABLE \"%s\".dbo.\"%s\";" db-name table-name db-name table-name))) + +(defn- qualified-name-components + ([_ db-name] + [(+suffix db-name)]) + ([_ db-name table-name] + [(+suffix db-name) "dbo" table-name]) + ([_ db-name table-name field-name] + [(+suffix db-name) "dbo" table-name field-name])) + + +(defrecord SQLServerDatasetLoader []) + +(extend SQLServerDatasetLoader + generic/IGenericSQLDatasetLoader + (merge generic/DefaultsMixin + {:drop-db-if-exists-sql drop-db-if-exists-sql + :drop-table-if-exists-sql drop-table-if-exists-sql + :field-base-type->sql-type (fn [_ base-type] (field-base-type->sql-type base-type)) + :pk-sql-type (constantly "INT IDENTITY(1,1)") + :qualified-name-components qualified-name-components}) + i/IDatasetLoader + (let [{:keys [create-db!], :as mixin} generic/IDatasetLoaderMixin] + (merge mixin + {:create-db! (fn [this dbdef] + (swap! db-name-counter inc) + (create-db! this dbdef)) + :database->connection-details database->connection-details + :engine (constantly :sqlserver)}))) + +(defn dataset-loader [] + (->SQLServerDatasetLoader)) + + +(defn- cleanup-leftover-dbs + "Clean up any leftover DBs that weren't destroyed by the last test run (eg, if it failed for some reason). + This is important because we're limited to a quota of 30 DBs on RDS." + {:expectations-options :before-run} + [] + (when (contains? @(resolve 'metabase.test.data.datasets/test-dataset-names) :sqlserver) + (let [connection-spec ((sqlserver :connection-details->spec) (database->connection-details nil :server nil)) + leftover-dbs (mapv :name (jdbc/query connection-spec "SELECT name + FROM master.dbo.sysdatabases + WHERE name NOT IN ('tempdb', 'master', 'model', 'msdb', 'rdsadmin');"))] + (with-redefs [+suffix identity] + (doseq [db leftover-dbs] + (try + (println (format "Deleting leftover SQL Server DB '%s'..." db)) + ;; (jdbc/execute! 
connection-spec [(drop-db-if-exists-sql nil {:database-name db})]) + ;; Don't try to kill other connections to this DB with SET SINGLE_USER -- some other instance (eg CI) might be using it + (jdbc/execute! connection-spec [(format "DROP DATABASE \"%s\";" db)]) + (println "[ok]") + (catch Throwable _))))))) diff --git a/test/metabase/test/util/q.clj b/test/metabase/test/util/q.clj index c41f6d5894dacb0eeb73271954a0262abbe05adf..294dfa59bc20086104e208134774074c80b4f20c 100644 --- a/test/metabase/test/util/q.clj +++ b/test/metabase/test/util/q.clj @@ -63,8 +63,8 @@ (defn resolve-dataset [dataset] (var-get (core/or (resolve dataset) - (ns-resolve 'metabase.test.data.dataset-definitions dataset) - (throw (Exception. (format "Don't know how to find dataset '%s'." dataset)))))) + (ns-resolve 'metabase.test.data.dataset-definitions dataset) + (throw (Exception. (format "Don't know how to find dataset '%s'." dataset)))))) ;;; # DSL KEYWORD MACROS diff --git a/test/metabase/test_util.clj b/test/metabase/test_util.clj index 58f7fb906567ba6ec44f5f6c27ae8f776bf002ae..98bbdfc4ca83917fc5548a4c1cfd1250274cfc01 100644 --- a/test/metabase/test_util.clj +++ b/test/metabase/test_util.clj @@ -81,3 +81,24 @@ (expect -7 ((rpartial - 5 10) 8)) + + +;;; ## cond-as-> +(expect 100 + (cond-as-> 100 <>)) + +(expect 106 + (cond-as-> 100 <> + true (+ 1 <>) + false (+ 10 <>) + :ok (+ 5 <>))) + +(expect 101 + (cond-as-> 100 <> + (odd? <>) (inc <>) + (even? <>) (inc <>))) + +(expect 102 + (cond-as-> 100 <> + (even? <>) (inc <>) + (odd? <>) (inc <>)))
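For reference, the behavior these expectations pin down could be provided by a macro along the following lines. This is only a minimal sketch consistent with the tests above; the actual `cond-as->` definition elsewhere in the codebase may differ in detail.

;; Illustrative sketch only -- not necessarily the canonical implementation.
(defmacro cond-as->
  "Anaphoric conditional threading: bind EXPR to NAME, then for each TEST/FORM pair
   rebind NAME to FORM when TEST is truthy, leaving it unchanged otherwise.
   Both TEST and FORM can refer to the current value via NAME."
  [expr name & clauses]
  (assert (even? (count clauses)) "cond-as-> requires an even number of TEST/FORM clauses")
  `(let [~name ~expr
         ~@(mapcat (fn [[test form]]
                     [name `(if ~test ~form ~name)])
                   (partition 2 clauses))]
     ~name))

;; e.g. (cond-as-> 100 <> (odd? <>) (inc <>) (even? <>) (inc <>)) behaves like
;; (let [<> 100, <> (if (odd? <>) (inc <>) <>), <> (if (even? <>) (inc <>) <>)] <>) ;=> 101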