Skip to content
Snippets Groups Projects
Commit da136a5e authored by Tom Robinson's avatar Tom Robinson
Browse files

Merge branch 'master' of github.com:metabase/metabase into binning-strategies

parents 95d47581 136dfb17
No related branches found
No related tags found
No related merge requests found
Showing
with 917 additions and 18 deletions
......@@ -42,6 +42,7 @@
"flowtype/use-flow-type": 1
},
"globals": {
"pending": false
},
"env": {
"browser": true,
......
[ignore]
.*/node_modules/react/node_modules/.*
.*/node_modules/postcss-import/node_modules/.*
.*/node_modules/documentation/.*
.*/node_modules/.*/\(lib\|test\).*\.json$
[include]
......
......@@ -2,7 +2,7 @@
# images hosted on Docker Hub https://hub.docker.com/r/metabase/metabase/ which use the
# Dockerfile located at ./bin/docker/Dockerfile
FROM java:openjdk-7-jre-alpine
FROM java:openjdk-8-jre-alpine
ENV JAVA_HOME=/usr/lib/jvm/default-jvm
ENV PATH /usr/local/bin:$PATH
......
{
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "Deploys Metabase by creating an ElasticBeanstalk application contained in a CloudFormation stack.",
"Parameters": {
"MetabaseVersion": {
"Description": "Metabase Version",
"Type": "String",
"Default": "@@MB_TAG@@"
},
"KeyName": {
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the AWS Elastic Beanstalk instance",
"Type": "AWS::EC2::KeyPair::KeyName"
},
"instanceType": {
"Description": "The Type of EC2 instance to use for running Metabase",
"Type": "String",
"Default": "t2.small"
},
"VPCid": {
"Description": "The VPC to use for running Metabase",
"Type": "AWS::EC2::VPC::Id"
},
"Subnets": {
"Description": "The VPC subnet(s) to use for running Metabase",
"Type": "List<AWS::EC2::Subnet::Id>"
}
},
"Resources": {
"metabaseApplication": {
"Type": "AWS::ElasticBeanstalk::Application",
"Properties": {
"Description": "Metabase Application",
"ApplicationVersions": [
{
"VersionLabel": {
"Ref": "MetabaseVersion"
},
"Description": "Metabase Application Version",
"SourceBundle": {
"S3Bucket": "downloads.metabase.com",
"S3Key": {
"Fn::Join": [
"/",
[
{
"Ref": "MetabaseVersion"
},
"metabase-aws-eb.zip"
]
]
}
}
}
],
"ConfigurationTemplates": [
{
"TemplateName": "MetabaseConfiguration",
"Description": "Metabase Application Configuration",
"SolutionStackName": "64bit Amazon Linux 2017.03 v2.7.0 running Docker 17.03.1-ce",
"OptionSettings": [
{
"Namespace": "aws:autoscaling:launchconfiguration",
"OptionName": "EC2KeyName",
"Value": {
"Ref": "KeyName"
}
},
{
"Namespace": "aws:rds:dbinstance",
"OptionName": "DBEngine",
"Value": "postgres"
},
{
"Namespace": "aws:rds:dbinstance",
"OptionName": "DBAllocatedStorage",
"Value": "10"
},
{
"Namespace": "aws:rds:dbinstance",
"OptionName": "DBInstanceClass",
"Value": "db.t2.small"
},
{
"Namespace": "aws:rds:dbinstance",
"OptionName": "MultiAZDatabase",
"Value": "false"
},
{
"Namespace": "aws:rds:dbinstance",
"OptionName": "DBDeletionPolicy",
"Value": "Snapshot"
},
{
"Namespace": "aws:autoscaling:launchconfiguration",
"OptionName": "InstanceType",
"Value": {
"Ref": "instanceType"
}
},
{
"Namespace": "aws:ec2:vpc",
"OptionName": "VPCId",
"Value": {
"Ref": "VPCid"
}
},
{
"Namespace": "aws:ec2:vpc",
"OptionName": "Subnets",
"Value": {
"Fn::Join": [
",",
{
"Ref": "Subnets"
}
]
}
}
]
}
]
}
},
"metabaseEnvironment": {
"Type": "AWS::ElasticBeanstalk::Environment",
"Properties": {
"ApplicationName": {
"Ref": "metabaseApplication"
},
"Description": "AWS Elastic Beanstalk Environment for Metabase",
"TemplateName": "MetabaseConfiguration",
"VersionLabel": {
"Ref": "MetabaseVersion"
}
}
}
},
"Outputs": {
"URL": {
"Description": "Metabase URL",
"Value": {
"Fn::GetAtt": [
"metabaseEnvironment",
"EndpointURL"
]
}
}
}
}
......@@ -12,6 +12,9 @@ export LANG=en_US.UTF-8
export LANGUAGE=$LANG
export LC_ALL=$LANG
CF_TEMPLATE=cloudformation-elasticbeanstalk.json
CF_TEMPLATE_PATH=/tmp/$CF_TEMPLATE
S3_CF_TEMPLATE_PATH=s3://$ARTIFACTS_S3BUCKET/eb/$CF_TEMPLATE
make_eb_version() {
MB_TAG=$1
......@@ -37,6 +40,9 @@ make_eb_version() {
sed "s/@@MB_REPOSITORY@@/${MB_DOCKER_REPOSITORY}/" < ${BASEDIR}/Dockerrun.aws.json.template > ${BASEDIR}/Dockerrun.aws.json.tmp
sed "s/@@MB_TAG@@/${MB_TAG}/" < ${BASEDIR}/Dockerrun.aws.json.tmp > ${BASEDIR}/Dockerrun.aws.json
# set the default version in the cloudformation template from the template-template (yo dawg i heard you like templates ;)
sed "s/@@MB_TAG@@/${MB_TAG}/" < ${BASEDIR}/$CF_TEMPLATE.template > $CF_TEMPLATE_PATH
# create our EB zip file
cd $BASEDIR; zip -r ${RELEASE_FILE} .ebextensions Dockerrun.aws.json; cd $CURRENTDIR
......@@ -57,6 +63,10 @@ upload_eb_version() {
echo "uploading /tmp/${MB_TAG}.zip -> $ARTIFACTS_S3BUCKET/eb/"
aws s3 cp /tmp/${MB_TAG}.zip s3://$ARTIFACTS_S3BUCKET/eb/${MB_TAG}.zip
echo "uploading $CF_TEMPLATE_PATH -> $S3_CF_TEMPLATE_PATH"
aws s3 cp $CF_TEMPLATE_PATH $S3_CF_TEMPLATE_PATH
}
create_eb_version() {
......
......@@ -119,4 +119,4 @@ fi
# Launch the application
# exec is here twice on purpose to ensure that metabase runs as PID 1 (the init process)
# and thus receives signals sent to the container. This allows it to shutdown cleanly on exit
exec su metabase -s /bin/sh -c "exec java $JAVA_OPTS -jar /app/metabase.jar"
exec su metabase -s /bin/sh -c "exec java $JAVA_OPTS -jar /app/metabase.jar $@"
......@@ -74,6 +74,24 @@ This is also where you set mark special fields in a table:
* Entity Name — different from the entity key, this is the field whose heading represents what each row in the table *is*. For example, in a Users table, the User column might be the entity name.
* Foreign Key — this is a field in this table that uniquely identifies a *row* in another table. In other words, this is a field that, almost always, points to the primary key of another table. For example, in a Products table, you might have a Customer ID field that points to a Customers table, where Customer ID is the primary key.
### Remapping field values
One thing that happens commonly in tables is that you'll have a foreign key field, like `Product ID`, with a bunch of ID values in it, when what you actually want to see most of the time is the entity name, like the `Product Title`. You might also have fields which contain coded values that you'd prefer to show up as translated or readable values in your tables and charts — like changing `0`, `1`, and `2` to `Female`, `Male`, and `Other` for example.
To do this in Metabase, click on the gear icon to the right of a field's Type dropdown in the Data Model section of the Admin Panel. You'll see a form with these options:
![Remapping form](./images/remapping/form.png)
`Visibility` and `Type` are the same as on the main Data Model page, but `Display values` lets you choose to swap out a field's values with something else.
Foreign key remapping lets you swap out a foreign key's values with the values of any other field in the connected table. In this example, we're swapping out the `Product ID` field's values with the values in the `Title` field in the Product table:
![Remapping form](./images/remapping/fk-mapping.png)
Another option is custom remapping, which is currently only possible for numeric fields. This lets you map every number that occurs in this field to either a different numeric value or even to a text value, like in this example:
![Remapping form](./images/remapping/custom-mapping.png)
---
## Next: managing users
......
docs/administration-guide/images/remapping/custom-mapping.png

40.9 KiB

docs/administration-guide/images/remapping/fk-mapping.png

21 KiB

docs/administration-guide/images/remapping/form.png

53.3 KiB

......@@ -2,6 +2,8 @@
Metabase provides an official Docker image via Dockerhub that can be used for deployments on any system that is running Docker.
If you're trying to upgrade your Metabase version on Docker, check out these [upgrading instructions](./start.md#upgrading-metabase).
### Launching Metabase on a new container
Here's a quick one-liner to get you off the ground (please note, we recommend further configuration for production deployments below):
......@@ -69,7 +71,35 @@ In this scenario all you need to do is make sure you launch Metabase with the co
Keep in mind that Metabase will be connecting from within your docker container, so make sure that either you're using a fully qualified hostname or that you've set a proper entry in your container's `/etc/hosts` file.
See instructions for [migrating from H2 to MySQL or Postgres](./start.md#migrating-from-using-the-h2-database-to-mysql-or-postgres).
### Migrating from H2 to Postgres as the Metabase application database
For general information, see instructions for [migrating from H2 to MySQL or Postgres](./start.md#migrating-from-using-the-h2-database-to-mysql-or-postgres).
To migrate an existing Metabase container from an H2 application database to another database container (e.g. Postgres, MySQL), there are a few considerations to keep in mind:
* The target database container must be accessible (i.e. on an available network)
* The target database container must be supported (e.g. MySQL, Postgres)
* The existing H2 database should be [mapped outside the running container](#mounting-a-mapped-file-storage-volume)
The migration process involves 2 main steps:
1. Stop the existing Metabase container
2. Run a new, temporary Metabase container to perform the migration
Using a Postgres container as the target, here's an example invocation:
docker run --name metabase-migration \
-v /path/metabase/data:/metabase-data \
-e "MB_DB_FILE=/metabase-data/metabase.db" \
-e "MB_DB_TYPE=postgres" \
-e "MB_DB_DBNAME=metabase" \
-e "MB_DB_PORT=5432" \
-e "MB_DB_USER=<username>" \
-e "MB_DB_PASS=<password>" \
-e "MB_DB_HOST=my-database-host" \
metabase/metabase load-from-h2
To further explain the example: in addition to specifying the target database connection details, set the `MB_DB_FILE` environment variable for the source H2 database location, and pass the argument `load-from-h2` to begin migrating.
### Setting the Java Timezone
......@@ -103,12 +133,12 @@ The DB contents will be left in a directory named metabase.db.
Note that some older versions of metabase stored their db in a different default location.
docker cp CONTAINER_ID:/metabase.db.mv.db metabase.db.mv.db
### Fixing OutOfMemoryErrors in some hosted environments
On some hosts Metabase can fail to start with an error message like:
java.lang.OutOfMemoryError: Java heap space
If that happens, you'll need to set a JVM option to manually configure the maximum amount of memory the JVM uses for the heap. Refer
to [these instructions](./start.md#metabase-fails-to-start-due-to-heap-space-outofmemoryerrors) for details on how to do that.
......@@ -43,7 +43,7 @@ Community support only at this time, but we have reports of Metabase instances r
# Upgrading Metabase
Before you attempt to upgrade Metabase, you should make a backup of the database just in case. While it is unlikely you will need to rollback, it will do wonders for your peace of mind.
Before you attempt to upgrade Metabase, you should make a backup of the application database just in case. While it is unlikely you will need to roll back, it will do wonders for your peace of mind.
How you upgrade Metabase depends on how you are running it. See below for information on how to update Metabase on managed platforms.
......@@ -51,14 +51,14 @@ How you upgrade Metabase depends on how you are running it. See below for inform
#### Docker Image
If you are running it via docker, then you simply kill the docker process, and start a new container with the latest image. On startup, Metabase will perform any upgrade tasks it needs to perform, and once it is finished, you'll be running the new version.
If you are running Metabase via docker, then you simply need to kill the Docker process and start a new container with the latest Metabase image. On startup, Metabase will perform any upgrade tasks it needs to perform, and once it's finished you'll be running the new version.
#### Jar file
If you are running the JVM Jar file directly, then you simply kill the process, and restart the server. On startup, Metabase will perform any upgrade tasks it needs to perform, and once it is finished, you'll be running the new version.
If you are running the JVM Jar file directly, then you simply kill the process and restart the server. On startup, Metabase will perform any upgrade tasks it needs to perform, and once it's finished you'll be running the new version.
#### Mac OS X Application
If you are using the Metabase app, you will be notified when there is a new version available. You will see a dialog displaying the changes in the latest version and prompt you to upgrade.
#### macOS Application
If you are using the Metabase macOS app, you will be notified when there is a new version available. You will see a dialog displaying the changes in the latest version and prompting you to upgrade.
![Autoupdate Confirmation Dialog](images/AutoupdateScreenshot.png)
......@@ -113,7 +113,7 @@ You'll just need to set a JVM option to let it know explicitly how much memory i
java -Xmx2g -jar metabase.jar
Adjust this number as appropriate for your shared hosting instance. Make sure to set the number lower than the total amount of RAM available on your instance, because Metabase isn't the only process that'll be running. Generally, leaving 1-2 GB of RAM for these other processes should be enough; for example, you might set `-Xmx` to `1g` for an instance with 2 GB of RAM, `2g` for one with 4 GB of RAM, `6g` for an instance with 8 GB of RAM, and so forth. You may need to experment with these settings a bit to find the right number.
Adjust this number as appropriate for your shared hosting instance. Make sure to set the number lower than the total amount of RAM available on your instance, because Metabase isn't the only process that'll be running. Generally, leaving 1-2 GB of RAM for these other processes should be enough; for example, you might set `-Xmx` to `1g` for an instance with 2 GB of RAM, `2g` for one with 4 GB of RAM, `6g` for an instance with 8 GB of RAM, and so forth. You may need to experiment with these settings a bit to find the right number.
As above, you can use the environment variable `JAVA_TOOL_OPTIONS` to set JVM args instead of passing them directly to `java`. This is useful when running the Docker image,
for example.
......@@ -128,7 +128,7 @@ On Windows 10, if you see an error message like
Exception in thread "main" java.lang.AssertionError: Assert failed: Unable to connect to Metabase DB.
when running the JAR, you can unblock the file by right-clicking, clicking "Properties", and then clicking "Unblock".
when running the JAR, you can unblock the file by right-clicking, clicking "Properties," and then clicking "Unblock."
See Microsoft's documentation [here](https://blogs.msdn.microsoft.com/delay/p/unblockingdownloadedfile/) for more details on unblocking downloaded files.
There are a few other reasons why Metabase might not be able to connect to your H2 DB. Metabase connects to the DB over a TCP port, and it's possible
......
## Asking Questions
## Asking questions
---
Metabase's two core concepts are questions and their corresponding answers. Everything else is based around questions and answers. To ask Metabase a question, click the New Question button at the top of the screen to go to the question builder.
Metabase's two core concepts are questions and their corresponding answers. Everything else is based around questions and answers. To ask Metabase a question, click the New Question button at the top of the screen to go to the question builder. (Note: to [create a new SQL query](04-asking-questions.html#using-sql), click the console icon in the top right of the new question screen.)
![queryinterfacebar](images/QueryInterfaceBar.png)
Questions are made up of a number of parts: source data, filters, and answer output.
### Source Data
### Source data
---
All of the data in databases are in tables. Typically, tables will be named for the thing that each row in the table contains. For example, in a Customers table, each row in the table would represent a single customer. This means that when you’re thinking about how to phrase your question, you’ll need to decide what your question is about, and which table has that information in it. The first dropdown menu in the question builder is where you’ll choose the table you want.
All of the data in databases are in tables. Typically, tables will be named for the thing that each row in the table contains. For example, in a Customers table, each row in the table would represent a single customer. This means that when you’re thinking about how to phrase your question, you’ll need to decide what your question is about, and which table has that information in it.
The first dropdown menu in the question builder is where you’ll choose the database and table you want.
#### Using saved questions as source data
If you've [saved some questions](06-sharing-answers.html), in the Data menu you'll see the option to use one of your saved questions as source data. What this means in practice is that you can do things like use complex SQL queries to create new tables that can be used in a question just like any other table in your database.
You can use any saved question as source data, provided you have [permission](../administration-guide/05-setting-permissions.html) to view that question. You can even use questions that were saved as a chart rather than a table. The only caveat is that you can't use a saved question which itself uses a saved question as source data. (That's more inception than Metabase can handle!)
### Filters
---
......@@ -47,7 +55,7 @@ Now the relative date will be referencing the past 30 days from *today*, *not* f
#### Using segments
If your Metabase admins have created special named filters, called segments, for the table you’re viewing, they’ll appear at the top of the filter dropdown in purple text with a star next to them. These are shortcuts to sets of filters that are commonly used in your organization. They might be something like “Active Users,” or “Most Popular Products.”
### Answer Output
### Answer output
---
The last section of the question builder is where you select what you want the output of your answer to be, under the View dropdown. You’re basically telling Metabase, “I want to view the…” Metabase can output the answer to your question in four different ways:
......
// Origin: https://github.com/flowtype/flow-typed/blob/master/definitions/npm/redux-actions_v2.x.x/flow_v0.34.x-/redux-actions_v2.x.x.js
// Flow library definition for the `redux-actions` package (type declarations
// only — no runtime code). Vendored so Flow can type-check call sites.
declare module 'redux-actions' {
  /*
   * Use `ActionType` to get the type of the action created by a given action
   * creator. For example:
   *
   *     import { createAction, type ActionType } from 'redux-actions'
   *
   *     const increment = createAction(INCREMENT, (count: number) => count)
   *
   *     function myReducer(state: State = initState, action: ActionType<typeof increment>): State {
   *       // Flow will infer that the type of `action.payload` is `number`
   *     }
   */
  declare type ActionType<ActionCreator> = _ActionType<*, ActionCreator>;
  // Helper: extracts the return type R of a payload creator function.
  declare type _ActionType<R, Fn: (payload: *, ...rest: any[]) => R> = R;

  /*
   * To get the most from Flow type checking use a `payloadCreator` argument
   * with `createAction`. Make sure that Flow can infer the argument type of the
   * `payloadCreator`. That will allow Flow to infer the payload type of actions
   * created by that action creator in other parts of the program. For example:
   *
   *     const increment = createAction(INCREMENT, (count: number) => count)
   */
  // Overload: no payload creator — payload passes through unchanged.
  declare function createAction<T, P>(
    type: T,
    $?: empty // hack to force Flow to not use this signature when more than one argument is given
  ): (payload: P, ...rest: any[]) => { type: T, payload: P, error?: boolean };
  // Overload: payload creator transforms P into P2.
  declare function createAction<T, P, P2>(
    type: T,
    payloadCreator: (_: P) => P2,
    $?: empty
  ): (payload: P, ...rest: any[]) => { type: T, payload: P2, error?: boolean };
  // Overload: payload creator plus meta creator.
  declare function createAction<T, P, P2, M>(
    type: T,
    payloadCreator: (_: P) => P2,
    metaCreator: (_: P) => M
  ): (payload: P, ...rest: any[]) => { type: T, payload: P2, error?: boolean, meta: M };
  // Overload: meta creator only (payload passes through unchanged).
  declare function createAction<T, P, M>(
    type: T,
    payloadCreator: null | void,
    metaCreator: (_: P) => M
  ): (payload: P, ...rest: any[]) => { type: T, payload: P, error?: boolean, meta: M };

  // `createActions` is quite difficult to write a type for. Maybe try not to
  // use this one?
  declare function createActions(actionMap: Object, ...identityActions: string[]): Object;
  declare function createActions(...identityActions: string[]): Object;

  declare type Reducer<S, A> = (state: S, action: A) => S;

  // A reducer map may provide `next`, `throw`, or both handlers.
  declare type ReducerMap<S, A> =
    | { next: Reducer<S, A> }
    | { throw: Reducer<S, A> }
    | { next: Reducer<S, A>, throw: Reducer<S, A> }

  /*
   * To get full advantage from Flow, use a type annotation on the action
   * argument to your reducer when creating a reducer with `handleAction` or
   * `handleActions`. For example:
   *
   *     import { type Reducer } from 'redux'
   *     import { createAction, handleAction, type Action } from 'redux-actions'
   *
   *     const increment = createAction(INCREMENT, (count: number) => count)
   *
   *     const reducer = handleAction(INCREMENT, (state, { payload }: ActionType<typeof increment>) => {
   *       // Flow infers that the type of `payload` is number
   *     }, defaultState)
   */
  declare function handleAction<Type, State, Action: { type: Type }>(
    type: Type,
    reducer: Reducer<State, Action> | ReducerMap<State, Action>,
    defaultState: State
  ): Reducer<State, Action>;
  declare function handleActions<State, Action>(
    reducers: { [key: string]: Reducer<State, Action> | ReducerMap<State, Action> },
    defaultState?: State
  ): Reducer<State, Action>;
  declare function combineActions(...types: (string | Symbol | Function)[]) : string;
}
......@@ -28,7 +28,7 @@ declare module "underscore" {
declare function some<T>(a: Array<T>, pred: (val: T)=>boolean): boolean;
declare function all<T>(a: Array<T>, pred: (val: T)=>boolean): boolean;
declare function any<T>(a: Array<T>, pred: (val: T)=>boolean): boolean;
declare function contains<T>(a: Array<T>, pred: (val: T)=>boolean): boolean;
declare function contains<T>(a: Array<T>, val: T): boolean;
declare function initial<T>(a: Array<T>, n?: number): Array<T>;
declare function rest<T>(a: Array<T>, index?: number): Array<T>;
......@@ -38,6 +38,9 @@ declare module "underscore" {
declare function filter<T>(o: {[key:string]: T}, pred: (val: T, k: string)=>boolean): T[];
declare function isEmpty(o: any): boolean;
declare function isString(o: any): boolean;
declare function isObject(o: any): boolean;
declare function isArray(o: any): boolean;
declare function groupBy<T>(a: Array<T>, iteratee: string|(val: T, index: number)=>any): {[key:string]: T[]};
......@@ -53,6 +56,9 @@ declare module "underscore" {
declare function pick(o: {[key: any]: any}, ...properties: string[]): {[key: any]: any};
declare function pick(o: {[key: any]: any}, predicate: (val: any, key: any, object: {[key: any]: any})=>boolean): {[key: any]: any};
declare function pluck(o: Array<{[key: any]: any}>, propertyNames: string): Array<any>;
declare function has(o: {[key: any]: any}, ...properties: string[]): boolean;
declare function difference<T>(array: T[], ...others: T[][]): T[];
declare function flatten(a: Array<any>): Array<any>;
......
{
"rules": {
"flowtype/require-valid-file-annotation": 1
}
}
/* @flow weak */

// Base class for writeback-style actions. Concrete actions override
// perform(); the default implementation is a deliberate no-op.
export default class Action {
    perform() {}
}

// Marker class for click-triggered actions; currently an empty placeholder.
export class ActionClick {}
import Action from "./Action";

// Smoke test: constructing an Action and calling perform() must not throw.
// (perform() is a no-op in the base class, so there is no return value to assert.)
describe("Action", () => {
    describe("perform", () => {
        it("should perform the action", () => {
            new Action().perform();
        });
    });
});
// Minimal Dashboard model stub; getParameters() is a no-op placeholder
// (returns undefined) until real parameter support is implemented.
export default class Dashboard {
    getParameters() {}
}
import React from "react";
import Icon from "metabase/components/Icon";
import { stripId, inflect } from "metabase/lib/formatting";
import Query_DEPRECATED from "metabase/lib/query";
import { mbqlEq } from "metabase/lib/query/util";
import _ from "underscore";
import Field from "./metadata/Field";
import Metadata from "./metadata/Metadata";
import type {
ConcreteField,
LocalFieldReference,
ForeignFieldReference,
DatetimeField,
ExpressionReference,
DatetimeUnit
} from "metabase/meta/types/Query";
import type { IconName } from "metabase/meta/types";
/**
 * A dimension option returned by the query_metadata API
 */
type DimensionOption = {
    // raw MBQL clause fragment for this option; element [1] is filled in
    // with the parent field reference by Dimension._dimensionForOption
    mbql: any,
    // optional human-readable name, used for sub-display names when present
    name?: string
};
/**
 * Dimension base class, represents an MBQL field reference.
 *
 * Used for displaying fields (like Created At) and their "sub-dimensions" (like Created At by Day)
 * in field lists and active value widgets for filters, aggregations and breakouts.
 *
 * @abstract
 */
export default class Dimension {
    // optional wrapping/parent dimension (e.g. the FK field for an fk-> ref)
    _parent: ?Dimension;
    // MBQL arguments specific to the subclass (e.g. [field-id])
    _args: any;
    // metadata object used to resolve fields for display
    _metadata: ?Metadata;

    /**
     * Dimension constructor
     */
    constructor(
        parent: ?Dimension,
        args: any[],
        metadata?: Metadata
    ): Dimension {
        this._parent = parent;
        this._args = args;
        // inherit metadata from the parent dimension when not passed explicitly
        this._metadata = metadata || (parent && parent._metadata);
    }

    /**
     * Parses an MBQL expression into an appropriate Dimension subclass, if possible.
     * Metadata should be provided if you intend to use the display name or render methods.
     */
    static parseMBQL(mbql: ConcreteField, metadata?: Metadata): ?Dimension {
        // try each registered subclass in order; first successful parse wins
        for (const D of DIMENSION_TYPES) {
            const dimension = D.parseMBQL(mbql, metadata);
            if (dimension != null) {
                return dimension;
            }
        }
        return null;
    }

    /**
     * Returns true if these two dimensions are identical to one another.
     */
    static isEqual(a: ?Dimension | ConcreteField, b: ?Dimension): boolean {
        // NOTE(review): `this._metadata` inside a static method resolves
        // against the class object, which has no `_metadata` property, so
        // parseMBQL effectively receives undefined metadata here. Equality
        // still works (it compares args), but confirm this was intended.
        let dimensionA: ?Dimension = a instanceof Dimension
            ? a
            : // $FlowFixMe
              Dimension.parseMBQL(a, this._metadata);
        let dimensionB: ?Dimension = b instanceof Dimension
            ? b
            : // $FlowFixMe
              Dimension.parseMBQL(b, this._metadata);
        return !!dimensionA && !!dimensionB && dimensionA.isEqual(dimensionB);
    }

    /**
     * Sub-dimensions for the provided dimension of this type.
     * @abstract
     */
    // TODO Atte Keinänen 5/21/17: Rename either this or the instance method with the same name
    // Also making it clear in the method name that we're working with sub-dimensions would be good
    static dimensions(parent: Dimension): Dimension[] {
        return [];
    }

    /**
     * The default sub-dimension for the provided dimension of this type, if any.
     * @abstract
     */
    static defaultDimension(parent: Dimension): ?Dimension {
        return null;
    }

    /**
     * Returns "sub-dimensions" of this dimension.
     * @abstract
     */
    // TODO Atte Keinänen 5/21/17: Rename either this or the static method with the same name
    // Also making it clear in the method name that we're working with sub-dimensions would be good
    dimensions(
        DimensionTypes: typeof Dimension[] = DIMENSION_TYPES
    ): Dimension[] {
        // prefer server-provided options from query_metadata when available;
        // otherwise fall back to the static per-type sub-dimension generators
        const dimensionOptions = this.field().dimension_options;
        if (dimensionOptions) {
            return dimensionOptions.map(option =>
                this._dimensionForOption(option));
        } else {
            return [].concat(
                ...DimensionTypes.map(DimensionType =>
                    DimensionType.dimensions(this))
            );
        }
    }

    /**
     * Returns the default sub-dimension of this dimension, if any.
     * @abstract
     */
    defaultDimension(DimensionTypes: any[] = DIMENSION_TYPES): ?Dimension {
        // same precedence as dimensions(): server-provided default first,
        // then the first type-level default that applies
        const defaultDimensionOption = this.field().default_dimension_option;
        if (defaultDimensionOption) {
            return this._dimensionForOption(defaultDimensionOption);
        } else {
            for (const DimensionType of DimensionTypes) {
                const defaultDimension = DimensionType.defaultDimension(this);
                if (defaultDimension) {
                    return defaultDimension;
                }
            }
        }
        return null;
    }

    // Internal method gets a Dimension from a DimensionOption
    _dimensionForOption(option: DimensionOption) {
        // fill in the parent field ref
        const fieldRef = this.baseDimension().mbql();
        let mbql = option.mbql;
        if (mbql) {
            // splice this dimension's field ref in as the option's argument [1]
            mbql = [mbql[0], fieldRef, ...mbql.slice(2)];
        } else {
            mbql = fieldRef;
        }
        let dimension = Dimension.parseMBQL(mbql, this._metadata);
        // NOTE(review): parseMBQL may return null; if option.name is set
        // this would throw — presumably options always parse; confirm.
        if (option.name) {
            // override display names per-instance with the option's label
            dimension.subDisplayName = () => option.name;
            dimension.subTriggerDisplayName = () => option.name;
        }
        return dimension;
    }

    /**
     * Is this dimension identical to another dimension or MBQL clause
     */
    isEqual(other: ?Dimension | ConcreteField): boolean {
        if (other == null) {
            return false;
        }

        let otherDimension: ?Dimension = other instanceof Dimension
            ? other
            : Dimension.parseMBQL(other, this._metadata);
        if (!otherDimension) {
            return false;
        }
        // must be instance of the same class
        if (this.constructor !== otherDimension.constructor) {
            return false;
        }
        // must both or neither have a parent
        if (!this._parent !== !otherDimension._parent) {
            return false;
        }
        // parents must be equal
        if (this._parent && !this._parent.isEqual(otherDimension._parent)) {
            return false;
        }
        // args must be equal
        if (!_.isEqual(this._args, otherDimension._args)) {
            return false;
        }
        return true;
    }

    /**
     * Does this dimension have the same underlying base dimension, typically a field
     */
    isSameBaseDimension(other: ?Dimension | ConcreteField): boolean {
        if (other == null) {
            return false;
        }

        let otherDimension: ?Dimension = other instanceof Dimension
            ? other
            : Dimension.parseMBQL(other, this._metadata);

        const baseDimensionA = this.baseDimension();
        const baseDimensionB = otherDimension && otherDimension.baseDimension();

        return !!baseDimensionA &&
            !!baseDimensionB &&
            baseDimensionA.isEqual(baseDimensionB);
    }

    /**
     * The base dimension of this dimension, typically a field. May return itself.
     */
    baseDimension(): Dimension {
        return this;
    }

    /**
     * The underlying field for this dimension
     */
    field(): Field {
        // base class has no field; subclasses override to resolve via metadata
        return new Field();
    }

    /**
     * Valid operators on this dimension
     */
    operators() {
        return this.field().operators || [];
    }

    /**
     * The operator with the provided operator name (e.x. `=`, `<`, etc)
     */
    operator(op) {
        return this.field().operator(op);
    }

    /**
     * The display name of this dimension, e.x. the field's display_name
     * @abstract
     */
    displayName(): string {
        return "";
    }

    /**
     * The name to be shown when this dimension is being displayed as a sub-dimension of another
     * @abstract
     */
    subDisplayName(): string {
        return "";
    }

    /**
     * A shorter version of subDisplayName, e.x. to be shown in the dimension picker trigger
     * @abstract
     */
    subTriggerDisplayName(): string {
        return "";
    }

    /**
     * An icon name representing this dimension's type, to be used in the <Icon> component.
     * @abstract
     */
    icon(): ?IconName {
        return null;
    }

    /**
     * Renders a dimension to React
     */
    render(): ?React$Element<any> {
        return [this.displayName()];
    }
}
/**
 * Field based dimension, abstract class for `field-id`, `fk->`, `datetime-field`, etc
 * @abstract
 */
export class FieldDimension extends Dimension {
    field(): Field {
        // delegate to the parent when it is itself a field dimension
        // (e.g. a datetime-field wrapping a field-id); otherwise return
        // an empty Field stub
        if (this._parent instanceof FieldDimension) {
            return this._parent.field();
        }
        return new Field();
    }

    displayName(): string {
        // full field path name with any trailing " ID" suffix stripped
        return stripId(
            Query_DEPRECATED.getFieldPathName(
                this.field().id,
                this.field().table
            )
        );
    }

    subDisplayName(): string {
        if (this._parent) {
            // foreign key, show the field name
            return this.field().display_name;
        } else if (this.field().isNumber()) {
            // numeric fields label the "no binning" option explicitly
            return "Continuous (no binning)";
        } else {
            return "Default";
        }
    }

    icon() {
        return this.field().icon();
    }
}
/**
* Field ID-based dimension, `["field-id", field-id]`
*/
export class FieldIDDimension extends FieldDimension {
static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata) {
if (typeof mbql === "number") {
// DEPRECATED: bare field id
return new FieldIDDimension(null, [mbql], metadata);
} else if (Array.isArray(mbql) && mbqlEq(mbql[0], "field-id")) {
return new FieldIDDimension(null, mbql.slice(1), metadata);
}
return null;
}
mbql(): LocalFieldReference {
return ["field-id", this._args[0]];
}
field() {
return (this._metadata && this._metadata.fields[this._args[0]]) ||
new Field();
}
}
/**
 * Foreign key-based dimension, `["fk->", fk-field-id, dest-field-id]`
 */
export class FKDimension extends FieldDimension {
    static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata): ?Dimension {
        if (Array.isArray(mbql) && mbqlEq(mbql[0], "fk->")) {
            // $FlowFixMe
            const fkRef: ForeignFieldReference = mbql;
            // parent represents the FK field itself; args hold the destination field id
            const parent = Dimension.parseMBQL(fkRef[1], metadata);
            return new FKDimension(parent, fkRef.slice(2));
        }
        return null;
    }

    static dimensions(parent: Dimension): Dimension[] {
        // one FK sub-dimension per field of the target (connected) table
        if (parent instanceof FieldDimension) {
            const field = parent.field();
            if (field.target && field.target.table) {
                return field.target.table.fields.map(
                    field => new FKDimension(parent, [field.id])
                );
            }
        }
        return [];
    }

    mbql(): ForeignFieldReference {
        // TODO: not sure `this._parent._args[0]` is the best way to handle this?
        // we don't want the `["field-id", ...]` wrapper from the `this._parent.mbql()`
        // NOTE(review): assumes _parent is non-null — true when constructed via
        // parseMBQL with a valid fk-> clause; confirm no other construction path.
        return ["fk->", this._parent._args[0], this._args[0]];
    }

    field() {
        // resolve destination field via metadata; fall back to an empty Field stub
        return (this._metadata && this._metadata.fields[this._args[0]]) ||
            new Field();
    }

    render() {
        // "Source Field <icon> Destination Field" as a React fragment array
        return [
            stripId(this._parent.field().display_name),
            <Icon name="connections" className="px1" size={10} />,
            this.field().display_name
        ];
    }
}
import { DATETIME_UNITS, formatBucketing } from "metabase/lib/query_time";
// True for dimensions that reference a concrete field directly —
// either by id (`field-id`) or through a foreign key (`fk->`).
const isFieldDimension = dimension =>
    [FieldIDDimension, FKDimension].some(cls => dimension instanceof cls);
/**
* DatetimeField dimension, `["datetime-field", field-reference, datetime-unit]`
*/
export class DatetimeFieldDimension extends FieldDimension {
static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata): ?Dimension {
if (Array.isArray(mbql) && mbqlEq(mbql[0], "datetime-field")) {
const parent = Dimension.parseMBQL(mbql[1], metadata);
// DEPRECATED: ["datetime-field", id, "of", unit]
if (mbql.length === 4) {
return new DatetimeFieldDimension(parent, mbql.slice(3));
} else {
return new DatetimeFieldDimension(parent, mbql.slice(2));
}
}
return null;
}
static dimensions(parent: Dimension): Dimension[] {
if (isFieldDimension(parent) && parent.field().isDate()) {
return DATETIME_UNITS.map(
unit => new DatetimeFieldDimension(parent, [unit])
);
}
return [];
}
static defaultDimension(parent: Dimension): ?Dimension {
if (isFieldDimension(parent) && parent.field().isDate()) {
return new DatetimeFieldDimension(parent, ["day"]);
}
return null;
}
mbql(): DatetimeField {
return ["datetime-field", this._parent.mbql(), this._args[0]];
}
baseDimension(): Dimension {
return this._parent.baseDimension();
}
bucketing(): DatetimeUnit {
return this._args[0];
}
subDisplayName(): string {
return formatBucketing(this._args[0]);
}
subTriggerDisplayName(): string {
return "by " + formatBucketing(this._args[0]).toLowerCase();
}
render() {
return [...super.render(), ": ", this.subDisplayName()];
}
}
/**
* Binned dimension, `["binning-strategy", field-reference, strategy, ...args]`
*/
export class BinnedDimension extends FieldDimension {
static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata) {
if (Array.isArray(mbql) && mbqlEq(mbql[0], "binning-strategy")) {
const parent = Dimension.parseMBQL(mbql[1], metadata);
return new BinnedDimension(parent, mbql.slice(2));
}
return null;
}
static dimensions(parent: Dimension): Dimension[] {
if (isFieldDimension(parent) && parent.field().isNumber()) {
return [5, 10, 25, 100].map(
bins => new BinnedDimension(parent, ["default", bins])
);
}
return [];
}
mbql() {
return ["binning-strategy", this._parent.mbql(), ...this._args];
}
baseDimension(): Dimension {
return this._parent.baseDimension();
}
subDisplayName(): string {
if (this._args[0] === "default") {
return `Quantized into ${this._args[1]} ${inflect("bins", this._args[1])}`;
}
return JSON.stringify(this._args);
}
subTriggerDisplayName(): string {
if (this._args[0] === "default") {
return `${this._args[1]} ${inflect("bins", this._args[1])}`;
}
return "";
}
}
/**
* Expression reference, `["expression", expression-name]`
*/
export class ExpressionDimension extends Dimension {
tag = "Custom";
static parseMBQL(mbql: any, metadata?: ?Metadata): ?Dimension {
if (Array.isArray(mbql) && mbqlEq(mbql[0], "expression")) {
return new ExpressionDimension(null, mbql.slice(1));
}
}
mbql(): ExpressionReference {
return ["expression", this._args[0]];
}
displayName(): string {
return this._args[0];
}
icon(): IconName {
// TODO: eventually will need to get the type from the return type of the expression
return "int";
}
}
/**
* Aggregation reference, `["aggregation", aggregation-index]`
*/
export class AggregationDimension extends Dimension {
static parseMBQL(mbql: any, metadata?: ?Metadata): ?Dimension {
if (Array.isArray(mbql) && mbqlEq(mbql[0], "aggregation")) {
return new AggregationDimension(null, mbql.slice(1));
}
}
constructor(parent, args, metadata, displayName) {
super(parent, args, metadata);
this._displayName = displayName;
}
displayName(): string {
return this._displayName;
}
mbql() {
return ["aggregation", this._args[0]];
}
icon() {
return "int";
}
}
// Registry of concrete Dimension subclasses, presumably consulted (e.g.
// by Dimension.parseMBQL, defined earlier in this file — confirm) when
// parsing MBQL clauses; order may matter for parse precedence, so append
// new types rather than reordering.
const DIMENSION_TYPES: typeof Dimension[] = [
    FieldIDDimension,
    FKDimension,
    DatetimeFieldDimension,
    ExpressionDimension,
    BinnedDimension,
    AggregationDimension
];
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment