Commit 8fc321ac, authored by Ariya Hidayat, committed by GitHub

CI with GitHub Action: run tokenizer fuzzing tests (#18841)

parent 8d8e472b
New GitHub Actions workflow (added under .github/workflows/):

name: Fuzzing
on:
  push:
    branches:
      - '**'
    paths:
      - 'frontend/**'
      - 'shared/**'
      - 'enterprise/frontend/**'
      - 'docs/**'
      - '**/package.json'
      - '**/yarn.lock'
      - '**/.eslintrc'
      - '.github/workflows/**'
  pull_request:

jobs:
  fe-fuzz-tokenizer:
    runs-on: ubuntu-20.04
    timeout-minutes: 7
    steps:
      - uses: actions/checkout@v2
      - name: Prepare Node.js
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - name: Get M2 cache
        uses: actions/cache@v2
        with:
          path: ~/.m2
          key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }}
      - name: Get yarn cache
        uses: actions/cache@v2
        with:
          path: ~/.cache/yarn
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
      - run: yarn install --frozen-lockfile --prefer-offline
      - run: yarn test-unit frontend/test/metabase/lib/expressions/fuzz.tokenizer.unit.spec.js
        env:
          MB_FUZZ: 1
        name: Run fuzz testing on the tokenizer
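Note that the job always invokes the spec file, but the fuzzing suite inside it only runs because the step sets MB_FUZZ=1; without that variable the suite is registered with describe.skip (see the new spec below). The same check should be reproducible locally by setting the variable before the test command, e.g. MB_FUZZ=1 yarn test-unit frontend/test/metabase/lib/expressions/fuzz.tokenizer.unit.spec.js, assuming dependencies are already installed with yarn install as in the CI job.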
New test file, frontend/test/metabase/lib/expressions/fuzz.tokenizer.unit.spec.js:

import { tokenize } from "metabase/lib/expressions/tokenizer";

import { generateExpression } from "./generator";

const fuzz = process.env.MB_FUZZ ? describe : describe.skip;

describe("metabase/lib/expressions/tokenizer", () => {
  // quick sanity check before the real fuzzing
  it("should tokenize custom expression", () => {
    expect(() => tokenize("CASE([Deal],[Price]*7e-1,[Price]")).not.toThrow();
  });
});

fuzz("FUZZING metabase/lib/expressions/tokenizer", () => {
  const MAX_SEED = 5e4;
  for (let seed = 0; seed < MAX_SEED; ++seed) {
    it("should handle generated expression from seed " + seed, () => {
      const { expression } = generateExpression(seed);
      expect(() => tokenize(expression)).not.toThrow();
    });

    it("should not error on generated expression from seed " + seed, () => {
      const { expression } = generateExpression(seed);
      expect(tokenize(expression).errors).toEqual([]);
    });
  }
});
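The suite depends on generateExpression from ./generator, which is not included in this excerpt. All the tests assume about it is that a given seed deterministically yields the same { expression } string, which is what makes every failing seed reproducible. Purely as an illustration of that contract (the mulberry32-style PRNG and the toy grammar below are assumptions, not the real generator), a minimal sketch could look like this:

// Hypothetical sketch of the generator contract used by the fuzz suite above.
// mulberry32 is a small seedable PRNG; the grammar here is a toy stand-in.
function mulberry32(seed) {
  return function () {
    seed = (seed + 0x6d2b79f5) | 0;
    let t = Math.imul(seed ^ (seed >>> 15), 1 | seed);
    t = (t + Math.imul(t ^ (t >>> 7), 61 | t)) ^ t;
    return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
  };
}

export function generateExpression(seed) {
  const random = mulberry32(seed);
  const pick = items => items[Math.floor(random() * items.length)];
  const operand = () => pick(["[Price]", "[Deal]", "1", "7e-1", '"text"']);
  const operator = () => pick(["+", "-", "*", "/"]);
  // Same seed -> same expression, so a failing case can be replayed by seed.
  const expression = `${operand()} ${operator()} ${operand()}`;
  return { expression };
}

The real generator presumably produces far more varied (and deliberately awkward) expressions; seed-determinism is the only property the specs lean on.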
Changes to the existing tokenizer spec (in the same directory as the new file above); the fuzzing block moves out into the new spec:

@@ -4,8 +4,6 @@ import {
   OPERATOR as OP,
 } from "metabase/lib/expressions/tokenizer";
 
-import { generateExpression } from "./generator";
-
 describe("metabase/lib/expressions/tokenizer", () => {
   const types = expr => tokenize(expr).tokens.map(t => t.type);
   const ops = expr => tokenize(expr).tokens.map(t => t.op);

@@ -142,20 +140,3 @@ describe("metabase/lib/expressions/tokenizer", () => {
     expect(errors(" #")[0].pos).toEqual(4);
   });
 });
-
-if (process.env.MB_FUZZ) {
-  describe("FUZZING metabase/lib/expressions/tokenizer", () => {
-    const MAX_SEED = 5e4;
-    for (let seed = 0; seed < MAX_SEED; ++seed) {
-      it("should handle generated expression from seed " + seed, () => {
-        const { expression } = generateExpression(seed);
-        expect(() => tokenize(expression)).not.toThrow();
-      });
-
-      it("should not error on generated expression from seed " + seed, () => {
-        const { expression } = generateExpression(seed);
-        expect(tokenize(expression).errors).toEqual([]);
-      });
-    }
-  });
-}
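For orientation, these specs only exercise a small surface of the tokenizer API: tokenize(expression) returns an object with tokens (each carrying a type and, for operator tokens, an op) and errors (each carrying a pos). A short illustration of that shape follows; the inputs echo the specs above, but the commented expectations are assumptions rather than output captured from this commit:

import { tokenize } from "metabase/lib/expressions/tokenizer";

// Well-formed input should neither throw nor report errors.
const good = tokenize("[Price] * 7e-1");
console.log(good.tokens.map(t => t.type)); // one token type per token (values of the TOKEN enum imported as T in the spec)
console.log(good.tokens.map(t => t.op));   // operator tokens additionally expose an `op` field
console.log(good.errors);                  // expected: []

// Malformed input does not throw either; problems are reported via `errors`,
// each with a `pos` pointing at the offending character.
const bad = tokenize(" #");
console.log(bad.errors.map(e => e.pos));   // the removed-hunk context above checks such a position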