Commit
Merge pull request #2454 from terascope/fix-e2e-tests
v0.72.1 - Serialize asset unzipping and update e2e tests
jsnoble authored Feb 11, 2021
2 parents 40b03a8 + d15be00 commit 950fcb7
Showing 38 changed files with 147 additions and 165 deletions.
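The release title above mentions serializing asset unzipping; the code that actually implements it is among the 38 changed files but is not rendered in this view. The sketch below is therefore only an illustration of the general idea — extracting uploaded asset bundles one at a time instead of concurrently — and the function and argument names are hypothetical, not taken from the Teraslice source.

```js
// Illustrative sketch only: `unzipAsset` stands in for whatever routine
// extracts a single asset bundle, and `assetRecords` for the pending uploads.
async function unzipAssetsSerially(assetRecords, unzipAsset) {
    const results = [];
    for (const record of assetRecords) {
        // Awaiting inside the loop forces strictly sequential extraction,
        // so several large archives are never being unzipped at the same time.
        results.push(await unzipAsset(record));
    }
    return results;
}
```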
16 changes: 9 additions & 7 deletions .travis.yml
@@ -2,7 +2,7 @@
dist: bionic
os: linux
language: node_js
node_js: '12.19'
node_js: '12.20'
# use a smaller depth for faster builds
git:
depth: 10
@@ -48,31 +48,33 @@ jobs:
script: yarn --silent test --suite unit-a

- name: 'Unit Test Suite (node 12)'
node_js: '12.19'
node_js: '12.20'
# run only on pull-requests or cron
if: branch = master AND type IN (pull_request, cron)
script: yarn --silent test --suite unit-b

- script:
name: 'ES Test Suite (elasticsearch 6) (node 12)'
node_js: '12.19'
node_js: '12.20'
# run only on pull-requests and cron
if: branch = master AND type IN (pull_request, cron)
script: yarn --silent test --suite elasticsearch --elasticsearch-version 6.8.6 --elasticsearch-api-version 6.5

- script:
name: 'ES Test Suite (elasticsearch 7) (node 12)'
node_js: '12.19'
node_js: '12.20'
# run only on pull-requests
if: branch = master AND type IN (pull_request) AND commit_message !~ /^WIP/
script: yarn --silent test --suite elasticsearch --elasticsearch-version 7.2.1 --elasticsearch-api-version 7.0 --report-coverage false

- script:
name: 'End-to-End Test Suite (elasticsearch 6) (node 12)'
node_js: '12.19'
node_js: '12.20'
# run only on pull-requests and cron
if: branch = master AND type IN (pull_request, cron) AND fork = false
script: yarn --silent --cwd e2e test
script:
- export SERVICE_HEAP_OPTS="-Xms768m -Xmx768m"
- yarn --silent --cwd e2e test
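Reassembled from the added lines above (indentation is reconstructed, since the rendered diff strips it), the end-to-end job's script section now reads roughly:

```yaml
script:
  - export SERVICE_HEAP_OPTS="-Xms768m -Xmx768m"
  - yarn --silent --cwd e2e test
```

The `SERVICE_HEAP_OPTS` value presumably caps the JVM heap of the backing services the e2e suite starts (such as Elasticsearch and Kafka); the compose files that consume the variable are not shown in this view.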

- stage: "Releases"
name: 'Publish prerelease packages and docker image'
@@ -84,7 +86,7 @@

- script:
name: 'Publish packages, docs and expiremental docker image'
node_js: '12.19'
node_js: '12.20'
# run a push to master
if: tag IS blank AND branch = master AND type NOT IN (pull_request, cron)
script:
11 changes: 4 additions & 7 deletions docs/asset-bundles/development.md
@@ -56,23 +56,20 @@ operator just like any other operator, as shown below:
"name": "Update Rate Test",
"lifecycle": "once",
"workers": 1,
"assets": ["elasticsearch", "example"],
"assets": ["elasticsearch", "standard", "example"],
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 5000
},
{
"_op": "count",
"log_level": "debug"
},
{
"_op": "elasticsearch_index_selector",
"index": "update-test-1",
"type": "events"
},
{
"_op": "elasticsearch_bulk",
"index": "update-test-1",
"type": "events",
"size": 5000
}
]
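Because this hunk interleaves removed and added lines, the updated example is easier to read in one piece. Keeping only the new lines, the operations list becomes roughly the following (indentation reconstructed):

```json
"operations": [
    {
        "_op": "data_generator",
        "size": 5000
    },
    {
        "_op": "count",
        "log_level": "debug"
    },
    {
        "_op": "elasticsearch_bulk",
        "index": "update-test-1",
        "type": "events",
        "size": 5000
    }
]
```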
4 changes: 2 additions & 2 deletions docs/jobs/builtin-operations.md
@@ -35,10 +35,10 @@ Example Job: `examples/jobs/script/test_script_job.json`
"name": "ES DataGen test script",
"lifecycle": "persistent",
"workers": 1,
"assets": ["elasticsearch"],
"assets": ["standard"],
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 100000,
"stress_test": true
},
9 changes: 3 additions & 6 deletions docs/jobs/configuration.md
@@ -32,7 +32,7 @@ The first operation in the [operations](#operations) list, reads from a particul
| --------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- | -------- |
| `_op` | Name of operation, it must reflect the exact name of the file | `String` | required |
| `_encoding` | Used for specifying the data encoding type when using `DataEntity.fromBuffer`. Defaults to `json`. | `String` | optional |
| `_dead_letter_action` | Specifies what to do when a record fails to parse or transform. The builtin actions "throw", "log", and "none" are supported. Any other value is treated as the name of a registered [Dead Letter Queue](./dead-letter-queue.md) API; that API must already be created by an operation before it can be used. | `String` | required |

### apis

@@ -62,13 +62,10 @@ The first operation in the [operations](#operations) list, reads from a particul
"_op": "custom_op",
"some": "configuration"
},
{
"_op": "elasticsearch_index_selector",
"index": "bigdata3",
"type": "events"
},
{
"_op": "elasticsearch_bulk",
"index": "bigdata3",
"type": "events",
"size": 5000
}
]
10 changes: 4 additions & 6 deletions docs/packages/teraslice-client-js/overview.md
@@ -27,18 +27,16 @@ var job = {
"name": "Data Generator",
"lifecycle": "once",
"workers": 1,
"assets": ["standard", "elasticsearch"],
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 1
},
{
"_op": "elasticsearch_index_selector",
"index": "client-test-logs",
"type": "events"
},
{
"_op": "elasticsearch_bulk",
"index": "client-test-logs",
"type": "events",
"size": 50
}
]
4 changes: 2 additions & 2 deletions e2e/test/cases/assets/simple-spec.js
@@ -29,7 +29,7 @@ describe('assets', () => {
const result = await teraslice.assets.upload(fileStream, { blocking: true });
// NOTE: In this case, the asset is referenced by the ID
// assigned by teraslice and not it's name.
jobSpec.assets = [result._id, 'elasticsearch'];
jobSpec.assets = [result._id, 'standard', 'elasticsearch'];

const ex = await submitAndStart(jobSpec);

@@ -116,7 +116,7 @@ describe('assets', () => {

it('can directly ask for the new asset to be used', async () => {
const jobSpec = misc.newJob('generator-asset');
jobSpec.assets = ['ex1:0.1.1', 'elasticsearch'];
jobSpec.assets = ['ex1:0.1.1', 'standard', 'elasticsearch'];
const { workers } = jobSpec;

const assetResponse = await teraslice.assets.getAsset('ex1', '0.1.1');
6 changes: 5 additions & 1 deletion e2e/test/download-assets.js
@@ -19,6 +19,10 @@ const bundles = [
{
repo: 'kafka-assets',
name: 'kafka'
},
{
repo: 'standard-assets',
name: 'standard'
}
];

@@ -42,7 +46,7 @@ function getOlderAsset(assets, assetName) {
}

function filterRelease(release) {
return !release.draft && !release.prerelease;
return !release.draft;
}

function filterAsset(asset) {
11 changes: 4 additions & 7 deletions e2e/test/fixtures/jobs/generate-to-es.json
@@ -4,20 +4,17 @@
"lifecycle": "persistent",
"workers": 2,
"analytics": false,
"assets": ["elasticsearch"],
"assets": ["elasticsearch", "standard"],
"max_retries": 0,
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 100
},
{
"_op": "elasticsearch_index_selector",
"index": "replace-me-1000",
"type": "events"
},
{
"_op": "elasticsearch_bulk",
"index": "replace-me-1000",
"type": "events",
"size": 1000
}
]
4 changes: 2 additions & 2 deletions e2e/test/fixtures/jobs/generator-asset.json
@@ -3,12 +3,12 @@
"slicers": 1,
"lifecycle": "persistent",
"workers": 2,
"assets": ["ex1", "elasticsearch"],
"assets": ["ex1", "standard"],
"max_retries": 0,
"analytics": false,
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 1000
},
{
4 changes: 2 additions & 2 deletions e2e/test/fixtures/jobs/generator.json
@@ -4,11 +4,11 @@
"lifecycle": "persistent",
"workers": 3,
"analytics": false,
"assets": ["elasticsearch"],
"assets": ["standard"],
"max_retries": 0,
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 1000,
"stress_test": false
},
7 changes: 2 additions & 5 deletions e2e/test/fixtures/jobs/id.json
@@ -13,13 +13,10 @@
"size": 500,
"key_type": "base64url"
},
{
"_op": "elasticsearch_index_selector",
"index": "test-id_reindex-1000",
"type": "events"
},
{
"_op": "elasticsearch_bulk",
"index": "test-id_reindex-1000",
"type": "events",
"size": 200
}
]
7 changes: 2 additions & 5 deletions e2e/test/fixtures/jobs/kafka-reader.json
@@ -16,13 +16,10 @@
"_encoding": "json"
},
{
"_op": "elasticsearch_index_selector",
"_op": "elasticsearch_bulk",
"type": "events",
"index": "replace-me-1000",
"preserve_id": true
},
{
"_op": "elasticsearch_bulk",
"preserve_id": true,
"size": 500
}
]
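This hunk also interleaves old and new lines: the former `elasticsearch_index_selector` + `elasticsearch_bulk` pair appears to collapse into a single `elasticsearch_bulk` processor, roughly as below (reconstructed from the new and remaining lines, indentation assumed):

```json
{
    "_op": "elasticsearch_bulk",
    "type": "events",
    "index": "replace-me-1000",
    "preserve_id": true,
    "size": 500
}
```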
7 changes: 2 additions & 5 deletions e2e/test/fixtures/jobs/multisend.json
@@ -14,13 +14,10 @@
"size": 500
},
{
"_op": "elasticsearch_index_selector",
"_op": "elasticsearch_bulk",
"index": "multisend-1000",
"preserve_id": true,
"type": "change"
},
{
"_op": "elasticsearch_bulk",
"type": "change",
"multisend_index_append": false,
"size": 100,
"multisend": true,
7 changes: 2 additions & 5 deletions e2e/test/fixtures/jobs/reindex.json
@@ -14,13 +14,10 @@
"preserve_id": true
},
{
"_op": "elasticsearch_index_selector",
"_op": "elasticsearch_bulk",
"type": "change",
"index": "replace-me-1000",
"preserve_id": true
},
{
"_op": "elasticsearch_bulk",
"preserve_id": true,
"size": 50
}
]
13 changes: 5 additions & 8 deletions e2e/test/global.setup.js
@@ -65,19 +65,16 @@ async function generateTestData() {
name: `Generate: ${indexName}`,
lifecycle: 'once',
workers: 1,
assets: ['elasticsearch'],
assets: ['elasticsearch', 'standard'],
operations: [
{
_op: 'elasticsearch_data_generator',
_op: 'data_generator',
size: count
},
{
_op: 'elasticsearch_index_selector',
index: indexName,
type: 'events'
},
{
_op: 'elasticsearch_bulk',
index: indexName,
type: 'events',
size: 1000
}
]
@@ -107,7 +104,7 @@ async function generateTestData() {
}

try {
await Promise.all(misc.EXAMLPE_INDEX_SIZES.map((size) => generate(size)));
await Promise.all(misc.EXAMPLE_INDEX_SIZES.map((size) => generate(size)));
// we need fully active jobs so we can get proper meta data for recovery state tests
signale.success('Data generation is done', getElapsed(startTime));
} catch (err) {
8 changes: 4 additions & 4 deletions e2e/test/misc.js
@@ -28,7 +28,7 @@ const CONFIG_PATH = path.join(BASE_PATH, '.config');
const ASSETS_PATH = path.join(BASE_PATH, '.assets');
const SPEC_INDEX_PREFIX = `${TEST_INDEX_PREFIX}spec`;
const EXAMPLE_INDEX_PREFIX = `${TEST_INDEX_PREFIX}example`;
const EXAMLPE_INDEX_SIZES = [100, 1000];
const EXAMPLE_INDEX_SIZES = [100, 1000];

// the uniq cluster name
const CLUSTER_NAME = newId(`${TEST_INDEX_PREFIX}teracluster`, true, 2);
@@ -38,7 +38,7 @@ const DEFAULT_WORKERS = 2;
// The teraslice-master + the number of teraslice-worker instances (see the docker-compose.yml)
const DEFAULT_NODES = DEFAULT_WORKERS + 1;
// The number of workers per number (see the process-master.yaml and process-worker.yaml)
const WORKERS_PER_NODE = 12;
const WORKERS_PER_NODE = 8;

const compose = new Compose('docker-compose.yml');
const signale = require('./signale');
@@ -76,7 +76,7 @@ function newSpecIndex(name) {
}

function getExampleIndex(size) {
if (!EXAMLPE_INDEX_SIZES.includes(size)) {
if (!EXAMPLE_INDEX_SIZES.includes(size)) {
throw new Error(`No example index with ${size}`);
}

@@ -187,7 +187,7 @@ module.exports = {
globalTeardown,
newSpecIndex,
resetLogs,
EXAMLPE_INDEX_SIZES,
EXAMPLE_INDEX_SIZES,
EXAMPLE_INDEX_PREFIX,
SPEC_INDEX_PREFIX,
ELASTICSEARCH_HOST,
11 changes: 4 additions & 7 deletions examples/jobs/data_generator.json
@@ -2,20 +2,17 @@
"name": "Data Generator",
"lifecycle": "persistent",
"workers": 1,
"assets": ["elasticsearch", "standard"],
"operations": [
{
"_op": "elasticsearch_data_generator",
"_op": "data_generator",
"size": 5000
},
{
"_op": "elasticsearch_index_selector",
"index": "example-logs",
"type": "events"
},
{
"_op": "elasticsearch_bulk",
"index": "example-logs",
"type": "events",
"size": 5000
}
]
}
