Merge pull request #14 from thedumbterminal/errors
Better errors when dealing with union types
thedumbterminal authored Nov 11, 2018
2 parents d3d8772 + a2630e9 commit 358462f
Showing 6 changed files with 46 additions and 37 deletions.
6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -1,8 +1,12 @@
# Changelog

## v0.1.2 (11/11/2018)

* Improved error message when dealing with union types.

## v0.1.1 (01/11/2018)

* Bugfix for jsbq command.
* Bug fix for jsbq command.

## v0.1.0 (23/10/2018)

7 changes: 1 addition & 6 deletions README.md
@@ -25,15 +25,10 @@ Note that some features involve bespoke interpretation of schema details suited
For embedded usage, the following allows runtime schema conversion and table maintenance:

const jsonSchemaBigquery = require('jsonschema-bigquery')
const bigquerySchema = jsonSchemaBigquery.run(jsonSchemaObject)

Please ensure that the input JSON schema is dereferenced so that all external references have been resolved. [json-schema-ref-parser](https://www.npmjs.com/package/json-schema-ref-parser) can do this before the schema is passed to this module.

## Test

npm test

A standalone test script is also included that supports batch creation of tables in a nominated test dataset.

## TODO

* Error messages.
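
For reference, the embedded usage and dereferencing advice in the README above can be combined as in the minimal sketch below. The schema literal is illustrative only, and the dereference step assumes json-schema-ref-parser's promise-based `dereference()` API; neither is part of this commit.

```js
// Sketch: dereference a JSON schema, then convert it to a BigQuery schema.
const $RefParser = require('json-schema-ref-parser')
const jsonSchemaBigquery = require('jsonschema-bigquery')

// Placeholder schema; a real schema may contain external $ref pointers.
const mySchema = {
  type: 'object',
  properties: {
    name: { type: 'string' }
  },
  required: ['name']
}

const main = async () => {
  // Resolve any $ref pointers before conversion, as the README advises.
  const dereferenced = await $RefParser.dereference(mySchema)
  const bigquerySchema = jsonSchemaBigquery.run(dereferenced)
  console.log(JSON.stringify(bigquerySchema, null, 2))
}

main()
```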
3 changes: 1 addition & 2 deletions bin/jsbq
@@ -11,7 +11,7 @@ const { promisify } = require('util')

jsbq.process = async (project, datasetName, jsonSchema) => {
logger.info('Processing json schema...')
const tableOptions = converter.run(jsonSchema,'p','t')
const tableOptions = converter.run(jsonSchema)
logger.info(JSON.stringify(tableOptions))

logger.info('Extracting table name from schema...')
@@ -52,4 +52,3 @@ jsbq.run = async () => {
}

jsbq.run()

16 changes: 8 additions & 8 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "jsonschema-bigquery",
"version": "0.1.1",
"version": "0.1.2",
"description": "Convert JSON schema to Google BigQuery schema",
"main": "src/converter.js",
"scripts": {
@@ -34,13 +34,13 @@
},
"homepage": "https://github.com/thedumbterminal/jsonschema-bigquery#readme",
"devDependencies": {
"eslint": "5.6.1",
"eslint-config-standard": "12.0.0",
"eslint-plugin-import": "2.14.0",
"eslint-plugin-mocha": "5.2.0",
"eslint-plugin-node": "7.0.1",
"eslint-plugin-promise": "4.0.1",
"eslint-plugin-standard": "4.0.0",
"eslint": "^5.6.1",
"eslint-config-standard": "^12.0.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-mocha": "^5.2.0",
"eslint-plugin-node": "^7.0.1",
"eslint-plugin-promise": "^4.0.1",
"eslint-plugin-standard": "^4.0.0",
"mocha": "^5.2.0"
},
"dependencies": {
31 changes: 13 additions & 18 deletions src/converter.js
@@ -120,7 +120,7 @@ function array(name, node, mode){
if (_.has(items_with_description,'description')){
items_with_description['description'] = node['description']
}
return visit(name, items_with_description, 'REPEATED')
return converter._visit(name, items_with_description, 'REPEATED')
}

function object_(name, node, mode){
@@ -129,7 +129,7 @@ function object_(name, node, mode){

const fields = Object.keys(properties).map( key => {
const required = required_properties.includes(key) ? 'REQUIRED' : 'NULLABLE'
return visit(key,properties[key], required)
return converter._visit(key,properties[key], required)
})

const result = { 'name': name,
@@ -161,7 +161,14 @@ function simple(name, type_, node, mode){
return scalar(name, actual_type, mode, node['description'])
}

function visit(name, node, mode='NULLABLE'){
function get_table_id(schema){
const id = schema['id'].split('/')
const name = id[-4,-2]
const version = id[-2].split('.')[0]
return '_'.join(name + [version])
}

converter._visit = (name, node, mode='NULLABLE') => {
var merged_node = node
const ofs = ['allOf', 'anyOf', 'oneOf']
for ( x=0 ; x<ofs.length ; x++ ){
@@ -176,7 +183,7 @@ function visit(name, node, mode='NULLABLE'){
if (Array.isArray(type_)){
non_null_types = type_.filter( scalar_type => scalar_type !== 'null')
if (non_null_types.length > 1){
throw new Error('union type not supported: {node}')
throw new Error(`union type not supported:\n${JSON.stringify(node, null, 2)}`)
}
if ( type_.includes('null')){
actual_mode = 'NULLABLE'
@@ -186,22 +193,10 @@
return simple(name, type_, merged_node, actual_mode)
}

function convert(input_schema){
converter.run = (input_schema) => {
return {
schema: {
fields: visit('root', input_schema).fields
fields: converter._visit('root', input_schema).fields
}
}
}

function get_table_id(schema){
const id = schema['id'].split('/')
const name = id[-4,-2]
const version = id[-2].split('.')[0]
return '_'.join(name + [version])
}

// The original bigjson entry point
converter.run = (input_schema, project, dataset) => {
return convert(input_schema)
}
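
The improved error from this change can be seen by converting a schema whose property declares more than one non-null type. The snippet below is a hypothetical illustration, not part of the commit; it assumes it is run from the repository root so that `./src/converter` resolves.

```js
const converter = require('./src/converter')

// A property with two non-null types is a union, which BigQuery cannot represent.
const schema = {
  type: 'object',
  properties: {
    value: { type: ['string', 'boolean'] }
  }
}

try {
  converter.run(schema)
} catch (err) {
  // The message now includes the offending node as pretty-printed JSON,
  // instead of the literal placeholder string '{node}'.
  console.error(err.message)
}
```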
20 changes: 18 additions & 2 deletions test/index.js → test/converter.js
@@ -2,8 +2,8 @@ const converter = require('../src/converter')
const assert = require('assert')
const fs = require('fs')

describe('index', () => {
describe('convert()', () => {
describe('converter', () => {
describe('run()', () => {
const sampleDir = './test/samples'
const testDirs = fs.readdirSync(sampleDir)

@@ -23,4 +23,20 @@ describe('index', () => {
})
})
})

describe('_visit()', () => {
context('when multiple types are given', () => {
it('throws an error', () => {
assert.throws(() => {
const node = {
type: [
'string',
'boolean'
]
}
converter._visit('test', node)
}, /union type not supported/)
})
})
})
})
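
For reference, given the node in the new test above, the message produced by the template literal added in src/converter.js should read roughly as follows (shown for illustration, derived from `JSON.stringify(node, null, 2)`):

```
union type not supported:
{
  "type": [
    "string",
    "boolean"
  ]
}
```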
