@naturalcycles/common-type
Comparing version 1.3.0 to 1.4.0
@@ -0,1 +1,8 @@ | ||
# [1.4.0](https://github.com/NaturalCycles/common-type/compare/v1.3.0...v1.4.0) (2021-08-08) | ||
### Features | ||
* support Partial and Required types ([54bd357](https://github.com/NaturalCycles/common-type/commit/54bd35737daac37037cff410f878940a5cc52e3a)) | ||
# [1.3.0](https://github.com/NaturalCycles/common-type/compare/v1.2.0...v1.3.0) (2021-08-07) | ||
@@ -2,0 +9,0 @@ |
import { CommonTypeCfg } from './commonTypeCfg'; | ||
import { JsonSchema } from './tsToJsonSchema'; | ||
import { JsonSchema } from './model'; | ||
export declare function commonTypeGenerate(cfg: CommonTypeCfg): Promise<void>; | ||
export declare function generateSchemasFromFilePaths(filePaths: string[]): JsonSchema[]; |
@@ -23,23 +23,7 @@ "use strict"; | ||
} | ||
const schemaMap = {}; | ||
let errors = 0; | ||
files.forEach(filePath => { | ||
try { | ||
const fileString = fs.readFileSync(filePath, 'utf8'); | ||
const schemas = tsToJsonSchema_1.tsFileToJsonSchemas(fileString, filePath); | ||
console.log(`${filePath}: ${schemas.length} schema(s) generated`); | ||
schemas.forEach(s => { | ||
if (schemaMap[s.$id]) { | ||
console.warn(`!!! ${s.$id} duplicated in ${filePath}, it will override previous schema with same $id`); | ||
} | ||
schemaMap[s.$id] = s; | ||
}); | ||
} | ||
catch (err) { | ||
errors++; | ||
console.log(`${filePath} ts parse error:`, err); | ||
} | ||
}); | ||
console.log(`${Object.keys(schemaMap).length} schema(s) generated, ${errors} errors`); | ||
// todo: process include/exclude types | ||
const schemaMap = js_lib_1._by(tsToJsonSchema_1.tsFilesToJsonSchemas(files.map(fileName => ({ | ||
fileName, | ||
fileString: fs.readFileSync(fileName, 'utf8'), | ||
}))), s => s.$id); | ||
console.log(`${Object.keys(schemaMap).length} schema(s) generated`); | ||
if (includeSchemas?.length) { | ||
@@ -89,9 +73,7 @@ const includeRegexes = includeSchemas.map(s => new RegExp(s)); | ||
function generateSchemasFromFilePaths(filePaths) { | ||
const schemas = []; | ||
filePaths.forEach(filePath => { | ||
const fileString = fs.readFileSync(filePath, 'utf8'); | ||
schemas.push(...tsToJsonSchema_1.tsFileToJsonSchemas(fileString, filePath)); | ||
}); | ||
return schemas; | ||
return tsToJsonSchema_1.tsFilesToJsonSchemas(filePaths.map(fileName => ({ | ||
fileName, | ||
fileString: fs.readFileSync(fileName, 'utf8'), | ||
}))); | ||
} | ||
exports.generateSchemasFromFilePaths = generateSchemasFromFilePaths; |
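The refactor above drops the per-file loop (with its own try/catch and duplicate-`$id` warning) in favour of a single `tsFilesToJsonSchemas` call whose result is keyed by `$id` with `_by` from `@naturalcycles/js-lib`. A minimal sketch of the keying behaviour being relied on, inferred from how `_by` and `Object.keys(schemaMap)` are used here rather than from the library itself:

```ts
// Illustration only, not the @naturalcycles/js-lib implementation of _by.
function by<T>(items: T[], keyFn: (item: T) => string): Record<string, T> {
  const map: Record<string, T> = {}
  for (const item of items) {
    // A later item with the same key overwrites the earlier one, which is why
    // tsFilesToJsonSchemas itself now warns about duplicated $id.
    map[keyFn(item)] = item
  }
  return map
}

// e.g. by(schemas, s => s.$id!) yields a { [$id]: schema } map,
// so Object.keys(schemaMap).length counts distinct $ids.
```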
@@ -1,67 +0,9 @@ | ||
import { StringMap } from '@naturalcycles/js-lib'; | ||
import * as ts from 'typescript'; | ||
export interface BaseJsonSchema { | ||
$schema?: string; | ||
$id?: string; | ||
title?: string; | ||
description?: string; | ||
/** | ||
* This is a temporary "intermediate AST" field that is used inside the parser. | ||
* In the final schema this field will NOT be present. | ||
*/ | ||
requiredField?: boolean; | ||
} | ||
export interface CombinationJsonSchema extends BaseJsonSchema { | ||
oneOf?: JsonSchema[]; | ||
allOf?: JsonSchema[]; | ||
anyOf?: JsonSchema[]; | ||
not?: JsonSchema; | ||
} | ||
export interface ConstJsonSchema extends BaseJsonSchema { | ||
const: string | number | boolean; | ||
} | ||
export interface StringJsonSchema extends BaseJsonSchema { | ||
type: 'string'; | ||
pattern?: string; | ||
} | ||
export interface NumberJsonSchema extends BaseJsonSchema { | ||
type: 'number'; | ||
} | ||
export interface IntegerJsonSchema extends BaseJsonSchema { | ||
type: 'integer'; | ||
} | ||
export interface BooleanJsonSchema extends BaseJsonSchema { | ||
type: 'boolean'; | ||
} | ||
export interface NullJsonSchema extends BaseJsonSchema { | ||
type: 'null'; | ||
} | ||
export interface EnumJsonSchema extends BaseJsonSchema { | ||
enum: (string | number)[]; | ||
} | ||
export interface RefJsonSchema extends BaseJsonSchema { | ||
$ref: string; | ||
} | ||
export interface ObjectJsonSchema extends BaseJsonSchema { | ||
type: 'object'; | ||
properties?: StringMap<JsonSchema>; | ||
required: string[]; | ||
additionalProperties: boolean; | ||
patternProperties?: StringMap<JsonSchema>; | ||
propertyNames?: JsonSchema; | ||
} | ||
export interface ArrayJsonSchema extends BaseJsonSchema { | ||
type: 'array'; | ||
items: JsonSchema; | ||
minItems?: number; | ||
maxItems?: number; | ||
} | ||
export interface TupleJsonSchema extends BaseJsonSchema { | ||
type: 'array'; | ||
items: JsonSchema[]; | ||
minItems: number; | ||
maxItems: number; | ||
} | ||
export declare type JsonSchema = BaseJsonSchema | CombinationJsonSchema | RefJsonSchema | ConstJsonSchema | EnumJsonSchema | StringJsonSchema | NumberJsonSchema | IntegerJsonSchema | BooleanJsonSchema | NullJsonSchema | ObjectJsonSchema | ArrayJsonSchema | TupleJsonSchema; | ||
export declare function tsFileToJsonSchemas(fileString: string, fileName?: string): JsonSchema[]; | ||
export declare function nodeToJsonSchema(n: ts.Node, file: ts.SourceFile): JsonSchema | undefined; | ||
import { JsonSchema } from './model'; | ||
/** | ||
* It accepts multiple files so that multiple passes can be made, | ||
* which is needed to generate "by-product schemas" such as *Partial, *Required. | ||
*/ | ||
export declare function tsFilesToJsonSchemas(files: { | ||
fileString: string; | ||
fileName: string; | ||
}[]): JsonSchema[]; |
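The doc comment above explains why the generator now takes all files at once: a first pass can register names like `UserPartial`, and a second pass then emits those by-product schemas. A minimal usage sketch under that reading (the `User`/`UserSettings` types are hypothetical, and the import path assumes the function is re-exported from the package entry point):

```ts
import { tsFilesToJsonSchemas } from '@naturalcycles/common-type'

// Hypothetical input, passed in as strings (fileName is used for the ts.SourceFile name
// and in error messages).
const files = [
  {
    fileName: 'user.ts',
    fileString: `
export interface User {
  id: string
  name: string
}

export interface UserSettings {
  overrides: Partial<User>
}
`,
  },
]

const schemas = tsFilesToJsonSchemas(files)
// Expect object schemas for User and UserSettings, plus a by-product UserPartial schema
// (a copy of User with required: []), each stamped with
// $schema: 'http://json-schema.org/draft-07/schema#'.
```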
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.nodeToJsonSchema = exports.tsFileToJsonSchemas = void 0; | ||
exports.tsFilesToJsonSchemas = void 0; | ||
const js_lib_1 = require("@naturalcycles/js-lib"); | ||
@@ -8,233 +8,295 @@ const ts = require("typescript"); | ||
const $schema = 'http://json-schema.org/draft-07/schema#'; | ||
function tsFileToJsonSchemas(fileString, fileName = 'anonymousFileName') { | ||
const schemas = tsFileToJsonSchemasPass(fileString, fileName); | ||
// todo: Find all Partial/Required schemas | ||
return schemas; | ||
/** | ||
* It accepts multiple files so that multiple passes can be made, | ||
* which is needed to generate "by-product schemas" such as *Partial, *Required. | ||
*/ | ||
function tsFilesToJsonSchemas(files) { | ||
const schemaMap = {}; | ||
const secondPassSchemas = []; | ||
files.forEach(f => { | ||
try { | ||
const g = new TSToJSONSchemaGenerator(); | ||
const { schemas, secondPassSchemas: newSecondPassSchemas } = g.run(f.fileString, f.fileName); | ||
secondPassSchemas.push(...newSecondPassSchemas); | ||
schemas.forEach(s => { | ||
if (schemaMap[s.$id]) { | ||
console.warn(`!!! ${s.$id} duplicated in ${f.fileName}, it will override previous schema with same $id`); | ||
} | ||
schemaMap[s.$id] = s; | ||
}); | ||
} | ||
catch (err) { | ||
console.log(`${f.fileName} ts parse error:`, err); | ||
} | ||
}); | ||
// Let's do a second pass to process Partial/Required schemas | ||
secondPassSchemas | ||
.filter(s => s.endsWith('Partial')) | ||
.forEach(name => { | ||
const originalName = js_lib_1._substringBeforeLast(name, 'Partial'); | ||
const originalSchema = schemaMap[$idToRef(originalName)]; | ||
js_lib_1._assert(originalSchema, `${originalName} schema not found to generate ${name}`); | ||
const s = { | ||
...originalSchema, | ||
$id: $idToRef(name), | ||
required: [], // Apply Partial effect | ||
}; | ||
schemaMap[s.$id] = s; | ||
}); | ||
secondPassSchemas | ||
.filter(s => s.endsWith('Required')) | ||
.forEach(name => { | ||
const originalName = js_lib_1._substringBeforeLast(name, 'Required'); | ||
const originalSchema = schemaMap[$idToRef(originalName)]; | ||
js_lib_1._assert(originalSchema, `${originalName} schema not found to generate ${name}`); | ||
const s = { | ||
...originalSchema, | ||
$id: $idToRef(name), | ||
}; | ||
// Apply Required effect | ||
s.required = Object.keys(s.properties || {}); | ||
schemaMap[s.$id] = s; | ||
}); | ||
return js_lib_1._stringMapValues(schemaMap); | ||
} | ||
exports.tsFileToJsonSchemas = tsFileToJsonSchemas; | ||
function tsFileToJsonSchemasPass(fileString, fileName = 'anonymousFileName') { | ||
const file = ts.createSourceFile(fileName, fileString, ts.ScriptTarget.Latest); | ||
const schemas = []; | ||
file.forEachChild(n => { | ||
if (ts.isInterfaceDeclaration(n) || ts.isClassDeclaration(n)) { | ||
const props = n.members.map(n => nodeToJsonSchema(n, file)).filter(Boolean); | ||
const s = { | ||
$id: $idToRef(n.name.text), | ||
type: 'object', | ||
properties: Object.fromEntries(props.map(p => [p.$id, js_lib_1._omit(p, ['$id', 'requiredField'])])), | ||
required: props.filter(p => p.requiredField).map(p => p.$id), | ||
additionalProperties: false, | ||
...parseJsdoc(n), | ||
}; | ||
if (n.heritageClauses?.length) { | ||
const otherSchemas = n.heritageClauses[0].types.map(tt => tt.expression.text) | ||
.filter(Boolean) | ||
.filter(id => !ignoreRefs.includes(id)) | ||
.map($id => ({ $ref: $idToRef($id) })); | ||
if (!otherSchemas.length) { | ||
schemas.push(s); | ||
exports.tsFilesToJsonSchemas = tsFilesToJsonSchemas; | ||
/** | ||
* It is implemented as a class because internal state is needed, | ||
* e.g. to produce "by-product schemas" such as *Partial, *Required. | ||
*/ | ||
class TSToJSONSchemaGenerator { | ||
constructor() { | ||
this.secondPassSchemas = new Set(); | ||
} | ||
run(fileString, fileName) { | ||
this.file = ts.createSourceFile(fileName, fileString, ts.ScriptTarget.Latest); | ||
const schemas = []; | ||
this.file.forEachChild(n => { | ||
if (ts.isInterfaceDeclaration(n) || ts.isClassDeclaration(n)) { | ||
const props = n.members.map(n => this.nodeToJsonSchema(n)).filter(Boolean); | ||
const s = { | ||
$id: $idToRef(n.name.text), | ||
type: 'object', | ||
properties: Object.fromEntries(props.map(p => [p.$id, js_lib_1._omit(p, ['$id', 'requiredField'])])), | ||
required: props.filter(p => p.requiredField).map(p => p.$id), | ||
additionalProperties: false, | ||
...parseJsdoc(n), | ||
}; | ||
if (n.heritageClauses?.length) { | ||
const otherSchemas = n.heritageClauses[0].types.map(tt => tt.expression.text) | ||
.filter(Boolean) | ||
.filter(id => !ignoreRefs.includes(id)) | ||
.map($id => ({ $ref: $idToRef($id) })); | ||
if (!otherSchemas.length) { | ||
schemas.push(s); | ||
} | ||
else { | ||
schemas.push({ | ||
$id: s.$id, | ||
allOf: [js_lib_1._omit(s, ['$id']), ...otherSchemas], | ||
}); | ||
} | ||
} | ||
else { | ||
schemas.push({ | ||
$id: s.$id, | ||
allOf: [js_lib_1._omit(s, ['$id']), ...otherSchemas], | ||
}); | ||
schemas.push(s); | ||
} | ||
} | ||
else { | ||
else if (ts.isTypeAliasDeclaration(n)) { | ||
const s = { | ||
$id: $idToRef(n.name.text), | ||
...this.typeNodeToJsonSchema(n.type), | ||
...parseJsdoc(n), | ||
}; | ||
schemas.push(s); | ||
} | ||
} | ||
else if (ts.isTypeAliasDeclaration(n)) { | ||
const s = { | ||
$id: $idToRef(n.name.text), | ||
...typeNodeToJsonSchema(n.type, file), | ||
...parseJsdoc(n), | ||
}; | ||
schemas.push(s); | ||
} | ||
else if (ts.isEnumDeclaration(n)) { | ||
const enumItems = n.members.map(m => { | ||
js_lib_1._assert(ts.isIdentifier(m.name), `enum name !isIdentifier`); | ||
const k = m.name.text; | ||
js_lib_1._assert(m.initializer, `no enum initializer! ${k}`); | ||
let v; | ||
if (ts.isNumericLiteral(m.initializer) || ts.isPrefixUnaryExpression(m.initializer)) { | ||
v = Number(m.initializer.getFullText(file)); | ||
} | ||
else if (ts.isStringLiteral(m.initializer)) { | ||
v = m.initializer.text; | ||
} | ||
else { | ||
console.log(m.initializer); | ||
throw new Error(`unknown enum initializer type`); | ||
} | ||
return { | ||
k, | ||
v, | ||
}; | ||
}); | ||
schemas.push({ | ||
$id: $idToRef(n.name.text), | ||
// We currently only include values | ||
enum: enumItems.map(e => e.v), | ||
}); | ||
} | ||
}); | ||
return schemas.map(s => ({ | ||
$schema, | ||
...s, | ||
})); | ||
} | ||
// Here we should get: name of the property, required-ness | ||
function nodeToJsonSchema(n, file) { | ||
// Find PropertySignature (kind 163) | ||
if ((!ts.isPropertySignature(n) && !ts.isPropertyDeclaration(n) && !ts.isGetAccessor(n)) || | ||
!n.type) | ||
return; | ||
const schema = typeNodeToJsonSchema(n.type, file); | ||
if (!n.questionToken) | ||
schema.requiredField = true; | ||
if (n.name.text !== undefined) { | ||
schema.$id = n.name.text; | ||
} | ||
// console.log(schema) | ||
Object.assign(schema, parseJsdoc(n)); | ||
return schema; | ||
} | ||
exports.nodeToJsonSchema = nodeToJsonSchema; | ||
// Here we should get "type" of jsonSchema | ||
function typeNodeToJsonSchema(type, file) { | ||
if (type.kind === ts.SyntaxKind.StringKeyword) { | ||
// todo: extra properties? | ||
return { type: 'string' }; | ||
} | ||
else if (type.kind === ts.SyntaxKind.NumberKeyword) { | ||
return { type: 'number' }; | ||
} | ||
else if (type.kind === ts.SyntaxKind.BooleanKeyword) { | ||
return { type: 'boolean' }; | ||
} | ||
// Union type (A | B) | ||
if (ts.isUnionTypeNode(type)) { | ||
else if (ts.isEnumDeclaration(n)) { | ||
const enumItems = n.members.map(m => { | ||
js_lib_1._assert(ts.isIdentifier(m.name), `enum name !isIdentifier`); | ||
const k = m.name.text; | ||
js_lib_1._assert(m.initializer, `no enum initializer! ${k}`); | ||
let v; | ||
if (ts.isNumericLiteral(m.initializer) || ts.isPrefixUnaryExpression(m.initializer)) { | ||
v = Number(m.initializer.getFullText(this.file)); | ||
} | ||
else if (ts.isStringLiteral(m.initializer)) { | ||
v = m.initializer.text; | ||
} | ||
else { | ||
console.log(m.initializer); | ||
throw new Error(`unknown enum initializer type`); | ||
} | ||
return { | ||
k, | ||
v, | ||
}; | ||
}); | ||
schemas.push({ | ||
$id: $idToRef(n.name.text), | ||
// We currently only include values | ||
enum: enumItems.map(e => e.v), | ||
}); | ||
} | ||
}); | ||
return { | ||
oneOf: type.types.map(t => typeNodeToJsonSchema(t, file)), | ||
schemas: schemas.map(s => ({ | ||
$schema, | ||
...s, | ||
})), | ||
secondPassSchemas: [...this.secondPassSchemas], | ||
}; | ||
} | ||
// Intersection type (A & B) | ||
if (ts.isIntersectionTypeNode(type)) { | ||
return { | ||
allOf: type.types.map(t => typeNodeToJsonSchema(t, file)), | ||
}; | ||
nodeToJsonSchema(n) { | ||
// Find PropertySignature (kind 163) | ||
if ((!ts.isPropertySignature(n) && !ts.isPropertyDeclaration(n) && !ts.isGetAccessor(n)) || | ||
!n.type) { | ||
return; | ||
} | ||
const schema = this.typeNodeToJsonSchema(n.type); | ||
if (!n.questionToken) | ||
schema.requiredField = true; | ||
if (n.name.text !== undefined) { | ||
schema.$id = n.name.text; | ||
} | ||
// console.log(schema) | ||
Object.assign(schema, parseJsdoc(n)); | ||
return schema; | ||
} | ||
// Parenthesized type | ||
if (ts.isParenthesizedTypeNode(type)) { | ||
return typeNodeToJsonSchema(type.type, file); | ||
} | ||
// Array type | ||
if (ts.isArrayTypeNode(type)) { | ||
return { | ||
type: 'array', | ||
items: typeNodeToJsonSchema(type.elementType, file), | ||
}; | ||
} | ||
// Tuple type | ||
if (ts.isTupleTypeNode(type)) { | ||
const items = type.elements.map(n => typeNodeToJsonSchema(n, file)); | ||
return { | ||
type: 'array', | ||
items, | ||
minItems: items.length, | ||
maxItems: items.length, | ||
}; | ||
} | ||
// Object type (literal) | ||
if (ts.isTypeLiteralNode(type)) { | ||
const props = type.members.map(n => nodeToJsonSchema(n, file)).filter(Boolean); | ||
return { | ||
type: 'object', | ||
properties: Object.fromEntries(props.map(p => [p.$id, js_lib_1._omit(p, ['$id', 'requiredField'])])), | ||
required: props.filter(p => p.requiredField).map(p => p.$id), | ||
additionalProperties: false, | ||
}; | ||
} | ||
// Object type (reference) | ||
// Can also be Partial<T>, Required<T>, etc | ||
if (ts.isTypeReferenceNode(type)) { | ||
// Can be StringMap | ||
const typeName = type.typeName.text; | ||
if (typeName === 'StringMap') { | ||
const valueType = type.typeArguments?.length | ||
? typeNodeToJsonSchema(type.typeArguments[0], file) | ||
: { type: 'string' }; | ||
// Here we should get "type" of jsonSchema | ||
typeNodeToJsonSchema(type) { | ||
if (type.kind === ts.SyntaxKind.StringKeyword) { | ||
// todo: extra properties? | ||
return { type: 'string' }; | ||
} | ||
else if (type.kind === ts.SyntaxKind.NumberKeyword) { | ||
return { type: 'number' }; | ||
} | ||
else if (type.kind === ts.SyntaxKind.BooleanKeyword) { | ||
return { type: 'boolean' }; | ||
} | ||
// Union type (A | B) | ||
if (ts.isUnionTypeNode(type)) { | ||
return { | ||
type: 'object', | ||
additionalProperties: false, | ||
required: [], | ||
patternProperties: { | ||
'.*': valueType, | ||
}, | ||
oneOf: type.types.map(t => this.typeNodeToJsonSchema(t)), | ||
}; | ||
} | ||
if (typeName === 'Partial') { | ||
const valueType = type.typeArguments[0]; | ||
const s = typeNodeToJsonSchema(valueType, file); | ||
js_lib_1._assert(s.$ref, 'We only support Partial for $ref schemas'); | ||
s.$ref = $idToRef($refToId(s.$ref) + 'Partial'); | ||
// todo: need to generate ${x}Partial schema in 2nd pass | ||
return s; | ||
// Intersection type (A & B) | ||
if (ts.isIntersectionTypeNode(type)) { | ||
return { | ||
allOf: type.types.map(t => this.typeNodeToJsonSchema(t)), | ||
}; | ||
} | ||
if (typeName === 'Required') { | ||
const valueType = type.typeArguments[0]; | ||
const s = typeNodeToJsonSchema(valueType, file); | ||
js_lib_1._assert(s.$ref, 'We only support Required for $ref schemas'); | ||
s.$ref = $idToRef($refToId(s.$ref) + 'Required'); | ||
// todo: need to generate ${x}Required schema in 2nd pass | ||
return s; | ||
// Parenthesized type | ||
if (ts.isParenthesizedTypeNode(type)) { | ||
return this.typeNodeToJsonSchema(type.type); | ||
} | ||
return { | ||
$ref: $idToRef(typeName), | ||
}; | ||
} | ||
// Literal type | ||
if (ts.isLiteralTypeNode(type)) { | ||
// e.g `someType: 'literal string'` | ||
if (ts.isStringLiteral(type.literal)) { | ||
// Array type | ||
if (ts.isArrayTypeNode(type)) { | ||
return { | ||
const: type.literal.text, | ||
type: 'array', | ||
items: this.typeNodeToJsonSchema(type.elementType), | ||
}; | ||
} | ||
else if (ts.isNumericLiteral(type.literal)) { | ||
// Tuple type | ||
if (ts.isTupleTypeNode(type)) { | ||
const items = type.elements.map(n => this.typeNodeToJsonSchema(n)); | ||
return { | ||
const: Number(type.literal.text), | ||
type: 'array', | ||
items, | ||
minItems: items.length, | ||
maxItems: items.length, | ||
}; | ||
} | ||
else if (type.literal.kind === ts.SyntaxKind.TrueKeyword || | ||
type.literal.kind === ts.SyntaxKind.FalseKeyword) { | ||
// Object type (literal) | ||
if (ts.isTypeLiteralNode(type)) { | ||
const props = type.members.map(n => this.nodeToJsonSchema(n)).filter(Boolean); | ||
return { | ||
const: Boolean(type.literal.getText(file)), | ||
type: 'object', | ||
properties: Object.fromEntries(props.map(p => [p.$id, js_lib_1._omit(p, ['$id', 'requiredField'])])), | ||
required: props.filter(p => p.requiredField).map(p => p.$id), | ||
additionalProperties: false, | ||
}; | ||
} | ||
else if (type.literal.kind === ts.SyntaxKind.NullKeyword) { | ||
// Object type (reference) | ||
// Can also be Partial<T>, Required<T>, etc | ||
if (ts.isTypeReferenceNode(type)) { | ||
// Can be StringMap | ||
const typeName = type.typeName.text; | ||
if (typeName === 'StringMap') { | ||
const valueType = type.typeArguments?.length | ||
? this.typeNodeToJsonSchema(type.typeArguments[0]) | ||
: { type: 'string' }; | ||
return { | ||
type: 'object', | ||
additionalProperties: false, | ||
required: [], | ||
patternProperties: { | ||
'.*': valueType, | ||
}, | ||
}; | ||
} | ||
if (typeName === 'Partial') { | ||
const valueType = type.typeArguments[0]; | ||
const s = this.typeNodeToJsonSchema(valueType); | ||
js_lib_1._assert(s.$ref, 'We only support Partial for $ref schemas'); | ||
const partialSchemaName = $refToId(s.$ref) + 'Partial'; | ||
this.secondPassSchemas.add(partialSchemaName); | ||
s.$ref = $idToRef(partialSchemaName); | ||
return s; | ||
} | ||
if (typeName === 'Required') { | ||
const valueType = type.typeArguments[0]; | ||
const s = this.typeNodeToJsonSchema(valueType); | ||
js_lib_1._assert(s.$ref, 'We only support Required for $ref schemas'); | ||
const requiredSchemaName = $refToId(s.$ref) + 'Required'; | ||
this.secondPassSchemas.add(requiredSchemaName); | ||
s.$ref = $idToRef(requiredSchemaName); | ||
return s; | ||
} | ||
return { | ||
type: 'null', | ||
$ref: $idToRef(typeName), | ||
}; | ||
} | ||
else { | ||
console.log(`unknown literal type`, type.literal); | ||
throw new Error(`unknown literal type (see above)`); | ||
// Literal type | ||
if (ts.isLiteralTypeNode(type)) { | ||
// e.g `someType: 'literal string'` | ||
if (ts.isStringLiteral(type.literal)) { | ||
return { | ||
const: type.literal.text, | ||
}; | ||
} | ||
else if (ts.isNumericLiteral(type.literal)) { | ||
return { | ||
const: Number(type.literal.text), | ||
}; | ||
} | ||
else if (type.literal.kind === ts.SyntaxKind.TrueKeyword || | ||
type.literal.kind === ts.SyntaxKind.FalseKeyword) { | ||
return { | ||
const: Boolean(type.literal.getText(this.file)), | ||
}; | ||
} | ||
else if (type.literal.kind === ts.SyntaxKind.NullKeyword) { | ||
return { | ||
type: 'null', | ||
}; | ||
} | ||
else { | ||
console.log(`unknown literal type`, type.literal); | ||
throw new Error(`unknown literal type (see above)`); | ||
} | ||
} | ||
// any | ||
if (type.kind === ts.SyntaxKind.AnyKeyword) { | ||
return { | ||
// description: 'any', | ||
}; // schema matching "anything" | ||
} | ||
console.log(type); | ||
try { | ||
console.log(type.getFullText(this.file)); | ||
} | ||
catch { } | ||
throw new Error(`unknown type kind: ${type.kind}`); | ||
} | ||
// any | ||
if (type.kind === ts.SyntaxKind.AnyKeyword) { | ||
return { | ||
// description: 'any', | ||
}; // schema matching "anything" | ||
} | ||
console.log(type); | ||
try { | ||
console.log(type.getFullText(file)); | ||
} | ||
catch { } | ||
throw new Error(`unknown type kind: ${type.kind}`); | ||
} | ||
@@ -241,0 +303,0 @@ function parseJsdoc(n) { |
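`parseJsdoc` itself is unchanged in this release (the hunk header above is context only). The README further down lists jsdoc `@validationType` (e.g. `integer`) among the supported annotations; a hypothetical property using it could look like this (which tags `parseJsdoc` recognises, and what exactly `Object.assign(schema, parseJsdoc(n))` merges in, is not shown in this diff):

```ts
export interface Person {
  /**
   * Age in whole years.
   * @validationType integer
   */
  age: number
}
```

Presumably this overrides the inferred `number` schema with `integer` (the model above does define an `IntegerJsonSchema`), but that mapping is an inference from the README, not something this diff demonstrates.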
@@ -5,3 +5,3 @@ { | ||
"prepare": "husky install", | ||
"commonType-debug": "tsn src/bin/common-type" | ||
"debug": "tsn src/bin/common-type" | ||
}, | ||
@@ -41,3 +41,3 @@ "dependencies": { | ||
}, | ||
"version": "1.3.0", | ||
"version": "1.4.0", | ||
"description": "Common Type interface and generator", | ||
@@ -44,0 +44,0 @@ "author": "Natural Cycles Team", |
@@ -6,3 +6,2 @@ ## @naturalcycles/common-type | ||
[![npm](https://img.shields.io/npm/v/@naturalcycles/common-type/latest.svg)](https://www.npmjs.com/package/@naturalcycles/common-type) | ||
[![min.gz size](https://badgen.net/bundlephobia/minzip/@naturalcycles/common-type)](https://bundlephobia.com/result?p=@naturalcycles/common-type) | ||
[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier) | ||
@@ -33,2 +32,7 @@ | ||
## Similar projects | ||
- https://github.com/YousefED/typescript-json-schema | ||
- https://github.com/vega/ts-json-schema-generator | ||
## Develop | ||
@@ -38,15 +42,24 @@ | ||
## todo | ||
## Supported | ||
- [x] parse multiple files, "cross-link" them | ||
- [x] Link to schemas instead of inlining them (let ajv compile/combine them for us) | ||
- Schemas from: | ||
  - Interfaces | ||
  - Types | ||
  - Enums | ||
  - Classes | ||
- `string`, `number`, `boolean` | ||
- Literal types, e.g. `'someString'`, `15`, `true` | ||
- `null` type | ||
- `object`, `array` | ||
- `tuple` | ||
- `enum` (`string` and `number` values) | ||
- optional/required properties | ||
- Union types (`|`), Intersections (`&`), `extends` | ||
- `StringMap` (aka Dictionary) | ||
- jsdoc `@validationType` (e.g. `integer`) | ||
- Type references, e.g. `p: Person` (where `Person` is defined elsewhere, possibly in another file) | ||
- Parsing a list of files (all types don't have to live in a single file) | ||
- `Partial`, `Required` | ||
- [x] null type | ||
- [x] union types support | ||
- [x] nullable union types support | ||
- [x] array of union types | ||
- [x] tuple type | ||
- [x] StringMap support | ||
- [x] fix dataFlags bug | ||
- [x] includeTypes/excludeTypes in cfg | ||
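Putting several of the supported items above together: a hypothetical interface and the approximate object schema the generator builds for it, following the `type: 'object'` / `required` / `additionalProperties: false` construction in the code earlier in this diff (the exact `$id`/`$ref` strings come from an `$idToRef` helper that is not shown here):

```ts
// Hypothetical input
export interface Person {
  name: string
  nickname?: string
  tags: string[]
}

// Approximate output
const personSchema = {
  $schema: 'http://json-schema.org/draft-07/schema#',
  type: 'object',
  properties: {
    name: { type: 'string' },
    nickname: { type: 'string' },
    tags: { type: 'array', items: { type: 'string' } },
  },
  required: ['name', 'tags'], // the optional `nickname?` stays out of required
  additionalProperties: false,
}
```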
## todo | ||
@@ -63,11 +76,8 @@ Non-structural validation: | ||
`Partial`, `Required`, `Omit`, `Pick` | ||
Generic interfaces e.g: | ||
```ts | ||
interface MyType<T> { | ||
current: T | ||
future: T | ||
} | ||
``` | ||
- `Record<A, B>` | ||
- Indexed properties (`{ [name: string]: string }`) | ||
- `Omit`, `Pick` | ||
- Generic interfaces, e.g. `interface MyType<T> { current: T, future: T }` | ||
- `typeof` | ||
- `keyof` | ||
- Conditional types |
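One todo item above already has a close, supported substitute: while `Record<string, X>` is still unsupported, the generator maps `StringMap<X>` from `@naturalcycles/js-lib` to an object schema with `patternProperties`. A sketch (the `Settings` interface is hypothetical):

```ts
import { StringMap } from '@naturalcycles/js-lib'

export interface Settings {
  // Record<string, boolean> is still on the todo list above; StringMap is supported today.
  // Per the generator code this becomes:
  // { type: 'object', additionalProperties: false, required: [],
  //   patternProperties: { '.*': { type: 'boolean' } } }
  flagsByName: StringMap<boolean>
}
```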
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package