@stencila/dockter
Advanced tools
Comparing version 0.11.0 to 0.11.1
@@ -13,2 +13,3 @@ "use strict"; | ||
const zlib_1 = __importDefault(require("zlib")); | ||
const ndjson = require('ndjson'); | ||
/** | ||
@@ -108,4 +109,3 @@ * Builds Docker images from Dockerfiles | ||
let id; | ||
stream.on('data', data => { | ||
data = JSON.parse(data); | ||
stream.pipe(ndjson.parse()).on('data', (data) => { | ||
if (data.error) { | ||
@@ -181,3 +181,2 @@ messages.push({ | ||
const copy = instruction.args; | ||
console.error(step, instruction.name, copy.join(' ')); | ||
const to = copy.pop(); | ||
@@ -201,3 +200,2 @@ const pack = tar_fs_1.default.pack(dir, { | ||
const script = instruction.args; | ||
console.error(step, 'RUN', script); | ||
const exec = await container.exec({ | ||
@@ -204,0 +202,0 @@ Cmd: ['bash', '-c', `${script}`], |
@@ -58,3 +58,3 @@ "use strict"; | ||
try { | ||
value = await node_persist_1.default.getItem(url); | ||
value = false; // await persist.getItem(url) | ||
} | ||
@@ -61,0 +61,0 @@ catch (error) { |
@@ -64,3 +64,2 @@ "use strict"; | ||
const pkg = new schema_1.SoftwarePackage(); | ||
// TODO: populate properties based on CodeMeta crosswalk (see note above) | ||
// schema:Thing | ||
@@ -72,2 +71,27 @@ pkg.name = data.name; | ||
pkg.runtimePlatform = 'Node.js'; | ||
pkg.license = data.license; | ||
pkg.description = data.description; | ||
if (data.author) { | ||
if (typeof data.author === 'string') { | ||
pkg.authors = [schema_1.Person.fromText(data.author)]; | ||
} | ||
else { | ||
let authorStr = ''; | ||
if (data.author.name) | ||
authorStr = data.author.name; | ||
if (data.author.email) | ||
authorStr += ` <${data.author.email}>`; | ||
if (data.author.url) | ||
authorStr += ` (${data.author.url})`; | ||
pkg.authors = [schema_1.Person.fromText(authorStr)]; | ||
} | ||
} | ||
if (data.repository) { | ||
if (typeof data.repository === 'string') { | ||
pkg.codeRepository = data.repository; | ||
} | ||
else { | ||
pkg.codeRepository = data.repository.url; | ||
} | ||
} | ||
// stencila:SoftwarePackage | ||
@@ -74,0 +98,0 @@ if (data.dependencies) { |
{ | ||
"name": "@stencila/dockter", | ||
"version": "0.11.0", | ||
"version": "0.11.1", | ||
"description": "A Docker image builder for researchers", | ||
@@ -85,2 +85,3 @@ "main": "dist/index.js", | ||
"js-yaml": "^3.12.0", | ||
"ndjson": "^1.5.0", | ||
"node-persist": "^3.0.1", | ||
@@ -87,0 +88,0 @@ "rimraf": "^2.6.2", |
@@ -9,2 +9,14 @@ import crypto from 'crypto' | ||
const ndjson = require('ndjson') | ||
interface DockerMessageAux { | ||
ID?: string | ||
} | ||
interface DockerMessage { | ||
error?: string | ||
stream?: string | ||
aux?: DockerMessageAux | ||
} | ||
/** | ||
@@ -112,4 +124,3 @@ * Builds Docker images from Dockerfiles | ||
let id: string | ||
stream.on('data', data => { | ||
data = JSON.parse(data) | ||
stream.pipe(ndjson.parse()).on('data', (data: DockerMessage) => { | ||
if (data.error) { | ||
@@ -184,3 +195,2 @@ messages.push({ | ||
const copy = instruction.args as Array<string> | ||
console.error(step, instruction.name, copy.join(' ')) | ||
const to = copy.pop() as string | ||
@@ -205,3 +215,2 @@ const pack = tarFs.pack(dir, { | ||
const script = instruction.args as string | ||
console.error(step, 'RUN', script) | ||
const exec = await container.exec({ | ||
@@ -208,0 +217,0 @@ Cmd: ['bash', '-c', `${script}`], |
@@ -66,3 +66,3 @@ import fs from 'fs' | ||
try { | ||
value = await persist.getItem(url) | ||
value = false // await persist.getItem(url) | ||
} catch (error) { | ||
@@ -69,0 +69,0 @@ if (error.message.includes('does not look like a valid storage file')) { |
@@ -63,4 +63,2 @@ // @ts-ignore | ||
// TODO: populate properties based on CodeMeta crosswalk (see note above) | ||
// schema:Thing | ||
@@ -75,37 +73,61 @@ pkg.name = data.name | ||
pkg.license = data.license | ||
pkg.description = data.description | ||
if (data.author) { | ||
if (typeof data.author === 'string') { | ||
pkg.authors = [Person.fromText(data.author)] | ||
} else { | ||
let authorStr = '' | ||
if (data.author.name) authorStr = data.author.name | ||
if (data.author.email) authorStr += ` <${data.author.email}>` | ||
if (data.author.url) authorStr += ` (${data.author.url})` | ||
pkg.authors = [Person.fromText(authorStr)] | ||
} | ||
} | ||
if (data.repository) { | ||
if (typeof data.repository === 'string') { | ||
pkg.codeRepository = data.repository | ||
} else { | ||
pkg.codeRepository = data.repository.url | ||
} | ||
} | ||
// stencila:SoftwarePackage | ||
if (data.dependencies) { | ||
pkg.softwareRequirements = await Promise.all( | ||
Object.entries(data.dependencies).map(async ([name, versionRange]) => { | ||
// Determine the minimum version that satisfies the range specified in the | ||
// If we can't determine a minimum version from the versionRange | ||
// (e.g. because it's a github url) then try to get latest | ||
let version = 'latest' | ||
if (versionRange !== 'latest' || versionRange !== '*') { | ||
const range = semver.validRange(versionRange as string) | ||
if (range) { | ||
const match = range.match(/(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)/) | ||
if (match) version = match[0] | ||
Object.entries(data.dependencies).map(async ([name, versionRange]) => { | ||
// Determine the minimum version that satisfies the range specified in the | ||
// If we can't determine a minimum version from the versionRange | ||
// (e.g. because it's a github url) then try to get latest | ||
let version = 'latest' | ||
if (versionRange !== 'latest' || versionRange !== '*') { | ||
const range = semver.validRange(versionRange as string) | ||
if (range) { | ||
const match = range.match(/(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)/) | ||
if (match) version = match[0] | ||
} | ||
} | ||
} | ||
// Fetch meta-data from NPM | ||
const data = await this.fetch(`https://registry.npmjs.org/${name}/${version}`, { | ||
json: true, | ||
headers: { | ||
'Accept': 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' | ||
// Fetch meta-data from NPM | ||
const data = await this.fetch(`https://registry.npmjs.org/${name}/${version}`, { | ||
json: true, | ||
headers: { | ||
'Accept': 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' | ||
} | ||
}) | ||
if (data) { | ||
return this.createPackage(data) | ||
} else { | ||
// All we know is name and version, so return that | ||
const dependency = new SoftwarePackage() | ||
dependency.name = name | ||
dependency.version = versionRange as string | ||
dependency.runtimePlatform = 'Node.js' | ||
return dependency | ||
} | ||
}) | ||
if (data) { | ||
return this.createPackage(data) | ||
} else { | ||
// All we know is name and version, so return that | ||
const dependency = new SoftwarePackage() | ||
dependency.name = name | ||
dependency.version = versionRange as string | ||
dependency.runtimePlatform = 'Node.js' | ||
return dependency | ||
} | ||
}) | ||
) | ||
@@ -112,0 +134,0 @@ } |
@@ -7,3 +7,7 @@ { | ||
"scripts": {}, | ||
"author": "", | ||
"author": { | ||
"name": "Jason Bloggs", | ||
"email": "j.bloggs@example.com", | ||
"url": "https://jbloggs.example.com" | ||
}, | ||
"license": "ISC", | ||
@@ -16,3 +20,7 @@ "dependencies": { | ||
"a-package-that-is-not-on-npm": "org/repo" | ||
}, | ||
"repository": { | ||
"type": "git", | ||
"url": "https://github.com/stencila/dockter/" | ||
} | ||
} |
import fixture from './fixture' | ||
import JavascriptParser from '../src/JavascriptParser' | ||
import { SoftwarePackage } from '@stencila/schema' | ||
import { Person, SoftwarePackage } from '@stencila/schema' | ||
import fs from 'fs' | ||
import { REQUEST_CACHE_DIR } from '../src/Doer' | ||
@@ -8,58 +10,73 @@ // Increase timeout (in milliseconds) to allow for HTTP requests | ||
jest.setTimeout(30 * 60 * 1000) | ||
describe('JavascriptParser', () => { | ||
beforeEach(() => { | ||
if (fs.existsSync(REQUEST_CACHE_DIR)) { | ||
for (let item of fs.readdirSync(REQUEST_CACHE_DIR)) { | ||
try { | ||
fs.unlinkSync(REQUEST_CACHE_DIR + '/' + item) | ||
} catch (e) { | ||
// Cleanups might execute in parallel in multiple test runs so don't worry if remove fails | ||
} | ||
} | ||
} | ||
}) | ||
/** | ||
* When applied to an empty folder, parse should return null. | ||
*/ | ||
test('parse:empty', async () => { | ||
const parser = new JavascriptParser(fixture('empty')) | ||
expect(await parser.parse()).toBeNull() | ||
}) | ||
/** | ||
* When applied to an empty folder, parse should return null. | ||
*/ | ||
test('parse:empty', async () => { | ||
const parser = new JavascriptParser(fixture('empty')) | ||
expect(await parser.parse()).toBeNull() | ||
}) | ||
/** | ||
* When applied to a folder with no JS code, parse should return null. | ||
*/ | ||
test('parse:r-date', async () => { | ||
const parser = new JavascriptParser(fixture('empty')) | ||
expect(await parser.parse()).toBeNull() | ||
}) | ||
/** | ||
* When applied to a folder with no JS code, parse should return null. | ||
*/ | ||
test('parse:r-date', async () => { | ||
const parser = new JavascriptParser(fixture('empty')) | ||
expect(await parser.parse()).toBeNull() | ||
}) | ||
/** | ||
* When applied to a folder with a `package.json` file, parse should return | ||
* a `SoftwarePackage` with `name`, `softwareRequirements` etc | ||
* populated correctly. | ||
*/ | ||
test('parse:js-package', async () => { | ||
const parser = new JavascriptParser(fixture('js-package')) | ||
const pkg = await parser.parse() as SoftwarePackage | ||
expect(pkg.name).toEqual('js-package') | ||
expect(pkg.softwareRequirements.length).toEqual(5) | ||
const expecteds = [ | ||
['is-array', '1.0.1'], | ||
['mkdirp', '0.5.1'], | ||
['rimraf', '2.6.2'], | ||
['array-swap', '0.0.2'], | ||
['a-package-that-is-not-on-npm', 'org/repo'] | ||
] | ||
for (let index in expecteds) { | ||
let {name, version} = pkg.softwareRequirements[index] | ||
expect(name).toEqual(expecteds[index][0]) | ||
expect(version).toEqual(expecteds[index][1]) | ||
} | ||
}) | ||
/** | ||
* When applied to a folder with a `package.json` file, parse should return | ||
* a `SoftwarePackage` with `name`, `softwareRequirements` etc | ||
* populated correctly. | ||
*/ | ||
test('parse:js-package', async () => { | ||
const parser = new JavascriptParser(fixture('js-package')) | ||
const pkg = await parser.parse() as SoftwarePackage | ||
expect(pkg.name).toEqual('js-package') | ||
expect(pkg.license).toEqual('ISC') | ||
expect(pkg.authors).toEqual([Person.fromText('Jason Bloggs <j.bloggs@example.com> (https://jbloggs.example.com)')]) | ||
expect(pkg.codeRepository).toEqual('https://github.com/stencila/dockter/') | ||
expect(pkg.softwareRequirements.length).toEqual(5) | ||
const expecteds = [ | ||
['is-array', '1.0.1'], | ||
['mkdirp', '0.5.1'], | ||
['rimraf', '2.6.2'], | ||
['array-swap', '0.0.2'], | ||
['a-package-that-is-not-on-npm', 'org/repo'] | ||
] | ||
for (let index in expecteds) { | ||
let { name, version } = pkg.softwareRequirements[index] | ||
expect(name).toEqual(expecteds[index][0]) | ||
expect(version).toEqual(expecteds[index][1]) | ||
} | ||
}) | ||
/** | ||
* When applied to a folder with a `*.js` files, parse should return | ||
* a `SoftwarePackage` with `name`, `softwareRequirements` etc | ||
* populated correctly. | ||
*/ | ||
test('parse:js-sources', async () => { | ||
const parser = new JavascriptParser(fixture('js-sources')) | ||
const pkg = await parser.parse() as SoftwarePackage | ||
expect(pkg.name).toEqual('js-sources') | ||
expect(pkg.softwareRequirements.length).toEqual(2) | ||
expect(pkg.softwareRequirements[1].name).toEqual('array-swap') | ||
expect(pkg.softwareRequirements[0].name).toEqual('is-sorted') | ||
/** | ||
* When applied to a folder with a `*.js` files, parse should return | ||
* a `SoftwarePackage` with `name`, `softwareRequirements` etc | ||
* populated correctly. | ||
*/ | ||
test('parse:js-sources', async () => { | ||
const parser = new JavascriptParser(fixture('js-sources')) | ||
const pkg = await parser.parse() as SoftwarePackage | ||
expect(pkg.name).toEqual('js-sources') | ||
expect(pkg.softwareRequirements.length).toEqual(2) | ||
expect(pkg.softwareRequirements[1].name).toEqual('array-swap') | ||
expect(pkg.softwareRequirements[0].name).toEqual('is-sorted') | ||
}) | ||
}) |
@@ -12,3 +12,7 @@ import fixture from './fixture' | ||
for (let item of fs.readdirSync(REQUEST_CACHE_DIR)) { | ||
fs.unlinkSync(REQUEST_CACHE_DIR + '/' + item) | ||
try { | ||
fs.unlinkSync(REQUEST_CACHE_DIR + '/' + item) | ||
} catch (e) { | ||
// Cleanups might execute in parallel in multiple test runs so don't worry if remove fails | ||
} | ||
} | ||
@@ -15,0 +19,0 @@ } |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
1356318
149
5975
16
+ Added ndjson@^1.5.0
+ Added json-stringify-safe@5.0.1 (transitive)
+ Added ndjson@1.5.0 (transitive)
+ Added split2@2.2.0 (transitive)
+ Added through2@2.0.5 (transitive)