Comparing version 1.5.0 to 2.0.0
@@ -0,1 +1,5 @@
# 2.0.0 / 2020-02-14
- Breaking change: Support Node >= 10
# 1.5.0 / 2019-12-17
@@ -2,0 +6,0 @@
@@ -1,2 +0,2 @@
var ogr2ogr = require('../')
const ogr2ogr = require('../')
@@ -3,0 +3,0 @@ ogr2ogr('../test/samples/sample.shp.zip').exec(function(er, data) {
@@ -1,16 +0,18 @@
var ogr2ogr = require('../')
const ogr2ogr = require('../')
var geojson = {
let geojson = {
type: 'FeatureCollection',
features: [{
type: 'Feature',
geometry: {
type: 'Point',
coordinates: [102.0, 0.5],
features: [
{
type: 'Feature',
geometry: {
type: 'Point',
coordinates: [102.0, 0.5],
},
properties: {area: '51'},
},
properties: {area: '51'},
}],
],
}
var ogr = ogr2ogr(geojson).project('EPSG:3857')
let ogr = ogr2ogr(geojson).project('EPSG:3857')
@@ -17,0 +19,0 @@ ogr.exec(function(er, data) {
@@ -1,3 +0,9 @@
var ogr2ogr = require('../')
var url = 'https://gist.github.com/wavded/7376428/raw/971548233e441615a426794c766223488492ddb9/test.geojson'
ogr2ogr(url).format('kml').stream().pipe(process.stdout)
const ogr2ogr = require('../')
let url =
'https://gist.github.com/wavded/7376428/raw/971548233e441615a426794c766223488492ddb9/test.geojson'
ogr2ogr(url)
.format('kml')
.stream()
.pipe(process.stdout)
@@ -1,3 +0,9 @@
var ogr2ogr = require('../')
var url = 'https://gist.github.com/wavded/7376428/raw/971548233e441615a426794c766223488492ddb9/test.geojson'
ogr2ogr(url).format('map').stream().pipe(process.stdout)
const ogr2ogr = require('../')
let url =
'https://gist.github.com/wavded/7376428/raw/971548233e441615a426794c766223488492ddb9/test.geojson'
ogr2ogr(url)
.format('map')
.stream()
.pipe(process.stdout)
@@ -1,5 +0,5 @@
var ogr2ogr = require('../')
var fs = require('fs')
const ogr2ogr = require('../')
const fs = require('fs')
var rs = fs.createReadStream('../test/samples/sample.shp.zip')
let rs = fs.createReadStream('../test/samples/sample.shp.zip')
@@ -6,0 +6,0 @@ ogr2ogr(rs, 'ESRI Shapefile').exec(function(er, data) {
@@ -1,13 +0,15 @@
var path = require('path'),
ogr2ogr = require(path.resolve(__dirname + '/../')),
fs = require('fs');
const path = require('path')
const ogr2ogr = require('../')
const fs = require('fs')
// Use options ["--config", "SHAPE_RESTORE_SHX", "TRUE"] to recreate the shx file if it doesn't exist
ogr2ogr(path.resolve(__dirname + '/../test/samples/sample.lonely.shp'))
.options(['--config', 'SHAPE_RESTORE_SHX', 'TRUE'])
.exec(function(er, data) {
if (er) console.error(er)
console.log(data)
// Use options ["--config", "SHAPE_RESTORE_SHX", "TRUE"] to recreate the shx file if it doesn't exist
var st = ogr2ogr(path.resolve(__dirname + '/../test/samples/sample.lonely.shp'))
.options(["--config", "SHAPE_RESTORE_SHX", "TRUE"])
.exec(function(er, data) {
if (er) console.error(er)
console.log(data);
fs.unlinkSync(path.resolve(__dirname + '/../test/samples/sample.lonely.shx'));
})
fs.unlinkSync(
path.resolve(__dirname + '/../test/samples/sample.lonely.shx')
)
})
@@ -1,14 +0,13 @@
var path = require('path'),
ogr2ogr = require(path.resolve(__dirname + '/../')),
fs = require('fs');
const path = require('path')
const ogr2ogr = require('../')
// Set option ["--config", "CPL_DEBUG", "ON"] to enable ogr2ogr debug output, then hook a callback to `onStderr` method
let st = ogr2ogr(path.resolve(__dirname + '/../test/samples/simple.shp.zip'))
.options(['--config', 'CPL_DEBUG', 'ON'])
.onStderr(function(data) {
console.log(data)
})
.stream()
// Set option ["--config", "CPL_DEBUG", "ON"] to enable ogr2ogr debug output, then hook a callback to `onStderr` method
var st = ogr2ogr(path.resolve(__dirname + '/../test/samples/simple.shp.zip'))
.options(["--config", "CPL_DEBUG", "ON"])
.onStderr(function(data) {
console.log(data);
})
.stream()
st.on('error', console.error)
st.pipe(process.stdout);
st.pipe(process.stdout)
@@ -1,5 +0,5 @@
var ogr2ogr = require('../')
const ogr2ogr = require('../')
var st = ogr2ogr('../test/samples/sample.shp.zip').stream()
let st = ogr2ogr('../test/samples/sample.shp.zip').stream()
st.on('error', console.error)
st.pipe(process.stdout)
index.js
@@ -1,8 +0,8 @@
var path = require('path')
var cp = require('child_process')
var zip = require('./modules/zip')
var csv = require('./modules/csv')
var util = require('./modules/util')
var stream = require('stream')
var EE = require('events').EventEmitter
const path = require('path')
const cp = require('child_process')
const zip = require('./modules/zip')
const csv = require('./modules/csv')
const util = require('./modules/util')
const stream = require('stream')
const EE = require('events').EventEmitter
@@ -24,15 +24,15 @@ function logCommand(args) {
if (mixed instanceof stream) {
var driver = util.getDriver(fmt || path.extname(mixed.path).replace('.', ''))
let driver = util.getDriver(
fmt || path.extname(mixed.path).replace('.', '')
)
if (!driver) throw new Error('Streams require a valid input format')
this._inStream = mixed
this._inDriver = driver
}
else if (typeof mixed == 'object') {
} else if (typeof mixed == 'object') {
this._inGeoJSON = mixed
}
else {
} else {
this._inPath = mixed
}
this._onStderr=function() {};
this._onStderr = function() {}
this._driver = {}
@@ -50,3 +50,3 @@ this._args = []
Ogr2ogr.prototype.format = function(fmt) {
var driver = util.getDriver(fmt)
let driver = util.getDriver(fmt)
this._driver = driver
@@ -89,3 +89,3 @@ this._format = driver.format || fmt || 'GeoJSON'
Ogr2ogr.prototype.onStderr = function(fn) {
this._onStderr=fn;
this._onStderr = fn
return this
@@ -95,14 +95,19 @@ }
Ogr2ogr.prototype.exec = function(cb) {
var ogr2ogr = this
var buf = []
var one = util.oneCallback(cb)
let ogr2ogr = this
let buf = []
let one = util.oneCallback(cb)
this.stream()
.on('data', function(chunk) { buf.push(chunk) })
.on('data', function(chunk) {
buf.push(chunk)
})
.on('error', one)
.on('close', function() {
var data = Buffer.concat(buf)
let data = Buffer.concat(buf)
if (ogr2ogr._format == 'GeoJSON') {
try { data = JSON.parse(data) }
catch (er) { return one(er) }
try {
data = JSON.parse(data)
} catch (er) {
return one(er)
}
}
@@ -117,37 +122,35 @@ one(null, data)
Ogr2ogr.prototype.promise = function(cb) {
var ogr2ogr = this
var buf = []
Ogr2ogr.prototype.promise = function() {
let ogr2ogr = this
let buf = []
return new Promise((resolve, reject) => {
this.stream()
.on('data', function(chunk) {
buf.push(chunk)
})
.on('error', er => reject(er))
.on('close', function() {
var data = Buffer.concat(buf)
if (ogr2ogr._format == 'GeoJSON') {
try {
data = JSON.parse(data);
} catch (er) {
reject(er)
}
}
resolve(data)
})
})
return new Promise((resolve, reject) => {
this.stream()
.on('data', function(chunk) {
buf.push(chunk)
})
.on('error', er => reject(er))
.on('close', function() {
let data = Buffer.concat(buf)
if (ogr2ogr._format == 'GeoJSON') {
try {
data = JSON.parse(data)
} catch (er) {
reject(er)
}
}
resolve(data)
})
})
}
Ogr2ogr.prototype._getOrgInPath = function(cb) {
var ogr2ogr = this
var one = util.oneCallback(cb)
let ogr2ogr = this
let one = util.oneCallback(cb)
if (this._inStream) {
util.writeStream(this._inStream, this._inDriver.output, getInFilePath)
}
else if (this._inGeoJSON) {
} else if (this._inGeoJSON) {
util.writeGeoJSON(this._inGeoJSON, getInFilePath)
}
else {
} else {
getInFilePath(null, this._inPath)
@@ -161,3 +164,6 @@ }
ogr2ogr._isZipIn = /zip|kmz/.test(path.extname(fpath)) && !/^\/vsizip\//.test(fpath) && !/\.gdb\.zip$/i.test(fpath)
ogr2ogr._isZipIn =
/zip|kmz/.test(path.extname(fpath)) &&
!/^\/vsizip\//.test(fpath) &&
!/\.gdb\.zip$/i.test(fpath)
ogr2ogr._isCsvIn = /csv/.test(path.extname(fpath))
@@ -173,4 +179,3 @@ ogr2ogr._isZipOut = ogr2ogr._driver.output == 'zip'
})
}
else if (ogr2ogr._isCsvIn) {
} else if (ogr2ogr._isCsvIn) {
csv.makeVrt(fpath, function(err, vrt) {
@@ -186,4 +191,3 @@ if (vrt && /\.vrt$/.test(vrt)) {
})
}
else {
} else {
one(null, fpath)
@@ -195,4 +199,4 @@ }
Ogr2ogr.prototype._run = function() {
var ogr2ogr = this
var ostream = new stream.PassThrough()
let ogr2ogr = this
let ostream = new stream.PassThrough()
@@ -203,3 +207,3 @@ this._getOrgInPath(function(er, ogrInPath) {
ogr2ogr._ogrInPath = ogrInPath
var args = ['-f', ogr2ogr._format]
let args = ['-f', ogr2ogr._format]
if (ogr2ogr._skipfailures) args.push('-skipfailures')
@@ -214,16 +218,17 @@ if (ogr2ogr._sourceSrs) args.push('-s_srs', ogr2ogr._sourceSrs)
var errbuf = ''
var commandOptions = this._env ? { env: this._env } : undefined
var s = cp.spawn('ogr2ogr', logCommand(args), commandOptions)
let errbuf = ''
let commandOptions = this._env ? {env: this._env} : undefined
let s = cp.spawn('ogr2ogr', logCommand(args), commandOptions)
if (!ogr2ogr._isZipOut) s.stdout.pipe(ostream, {end: false})
var one = util.oneCallback(wrapUp)
let one = util.oneCallback(wrapUp)
let killTimeout
s.stderr.setEncoding('ascii')
s.stderr.on('data', function(chunk) {
ogr2ogr._onStderr(chunk);
if(/Error/i.test(chunk)) {
s.emit('error', chunk);
ogr2ogr._onStderr(chunk)
if (/Error/i.test(chunk)) {
s.emit('error', chunk)
} else {
@@ -239,8 +244,15 @@ errbuf += chunk
clearTimeout(killTimeout)
one(code ? new Error(errbuf || 'ogr2ogr failed to do the conversion') : null)
one(
code ? new Error(errbuf || 'ogr2ogr failed to do the conversion') : null
)
})
var killTimeout = setTimeout(function() {
killTimeout = setTimeout(function() {
if (s._handle) {
ostream.emit('error', new Error('ogr2ogr took longer than ' + ogr2ogr._timeout + ' to complete'))
ostream.emit(
'error',
new Error(
'ogr2ogr took longer than ' + ogr2ogr._timeout + ' to complete'
)
)
s.stdout.destroy()
@@ -264,5 +276,10 @@ s.stderr.destroy()
var zs = zip.createZipStream(ogr2ogr._ogrOutPath)
zs.on('error', function(er2) { ostream.emit('error', er2) })
zs.on('end', function() { ostream.emit('close'); ogr2ogr._clean() })
let zs = zip.createZipStream(ogr2ogr._ogrOutPath)
zs.on('error', function(er2) {
ostream.emit('error', er2)
})
zs.on('end', function() {
ostream.emit('close')
ogr2ogr._clean()
})
zs.pipe(ostream)
@@ -275,8 +292,7 @@ }
Ogr2ogr.prototype._clean = function() {
var all = util.allCallback(this._testClean)
let all = util.allCallback(this._testClean)
if (this._inStream && this._driver.output == 'zip') {
util.rmDir(this._inPath, all())
}
else if (this._inStream || this._inGeoJSON) {
} else if (this._inStream || this._inGeoJSON) {
util.rmFile(this._inPath, all())
@@ -283,0 +299,0 @@ }
@@ -1,18 +0,18 @@
var fs = require('fs')
var path = require('path')
var CSV = require('comma-separated-values')
var util = require('./util')
const fs = require('fs')
const path = require('path')
const CSV = require('comma-separated-values')
const util = require('./util')
var BASE_VRT = '<OGRVRTDataSource>\n\
<OGRVRTLayer name="{{name}}">\n\
<SrcDataSource>{{file}}</SrcDataSource>\n\
<GeometryType>wkbUnknown</GeometryType>\n\
<GeometryField encoding="{{enc}}" {{encopt}} />\n\
</OGRVRTLayer>\n\
</OGRVRTDataSource>'
let BASE_VRT = `<OGRVRTDataSource>
<OGRVRTLayer name="{{name}}">
<SrcDataSource>{{file}}</SrcDataSource>
<GeometryType>wkbUnknown</GeometryType>
<GeometryField encoding="{{enc}}" {{encopt}} />
</OGRVRTLayer>
</OGRVRTDataSource>`
var extractHead = function(fpath, cb) {
var sf = fs.createReadStream(fpath)
var one = util.oneCallback(cb)
var data = ''
let extractHead = function(fpath, cb) {
let sf = fs.createReadStream(fpath)
let one = util.oneCallback(cb)
let data = ''
sf.on('data', function(chunk) {
@@ -27,9 +27,12 @@ data += chunk
sf.on('error', one)
sf.on('end', util.oneCallback(function() {
CSV.forEach(data.split(/(?:\n|\r\n|\r)/g).shift(), function(head) {
one(null, head)
sf.on(
'end',
util.oneCallback(function() {
CSV.forEach(data.split(/(?:\n|\r\n|\r)/g).shift(), function(head) {
one(null, head)
})
// if there is nothing to parse
one()
})
// if there is nothing to parse
one()
}))
)
}
@@ -41,22 +44,23 @@
var geo = {}
let geo = {}
headers.forEach(function(header) {
var ht = (String(header)).trim()
let ht = String(header).trim()
switch (true) {
case /\b(lon|longitude|lng|x)\b/i.test(ht):
geo.x = header
break
case /\b(lat|latitude|y)\b/i.test(ht):
geo.y = header
break
case /\b(the_geom|geom)\b/i.test(ht):
geo.geom = header
break
default:
case /\b(lon|longitude|lng|x)\b/i.test(ht):
geo.x = header
break
case /\b(lat|latitude|y)\b/i.test(ht):
geo.y = header
break
case /\b(the_geom|geom)\b/i.test(ht):
geo.geom = header
break
default:
}
})
if (!geo.geom && !geo.x) return cb(null, fpath) // no geometry fields, parse attributes
// no geometry fields, parse attributes
if (!geo.geom && !geo.x) return cb(null, fpath)
var vrtData = util.tmpl(BASE_VRT, {
let vrtData = util.tmpl(BASE_VRT, {
file: fpath,
@@ -70,3 +74,3 @@ name: path.basename(fpath, '.csv'),
var vrtPath = util.genTmpPath() + '.vrt'
let vrtPath = util.genTmpPath() + '.vrt'
return fs.writeFile(vrtPath, vrtData, function(er2) {
@@ -73,0 +77,0 @@ cb(er2, vrtPath)
[
{
"format": "BNA",
"aliases": [ "bna" ],
"aliases": ["bna"],
"output": "bna"
},{
},
{
"format": "CSV",
"aliases": [ "csv" ],
"aliases": ["csv"],
"output": "csv"
},{
},
{
"format": "DGN",
"aliases": [ "dgn" ],
"aliases": ["dgn"],
"output": "dgn"
},{
},
{
"format": "DXF",
"aliases": [ "dxf" ],
"aliases": ["dxf"],
"output": "dxf"
},{
},
{
"format": "ESRI Shapefile",
"aliases": [ "shp", "prj", "shx", "dbf" ],
"aliases": ["shp", "prj", "shx", "dbf"],
"output": "zip"
},{
},
{
"format": "Geoconcept",
"aliases": [ "gxt", "txt" ],
"aliases": ["gxt", "txt"],
"output": "gxt"
},{
},
{
"format": "GeoJSON",
"aliases": [ "json", "geojson" ],
"aliases": ["json", "geojson"],
"output": "geojson"
},{
},
{
"format": "GeoRSS",
"aliases": [ "rss", "xml", "georss" ],
"aliases": ["rss", "xml", "georss"],
"output": "dxf"
},{
},
{
"format": "GML",
"aliases": [ "gml", "gfs" ],
"aliases": ["gml", "gfs"],
"output": "gml"
},{
},
{
"format": "GMT",
"aliases": [ "gmt" ],
"aliases": ["gmt"],
"output": "gmt"
},{
},
{
"format": "GPX",
"aliases": [ "gpx" ],
"aliases": ["gpx"],
"output": "gpx"
},{
},
{
"format": "GPKG",
"aliases": [ "gpkg" ],
"aliases": ["gpkg"],
"output": "gpkg"
},{
},
{
"format": "KML",
"aliases": ["kml", "kmz"],
"output": "kml"
},{
},
{
"format": "MapInfo File",
"aliases": [ "map", "tab", "dat", "id", "ind"],
"aliases": ["map", "tab", "dat", "id", "ind"],
"output": "zip"
},{
},
{
"format": "TIGER",
"aliases": [ "rt1", "rt2", "rt3", "rt4", "rt5", "rt6", "rt7", "rt8", "rt9", "rta", "rtc", "rth", "rti", "rtp", "rtr", "rts", "rtz" ],
"aliases": [
"rt1",
"rt2",
"rt3",
"rt4",
"rt5",
"rt6",
"rt7",
"rt8",
"rt9",
"rta",
"rtc",
"rth",
"rti",
"rtp",
"rtr",
"rts",
"rtz"
],
"output": "zip"
},{
},
{
"format": "PGDump",
"aliases": ["sql"],
"output": "sql"
},{
},
{
"format": "PostgreSQL",
"aliases": [],
"output": "sql"
},{
},
{
"format": "VRT",
"aliases": [ "vrt" ],
"aliases": ["vrt"],
"output": "vrt"
}
]
{
"S57": { },
"Memory": { },
"LIBKML": { },
"Interlis 1": { },
"Interlis 2": { },
"SQLite": { },
"ODBC": { },
"MSSQLSpatial": { },
"PostgreSQL": { },
"PGDump": { },
"PCIDSK": { },
"GPSTrackMaker": { },
"GPSTrackMaker": { },
"PGDump": { },
"GPSBabel": { },
"GFT": { },
"CouchDB": { },
"ODS": { },
"XLSX": { },
"ElasticSearch": { },
"PDF": { }
"S57": {},
"Memory": {},
"LIBKML": {},
"Interlis 1": {},
"Interlis 2": {},
"SQLite": {},
"ODBC": {},
"MSSQLSpatial": {},
"PostgreSQL": {},
"PGDump": {},
"PCIDSK": {},
"GPSTrackMaker": {},
"GPSBabel": {},
"GFT": {},
"CouchDB": {},
"ODS": {},
"XLSX": {},
"ElasticSearch": {},
"PDF": {}
}
@@ -1,11 +0,11 @@
var path = require('path')
var tmpdir = require('os').tmpdir()
const path = require('path')
let tmpdir = require('os').tmpdir()
if (tmpdir === '/src') tmpdir = '/tmp' // docker issue
var fs = require('fs')
var rimraf = require('rimraf')
var drivers = require('./drivers.json')
const fs = require('fs')
const rimraf = require('rimraf')
const drivers = require('./drivers.json')
exports.tmpl = function(tmpl, data) {
for (var label in data) {
for (let label in data) {
tmpl = tmpl.replace('{{' + label + '}}', data[label])
@@ -16,14 +16,21 @@ }
var genInc = Date.now()
var genTmpPath = exports.genTmpPath = function() {
let genInc = Date.now()
let genTmpPath = (exports.genTmpPath = function() {
return path.join(tmpdir, 'ogr_' + (genInc++).toString(14))
})
exports.rmParentDir = function(fpath, cb) {
rimraf(path.dirname(fpath), cb)
}
exports.rmDir = function(dpath, cb) {
rimraf(dpath, cb)
}
exports.rmFile = function(fpath, cb) {
fs.unlink(fpath, cb)
}
exports.rmParentDir = function(fpath, cb) { rimraf(path.dirname(fpath), cb) }
exports.rmDir = function(dpath, cb) { rimraf(dpath, cb) }
exports.rmFile = function(fpath, cb) { fs.unlink(fpath, cb) }
exports.getDriver = function(fmt) {
for (var i = 0; i < drivers.length; i++) {
if (drivers[i].format == fmt || drivers[i].aliases.indexOf(fmt) > -1) return drivers[i]
for (let i = 0; i < drivers.length; i++) {
if (drivers[i].format == fmt || drivers[i].aliases.indexOf(fmt) > -1)
return drivers[i]
}
@@ -34,7 +41,8 @@ return {}
exports.writeStream = function(ins, ext, cb) {
var fpath = genTmpPath() + '.' + ext
var ws = fs.createWriteStream(fpath)
var one = exports.oneCallback(cb)
let fpath = genTmpPath() + '.' + ext
let ws = fs.createWriteStream(fpath)
let one = exports.oneCallback(cb)
ins.pipe(ws)
ins
.pipe(ws)
.on('error', one)
@@ -47,3 +55,3 @@ .on('finish', function() {
exports.writeGeoJSON = function(obj, cb) {
var fpath = genTmpPath() + '.json'
let fpath = genTmpPath() + '.json'
fs.writeFile(fpath, JSON.stringify(obj), function(er) {
@@ -55,3 +63,3 @@ cb(er, fpath)
exports.oneCallback = function(cb) {
var called = false
let called = false
return function(er, data) {
@@ -64,27 +72,10 @@ if (called) return
// exports.chainCallback = function (/* args, cb */) {
// var initArgs = Array.prototype.slice.call(arguments)
// var cb = initArgs.pop()
// initArgs.unshift(null)
//
// return function () {
// var fns = Array.prototype.slice.call(arguments)
//
// function run (er, data) {
// var fn = fns.shift()
// if (!op || er) cb(er, data)
// fn.call(null, er, data)
// }
//
// initArgs.push(run)
// fns.shift().apply(null, initArgs)
// }
// }
exports.allCallback = function(cb) {
var one = exports.oneCallback(cb)
var expect = 0
var total = 0
let one = exports.oneCallback(cb)
let expect = 0
let total = 0
setImmediate(function() { if (expect == 0) one(null, total) })
setImmediate(function() {
if (expect == 0) one(null, total)
})
@@ -91,0 +82,0 @@ return function() {
@@ -1,12 +0,12 @@
var path = require('path')
var fs = require('fs')
var findit = require('findit')
var DecompressZip = require('decompress-zip')
var archiver = require('archiver')
var util = require('./util')
const path = require('path')
const fs = require('fs')
const findit = require('findit')
const DecompressZip = require('decompress-zip')
const archiver = require('archiver')
const util = require('./util')
exports.extract = function(fpath, cb) {
var zip = new DecompressZip(fpath)
var zipPath = util.genTmpPath()
var one = util.oneCallback(cb)
let zip = new DecompressZip(fpath)
let zipPath = util.genTmpPath()
let one = util.oneCallback(cb)
@@ -21,10 +21,11 @@ zip
var validOgrRe = /^\.(shp|kml|tab|itf|000|rt1|gml|vrt)$/i
var macosxRe = /__MACOSX/
let validOgrRe = /^\.(shp|kml|tab|itf|000|rt1|gml|vrt)$/i
let macosxRe = /__MACOSX/
exports.findOgrFile = function(dpath, cb) {
var finder = findit(dpath)
var found
let finder = findit(dpath)
let found
finder.on('file', function(file) {
if (!macosxRe.test(file) && validOgrRe.test(path.extname(file))) found = file
if (!macosxRe.test(file) && validOgrRe.test(path.extname(file)))
found = file
})
@@ -42,3 +43,3 @@ finder.on('error', function(er) {
exports.createZipStream = function(dpath) {
var zs = archiver('zip')
let zs = archiver('zip')
@@ -49,3 +50,3 @@ fs.readdir(dpath, function(er, files) {
files.forEach(function(file) {
var f = fs.createReadStream(path.join(dpath, file))
let f = fs.createReadStream(path.join(dpath, file))
zs.append(f, {name: file})
@@ -52,0 +53,0 @@ })
{
"name": "ogr2ogr",
"version": "1.5.0",
"version": "2.0.0",
"description": "ogr2ogr wrapper w/ multiple format support",
@@ -18,18 +18,24 @@ "keywords": [
"scripts": {
"test": "nyc tape \"test/*-test.js\""
"test": "nyc tape \"test/*-test.js\"",
"lint": "eslint --max-warnings 0 modules examples test *.js",
"fmt": "prettier --write \"**/*.{html,js,json,md}\""
},
"main": "./index.js",
"dependencies": {
"archiver": "^1.1.0",
"archiver": "^3.1.1",
"comma-separated-values": "^3.6.0",
"decompress-zip": "^0.3.0",
"decompress-zip": "^0.2.2",
"findit": "^2.0.0",
"rimraf": "^2.2.8"
"rimraf": "^3.0.2"
},
"devDependencies": {
"tape": "^4.6.0"
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.10.0",
"eslint-plugin-prettier": "^3.1.2",
"prettier": "^1.19.1",
"tape": "^4.13.0"
},
"engines": {
"node": ">=6"
"node": ">=10"
}
}
@@ -1,2 +0,2 @@
[](https://jenkins.adc4gis.com/job/ogr2ogr/) [](https://npmjs.com/package/ogr2ogr)  [](https://github.com/applieddataconsultants/eslint-config-adc)
[](https://jenkins.adc4gis.com/job/ogr2ogr/) [](https://npmjs.com/package/ogr2ogr) 
@@ -23,3 +23,3 @@ ogr2ogr enables spatial file conversion and reprojection of spatial data through the use of ogr2ogr (gdal) tool
ogr.exec(function (er, data) {
ogr.exec(function(er, data) {
if (er) console.error(er)
@@ -36,7 +36,6 @@ console.log(data)
```javascript
var data = await ogr2ogr('/path/to/another/spatial/file').promise();
console.log(data);
var data = await ogr2ogr('/path/to/another/spatial/file').promise()
console.log(data)
```
See `/examples` for usage examples and `/test/api.js`.
@@ -63,5 +62,5 @@
var shapefile = ogr2ogr('/path/to/spatial/file.geojson')
.format('ESRI Shapefile')
.skipfailures()
.stream()
.format('ESRI Shapefile')
.skipfailures()
.stream()
shapefile.pipe(fs.createWriteStream('/shapefile.zip'))
@@ -83,3 +82,3 @@ ```
If you want to debug what is the ogr2ogr binary doing internally, you can attach a callback to the output,
If you want to debug what is the ogr2ogr binary doing internally, you can attach a callback to the output,
provided you have passed the option [CPL_DEBUG](https://trac.osgeo.org/gdal/wiki/ConfigOptions#CPL_DEBUG)
@@ -89,9 +88,9 @@
var shapefile = ogr2ogr('/path/to/spatial/file.geojson')
.format('ESRI Shapefile')
.skipfailures()
.options(["--config", "CPL_DEBUG", "ON"])
.onStderr(function(data) {
console.log(data);
})
.stream()
.format('ESRI Shapefile')
.skipfailures()
.options(['--config', 'CPL_DEBUG', 'ON'])
.onStderr(function(data) {
console.log(data)
})
.stream()
shapefile.pipe(fs.createWriteStream('/shapefile.zip'))
@@ -121,6 +120,6 @@ ```
It is trivial to handle the conversion of ESRI Shapefiles when they are packed in a zipfile that contains (at least) the `shp` and `shx` files.
This library is also capable of converting uncompresses ESRI Shapefiles if you use the `shp` file as the input file
This library is also capable of converting uncompresses ESRI Shapefiles if you use the `shp` file as the input file
**and the shx file is in the same folder**.
However, it is also possible to convert single `shp` files that lack an `shx` file by forcing its creation
However, it is also possible to convert single `shp` files that lack an `shx` file by forcing its creation
using ogr2ogr option [SHAPE_RESTORE_SHX](https://trac.osgeo.org/gdal/wiki/ConfigOptions#SHAPE_RESTORE_SHX) provided you have installed
@@ -131,7 +130,6 @@ GDAL/OGR version 2.1.0 or newer.
var geojson = ogr2ogr('/path/to/spatial/lonely.shp')
.options(["--config", "SHAPE_RESTORE_SHX", "TRUE"])
.stream()
.options(['--config', 'SHAPE_RESTORE_SHX', 'TRUE'])
.stream()
geojson.pipe(fs.createWriteStream('/lonely.json'))
```
@@ -141,5 +139,2 @@
# License
@@ -149,3 +144,3 @@
Copyright (c) 2017 Marc Harter <wavded@gmail.com>
Copyright (c) 2020 Marc Harter <wavded@gmail.com>
@@ -152,0 +147,0 @@ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
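For illustration, a minimal JavaScript sketch of the pattern this warning refers to; it is not the flagged call site (which is not shown on this page), and the module path in it is made up.

```javascript
// Static require: the module id is a string literal, so tooling can resolve it ahead of time.
const os = require('os')
console.log('static require resolved on', os.platform())

// Dynamic require: the module id is computed at runtime, so static analysis and
// supply-chain scanners cannot tell in advance which code will be loaded.
function loadFormatter(name) {
  // hypothetical relative path, used only to illustrate the flagged pattern
  return require('./formatters/' + name)
}
```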
+ Added archiver@3.1.1 (transitive)
+ Added archiver-utils@2.1.0 (transitive)
+ Added bl@4.1.0 (transitive)
+ Added compress-commons@2.1.1 (transitive)
+ Added crc32-stream@3.0.1 (transitive)
+ Added decompress-zip@0.2.2 (transitive)
+ Added lodash.defaults@4.2.0 (transitive)
+ Added lodash.difference@4.5.0 (transitive)
+ Added lodash.flatten@4.4.0 (transitive)
+ Added lodash.isplainobject@4.0.6 (transitive)
+ Added lodash.union@4.6.0 (transitive)
+ Added normalize-path@3.0.0 (transitive)
+ Added readable-stream@3.6.2 (transitive)
+ Added rimraf@3.0.2 (transitive)
+ Added tar-stream@2.2.0 (transitive)
+ Added zip-stream@2.1.3 (transitive)
- Removed archiver@1.3.0 (transitive)
- Removed archiver-utils@1.3.0 (transitive)
- Removed bl@1.2.3 (transitive)
- Removed buffer-alloc@1.2.0 (transitive)
- Removed buffer-alloc-unsafe@1.1.0 (transitive)
- Removed buffer-fill@1.0.0 (transitive)
- Removed compress-commons@1.2.2 (transitive)
- Removed crc32-stream@2.0.0 (transitive)
- Removed decompress-zip@0.3.3 (transitive)
- Removed normalize-path@2.1.1 (transitive)
- Removed remove-trailing-separator@1.1.0 (transitive)
- Removed rimraf@2.7.1 (transitive)
- Removed tar-stream@1.6.2 (transitive)
- Removed to-buffer@1.1.1 (transitive)
- Removed walkdir@0.0.11 (transitive)
- Removed xtend@4.0.2 (transitive)
- Removed zip-stream@1.2.0 (transitive)
Updated archiver@^3.1.1
Updated decompress-zip@^0.2.2
Updated rimraf@^3.0.2
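For reference, a minimal sketch (assuming the call shapes shown in the modules/util.js and modules/zip.js diffs above) of how the package uses the two bumped runtime dependencies; these entry points still appear to be supported by the new major versions.

```javascript
// rimraf@3 still accepts (path, callback), matching util.rmDir / util.rmParentDir above.
const rimraf = require('rimraf')
rimraf('/tmp/ogr_scratch_dir', function(er) {
  // '/tmp/ogr_scratch_dir' is a throwaway example path
  if (er) console.error(er)
})

// archiver@3 still returns a zip stream from archiver('zip'), matching zip.createZipStream above.
const archiver = require('archiver')
const zs = archiver('zip')
zs.on('error', console.error)
zs.append('hello', {name: 'hello.txt'}) // same append(source, {name}) shape used above
```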