georender-pack - npm Package Compare versions

Comparing version 4.1.0 to 4.2.0


decode.js

@@ -10,5 +10,6 @@ var getNormals = require('polyline-normals')
 area: { types: 0, ids: 0, positions: 0, cells: 0 },
-areaBorder: { types: 0, ids: 0, positions: 0, normals: 0 }
+areaBorder: { types: 0, ids: 0, positions: 0, normals: 0 },
 }
-buffers.forEach(function (buf) {
+for (var bufi = 0; bufi < buffers.length; bufi++) {
+var buf = buffers[bufi]
 if (buf.length === 0) return

@@ -40,3 +41,2 @@ var featureType = buf.readUInt8(0)
 var plen = varint.decode(buf, offset) //pcount
-var plenAB = plen
 offset+=varint.decode.bytes

@@ -47,6 +47,6 @@ offset+=plen*8
 sizes.area.positions+=plen*2
-sizes.areaBorder.types+=plenAB*2+2
-sizes.areaBorder.ids+=plenAB*2+2
-sizes.areaBorder.positions+=plenAB*4+4
-sizes.areaBorder.normals+=plenAB*4+4
+sizes.areaBorder.types+=plen*2+2
+sizes.areaBorder.ids+=plen*2+2
+sizes.areaBorder.positions+=plen*4+4
+sizes.areaBorder.normals+=plen*4+4
 var clen = varint.decode(buf, offset) //clen

@@ -56,3 +56,40 @@ offset+=varint.decode.bytes
 }
-})
+else if (featureType === 4) {
+varint.decode(buf, offset) //types
+offset+=varint.decode.bytes
+varint.decode(buf, offset) //id
+offset+=varint.decode.bytes
+var plen = varint.decode(buf, offset) //pcount
+offset+=varint.decode.bytes
+offset+=plen*8
+sizes.area.types+=plen
+sizes.area.ids+=plen*2
+sizes.area.positions+=plen*2
+var clen = varint.decode(buf, offset) //clen
+offset+=varint.decode.bytes
+sizes.area.cells+=clen*3
+var elen = varint.decode(buf, offset) //elen
+offset+=varint.decode.bytes
+var esize = 2, eprev = 0
+for (var i=0; i<elen; i++) {
+var e = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+if (e === 0) { // edge break
+eprev = 0
+esize+=2
+} else if (e % 2 === 0) { // edge index
+eprev = Math.floor(e/2)-1
+esize+=2
+} else { // edge range
+var e1 = Math.floor(e/2)-1
+esize+=(e1-eprev)*2
+eprev = e1
+}
+}
+sizes.areaBorder.types+=esize
+sizes.areaBorder.ids+=esize
+sizes.areaBorder.positions+=esize*2
+sizes.areaBorder.normals+=esize*2
+}
+}
 var data = {
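A note on the edge list read by the new featureType 4 branch above: after the cell indexes, elen varint entries describe which area positions also form the border. A value of 0 marks an edge break, an even value 2*(i+1) names a single position index i, and an odd value 2*(i+1)+1 closes out a range running from the previous index up to i. The standalone sketch below is not part of the package; it expands an already-decoded edge list (the input array is hypothetical example data) into groups of indexes, mirroring the accounting in the size pre-pass above.

  // Expand a decoded edge list into groups of position indexes, following
  // the break / single-index / range rules shown in the diff above.
  function expandEdgeList (edges) {
    var groups = [], current = [], eprev = 0
    for (var i = 0; i < edges.length; i++) {
      var e = edges[i]
      if (e === 0) { // edge break: close the current group
        if (current.length > 0) groups.push(current)
        current = []
        eprev = 0
      } else if (e % 2 === 0) { // single index: e encodes e/2 - 1
        eprev = e / 2 - 1
        current.push(eprev)
      } else { // range: indexes eprev+1 .. floor(e/2)-1
        var e1 = Math.floor(e / 2) - 1
        for (var ex = eprev + 1; ex <= e1; ex++) current.push(ex)
        eprev = e1
      }
    }
    if (current.length > 0) groups.push(current)
    return groups
  }
  // Hypothetical example: expandEdgeList([2, 9, 0, 4]) => [ [0, 1, 2, 3], [1] ]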

@@ -84,3 +121,3 @@ point: {
 normals: new Float32Array(sizes.areaBorder.normals)
-}
+},
 }

@@ -91,6 +128,7 @@ var offsets = {
 area: { types: 0, ids: 0, positions: 0, cells: 0, labels: 0 },
-areaBorder: { types: 0, ids: 0, positions: 0, normals: 0 }
+areaBorder: { types: 0, ids: 0, positions: 0, normals: 0 },
 }
+var pindex = 0
-buffers.forEach(function (buf) {
+for (var bufi = 0; bufi < buffers.length; bufi++) {
+var buf = buffers[bufi]
 if (buf.length === 0) return

@@ -211,3 +249,2 @@ var offset = 0
 var normals = getNormals(pos, true)
-var startNorm = 0
 var positionsCount = 2+6+(4*pos.length)

@@ -240,3 +277,2 @@
 data.areaBorder.positions[offsets.areaBorder.positions++] = pos[0][1]
-startNorm = offsets.areaBorder.normals
 data.areaBorder.normals[offsets.areaBorder.normals++] = normals[0][0][0]*scale

@@ -281,5 +317,61 @@ data.areaBorder.normals[offsets.areaBorder.normals++] = normals[0][0][1]*scale
 }
-})
-data.areaBorder.positions = data.areaBorder.positions.subarray(0, offsets.areaBorder.positions)
-data.areaBorder.normals = data.areaBorder.normals.subarray(0, offsets.areaBorder.normals)
+else if (featureType === 4) {
+var type = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+var id = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+var plen = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+var pstart = offsets.area.positions
+var lon, lat
+for (var i=0; i<plen; i++) {
+lon = buf.readFloatLE(offset)
+offset+=4
+lat = buf.readFloatLE(offset)
+data.area.types[offsets.area.types++] = type
+data.area.ids[offsets.area.ids++] = id
+data.area.positions[offsets.area.positions++] = lon
+data.area.positions[offsets.area.positions++] = lat
+offset+=4
+}
+var clen = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+for (var i=0; i<clen*3; i++) {
+var c = varint.decode(buf, offset)
+data.area.cells[offsets.area.cells++] = c + pindex
+offset+=varint.decode.bytes
+}
+var elen = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+var positions = [], eprev = 0
+for (var i=0; i<elen; i++) {
+var e = varint.decode(buf, offset)
+offset+=varint.decode.bytes
+if (e === 0) { // edge break
+addAreaBorderPositions(data, offsets, positions, id, type)
+positions = []
+} else if (e % 2 === 0) { // edge index
+var ei = Math.floor(e/2)-1
+positions.push([
+data.area.positions[pstart+ei*2+0],
+data.area.positions[pstart+ei*2+1]
+])
+eprev = ei
+} else { // edge range
+var e0 = eprev+1
+var e1 = Math.floor(e/2)-1
+for (var ex = e0; ex <= e1; ex++) {
+positions.push([
+data.area.positions[pstart+ex*2+0],
+data.area.positions[pstart+ex*2+1]
+])
+}
+eprev = e1
+}
+}
+addAreaBorderPositions(data, offsets, positions, id, type)
+pindex+=plen
+offset = decodeLabels(buf, offset, data.area, id)
+}
+}
 return data

@@ -300,1 +392,34 @@ }
 }
+function addAreaBorderPositions(data, offsets, positions, id, type) {
+if (positions.length < 2) return
+var normals = getNormals(positions, false)
+var scale = Math.sqrt(normals[0][1])
+data.areaBorder.ids[offsets.areaBorder.ids++] = id
+data.areaBorder.types[offsets.areaBorder.types++] = type
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[0][0]
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[0][1]
+data.areaBorder.normals[offsets.areaBorder.normals++] = normals[0][0][0]*scale
+data.areaBorder.normals[offsets.areaBorder.normals++] = normals[0][0][1]*scale
+for (var j = 0; j < positions.length; j++) {
+scale = Math.sqrt(normals[j][1])
+data.areaBorder.ids[offsets.areaBorder.ids++] = id
+data.areaBorder.ids[offsets.areaBorder.ids++] = id
+data.areaBorder.types[offsets.areaBorder.types++] = type
+data.areaBorder.types[offsets.areaBorder.types++] = type
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[j][0]
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[j][1]
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[j][0]
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[j][1]
+data.areaBorder.normals[offsets.areaBorder.normals++] = normals[j][0][0]*scale
+data.areaBorder.normals[offsets.areaBorder.normals++] = normals[j][0][1]*scale
+data.areaBorder.normals[offsets.areaBorder.normals++] = -normals[j][0][0]*scale
+data.areaBorder.normals[offsets.areaBorder.normals++] = -normals[j][0][1]*scale
+}
+data.areaBorder.ids[offsets.areaBorder.ids++] = id
+data.areaBorder.types[offsets.areaBorder.types++] = type
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[j-1][0]
+data.areaBorder.positions[offsets.areaBorder.positions++] = positions[j-1][1]
+data.areaBorder.normals[offsets.areaBorder.normals++] = -normals[j-1][0][0]*scale
+data.areaBorder.normals[offsets.areaBorder.normals++] = -normals[j-1][0][1]*scale
+}
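For a sense of how the two passes line up: addAreaBorderPositions writes one leading vertex, two vertices per border position (the second with mirrored normals), and one trailing vertex, so a group of n positions yields 2*n + 2 entries in areaBorder.types and ids, and twice that many floats in positions and normals. That lines up with the esize pre-pass above, which starts at 2, adds 2 per index, and adds 2 more on each edge break. The sketch below is a hypothetical illustration of that count, not code from the package.

  // Rough vertex count for the areaBorder arrays, per the layout written by
  // addAreaBorderPositions: groups shorter than 2 positions are skipped.
  function borderVertexCount (groupLengths) {
    return groupLengths.reduce(function (sum, n) {
      return n < 2 ? sum : sum + 2 * n + 2
    }, 0)
  }
  // e.g. borderVertexCount([4, 1]) === 10; positions and normals then hold
  // 2 * 10 = 20 floats each, matching the *2 factors in the pre-pass.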


package.json
 {
 "name": "georender-pack",
-"version": "4.1.0",
+"version": "4.2.0",
 "description": "pack and unpack osm data based on the peermaps buffer schema",

