Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a DemoInstallSign in
Socket

mbutil

Package Overview
Dependencies
Maintainers
2
Versions
6
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

mbutil - npm Package Compare versions

Comparing version
0.0.1
to
0.0.2
mb-util

Sorry, the diff of this file is not supported yet

+1
from util import *
#!/usr/bin/env python
# MBUtil: a tool for MBTiles files
# Supports importing, exporting, and more
#
# (c) Development Seed 2011
# Licensed under BSD
import sqlite3, uuid, sys, logging, time, os, json, zlib, glob, shutil
logger = logging.getLogger(__name__)
def mbtiles_setup(cur):
    """Create the baseline MBTiles schema (tiles + metadata) on *cur*."""
    # metadata: free-form name/value pairs describing the tileset,
    # with names kept unique by an index.
    cur.execute("""create table metadata
(name text, value text);""")
    cur.execute("""create unique index name on metadata (name);""")
    # tiles: one blob per (zoom, column, row) address.
    cur.execute("""
create table tiles (
zoom_level integer,
tile_column integer,
tile_row integer,
tile_data blob);
""")
    cur.execute("""create unique index tile_index on tiles
(zoom_level, tile_column, tile_row);""")
def mbtiles_connect(mbtiles_file):
    """Open (or create) *mbtiles_file* as a SQLite database.

    :param mbtiles_file: path to the .mbtiles file (created if missing)
    :returns: an open sqlite3 connection
    Exits the process with status 1 when the connection cannot be made.
    """
    try:
        con = sqlite3.connect(mbtiles_file)
        return con
    except Exception as e:  # py2.6+/py3-compatible form (was py2-only `except Exception, e`)
        logger.error("Could not connect to database")
        logger.exception(e)
        sys.exit(1)
def optimize_connection(cur):
    """Loosen SQLite durability/locking settings for fast bulk writes."""
    # Trade crash-safety for import speed: no fsync, exclusive lock,
    # truncated (not deleted) journal.
    for pragma in ("""PRAGMA synchronous=0""",
                   """PRAGMA locking_mode=EXCLUSIVE""",
                   """PRAGMA journal_mode=TRUNCATE"""):
        cur.execute(pragma)
def compression_prepare(cur, con):
    """Create the ``images``/``map`` tables used for tile de-duplication.

    ``images`` holds unique tile blobs keyed by tile_id; ``map`` holds
    tile addresses that point at an image.  Idempotent (IF NOT EXISTS).
    """
    for ddl in ("""
CREATE TABLE if not exists images (
tile_data blob,
tile_id VARCHAR(256));
""",
                """
CREATE TABLE if not exists map (
zoom_level integer,
tile_column integer,
tile_row integer,
tile_id VARCHAR(256));
"""):
        cur.execute(ddl)
def optimize_database(cur):
    """Refresh query-planner statistics, then compact the database file."""
    for note, statement in (('analyzing db', """ANALYZE;"""),
                            ('cleaning db', """VACUUM;""")):
        logger.debug(note)
        cur.execute(statement)
def compression_do(cur, con, chunk):
    """De-duplicate tile blobs from ``tiles`` into ``images`` + ``map``.

    Works through the tiles table in windows of *chunk* rows; identical
    blobs within one window share a single ``images`` row.  Commits after
    every window.

    :param cur: cursor used for reads and writes
    :param con: connection, committed once per window
    :param chunk: window size in rows (larger = better dedup, more memory)
    """
    overlapping = 0
    unique = 0
    total = 0
    cur.execute("select count(zoom_level) from tiles")
    res = cur.fetchone()
    total_tiles = res[0]
    logger.debug("%d total tiles to fetch" % total_tiles)  # was logging.debug: use module logger consistently
    # Ceil-division: the old `range(total_tiles / chunk)` skipped the final
    # partial window entirely (and is float division on Python 3).
    rounds = (total_tiles + chunk - 1) // chunk
    for i in range(rounds):
        logger.debug("%d / %d rounds done" % (i, rounds))
        ids = []
        files = []
        start = time.time()
        cur.execute("""select zoom_level, tile_column, tile_row, tile_data
from tiles where rowid > ? and rowid <= ?""", ((i * chunk), ((i + 1) * chunk)))
        logger.debug("select: %s" % (time.time() - start))
        rows = cur.fetchall()
        for r in rows:
            total = total + 1
            if r[3] in files:
                # Duplicate blob within this window: reuse the existing image id.
                overlapping = overlapping + 1
                start = time.time()
                query = """insert into map
(zoom_level, tile_column, tile_row, tile_id)
values (?, ?, ?, ?)"""
                cur.execute(query, (r[0], r[1], r[2], ids[files.index(r[3])]))
                logger.debug("insert: %s" % (time.time() - start))  # logged after execute so the timing is real
            else:
                unique = unique + 1
                tile_id = str(uuid.uuid4())  # renamed from `id`, which shadowed the builtin
                ids.append(tile_id)
                files.append(r[3])
                start = time.time()
                query = """insert into images
(tile_id, tile_data)
values (?, ?)"""
                cur.execute(query, (tile_id, sqlite3.Binary(r[3])))
                logger.debug("insert into images: %s" % (time.time() - start))
                start = time.time()
                query = """insert into map
(zoom_level, tile_column, tile_row, tile_id)
values (?, ?, ?, ?)"""
                cur.execute(query, (r[0], r[1], r[2], tile_id))
                logger.debug("insert into map: %s" % (time.time() - start))
        con.commit()
def compression_finalize(cur):
    """Swap the raw ``tiles`` table for a dedup view over ``map``/``images``."""
    cur.execute("""drop table tiles;""")
    # Recreate `tiles` as a view so existing readers are unaffected.
    cur.execute("""create view tiles as
select map.zoom_level as zoom_level,
map.tile_column as tile_column,
map.tile_row as tile_row,
images.tile_data as tile_data FROM
map JOIN images on images.tile_id = map.tile_id;""")
    # One address per map row, one row per image id; then compact and re-stat.
    for ddl in ("""
CREATE UNIQUE INDEX map_index on map
(zoom_level, tile_column, tile_row);""",
                """
CREATE UNIQUE INDEX images_id on images
(tile_id);""",
                """vacuum;""",
                """analyze;"""):
        cur.execute(ddl)
def disk_to_mbtiles(directory_path, mbtiles_file, **kwargs):
    """Import a z/x/y tile directory tree into a new MBTiles file.

    Restores optional ``metadata.json`` from *directory_path* into the
    metadata table, then walks the tree as zoom/column/row and inserts
    each file as a tile blob (filename without extension = tile_row).
    """
    logger.info("Importing disk to MBTiles")
    logger.debug("%s --> %s" % (directory_path, mbtiles_file))
    con = mbtiles_connect(mbtiles_file)
    cur = con.cursor()
    optimize_connection(cur)
    mbtiles_setup(cur)
    try:
        with open('%s/metadata.json' % directory_path, 'r') as fh:  # was an unclosed open()
            metadata = json.load(fh)
        for name, value in metadata.items():
            cur.execute('insert into metadata (name, value) values (?, ?)',
                (name, value))
        logger.info('metadata from metadata.json restored')
    except IOError:  # py3-compatible form (was py2-only `except IOError, e`; e was unused)
        logger.warning('metadata.json not found')
    count = 0
    start_time = time.time()
    msg = ""
    for r1, zs, ignore in os.walk(directory_path):
        for z in zs:
            for r2, xs, ignore in os.walk(os.path.join(r1, z)):
                for x in xs:
                    for r2, ignore, ys in os.walk(os.path.join(r1, z, x)):
                        for y in ys:
                            with open(os.path.join(r1, z, x, y), 'rb') as f:  # close each tile file deterministically
                                tile_data = f.read()
                            cur.execute("""insert into tiles (zoom_level,
tile_column, tile_row, tile_data) values
(?, ?, ?, ?);""",
                                (z, x, y.split('.')[0], sqlite3.Binary(tile_data)))
                            count = count + 1
                            if (count % 100) == 0:
                                # backspace over the previous progress message
                                for c in msg: sys.stdout.write(chr(8))
                                msg = "%s tiles inserted (%d tiles/sec)" % (count, count / (time.time() - start_time))
                                sys.stdout.write(msg)
    logger.debug('tiles inserted.')
    con.commit()  # commit pending inserts: VACUUM cannot run inside a transaction
    # optimize_database expects a cursor, but Connection.execute keeps this working
    optimize_database(con)
def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):
    """Export an MBTiles file into *directory_path* as tile files (+ UTFGrids).

    Writes ``metadata.json``, one ``z/x/y.<format>`` file per tile under
    ``<version>/<name>/``, and — when the optional grids tables exist —
    one ``*.grid.json`` per UTFGrid.  *directory_path* must not yet exist.
    """
    logger.debug("Exporting MBTiles to disk")
    logger.debug("%s --> %s" % (mbtiles_file, directory_path))
    con = mbtiles_connect(mbtiles_file)
    os.mkdir("%s" % directory_path)
    metadata = dict(con.execute('select name, value from metadata;').fetchall())
    with open('%s/metadata.json' % directory_path, 'w') as fh:  # was left unclosed
        json.dump(metadata, fh, indent=4)
    count = con.execute('select count(zoom_level) from tiles;').fetchone()[0]
    done = 0
    msg = ''
    service_version = metadata.get('version', '1.0.0')
    base_path = os.path.join(directory_path,
        service_version,
        metadata.get('name', 'layer')
    )
    if not os.path.isdir(base_path):
        os.makedirs(base_path)
    # if interactivity: emit the grid formatter wrapper
    formatter = metadata.get('formatter')
    if formatter:
        layer_json = os.path.join(base_path, 'layer.json')
        formatter_json = {"formatter": formatter}
        with open(layer_json, 'w') as fh:
            fh.write('grid(' + json.dumps(formatter_json) + ')')
    tiles = con.execute('select zoom_level, tile_column, tile_row, tile_data from tiles;')
    t = tiles.fetchone()
    while t:
        tile_dir = os.path.join(base_path, str(t[0]), str(t[1]))
        if not os.path.isdir(tile_dir):
            os.makedirs(tile_dir)
        tile = os.path.join(tile_dir, '%s.%s' % (t[2], metadata.get('format', 'png')))
        with open(tile, 'wb') as f:
            f.write(t[3])
        done = done + 1
        for c in msg: sys.stdout.write(chr(8))
        logger.info('%s / %s tiles exported' % (done, count))
        t = tiles.fetchone()
    # grids (optional tables: absent in most tilesets)
    done = 0
    msg = ''
    try:
        # Count moved inside the try: it raised uncaught OperationalError
        # whenever the grids table was missing.
        count = con.execute('select count(zoom_level) from grids;').fetchone()[0]
        grids = con.execute('select zoom_level, tile_column, tile_row, grid from grids;')
        g = grids.fetchone()
    except sqlite3.OperationalError:
        g = None # no grids table
    while g:
        zoom_level = g[0]
        tile_column = g[1]
        tile_row = g[2]
        grid_dir = os.path.join(base_path, str(zoom_level), str(tile_column))
        if not os.path.isdir(grid_dir):
            os.makedirs(grid_dir)
        grid = os.path.join(grid_dir, '%s.grid.json' % (tile_row))
        grid_json = json.loads(zlib.decompress(g[3]))
        # join up with the grid 'data' which is in pieces when stored in mbtiles file
        # (parameterized query; was string-interpolated via `% locals()`)
        grid_data_cursor = con.execute(
            'select key_name, key_json FROM grid_data WHERE zoom_level = ? and tile_column = ? and tile_row = ?;',
            (zoom_level, tile_column, tile_row))
        grid_data = grid_data_cursor.fetchone()
        data = {}
        while grid_data:
            data[grid_data[0]] = json.loads(grid_data[1])
            grid_data = grid_data_cursor.fetchone()
        grid_json['data'] = data
        with open(grid, 'w') as f:
            f.write('grid(' + json.dumps(grid_json) + ')')
        done = done + 1
        for c in msg: sys.stdout.write(chr(8))
        logger.info('%s / %s grids exported' % (done, count))
        g = grids.fetchone()
+20
-5
Metadata-Version: 1.0
Name: mbutil
Version: 0.0.1
Version: 0.0.2
Summary: An importer and exporter for MBTiles

@@ -15,5 +15,18 @@ Home-page: https://github.com/mapbox/mbutil

Git checkout (requires git)
git clone git://github.com/mapbox/mbutil.git
./mbutil.py -h
cd mbutil
./mb-util -h
# then to install the mb-util command globally:
sudo python setup.py install
# then you can run:
mb-util
Python installation (requires easy_install)
easy_install mbutil
mb-util -h
## Usage

@@ -23,11 +36,11 @@

./mbutil.py World_Light.mbtiles adirectory
mb-util World_Light.mbtiles adirectory
Import a directory into a `mbtiles` file
./mbutil.py directory World_Light.mbtiles
mb-util directory World_Light.mbtiles
## Requirements
* Python `>= 2.5`
* Python `>= 2.6`

@@ -47,3 +60,5 @@ ## Metadata

- Tom MacWright (tmcw)
- Dane Springmeyer (springmeyer)
- Mathieu Leplatre (leplatrem)
Platform: UNKNOWN
+19
-4

@@ -7,5 +7,18 @@ # MBUtil

Git checkout (requires git)
git clone git://github.com/mapbox/mbutil.git
./mbutil.py -h
cd mbutil
./mb-util -h
# then to install the mb-util command globally:
sudo python setup.py install
# then you can run:
mb-util
Python installation (requires easy_install)
easy_install mbutil
mb-util -h
## Usage

@@ -15,11 +28,11 @@

./mbutil.py World_Light.mbtiles adirectory
mb-util World_Light.mbtiles adirectory
Import a directory into a `mbtiles` file
./mbutil.py directory World_Light.mbtiles
mb-util directory World_Light.mbtiles
## Requirements
* Python `>= 2.5`
* Python `>= 2.6`

@@ -39,1 +52,3 @@ ## Metadata

- Tom MacWright (tmcw)
- Dane Springmeyer (springmeyer)
- Mathieu Leplatre (leplatrem)

@@ -5,7 +5,7 @@ from distutils.core import setup

name='mbutil',
version='0.0.1',
version='0.0.2',
author='Tom MacWright',
author_email='macwright@gmail.com',
packages=[],
scripts=['mbutil.py'],
packages=['mbutil'],
scripts=['mb-util'],
url='https://github.com/mapbox/mbutil',

@@ -12,0 +12,0 @@ license='LICENSE.md',

#!/usr/bin/env python
# MBUtil: a tool for MBTiles files
# Supports importing, exporting, and more
#
# (c) Development Seed 2011
# Licensed under BSD
import sqlite3, uuid, sys, logging, time, os, json
from optparse import OptionParser
logging.basicConfig(level=logging.DEBUG)
def mbtiles_setup(cur):
    """Initialise an empty MBTiles schema on the given cursor."""
    statements = (
        # tile blobs keyed by zoom/column/row
        """
create table tiles (
zoom_level integer,
tile_column integer,
tile_row integer,
tile_data blob);
""",
        # tileset metadata as name/value rows
        """create table metadata
(name text, value text);""",
        # uniqueness guarantees for both tables
        """create unique index name on metadata (name);""",
        """create unique index tile_index on tiles
(zoom_level, tile_column, tile_row);""",
    )
    for ddl in statements:
        cur.execute(ddl)
def mbtiles_connect(mbtiles_file):
    """Open (or create) *mbtiles_file*; exit with status 1 on failure."""
    try:
        con = sqlite3.connect(mbtiles_file)
        return con
    except Exception as e:  # py3-compatible (was py2-only `except Exception, e` / `print x`)
        print("Could not connect to database")
        print(e)
        sys.exit(1)
def optimize_connection(cur):
    """Tune the connection for fast bulk loading (less crash safety)."""
    # No fsync, single exclusive writer, truncated journal.
    pragmas = ("""PRAGMA synchronous=0""",
               """PRAGMA locking_mode=EXCLUSIVE""",
               """PRAGMA journal_mode=TRUNCATE""")
    for statement in pragmas:
        cur.execute(statement)
def compression_prepare(cur, con):
    """Create the de-duplication side tables (``images`` and ``map``).

    Safe to call repeatedly thanks to IF NOT EXISTS.
    """
    # Unique blobs live in `images`; `map` points tile addresses at them.
    images_ddl = """
CREATE TABLE if not exists images (
tile_data blob,
tile_id VARCHAR(256));
"""
    map_ddl = """
CREATE TABLE if not exists map (
zoom_level integer,
tile_column integer,
tile_row integer,
tile_id VARCHAR(256));
"""
    cur.execute(images_ddl)
    cur.execute(map_ddl)
def optimize_database(cur):
    """Refresh query-planner statistics and compact the database file."""
    print('analyzing db')  # py3-compatible print call (was a py2 print statement)
    cur.execute("""ANALYZE;""")
    print('cleaning db')
    cur.execute("""VACUUM;""")
def compression_do(cur, con, chunk):
    """De-duplicate tile blobs from ``tiles`` into ``images`` + ``map``.

    Works through the tiles table in windows of *chunk* rows; identical
    blobs within one window share a single ``images`` row.  Commits after
    every window.

    :param cur: cursor used for reads and writes
    :param con: connection, committed once per window
    :param chunk: window size in rows (larger = better dedup, more memory)
    """
    overlapping = 0
    unique = 0
    total = 0
    cur.execute("select count(zoom_level) from tiles")
    res = cur.fetchone()
    total_tiles = res[0]
    logging.debug("%d total tiles to fetch" % total_tiles)
    # Ceil-division: the old `range(total_tiles / chunk)` skipped the final
    # partial window entirely (and is float division on Python 3).
    rounds = (total_tiles + chunk - 1) // chunk
    for i in range(rounds):
        logging.debug("%d / %d rounds done" % (i, rounds))
        ids = []
        files = []
        start = time.time()
        cur.execute("""select zoom_level, tile_column, tile_row, tile_data
from tiles where rowid > ? and rowid <= ?""", ((i * chunk), ((i + 1) * chunk)))
        print("select: %s" % (time.time() - start))
        rows = cur.fetchall()
        for r in rows:
            total = total + 1
            if r[3] in files:
                # Duplicate blob within this window: reuse the existing image id.
                overlapping = overlapping + 1
                start = time.time()
                query = """insert into map
(zoom_level, tile_column, tile_row, tile_id)
values (?, ?, ?, ?)"""
                cur.execute(query, (r[0], r[1], r[2], ids[files.index(r[3])]))
                print("insert: %s" % (time.time() - start))  # printed after execute so the timing is real
            else:
                unique = unique + 1
                tile_id = str(uuid.uuid4())  # renamed from `id`, which shadowed the builtin
                ids.append(tile_id)
                files.append(r[3])
                start = time.time()
                query = """insert into images
(tile_id, tile_data)
values (?, ?)"""
                cur.execute(query, (tile_id, sqlite3.Binary(r[3])))
                print("insert into images: %s" % (time.time() - start))
                start = time.time()
                query = """insert into map
(zoom_level, tile_column, tile_row, tile_id)
values (?, ?, ?, ?)"""
                cur.execute(query, (r[0], r[1], r[2], tile_id))
                print("insert into map: %s" % (time.time() - start))
        con.commit()
def compression_finalize(cur):
    """Replace the tiles table with a view over the dedup tables, then compact."""
    cur.execute("""drop table tiles;""")
    cur.execute("""create view tiles as
select map.zoom_level as zoom_level,
map.tile_column as tile_column,
map.tile_row as tile_row,
images.tile_data as tile_data FROM
map JOIN images on images.tile_id = map.tile_id;""")
    # Unique indexes: one address per map row, one row per image id.
    cur.execute("""
CREATE UNIQUE INDEX map_index on map
(zoom_level, tile_column, tile_row);""")
    cur.execute("""
CREATE UNIQUE INDEX images_id on images
(tile_id);""")
    # Reclaim space and refresh planner statistics.
    cur.execute("""vacuum;""")
    cur.execute("""analyze;""")
def disk_to_mbtiles(directory_path, mbtiles_file):
    """Import a z/x/y tile directory tree into a new MBTiles file.

    Restores optional ``metadata.json`` into the metadata table, then walks
    the tree as zoom/column/row and inserts each file as a tile blob.
    """
    print("Importing disk to MBTiles")
    print("%s --> %s" % (directory_path, mbtiles_file))
    con = mbtiles_connect(mbtiles_file)
    cur = con.cursor()
    optimize_connection(cur)
    mbtiles_setup(cur)
    try:
        with open('%s/metadata.json' % directory_path, 'r') as fh:  # was an unclosed open()
            metadata = json.load(fh)
        for name, value in metadata.items():
            cur.execute('insert into metadata (name, value) values (?, ?)',
                (name, value))
        print('metadata from metadata.json restored')
    except Exception as e:  # py3-compatible form (was py2-only `except Exception, e`)
        print(e)
        print('metadata.json not found')
    count = 0
    start_time = time.time()
    msg = ""
    for r1, zs, ignore in os.walk(directory_path):
        for z in zs:
            for r2, xs, ignore in os.walk(os.path.join(r1, z)):
                for x in xs:
                    for r2, ignore, ys in os.walk(os.path.join(r1, z, x)):
                        for y in ys:
                            with open(os.path.join(r1, z, x, y), 'rb') as f:
                                tile_data = f.read()
                            # Strip the file extension before storing: keeping the
                            # raw filename made round-trip exports write `0.png.png`.
                            cur.execute("""insert into tiles (zoom_level,
tile_row, tile_column, tile_data) values
(?, ?, ?, ?);""",
                                (z, x, y.split('.')[0], sqlite3.Binary(tile_data)))
                            count = count + 1
                            if (count % 100) == 0:
                                # backspace over the previous progress message
                                for c in msg: sys.stdout.write(chr(8))
                                msg = "%s tiles inserted (%d tiles/sec)" % (count, count / (time.time() - start_time))
                                sys.stdout.write(msg)
    print('tiles inserted.')
    con.commit()  # commit pending inserts: VACUUM cannot run inside a transaction
    optimize_database(con)
def mbtiles_to_disk(mbtiles_file, directory_path):
    """Export an MBTiles file into *directory_path* as z/x/y tile files.

    Writes ``metadata.json`` plus one file per tile; *directory_path*
    must not exist yet (``os.mkdir`` raises otherwise).
    """
    print("Exporting MBTiles to disk")
    print("%s --> %s" % (mbtiles_file, directory_path))
    con = mbtiles_connect(mbtiles_file)
    os.mkdir("%s" % directory_path)
    metadata = dict(con.execute('select name, value from metadata;').fetchall())
    with open('%s/metadata.json' % directory_path, 'w') as fh:  # was left unclosed
        json.dump(metadata, fh)
    count = con.execute('select count(zoom_level) from tiles;').fetchone()[0]
    done = 0
    msg = ''
    tiles = con.execute('select zoom_level, tile_row, tile_column, tile_data from tiles;')
    t = tiles.fetchone()
    while t:
        if not os.path.isdir("%s/%s/%s/" % (directory_path, t[0], t[1])):
            os.makedirs("%s/%s/%s/" % (directory_path, t[0], t[1]))
        with open('%s/%s/%s/%s.%s' %
            (directory_path, t[0], t[1], t[2], metadata.get('format', 'png')), 'wb') as f:
            f.write(t[3])
        done = done + 1
        # backspace over the previous progress message
        for c in msg: sys.stdout.write(chr(8))
        msg = '%s / %s tiles exported' % (done, count)
        sys.stdout.write(msg)
        t = tiles.fetchone()
if __name__ == '__main__':
    parser = OptionParser(usage="usage: %prog [options] input output")
    # NOTE(review): the window option is parsed but never used in this
    # script; kept for CLI compatibility.
    parser.add_option('-w', '--window', dest='window',
        help='compression window size. larger values faster, dangerouser',
        type='int',
        default=2000)
    (options, args) = parser.parse_args()
    # Transfer operations
    if len(args) == 2:
        # elif chain: the old independent `if`s re-tested the filesystem
        # AFTER a successful transfer had created args[1], then wrongly
        # printed an error and exited with status 1.
        if os.path.isfile(args[0]) and not os.path.exists(args[1]):
            # mbtiles file -> new directory: export
            mbtiles_file, directory_path = args
            mbtiles_to_disk(mbtiles_file, directory_path)
        elif os.path.isfile(args[0]) and os.path.exists(args[1]):
            sys.stderr.write('To export MBTiles to disk, specify a directory that does not yet exist\n')
            sys.exit(1)
        elif os.path.isdir(args[0]) and not os.path.isfile(args[1]):
            # directory -> new mbtiles file: import
            # (was `not os.path.isfile(args[0])`, which re-tested args[0])
            directory_path, mbtiles_file = args
            disk_to_mbtiles(directory_path, mbtiles_file)
        elif os.path.isdir(args[0]) and os.path.isfile(args[1]):
            sys.stderr.write('Importing tiles into already-existing MBTiles is not yet supported\n')
            sys.exit(1)
    else:
        parser.print_help()