Compare commits: master ... Sophox/mas (6 commits)

6e74be938d
08e3f00920
1bf8d30e4a
05b142f3ee
f6468ccc2f
c916c911ab

.gitignore (vendored, new file, 173 lines added)

# Created by .ignore support plugin (hsz.mobi)
### VirtualEnv template
# Virtualenv
# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
.Python
[Bb]in
[Ii]nclude
[Ll]ib
[Ll]ib64
[Ll]ocal
[Ss]cripts
pyvenv.cfg
.venv
pip-selfcheck.json
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/dictionaries
.idea/**/shelf

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# CMake
cmake-build-debug/
cmake-build-release/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

Dockerfile (14 lines changed)

@@ -1,10 +1,12 @@
-FROM python:3.7
-RUN mkdir -p /usr/src/app
+FROM python:3.6
+LABEL MAINTAINER "Yuri Astrakhan <YuriAstrakhan@gmail.com>"
 
 WORKDIR /usr/src/app
 
-VOLUME /mapping
-COPY . /usr/src/app/
+# Copy requirements.txt first to avoid pip install on every code change
+COPY ./requirements.txt /usr/src/app/
 RUN pip install --no-cache-dir -r requirements.txt
 
-CMD ["python", "-u","/usr/src/app/server.py"]
+COPY . /usr/src/app/
+
+ENTRYPOINT ["python", "server.py"]

README.md (16 lines changed)

@@ -1,2 +1,16 @@
 # postserve
-Use the ST_AsMVT function to render tiles directly in Postgres
+
+This is an OpenMapTiles map vector tile test server. It requires the prepared SQL statement
+generated by the `generate-sqltomvt` tools script.
+
+To run, use this command, replacing `myfile.sql` with the name of the generated file in the current dir.
+
+```
+docker run -it --rm --net=host -v "$PWD:/data" openmaptiles/postserve /data/myfile.sql
+```
+
+To see help, use
+
+```
+docker run -it --rm --net=host -v "$PWD:/data" openmaptiles/postserve --help
+```

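For reference, a running postserve instance serves tiles from the `/tiles/{z}/{x}/{y}.pbf` endpoint described in the new README and server.py. A minimal sketch of fetching one tile, assuming the container above is reachable on localhost:8080 (the documented default port) and that the chosen z/x/y tile exists in the loaded data:

```python
# Minimal sketch: fetch one vector tile from a locally running postserve instance.
# Assumes the server listens on localhost:8080 (the documented default) and that
# the z/x/y values below are hypothetical but valid for the loaded data.
import urllib.request

z, x, y = 0, 0, 0  # hypothetical tile coordinates
url = "http://localhost:8080/tiles/{0}/{1}/{2}.pbf".format(z, x, y)

with urllib.request.urlopen(url) as resp:
    tile = resp.read()  # raw Mapbox Vector Tile (PBF) bytes

print("fetched {0} bytes from {1}".format(len(tile), url))
```

The response carries the `application/x-protobuf` content type set by the GetTile handler, so the bytes can be written straight to a `.pbf` file or passed to any MVT decoder.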
requirements.txt

@@ -1,6 +1,3 @@
-asyncpg==0.19.0
-Click==7.0
-mercantile==1.1.2
-pyproj==2.4.0
-PyYAML==5.1.2
-tornado==6.0.3
+tornado==6.0.1
+psycopg2-binary==2.7.7
+docopt==0.6.2

server.py (192 lines changed; the 108-line asyncpg implementation was replaced by a 104-line psycopg2/docopt implementation)

Old version (removed):

import asyncpg
import io
import os
import logging
import tornado
import tornado.web
from tornado.log import enable_pretty_logging
import mercantile
import pyproj
import yaml
import sys
import itertools

log = logging.getLogger('tornado.application')

def GetTM2Source(file):
    with open(file,'r') as stream:
        tm2source = yaml.load(stream)
    return tm2source

def GenerateFunction(layers):
    queries = []
    function = "CREATE OR REPLACE FUNCTION gettile(geometry, numeric, numeric, numeric) RETURNS SETOF bytea AS $$"
    for layer in layers['Layer']:
        layer_query = layer['Datasource']['table'].strip()
        layer_query = layer_query[1:len(layer_query)-6] # Remove enough characters to remove first and last () and "AS t"
        layer_query = layer_query.replace("geometry", "ST_AsMVTGeom(geometry,!bbox!,4096,0,true) AS mvtgeometry")
        base_query = "SELECT ST_ASMVT('"+layer['id']+"', 4096, 'mvtgeometry', tile) FROM ("+layer_query+" WHERE ST_AsMVTGeom(geometry, !bbox!,4096,0,true) IS NOT NULL) AS tile"
        queries.append(base_query.replace("!bbox!","$1").replace("!scale_denominator!","$2").replace("!pixel_width!","$3").replace("!pixel_height!","$4"))
    function = function + " UNION ALL ".join(queries) + ";$$ LANGUAGE SQL"
    print(function)
    return(function)

dsn = 'postgresql://'+os.getenv('POSTGRES_USER','openmaptiles')+':'+os.getenv('POSTGRES_PASSWORD','openmaptiles')+'@'+os.getenv('POSTGRES_HOST','postgres')+':'+os.getenv('POSTGRES_PORT','5432')+'/'+os.getenv('POSTGRES_DB','openmaptiles')

def bounds(zoom,x,y):
    inProj = pyproj.Proj(init='epsg:4326')
    outProj = pyproj.Proj(init='epsg:3857')
    lnglatbbox = mercantile.bounds(x,y,zoom)
    ws = (pyproj.transform(inProj,outProj,lnglatbbox[0],lnglatbbox[1]))
    en = (pyproj.transform(inProj,outProj,lnglatbbox[2],lnglatbbox[3]))
    return {'w':ws[0],'s':ws[1],'e':en[0],'n':en[1]}

def zoom_to_scale_denom(zoom): # For !scale_denominator!
    # From https://github.com/openstreetmap/mapnik-stylesheets/blob/master/zoom-to-scale.txt
    map_width_in_metres = 40075016.68557849
    tile_width_in_pixels = 256.0
    standardized_pixel_size = 0.00028
    map_width_in_pixels = tile_width_in_pixels*(2.0**zoom)
    return str(map_width_in_metres/(map_width_in_pixels * standardized_pixel_size))

def replace_tokens(query,s,w,n,e,scale_denom):
    return query.replace("!bbox!","ST_MakeBox2D(ST_Point("+w+", "+s+"), ST_Point("+e+", "+n+"))").replace("!scale_denominator!",scale_denom).replace("!pixel_width!","256").replace("!pixel_height!","256")

async def get_mvt(connection,zoom,x,y):
    try: # Sanitize the inputs
        sani_zoom,sani_x,sani_y = float(zoom),float(x),float(y)
        del zoom,x,y
    except:
        print('suspicious')
        return 1

    scale_denom = zoom_to_scale_denom(sani_zoom)
    tilebounds = bounds(sani_zoom,sani_x,sani_y)
    s,w,n,e = str(tilebounds['s']),str(tilebounds['w']),str(tilebounds['n']),str(tilebounds['e'])
    final_query = "SELECT gettile(!bbox!, !scale_denominator!, !pixel_width!, !pixel_height!);"
    sent_query = replace_tokens(final_query,s,w,n,e,scale_denom)
    log.info(sent_query)
    response = await connection.fetch(sent_query)
    layers = filter(None,list(itertools.chain.from_iterable(response)))
    final_tile = b''
    for layer in layers:
        final_tile = final_tile + io.BytesIO(layer).getvalue()
    return final_tile

class GetTile(tornado.web.RequestHandler):
    def initialize(self, pool):
        self.pool = pool

    async def get(self, zoom,x,y):
        self.set_header("Content-Type", "application/x-protobuf")
        self.set_header("Content-Disposition", "attachment")
        self.set_header("Access-Control-Allow-Origin", "*")
        async with self.pool.acquire() as connection:
            response = await get_mvt(connection, zoom,x,y)
            self.write(response)

async def get_pool():
    pool = await asyncpg.create_pool(dsn = dsn)
    layers = GetTM2Source(os.getenv("MAPPING_FILE", "/mapping/data.yml"))
    # Make this prepared statement from the tm2source
    create_function = GenerateFunction(layers)
    async with pool.acquire() as connection:
        await connection.execute(create_function)
    return pool

def m():
    enable_pretty_logging()
    io_loop = tornado.ioloop.IOLoop.current()
    pool = io_loop.run_sync(get_pool)
    application = tornado.web.Application([(r"/tiles/([0-9]+)/([0-9]+)/([0-9]+).pbf", GetTile, dict(pool=pool))])
    print("Postserve started..")
    application.listen(int(os.getenv("LISTEN_PORT", "8080")))
    io_loop.start()

if __name__ == "__main__":
    m()

New version (added):

#!/usr/bin/env python
"""
This is a simple vector tile server that returns a PBF tile for /tiles/{z}/{x}/{y}.pbf requests

Use these environment variables to configure PostgreSQL access:
  POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DB, POSTGRES_PASSWORD

Usage:
  server <prepared-sql-file> [--fname <name>] [--port <port>]
  server --help
  server --version

  <prepared-sql-file>  SQL file generated by generate-sqltomvt script with the --prepared flag

Options:
  --fname=<name>    Name of the generated function  [default: gettile]
  -p --port=<port>  Serve on this port  [default: 8080]
  --help            Show this screen.
  --version         Show version.
"""
import logging
import tornado.ioloop
import tornado.web
import io
import os
from docopt import docopt
import psycopg2


class GetTile(tornado.web.RequestHandler):
    def initialize(self, fname, connection, query):
        self.fname = fname
        self.db_connection = connection
        self.db_query = query

    def get(self, z, x, y):
        z, x, y = int(z), int(x), int(y)
        cursor = self.db_connection.cursor()
        try:
            cursor.execute(self.db_query, (z, x, y))
            result = cursor.fetchall()
            if result:
                self.set_header("Content-Type", "application/x-protobuf")
                self.set_header("Content-Disposition", "attachment")
                self.set_header("Access-Control-Allow-Origin", "*")
                value = io.BytesIO(result[0][0]).getvalue()
                self.write(value)
                print('{0}({1},{2},{3}) --> {4:,} bytes'.format(self.fname, z, x, y, len(value)))
            else:
                self.clear()
                self.set_status(404)
                print('{0}({1},{2},{3}) is EMPTY'.format(self.fname, z, x, y))
        except Exception as err:
            print('{0}({1},{2},{3}) threw an exception'.format(self.fname, z, x, y, err))
            raise
        finally:
            cursor.close()


def main(args):
    pgdb = os.getenv('POSTGRES_DB', 'openmaptiles')
    pghost = os.getenv('POSTGRES_HOST', 'localhost')
    pgport = os.getenv('POSTGRES_PORT', '5432')
    print('Connecting to PostgreSQL at {0}:{1}, db={2}...'.format(pghost, pgport, pgdb))

    connection = psycopg2.connect(
        dbname=pgdb,
        host=pghost,
        port=pgport,
        user=os.getenv('POSTGRES_USER', 'openmaptiles'),
        password=os.getenv('POSTGRES_PASSWORD', 'openmaptiles'),
    )

    sqlfile = args['<prepared-sql-file>']
    with open(sqlfile, 'r') as stream:
        prepared = stream.read()

    print('Using prepared SQL:\n\n-------\n\n' + prepared + '\n\n-------\n\n')

    cursor = connection.cursor()
    try:
        cursor.execute(prepared)
    finally:
        cursor.close()

    fname = args['--fname']
    query = "EXECUTE {0}(%s, %s, %s)".format(fname)
    print('Loaded {0}\nWill use "{1}" to get vector tiles.'.format(sqlfile, query))

    tornado.log.access_log.setLevel(logging.ERROR)
    port = int(args['--port'])
    application = tornado.web.Application([(
        r"/tiles/([0-9]+)/([0-9]+)/([0-9]+).pbf",
        GetTile,
        dict(fname=fname, connection=connection, query=query)
    )])
    application.listen(port)

    print("Postserve started, listening on 0.0.0.0:{0}".format(port))
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main(docopt(__doc__, version="1.0"))

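For reference, the rewritten server expects `<prepared-sql-file>` to define a PostgreSQL prepared statement (named `gettile` by default) taking zoom, x and y, which it then runs as `EXECUTE gettile(%s, %s, %s)` for every tile request. A minimal sketch of that contract, using a deliberately toy prepared statement in place of real `generate-sqltomvt --prepared` output and assumed local connection settings:

```python
# Minimal sketch of the prepared-statement contract used by the new server.py.
# The PREPARE body below is a toy placeholder, NOT what generate-sqltomvt emits;
# the connection parameters are assumptions for a local test database.
import psycopg2

conn = psycopg2.connect(dbname="openmaptiles", host="localhost",
                        user="openmaptiles", password="openmaptiles")

# Stand-in for the contents of <prepared-sql-file>: a statement named "gettile"
# that takes (zoom, x, y) and returns a single bytea row.
prepared = ("PREPARE gettile(integer, integer, integer) AS "
            "SELECT convert_to(format('tile %s/%s/%s', $1, $2, $3), 'UTF8')")

with conn.cursor() as cur:
    cur.execute(prepared)  # load the prepared statement once, as main() does
    cur.execute("EXECUTE gettile(%s, %s, %s)", (0, 0, 0))  # same query shape as GetTile.get()
    row = cur.fetchone()
    print("gettile returned {0} bytes".format(len(row[0])))
```

A real prepared statement would return the MVT bytea for the requested tile, which the GetTile handler then writes back to the client as `application/x-protobuf`.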