Use direct Postgres connection, proper CLI, cleanup

Minimum requirements, proper parameter processing, docs
This commit is contained in:
Yuri Astrakhan 2019-03-21 19:44:09 -04:00
parent 05b142f3ee
commit 1bf8d30e4a
3 changed files with 211 additions and 20 deletions

173
.gitignore vendored Normal file
View File

@ -0,0 +1,173 @@
# Created by .ignore support plugin (hsz.mobi)
### VirtualEnv template
# Virtualenv
# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
.Python
[Bb]in
[Ii]nclude
[Ll]ib
[Ll]ib64
[Ll]ocal
[Ss]cripts
pyvenv.cfg
.venv
pip-selfcheck.json
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/dictionaries
.idea/**/shelf
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# CMake
cmake-build-debug/
cmake-build-release/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests

View File

@ -1,3 +1,2 @@
tornado==6.0.1 tornado==6.0.1
sqlalchemy==1.3.1
psycopg2-binary==2.7.7 psycopg2-binary==2.7.7

View File

@ -18,62 +18,81 @@ Options:
--help Show this screen. --help Show this screen.
--version Show version. --version Show version.
""" """
import logging
import tornado.ioloop import tornado.ioloop
import tornado.web import tornado.web
import io import io
import os import os
from docopt import docopt from docopt import docopt
from sqlalchemy import create_engine, inspect import psycopg2
from sqlalchemy.orm import sessionmaker
class GetTile(tornado.web.RequestHandler):
    """Serve a single vector tile from Postgres as a PBF blob.

    Each request runs the prepared-statement query ``EXECUTE <fname>(z, x, y)``
    against a shared psycopg2 connection and writes the first column of the
    first row back to the client as ``application/x-protobuf``.
    """

    def initialize(self, fname, connection, query):
        # fname: name of the prepared statement -- used only in log messages.
        # connection: open psycopg2 connection, shared across requests.
        # query: parameterized "EXECUTE <fname>(%s, %s, %s)" string.
        self.fname = fname
        self.db_connection = connection
        self.db_query = query

    def get(self, z, x, y):
        """Return the tile at (z, x, y), or 404 when the query yields no data."""
        z, x, y = int(z), int(x), int(y)
        cursor = self.db_connection.cursor()
        try:
            cursor.execute(self.db_query, (z, x, y))
            result = cursor.fetchall()
            if result:
                self.set_header("Content-Type", "application/x-protobuf")
                self.set_header("Content-Disposition", "attachment")
                self.set_header("Access-Control-Allow-Origin", "*")
                value = io.BytesIO(result[0][0]).getvalue()
                self.write(value)
                print('{0}({1},{2},{3}) --> {4:,} bytes'.format(self.fname, z, x, y, len(value)))
            else:
                self.clear()
                self.set_status(404)
                print('{0}({1},{2},{3}) is EMPTY'.format(self.fname, z, x, y))
        except Exception as err:
            # BUG FIX: the format string had no {4} placeholder, so `err` was
            # passed to .format() but silently dropped from the log output.
            print('{0}({1},{2},{3}) threw an exception: {4}'.format(self.fname, z, x, y, err))
            raise
        finally:
            # Always release the cursor, even when the query raises.
            cursor.close()
def main(args): def main(args):
pgdb = os.getenv('POSTGRES_DB', 'openmaptiles')
pghost = os.getenv('POSTGRES_HOST', 'localhost')
pgport = os.getenv('POSTGRES_PORT', '5432')
print('Connecting to PostgreSQL at {0}:{1}, db={2}...'.format(pghost, pgport, pgdb))
connection = psycopg2.connect(
dbname=pgdb,
host=pghost,
port=pgport,
user=os.getenv('POSTGRES_USER', 'openmaptiles'),
password=os.getenv('POSTGRES_PASSWORD', 'openmaptiles'),
)
sqlfile = args['<prepared-sql-file>'] sqlfile = args['<prepared-sql-file>']
with open(sqlfile, 'r') as stream: with open(sqlfile, 'r') as stream:
prepared = stream.read() prepared = stream.read()
pghost = os.getenv('POSTGRES_HOST', 'localhost') + ':' + os.getenv('POSTGRES_PORT', '5432') print('Using prepared SQL:\n\n-------\n\n' + prepared + '\n\n-------\n\n')
pgdb = os.getenv('POSTGRES_DB', 'openmaptiles')
pgcreds = os.getenv('POSTGRES_USER', 'openmaptiles') + ':' + os.getenv('POSTGRES_PASSWORD', 'openmaptiles')
engine = create_engine('postgresql://' + pgcreds + '@' + pghost + '/' + pgdb)
print('Connecting to PostgreSQL at {0}, db={1}'.format(pghost, pgdb)) cursor = connection.cursor()
inspector = inspect(engine) try:
session = sessionmaker(bind=engine)() cursor.execute(prepared)
session.execute(prepared) finally:
cursor.close()
query = "EXECUTE {0}(:z, :x, :y)".format(args['--fname']) fname = args['--fname']
print('Loaded {0}, will use "{1}" to get vector tiles.'.format(sqlfile, query)) query = "EXECUTE {0}(%s, %s, %s)".format(fname)
print('Loaded {0}\nWill use "{1}" to get vector tiles.'.format(sqlfile, query))
tornado.log.access_log.setLevel(logging.ERROR)
port = int(args['--port']) port = int(args['--port'])
application = tornado.web.Application([( application = tornado.web.Application([(
r"/tiles/([0-9]+)/([0-9]+)/([0-9]+).pbf", r"/tiles/([0-9]+)/([0-9]+)/([0-9]+).pbf",
GetTile, GetTile,
dict(session=session, query=query) dict(fname=fname, connection=connection, query=query)
)]) )])
application.listen(port) application.listen(port)