diff --git a/Makefile b/Makefile
index 6178c87..6fde901 100644
--- a/Makefile
+++ b/Makefile
@@ -64,7 +64,7 @@ pypi-upload: install-releasetools
 
 install-package:
 	@test -e $(python) || python3 -m venv $(venv)
-	$(pip) install --prefer-binary --editable=.[test,develop,release,sql]
+	$(pip) install --prefer-binary --editable=.[test,sql,datasette,develop,release]
 
 install-doctools:
 	@test -e $(python) || python3 -m venv $(venv)
diff --git a/doc/export/sqlite.rst b/doc/export/sqlite.rst
index 2339086..12c7057 100644
--- a/doc/export/sqlite.rst
+++ b/doc/export/sqlite.rst
@@ -95,6 +95,19 @@ Run a query on the ``dwd_phenology`` view.
 
 For more example SQL statements, see also :ref:`SQLite DWD archive usage `.
 
+datasette
+---------
+`Datasette`_ is a tool for exploring and publishing data. It helps people take data of
+any shape or size and publish that as an interactive, explorable website and accompanying
+API.
+
+::
+
+    datasette serve --port 7777 *.sqlite
+
+https://datasette.io/tutorials/explore
+
+
 
 *******
 Details
diff --git a/phenodata/dwd/datasette_app/__init__.py b/phenodata/dwd/datasette_app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/phenodata/dwd/datasette_app/__main__.py b/phenodata/dwd/datasette_app/__main__.py
new file mode 100644
index 0000000..de63ae3
--- /dev/null
+++ b/phenodata/dwd/datasette_app/__main__.py
@@ -0,0 +1,54 @@
+"""Serve phenodata DWD SQLite exports through a pre-configured Datasette instance."""
+import asyncio
+import logging
+from pathlib import Path
+
+import uvicorn
+from datasette.app import Datasette
+from datasette.cli import check_databases
+from datasette.utils import SpatialiteNotFound, StartupError
+
+
+logger = logging.getLogger(__name__)
+
+here = Path(__file__).parent
+
+
+def main():
+    """Start a Datasette server on the annual-recent SQLite database.
+
+    Raises ``IOError`` when the SpatiaLite extension is missing or
+    Datasette fails to start up.
+    """
+    files = ["phenodata-dwd-annual-recent.sqlite"]
+    # Use this package directory as Datasette's config dir, so that the
+    # bundled `settings.json` next to this file is picked up.
+    options = {"config_dir": here}
+
+    try:
+        ds = Datasette(files, **options)
+    except SpatialiteNotFound as ex:
+        raise IOError("Could not find SpatiaLite extension") from ex
+    except StartupError as ex:
+        raise IOError(ex.args[0]) from ex
+
+    # Run the "startup" plugin hooks.
+    asyncio.get_event_loop().run_until_complete(ds.invoke_startup())
+
+    # Run async soundness checks on the configured databases.
+    asyncio.get_event_loop().run_until_complete(check_databases(ds))
+
+    host = "localhost"
+    port = 7777
+
+    # Announce the URL, including the root authentication token.
+    url = "http://{}:{}{}?token={}".format(
+        host, port, ds.urls.path("-/auth-token"), ds._root_token
+    )
+    logger.info(url)
+
+    # Start the ASGI server.
+    uvicorn.run(ds.app(), host=host, port=port, log_level="info", lifespan="on", workers=1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/phenodata/dwd/datasette_app/settings.json b/phenodata/dwd/datasette_app/settings.json
new file mode 100644
index 0000000..b58a58a
--- /dev/null
+++ b/phenodata/dwd/datasette_app/settings.json
@@ -0,0 +1,9 @@
+{
+    "cache_size_kb": 50000,
+    "default_cache_ttl": 86400,
+    "facet_time_limit_ms": 1000,
+    "force_https_urls": false,
+    "max_csv_mb": 500,
+    "num_sql_threads": 10,
+    "sql_time_limit_ms": 7420
+}
diff --git a/setup.py b/setup.py
index 77c2ced..638eba9 100644
--- a/setup.py
+++ b/setup.py
@@ -86,6 +86,23 @@
     zip_safe=False,
     install_requires=requires,
     extras_require={
+        'datasette': [
+            'datasette<1',
+            'datasette-atom<1',
+            'datasette-ics<1',
+            # 'datasette-configure-fts<2',
+            'datasette-cluster-map<1',
+            'datasette-copyable<1',
+            'datasette-dashboards<1',
+            'datasette-graphql<3',
+            'datasette-gzip<1',
+            'datasette-query-files<1',
+            'datasette-query-links<1',
+            # 'datasette-redirect-to-https<1',
+            'datasette-search-all<2',
+            # 'datasette-total-page-time<1',  # Internal Server Error
+            'datasette-vega<1',
+        ],
         'sql': ['duckdb>=0.3,<0.8'],
         'test': test_requires,
     },