Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Write the date in place of the "Unreleased" in the case a new version is release
documentation and one test.
- Deletion of nodes or metadata revisions now requires deletion scopes,
rather than writing scopes.
- In-memory SQLite databases are now connection-pooled and cached.

## Fixed

Expand Down
5 changes: 4 additions & 1 deletion tiled/_tests/test_sync.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import asyncio
import contextlib
import tempfile
import uuid

import awkward
import h5py
Expand All @@ -24,7 +25,9 @@
def client_factory(readable_storage=None):
with tempfile.TemporaryDirectory() as tempdir:
catalog = in_memory(
writable_storage=str(tempdir), readable_storage=readable_storage
named_memory=str(uuid.uuid4())[:8],
writable_storage=str(tempdir),
readable_storage=readable_storage,
)
app = build_app(catalog)
with Context.from_app(app) as context:
Expand Down
12 changes: 10 additions & 2 deletions tiled/_tests/test_validation.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
"""
This tests tiled's validation registry
"""
import uuid

import numpy as np
import pandas as pd
import pytest
Expand Down Expand Up @@ -50,7 +52,10 @@ def client(tmpdir_module):
{
"tree": "tiled.catalog:in_memory",
"path": "/",
"args": {"writable_storage": str(tmpdir_module)},
"args": {
"named_memory": str(uuid.uuid4())[:8],
"writable_storage": str(tmpdir_module),
},
},
],
"specs": [
Expand Down Expand Up @@ -104,7 +109,10 @@ def test_unknown_spec_strict(tmpdir):
{
"tree": "tiled.catalog:in_memory",
"path": "/",
"args": {"writable_storage": str(tmpdir)},
"args": {
"named_memory": str(uuid.uuid4())[:8],
"writable_storage": str(tmpdir),
},
},
],
"specs": [
Expand Down
4 changes: 2 additions & 2 deletions tiled/server/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -562,13 +562,13 @@ async def startup_event():
make_admin_by_identity,
purge_expired,
)
from .connection_pool import open_database_connection_pool
from .connection_pool import is_memory_sqlite, open_database_connection_pool

# This creates a connection pool and stashes it in a module-global
# registry, keyed on database_settings, where it can be retrieved by
# the Dependency get_database_session.
engine = open_database_connection_pool(settings.database_settings)
if not engine.url.database or engine.url.query.get("mode") == "memory":
if is_memory_sqlite(engine.url):
# Special-case for in-memory SQLite: Because it is transient we can
# skip over anything related to migrations.
await initialize_database(engine)
Expand Down
10 changes: 5 additions & 5 deletions tiled/server/connection_pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from sqlalchemy import event
from sqlalchemy.engine import URL, make_url
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
from sqlalchemy.pool import AsyncAdaptedQueuePool
from sqlalchemy.pool import AsyncAdaptedQueuePool, StaticPool

from ..server.settings import DatabaseSettings, Settings, get_settings
from ..utils import ensure_specified_sql_driver, safe_json_dump, sanitize_uri
Expand Down Expand Up @@ -60,8 +60,8 @@ def open_database_connection_pool(database_settings: DatabaseSettings) -> AsyncE
ensure_specified_sql_driver(database_settings.uri),
echo=DEFAULT_ECHO,
json_serializer=json_serializer,
poolclass=StaticPool,
)

else:
engine = create_async_engine(
ensure_specified_sql_driver(database_settings.uri),
Expand All @@ -73,9 +73,9 @@ def open_database_connection_pool(database_settings: DatabaseSettings) -> AsyncE
pool_pre_ping=database_settings.pool_pre_ping,
)

# Cache the engine so we don't create more than one pool per database_settings.
monitor_db_pool(engine.pool, sanitize_uri(database_settings.uri)[0])
_connection_pools[database_settings] = engine
# Cache the engine so we don't create more than one pool per database_settings.
monitor_db_pool(engine.pool, sanitize_uri(database_settings.uri)[0])
_connection_pools[database_settings] = engine

# For SQLite, ensure that foreign key constraints are enforced.
if engine.dialect.name == "sqlite":
Expand Down