fix(data): parallel connection pool.

This commit is contained in:
2023-08-23 17:31:38 +03:00
parent 5bca9bca33
commit df9b835cd5
27 changed files with 1175 additions and 1021 deletions

View File

@@ -2,6 +2,7 @@ from typing import Type
import json
from data import RowModel, Table, ORDER
from meta.logger import log_wrap, set_logging_context
class ModelData:
@@ -60,6 +61,7 @@ class ModelData:
It only updates.
"""
# TODO: Better way of getting the key?
# TODO: Transaction
if not isinstance(parent_id, tuple):
parent_id = (parent_id, )
model = cls._model
@@ -83,6 +85,8 @@ class ListData:
This assumes the list is the only data stored in the table,
and removes list entries by deleting rows.
"""
setting_id: str
# Table storing the setting data
_table_interface: Table
@@ -100,10 +104,12 @@ class ListData:
_cache = None # Map[id -> value]
@classmethod
@log_wrap(isolate=True)
async def _reader(cls, parent_id, use_cache=True, **kwargs):
"""
Read in all entries associated to the given id.
"""
set_logging_context(action="Read cls.setting_id")
if cls._cache is not None and parent_id in cls._cache and use_cache:
return cls._cache[parent_id]
@@ -121,53 +127,56 @@ class ListData:
return data
@classmethod
@log_wrap(isolate=True)
async def _writer(cls, id, data, add_only=False, remove_only=False, **kwargs):
"""
Write the provided list to storage.
"""
set_logging_context(action="Write cls.setting_id")
table = cls._table_interface
conn = await table.connector.get_connection()
async with conn.transaction():
# Handle None input as an empty list
if data is None:
data = []
async with table.connector.connection() as conn:
table.connector.conn = conn
async with conn.transaction():
# Handle None input as an empty list
if data is None:
data = []
current = await cls._reader(id, use_cache=False, **kwargs)
if not cls._order_column and (add_only or remove_only):
to_insert = [item for item in data if item not in current] if not remove_only else []
to_remove = data if remove_only else (
[item for item in current if item not in data] if not add_only else []
)
current = await cls._reader(id, use_cache=False, **kwargs)
if not cls._order_column and (add_only or remove_only):
to_insert = [item for item in data if item not in current] if not remove_only else []
to_remove = data if remove_only else (
[item for item in current if item not in data] if not add_only else []
)
# Handle required deletions
if to_remove:
params = {
cls._id_column: id,
cls._data_column: to_remove
}
await table.delete_where(**params)
# Handle required deletions
if to_remove:
params = {
cls._id_column: id,
cls._data_column: to_remove
}
await table.delete_where(**params)
# Handle required insertions
if to_insert:
columns = (cls._id_column, cls._data_column)
values = [(id, value) for value in to_insert]
await table.insert_many(columns, *values)
# Handle required insertions
if to_insert:
columns = (cls._id_column, cls._data_column)
values = [(id, value) for value in to_insert]
await table.insert_many(columns, *values)
if cls._cache is not None:
new_current = [item for item in current + to_insert if item not in to_remove]
cls._cache[id] = new_current
else:
# Remove all and add all to preserve order
delete_params = {cls._id_column: id}
await table.delete_where(**delete_params)
if cls._cache is not None:
new_current = [item for item in current + to_insert if item not in to_remove]
cls._cache[id] = new_current
else:
# Remove all and add all to preserve order
delete_params = {cls._id_column: id}
await table.delete_where(**delete_params)
if data:
columns = (cls._id_column, cls._data_column)
values = [(id, value) for value in data]
await table.insert_many(columns, *values)
if data:
columns = (cls._id_column, cls._data_column)
values = [(id, value) for value in data]
await table.insert_many(columns, *values)
if cls._cache is not None:
cls._cache[id] = data
if cls._cache is not None:
cls._cache[id] = data
class KeyValueData: