Add model batch removal optimization for filtering

miruka 2020-05-06 13:39:53 -04:00
parent 5432958121
commit c502aa4245
6 changed files with 40 additions and 10 deletions

View File

@@ -58,7 +58,7 @@ class ModelFilter(ModelProxy):
        take_out = []
        bring_back = []
        for key, item in self.items():
        for key, item in sorted(self.items(), key=lambda kv: kv[1]):
            if not self.accept_item(item):
                take_out.append(key)
@@ -66,6 +66,7 @@ class ModelFilter(ModelProxy):
            if self.accept_item(item):
                bring_back.append(key)
        with self.batch_remove():
            for key in take_out:
                self.filtered_out[key] = self.pop(key)

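Side note on the sorted() change above: removing keys in ascending item order makes each pop after the first land on the same list index, which is what lets batch_remove (added below in the model code) coalesce them. A minimal standalone sketch of that effect, using a plain Python list rather than the actual Model class:

# Illustration only, not mirage code: popping items in ascending order
# collapses a run of consecutive removals onto one repeated index.
items = ["a", "b", "c", "d", "e"]
to_remove = ["b", "c", "d"]        # consecutive items, already in item order

popped_indices = []
for value in to_remove:
    index = items.index(value)     # "b" sits at 1; after popping it, "c" is at 1 too
    items.pop(index)
    popped_indices.append(index)

print(popped_indices)              # [1, 1, 1] -> could become index=1, count=3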
View File

@@ -1,6 +1,8 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
import itertools
from bisect import bisect
from contextlib import contextmanager
from threading import RLock
from typing import (
    TYPE_CHECKING, Any, Dict, Iterator, List, MutableMapping, Optional,
@@ -42,6 +44,8 @@ class Model(MutableMapping):
        self.take_items_ownership: bool = True
        self._active_batch_remove_indice: Optional[List[int]] = None
        if self.sync_id:
            self.instances[self.sync_id] = self
@@ -144,7 +148,10 @@ class Model(MutableMapping):
            proxy.source_item_deleted(self, key)
        if self.sync_id:
            if self._active_batch_remove_indice is None:
                ModelItemDeleted(self.sync_id, index)
            else:
                self._active_batch_remove_indice.append(index)
    def __iter__(self) -> Iterator:
@@ -170,3 +177,18 @@ class Model(MutableMapping):
        new = type(self)(sync_id=sync_id)
        new.update(self)
        return new
    @contextmanager
    def batch_remove(self):
        try:
            self._active_batch_remove_indice = []
            yield None
        finally:
            indice = self._active_batch_remove_indice
            groups = [list(group) for item, group in itertools.groupby(indice)]
            for grp in groups:
                ModelItemDeleted(self.sync_id, index=grp[0], count=len(grp))
            self._active_batch_remove_indice = None

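For context on the finally block above: itertools.groupby only merges equal consecutive values, so the grouping works precisely because batched removals of sorted items record the same index over and over. A small sketch with made-up indices:

import itertools

# Hypothetical indices recorded by __delitem__ during one batch_remove block
recorded = [1, 1, 1, 5, 5]

groups = [list(group) for _item, group in itertools.groupby(recorded)]
for grp in groups:
    print({"index": grp[0], "count": len(grp)})
# {'index': 1, 'count': 3}
# {'index': 5, 'count': 2}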
View File

@@ -43,6 +43,7 @@ class ModelProxy(Model):
    def source_cleared(self, source: Model) -> None:
        if self.accept_source(source):
            with self.batch_remove():
                for source_sync_id, key in self.copy():
                    if source_sync_id == source.sync_id:
                        del self[source_sync_id, key]

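Putting the pieces together, wrapping source_cleared's deletion loop in batch_remove() means clearing a source emits one grouped deletion event per consecutive run instead of one event per item. A toy stand-in (not mirage's Model or ModelProxy, names are illustrative) showing the defer-then-group pattern end to end:

import itertools
from contextlib import contextmanager

class ToyModel(dict):
    """Dict-based stand-in that records deletion events like the real Model."""

    def __init__(self):
        super().__init__()
        self._batch = None   # mirrors _active_batch_remove_indice
        self.events = []     # stands in for emitted ModelItemDeleted events

    def __delitem__(self, key):
        index = list(self).index(key)
        super().__delitem__(key)
        if self._batch is None:
            self.events.append(("deleted", index, 1))
        else:
            self._batch.append(index)

    @contextmanager
    def batch_remove(self):
        try:
            self._batch = []
            yield
        finally:
            for _value, group in itertools.groupby(self._batch):
                run = list(group)
                self.events.append(("deleted", run[0], len(run)))
            self._batch = None

model = ToyModel()
model.update({"a": 1, "b": 2, "c": 3, "d": 4})

with model.batch_remove():
    for key in ("b", "c", "d"):   # consecutive keys, as in a cleared source
        del model[key]

print(model.events)               # [('deleted', 1, 3)] instead of three events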
View File

@@ -82,6 +82,7 @@ class ModelItemDeleted(ModelEvent):
    """Indicate the removal of a `ModelItem` from a `Backend` `Model`."""
    index: int = field()
    count: int = 1
@dataclass

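Because the new count field defaults to 1, existing call sites that delete a single item need no changes, while batch_remove can report a whole consecutive run in one event. A quick shape-only stand-in (not the real event class):

from dataclasses import dataclass, field

@dataclass
class ModelItemDeletedSketch:
    sync_id: str
    index: int = field()
    count: int = 1

print(ModelItemDeletedSketch("some.sync.id", index=4))           # count stays 1
print(ModelItemDeletedSketch("some.sync.id", index=4, count=3))  # items 4, 5 and 6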
View File

@@ -24,6 +24,7 @@ QtObject {
        const onError = Globals.pendingCoroutines[uuid].onError
        delete Globals.pendingCoroutines[uuid]
        Globals.pendingCoroutinesChanged()
        if (error) {
            const type = py.getattr(py.getattr(error, "__class__"), "__name__")
@@ -43,6 +44,7 @@ QtObject {
    function onLoopException(message, error, traceback) {
        if (traceback.includes("429, None")) return
        // No need to log these here, the asyncio exception handler does it
        const type = py.getattr(py.getattr(error, "__class__"), "__name__")
        utils.showError(type, traceback, message)
@@ -68,9 +70,10 @@
    }
    function onModelItemDeleted(syncId, index) {
        // print("delete", syncId, index)
        ModelStore.get(syncId).remove(index)
    function onModelItemDeleted(syncId, index, count=1) {
        // print("delete", syncId, index, count)
        print(syncId, index, count)
        ModelStore.get(syncId).remove(index, count)
    }

View File

@@ -26,6 +26,7 @@ Python {
        const future = privates.makeFuture()
        Globals.pendingCoroutines[uuid] = {future, onSuccess, onError}
        Globals.pendingCoroutinesChanged()
        call("BRIDGE.call_backend_coro", [name, uuid, args], pyFuture => {
            future.privates.pythonFuture = pyFuture
@@ -43,6 +44,7 @@ Python {
        const uuid = accountId + "." + name + "." + CppUtils.uuid()
        Globals.pendingCoroutines[uuid] = {onSuccess, onError}
        Globals.pendingCoroutinesChanged()
        const call_args = [accountId, name, uuid, args]