Add model batch removal optimization for filtering
commit c502aa4245 (parent 5432958121)
@@ -58,7 +58,7 @@ class ModelFilter(ModelProxy):
         take_out   = []
         bring_back = []
 
-        for key, item in self.items():
+        for key, item in sorted(self.items(), key=lambda kv: kv[1]):
             if not self.accept_item(item):
                 take_out.append(key)
 
@@ -66,8 +66,9 @@ class ModelFilter(ModelProxy):
             if self.accept_item(item):
                 bring_back.append(key)
 
-        for key in take_out:
-            self.filtered_out[key] = self.pop(key)
+        with self.batch_remove():
+            for key in take_out:
+                self.filtered_out[key] = self.pop(key)
 
         for key in bring_back:
             self[key] = self.filtered_out.pop(key)
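
Why the iteration order changed: sorting `self.items()` by the item value walks the model in the same order the view displays it, so when a run of adjacent items is filtered out, each pop inside `batch_remove()` lands on the same visual index. A rough standalone sketch of that effect (the list, the `rejected` set and every name below are made up for illustration, not taken from the project):

    # Removing a contiguous run in display order hits the same index each
    # time, producing e.g. [2, 2, 2] instead of scattered positions.
    display  = ["a", "b", "c", "d", "e", "f"]
    rejected = {"c", "d", "e"}           # adjacent in display order

    deletion_indices = []
    for item in sorted(rejected):        # same order the view uses
        index = display.index(item)      # position at the moment of removal
        deletion_indices.append(index)
        display.pop(index)

    print(deletion_indices)              # [2, 2, 2] -> one event with count=3
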
@@ -1,6 +1,8 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later
 
+import itertools
 from bisect import bisect
+from contextlib import contextmanager
 from threading import RLock
 from typing import (
     TYPE_CHECKING, Any, Dict, Iterator, List, MutableMapping, Optional,
@@ -42,6 +44,8 @@ class Model(MutableMapping):
 
         self.take_items_ownership: bool = True
 
+        self._active_batch_remove_indice: Optional[List[int]] = None
+
         if self.sync_id:
             self.instances[self.sync_id] = self
 
@@ -144,7 +148,10 @@ class Model(MutableMapping):
             proxy.source_item_deleted(self, key)
 
         if self.sync_id:
-            ModelItemDeleted(self.sync_id, index)
+            if self._active_batch_remove_indice is None:
+                ModelItemDeleted(self.sync_id, index)
+            else:
+                self._active_batch_remove_indice.append(index)
 
 
     def __iter__(self) -> Iterator:
@@ -170,3 +177,18 @@ class Model(MutableMapping):
         new = type(self)(sync_id=sync_id)
         new.update(self)
         return new
+
+
+    @contextmanager
+    def batch_remove(self):
+        try:
+            self._active_batch_remove_indice = []
+            yield None
+        finally:
+            indice = self._active_batch_remove_indice
+            groups = [list(group) for item, group in itertools.groupby(indice)]
+
+            for grp in groups:
+                ModelItemDeleted(self.sync_id, index=grp[0], count=len(grp))
+
+            self._active_batch_remove_indice = None
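
The new `batch_remove()` context manager is the core of the change: while it is active, the item-deletion path above appends indices to `_active_batch_remove_indice` instead of firing one `ModelItemDeleted` per item, and on exit `itertools.groupby` (which groups equal consecutive values) collapses each run of identical indices into a single event carrying a count. A quick self-contained illustration of that grouping step, with a made-up `indices` list:

    import itertools

    # Equal consecutive indices (a contiguous run removed in order) collapse
    # into one (index, count) pair; a different index starts a new group.
    indices = [2, 2, 2, 7]

    events = [(grp[0], len(grp))
              for grp in (list(g) for _, g in itertools.groupby(indices))]

    # [(2, 3), (7, 1)], i.e. ModelItemDeleted(index=2, count=3)
    # followed by ModelItemDeleted(index=7, count=1)
    print(events)
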
@@ -43,6 +43,7 @@ class ModelProxy(Model):
 
     def source_cleared(self, source: Model) -> None:
         if self.accept_source(source):
-            for source_sync_id, key in self.copy():
-                if source_sync_id == source.sync_id:
-                    del self[source_sync_id, key]
+            with self.batch_remove():
+                for source_sync_id, key in self.copy():
+                    if source_sync_id == source.sync_id:
+                        del self[source_sync_id, key]
@@ -82,6 +82,7 @@ class ModelItemDeleted(ModelEvent):
     """Indicate the removal of a `ModelItem` from a `Backend` `Model`."""
 
     index: int = field()
+    count: int = 1
 
 
 @dataclass
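
Giving `count` a default of 1 keeps the event backward compatible: the non-batched branch still constructs it exactly as before. A minimal standalone imitation of the shape, not the project's actual class:

    from dataclasses import dataclass, field

    @dataclass
    class ItemDeleted:                       # illustrative stand-in only
        sync_id: str
        index:   int = field()
        count:   int = 1                     # defaults to a single-row removal

    print(ItemDeleted("some.sync.id", 5))    # count falls back to 1
    print(ItemDeleted("some.sync.id", 5, 3)) # batched: 3 rows starting at index 5
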
@@ -24,6 +24,7 @@ QtObject {
         const onError = Globals.pendingCoroutines[uuid].onError
 
         delete Globals.pendingCoroutines[uuid]
+        Globals.pendingCoroutinesChanged()
 
         if (error) {
             const type = py.getattr(py.getattr(error, "__class__"), "__name__")
@@ -43,6 +44,7 @@ QtObject {
 
 
     function onLoopException(message, error, traceback) {
+        if (traceback.includes("429, None")) return
         // No need to log these here, the asyncio exception handler does it
         const type = py.getattr(py.getattr(error, "__class__"), "__name__")
         utils.showError(type, traceback, message)
@@ -68,9 +70,10 @@ QtObject {
     }
 
 
-    function onModelItemDeleted(syncId, index) {
-        // print("delete", syncId, index)
-        ModelStore.get(syncId).remove(index)
+    function onModelItemDeleted(syncId, index, count=1) {
+        // print("delete", syncId, index, count)
+        print(syncId, index, count)
+        ModelStore.get(syncId).remove(index, count)
     }
 
 
@@ -26,6 +26,7 @@ Python {
         const future = privates.makeFuture()
 
         Globals.pendingCoroutines[uuid] = {future, onSuccess, onError}
+        Globals.pendingCoroutinesChanged()
 
         call("BRIDGE.call_backend_coro", [name, uuid, args], pyFuture => {
             future.privates.pythonFuture = pyFuture
@@ -43,6 +44,7 @@ Python {
         const uuid = accountId + "." + name + "." + CppUtils.uuid()
 
         Globals.pendingCoroutines[uuid] = {onSuccess, onError}
+        Globals.pendingCoroutinesChanged()
 
         const call_args = [accountId, name, uuid, args]
 