Fixed indentation (w). Probably fixed redactions sometimes displaying the viewing user's name in place of the actor's name. Fixed room history sometimes never loading (but not missing chunks in the middle yet).
parent bc20e47fb1
commit b6543b09cc
@@ -22,42 +22,42 @@ ROOT = Path(__file__).parent
class Watcher(DefaultWatcher):
    def accept_change(self, entry: os.DirEntry) -> bool:
        path = Path(entry.path)

        for bad in ("src/config", "src/themes"):
            if path.is_relative_to(ROOT / bad):
                return False

        for good in ("src", "submodules"):
            if path.is_relative_to(ROOT / good):
                return True

        return False

    def should_watch_dir(self, entry: os.DirEntry) -> bool:
        return super().should_watch_dir(entry) and self.accept_change(entry)

    def should_watch_file(self, entry: os.DirEntry) -> bool:
        return super().should_watch_file(entry) and self.accept_change(entry)


def cmd(*parts) -> subprocess.CompletedProcess:
    return subprocess.run(parts, cwd=ROOT, check=True)


def run_app(args=sys.argv[1:]) -> None:
    print("\n\x1b[36m", "─" * term_size().columns, "\x1b[0m\n", sep="")

    with suppress(KeyboardInterrupt):
        cmd("qmake", "moment.pro", "CONFIG+=dev")
        cmd("make")
        cmd("./moment", "-name", "dev", *args)


if __name__ == "__main__":
    if len(sys.argv) > 2 and sys.argv[1] in ("-h", "--help"):
        print(__doc__)
    else:
        (ROOT / "Makefile").exists() and cmd("make", "clean")
        run_process(ROOT, run_app, callback=print, watcher_cls=Watcher)
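A rough illustration of the filtering above, with hypothetical paths that are not taken from the repository; note that `Path.is_relative_to` requires Python 3.9 or newer.

    # Hypothetical examples of what accept_change() would return:
    #   ROOT / "src/config/settings.json"  -> False (under an excluded directory)
    #   ROOT / "src/gui/Pages/Chat.qml"    -> True  (under src/)
    #   ROOT / "README.md"                 -> False (not under src/ or submodules/)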
@@ -2,31 +2,31 @@ import json
import yaml

with open("moment.flatpak.base.yaml") as f:
    base = yaml.load(f, Loader=yaml.FullLoader)

with open("flatpak-pip.json") as f:
    modules = json.load(f)["modules"]

# set some modules in front as dependencies and dropping matrix-nio
# which is declared separately
front = []
back = []
for m in modules:
    n = m["name"]
    if n.startswith("python3-") and \
       n[len("python3-"):] in ["cffi", "importlib-metadata", "multidict", "pytest-runner", "setuptools-scm"]:
        front.append(m)
    else:
        back.append(m)

# replace placeholder with modules
phold = None
for i in range(len(base["modules"])):
    if base["modules"][i]["name"] == "PLACEHOLDER PYTHON DEPENDENCIES":
        phold = i
        break

base["modules"] = base["modules"][:i] + front + back + base["modules"][i+1:]

with open("moment.flatpak.yaml", "w") as f:
    f.write(yaml.dump(base, sort_keys=False, indent=2))
@@ -4,29 +4,29 @@ import html
import re
from pathlib import Path

root = Path(__file__).resolve().parent.parent
title_pattern = re.compile(r"## (\d+\.\d+\.\d+) \((\d{4}-\d\d-\d\d)\)")
release_lines = [" <releases>"]

for line in (root / "docs" / "CHANGELOG.md").read_text().splitlines():
    match = title_pattern.match(line)

    if match:
        args = (html.escape(match.group(1)), html.escape(match.group(2)))
        release_lines.append(' <release version="%s" date="%s"/>' % args)

appdata = root / "packaging" / "moment.metainfo.xml"
in_releases = False
final_lines = []

for line in appdata.read_text().splitlines():
    if line == " <releases>":
        in_releases = True
        final_lines += release_lines
    elif line == " </releases>":
        in_releases = False

    if not in_releases:
        final_lines.append(line)

appdata.write_text("\n".join(final_lines))
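To illustrate the extraction loop above with a made-up changelog entry (the date is hypothetical; only the 0.7.3 version string appears elsewhere in this commit):

    # Hypothetical input and output for title_pattern:
    #   CHANGELOG.md heading:   ## 0.7.3 (2021-01-01)
    #   appended release line:  <release version="0.7.3" date="2021-01-01"/>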
@@ -13,7 +13,7 @@ documentation in the following modules first:
- `nio_callbacks`
"""

__app_name__ = "moment"
__display_name__ = "Moment"
__reverse_dns__ = "xyz.mx-moment"
__version__ = "0.7.3"
File diff suppressed because it is too large
@@ -17,442 +17,442 @@ ColorTuple = Tuple[float, float, float, float]
@dataclass(repr=False)
class Color:
    """A color manipulable in HSLuv, HSL, RGB, hexadecimal and by SVG name.

    The `Color` object constructor accepts a hexadecimal string
    ("#RGB", "#RRGGBB" or "#RRGGBBAA") or another `Color` to copy.

    Attributes representing the color in HSLuv, HSL, RGB, hexadecimal and
    SVG name formats can be accessed and modified on these `Color` objects.

    The `hsluv()`/`hsluva()`, `hsl()`/`hsla()` and `rgb()`/`rgba()`
    functions in this module are provided to create an object by specifying
    a color in other formats.

    Copies of objects with modified attributes can be created with the
    `Color.but()`, `Color.plus()` and `Color.times()` methods.

    If the `hue` is outside of the normal 0-359 range, the number is
    interpreted as `hue % 360`, e.g. `360` is `0`, `460` is `100`,
    or `-20` is `340`.
    """

    # The saturation and luv are properties due to the need for a setter
    # capping the value between 0-100, as hsluv handles numbers outside
    # this range incorrectly.

    color_or_hex: InitVar[str] = "#00000000"
    hue: float = field(init=False, default=0)
    _saturation: float = field(init=False, default=0)
    _luv: float = field(init=False, default=0)
    alpha: float = field(init=False, default=1)

    def __post_init__(self, color_or_hex: Union["Color", str]) -> None:
        if isinstance(color_or_hex, Color):
            hsluva = color_or_hex.hsluva
            self.hue, self.saturation, self.luv, self.alpha = hsluva
        else:
            self.hex = color_or_hex

    # HSLuv

    @property
    def hsluva(self) -> ColorTuple:
        return (self.hue, self.saturation, self.luv, self.alpha)

    @hsluva.setter
    def hsluva(self, value: ColorTuple) -> None:
        self.hue, self.saturation, self.luv, self.alpha = value

    @property
    def saturation(self) -> float:
        return self._saturation

    @saturation.setter
    def saturation(self, value: float) -> None:
        self._saturation = max(0, min(100, value))

    @property
    def luv(self) -> float:
        return self._luv

    @luv.setter
    def luv(self, value: float) -> None:
        self._luv = max(0, min(100, value))

    # HSL

    @property
    def hsla(self) -> ColorTuple:
        r, g, b = (self.red / 255, self.green / 255, self.blue / 255)
        h, l, s = colorsys.rgb_to_hls(r, g, b)
        return (h * 360, s * 100, l * 100, self.alpha)

    @hsla.setter
    def hsla(self, value: ColorTuple) -> None:
        h, s, l = (value[0] / 360, value[1] / 100, value[2] / 100)  # noqa
        r, g, b = colorsys.hls_to_rgb(h, l, s)
        self.rgba = (r * 255, g * 255, b * 255, value[3])

    @property
    def light(self) -> float:
        return self.hsla[2]

    @light.setter
    def light(self, value: float) -> None:
        self.hsla = (self.hue, self.saturation, value, self.alpha)

    # RGB

    @property
    def rgba(self) -> ColorTuple:
        r, g, b = hsluv_to_rgb(self.hsluva)
        return r * 255, g * 255, b * 255, self.alpha

    @rgba.setter
    def rgba(self, value: ColorTuple) -> None:
        r, g, b = (value[0] / 255, value[1] / 255, value[2] / 255)
        self.hsluva = rgb_to_hsluv((r, g, b)) + (self.alpha,)

    @property
    def red(self) -> float:
        return self.rgba[0]

    @red.setter
    def red(self, value: float) -> None:
        self.rgba = (value, self.green, self.blue, self.alpha)

    @property
    def green(self) -> float:
        return self.rgba[1]

    @green.setter
    def green(self, value: float) -> None:
        self.rgba = (self.red, value, self.blue, self.alpha)

    @property
    def blue(self) -> float:
        return self.rgba[2]

    @blue.setter
    def blue(self, value: float) -> None:
        self.rgba = (self.red, self.green, value, self.alpha)

    # Hexadecimal

    @property
    def hex(self) -> str:
        rgb = hsluv_to_hex(self.hsluva)
        alpha = builtins.hex(int(self.alpha * 255))[2:]
        alpha = f"0{alpha}" if len(alpha) == 1 else alpha
        return f"{alpha if self.alpha < 1 else ''}{rgb}".lower()

    @hex.setter
    def hex(self, value: str) -> None:
        if len(value) == 4:
            template = "#{r}{r}{g}{g}{b}{b}"
            value = template.format(r=value[1], g=value[2], b=value[3])

        alpha = int(value[-2:] if len(value) == 9 else "ff", 16) / 255

        self.hsluva = hex_to_hsluv(value) + (alpha,)

    # name color

    @property
    def name(self) -> Optional[str]:
        try:
            return SVGColor(self.hex).name
        except ValueError:
            return None

    @name.setter
    def name(self, value: str) -> None:
        self.hex = SVGColor[value.lower()].value.hex

    # Other methods

    def __repr__(self) -> str:
        r, g, b = int(self.red), int(self.green), int(self.blue)
        h, s, luv = int(self.hue), int(self.saturation), int(self.luv)
        l = int(self.light)  # noqa
        a = self.alpha
        block = f"\x1b[38;2;{r};{g};{b}m█████\x1b[0m"
        sep = "\x1b[1;33m/\x1b[0m"
        end = f" {sep} {self.name}" if self.name else ""
        # Need a terminal with true color support to render the block!
        return (
            f"{block} hsluva({h}, {s}, {luv}, {a}) {sep} "
            f"hsla({h}, {s}, {l}, {a}) {sep} rgba({r}, {g}, {b}, {a}) {sep} "
            f"{self.hex}{end}"
        )

    def but(
        self,
        hue: Optional[float] = None,
        saturation: Optional[float] = None,
        luv: Optional[float] = None,
        alpha: Optional[float] = None,
        *,
        hsluva: Optional[ColorTuple] = None,
        hsla: Optional[ColorTuple] = None,
        rgba: Optional[ColorTuple] = None,
        hex: Optional[str] = None,
        name: Optional[str] = None,
        light: Optional[float] = None,
        red: Optional[float] = None,
        green: Optional[float] = None,
        blue: Optional[float] = None,
    ) -> "Color":
        """Return a copy of this `Color` with overridden attributes.

        Example:
        >>> first = Color(100, 50, 50)
        >>> second = first.but(hue=20, saturation=100)
        >>> second.hsluva
        (20, 50, 100, 1)
        """

        new = copy(self)

        for arg, value in locals().items():
            if arg not in ("new", "self") and value is not None:
                setattr(new, arg, value)

        return new

    def plus(
        self,
        hue: Optional[float] = None,
        saturation: Optional[float] = None,
        luv: Optional[float] = None,
        alpha: Optional[float] = None,
        *,
        light: Optional[float] = None,
        red: Optional[float] = None,
        green: Optional[float] = None,
        blue: Optional[float] = None,
    ) -> "Color":
        """Return a copy of this `Color` with values added to attributes.

        Example:
        >>> first = Color(100, 50, 50)
        >>> second = first.plus(hue=10, saturation=-20)
        >>> second.hsluva
        (110, 30, 50, 1)
        """

        new = copy(self)

        for arg, value in locals().items():
            if arg not in ("new", "self") and value is not None:
                setattr(new, arg, getattr(self, arg) + value)

        return new

    def times(
        self,
        hue: Optional[float] = None,
        saturation: Optional[float] = None,
        luv: Optional[float] = None,
        alpha: Optional[float] = None,
        *,
        light: Optional[float] = None,
        red: Optional[float] = None,
        green: Optional[float] = None,
        blue: Optional[float] = None,
    ) -> "Color":
        """Return a copy of this `Color` with multiplied attributes.

        Example:
        >>> first = Color(100, 50, 50, 0.8)
        >>> second = first.times(luv=2, alpha=0.5)
        >>> second.hsluva
        (100, 50, 100, 0.4)
        """

        new = copy(self)

        for arg, value in locals().items():
            if arg not in ("new", "self") and value is not None:
                setattr(new, arg, getattr(self, arg) * value)

        return new


class SVGColor(Enum):
    """Standard SVG/HTML/CSS colors, with the addition of `transparent`."""

    aliceblue = Color("#f0f8ff")
    antiquewhite = Color("#faebd7")
    aqua = Color("#00ffff")
    aquamarine = Color("#7fffd4")
    azure = Color("#f0ffff")
    beige = Color("#f5f5dc")
    bisque = Color("#ffe4c4")
    black = Color("#000000")
    blanchedalmond = Color("#ffebcd")
    blue = Color("#0000ff")
    blueviolet = Color("#8a2be2")
    brown = Color("#a52a2a")
    burlywood = Color("#deb887")
    cadetblue = Color("#5f9ea0")
    chartreuse = Color("#7fff00")
    chocolate = Color("#d2691e")
    coral = Color("#ff7f50")
    cornflowerblue = Color("#6495ed")
    cornsilk = Color("#fff8dc")
    crimson = Color("#dc143c")
    cyan = Color("#00ffff")
    darkblue = Color("#00008b")
    darkcyan = Color("#008b8b")
    darkgoldenrod = Color("#b8860b")
    darkgray = Color("#a9a9a9")
    darkgreen = Color("#006400")
    darkgrey = Color("#a9a9a9")
    darkkhaki = Color("#bdb76b")
    darkmagenta = Color("#8b008b")
    darkolivegreen = Color("#556b2f")
    darkorange = Color("#ff8c00")
    darkorchid = Color("#9932cc")
    darkred = Color("#8b0000")
    darksalmon = Color("#e9967a")
    darkseagreen = Color("#8fbc8f")
    darkslateblue = Color("#483d8b")
    darkslategray = Color("#2f4f4f")
    darkslategrey = Color("#2f4f4f")
    darkturquoise = Color("#00ced1")
    darkviolet = Color("#9400d3")
    deeppink = Color("#ff1493")
    deepskyblue = Color("#00bfff")
    dimgray = Color("#696969")
    dimgrey = Color("#696969")
    dodgerblue = Color("#1e90ff")
    firebrick = Color("#b22222")
    floralwhite = Color("#fffaf0")
    forestgreen = Color("#228b22")
    fuchsia = Color("#ff00ff")
    gainsboro = Color("#dcdcdc")
    ghostwhite = Color("#f8f8ff")
    gold = Color("#ffd700")
    goldenrod = Color("#daa520")
    gray = Color("#808080")
    green = Color("#008000")
    greenyellow = Color("#adff2f")
    grey = Color("#808080")
    honeydew = Color("#f0fff0")
    hotpink = Color("#ff69b4")
    indianred = Color("#cd5c5c")
    indigo = Color("#4b0082")
    ivory = Color("#fffff0")
    khaki = Color("#f0e68c")
    lavender = Color("#e6e6fa")
    lavenderblush = Color("#fff0f5")
    lawngreen = Color("#7cfc00")
    lemonchiffon = Color("#fffacd")
    lightblue = Color("#add8e6")
    lightcoral = Color("#f08080")
    lightcyan = Color("#e0ffff")
    lightgoldenrodyellow = Color("#fafad2")
    lightgray = Color("#d3d3d3")
    lightgreen = Color("#90ee90")
    lightgrey = Color("#d3d3d3")
    lightpink = Color("#ffb6c1")
    lightsalmon = Color("#ffa07a")
    lightseagreen = Color("#20b2aa")
    lightskyblue = Color("#87cefa")
    lightslategray = Color("#778899")
    lightslategrey = Color("#778899")
    lightsteelblue = Color("#b0c4de")
    lightyellow = Color("#ffffe0")
    lime = Color("#00ff00")
    limegreen = Color("#32cd32")
    linen = Color("#faf0e6")
    magenta = Color("#ff00ff")
    maroon = Color("#800000")
    mediumaquamarine = Color("#66cdaa")
    mediumblue = Color("#0000cd")
    mediumorchid = Color("#ba55d3")
    mediumpurple = Color("#9370db")
    mediumseagreen = Color("#3cb371")
    mediumslateblue = Color("#7b68ee")
    mediumspringgreen = Color("#00fa9a")
    mediumturquoise = Color("#48d1cc")
    mediumvioletred = Color("#c71585")
    midnightblue = Color("#191970")
    mintcream = Color("#f5fffa")
    mistyrose = Color("#ffe4e1")
    moccasin = Color("#ffe4b5")
    navajowhite = Color("#ffdead")
    navy = Color("#000080")
    oldlace = Color("#fdf5e6")
    olive = Color("#808000")
    olivedrab = Color("#6b8e23")
    orange = Color("#ffa500")
    orangered = Color("#ff4500")
    orchid = Color("#da70d6")
    palegoldenrod = Color("#eee8aa")
    palegreen = Color("#98fb98")
    paleturquoise = Color("#afeeee")
    palevioletred = Color("#db7093")
    papayawhip = Color("#ffefd5")
    peachpuff = Color("#ffdab9")
    peru = Color("#cd853f")
    pink = Color("#ffc0cb")
    plum = Color("#dda0dd")
    powderblue = Color("#b0e0e6")
    purple = Color("#800080")
    rebeccapurple = Color("#663399")
    red = Color("#ff0000")
    rosybrown = Color("#bc8f8f")
    royalblue = Color("#4169e1")
    saddlebrown = Color("#8b4513")
    salmon = Color("#fa8072")
    sandybrown = Color("#f4a460")
    seagreen = Color("#2e8b57")
    seashell = Color("#fff5ee")
    sienna = Color("#a0522d")
    silver = Color("#c0c0c0")
    skyblue = Color("#87ceeb")
    slateblue = Color("#6a5acd")
    slategray = Color("#708090")
    slategrey = Color("#708090")
    snow = Color("#fffafa")
    springgreen = Color("#00ff7f")
    steelblue = Color("#4682b4")
    tan = Color("#d2b48c")
    teal = Color("#008080")
    thistle = Color("#d8bfd8")
    tomato = Color("#ff6347")
    transparent = Color("#00000000")  # not standard but exists in QML
    turquoise = Color("#40e0d0")
    violet = Color("#ee82ee")
    wheat = Color("#f5deb3")
    white = Color("#ffffff")
    whitesmoke = Color("#f5f5f5")
    yellow = Color("#ffff00")
    yellowgreen = Color("#9acd32")


def hsluva(
    hue: float = 0, saturation: float = 0, luv: float = 0, alpha: float = 1,
) -> Color:
    """Return a `Color` from `(0-359, 0-100, 0-100, 0-1)` HSLuv arguments."""
    return Color().but(hue, saturation, luv, alpha)


def hsla(
    hue: float = 0, saturation: float = 0, light: float = 0, alpha: float = 1,
) -> Color:
    """Return a `Color` from `(0-359, 0-100, 0-100, 0-1)` HSL arguments."""
    return Color().but(hue, saturation, light=light, alpha=alpha)


def rgba(
    red: float = 0, green: float = 0, blue: float = 0, alpha: float = 1,
) -> Color:
    """Return a `Color` from `(0-255, 0-255, 0-255, 0-1)` RGB arguments."""
    return Color().but(red=red, green=green, blue=blue, alpha=alpha)


# Aliases
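A brief usage sketch of the Color API above; the hex value and numbers are arbitrary examples, not taken from the project's themes:

    # Usage sketch (arbitrary example values):
    base = Color("#3daee9")          # construct from a hex string
    muted = base.but(saturation=40)  # copy with one attribute overridden
    faded = base.times(alpha=0.5)    # copy with an attribute multiplied
    print(muted.hex, faded.rgba)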
@@ -12,117 +12,117 @@ import nio
@dataclass
class MatrixError(Exception):
    """An error returned by a Matrix server."""

    http_code: int = 400
    m_code: Optional[str] = None
    message: Optional[str] = None
    content: str = ""

    @classmethod
    async def from_nio(cls, response: nio.ErrorResponse) -> "MatrixError":
        """Return a `MatrixError` subclass from a nio `ErrorResponse`."""

        http_code = response.transport_response.status
        m_code = response.status_code
        message = response.message
        content = await response.transport_response.text()

        for subcls in cls.__subclasses__():
            if subcls.m_code and subcls.m_code == m_code:
                return subcls(http_code, m_code, message, content)

        # If error doesn't have a M_CODE, look for a generic http error class
        for subcls in cls.__subclasses__():
            if not subcls.m_code and subcls.http_code == http_code:
                return subcls(http_code, m_code, message, content)

        return cls(http_code, m_code, message, content)


@dataclass
class MatrixUnrecognized(MatrixError):
    http_code: int = 400
    m_code: str = "M_UNRECOGNIZED"


@dataclass
class MatrixInvalidAccessToken(MatrixError):
    http_code: int = 401
    m_code: str = "M_UNKNOWN_TOKEN"


@dataclass
class MatrixUnauthorized(MatrixError):
    http_code: int = 401
    m_code: str = "M_UNAUTHORIZED"


@dataclass
class MatrixForbidden(MatrixError):
    http_code: int = 403
    m_code: str = "M_FORBIDDEN"


@dataclass
class MatrixBadJson(MatrixError):
    http_code: int = 403
    m_code: str = "M_BAD_JSON"


@dataclass
class MatrixNotJson(MatrixError):
    http_code: int = 403
    m_code: str = "M_NOT_JSON"


@dataclass
class MatrixUserDeactivated(MatrixError):
    http_code: int = 403
    m_code: str = "M_USER_DEACTIVATED"


@dataclass
class MatrixNotFound(MatrixError):
    http_code: int = 404
    m_code: str = "M_NOT_FOUND"


@dataclass
class MatrixTooLarge(MatrixError):
    http_code: int = 413
    m_code: str = "M_TOO_LARGE"


@dataclass
class MatrixBadGateway(MatrixError):
    http_code: int = 502
    m_code: Optional[str] = None


# Client errors

@dataclass
class InvalidUserId(Exception):
    user_id: str = field()


@dataclass
class InvalidUserInContext(Exception):
    user_id: str = field()


@dataclass
class UserFromOtherServerDisallowed(Exception):
    user_id: str = field()


@dataclass
class UneededThumbnail(Exception):
    pass


@dataclass
class BadMimeType(Exception):
    wanted: str = field()
    got: str = field()
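Because `MatrixError.from_nio` matches subclasses by `m_code` or `http_code`, new server error types only need a small dataclass; a hypothetical example, not part of this commit:

    # Hypothetical additional subclass following the pattern above:
    @dataclass
    class MatrixTooManyRequests(MatrixError):
        http_code: int = 429
        m_code: str = "M_LIMIT_EXCEEDED"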
@@ -19,503 +19,503 @@ from .color import SVGColor
def parse_colour(inline, m, state):
    colour = m.group(1)
    text = m.group(2)
    return "colour", colour, text


def render_html_colour(colour, text):
    return f'<span data-mx-color="{colour}">{text}</span>'


def plugin_matrix(md):
    # test string: r"<b>(x) <r>(x) \<a>b>(x) <a\>b>(x) <b>(\(z) <c>(foo\)xyz)"
    colour = (
        r"^<(.+?)>"          # capture the colour in `<colour>`
        r"\((.+?)"           # capture text in `(text`
        r"(?<!\\)(?:\\\\)*"  # ignore the next `)` if it's \escaped
        r"\)"                # finish on a `)`
    )

    # Mark colour as high priority as otherwise e.g. <red>(hi) matches the
    # inline_html rule instead of the colour rule.
    md.inline.rules.insert(1, "colour")
    md.inline.register_rule("colour", colour, parse_colour)

    if md.renderer.NAME == "html":
        md.renderer.register("colour", render_html_colour)


class HTMLProcessor:
    """Provide HTML filtering and conversion from Markdown.

    Filtering sanitizes HTML and ensures it complies both with the Matrix
    specification:
    https://matrix.org/docs/spec/client_server/latest#m-room-message-msgtypes
    and the supported Qt HTML subset for usage in QML:
    https://doc.qt.io/qt-5/richtext-html-subset.html

    Some methods take an `outgoing` argument, specifying if the HTML is
    intended to be sent to matrix servers or used locally in our application.

    For local usage, extra transformations are applied:

    - Wrap text lines starting with a `>` in `<span>` with a `quote` class.
      This allows them to be styled appropriately from QML.

    Some methods take an `inline` argument, which return text appropriate
    for UI elements restricted to display a single line, e.g. the room
    last message subtitles in QML or notifications.
    In inline filtered HTML, block tags are stripped or substituted and
    newlines are turned into ⏎ symbols (U+23CE).
    """

    inline_tags = {
        "span", "font", "a", "sup", "sub", "b", "i", "s", "u", "code",
        "mx-reply",
    }

    block_tags = {
        "h1", "h2", "h3", "h4", "h5", "h6", "blockquote",
        "p", "ul", "ol", "li", "hr", "br", "img",
        "table", "thead", "tbody", "tr", "th", "td", "pre",
        "mx-reply",
    }

    opaque_id = r"[a-zA-Z\d._-]+?"
    user_id_localpart = r"[\x21-\x39\x3B-\x7E]+?"

    user_id_regex = re.compile(
        rf"(?P<body>@{user_id_localpart}:(?P<host>[a-zA-Z\d.:-]*[a-zA-Z\d]))",
    )
    room_id_regex = re.compile(
        rf"(?P<body>!{opaque_id}:(?P<host>[a-zA-Z\d.:-]*[a-zA-Z\d]))",
    )
    room_alias_regex = re.compile(
        r"(?=^|\W)(?P<body>#\S+?:(?P<host>[a-zA-Z\d.:-]*[a-zA-Z\d]))",
    )

    link_regexes = [re.compile(r, re.IGNORECASE)
                    if isinstance(r, str) else r for r in [
        # Normal :// URLs
        (r"(?P<body>[a-z\d]+://(?P<host>[a-z\d._-]+(?:\:\d+)?)"
         r"(?:/[/\-.,\w#%&?:;=~!$*+^@']*)?(?:\([/\-_.,a-z\d#%&?;=~]*\))?)"),

        # mailto: and tel:
        r"mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9.:-]*[a-z\d]))",
        r"tel:(?P<body>[0-9+-]+)(?P<host>)",

        # magnet:
        r"(?P<body>magnet:\?xt=urn:[a-z0-9]+:.+)(?P<host>)",

        user_id_regex, room_id_regex, room_alias_regex,
    ]]

    matrix_to_regex = re.compile(r"^https?://matrix.to/#/", re.IGNORECASE)

    link_is_matrix_to_regex = re.compile(
        r"https?://matrix.to/#/\S+", re.IGNORECASE,
    )
    link_is_user_id_regex = re.compile(
        r"https?://matrix.to/#/@\S+", re.IGNORECASE,
    )
    link_is_room_id_regex = re.compile(
        r"https?://matrix.to/#/!\S+", re.IGNORECASE,
    )
    link_is_room_alias_regex = re.compile(
        r"https?://matrix.to/#/#\S+", re.IGNORECASE,
    )
    link_is_message_id_regex = re.compile(
        r"https?://matrix.to/#/[!#]\S+/\$\S+", re.IGNORECASE,
    )

    inline_quote_regex = re.compile(r"(^|⏎|>)(\s*>[^⏎\n]*)", re.MULTILINE)

    quote_regex = re.compile(
        r"(^|<span/?>|<p/?>|<br/?>|<h\d/?>|<mx-reply/?>)"
        r"(\s*>.*?)"
        r"(<span/?>|</?p>|<br/?>|</?h\d>|</mx-reply/?>|$)",
        re.MULTILINE,
    )

    extra_newlines_regex = re.compile(r"\n(\n*)")


    def __init__(self) -> None:
        # The whitespace remover doesn't take <pre> into account
        sanitizer.normalize_overall_whitespace = lambda html, *args, **kw: html
        sanitizer.normalize_whitespace_in_text_or_tail = \
            lambda el, *args, **kw: el

        # hard_wrap: convert all \n to <br> without required two spaces
        # escape: escape HTML characters in the input string, e.g. tags
        self._markdown_to_html = mistune.create_markdown(
            hard_wrap = True,
            escape    = True,
            renderer  = "html",
            plugins   = ['strikethrough', plugin_matrix],
        )


    def mentions_in_html(self, html: str) -> List[Tuple[str, str]]:
        """Return list of (text, href) tuples for all mention links in html."""

        if not html.strip():
            return []

        return [
            (a_tag.text, href)
            for a_tag, _, href, _ in lxml.html.iterlinks(html)
            if a_tag.text and
            self.link_is_matrix_to_regex.match(unquote(href.strip()))
        ]


    def from_markdown(
        self,
        text: str,
        inline: bool = False,
        outgoing: bool = False,
        display_name_mentions: Optional[Dict[str, str]] = None,
    ) -> str:
        """Return filtered HTML from Markdown text."""

        return self.filter(
            self._markdown_to_html(text),
            inline,
            outgoing,
            display_name_mentions,
        )


    def filter(
        self,
        html: str,
        inline: bool = False,
        outgoing: bool = False,
        display_name_mentions: Optional[Dict[str, str]] = None,
    ) -> str:
        """Filter and return HTML."""

        mentions = display_name_mentions

        sanit = Sanitizer(self.sanitize_settings(inline, outgoing, mentions))
        html = sanit.sanitize(html).rstrip("\n")

        if not html.strip():
            return html

        tree = etree.fromstring(
            html, parser=etree.HTMLParser(encoding="utf-8"),
        )

        for a_tag in tree.iterdescendants("a"):
            self._mentions_to_matrix_to_links(a_tag, mentions, outgoing)

            if not outgoing:
                self._matrix_to_links_add_classes(a_tag)

        html = etree.tostring(tree, encoding="utf-8", method="html").decode()
        html = sanit.sanitize(html).rstrip("\n")

        if outgoing:
            return html

        # Client-side modifications

        html = self.quote_regex.sub(r'\1<span class="quote">\2</span>\3', html)

        if not inline:
            return html

        return self.inline_quote_regex.sub(
            r'\1<span class="quote">\2</span>', html,
        )


    def sanitize_settings(
        self,
        inline: bool = False,
        outgoing: bool = False,
        display_name_mentions: Optional[Dict[str, str]] = None,
    ) -> dict:
        """Return an html_sanitizer configuration."""

        # https://matrix.org/docs/spec/client_server/latest#m-room-message-msgtypes

        inline_tags = self.inline_tags
        all_tags = inline_tags | self.block_tags

        inlines_attributes = {
            "font": {"color"},
            "a": {"href", "class", "data-mention"},
            "code": {"class"},
        }
        attributes = {**inlines_attributes, **{
            "ol": {"start"},
            "hr": {"width"},
            "span": {"data-mx-color"},
            "img": {
                "data-mx-emote", "src", "alt", "title", "width", "height",
            },
        }}

        username_link_regexes = [re.compile(r) for r in [
            rf"(?<!\w)(?P<body>{re.escape(name or user_id)})(?!\w)(?P<host>)"
            for user_id, name in (display_name_mentions or {}).items()
        ]]

        return {
            "tags": inline_tags if inline else all_tags,
            "attributes": inlines_attributes if inline else attributes,
            "empty": {} if inline else {"hr", "br", "img"},
            "separate": {"a"} if inline else {
                "a", "p", "li", "table", "tr", "th", "td", "br", "hr", "img",
            },
            "whitespace": {},
            "keep_typographic_whitespace": True,
            "add_nofollow": False,
            "autolink": {
                "link_regexes":
                    self.link_regexes + username_link_regexes,  # type: ignore
                "avoid_hosts": [],
            },
            "sanitize_href": lambda href: href,
            "element_preprocessors": [
                sanitizer.bold_span_to_strong,
                sanitizer.italic_span_to_em,
                sanitizer.tag_replacer("strong", "b"),
                sanitizer.tag_replacer("em", "i"),
                sanitizer.tag_replacer("strike", "s"),
                sanitizer.tag_replacer("del", "s"),
                sanitizer.tag_replacer("form", "p"),
                sanitizer.tag_replacer("div", "p"),
                sanitizer.tag_replacer("caption", "p"),
                sanitizer.target_blank_noopener,

                self._span_color_to_font if not outgoing else lambda el: el,

                self._img_to_a,
                self._remove_extra_newlines,
                self._newlines_to_return_symbol if inline else lambda el: el,
                self._reply_to_inline if inline else lambda el: el,
            ],
            "element_postprocessors": [
                self._font_color_to_span if outgoing else lambda el: el,
                self._hr_to_dashes if not outgoing else lambda el: el,
            ],
            "is_mergeable": lambda e1, e2: e1.attrib == e2.attrib,
        }


    @staticmethod
    def _span_color_to_font(el: HtmlElement) -> HtmlElement:
        """Convert HTML `<span data-mx-color=...` to `<font color=...>`."""
|
||||
"""Provide HTML filtering and conversion from Markdown.
|
||||
|
||||
Filtering sanitizes HTML and ensures it complies both with the Matrix
|
||||
specification:
|
||||
https://matrix.org/docs/spec/client_server/latest#m-room-message-msgtypes
|
||||
and the supported Qt HTML subset for usage in QML:
|
||||
https://doc.qt.io/qt-5/richtext-html-subset.html
|
||||
|
||||
Some methods take an `outgoing` argument, specifying if the HTML is
|
||||
intended to be sent to matrix servers or used locally in our application.
|
||||
|
||||
For local usage, extra transformations are applied:
|
||||
|
||||
- Wrap text lines starting with a `>` in `<span>` with a `quote` class.
|
||||
This allows them to be styled appropriately from QML.
|
||||
|
||||
Some methods take an `inline` argument, which return text appropriate
|
||||
for UI elements restricted to display a single line, e.g. the room
|
||||
last message subtitles in QML or notifications.
|
||||
In inline filtered HTML, block tags are stripped or substituted and
|
||||
newlines are turned into ⏎ symbols (U+23CE).
|
||||
"""
|
||||
|
||||
inline_tags = {
|
||||
"span", "font", "a", "sup", "sub", "b", "i", "s", "u", "code",
|
||||
"mx-reply",
|
||||
}
|
||||
|
||||
block_tags = {
|
||||
"h1", "h2", "h3", "h4", "h5", "h6", "blockquote",
|
||||
"p", "ul", "ol", "li", "hr", "br", "img",
|
||||
"table", "thead", "tbody", "tr", "th", "td", "pre",
|
||||
"mx-reply",
|
||||
}
|
||||
|
||||
opaque_id = r"[a-zA-Z\d._-]+?"
|
||||
user_id_localpart = r"[\x21-\x39\x3B-\x7E]+?"
|
||||
|
||||
user_id_regex = re.compile(
|
||||
rf"(?P<body>@{user_id_localpart}:(?P<host>[a-zA-Z\d.:-]*[a-zA-Z\d]))",
|
||||
)
|
||||
room_id_regex = re.compile(
|
||||
rf"(?P<body>!{opaque_id}:(?P<host>[a-zA-Z\d.:-]*[a-zA-Z\d]))",
|
||||
)
|
||||
room_alias_regex = re.compile(
|
||||
r"(?=^|\W)(?P<body>#\S+?:(?P<host>[a-zA-Z\d.:-]*[a-zA-Z\d]))",
|
||||
)
|
||||
|
||||
link_regexes = [re.compile(r, re.IGNORECASE)
|
||||
if isinstance(r, str) else r for r in [
|
||||
# Normal :// URLs
|
||||
(r"(?P<body>[a-z\d]+://(?P<host>[a-z\d._-]+(?:\:\d+)?)"
|
||||
r"(?:/[/\-.,\w#%&?:;=~!$*+^@']*)?(?:\([/\-_.,a-z\d#%&?;=~]*\))?)"),
|
||||
|
||||
# mailto: and tel:
|
||||
r"mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9.:-]*[a-z\d]))",
|
||||
r"tel:(?P<body>[0-9+-]+)(?P<host>)",
|
||||
|
||||
# magnet:
|
||||
r"(?P<body>magnet:\?xt=urn:[a-z0-9]+:.+)(?P<host>)",
|
||||
|
||||
user_id_regex, room_id_regex, room_alias_regex,
|
||||
]]
|
||||
|
||||
matrix_to_regex = re.compile(r"^https?://matrix.to/#/", re.IGNORECASE)
|
||||
|
||||
link_is_matrix_to_regex = re.compile(
|
||||
r"https?://matrix.to/#/\S+", re.IGNORECASE,
|
||||
)
|
||||
link_is_user_id_regex = re.compile(
|
||||
r"https?://matrix.to/#/@\S+", re.IGNORECASE,
|
||||
)
|
||||
link_is_room_id_regex = re.compile(
|
||||
r"https?://matrix.to/#/!\S+", re.IGNORECASE,
|
||||
)
|
||||
link_is_room_alias_regex = re.compile(
|
||||
r"https?://matrix.to/#/#\S+", re.IGNORECASE,
|
||||
)
|
||||
link_is_message_id_regex = re.compile(
|
||||
r"https?://matrix.to/#/[!#]\S+/\$\S+", re.IGNORECASE,
|
||||
)
|
||||
|
||||
inline_quote_regex = re.compile(r"(^|⏎|>)(\s*>[^⏎\n]*)", re.MULTILINE)
|
||||
|
||||
quote_regex = re.compile(
|
||||
r"(^|<span/?>|<p/?>|<br/?>|<h\d/?>|<mx-reply/?>)"
|
||||
r"(\s*>.*?)"
|
||||
r"(<span/?>|</?p>|<br/?>|</?h\d>|</mx-reply/?>|$)",
|
||||
re.MULTILINE,
|
||||
)
|
||||
|
||||
extra_newlines_regex = re.compile(r"\n(\n*)")
|
||||
|
||||
|
||||
def __init__(self) -> None:
|
||||
# The whitespace remover doesn't take <pre> into account
|
||||
sanitizer.normalize_overall_whitespace = lambda html, *args, **kw: html
|
||||
sanitizer.normalize_whitespace_in_text_or_tail = \
|
||||
lambda el, *args, **kw: el
|
||||
|
||||
# hard_wrap: convert all \n to <br> without required two spaces
|
||||
# escape: escape HTML characters in the input string, e.g. tags
|
||||
self._markdown_to_html = mistune.create_markdown(
|
||||
hard_wrap = True,
|
||||
escape = True,
|
||||
renderer = "html",
|
||||
plugins = ['strikethrough', plugin_matrix],
|
||||
)
|
||||
|
||||
|
||||
def mentions_in_html(self, html: str) -> List[Tuple[str, str]]:
|
||||
"""Return list of (text, href) tuples for all mention links in html."""
|
||||
|
||||
if not html.strip():
|
||||
return []
|
||||
|
||||
return [
|
||||
(a_tag.text, href)
|
||||
for a_tag, _, href, _ in lxml.html.iterlinks(html)
|
||||
if a_tag.text and
|
||||
self.link_is_matrix_to_regex.match(unquote(href.strip()))
|
||||
]
|
||||
|
||||
|
||||
def from_markdown(
|
||||
self,
|
||||
text: str,
|
||||
inline: bool = False,
|
||||
outgoing: bool = False,
|
||||
display_name_mentions: Optional[Dict[str, str]] = None,
|
||||
) -> str:
|
||||
"""Return filtered HTML from Markdown text."""
|
||||
|
||||
return self.filter(
|
||||
self._markdown_to_html(text),
|
||||
inline,
|
||||
outgoing,
|
||||
display_name_mentions,
|
||||
)
|
||||
|
||||
|
||||


    def filter(
        self,
        html: str,
        inline: bool = False,
        outgoing: bool = False,
        display_name_mentions: Optional[Dict[str, str]] = None,
    ) -> str:
        """Filter and return HTML."""

        mentions = display_name_mentions

        sanit = Sanitizer(self.sanitize_settings(inline, outgoing, mentions))
        html = sanit.sanitize(html).rstrip("\n")

        if not html.strip():
            return html

        tree = etree.fromstring(
            html, parser=etree.HTMLParser(encoding="utf-8"),
        )

        for a_tag in tree.iterdescendants("a"):
            self._mentions_to_matrix_to_links(a_tag, mentions, outgoing)

            if not outgoing:
                self._matrix_to_links_add_classes(a_tag)

        html = etree.tostring(tree, encoding="utf-8", method="html").decode()
        html = sanit.sanitize(html).rstrip("\n")

        if outgoing:
            return html

        # Client-side modifications

        html = self.quote_regex.sub(r'\1<span class="quote">\2</span>\3', html)

        if not inline:
            return html

        return self.inline_quote_regex.sub(
            r'\1<span class="quote">\2</span>', html,
        )
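
    # ------------------------------------------------------------------
    # Illustrative sketch (not part of this module): the client-side "quote"
    # highlighting that filter() applies to incoming messages, using the same
    # pattern as quote_regex above on a hand-written snippet with a literal ">"
    # (real pipeline output may have the character escaped differently).
    import re as _re_quote_demo

    _quote = _re_quote_demo.compile(
        r"(^|<span/?>|<p/?>|<br/?>|<h\d/?>|<mx-reply/?>)"
        r"(\s*>.*?)"
        r"(<span/?>|</?p>|<br/?>|</?h\d>|</mx-reply/?>|$)",
        _re_quote_demo.MULTILINE,
    )
    _sample = "<p>> quoted reply</p><p>normal text</p>"
    # The "> ..." segment between block tags gets wrapped in a styled span:
    print(_quote.sub(r'\1<span class="quote">\2</span>\3', _sample))
    # ------------------------------------------------------------------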


    def sanitize_settings(
        self,
        inline: bool = False,
        outgoing: bool = False,
        display_name_mentions: Optional[Dict[str, str]] = None,
    ) -> dict:
        """Return an html_sanitizer configuration."""

        # https://matrix.org/docs/spec/client_server/latest#m-room-message-msgtypes

        inline_tags = self.inline_tags
        all_tags = inline_tags | self.block_tags

        inlines_attributes = {
            "font": {"color"},
            "a": {"href", "class", "data-mention"},
            "code": {"class"},
        }
        attributes = {**inlines_attributes, **{
            "ol": {"start"},
            "hr": {"width"},
            "span": {"data-mx-color"},
            "img": {
                "data-mx-emote", "src", "alt", "title", "width", "height",
            },
        }}

        username_link_regexes = [re.compile(r) for r in [
            rf"(?<!\w)(?P<body>{re.escape(name or user_id)})(?!\w)(?P<host>)"
            for user_id, name in (display_name_mentions or {}).items()
        ]]

        return {
            "tags": inline_tags if inline else all_tags,
            "attributes": inlines_attributes if inline else attributes,
            "empty": {} if inline else {"hr", "br", "img"},
            "separate": {"a"} if inline else {
                "a", "p", "li", "table", "tr", "th", "td", "br", "hr", "img",
            },
            "whitespace": {},
            "keep_typographic_whitespace": True,
            "add_nofollow": False,
            "autolink": {
                "link_regexes":
                    self.link_regexes + username_link_regexes,  # type: ignore
                "avoid_hosts": [],
            },
            "sanitize_href": lambda href: href,
            "element_preprocessors": [
                sanitizer.bold_span_to_strong,
                sanitizer.italic_span_to_em,
                sanitizer.tag_replacer("strong", "b"),
                sanitizer.tag_replacer("em", "i"),
                sanitizer.tag_replacer("strike", "s"),
                sanitizer.tag_replacer("del", "s"),
                sanitizer.tag_replacer("form", "p"),
                sanitizer.tag_replacer("div", "p"),
                sanitizer.tag_replacer("caption", "p"),
                sanitizer.target_blank_noopener,

                self._span_color_to_font if not outgoing else lambda el: el,

                self._img_to_a,
                self._remove_extra_newlines,
                self._newlines_to_return_symbol if inline else lambda el: el,
                self._reply_to_inline if inline else lambda el: el,
            ],
            "element_postprocessors": [
                self._font_color_to_span if outgoing else lambda el: el,
                self._hr_to_dashes if not outgoing else lambda el: el,
            ],
            "is_mergeable": lambda e1, e2: e1.attrib == e2.attrib,
        }


    @staticmethod
    def _span_color_to_font(el: HtmlElement) -> HtmlElement:
        """Convert HTML `<span data-mx-color=...` to `<font color=...>`."""

        if el.tag not in ("span", "font"):
            return el

        color = el.attrib.pop("data-mx-color", None)
        if color:
            el.tag = "font"
            el.attrib["color"] = color

        return el


    @staticmethod
    def _font_color_to_span(el: HtmlElement) -> HtmlElement:
        """Convert HTML `<font color=...>` to `<span data-mx-color=...`."""

        if el.tag not in ("span", "font"):
            return el

        color = el.attrib.pop("color", None)
        if color:
            el.tag = "span"
            el.attrib["data-mx-color"] = color

        return el


    @staticmethod
    def _img_to_a(el: HtmlElement) -> HtmlElement:
        """Linkify images by wrapping `<img>` tags in `<a>`."""

        if el.tag != "img":
            return el

        src = el.attrib.get("src", "")
        width = el.attrib.get("width")
        height = el.attrib.get("height")
        is_emote = "data-mx-emote" in el.attrib

        if src.startswith("mxc://"):
            el.attrib["src"] = nio.Api.mxc_to_http(src)

        if is_emote and not width and not height:
            el.attrib["width"] = 32
            el.attrib["height"] = 32

        elif is_emote and width and not height:
            el.attrib["height"] = width

        elif is_emote and height and not width:
            el.attrib["width"] = height

        elif not is_emote and (not width or not height):
            el.tag = "a"
            el.attrib["href"] = el.attrib.pop("src", "")
            el.text = el.attrib.pop("alt", None) or el.attrib["href"]

        return el
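
    # Hypothetical sketch (assumes lxml, which this module already uses):
    # what _img_to_a above does to a plain <img> that has no explicit
    # width/height - it becomes a text link. `HTMLProcessor` refers to the
    # class defined in this file; import it first in a real session.
    import lxml.html as _lxml_demo

    _img = _lxml_demo.fragment_fromstring('<img src="https://example.com/cat.png">')
    _img = HTMLProcessor._img_to_a(_img)
    assert _img.tag == "a"
    assert _img.attrib["href"] == "https://example.com/cat.png"
    assert _img.text == "https://example.com/cat.png"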


    def _remove_extra_newlines(self, el: HtmlElement) -> HtmlElement:
        r"""Remove excess `\n` characters from HTML elements.

        This is done to avoid additional blank lines when the CSS directive
        `white-space: pre` is used.

        Text inside `<pre>` tags is ignored, except for the final newlines.
        """

        pre_parent = any(parent.tag == "pre" for parent in el.iterancestors())

        if el.tag != "pre" and not pre_parent:
            if el.text:
                el.text = self.extra_newlines_regex.sub(r"\1", el.text)
            if el.tail:
                el.tail = self.extra_newlines_regex.sub(r"\1", el.tail)
        else:
            if el.text and el.text.endswith("\n"):
                el.text = el.text[:-1]
            if el.tail and el.tail.endswith("\n"):
                el.tail = el.tail[:-1]

        return el


    def _newlines_to_return_symbol(self, el: HtmlElement) -> HtmlElement:
        """Turn newlines into unicode return symbols (⏎, U+23CE).

        The symbol is added to blocks with siblings (e.g. a `<p>` followed by
        another `<p>`) and `<br>` tags.
        The `<br>` themselves will be removed by the inline sanitizer.
        """

        is_block_with_siblings = (el.tag in self.block_tags and
                                  next(el.itersiblings(), None) is not None)

        if el.tag == "br" or is_block_with_siblings:
            el.tail = f" ⏎ {el.tail or ''}"

        # Replace leftover \n in the text/tail of <pre> content with the return symbol.
        if el.text:
            el.text = re.sub(r"\n", r" ⏎ ", el.text)

        if el.tail:
            el.tail = re.sub(r"\n", r" ⏎ ", el.tail)

        return el


    def _reply_to_inline(self, el: HtmlElement) -> HtmlElement:
        """Shorten <mx-reply> to only include the replied to event's sender."""

        if el.tag != "mx-reply":
            return el

        try:
            user_id = el.find("blockquote").findall("a")[1].text
            text = f"↩ {user_id[1:].split(':')[0]}: "  # U+21A9 arrow
            tail = el.tail.rstrip().rstrip("⏎")
        except (AttributeError, IndexError):
            return el

        el.clear()
        el.text = text
        el.tail = tail
        return el


    def _mentions_to_matrix_to_links(
        self,
        el: HtmlElement,
        display_name_mentions: Optional[Dict[str, str]] = None,
        outgoing: bool = False,
    ) -> HtmlElement:
        """Turn user ID, usernames and room ID/aliases into matrix.to URL.

        After the HTML sanitizer autolinks these, the links' hrefs are the
        link text, e.g. `<a href="@foo:bar.com">@foo:bar.com</a>`.
        We turn them into proper matrix.to URL in this function.
        """

        if el.tag != "a" or not el.attrib.get("href"):
            return el

        id_regexes = (
            self.user_id_regex, self.room_id_regex, self.room_alias_regex,
        )

        for regex in id_regexes:
            if regex.match(unquote(el.attrib["href"])):
                el.attrib["href"] = f"https://matrix.to/#/{el.attrib['href']}"
                return el

        for user_id, name in (display_name_mentions or {}).items():
            if unquote(el.attrib["href"]) == (name or user_id):
                el.attrib["href"] = f"https://matrix.to/#/{user_id}"
                return el

        return el


    def _matrix_to_links_add_classes(self, el: HtmlElement) -> HtmlElement:
        """Add special CSS classes to matrix.to mention links."""

        href = unquote(el.attrib.get("href", ""))

        if not href or not el.text:
            return el

        el.text = self.matrix_to_regex.sub("", el.text or "")

        # This must be first, or link will be mistaken by room ID/alias regex
        if self.link_is_message_id_regex.match(href):
            el.attrib["class"] = "mention message-id-mention"
            el.attrib["data-mention"] = el.text.strip()

        elif self.link_is_user_id_regex.match(href):
            if el.text.strip().startswith("@"):
                el.attrib["class"] = "mention user-id-mention"
            else:
                el.attrib["class"] = "mention username-mention"

            el.attrib["data-mention"] = el.text.strip()

        elif self.link_is_room_id_regex.match(href):
            el.attrib["class"] = "mention room-id-mention"
            el.attrib["data-mention"] = el.text.strip()

        elif self.link_is_room_alias_regex.match(href):
            el.attrib["class"] = "mention room-alias-mention"
            el.attrib["data-mention"] = el.text.strip()

        return el


    def _hr_to_dashes(self, el: HtmlElement) -> HtmlElement:
        if el.tag != "hr":
            return el

        el.tag = "p"
        el.attrib["class"] = "ruler"
        el.text = "─" * 19
        return el


HTML_PROCESSOR = HTMLProcessor()
File diff suppressed because it is too large
@@ -24,354 +24,354 @@ from .models.model import Model
from .utils import Size, atomic_write, current_task

if TYPE_CHECKING:
    from .backend import Backend

if sys.version_info < (3, 8):
    import pyfastcopy  # noqa

CONCURRENT_DOWNLOADS_LIMIT = asyncio.BoundedSemaphore(8)
ACCESS_LOCKS: DefaultDict[str, asyncio.Lock] = DefaultDict(asyncio.Lock)


@dataclass
class MediaCache:
    """Matrix downloaded media cache."""

    backend: "Backend" = field()
    base_dir: Path = field()


    def __post_init__(self) -> None:
        self.thumbs_dir = self.base_dir / "thumbnails"
        self.downloads_dir = self.base_dir / "downloads"

        self.thumbs_dir.mkdir(parents=True, exist_ok=True)
        self.downloads_dir.mkdir(parents=True, exist_ok=True)


    async def get_media(self, *args) -> Path:
        """Return `Media(self, ...).get()`'s result. Intended for QML."""
        return await Media(self, *args).get()


    async def get_thumbnail(self, width: float, height: float, *args) -> Path:
        """Return `Thumbnail(self, ...).get()`'s result. Intended for QML."""
        # QML sometimes passes float sizes, which the matrix API doesn't like.
        size = (round(width), round(height))
        return await Thumbnail(
            self, *args, wanted_size=size,  # type: ignore
        ).get()


@dataclass
class Media:
    """A matrix media file that is downloaded or has yet to be.

    If the `room_id` is not set, no `Transfer` model item will be registered
    while this media is being downloaded.
    """

    cache: "MediaCache" = field()
    client_user_id: str = field()
    mxc: str = field()
    title: str = field()
    room_id: Optional[str] = None
    filesize: Optional[int] = None
    crypt_dict: Optional[Dict[str, Any]] = field(default=None, repr=False)


    def __post_init__(self) -> None:
        self.mxc = re.sub(r"#auto$", "", self.mxc)

        if not re.match(r"^mxc://.+/.+", self.mxc):
            raise ValueError(f"Invalid mxc URI: {self.mxc}")


    @property
    def local_path(self) -> Path:
        """The path where the file either exists or should be downloaded.

        The returned paths are in this form:
        ```
        <base download folder>/<homeserver domain>/
        <file title>_<mxc id>.<file extension>`
        ```
        e.g. `~/.cache/moment/downloads/matrix.org/foo_Hm24ar11i768b0el.png`.
        """

        parsed = urlparse(self.mxc)
        mxc_id = parsed.path.lstrip("/")
        title = Path(self.title)
        filename = f"{title.stem}_{mxc_id}{title.suffix}"
        return self.cache.downloads_dir / parsed.netloc / filename
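
    # ------------------------------------------------------------------
    # Illustrative sketch (not part of this module): the same path derivation
    # as local_path above, done standalone on a made-up mxc URI and title so
    # the naming scheme is easy to see. The base folder is the docstring's
    # example, not something read from config.
    from pathlib import Path as _Path
    from urllib.parse import urlparse as _urlparse

    _downloads_dir = _Path("~/.cache/moment/downloads")
    _parsed = _urlparse("mxc://matrix.org/Hm24ar11i768b0el")
    _mxc_id = _parsed.path.lstrip("/")                     # "Hm24ar11i768b0el"
    _title = _Path("foo.png")
    _filename = f"{_title.stem}_{_mxc_id}{_title.suffix}"  # "foo_Hm24ar11i768b0el.png"
    print(_downloads_dir / _parsed.netloc / _filename)
    # ~/.cache/moment/downloads/matrix.org/foo_Hm24ar11i768b0el.png
    # ------------------------------------------------------------------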


    async def get(self) -> Path:
        """Return the cached file's path, downloading it first if needed."""

        async with ACCESS_LOCKS[self.mxc]:
            try:
                return await self.get_local()
            except FileNotFoundError:
                return await self.create()


    async def get_local(self) -> Path:
        """Return a cached local existing path for this media or raise."""

        if not self.local_path.exists():
            raise FileNotFoundError()

        return self.local_path


    async def create(self) -> Path:
        """Download and cache the media file to disk."""

        async with CONCURRENT_DOWNLOADS_LIMIT:
            data = await self._get_remote_data()

        self.local_path.parent.mkdir(parents=True, exist_ok=True)

        async with atomic_write(self.local_path, binary=True) as (file, done):
            await file.write(data)
            done()

        if type(self) is Media:
            for event in self.cache.backend.mxc_events[self.mxc]:
                event.media_local_path = self.local_path

        return self.local_path


    async def _get_remote_data(self) -> bytes:
        """Return the file's data from the matrix server, decrypt if needed."""

        client = self.cache.backend.clients[self.client_user_id]

        transfer: Optional[Transfer] = None
        model: Optional[Model] = None

        if self.room_id:
            model = self.cache.backend.models[self.room_id, "transfers"]
            transfer = Transfer(
                id = uuid4(),
                is_upload = False,
                filepath = self.local_path,
                total_size = self.filesize or 0,
                status = TransferStatus.Transfering,
            )
            assert model is not None
            client.transfer_tasks[transfer.id] = current_task()  # type: ignore
            model[str(transfer.id)] = transfer

        try:
            parsed = urlparse(self.mxc)
            resp = await client.download(
                server_name = parsed.netloc,
                media_id = parsed.path.lstrip("/"),
            )
        except (nio.TransferCancelledError, asyncio.CancelledError):
            if transfer and model:
                del model[str(transfer.id)]
                del client.transfer_tasks[transfer.id]
            raise

        if transfer and model:
            del model[str(transfer.id)]
            del client.transfer_tasks[transfer.id]

        return await self._decrypt(resp.body)


    async def _decrypt(self, data: bytes) -> bytes:
        """Decrypt an encrypted file's data."""

        if not self.crypt_dict:
            return data

        func = functools.partial(
            nio.crypto.attachments.decrypt_attachment,
            data,
            self.crypt_dict["key"]["k"],
            self.crypt_dict["hashes"]["sha256"],
            self.crypt_dict["iv"],
        )

        # Run in a separate thread
        return await asyncio.get_event_loop().run_in_executor(None, func)


    @classmethod
    async def from_existing_file(
        cls,
        cache: "MediaCache",
        client_user_id: str,
        mxc: str,
        existing: Path,
        overwrite: bool = False,
        **kwargs,
    ) -> "Media":
        """Copy an existing file to cache and return a `Media` for it."""

        media = cls(
            cache = cache,
            client_user_id = client_user_id,
            mxc = mxc,
            title = existing.name,
            filesize = existing.stat().st_size,
            **kwargs,
        )
        media.local_path.parent.mkdir(parents=True, exist_ok=True)

        if not media.local_path.exists() or overwrite:
            func = functools.partial(shutil.copy, existing, media.local_path)
            await asyncio.get_event_loop().run_in_executor(None, func)

        return media


    @classmethod
    async def from_bytes(
        cls,
        cache: "MediaCache",
        client_user_id: str,
        mxc: str,
        filename: str,
        data: bytes,
        overwrite: bool = False,
        **kwargs,
    ) -> "Media":
        """Create a cached file from bytes data and return a `Media` for it."""

        media = cls(
            cache, client_user_id, mxc, filename, filesize=len(data), **kwargs,
        )
        media.local_path.parent.mkdir(parents=True, exist_ok=True)

        if not media.local_path.exists() or overwrite:
            path = media.local_path

            async with atomic_write(path, binary=True) as (file, done):
                await file.write(data)
                done()

        return media


@dataclass
class Thumbnail(Media):
    """A matrix media's thumbnail, which is downloaded or has yet to be."""

    wanted_size: Size = (800, 600)

    server_size: Optional[Size] = field(init=False, repr=False, default=None)


    @staticmethod
    def normalize_size(size: Size) -> Size:
        """Return standard `(width, height)` matrix thumbnail dimensions.

        The Matrix specification defines a few standard thumbnail dimensions
        for homeservers to store and return: 32x32, 96x96, 320x240, 640x480,
        and 800x600.

        This method returns the best matching size for a `size` without
        upscaling, e.g. passing `(641, 480)` will return `(800, 600)`.
        """

        if size[0] > 640 or size[1] > 480:
            return (800, 600)

        if size[0] > 320 or size[1] > 240:
            return (640, 480)

        if size[0] > 96 or size[1] > 96:
            return (320, 240)

        if size[0] > 32 or size[1] > 32:
            return (96, 96)

        return (32, 32)
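
    # Illustrative sketch (not part of this module): the "best match without
    # upscaling" ladder implemented by normalize_size above, checked on a few
    # hand-picked sizes. Assumes the class is importable in a real session.
    assert Thumbnail.normalize_size((30, 30)) == (32, 32)
    assert Thumbnail.normalize_size((96, 96)) == (96, 96)
    assert Thumbnail.normalize_size((97, 60)) == (320, 240)
    assert Thumbnail.normalize_size((641, 480)) == (800, 600)  # docstring example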


    @property
    def local_path(self) -> Path:
        """The path where the thumbnail either exists or should be downloaded.

        The returned paths are in this form:
        ```
        <base thumbnail folder>/<homeserver domain>/<standard size>/
        <file title>_<mxc id>.<file extension>`
        ```
        e.g.
        `~/.cache/moment/thumbnails/matrix.org/32x32/foo_Hm24ar11i768b0el.png`.
        """

        size = self.normalize_size(self.server_size or self.wanted_size)
        size_dir = f"{size[0]}x{size[1]}"

        parsed = urlparse(self.mxc)
        mxc_id = parsed.path.lstrip("/")
        title = Path(self.title)
        filename = f"{title.stem}_{mxc_id}{title.suffix}"

        return self.cache.thumbs_dir / parsed.netloc / size_dir / filename


    async def get_local(self) -> Path:
        """Return an existing thumbnail path or raise `FileNotFoundError`.

        If we have a bigger size thumbnail downloaded than the `wanted_size`
        for the media, return it instead of asking the server for a
        smaller thumbnail.
        """

        if self.local_path.exists():
            return self.local_path

        try_sizes = ((32, 32), (96, 96), (320, 240), (640, 480), (800, 600))
        parts = list(self.local_path.parts)
        size = self.normalize_size(self.server_size or self.wanted_size)

        for width, height in try_sizes:
            if width < size[0] or height < size[1]:
                continue

            parts[-2] = f"{width}x{height}"
            path = Path("/".join(parts))

            if path.exists():
                return path

        raise FileNotFoundError()


    async def _get_remote_data(self) -> bytes:
        """Return the (decrypted) media file's content from the server."""

        parsed = urlparse(self.mxc)
        client = self.cache.backend.clients[self.client_user_id]

        if self.crypt_dict:
            # Matrix makes encrypted thumbs only available through the download
            # end-point, not the thumbnail one
            resp = await client.download(
                server_name = parsed.netloc,
                media_id = parsed.path.lstrip("/"),
            )
        else:
            resp = await client.thumbnail(
                server_name = parsed.netloc,
                media_id = parsed.path.lstrip("/"),
                width = self.wanted_size[0],
                height = self.wanted_size[1],
            )

        decrypted = await self._decrypt(resp.body)

        with io.BytesIO(decrypted) as img:
            # The server may return a thumbnail bigger than what we asked for
            self.server_size = PILImage.open(img).size

        return decrypted
@@ -2,7 +2,7 @@
# SPDX-License-Identifier: LGPL-3.0-or-later

from typing import (
    TYPE_CHECKING, Any, Callable, Collection, Dict, List, Optional, Tuple,
)

from . import SyncId
@@ -10,185 +10,185 @@ from .model import Model
from .proxy import ModelProxy

if TYPE_CHECKING:
    from .model_item import ModelItem


class ModelFilter(ModelProxy):
    """Filter data from one or more source models."""

    def __init__(self, sync_id: SyncId) -> None:
        self.filtered_out: Dict[Tuple[Optional[SyncId], str], "ModelItem"] = {}
        self.items_changed_callbacks: List[Callable[[], None]] = []
        super().__init__(sync_id)


    def accept_item(self, item: "ModelItem") -> bool:
        """Return whether an item should be present or filtered out."""
        return True


    def source_item_set(
        self,
        source: Model,
        key,
        value: "ModelItem",
        _changed_fields: Optional[Dict[str, Any]] = None,
    ) -> None:
        with self.write_lock:
            if self.accept_source(source):
                value = self.convert_item(value)

                if self.accept_item(value):
                    self.__setitem__(
                        (source.sync_id, key), value, _changed_fields,
                    )
                    self.filtered_out.pop((source.sync_id, key), None)
                else:
                    self.filtered_out[source.sync_id, key] = value
                    self.pop((source.sync_id, key), None)

                for callback in self.items_changed_callbacks:
                    callback()


    def source_item_deleted(self, source: Model, key) -> None:
        with self.write_lock:
            if self.accept_source(source):
                try:
                    del self[source.sync_id, key]
                except KeyError:
                    del self.filtered_out[source.sync_id, key]

                for callback in self.items_changed_callbacks:
                    callback()


    def source_cleared(self, source: Model) -> None:
        with self.write_lock:
            if self.accept_source(source):
                for source_sync_id, key in self.copy():
                    if source_sync_id == source.sync_id:
                        try:
                            del self[source.sync_id, key]
                        except KeyError:
                            del self.filtered_out[source.sync_id, key]

                for callback in self.items_changed_callbacks:
                    callback()


    def refilter(
        self,
        only_if: Optional[Callable[["ModelItem"], bool]] = None,
    ) -> None:
        """Recheck every item to decide if they should be filtered out."""

        with self.write_lock:
            take_out = []
            bring_back = []

            for key, item in sorted(self.items(), key=lambda kv: kv[1]):
                if only_if and not only_if(item):
                    continue

                if not self.accept_item(item):
                    take_out.append(key)

            for key, item in self.filtered_out.items():
                if only_if and not only_if(item):
                    continue

                if self.accept_item(item):
                    bring_back.append(key)

            with self.batch_remove():
                for key in take_out:
                    self.filtered_out[key] = self.pop(key)

            for key in bring_back:
                self[key] = self.filtered_out.pop(key)

            if take_out or bring_back:
                for callback in self.items_changed_callbacks:
                    callback()


class FieldStringFilter(ModelFilter):
    """Filter source models based on whether their fields match a string.

    This is used for filter fields in QML: the user enters some text and only
    items with a certain field (typically `display_name`) that starts with the
    entered text will be shown.

    Matching is done using "smart case": insensitive if the filter text is
    all lowercase, sensitive otherwise.
    """

    def __init__(
        self,
        sync_id: SyncId,
        fields: Collection[str],
        no_filter_accept_all_items: bool = True,
    ) -> None:

        self.fields = fields
        self.no_filter_accept_all_items = no_filter_accept_all_items
        self._filter: str = ""

        super().__init__(sync_id)


    @property
    def filter(self) -> str:
        return self._filter


    @filter.setter
    def filter(self, value: str) -> None:
        if value != self._filter:
            self._filter = value
            self.refilter()


    def accept_item(self, item: "ModelItem") -> bool:
        if not self.filter:
            return self.no_filter_accept_all_items

        fields = {f: getattr(item, f) for f in self.fields}
        filtr = self.filter
        lowercase = filtr.lower()

        if lowercase == filtr:
            # Consider case only if filter isn't all lowercase
            filtr = lowercase
            fields = {name: value.lower() for name, value in fields.items()}

        return self.match(fields, filtr)


    def match(self, fields: Dict[str, str], filtr: str) -> bool:
        for value in fields.values():
            if value.startswith(filtr):
                return True

        return False
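
# Illustrative sketch (not part of this module): the "smart case" rule used by
# FieldStringFilter.accept_item above, shown on plain dicts instead of model
# items.
def _smart_case_startswith(fields: dict, filtr: str) -> bool:
    if filtr == filtr.lower():  # all-lowercase filter: case-insensitive
        fields = {name: value.lower() for name, value in fields.items()}
    return any(value.startswith(filtr) for value in fields.values())

assert _smart_case_startswith({"display_name": "Alice"}, "ali") is True
assert _smart_case_startswith({"display_name": "Alice"}, "Ali") is True
assert _smart_case_startswith({"display_name": "Alice"}, "aLi") is False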


class FieldSubstringFilter(FieldStringFilter):
    """Fuzzy-like alternative to `FieldStringFilter`.

    All words in the filter string must fully or partially match words in the
    item field values, e.g. "red l" can match "red light",
    "tired legs", "light red" (order of the filter words doesn't matter),
    but not just "red" or "light" by themselves.
    """

    def match(self, fields: Dict[str, str], filtr: str) -> bool:
        text = " ".join(fields.values())

        for word in filtr.split():
            if word and word not in text:
                return False

        return True
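
# Illustrative sketch (not part of this module): the word-wise matching of
# FieldSubstringFilter above - every word of the filter must appear somewhere
# in the joined field values.
def _words_match(text: str, filtr: str) -> bool:
    return all(word in text for word in filtr.split() if word)

assert _words_match("red light", "red l") is True
assert _words_match("tired legs", "red l") is True
assert _words_match("red", "red l") is False  # "l" alone is not in "red"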


@@ -23,415 +23,415 @@ ZERO_DATE = datetime.fromtimestamp(0)

class TypeSpecifier(AutoStrEnum):
    """Enum providing clarification of purpose for some matrix events."""

    Unset = auto()
    ProfileChange = auto()
    MembershipChange = auto()


class PingStatus(AutoStrEnum):
    """Enum for the status of a homeserver ping operation."""

    Done = auto()
    Pinging = auto()
    Failed = auto()


class RoomNotificationOverride(AutoStrEnum):
    """Possible per-room notification override settings, as displayed in the
    left sidepane's context menu when right-clicking a room.
    """
    UseDefaultSettings = auto()
    AllEvents = auto()
    HighlightsOnly = auto()
    IgnoreEvents = auto()


@dataclass(eq=False)
class Homeserver(ModelItem):
    """A homeserver we can connect to. The `id` field is the server's URL."""

    id: str = field()
    name: str = field()
    site_url: str = field()
    country: str = field()
    ping: int = -1
    status: PingStatus = PingStatus.Pinging
    stability: float = -1
    downtimes_ms: List[float] = field(default_factory=list)

    def __lt__(self, other: "Homeserver") -> bool:
        return (self.name.lower(), self.id) < (other.name.lower(), other.id)


@dataclass(eq=False)
class Account(ModelItem):
    """A logged in matrix account."""

    id: str = field()
    order: int = -1
    display_name: str = ""
    avatar_url: str = ""
    max_upload_size: int = 0
    profile_updated: datetime = ZERO_DATE
    connecting: bool = False
    total_unread: int = 0
    total_highlights: int = 0
    local_unreads: bool = False
    ignored_users: Set[str] = field(default_factory=set)

    # For some reason, Account cannot inherit Presence: QML keeps complaining
    # about a type error on an unknown file
    presence_support: bool = False
    save_presence: bool = True
    presence: Presence.State = Presence.State.offline
    currently_active: bool = False
    last_active_at: datetime = ZERO_DATE
    status_msg: str = ""

    def __lt__(self, other: "Account") -> bool:
        return (self.order, self.id) < (other.order, other.id)


@dataclass(eq=False)
class PushRule(ModelItem):
    """A push rule configured for one of our accounts."""

    id: Tuple[str, str] = field()  # (kind.value, rule_id)
    kind: nio.PushRuleKind = field()
    rule_id: str = field()
    order: int = field()
    default: bool = field()
    enabled: bool = True
    conditions: List[Dict[str, Any]] = field(default_factory=list)
    pattern: str = ""
    actions: List[Dict[str, Any]] = field(default_factory=list)
    notify: bool = False
    highlight: bool = False
    bubble: bool = False
    sound: str = ""  # usually "default" when set
    urgency_hint: bool = False

    def __lt__(self, other: "PushRule") -> bool:
        return (
            self.kind is nio.PushRuleKind.underride,
            self.kind is nio.PushRuleKind.sender,
            self.kind is nio.PushRuleKind.room,
            self.kind is nio.PushRuleKind.content,
            self.kind is nio.PushRuleKind.override,
            self.order,
            self.id,
        ) < (
            other.kind is nio.PushRuleKind.underride,
            other.kind is nio.PushRuleKind.sender,
            other.kind is nio.PushRuleKind.room,
            other.kind is nio.PushRuleKind.content,
            other.kind is nio.PushRuleKind.override,
            other.order,
            other.id,
        )
@dataclass
class Room(ModelItem):
    """A matrix room we are invited to, are or were member of."""

    id: str = field()
    for_account: str = ""
    given_name: str = ""
    display_name: str = ""
    main_alias: str = ""
    avatar_url: str = ""
    plain_topic: str = ""
    topic: str = ""
    inviter_id: str = ""
    inviter_name: str = ""
    inviter_avatar: str = ""
    left: bool = False

    typing_members: List[str] = field(default_factory=list)

    federated: bool = True
    encrypted: bool = False
    unverified_devices: bool = False
    invite_required: bool = True
    guests_allowed: bool = True

    default_power_level: int = 0
    own_power_level: int = 0
    can_invite: bool = False
    can_kick: bool = False
    can_redact_all: bool = False
    can_send_messages: bool = False
    can_set_name: bool = False
    can_set_topic: bool = False
    can_set_avatar: bool = False
    can_set_encryption: bool = False
    can_set_join_rules: bool = False
    can_set_guest_access: bool = False
    can_set_power_levels: bool = False

    last_event_date: datetime = ZERO_DATE

    unreads: int = 0
    highlights: int = 0
    local_unreads: bool = False

    notification_setting: RoomNotificationOverride = \
        RoomNotificationOverride.UseDefaultSettings

    lexical_sorting: bool = False
    pinned: bool = False

    # Allowed keys: "last_event_date", "unreads", "highlights", "local_unreads"
    # Keys in this dict will override their corresponding item fields for the
    # __lt__ method. This is used when we want to lock a room's position,
    # e.g. to avoid having the room move around when it is focused in the GUI.
    _sort_overrides: Dict[str, Any] = field(default_factory=dict)

    def _sorting(self, key: str) -> Any:
        return self._sort_overrides.get(key, getattr(self, key))

    def __lt__(self, other: "Room") -> bool:
        by_activity = not self.lexical_sorting

        return (
            self.for_account,
            other.pinned,
            self.left,  # Left rooms may have an inviter_id, check them first
            bool(other.inviter_id),
            bool(by_activity and other._sorting("highlights")),
            bool(by_activity and other._sorting("unreads")),
            bool(by_activity and other._sorting("local_unreads")),
            other._sorting("last_event_date") if by_activity else ZERO_DATE,
            (self.display_name or self.id).lower(),
            self.id,
        ) < (
            other.for_account,
            self.pinned,
            other.left,
            bool(self.inviter_id),
            bool(by_activity and self._sorting("highlights")),
            bool(by_activity and self._sorting("unreads")),
            bool(by_activity and self._sorting("local_unreads")),
            self._sorting("last_event_date") if by_activity else ZERO_DATE,
            (other.display_name or other.id).lower(),
            other.id,
        )

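# A minimal sorting sketch; the room IDs and field values below are made up.
# Because __lt__ puts `other.pinned` and the unread/highlight flags on the
# left-hand tuple and the matching `self` values on the right, truthy flags
# and newer `last_event_date` values sort *earlier*, so pinned and recently
# active rooms come first in a plain `sorted()` call or in a SortedList.
#
#     quiet  = Room(id="!quiet:example.org",  for_account="@me:example.org")
#     pinned = Room(id="!pinned:example.org", for_account="@me:example.org",
#                   pinned=True)
#     assert sorted([quiet, pinned])[0].id == "!pinned:example.org"
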
@dataclass(eq=False)
class AccountOrRoom(Account, Room):
    """The left sidepane in the GUI lists a mixture of accounts and rooms
    giving a tree view illusion. Since all items in a QML ListView must have
    the same available properties, this class inherits both
    `Account` and `Room` to fulfill that purpose.
    """

    type: Union[Type[Account], Type[Room]] = Account
    account_order: int = -1

    def __lt__(self, other: "AccountOrRoom") -> bool:  # type: ignore
        by_activity = not self.lexical_sorting

        return (
            self.account_order,
            self.id if self.type is Account else self.for_account,
            other.type is Account,
            other.pinned,
            self.left,
            bool(other.inviter_id),
            bool(by_activity and other._sorting("highlights")),
            bool(by_activity and other._sorting("unreads")),
            bool(by_activity and other._sorting("local_unreads")),
            other._sorting("last_event_date") if by_activity else ZERO_DATE,
            (self.display_name or self.id).lower(),
            self.id,
        ) < (
            other.account_order,
            other.id if other.type is Account else other.for_account,
            self.type is Account,
            self.pinned,
            other.left,
            bool(self.inviter_id),
            bool(by_activity and self._sorting("highlights")),
            bool(by_activity and self._sorting("unreads")),
            bool(by_activity and self._sorting("local_unreads")),
            self._sorting("last_event_date") if by_activity else ZERO_DATE,
            (other.display_name or other.id).lower(),
            other.id,
        )

@dataclass(eq=False)
class Member(ModelItem):
    """A member in a matrix room."""

    id: str = field()
    display_name: str = ""
    avatar_url: str = ""
    typing: bool = False
    power_level: int = 0
    invited: bool = False
    ignored: bool = False
    profile_updated: datetime = ZERO_DATE
    last_read_event: str = ""

    presence: Presence.State = Presence.State.offline
    currently_active: bool = False
    last_active_at: datetime = ZERO_DATE
    status_msg: str = ""

    def __lt__(self, other: "Member") -> bool:
        return (
            self.invited,
            other.power_level,
            self.ignored,
            Presence.State.offline if self.ignored else self.presence,
            (self.display_name or self.id[1:]).lower(),
            self.id,
        ) < (
            other.invited,
            self.power_level,
            other.ignored,
            Presence.State.offline if other.ignored else other.presence,
            (other.display_name or other.id[1:]).lower(),
            other.id,
        )

class TransferStatus(AutoStrEnum):
    """Enum describing the status of an upload or download operation."""

    Preparing = auto()
    Transfering = auto()
    Caching = auto()
    Error = auto()

@dataclass(eq=False)
class Transfer(ModelItem):
    """Represent a running or failed file upload/download operation."""

    id: UUID = field()
    is_upload: bool = field()
    filepath: Path = Path("-")

    total_size: int = 0
    transferred: int = 0
    speed: float = 0
    time_left: timedelta = timedelta(0)
    paused: bool = False

    status: TransferStatus = TransferStatus.Preparing
    error: OptionalExceptionType = type(None)
    error_args: Tuple[Any, ...] = ()

    start_date: datetime = field(init=False, default_factory=datetime.now)

    def __lt__(self, other: "Transfer") -> bool:
        return (self.start_date, self.id) > (other.start_date, other.id)

@dataclass(eq=False)
class Event(ModelItem):
    """A matrix state event or message."""

    id: str = field()
    event_id: str = field()
    event_type: Type[nio.Event] = field()
    date: datetime = field()
    sender_id: str = field()
    sender_name: str = field()
    sender_avatar: str = field()
    fetch_profile: bool = False

    content: str = ""
    inline_content: str = ""
    reason: str = ""
    links: List[str] = field(default_factory=list)
    mentions: List[Tuple[str, str]] = field(default_factory=list)

    type_specifier: TypeSpecifier = TypeSpecifier.Unset

    target_id: str = ""
    target_name: str = ""
    target_avatar: str = ""
    redacter_id: str = ""
    redacter_name: str = ""

    # {user_id: server_timestamp} - QML can't parse dates from JSONified dicts
    last_read_by: Dict[str, int] = field(default_factory=dict)
    read_by_count: int = 0

    is_local_echo: bool = False
    source: Optional[nio.Event] = None

    media_url: str = ""
    media_http_url: str = ""
    media_title: str = ""
    media_width: int = 0
    media_height: int = 0
    media_duration: int = 0
    media_size: int = 0
    media_mime: str = ""
    media_crypt_dict: Dict[str, Any] = field(default_factory=dict)
    media_local_path: Union[str, Path] = ""

    thumbnail_url: str = ""
    thumbnail_mime: str = ""
    thumbnail_width: int = 0
    thumbnail_height: int = 0
    thumbnail_crypt_dict: Dict[str, Any] = field(default_factory=dict)

    def __lt__(self, other: "Event") -> bool:
        return (self.date, self.id) > (other.date, other.id)

    @property
    def plain_content(self) -> str:
        """Plaintext version of the event's content."""

        if isinstance(self.source, nio.RoomMessageText):
            return self.source.body

        return strip_html_tags(self.content)

    @staticmethod
    def parse_links(text: str) -> List[str]:
        """Return list of URLs (`<a href=...>` tags) present in the content."""

        ignore = []

        if "<mx-reply>" in text or "mention" in text:
            parser = lxml.html.etree.HTMLParser()
            tree = lxml.etree.fromstring(text, parser)
            ignore = [
                lxml.etree.tostring(matching_element)
                for ugly_disgusting_xpath in [
                    # Match mx-reply > blockquote > second a (user ID link)
                    "//mx-reply/blockquote/a[count(preceding-sibling::*)<=1]",
                    # Match <a> tags with a mention class
                    '//a[contains(concat(" ",normalize-space(@class)," ")'
                    '," mention ")]',
                ]
                for matching_element in tree.xpath(ugly_disgusting_xpath)
            ]

        if not text.strip():
            return []

        return [
            url for el, attrib, url, pos in lxml.html.iterlinks(text)
            if lxml.etree.tostring(el) not in ignore
        ]

    def serialized_field(self, field: str) -> Any:
        if field == "source":
            source_dict = asdict(self.source) if self.source else {}
            return json.dumps(source_dict)

        return super().serialized_field(field)

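# A small usage sketch with hypothetical HTML content: parse_links() feeds the
# text through lxml.html.iterlinks() and drops any link matched by the
# reply/mention XPaths above, so normal hyperlinks survive while the user ID
# link of an <mx-reply> quote or a "mention" <a> tag is ignored. Note that
# Event.__lt__ compares with `>`, so newer events sort before older ones.
#
#     html = '<p>See <a href="https://matrix.org">the spec</a></p>'
#     assert Event.parse_links(html) == ["https://matrix.org"]
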
@@ -5,7 +5,7 @@ import itertools
from contextlib import contextmanager
from threading import RLock
from typing import (
    TYPE_CHECKING, Any, Dict, Iterator, List, MutableMapping, Optional, Tuple,
)

from sortedcontainers import SortedList

@@ -15,199 +15,199 @@ from ..utils import serialize_value_for_qml
from . import SyncId

if TYPE_CHECKING:
    from .model_item import ModelItem
    from .proxy import ModelProxy  # noqa


class Model(MutableMapping):
    """A mapping of `{ModelItem.id: ModelItem}` synced between Python & QML.

    From the Python side, the model is usable like a normal dict of
    `ModelItem` subclass objects.
    Different types of `ModelItem` must not be mixed in the same model.

    When items are added, replaced, removed, have field value changes, or the
    model is cleared, corresponding `PyOtherSideEvent`s are fired to inform
    QML of the changes so that it can keep its models in sync.

    Items in the model are kept sorted using the `ModelItem` subclass `__lt__`.
    """

    instances: Dict[SyncId, "Model"] = {}
    proxies: Dict[SyncId, "ModelProxy"] = {}

    def __init__(self, sync_id: Optional[SyncId]) -> None:
        self.sync_id: Optional[SyncId] = sync_id
        self.write_lock: RLock = RLock()
        self._data: Dict[Any, "ModelItem"] = {}
        self._sorted_data: SortedList["ModelItem"] = SortedList()

        self.take_items_ownership: bool = True

        # [(index, item.id), ...]
        self._active_batch_removed: Optional[List[Tuple[int, Any]]] = None

        if self.sync_id:
            self.instances[self.sync_id] = self

    def __repr__(self) -> str:
        """Provide a full representation of the model and its content."""

        return "%s(sync_id=%s, %s)" % (
            type(self).__name__, self.sync_id, self._data,
        )

    def __str__(self) -> str:
        """Provide a short "<sync_id>: <num> items" representation."""
        return f"{self.sync_id}: {len(self)} items"

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(
        self,
        key,
        value: "ModelItem",
        _changed_fields: Optional[Dict[str, Any]] = None,
    ) -> None:
        with self.write_lock:
            existing = self._data.get(key)
            new = value

            # Collect changed fields

            changed_fields = _changed_fields or {}

            if not changed_fields:
                for field in new.__dataclass_fields__:  # type: ignore
                    if field.startswith("_"):
                        continue

                    changed = True

                    if existing:
                        changed = \
                            getattr(new, field) != getattr(existing, field)

                    if changed:
                        changed_fields[field] = new.serialized_field(field)

            # Set parent model on new item

            if self.sync_id and self.take_items_ownership:
                new.parent_model = self

            # Insert into sorted data

            index_then = None

            if existing:
                index_then = self._sorted_data.index(existing)
                del self._sorted_data[index_then]

            self._sorted_data.add(new)
            index_now = self._sorted_data.index(new)

            # Insert into dict data

            self._data[key] = new

            # Callbacks

            for sync_id, proxy in self.proxies.items():
                if sync_id != self.sync_id:
                    proxy.source_item_set(self, key, value)

            # Emit PyOtherSide event

            if self.sync_id and (index_then != index_now or changed_fields):
                ModelItemSet(
                    self.sync_id, index_then, index_now, changed_fields,
                )

    def __delitem__(self, key) -> None:
        with self.write_lock:
            item = self._data[key]

            if self.sync_id and self.take_items_ownership:
                item.parent_model = None

            del self._data[key]

            index = self._sorted_data.index(item)
            del self._sorted_data[index]

            for sync_id, proxy in self.proxies.items():
                if sync_id != self.sync_id:
                    proxy.source_item_deleted(self, key)

            if self.sync_id:
                if self._active_batch_removed is None:
                    i = serialize_value_for_qml(item.id, json_list_dicts=True)
                    ModelItemDeleted(self.sync_id, index, 1, (i,))
                else:
                    self._active_batch_removed.append((index, item.id))

    def __iter__(self) -> Iterator:
        return iter(self._data)

    def __len__(self) -> int:
        return len(self._data)

    def __lt__(self, other: "Model") -> bool:
        """Sort `Model` objects lexically by `sync_id`."""
        return str(self.sync_id) < str(other.sync_id)

    def clear(self) -> None:
        super().clear()
        if self.sync_id:
            ModelCleared(self.sync_id)

    def copy(self, sync_id: Optional[SyncId] = None) -> "Model":
        new = type(self)(sync_id=sync_id)
        new.update(self)
        return new

    @contextmanager
    def batch_remove(self):
        """Context manager that accumulates item removal events.

        When the context manager exits, sequences of removed items are grouped
        and one `ModelItemDeleted` pyotherside event is fired per sequence.
        """

        with self.write_lock:
            try:
                self._active_batch_removed = []
                yield None
            finally:
                batch = self._active_batch_removed
                groups = [
                    list(group) for item, group in
                    itertools.groupby(batch, key=lambda x: x[0])
                ]

                def serialize_id(id_):
                    return serialize_value_for_qml(id_, json_list_dicts=True)

                for group in groups:
                    ModelItemDeleted(
                        self.sync_id,
                        index = group[0][0],
                        count = len(group),
                        ids   = [serialize_id(item[1]) for item in group],
                    )

                self._active_batch_removed = None

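# A minimal usage sketch; the sync ID and user/room IDs are made up. A Model
# behaves like a dict keyed by item ID while keeping a parallel SortedList,
# and it is the __setitem__/__delitem__ calls that fire the ModelItemSet and
# ModelItemDeleted pyotherside events QML listens for.
#
#     members = Model(sync_id=("@me:example.org", "!room:example.org", "members"))
#     members["@alice:example.org"] = Member(id="@alice:example.org")
#     members["@alice:example.org"].set_fields(display_name="Alice")
#     del members["@alice:example.org"]
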
@@ -8,122 +8,122 @@ from ..pyotherside_events import ModelItemSet
from ..utils import serialize_value_for_qml

if TYPE_CHECKING:
    from .model import Model


@dataclass(eq=False)
class ModelItem:
    """Base class for items stored inside a `Model`.

    This class must be subclassed and not used directly.
    All subclasses must use the `@dataclass(eq=False)` decorator.

    Subclasses are also expected to implement `__lt__()`,
    to provide support for comparisons with the `<`, `>`, `<=`, `>=` operators
    and thus allow a `Model` to keep its data sorted.

    Make sure to respect SortedList requirements when implementing `__lt__()`:
    http://www.grantjenks.com/docs/sortedcontainers/introduction.html#caveats
    """

    id: Any = field()

    def __new__(cls, *_args, **_kwargs) -> "ModelItem":
        cls.parent_model: Optional[Model] = None
        return super().__new__(cls)

    def __setattr__(self, name: str, value) -> None:
        self.set_fields(**{name: value})

    def __delattr__(self, name: str) -> None:
        raise NotImplementedError()

    @property
    def serialized(self) -> Dict[str, Any]:
        """Return this item as a dict ready to be passed to QML."""

        return {
            name: self.serialized_field(name)
            for name in self.__dataclass_fields__  # type: ignore
            if not name.startswith("_")
        }

    def serialized_field(self, field: str) -> Any:
        """Return a field's value in a form suitable for passing to QML."""

        value = getattr(self, field)
        return serialize_value_for_qml(value, json_list_dicts=True)

    def set_fields(self, _force: bool = False, **fields: Any) -> None:
        """Set one or more field's value and call `ModelItem.notify_change`.

        For efficiency, to change multiple fields, this method should be
        used rather than setting them one after another with `=` or `setattr`.
        """

        parent = self.parent_model

        # If we're currently being created or haven't been put in a model yet:
        if not parent:
            for name, value in fields.items():
                super().__setattr__(name, value)
            return

        with parent.write_lock:
            qml_changes = {}
            changes = {
                name: value for name, value in fields.items()
                if _force or getattr(self, name) != value
            }

            if not changes:
                return

            # To avoid corrupting the SortedList, we have to take out the item,
            # apply the field changes, *then* add it back in.

            index_then = parent._sorted_data.index(self)
            del parent._sorted_data[index_then]

            for name, value in changes.items():
                super().__setattr__(name, value)
                is_field = name in self.__dataclass_fields__  # type: ignore

                if is_field and not name.startswith("_"):
                    qml_changes[name] = self.serialized_field(name)

            parent._sorted_data.add(self)
            index_now = parent._sorted_data.index(self)
            index_change = index_then != index_now

            # Now, inform QML about changed dataclass fields if any.

            if not parent.sync_id or (not qml_changes and not index_change):
                return

            ModelItemSet(parent.sync_id, index_then, index_now, qml_changes)

            # Inform any proxy connected to the parent model of the field changes

            for sync_id, proxy in parent.proxies.items():
                if sync_id != parent.sync_id:
                    proxy.source_item_set(parent, self.id, self, qml_changes)

    def notify_change(self, *fields: str) -> None:
        """Notify the parent model that fields of this item have changed.

        The model cannot automatically detect changes inside
        object fields, such as lists or dicts having their data modified.
        In these cases, this method should be called.
        """

        kwargs = {name: getattr(self, name) for name in fields}
        kwargs["_force"] = True
        self.set_fields(**kwargs)

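# Sketch of the intended update pattern, using a hypothetical Room item that
# already lives inside a model: assigning attributes one by one goes through
# __setattr__ and fires one ModelItemSet event per field, while a single
# set_fields() call batches them; notify_change() is for mutable fields
# edited in place, which the model cannot detect on its own.
#
#     room.set_fields(unreads=3, highlights=1)
#     room.typing_members.append("@bob:example.org")
#     room.notify_change("typing_members")
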
@@ -8,66 +8,66 @@ from typing import Dict, List, Union
from . import SyncId
from .model import Model
from .special_models import (
    AllRooms, AutoCompletedMembers, FilteredHomeservers, FilteredMembers,
    MatchingAccounts,
)


@dataclass(frozen=True)
class ModelStore(UserDict):
    """Dict of sync ID keys and `Model` values.

    The dict keys must be the sync ID of `Model` values.
    If a non-existent key is accessed, a corresponding `Model` will be
    created, put into the internal `data` dict and returned.
    """

    data: Dict[SyncId, Model] = field(default_factory=dict)

    def __missing__(self, key: SyncId) -> Model:
        """When accessing a non-existent model, create and return it.

        Special models rather than a generic `Model` object may be returned
        depending on the passed key.
        """

        is_tuple = isinstance(key, tuple)

        model: Model

        if key == "all_rooms":
            model = AllRooms(self["accounts"])
        elif key == "matching_accounts":
            model = MatchingAccounts(self["all_rooms"])
        elif key == "filtered_homeservers":
            model = FilteredHomeservers()
        elif is_tuple and len(key) == 3 and key[2] == "filtered_members":
            model = FilteredMembers(user_id=key[0], room_id=key[1])
        elif is_tuple and len(key) == 3 and key[2] == "autocompleted_members":
            model = AutoCompletedMembers(user_id=key[0], room_id=key[1])
        else:
            model = Model(sync_id=key)

        self.data[key] = model
        return model

    def __str__(self) -> str:
        """Provide a nice overview of stored models when `print()` called."""

        return "%s(\n    %s\n)" % (
            type(self).__name__,
            "\n    ".join(sorted(str(v) for v in self.values())),
        )

    async def ensure_exists_from_qml(
        self, sync_id: Union[SyncId, List[str]],
    ) -> None:
        """Create model if it doesn't exist. Should only be called by QML."""

        if isinstance(sync_id, list):  # QML can't pass tuples
            sync_id = tuple(sync_id)

        self[sync_id]  # will call __missing__ if needed

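# Access sketch with made-up IDs: missing keys are materialised on first
# access, so the special string keys map to their dedicated filter models and
# anything else gets a plain Model; tuple sync IDs identify per-account or
# per-room collections.
#
#     store = ModelStore()
#     store["all_rooms"]  # creates an AllRooms proxy (and "accounts" with it)
#     store[("@me:example.org", "!room:example.org", "members")]  # plain Model
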
@@ -8,68 +8,68 @@ from . import SyncId
from .model import Model

if TYPE_CHECKING:
    from .model_item import ModelItem


class ModelProxy(Model):
    """Proxies data from one or more `Model` objects."""

    def __init__(self, sync_id: SyncId) -> None:
        super().__init__(sync_id)
        self.take_items_ownership = False
        Model.proxies[sync_id] = self

        with self.write_lock:
            for sync_id, model in Model.instances.items():
                if sync_id != self.sync_id and self.accept_source(model):
                    for key, item in model.items():
                        self.source_item_set(model, key, item)

    def accept_source(self, source: Model) -> bool:
        """Return whether passed `Model` should be proxied by this proxy."""
        return True

    def convert_item(self, item: "ModelItem") -> "ModelItem":
        """Take a source `ModelItem`, return an appropriate one for proxy.

        By default, this returns the passed item unchanged.

        Due to QML `ListModel` restrictions, if multiple source models
        containing different subclasses of `ModelItem` are proxied,
        they should be converted to a same `ModelItem`
        subclass by overriding this function.
        """
        return copy(item)

    def source_item_set(
        self,
        source: Model,
        key,
        value: "ModelItem",
        _changed_fields: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Called when a source model item is added or changed."""

        if self.accept_source(source):
            value = self.convert_item(value)
            self.__setitem__((source.sync_id, key), value, _changed_fields)

    def source_item_deleted(self, source: Model, key) -> None:
        """Called when a source model item is removed."""

        if self.accept_source(source):
            del self[source.sync_id, key]

    def source_cleared(self, source: Model) -> None:
        """Called when a source model is cleared."""

        if self.accept_source(source):
            with self.batch_remove():
                for source_sync_id, key in self.copy():
                    if source_sync_id == source.sync_id:
                        del self[source_sync_id, key]

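# Subclassing sketch (the EveryMember class is hypothetical): a concrete proxy
# usually narrows accept_source() to the models it mirrors and may override
# convert_item(); keys in the proxy are (source_sync_id, original_key) tuples.
#
#     class EveryMember(ModelProxy):
#         def accept_source(self, source: Model) -> bool:
#             sid = source.sync_id
#             return isinstance(sid, tuple) and len(sid) == 3 and sid[2] == "members"
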
@@ -11,143 +11,143 @@ from .model_item import ModelItem


class AllRooms(FieldSubstringFilter):
    """Flat filtered list of all accounts and their rooms."""

    def __init__(self, accounts: Model) -> None:
        self.accounts = accounts
        self._collapsed: Set[str] = set()

        super().__init__(sync_id="all_rooms", fields=("display_name",))
        self.items_changed_callbacks.append(self.refilter_accounts)

    def set_account_collapse(self, user_id: str, collapsed: bool) -> None:
        """Set whether the rooms for an account should be filtered out."""

        def only_if(item):
            return item.type is Room and item.for_account == user_id

        if collapsed and user_id not in self._collapsed:
            self._collapsed.add(user_id)
            self.refilter(only_if)

        if not collapsed and user_id in self._collapsed:
            self._collapsed.remove(user_id)
            self.refilter(only_if)

    def accept_source(self, source: Model) -> bool:
        return source.sync_id == "accounts" or (
            isinstance(source.sync_id, tuple) and
            len(source.sync_id) == 2 and
            source.sync_id[1] == "rooms"
        )

    def convert_item(self, item: ModelItem) -> AccountOrRoom:
        return AccountOrRoom(
            **asdict(item),
            type = type(item),  # type: ignore

            account_order =
                item.order if isinstance(item, Account) else
                self.accounts[item.for_account].order,  # type: ignore
        )

    def accept_item(self, item: ModelItem) -> bool:
        assert isinstance(item, AccountOrRoom)  # nosec

        if not self.filter and \
                item.type is Room and \
                item.for_account in self._collapsed:
            return False

        matches_filter = super().accept_item(item)

        if item.type is not Account or not self.filter:
            return matches_filter

        return next(
            (i for i in self.values() if i.for_account == item.id), False,
        )

    def refilter_accounts(self) -> None:
        self.refilter(lambda i: i.type is Account)  # type: ignore


class MatchingAccounts(ModelFilter):
    """List of our accounts in `AllRooms` with at least one matching room if
    a `filter` is set, else list of all accounts.
    """

    def __init__(self, all_rooms: AllRooms) -> None:
        self.all_rooms = all_rooms
        self.all_rooms.items_changed_callbacks.append(self.refilter)

        super().__init__(sync_id="matching_accounts")

    def accept_source(self, source: Model) -> bool:
        return source.sync_id == "accounts"

    def accept_item(self, item: ModelItem) -> bool:
        if not self.all_rooms.filter:
            return True

        return next(
            (i for i in self.all_rooms.values() if i.id == item.id),
            False,
        )


class FilteredMembers(FieldSubstringFilter):
    """Filtered list of members for a room."""

    def __init__(self, user_id: str, room_id: str) -> None:
        self.user_id = user_id
        self.room_id = room_id
        sync_id = (user_id, room_id, "filtered_members")

        super().__init__(sync_id=sync_id, fields=("display_name",))

    def accept_source(self, source: Model) -> bool:
        return source.sync_id == (self.user_id, self.room_id, "members")


class AutoCompletedMembers(FieldStringFilter):
    """Filtered list of mentionable members for tab-completion."""

    def __init__(self, user_id: str, room_id: str) -> None:
        self.user_id = user_id
        self.room_id = room_id
        sync_id = (user_id, room_id, "autocompleted_members")

        super().__init__(
            sync_id = sync_id,
            fields = ("display_name", "id"),
            no_filter_accept_all_items = False,
        )

    def accept_source(self, source: Model) -> bool:
        return source.sync_id == (self.user_id, self.room_id, "members")

    def match(self, fields: Dict[str, str], filtr: str) -> bool:
        fields["id"] = fields["id"][1:]  # remove leading @
        return super().match(fields, filtr)


class FilteredHomeservers(FieldSubstringFilter):
    """Filtered list of public Matrix homeservers."""

    def __init__(self) -> None:
        super().__init__(sync_id="filtered_homeservers", fields=("id", "name"))

    def accept_source(self, source: Model) -> bool:
        return source.sync_id == "homeservers"

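# Filtering sketch with made-up IDs: AutoCompletedMembers matches on display
# name or on the user ID with its leading "@" stripped, and accepts nothing
# when no filter is set (no_filter_accept_all_items=False), which is what a
# tab-completion popup wants. Assigning to `filter` is assumed here to trigger
# a refilter in the filter base classes.
#
#     completions = AutoCompletedMembers("@me:example.org", "!room:example.org")
#     completions.filter = "ali"  # would match "@alice:example.org" or "Alice"
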
File diff suppressed because it is too large
@@ -2,46 +2,46 @@ from collections import UserDict
from typing import TYPE_CHECKING, Any, Dict, Iterator

if TYPE_CHECKING:
    from .section import Section

from .. import color

PCN_GLOBALS: Dict[str, Any] = {
    "color": color.Color,
    "hsluv": color.hsluv,
    "hsluva": color.hsluva,
    "hsl": color.hsl,
    "hsla": color.hsla,
    "rgb": color.rgb,
    "rgba": color.rgba,
}


class GlobalsDict(UserDict):
    def __init__(self, section: "Section") -> None:
        super().__init__()
        self.section = section

    @property
    def full_dict(self) -> Dict[str, Any]:
        return {
            **PCN_GLOBALS,
            **(self.section.root if self.section.root else {}),
            **(self.section.root.globals if self.section.root else {}),
            "self": self.section,
            "parent": self.section.parent,
            "root": self.section.parent,
            **self.data,
        }

    def __getitem__(self, key: str) -> Any:
        return self.full_dict[key]

    def __iter__(self) -> Iterator[str]:
        return iter(self.full_dict)

    def __len__(self) -> int:
        return len(self.full_dict)

    def __repr__(self) -> str:
        return repr(self.full_dict)

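# Name resolution sketch; `section` stands for some already-built Section.
# full_dict is rebuilt on every lookup, so later sources shadow earlier ones:
# the PCN_GLOBALS builtins first, then the root section and its globals, then
# the reserved "self"/"parent"/"root" names, and finally this dict's own data.
#
#     theme_globals = GlobalsDict(section)
#     theme_globals["accent"] = "#4a90d9"
#     assert theme_globals["accent"] == theme_globals.full_dict["accent"]
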
@ -3,50 +3,50 @@ from dataclasses import dataclass, field
|
|||
from typing import TYPE_CHECKING, Any, Callable, Dict, Type
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .section import Section
|
||||
from .section import Section
|
||||
|
||||
TYPE_PROCESSORS: Dict[str, Callable[[Any], Any]] = {
|
||||
"tuple": lambda v: tuple(v),
|
||||
"set": lambda v: set(v),
|
||||
"tuple": lambda v: tuple(v),
|
||||
"set": lambda v: set(v),
|
||||
}
|
||||
|
||||
|
||||
class Unset:
|
||||
pass
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Property:
|
||||
name: str = field()
|
||||
annotation: str = field()
|
||||
expression: str = field()
|
||||
section: "Section" = field()
|
||||
value_override: Any = Unset
|
||||
name: str = field()
|
||||
annotation: str = field()
|
||||
expression: str = field()
|
||||
section: "Section" = field()
|
||||
value_override: Any = Unset
|
||||
|
||||
def __get__(self, obj: "Section", objtype: Type["Section"]) -> Any:
|
||||
if not obj:
|
||||
return self
|
||||
def __get__(self, obj: "Section", objtype: Type["Section"]) -> Any:
|
||||
if not obj:
|
||||
return self
|
||||
|
||||
if self.value_override is not Unset:
|
||||
return self.value_override
|
||||
if self.value_override is not Unset:
|
||||
return self.value_override
|
||||
|
||||
env = obj.globals
|
||||
result = eval(self.expression, dict(env), env) # nosec
|
||||
env = obj.globals
|
||||
result = eval(self.expression, dict(env), env) # nosec
|
||||
|
||||
return process_value(self.annotation, result)
|
||||
return process_value(self.annotation, result)
|
||||
|
||||
def __set__(self, obj: "Section", value: Any) -> None:
|
||||
self.value_override = value
|
||||
obj._edited[self.name] = value
|
||||
def __set__(self, obj: "Section", value: Any) -> None:
|
||||
self.value_override = value
|
||||
obj._edited[self.name] = value
|
||||
|
||||
|
||||
def process_value(annotation: str, value: Any) -> Any:
|
||||
annotation = re.sub(r"\[.*\]$", "", annotation)
|
||||
annotation = re.sub(r"\[.*\]$", "", annotation)
|
||||
|
||||
if annotation in TYPE_PROCESSORS:
|
||||
return TYPE_PROCESSORS[annotation](value)
|
||||
if annotation in TYPE_PROCESSORS:
|
||||
return TYPE_PROCESSORS[annotation](value)
|
||||
|
||||
if annotation.lower() in TYPE_PROCESSORS:
|
||||
return TYPE_PROCESSORS[annotation.lower()](value)
|
||||
if annotation.lower() in TYPE_PROCESSORS:
|
||||
return TYPE_PROCESSORS[annotation.lower()](value)
|
||||
|
||||
return value
|
||||
return value
|
||||
|
|
|
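Condensed into a standalone snippet, the annotation handling above amounts to stripping any subscripted generics and then doing a case-insensitive lookup in `TYPE_PROCESSORS`; the sample annotations below are illustrative only:

import re

TYPE_PROCESSORS = {"tuple": tuple, "set": set}

def process_value(annotation: str, value):
    # "Tuple[int, int]" -> "Tuple", then matched case-insensitively
    annotation = re.sub(r"\[.*\]$", "", annotation)
    if annotation.lower() in TYPE_PROCESSORS:
        return TYPE_PROCESSORS[annotation.lower()](value)
    return value

print(process_value("Tuple[int, int]", [1, 2]))  # (1, 2)
print(process_value("str", "#fff"))              # unchanged: "#fff"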
@ -7,8 +7,8 @@ from dataclasses import dataclass, field
|
|||
from operator import attrgetter
|
||||
from pathlib import Path
|
||||
from typing import (
|
||||
Any, Callable, ClassVar, Dict, Generator, List, Optional, Set, Tuple, Type,
|
||||
Union,
|
||||
Any, Callable, ClassVar, Dict, Generator, List, Optional, Set, Tuple, Type,
|
||||
Union,
|
||||
)
|
||||
|
||||
import pyotherside
|
||||
|
@ -25,423 +25,423 @@ assert BUILTINS_DIR.name == "src"
|
|||
|
||||
@dataclass(repr=False, eq=False)
|
||||
class Section(MutableMapping):
|
||||
sections: ClassVar[Set[str]] = set()
|
||||
methods: ClassVar[Set[str]] = set()
|
||||
properties: ClassVar[Set[str]] = set()
|
||||
order: ClassVar[Dict[str, None]] = OrderedDict()
|
||||
sections: ClassVar[Set[str]] = set()
|
||||
methods: ClassVar[Set[str]] = set()
|
||||
properties: ClassVar[Set[str]] = set()
|
||||
order: ClassVar[Dict[str, None]] = OrderedDict()
|
||||
|
||||
source_path: Optional[Path] = None
|
||||
root: Optional["Section"] = None
|
||||
parent: Optional["Section"] = None
|
||||
builtins_path: Path = BUILTINS_DIR
|
||||
included: List[Path] = field(default_factory=list)
|
||||
globals: GlobalsDict = field(init=False)
|
||||
source_path: Optional[Path] = None
|
||||
root: Optional["Section"] = None
|
||||
parent: Optional["Section"] = None
|
||||
builtins_path: Path = BUILTINS_DIR
|
||||
included: List[Path] = field(default_factory=list)
|
||||
globals: GlobalsDict = field(init=False)
|
||||
|
||||
_edited: Dict[str, Any] = field(init=False, default_factory=dict)
|
||||
_edited: Dict[str, Any] = field(init=False, default_factory=dict)
|
||||
|
||||
def __init_subclass__(cls, **kwargs) -> None:
|
||||
# Make these attributes not shared between Section and its subclasses
|
||||
cls.sections = set()
|
||||
cls.methods = set()
|
||||
cls.properties = set()
|
||||
cls.order = OrderedDict()
|
||||
def __init_subclass__(cls, **kwargs) -> None:
|
||||
# Make these attributes not shared between Section and its subclasses
|
||||
cls.sections = set()
|
||||
cls.methods = set()
|
||||
cls.properties = set()
|
||||
cls.order = OrderedDict()
|
||||
|
||||
for parent_class in cls.__bases__:
|
||||
if not issubclass(parent_class, Section):
|
||||
continue
|
||||
for parent_class in cls.__bases__:
|
||||
if not issubclass(parent_class, Section):
|
||||
continue
|
||||
|
||||
cls.sections |= parent_class.sections # union operator
|
||||
cls.methods |= parent_class.methods
|
||||
cls.properties |= parent_class.properties
|
||||
cls.order.update(parent_class.order)
|
||||
cls.sections |= parent_class.sections # union operator
|
||||
cls.methods |= parent_class.methods
|
||||
cls.properties |= parent_class.properties
|
||||
cls.order.update(parent_class.order)
|
||||
|
||||
super().__init_subclass__(**kwargs) # type: ignore
|
||||
super().__init_subclass__(**kwargs) # type: ignore
|
||||
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
self.globals = GlobalsDict(self)
|
||||
def __post_init__(self) -> None:
|
||||
self.globals = GlobalsDict(self)
|
||||
|
||||
|
||||
def __getattr__(self, name: str) -> Union["Section", Any]:
|
||||
# This method signature tells mypy about the dynamic attribute types
|
||||
# we can access. The body is run for attributes that aren't found.
|
||||
def __getattr__(self, name: str) -> Union["Section", Any]:
|
||||
# This method signature tells mypy about the dynamic attribute types
|
||||
# we can access. The body is run for attributes that aren't found.
|
||||
|
||||
return super().__getattribute__(name)
|
||||
return super().__getattribute__(name)
|
||||
|
||||
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
# This method tells mypy about the dynamic attribute types we can set.
|
||||
# The body is also run when setting an existing or new attribute.
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
# This method tells mypy about the dynamic attribute types we can set.
|
||||
# The body is also run when setting an existing or new attribute.
|
||||
|
||||
if name in self.__dataclass_fields__:
|
||||
super().__setattr__(name, value)
|
||||
return
|
||||
if name in self.__dataclass_fields__:
|
||||
super().__setattr__(name, value)
|
||||
return
|
||||
|
||||
if name in self.properties:
|
||||
value = process_value(getattr(type(self), name).annotation, value)
|
||||
if name in self.properties:
|
||||
value = process_value(getattr(type(self), name).annotation, value)
|
||||
|
||||
if self[name] == value:
|
||||
return
|
||||
if self[name] == value:
|
||||
return
|
||||
|
||||
getattr(type(self), name).value_override = value
|
||||
self._edited[name] = value
|
||||
return
|
||||
getattr(type(self), name).value_override = value
|
||||
self._edited[name] = value
|
||||
return
|
||||
|
||||
if name in self.sections or isinstance(value, Section):
|
||||
raise NotImplementedError(f"cannot set section {name!r}")
|
||||
if name in self.sections or isinstance(value, Section):
|
||||
raise NotImplementedError(f"cannot set section {name!r}")
|
||||
|
||||
if name in self.methods or callable(value):
|
||||
raise NotImplementedError(f"cannot set method {name!r}")
|
||||
if name in self.methods or callable(value):
|
||||
raise NotImplementedError(f"cannot set method {name!r}")
|
||||
|
||||
self._set_property(name, "Any", "None")
|
||||
getattr(type(self), name).value_override = value
|
||||
self._edited[name] = value
|
||||
self._set_property(name, "Any", "None")
|
||||
getattr(type(self), name).value_override = value
|
||||
self._edited[name] = value
|
||||
|
||||
|
||||
def __delattr__(self, name: str) -> None:
|
||||
raise NotImplementedError(f"cannot delete existing attribute {name!r}")
|
||||
def __delattr__(self, name: str) -> None:
|
||||
raise NotImplementedError(f"cannot delete existing attribute {name!r}")
|
||||
|
||||
|
||||
def __getitem__(self, key: str) -> Any:
|
||||
try:
|
||||
return getattr(self, key)
|
||||
except AttributeError as err:
|
||||
raise KeyError(str(err))
|
||||
def __getitem__(self, key: str) -> Any:
|
||||
try:
|
||||
return getattr(self, key)
|
||||
except AttributeError as err:
|
||||
raise KeyError(str(err))
|
||||
|
||||
|
||||
def __setitem__(self, key: str, value: Union["Section", str]) -> None:
|
||||
setattr(self, key, value)
|
||||
def __setitem__(self, key: str, value: Union["Section", str]) -> None:
|
||||
setattr(self, key, value)
|
||||
|
||||
|
||||
def __delitem__(self, key: str) -> None:
|
||||
delattr(self, key)
|
||||
def __delitem__(self, key: str) -> None:
|
||||
delattr(self, key)
|
||||
|
||||
|
||||
def __iter__(self) -> Generator[str, None, None]:
|
||||
for attr_name in self.order:
|
||||
yield attr_name
|
||||
def __iter__(self) -> Generator[str, None, None]:
|
||||
for attr_name in self.order:
|
||||
yield attr_name
|
||||
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self.order)
|
||||
def __len__(self) -> int:
|
||||
return len(self.order)
|
||||
|
||||
|
||||
def __eq__(self, obj: Any) -> bool:
|
||||
if not isinstance(obj, Section):
|
||||
return False
|
||||
def __eq__(self, obj: Any) -> bool:
|
||||
if not isinstance(obj, Section):
|
||||
return False
|
||||
|
||||
if self.globals.data != obj.globals.data or self.order != obj.order:
|
||||
return False
|
||||
if self.globals.data != obj.globals.data or self.order != obj.order:
|
||||
return False
|
||||
|
||||
return not any(self[attr] != obj[attr] for attr in self.order)
|
||||
return not any(self[attr] != obj[attr] for attr in self.order)
|
||||
|
||||
|
||||
def __repr__(self) -> str:
|
||||
name: str = type(self).__name__
|
||||
children: List[str] = []
|
||||
content: str = ""
|
||||
newline: bool = False
|
||||
def __repr__(self) -> str:
|
||||
name: str = type(self).__name__
|
||||
children: List[str] = []
|
||||
content: str = ""
|
||||
newline: bool = False
|
||||
|
||||
for attr_name in self.order:
|
||||
value = getattr(self, attr_name)
|
||||
for attr_name in self.order:
|
||||
value = getattr(self, attr_name)
|
||||
|
||||
if attr_name in self.sections:
|
||||
before = "\n" if children else ""
|
||||
newline = True
|
||||
if attr_name in self.sections:
|
||||
before = "\n" if children else ""
|
||||
newline = True
|
||||
|
||||
try:
|
||||
children.append(f"{before}{value!r},")
|
||||
except RecursionError as err:
|
||||
name = type(value).__name__
|
||||
children.append(f"{before}{name}(\n {err!r}\n),")
|
||||
pass
|
||||
try:
|
||||
children.append(f"{before}{value!r},")
|
||||
except RecursionError as err:
|
||||
name = type(value).__name__
|
||||
children.append(f"{before}{name}(\n {err!r}\n),")
|
||||
pass
|
||||
|
||||
elif attr_name in self.methods:
|
||||
before = "\n" if children else ""
|
||||
newline = True
|
||||
children.append(f"{before}def {value.__name__}(…),")
|
||||
elif attr_name in self.methods:
|
||||
before = "\n" if children else ""
|
||||
newline = True
|
||||
children.append(f"{before}def {value.__name__}(…),")
|
||||
|
||||
elif attr_name in self.properties:
|
||||
before = "\n" if newline else ""
|
||||
newline = False
|
||||
elif attr_name in self.properties:
|
||||
before = "\n" if newline else ""
|
||||
newline = False
|
||||
|
||||
try:
|
||||
children.append(f"{before}{attr_name} = {value!r},")
|
||||
except RecursionError as err:
|
||||
children.append(f"{before}{attr_name} = {err!r},")
|
||||
try:
|
||||
children.append(f"{before}{attr_name} = {value!r},")
|
||||
except RecursionError as err:
|
||||
children.append(f"{before}{attr_name} = {err!r},")
|
||||
|
||||
else:
|
||||
newline = False
|
||||
else:
|
||||
newline = False
|
||||
|
||||
if children:
|
||||
content = "\n%s\n" % textwrap.indent("\n".join(children), " " * 4)
|
||||
if children:
|
||||
content = "\n%s\n" % textwrap.indent("\n".join(children), " " * 4)
|
||||
|
||||
return f"{name}({content})"
|
||||
return f"{name}({content})"
|
||||
|
||||
|
||||
def children(self) -> Tuple[Tuple[str, Union["Section", Any]], ...]:
|
||||
"""Return pairs of (name, value) for child sections and properties."""
|
||||
return tuple((name, getattr(self, name)) for name in self)
|
||||
def children(self) -> Tuple[Tuple[str, Union["Section", Any]], ...]:
|
||||
"""Return pairs of (name, value) for child sections and properties."""
|
||||
return tuple((name, getattr(self, name)) for name in self)
|
||||
|
||||
|
||||
@classmethod
|
||||
def _register_set_attr(cls, name: str, add_to_set_name: str) -> None:
|
||||
cls.methods.discard(name)
|
||||
cls.properties.discard(name)
|
||||
cls.sections.discard(name)
|
||||
getattr(cls, add_to_set_name).add(name)
|
||||
cls.order[name] = None
|
||||
@classmethod
|
||||
def _register_set_attr(cls, name: str, add_to_set_name: str) -> None:
|
||||
cls.methods.discard(name)
|
||||
cls.properties.discard(name)
|
||||
cls.sections.discard(name)
|
||||
getattr(cls, add_to_set_name).add(name)
|
||||
cls.order[name] = None
|
||||
|
||||
for subclass in cls.__subclasses__():
|
||||
subclass._register_set_attr(name, add_to_set_name)
|
||||
for subclass in cls.__subclasses__():
|
||||
subclass._register_set_attr(name, add_to_set_name)
|
||||
|
||||
|
||||
def _set_section(self, section: "Section") -> None:
|
||||
name = type(section).__name__
|
||||
def _set_section(self, section: "Section") -> None:
|
||||
name = type(section).__name__
|
||||
|
||||
if hasattr(self, name) and name not in self.order:
|
||||
raise AttributeError(f"{name!r}: forbidden name")
|
||||
if hasattr(self, name) and name not in self.order:
|
||||
raise AttributeError(f"{name!r}: forbidden name")
|
||||
|
||||
if name in self.sections:
|
||||
self[name].deep_merge(section)
|
||||
return
|
||||
if name in self.sections:
|
||||
self[name].deep_merge(section)
|
||||
return
|
||||
|
||||
self._register_set_attr(name, "sections")
|
||||
setattr(type(self), name, section)
|
||||
self._register_set_attr(name, "sections")
|
||||
setattr(type(self), name, section)
|
||||
|
||||
|
||||
def _set_method(self, name: str, method: Callable) -> None:
|
||||
if hasattr(self, name) and name not in self.order:
|
||||
raise AttributeError(f"{name!r}: forbidden name")
|
||||
def _set_method(self, name: str, method: Callable) -> None:
|
||||
if hasattr(self, name) and name not in self.order:
|
||||
raise AttributeError(f"{name!r}: forbidden name")
|
||||
|
||||
self._register_set_attr(name, "methods")
|
||||
setattr(type(self), name, method)
|
||||
self._register_set_attr(name, "methods")
|
||||
setattr(type(self), name, method)
|
||||
|
||||
|
||||
def _set_property(
|
||||
self, name: str, annotation: str, expression: str,
|
||||
) -> None:
|
||||
if hasattr(self, name) and name not in self.order:
|
||||
raise AttributeError(f"{name!r}: forbidden name")
|
||||
def _set_property(
|
||||
self, name: str, annotation: str, expression: str,
|
||||
) -> None:
|
||||
if hasattr(self, name) and name not in self.order:
|
||||
raise AttributeError(f"{name!r}: forbidden name")
|
||||
|
||||
prop = Property(name, annotation, expression, self)
|
||||
self._register_set_attr(name, "properties")
|
||||
setattr(type(self), name, prop)
|
||||
prop = Property(name, annotation, expression, self)
|
||||
self._register_set_attr(name, "properties")
|
||||
setattr(type(self), name, prop)
|
||||
|
||||
|
||||
def deep_merge(self, section2: "Section") -> None:
|
||||
self.included += section2.included
|
||||
def deep_merge(self, section2: "Section") -> None:
|
||||
self.included += section2.included
|
||||
|
||||
for key in section2:
|
||||
if key in self.sections and key in section2.sections:
|
||||
self.globals.data.update(section2.globals.data)
|
||||
self[key].deep_merge(section2[key])
|
||||
for key in section2:
|
||||
if key in self.sections and key in section2.sections:
|
||||
self.globals.data.update(section2.globals.data)
|
||||
self[key].deep_merge(section2[key])
|
||||
|
||||
elif key in section2.sections:
|
||||
self.globals.data.update(section2.globals.data)
|
||||
new_type = type(key, (Section,), {})
|
||||
instance = new_type(
|
||||
source_path = self.source_path,
|
||||
root = self.root or self,
|
||||
parent = self,
|
||||
builtins_path = self.builtins_path,
|
||||
)
|
||||
self._set_section(instance)
|
||||
instance.deep_merge(section2[key])
|
||||
elif key in section2.sections:
|
||||
self.globals.data.update(section2.globals.data)
|
||||
new_type = type(key, (Section,), {})
|
||||
instance = new_type(
|
||||
source_path = self.source_path,
|
||||
root = self.root or self,
|
||||
parent = self,
|
||||
builtins_path = self.builtins_path,
|
||||
)
|
||||
self._set_section(instance)
|
||||
instance.deep_merge(section2[key])
|
||||
|
||||
elif key in section2.methods:
|
||||
self._set_method(key, section2[key])
|
||||
elif key in section2.methods:
|
||||
self._set_method(key, section2[key])
|
||||
|
||||
else:
|
||||
prop2 = getattr(type(section2), key)
|
||||
self._set_property(key, prop2.annotation, prop2.expression)
|
||||
else:
|
||||
prop2 = getattr(type(section2), key)
|
||||
self._set_property(key, prop2.annotation, prop2.expression)
|
||||
|
||||
|
||||
def include_file(self, path: Union[Path, str]) -> None:
|
||||
path = Path(path)
|
||||
def include_file(self, path: Union[Path, str]) -> None:
|
||||
path = Path(path)
|
||||
|
||||
if not path.is_absolute() and self.source_path:
|
||||
path = self.source_path.parent / path
|
||||
if not path.is_absolute() and self.source_path:
|
||||
path = self.source_path.parent / path
|
||||
|
||||
with suppress(ValueError):
|
||||
self.included.remove(path)
|
||||
with suppress(ValueError):
|
||||
self.included.remove(path)
|
||||
|
||||
self.included.append(path)
|
||||
self.deep_merge(Section.from_file(path))
|
||||
self.included.append(path)
|
||||
self.deep_merge(Section.from_file(path))
|
||||
|
||||
|
||||
def include_builtin(self, relative_path: Union[Path, str]) -> None:
|
||||
path = self.builtins_path / relative_path
|
||||
def include_builtin(self, relative_path: Union[Path, str]) -> None:
|
||||
path = self.builtins_path / relative_path
|
||||
|
||||
with suppress(ValueError):
|
||||
self.included.remove(path)
|
||||
with suppress(ValueError):
|
||||
self.included.remove(path)
|
||||
|
||||
self.included.append(path)
|
||||
self.deep_merge(Section.from_file(path))
|
||||
self.included.append(path)
|
||||
self.deep_merge(Section.from_file(path))
|
||||
|
||||
|
||||
def as_dict(self, _section: Optional["Section"] = None) -> Dict[str, Any]:
|
||||
dct = {}
|
||||
section = self if _section is None else _section
|
||||
def as_dict(self, _section: Optional["Section"] = None) -> Dict[str, Any]:
|
||||
dct = {}
|
||||
section = self if _section is None else _section
|
||||
|
||||
for key, value in section.items():
|
||||
if isinstance(value, Section):
|
||||
dct[key] = self.as_dict(value)
|
||||
else:
|
||||
dct[key] = value
|
||||
for key, value in section.items():
|
||||
if isinstance(value, Section):
|
||||
dct[key] = self.as_dict(value)
|
||||
else:
|
||||
dct[key] = value
|
||||
|
||||
return dct
|
||||
return dct
|
||||
|
||||
|
||||
def edits_as_dict(
|
||||
self, _section: Optional["Section"] = None,
|
||||
) -> Dict[str, Any]:
|
||||
def edits_as_dict(
|
||||
self, _section: Optional["Section"] = None,
|
||||
) -> Dict[str, Any]:
|
||||
|
||||
warning = (
|
||||
"This file is generated when settings are changed from the GUI, "
|
||||
"and properties in it override the ones in the corresponding "
|
||||
"PCN user config file. "
|
||||
"If a property is gets changed in the PCN file, any corresponding "
|
||||
"property override here is removed."
|
||||
)
|
||||
warning = (
|
||||
"This file is generated when settings are changed from the GUI, "
|
||||
"and properties in it override the ones in the corresponding "
|
||||
"PCN user config file. "
|
||||
"If a property is gets changed in the PCN file, any corresponding "
|
||||
"property override here is removed."
|
||||
)
|
||||
|
||||
if _section is None:
|
||||
section = self
|
||||
dct = {"__comment": warning, "set": section._edited.copy()}
|
||||
add_to = dct["set"]
|
||||
else:
|
||||
section = _section
|
||||
dct = {
|
||||
prop_name: (
|
||||
getattr(type(section), prop_name).expression,
|
||||
value_override,
|
||||
)
|
||||
for prop_name, value_override in section._edited.items()
|
||||
}
|
||||
add_to = dct
|
||||
|
||||
for name in section.sections:
|
||||
edits = section.edits_as_dict(section[name])
|
||||
|
||||
if edits:
|
||||
add_to[name] = edits # type: ignore
|
||||
|
||||
return dct
|
||||
|
||||
|
||||
def deep_merge_edits(
|
||||
self, edits: Dict[str, Any], has_expressions: bool = True,
|
||||
) -> bool:
|
||||
|
||||
changes = False
|
||||
|
||||
if not self.parent: # this is Root
|
||||
edits = edits.get("set", {})
|
||||
|
||||
for name, value in edits.copy().items():
|
||||
if isinstance(self.get(name), Section) and isinstance(value, dict):
|
||||
if self[name].deep_merge_edits(value, has_expressions):
|
||||
changes = True
|
||||
if _section is None:
|
||||
section = self
|
||||
dct = {"__comment": warning, "set": section._edited.copy()}
|
||||
add_to = dct["set"]
|
||||
else:
|
||||
section = _section
|
||||
dct = {
|
||||
prop_name: (
|
||||
getattr(type(section), prop_name).expression,
|
||||
value_override,
|
||||
)
|
||||
for prop_name, value_override in section._edited.items()
|
||||
}
|
||||
add_to = dct
|
||||
|
||||
for name in section.sections:
|
||||
edits = section.edits_as_dict(section[name])
|
||||
|
||||
if edits:
|
||||
add_to[name] = edits # type: ignore
|
||||
|
||||
return dct
|
||||
|
||||
|
||||
def deep_merge_edits(
|
||||
self, edits: Dict[str, Any], has_expressions: bool = True,
|
||||
) -> bool:
|
||||
|
||||
changes = False
|
||||
|
||||
if not self.parent: # this is Root
|
||||
edits = edits.get("set", {})
|
||||
|
||||
for name, value in edits.copy().items():
|
||||
if isinstance(self.get(name), Section) and isinstance(value, dict):
|
||||
if self[name].deep_merge_edits(value, has_expressions):
|
||||
changes = True
|
||||
|
||||
elif not has_expressions:
|
||||
self[name] = value
|
||||
elif not has_expressions:
|
||||
self[name] = value
|
||||
|
||||
elif isinstance(value, (tuple, list)):
|
||||
user_expression, gui_value = value
|
||||
elif isinstance(value, (tuple, list)):
|
||||
user_expression, gui_value = value
|
||||
|
||||
if not hasattr(type(self), name):
|
||||
self[name] = gui_value
|
||||
elif getattr(type(self), name).expression == user_expression:
|
||||
self[name] = gui_value
|
||||
else:
|
||||
# If user changed their config file, discard the GUI edit
|
||||
del edits[name]
|
||||
changes = True
|
||||
if not hasattr(type(self), name):
|
||||
self[name] = gui_value
|
||||
elif getattr(type(self), name).expression == user_expression:
|
||||
self[name] = gui_value
|
||||
else:
|
||||
# If user changed their config file, discard the GUI edit
|
||||
del edits[name]
|
||||
changes = True
|
||||
|
||||
return changes
|
||||
return changes
|
||||
|
||||
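# Illustrative shape of the edits dict that deep_merge_edits consumes; section
# and property names here are made up, not taken from the real config:
#
#     {
#         "__comment": "...",
#         "set": {
#             "Chat": {"font_size": ("16", 18)},   # (user expression, GUI value) pair
#         },
#     }
#
# When the user's PCN expression for a property no longer matches the stored
# one, the GUI override is discarded and True is returned so the caller knows
# the edits file needs rewriting.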
@property
|
||||
def all_includes(self) -> Generator[Path, None, None]:
|
||||
@property
|
||||
def all_includes(self) -> Generator[Path, None, None]:
|
||||
|
||||
yield from self.included
|
||||
yield from self.included
|
||||
|
||||
for sub in self.sections:
|
||||
yield from self[sub].all_includes
|
||||
for sub in self.sections:
|
||||
yield from self[sub].all_includes
|
||||
|
||||
@classmethod
|
||||
def from_source_code(
|
||||
cls,
|
||||
code: str,
|
||||
path: Optional[Path] = None,
|
||||
builtins: Optional[Path] = None,
|
||||
*,
|
||||
inherit: Tuple[Type["Section"], ...] = (),
|
||||
node: Union[None, red.RedBaron, red.ClassNode] = None,
|
||||
name: str = "Root",
|
||||
root: Optional["Section"] = None,
|
||||
parent: Optional["Section"] = None,
|
||||
) -> "Section":
|
||||
|
||||
builtins = builtins or BUILTINS_DIR
|
||||
section: Type["Section"] = type(name, inherit or (Section,), {})
|
||||
instance: Section = section(path, root, parent, builtins)
|
||||
|
||||
node = node or red.RedBaron(code)
|
||||
|
||||
for child in node.node_list:
|
||||
if isinstance(child, red.ClassNode):
|
||||
root_arg = instance if root is None else root
|
||||
child_inherit = []
|
||||
|
||||
for name in child.inherit_from.dumps().split(","):
|
||||
name = name.strip()
|
||||
|
||||
if name:
|
||||
child_inherit.append(type(attrgetter(name)(root_arg)))
|
||||
|
||||
instance._set_section(section.from_source_code(
|
||||
code = code,
|
||||
path = path,
|
||||
builtins = builtins,
|
||||
inherit = tuple(child_inherit),
|
||||
node = child,
|
||||
name = child.name,
|
||||
root = root_arg,
|
||||
parent = instance,
|
||||
))
|
||||
|
||||
elif isinstance(child, red.AssignmentNode):
|
||||
if isinstance(child.target, red.NameNode):
|
||||
name = child.target.value
|
||||
else:
|
||||
name = str(child.target.to_python())
|
||||
@classmethod
|
||||
def from_source_code(
|
||||
cls,
|
||||
code: str,
|
||||
path: Optional[Path] = None,
|
||||
builtins: Optional[Path] = None,
|
||||
*,
|
||||
inherit: Tuple[Type["Section"], ...] = (),
|
||||
node: Union[None, red.RedBaron, red.ClassNode] = None,
|
||||
name: str = "Root",
|
||||
root: Optional["Section"] = None,
|
||||
parent: Optional["Section"] = None,
|
||||
) -> "Section":
|
||||
|
||||
builtins = builtins or BUILTINS_DIR
|
||||
section: Type["Section"] = type(name, inherit or (Section,), {})
|
||||
instance: Section = section(path, root, parent, builtins)
|
||||
|
||||
node = node or red.RedBaron(code)
|
||||
|
||||
for child in node.node_list:
|
||||
if isinstance(child, red.ClassNode):
|
||||
root_arg = instance if root is None else root
|
||||
child_inherit = []
|
||||
|
||||
for name in child.inherit_from.dumps().split(","):
|
||||
name = name.strip()
|
||||
|
||||
if name:
|
||||
child_inherit.append(type(attrgetter(name)(root_arg)))
|
||||
|
||||
instance._set_section(section.from_source_code(
|
||||
code = code,
|
||||
path = path,
|
||||
builtins = builtins,
|
||||
inherit = tuple(child_inherit),
|
||||
node = child,
|
||||
name = child.name,
|
||||
root = root_arg,
|
||||
parent = instance,
|
||||
))
|
||||
|
||||
elif isinstance(child, red.AssignmentNode):
|
||||
if isinstance(child.target, red.NameNode):
|
||||
name = child.target.value
|
||||
else:
|
||||
name = str(child.target.to_python())
|
||||
|
||||
instance._set_property(
|
||||
name,
|
||||
child.annotation.dumps() if child.annotation else "",
|
||||
child.value.dumps(),
|
||||
)
|
||||
instance._set_property(
|
||||
name,
|
||||
child.annotation.dumps() if child.annotation else "",
|
||||
child.value.dumps(),
|
||||
)
|
||||
|
||||
else:
|
||||
env = instance.globals
|
||||
exec(child.dumps(), dict(env), env) # nosec
|
||||
else:
|
||||
env = instance.globals
|
||||
exec(child.dumps(), dict(env), env) # nosec
|
||||
|
||||
if isinstance(child, red.DefNode):
|
||||
instance._set_method(child.name, env[child.name])
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
@classmethod
|
||||
def from_file(
|
||||
cls, path: Union[str, Path], builtins: Union[str, Path] = BUILTINS_DIR,
|
||||
) -> "Section":
|
||||
|
||||
path = Path(re.sub(r"^qrc:/", "", str(path)))
|
||||
|
||||
try:
|
||||
content = pyotherside.qrc_get_file_contents(str(path)).decode()
|
||||
except ValueError: # App was compiled without QRC
|
||||
content = path.read_text()
|
||||
|
||||
return Section.from_source_code(content, path, Path(builtins))
|
||||
if isinstance(child, red.DefNode):
|
||||
instance._set_method(child.name, env[child.name])
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
@classmethod
|
||||
def from_file(
|
||||
cls, path: Union[str, Path], builtins: Union[str, Path] = BUILTINS_DIR,
|
||||
) -> "Section":
|
||||
|
||||
path = Path(re.sub(r"^qrc:/", "", str(path)))
|
||||
|
||||
try:
|
||||
content = pyotherside.qrc_get_file_contents(str(path)).decode()
|
||||
except ValueError: # App was compiled without QRC
|
||||
content = path.read_text()
|
||||
|
||||
return Section.from_source_code(content, path, Path(builtins))
|
||||
|
|
|
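For reference, a PCN file is plain Python syntax: class blocks become nested `Section`s (optionally inheriting other sections), annotated assignments become lazily evaluated `Property` expressions, and top-level `def`s become section methods. The snippet below is a made-up illustration, not part of the real builtin config, and it also happens to run as ordinary Python:

class General:
    zoom: float = 1.0                        # AssignmentNode -> Property("zoom", "float", "1.0")

class Chat(General):                         # ClassNode -> nested Section inheriting General
    font_scale: float = General.zoom * 1.2   # expression evaluated against the globals env

def greet(name):                             # DefNode -> registered as a Section method
    return f"hello {name}"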
@ -8,89 +8,89 @@ from typing import TYPE_CHECKING, Dict, Optional
|
|||
from .utils import AutoStrEnum, auto
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .models.items import Account, Member
|
||||
from .models.items import Account, Member
|
||||
|
||||
ORDER: Dict[str, int] = {
|
||||
"online": 0,
|
||||
"unavailable": 1,
|
||||
"invisible": 2,
|
||||
"offline": 3,
|
||||
"online": 0,
|
||||
"unavailable": 1,
|
||||
"invisible": 2,
|
||||
"offline": 3,
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class Presence:
|
||||
"""Represents a single matrix user's presence fields.
|
||||
"""Represents a single matrix user's presence fields.
|
||||
|
||||
These objects are stored in `Backend.presences`, indexed by user ID.
|
||||
It must only be instantiated when receiving a `PresenceEvent` or
|
||||
registering an `Account` model item.
|
||||
These objects are stored in `Backend.presences`, indexed by user ID.
|
||||
It must only be instantiated when receiving a `PresenceEvent` or
|
||||
registering an `Account` model item.
|
||||
|
||||
When receiving a `PresenceEvent`, we get or create a `Presence` object in
|
||||
`Backend.presences` for the targeted user. If the user is registered in any
|
||||
room, add its `Member` model item to `members`. Finally, update the
|
||||
presence fields of every `Member` inside `members`.
|
||||
When receiving a `PresenceEvent`, we get or create a `Presence` object in
|
||||
`Backend.presences` for the targeted user. If the user is registered in any
|
||||
room, add its `Member` model item to `members`. Finally, update the
|
||||
presence fields of every `Member` inside `members`.
|
||||
|
||||
When a room member is registered, we try to find a `Presence` in
|
||||
`Backend.presences` for that user ID. If found, the `Member` item is added
|
||||
to `members`.
|
||||
When a room member is registered, we try to find a `Presence` in
|
||||
`Backend.presences` for that user ID. If found, the `Member` item is added
|
||||
to `members`.
|
||||
|
||||
When an Account model is registered, we create a `Presence` in
|
||||
`Backend.presences` for the account's user ID whether the server supports
|
||||
presence or not (we cannot know yet at this point),
|
||||
and assign that `Account` to the `Presence.account` field.
|
||||
When an Account model is registered, we create a `Presence` in
|
||||
`Backend.presences` for the account's user ID whether the server supports
|
||||
presence or not (we cannot know yet at this point),
|
||||
and assign that `Account` to the `Presence.account` field.
|
||||
|
||||
Special attributes:
|
||||
members: A `{room_id: Member}` dict for storing room members related to
|
||||
this `Presence`. As each room has its own `Member` objects, we
|
||||
have to keep track of their presence fields. `Member`s are indexed
|
||||
by room ID.
|
||||
Special attributes:
|
||||
members: A `{room_id: Member}` dict for storing room members related to
|
||||
this `Presence`. As each room has its own `Member` objects, we
|
||||
have to keep track of their presence fields. `Member`s are indexed
|
||||
by room ID.
|
||||
|
||||
account: `Account` related to this `Presence`, if any. Should be
|
||||
assigned when client starts (`MatrixClient._start()`) and
|
||||
cleared when client stops (`MatrixClient._start()`).
|
||||
"""
|
||||
account: `Account` related to this `Presence`, if any. Should be
|
||||
assigned when client starts (`MatrixClient._start()`) and
|
||||
cleared when client stops (`MatrixClient._start()`).
|
||||
"""
|
||||
|
||||
class State(AutoStrEnum):
|
||||
offline = auto() # can mean offline, invisible or unknown
|
||||
unavailable = auto()
|
||||
online = auto()
|
||||
invisible = auto()
|
||||
class State(AutoStrEnum):
|
||||
offline = auto() # can mean offline, invisible or unknown
|
||||
unavailable = auto()
|
||||
online = auto()
|
||||
invisible = auto()
|
||||
|
||||
def __lt__(self, other: "Presence.State") -> bool:
|
||||
return ORDER[self.value] < ORDER[other.value]
|
||||
def __lt__(self, other: "Presence.State") -> bool:
|
||||
return ORDER[self.value] < ORDER[other.value]
|
||||
|
||||
presence: State = State.offline
|
||||
currently_active: bool = False
|
||||
last_active_at: datetime = datetime.fromtimestamp(0)
|
||||
status_msg: str = ""
|
||||
presence: State = State.offline
|
||||
currently_active: bool = False
|
||||
last_active_at: datetime = datetime.fromtimestamp(0)
|
||||
status_msg: str = ""
|
||||
|
||||
members: Dict[str, "Member"] = field(default_factory=dict)
|
||||
account: Optional["Account"] = None
|
||||
members: Dict[str, "Member"] = field(default_factory=dict)
|
||||
account: Optional["Account"] = None
|
||||
|
||||
|
||||
def update_members(self) -> None:
|
||||
"""Update presence fields of every `Member` in `members`.
|
||||
def update_members(self) -> None:
|
||||
"""Update presence fields of every `Member` in `members`.
|
||||
|
||||
Currently it is only called when receiving a `PresenceEvent` and when
|
||||
registering room members.
|
||||
"""
|
||||
Currently it is only called when receiving a `PresenceEvent` and when
|
||||
registering room members.
|
||||
"""
|
||||
|
||||
for member in self.members.values():
|
||||
member.set_fields(
|
||||
presence = self.presence,
|
||||
status_msg = self.status_msg,
|
||||
last_active_at = self.last_active_at,
|
||||
currently_active = self.currently_active,
|
||||
)
|
||||
for member in self.members.values():
|
||||
member.set_fields(
|
||||
presence = self.presence,
|
||||
status_msg = self.status_msg,
|
||||
last_active_at = self.last_active_at,
|
||||
currently_active = self.currently_active,
|
||||
)
|
||||
|
||||
def update_account(self) -> None:
|
||||
"""Update presence fields of `Account` related to this `Presence`."""
|
||||
def update_account(self) -> None:
|
||||
"""Update presence fields of `Account` related to this `Presence`."""
|
||||
|
||||
if self.account:
|
||||
self.account.set_fields(
|
||||
presence = self.presence,
|
||||
status_msg = self.status_msg,
|
||||
last_active_at = self.last_active_at,
|
||||
currently_active = self.currently_active,
|
||||
)
|
||||
if self.account:
|
||||
self.account.set_fields(
|
||||
presence = self.presence,
|
||||
status_msg = self.status_msg,
|
||||
last_active_at = self.last_active_at,
|
||||
currently_active = self.currently_active,
|
||||
)
|
||||
|
|
|
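A self-contained sketch of the flow described in the docstring, using heavily simplified stand-ins (`PresenceStub`, `MemberStub`, `presences`) instead of the real `Backend`, `Presence` and `Member` classes:

from dataclasses import dataclass, field
from typing import Dict

@dataclass
class MemberStub:
    presence: str = "offline"

@dataclass
class PresenceStub:
    presence: str = "offline"
    members: Dict[str, MemberStub] = field(default_factory=dict)

    def update_members(self) -> None:
        for member in self.members.values():
            member.presence = self.presence

presences: Dict[str, PresenceStub] = {}

def on_presence_event(user_id: str, new_state: str) -> None:
    presence = presences.setdefault(user_id, PresenceStub())  # get or create
    presence.presence = new_state
    presence.update_members()              # propagate to every room's Member item

def on_member_registered(user_id: str, room_id: str, member: MemberStub) -> None:
    if user_id in presences:               # reuse an existing Presence if there is one
        presences[user_id].members[room_id] = member
        presences[user_id].update_members()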
@ -10,117 +10,117 @@ import pyotherside
|
|||
from .utils import serialize_value_for_qml
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .models import SyncId
|
||||
from .user_files import UserFile
|
||||
from .models import SyncId
|
||||
from .user_files import UserFile
|
||||
|
||||
|
||||
@dataclass
|
||||
class PyOtherSideEvent:
|
||||
"""Event that will be sent on instanciation to QML by PyOtherSide."""
|
||||
"""Event that will be sent on instanciation to QML by PyOtherSide."""
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
# XXX: CPython 3.6 or any Python implementation >= 3.7 is required for
|
||||
# correct __dataclass_fields__ dict order.
|
||||
args = [
|
||||
serialize_value_for_qml(getattr(self, field))
|
||||
for field in self.__dataclass_fields__ # type: ignore
|
||||
if field != "callbacks"
|
||||
]
|
||||
pyotherside.send(type(self).__name__, *args)
|
||||
def __post_init__(self) -> None:
|
||||
# XXX: CPython 3.6 or any Python implementation >= 3.7 is required for
|
||||
# correct __dataclass_fields__ dict order.
|
||||
args = [
|
||||
serialize_value_for_qml(getattr(self, field))
|
||||
for field in self.__dataclass_fields__ # type: ignore
|
||||
if field != "callbacks"
|
||||
]
|
||||
pyotherside.send(type(self).__name__, *args)
|
||||
|
||||
|
||||
@dataclass
|
||||
class NotificationRequested(PyOtherSideEvent):
|
||||
"""Request a notification bubble, sound or window urgency hint.
|
||||
"""Request a notification bubble, sound or window urgency hint.
|
||||
|
||||
Urgency hints usually flash or highlight the program's icon in a taskbar,
|
||||
dock or panel.
|
||||
"""
|
||||
Urgency hints usually flash or highlight the program's icon in a taskbar,
|
||||
dock or panel.
|
||||
"""
|
||||
|
||||
id: str = field()
|
||||
critical: bool = False
|
||||
bubble: bool = False
|
||||
sound: bool = False
|
||||
urgency_hint: bool = False
|
||||
id: str = field()
|
||||
critical: bool = False
|
||||
bubble: bool = False
|
||||
sound: bool = False
|
||||
urgency_hint: bool = False
|
||||
|
||||
# Bubble parameters
|
||||
title: str = ""
|
||||
body: str = ""
|
||||
image: Union[Path, str] = ""
|
||||
# Bubble parameters
|
||||
title: str = ""
|
||||
body: str = ""
|
||||
image: Union[Path, str] = ""
|
||||
|
||||
|
||||
@dataclass
|
||||
class CoroutineDone(PyOtherSideEvent):
|
||||
"""Indicate that an asyncio coroutine finished."""
|
||||
"""Indicate that an asyncio coroutine finished."""
|
||||
|
||||
uuid: str = field()
|
||||
result: Any = None
|
||||
exception: Optional[Exception] = None
|
||||
traceback: Optional[str] = None
|
||||
uuid: str = field()
|
||||
result: Any = None
|
||||
exception: Optional[Exception] = None
|
||||
traceback: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class LoopException(PyOtherSideEvent):
|
||||
"""Indicate an uncaught exception occurance in the asyncio loop."""
|
||||
"""Indicate an uncaught exception occurance in the asyncio loop."""
|
||||
|
||||
message: str = field()
|
||||
exception: Optional[Exception] = field()
|
||||
traceback: Optional[str] = None
|
||||
message: str = field()
|
||||
exception: Optional[Exception] = field()
|
||||
traceback: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Pre070SettingsDetected(PyOtherSideEvent):
|
||||
"""Warn that a pre-0.7.0 settings.json file exists."""
|
||||
path: Path = field()
|
||||
"""Warn that a pre-0.7.0 settings.json file exists."""
|
||||
path: Path = field()
|
||||
|
||||
|
||||
@dataclass
|
||||
class UserFileChanged(PyOtherSideEvent):
|
||||
"""Indicate that a config or data file changed on disk."""
|
||||
"""Indicate that a config or data file changed on disk."""
|
||||
|
||||
type: Type["UserFile"] = field()
|
||||
new_data: Any = field()
|
||||
type: Type["UserFile"] = field()
|
||||
new_data: Any = field()
|
||||
|
||||
|
||||
@dataclass
|
||||
class ModelEvent(PyOtherSideEvent):
|
||||
"""Base class for model change events."""
|
||||
"""Base class for model change events."""
|
||||
|
||||
sync_id: "SyncId" = field()
|
||||
sync_id: "SyncId" = field()
|
||||
|
||||
|
||||
@dataclass
|
||||
class ModelItemSet(ModelEvent):
|
||||
"""Indicate `ModelItem` insert or field changes in a `Backend` `Model`."""
|
||||
"""Indicate `ModelItem` insert or field changes in a `Backend` `Model`."""
|
||||
|
||||
index_then: Optional[int] = field()
|
||||
index_now: int = field()
|
||||
fields: Dict[str, Any] = field()
|
||||
index_then: Optional[int] = field()
|
||||
index_now: int = field()
|
||||
fields: Dict[str, Any] = field()
|
||||
|
||||
|
||||
@dataclass
|
||||
class ModelItemDeleted(ModelEvent):
|
||||
"""Indicate the removal of a `ModelItem` from a `Backend` `Model`."""
|
||||
"""Indicate the removal of a `ModelItem` from a `Backend` `Model`."""
|
||||
|
||||
index: int = field()
|
||||
count: int = 1
|
||||
ids: Sequence[Any] = ()
|
||||
index: int = field()
|
||||
count: int = 1
|
||||
ids: Sequence[Any] = ()
|
||||
|
||||
|
||||
@dataclass
|
||||
class ModelCleared(ModelEvent):
|
||||
"""Indicate that a `Backend` `Model` was cleared."""
|
||||
"""Indicate that a `Backend` `Model` was cleared."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class DevicesUpdated(PyOtherSideEvent):
|
||||
"""Indicate changes in devices for us or users we share a room with."""
|
||||
"""Indicate changes in devices for us or users we share a room with."""
|
||||
|
||||
our_user_id: str = field()
|
||||
our_user_id: str = field()
|
||||
|
||||
|
||||
@dataclass
|
||||
class InvalidAccessToken(PyOtherSideEvent):
|
||||
"""Indicate one of our account's access token is invalid or revoked."""
|
||||
"""Indicate one of our account's access token is invalid or revoked."""
|
||||
|
||||
user_id: str = field()
|
||||
user_id: str = field()
|
||||
|
|
|
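Stripped of serialization, the `__post_init__` mechanism above means every event dataclass broadcasts its own fields, in declaration order, the moment it is instantiated. A standalone sketch with `pyotherside` stubbed out so it runs outside the Qt app:

from dataclasses import dataclass, field

class _FakePyOtherSide:
    @staticmethod
    def send(event_name, *args):
        print(event_name, args)

pyotherside = _FakePyOtherSide()  # stub; the real module only exists inside the app

@dataclass
class PyOtherSideEvent:
    def __post_init__(self) -> None:
        # Field order is preserved (Python >= 3.7), so QML gets positional
        # arguments in declaration order.
        args = [getattr(self, name) for name in self.__dataclass_fields__]
        pyotherside.send(type(self).__name__, *args)

@dataclass
class CoroutineDone(PyOtherSideEvent):
    uuid: str = field()
    result: object = None

CoroutineDone("abc-123")  # prints: CoroutineDone ('abc-123', None)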
@ -29,143 +29,143 @@ from .pyotherside_events import CoroutineDone, LoopException
|
|||
|
||||
|
||||
class QMLBridge:
|
||||
"""Setup asyncio and provide methods to call coroutines from QML.
|
||||
"""Setup asyncio and provide methods to call coroutines from QML.
|
||||
|
||||
A thread is created to run the asyncio loop in, to ensure all calls from
|
||||
QML return instantly.
|
||||
Synchronous methods are provided for QML to call coroutines using
|
||||
PyOtherSide, which doesn't have this ability out of the box.
|
||||
A thread is created to run the asyncio loop in, to ensure all calls from
|
||||
QML return instantly.
|
||||
Synchronous methods are provided for QML to call coroutines using
|
||||
PyOtherSide, which doesn't have this ability out of the box.
|
||||
|
||||
Attributes:
|
||||
backend: The `backend.Backend` object containing general coroutines
|
||||
for QML and that manages `MatrixClient` objects.
|
||||
"""
|
||||
Attributes:
|
||||
backend: The `backend.Backend` object containing general coroutines
|
||||
for QML and that manages `MatrixClient` objects.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
try:
|
||||
self._loop = asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
self._loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self._loop)
|
||||
self._loop.set_exception_handler(self._loop_exception_handler)
|
||||
def __init__(self) -> None:
|
||||
try:
|
||||
self._loop = asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
self._loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self._loop)
|
||||
self._loop.set_exception_handler(self._loop_exception_handler)
|
||||
|
||||
from .backend import Backend
|
||||
self.backend: Backend = Backend()
|
||||
from .backend import Backend
|
||||
self.backend: Backend = Backend()
|
||||
|
||||
self._running_futures: Dict[str, Future] = {}
|
||||
self._cancelled_early: Set[str] = set()
|
||||
self._running_futures: Dict[str, Future] = {}
|
||||
self._cancelled_early: Set[str] = set()
|
||||
|
||||
Thread(target=self._start_asyncio_loop).start()
|
||||
Thread(target=self._start_asyncio_loop).start()
|
||||
|
||||
|
||||
def _loop_exception_handler(
|
||||
self, loop: asyncio.AbstractEventLoop, context: dict,
|
||||
) -> None:
|
||||
if "exception" in context:
|
||||
err = context["exception"]
|
||||
trace = "".join(
|
||||
traceback.format_exception(type(err), err, err.__traceback__),
|
||||
)
|
||||
LoopException(context["message"], err, trace)
|
||||
def _loop_exception_handler(
|
||||
self, loop: asyncio.AbstractEventLoop, context: dict,
|
||||
) -> None:
|
||||
if "exception" in context:
|
||||
err = context["exception"]
|
||||
trace = "".join(
|
||||
traceback.format_exception(type(err), err, err.__traceback__),
|
||||
)
|
||||
LoopException(context["message"], err, trace)
|
||||
|
||||
loop.default_exception_handler(context)
|
||||
loop.default_exception_handler(context)
|
||||
|
||||
|
||||
def _start_asyncio_loop(self) -> None:
|
||||
asyncio.set_event_loop(self._loop)
|
||||
self._loop.run_forever()
|
||||
def _start_asyncio_loop(self) -> None:
|
||||
asyncio.set_event_loop(self._loop)
|
||||
self._loop.run_forever()
|
||||
|
||||
|
||||
def _call_coro(self, coro: Coroutine, uuid: str) -> None:
|
||||
"""Schedule a coroutine to run in our thread and return a `Future`."""
|
||||
def _call_coro(self, coro: Coroutine, uuid: str) -> None:
|
||||
"""Schedule a coroutine to run in our thread and return a `Future`."""
|
||||
|
||||
if uuid in self._cancelled_early:
|
||||
self._cancelled_early.remove(uuid)
|
||||
return
|
||||
if uuid in self._cancelled_early:
|
||||
self._cancelled_early.remove(uuid)
|
||||
return
|
||||
|
||||
def on_done(future: Future) -> None:
|
||||
"""Send a PyOtherSide event with the coro's result/exception."""
|
||||
result = exception = trace = None
|
||||
def on_done(future: Future) -> None:
|
||||
"""Send a PyOtherSide event with the coro's result/exception."""
|
||||
result = exception = trace = None
|
||||
|
||||
try:
|
||||
result = future.result()
|
||||
except Exception as err: # noqa
|
||||
exception = err
|
||||
trace = traceback.format_exc().rstrip()
|
||||
try:
|
||||
result = future.result()
|
||||
except Exception as err: # noqa
|
||||
exception = err
|
||||
trace = traceback.format_exc().rstrip()
|
||||
|
||||
CoroutineDone(uuid, result, exception, trace)
|
||||
del self._running_futures[uuid]
|
||||
CoroutineDone(uuid, result, exception, trace)
|
||||
del self._running_futures[uuid]
|
||||
|
||||
future = asyncio.run_coroutine_threadsafe(coro, self._loop)
|
||||
self._running_futures[uuid] = future
|
||||
future.add_done_callback(on_done)
|
||||
future = asyncio.run_coroutine_threadsafe(coro, self._loop)
|
||||
self._running_futures[uuid] = future
|
||||
future.add_done_callback(on_done)
|
||||
|
||||
|
||||
def call_backend_coro(
|
||||
self, name: str, uuid: str, args: Sequence[str] = (),
|
||||
) -> None:
|
||||
"""Schedule a coroutine from the `QMLBridge.backend` object."""
|
||||
def call_backend_coro(
|
||||
self, name: str, uuid: str, args: Sequence[str] = (),
|
||||
) -> None:
|
||||
"""Schedule a coroutine from the `QMLBridge.backend` object."""
|
||||
|
||||
if uuid in self._cancelled_early:
|
||||
self._cancelled_early.remove(uuid)
|
||||
else:
|
||||
self._call_coro(attrgetter(name)(self.backend)(*args), uuid)
|
||||
if uuid in self._cancelled_early:
|
||||
self._cancelled_early.remove(uuid)
|
||||
else:
|
||||
self._call_coro(attrgetter(name)(self.backend)(*args), uuid)
|
||||
|
||||
|
||||
def call_client_coro(
|
||||
self, user_id: str, name: str, uuid: str, args: Sequence[str] = (),
|
||||
) -> None:
|
||||
"""Schedule a coroutine from a `QMLBridge.backend.clients` client."""
|
||||
def call_client_coro(
|
||||
self, user_id: str, name: str, uuid: str, args: Sequence[str] = (),
|
||||
) -> None:
|
||||
"""Schedule a coroutine from a `QMLBridge.backend.clients` client."""
|
||||
|
||||
if uuid in self._cancelled_early:
|
||||
self._cancelled_early.remove(uuid)
|
||||
else:
|
||||
client = self.backend.clients[user_id]
|
||||
self._call_coro(attrgetter(name)(client)(*args), uuid)
|
||||
if uuid in self._cancelled_early:
|
||||
self._cancelled_early.remove(uuid)
|
||||
else:
|
||||
client = self.backend.clients[user_id]
|
||||
self._call_coro(attrgetter(name)(client)(*args), uuid)
|
||||
|
||||
|
||||
def cancel_coro(self, uuid: str) -> None:
|
||||
"""Cancel a couroutine scheduled by the `QMLBridge` methods."""
|
||||
def cancel_coro(self, uuid: str) -> None:
|
||||
"""Cancel a couroutine scheduled by the `QMLBridge` methods."""
|
||||
|
||||
if uuid in self._running_futures:
|
||||
self._running_futures[uuid].cancel()
|
||||
else:
|
||||
self._cancelled_early.add(uuid)
|
||||
if uuid in self._running_futures:
|
||||
self._running_futures[uuid].cancel()
|
||||
else:
|
||||
self._cancelled_early.add(uuid)
|
||||
|
||||
|
||||
def pdb(self, extra_data: Sequence = (), remote: bool = False) -> None:
|
||||
"""Call the python debugger, defining some conveniance variables."""
|
||||
def pdb(self, extra_data: Sequence = (), remote: bool = False) -> None:
|
||||
"""Call the python debugger, defining some conveniance variables."""
|
||||
|
||||
ad = extra_data # noqa
|
||||
ba = self.backend # noqa
|
||||
mo = self.backend.models # noqa
|
||||
cl = self.backend.clients
|
||||
gcl = lambda user: cl[f"@{user}"] # noqa
|
||||
ad = extra_data # noqa
|
||||
ba = self.backend # noqa
|
||||
mo = self.backend.models # noqa
|
||||
cl = self.backend.clients
|
||||
gcl = lambda user: cl[f"@{user}"] # noqa
|
||||
|
||||
rc = lambda c: asyncio.run_coroutine_threadsafe(c, self._loop) # noqa
|
||||
rc = lambda c: asyncio.run_coroutine_threadsafe(c, self._loop) # noqa
|
||||
|
||||
try:
|
||||
from devtools import debug # noqa
|
||||
d = debug # noqa
|
||||
except ModuleNotFoundError:
|
||||
log.warning("Module python-devtools not found, can't use debug()")
|
||||
try:
|
||||
from devtools import debug # noqa
|
||||
d = debug # noqa
|
||||
except ModuleNotFoundError:
|
||||
log.warning("Module python-devtools not found, can't use debug()")
|
||||
|
||||
if remote:
|
||||
# Run `socat readline tcp:127.0.0.1:4444` in a terminal to connect
|
||||
import remote_pdb
|
||||
remote_pdb.RemotePdb("127.0.0.1", 4444).set_trace()
|
||||
else:
|
||||
import pdb
|
||||
pdb.set_trace()
|
||||
if remote:
|
||||
# Run `socat readline tcp:127.0.0.1:4444` in a terminal to connect
|
||||
import remote_pdb
|
||||
remote_pdb.RemotePdb("127.0.0.1", 4444).set_trace()
|
||||
else:
|
||||
import pdb
|
||||
pdb.set_trace()
|
||||
|
||||
|
||||
def exit(self) -> None:
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(
|
||||
self.backend.terminate_clients(), self._loop,
|
||||
).result()
|
||||
except Exception as e: # noqa
|
||||
print(e)
|
||||
def exit(self) -> None:
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(
|
||||
self.backend.terminate_clients(), self._loop,
|
||||
).result()
|
||||
except Exception as e: # noqa
|
||||
print(e)
|
||||
|
||||
|
||||
# The AppImage AppRun script overwrites some environment path variables to
|
||||
|
@ -174,8 +174,8 @@ class QMLBridge:
|
|||
# to prevent problems like QML Qt.openUrlExternally() failing because
|
||||
# the external launched program is affected by our AppImage-specific variables.
|
||||
for var in ("LD_LIBRARY_PATH", "PYTHONHOME", "PYTHONUSERBASE"):
|
||||
if f"RESTORE_{var}" in os.environ:
|
||||
os.environ[var] = os.environ[f"RESTORE_{var}"]
|
||||
if f"RESTORE_{var}" in os.environ:
|
||||
os.environ[var] = os.environ[f"RESTORE_{var}"]
|
||||
|
||||
|
||||
BRIDGE = QMLBridge()
|
||||
|
|
|
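The core pattern `QMLBridge._call_coro` relies on, reduced to a standalone sketch (the `add` coroutine and the prints are illustrative only): an asyncio loop running in a worker thread, coroutines scheduled onto it from the caller's thread, and a done callback reporting the outcome.

import asyncio
import time
from threading import Thread

loop = asyncio.new_event_loop()
Thread(target=loop.run_forever, daemon=True).start()  # the loop lives in a worker thread

async def add(a: int, b: int) -> int:
    await asyncio.sleep(0.1)
    return a + b

def on_done(future) -> None:
    # In the real bridge, this is where a CoroutineDone event is sent to QML
    print("result:", future.result())

future = asyncio.run_coroutine_threadsafe(add(1, 2), loop)  # returns immediately
future.add_done_callback(on_done)
time.sleep(0.5)  # demo only: give the callback time to fire before exiting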
@ -9,99 +9,99 @@ from . import __display_name__
|
|||
|
||||
_SUCCESS_HTML_PAGE = """<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>""" + __display_name__ + """</title>
|
||||
<meta charset="utf-8">
|
||||
<style>
|
||||
body { background: hsl(0, 0%, 90%); }
|
||||
<head>
|
||||
<title>""" + __display_name__ + """</title>
|
||||
<meta charset="utf-8">
|
||||
<style>
|
||||
body { background: hsl(0, 0%, 90%); }
|
||||
|
||||
@keyframes appear {
|
||||
0% { transform: scale(0); }
|
||||
45% { transform: scale(0); }
|
||||
80% { transform: scale(1.6); }
|
||||
100% { transform: scale(1); }
|
||||
}
|
||||
@keyframes appear {
|
||||
0% { transform: scale(0); }
|
||||
45% { transform: scale(0); }
|
||||
80% { transform: scale(1.6); }
|
||||
100% { transform: scale(1); }
|
||||
}
|
||||
|
||||
.circle {
|
||||
width: 90px;
|
||||
height: 90px;
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
margin: -45px 0 0 -45px;
|
||||
border-radius: 50%;
|
||||
font-size: 60px;
|
||||
line-height: 90px;
|
||||
text-align: center;
|
||||
background: hsl(203, 51%, 15%);
|
||||
color: hsl(162, 56%, 42%, 1);
|
||||
animation: appear 0.4s linear;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
.circle {
|
||||
width: 90px;
|
||||
height: 90px;
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
margin: -45px 0 0 -45px;
|
||||
border-radius: 50%;
|
||||
font-size: 60px;
|
||||
line-height: 90px;
|
||||
text-align: center;
|
||||
background: hsl(203, 51%, 15%);
|
||||
color: hsl(162, 56%, 42%, 1);
|
||||
animation: appear 0.4s linear;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body><div class="circle">✓</div></body>
|
||||
<body><div class="circle">✓</div></body>
|
||||
</html>"""
|
||||
|
||||
|
||||
class _SSORequestHandler(BaseHTTPRequestHandler):
|
||||
def do_GET(self) -> None:
|
||||
self.server: "SSOServer"
|
||||
def do_GET(self) -> None:
|
||||
self.server: "SSOServer"
|
||||
|
||||
redirect = "%s/_matrix/client/r0/login/sso/redirect?redirectUrl=%s" % (
|
||||
self.server.for_homeserver,
|
||||
quote(self.server.url_to_open),
|
||||
)
|
||||
redirect = "%s/_matrix/client/r0/login/sso/redirect?redirectUrl=%s" % (
|
||||
self.server.for_homeserver,
|
||||
quote(self.server.url_to_open),
|
||||
)
|
||||
|
||||
parameters = parse_qs(urlparse(self.path).query)
|
||||
parameters = parse_qs(urlparse(self.path).query)
|
||||
|
||||
if "loginToken" in parameters:
|
||||
self.server._token = parameters["loginToken"][0]
|
||||
self.send_response(200) # OK
|
||||
self.send_header("Content-type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(_SUCCESS_HTML_PAGE.encode())
|
||||
else:
|
||||
self.send_response(308) # Permanent redirect, same method only
|
||||
self.send_header("Location", redirect)
|
||||
self.end_headers()
|
||||
if "loginToken" in parameters:
|
||||
self.server._token = parameters["loginToken"][0]
|
||||
self.send_response(200) # OK
|
||||
self.send_header("Content-type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(_SUCCESS_HTML_PAGE.encode())
|
||||
else:
|
||||
self.send_response(308) # Permanent redirect, same method only
|
||||
self.send_header("Location", redirect)
|
||||
self.end_headers()
|
||||
|
||||
self.close_connection = True
|
||||
self.close_connection = True
|
||||
|
||||
|
||||
class SSOServer(HTTPServer):
|
||||
"""Local HTTP server to retrieve a SSO login token.
|
||||
"""Local HTTP server to retrieve a SSO login token.
|
||||
|
||||
Call `SSOServer.wait_for_token()` in a background task to start waiting
|
||||
for an SSO login token from the Matrix homeserver.
|
||||
Call `SSOServer.wait_for_token()` in a background task to start waiting
|
||||
for an SSO login token from the Matrix homeserver.
|
||||
|
||||
Once the task is running, the user must open `SSOServer.url_to_open` in
|
||||
their browser, where they will be able to complete the login process.
|
||||
Once they are done, the homeserver will call us back with a login token
|
||||
and the `SSOServer.wait_for_token()` task will return.
|
||||
"""
|
||||
Once the task is running, the user must open `SSOServer.url_to_open` in
|
||||
their browser, where they will be able to complete the login process.
|
||||
Once they are done, the homeserver will call us back with a login token
|
||||
and the `SSOServer.wait_for_token()` task will return.
|
||||
"""
|
||||
|
||||
def __init__(self, for_homeserver: str) -> None:
|
||||
self.for_homeserver: str = for_homeserver
|
||||
self._token: str = ""
|
||||
def __init__(self, for_homeserver: str) -> None:
|
||||
self.for_homeserver: str = for_homeserver
|
||||
self._token: str = ""
|
||||
|
||||
# Pick the first available port
|
||||
super().__init__(("127.0.0.1", 0), _SSORequestHandler)
|
||||
# Pick the first available port
|
||||
super().__init__(("127.0.0.1", 0), _SSORequestHandler)
|
||||
|
||||
|
||||
@property
|
||||
def url_to_open(self) -> str:
|
||||
"""URL for the user to open in their browser, to do the SSO process."""
|
||||
@property
|
||||
def url_to_open(self) -> str:
|
||||
"""URL for the user to open in their browser, to do the SSO process."""
|
||||
|
||||
return f"http://{self.server_address[0]}:{self.server_port}"
|
||||
return f"http://{self.server_address[0]}:{self.server_port}"
|
||||
|
||||
|
||||
async def wait_for_token(self) -> str:
|
||||
"""Wait until the homeserver gives us a login token and return it."""
|
||||
async def wait_for_token(self) -> str:
|
||||
"""Wait until the homeserver gives us a login token and return it."""
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
while not self._token:
|
||||
await loop.run_in_executor(None, self.handle_request)
|
||||
while not self._token:
|
||||
await loop.run_in_executor(None, self.handle_request)
|
||||
|
||||
return self._token
|
||||
return self._token
|
||||
|
|
|
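The server above combines two small tricks: binding to port 0 so the OS picks the first free port, and serving one request at a time from the asyncio loop through `run_in_executor` so the loop never blocks. A reduced standalone sketch, with the token parsing omitted and made-up handler names:

import asyncio
from http.server import BaseHTTPRequestHandler, HTTPServer

class Handler(BaseHTTPRequestHandler):
    def do_GET(self) -> None:
        self.server.got_request = True   # stand-in for capturing ?loginToken=...
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b"ok")

async def wait_for_one_request() -> None:
    server = HTTPServer(("127.0.0.1", 0), Handler)  # port 0: first free port
    server.got_request = False
    print("open http://%s:%d in a browser" % (server.server_address[0], server.server_port))

    loop = asyncio.get_event_loop()
    while not server.got_request:
        # handle_request() blocks, so run it in the default thread pool executor
        await loop.run_in_executor(None, server.handle_request)

# asyncio.run(wait_for_one_request())  # uncomment to try it; Ctrl+C to stop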
@@ -11,77 +11,77 @@ import re
from typing import Generator

PROPERTY_TYPES = {"bool", "double", "int", "list", "real", "string", "url",
                  "var", "date", "point", "rect", "size", "color"}


def _add_property(line: str) -> str:
    """Return a QML property declaration line from a QPL property line."""

    if re.match(r"^\s*[a-zA-Z\d_]+\s*:$", line):
        return re.sub(r"^(\s*)(\S*\s*):$",
                      r"\1readonly property QtObject \2: QtObject",
                      line)

    types = "|".join(PROPERTY_TYPES)
    if re.match(fr"^\s*({types}) [a-zA-Z\d_]+\s*:", line):
        return re.sub(r"^(\s*)(\S*)", r"\1property \2", line)

    return line


def _process_lines(content: str) -> Generator[str, None, None]:
    """Yield lines of real QML from lines of QPL."""

    skip = False
    indent = " " * 4
    current_indent = 0

    for line in content.split("\n"):
        line = line.rstrip()

        if not line.strip() or line.strip().startswith("//"):
            continue

        start_space_list = re.findall(r"^ +", line)
        start_space = start_space_list[0] if start_space_list else ""

        line_indents = len(re.findall(indent, start_space))

        if not skip:
            if line_indents > current_indent:
                yield "%s{" % (indent * current_indent)
                current_indent = line_indents

            while line_indents < current_indent:
                current_indent -= 1
                yield "%s}" % (indent * current_indent)

            line = _add_property(line)

        yield line

        skip = any((line.endswith(e) for e in "([{+\\,?:"))

    while current_indent:
        current_indent -= 1
        yield "%s}" % (indent * current_indent)


def convert_to_qml(theme_content: str) -> str:
    """Return valid QML code with imports from QPL content."""

    theme_content = theme_content.replace("\t", " ")

    lines = [
        "import QtQuick 2.12",
        'import "../Base"',
        "QtObject {",
        "    function hsluv(h, s, l, a) { return utils.hsluv(h, s, l, a) }",
        "    function hsl(h, s, l) { return utils.hsl(h, s, l) }",
        "    function hsla(h, s, l, a) { return utils.hsla(h, s, l, a) }",
        "    id: theme",
    ]
    lines += [f"    {line}" for line in _process_lines(theme_content)]
    lines += ["}"]

    return "\n".join(lines)
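To make the QPL-to-QML translation concrete, a small sketch of what the converter produces for a two-line theme snippet (output shown in comments; the exact whitespace of the generated QML is cosmetic and the snippet itself is made up):

# Hedged illustration, not part of the diff.
qpl = """
colors:
    string background: "black"
"""

print(convert_to_qml(qpl))
# Roughly:
#   import QtQuick 2.12
#   import "../Base"
#   QtObject {
#       ...helper functions and id: theme...
#       readonly property QtObject colors: QtObject
#       {
#           property string background: "black"
#       }
#   }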
@@ -12,7 +12,7 @@ from collections.abc import MutableMapping
from dataclasses import dataclass, field
from pathlib import Path
from typing import (
    TYPE_CHECKING, Any, ClassVar, Dict, Iterator, Optional, Tuple,
)

import pyotherside
@@ -20,521 +20,521 @@ from watchgod import Change, awatch
from .pcn.section import Section
from .pyotherside_events import (
    LoopException, Pre070SettingsDetected, UserFileChanged,
)
from .theme_parser import convert_to_qml
from .utils import (
    aiopen, atomic_write, deep_serialize_for_qml, dict_update_recursive,
    flatten_dict_keys,
)

if TYPE_CHECKING:
    from .backend import Backend


@dataclass
class UserFile:
    """Base class representing a user config or data file."""

    create_missing: ClassVar[bool] = True

    backend: "Backend" = field(repr=False)
    filename: str = field()
    parent: Optional["UserFile"] = None
    children: Dict[Path, "UserFile"] = field(default_factory=dict)

    data: Any = field(init=False, default_factory=dict)
    _need_write: bool = field(init=False, default=False)
    _mtime: Optional[float] = field(init=False, default=None)

    _reader: Optional[asyncio.Future] = field(init=False, default=None)
    _writer: Optional[asyncio.Future] = field(init=False, default=None)

    def __post_init__(self) -> None:
        self.data = self.default_data
        self._need_write = self.create_missing

        if self.path.exists():
            try:
                text = self.path.read_text()
                self.data, self._need_write = self.deserialized(text)
            except Exception as err:  # noqa
                LoopException(str(err), err, traceback.format_exc().rstrip())

        self._reader = asyncio.ensure_future(self._start_reader())
        self._writer = asyncio.ensure_future(self._start_writer())

    @property
    def path(self) -> Path:
        """Full path of the file to read, can exist or not exist."""
        raise NotImplementedError()

    @property
    def write_path(self) -> Path:
        """Full path of the file to write, can exist or not exist."""
        return self.path

    @property
    def default_data(self) -> Any:
        """Default deserialized content to use if the file doesn't exist."""
        raise NotImplementedError()

    @property
    def qml_data(self) -> Any:
        """Data converted for usage in QML."""
        return self.data

    def deserialized(self, data: str) -> Tuple[Any, bool]:
        """Return parsed data from file text and whether to call `save()`."""
        return (data, False)

    def serialized(self) -> str:
        """Return text from `UserFile.data` that can be written to disk."""
        raise NotImplementedError()

    def save(self) -> None:
        """Inform the disk writer coroutine that the data has changed."""
        self._need_write = True

    def stop_watching(self) -> None:
        """Stop watching the on-disk file for changes."""
        if self._reader:
            self._reader.cancel()

        if self._writer:
            self._writer.cancel()

        for child in self.children.values():
            child.stop_watching()

    async def set_data(self, data: Any) -> None:
        """Set `data` and call `save()`, convenience method for QML."""
        self.data = data
        self.save()

    async def update_from_file(self) -> None:
        """Read file at `path`, update `data` and call `save()` if needed."""

        if not self.path.exists():
            self.data = self.default_data
            self._need_write = self.create_missing
            return

        async with aiopen(self.path) as file:
            self.data, self._need_write = self.deserialized(await file.read())

    async def _start_reader(self) -> None:
        """Disk reader coroutine, watches for file changes to update `data`."""

        while not self.path.exists():
            await asyncio.sleep(1)

        async for changes in awatch(self.path):
            try:
                ignored = 0

                for change in changes:
                    if change[0] in (Change.added, Change.modified):
                        mtime = self.path.stat().st_mtime

                        if mtime == self._mtime:
                            ignored += 1
                            continue

                        await self.update_from_file()
                        self._mtime = mtime

                    elif change[0] == Change.deleted:
                        self._mtime = None
                        self.data = self.default_data
                        self._need_write = self.create_missing

                if changes and ignored < len(changes):
                    UserFileChanged(type(self), self.qml_data)

                    parent = self.parent
                    while parent:
                        await parent.update_from_file()
                        UserFileChanged(type(parent), parent.qml_data)
                        parent = parent.parent

                while not self.path.exists():
                    # Prevent error spam after file gets deleted
                    await asyncio.sleep(0.5)

            except Exception as err:  # noqa
                LoopException(str(err), err, traceback.format_exc().rstrip())

    async def _start_writer(self) -> None:
        """Disk writer coroutine, update the file with a 1 second cooldown."""

        if self.write_path.parts[0] == "qrc:":
            return

        self.write_path.parent.mkdir(parents=True, exist_ok=True)

        while True:
            await asyncio.sleep(1)

            try:
                if self._need_write:
                    async with atomic_write(self.write_path) as (new, done):
                        await new.write(self.serialized())
                        done()

                    self._need_write = False
                    self._mtime = self.write_path.stat().st_mtime

            except Exception as err:  # noqa
                self._need_write = False
                LoopException(str(err), err, traceback.format_exc().rstrip())
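Not from the diff: a minimal sketch of what a concrete subclass has to provide. The class, filename and path below are made up for illustration; real subclasses further down also mix in ConfigFile/UserDataFile for the path and JSONFile/PCNFile for the format.

# Hypothetical example only: the smallest useful UserFile subclass.
@dataclass
class ExampleNotes(UserFile):
    filename: str = "notes.txt"

    @property
    def path(self) -> Path:
        # Where to read the file from (illustrative location)
        return Path(self.backend.appdirs.user_data_dir) / self.filename

    @property
    def default_data(self) -> str:
        return ""  # used when the file doesn't exist yet

    def serialized(self) -> str:
        return self.data  # plain text written back by the writer coroutine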

@dataclass
class ConfigFile(UserFile):
    """A file that goes in the configuration directory, e.g. ~/.config/app."""

    @property
    def path(self) -> Path:
        return Path(
            os.environ.get("MOMENT_CONFIG_DIR") or
            self.backend.appdirs.user_config_dir,
        ) / self.filename


@dataclass
class UserDataFile(UserFile):
    """A file that goes in the user data directory, e.g. ~/.local/share/app."""

    @property
    def path(self) -> Path:
        return Path(
            os.environ.get("MOMENT_DATA_DIR") or
            self.backend.appdirs.user_data_dir,
        ) / self.filename


@dataclass
class MappingFile(MutableMapping, UserFile):
    """A file manipulable like a dict. `data` must be a mutable mapping."""

    def __getitem__(self, key: Any) -> Any:
        return self.data[key]

    def __setitem__(self, key: Any, value: Any) -> None:
        self.data[key] = value

    def __delitem__(self, key: Any) -> None:
        del self.data[key]

    def __iter__(self) -> Iterator:
        return iter(self.data)

    def __len__(self) -> int:
        return len(self.data)

    def __getattr__(self, key: Any) -> Any:
        try:
            return self.data[key]
        except KeyError:
            return super().__getattribute__(key)

    def __setattr__(self, key: Any, value: Any) -> None:
        if key in self.__dataclass_fields__:
            super().__setattr__(key, value)
            return

        self.data[key] = value

    def __delattr__(self, key: Any) -> None:
        del self.data[key]
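Not from the diff: a hypothetical illustration of what this mixin buys concrete files like the ones defined below (`ExampleJSON` is made up; `backend` stands for the running Backend instance).

# Hypothetical illustration only.
@dataclass
class ExampleJSON(ConfigFile, JSONFile):  # JSONFile is defined just below
    filename: str = "example.json"

# cfg = ExampleJSON(backend)
# cfg["theme"] = "Midnight.qpl"   # __setitem__ stores into cfg.data
# cfg.theme                       # __getattr__ falls back to cfg.data["theme"]
# cfg.filename                    # dataclass fields keep normal attribute behaviour
# cfg.save()                      # flag the writer coroutine to persist the JSON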

@dataclass
class JSONFile(MappingFile):
    """A file stored on disk in the JSON format."""

    @property
    def default_data(self) -> dict:
        return {}

    def deserialized(self, data: str) -> Tuple[dict, bool]:
        """Return parsed data from file text and whether to call `save()`.

        If the file has missing keys, the missing data will be merged to the
        returned dict and the second tuple item will be `True`.
        """

        loaded = json.loads(data)
        all_data = self.default_data.copy()
        dict_update_recursive(all_data, loaded)
        return (all_data, loaded != all_data)

    def serialized(self) -> str:
        data = self.data
        return json.dumps(data, indent=4, ensure_ascii=False, sort_keys=True)

@dataclass
class PCNFile(MappingFile):
    """File stored in the PCN format, with machine edits in a separate JSON."""

    create_missing = False

    path_override: Optional[Path] = None

    @property
    def path(self) -> Path:
        return self.path_override or super().path

    @property
    def write_path(self) -> Path:
        """Full path of file where programmatically-done edits are stored."""
        return self.path.with_suffix(".gui.json")

    @property
    def qml_data(self) -> Dict[str, Any]:
        return deep_serialize_for_qml(self.data.as_dict())  # type: ignore

    @property
    def default_data(self) -> Section:
        return Section()

    def deserialized(self, data: str) -> Tuple[Section, bool]:
        root = Section.from_source_code(data, self.path)
        edits = "{}"

        if self.write_path.exists():
            edits = self.write_path.read_text()

        includes_now = list(root.all_includes)

        for path, pcn in self.children.copy().items():
            if path not in includes_now:
                pcn.stop_watching()
                del self.children[path]

        for path in includes_now:
            if path not in self.children:
                self.children[path] = PCNFile(
                    self.backend,
                    filename = path.name,
                    parent = self,
                    path_override = path,
                )

        return (root, root.deep_merge_edits(json.loads(edits)))

    def serialized(self) -> str:
        edits = self.data.edits_as_dict()
        return json.dumps(edits, indent=4, ensure_ascii=False)

    async def set_data(self, data: Dict[str, Any]) -> None:
        self.data.deep_merge_edits({"set": data}, has_expressions=False)
        self.save()

@dataclass
class Accounts(ConfigFile, JSONFile):
    """Config file for saved matrix accounts: user ID, access tokens, etc"""

    filename: str = "accounts.json"

    async def any_saved(self) -> bool:
        """Return for QML whether there are any accounts saved on disk."""
        return bool(self.data)

    async def add(self, user_id: str) -> None:
        """Add an account to the config and write it on disk.

        The account's details such as its access token are retrieved from
        the corresponding `MatrixClient` in `backend.clients`.
        """

        client = self.backend.clients[user_id]
        account = self.backend.models["accounts"][user_id]

        self.update({
            client.user_id: {
                "homeserver": client.homeserver,
                "token": client.access_token,
                "device_id": client.device_id,
                "enabled": True,
                "presence": account.presence.value.replace("echo_", ""),
                "status_msg": account.status_msg,
                "order": account.order,
            },
        })
        self.save()

    async def set(
        self,
        user_id: str,
        enabled: Optional[str] = None,
        presence: Optional[str] = None,
        order: Optional[int] = None,
        status_msg: Optional[str] = None,
    ) -> None:
        """Update an account if found in the config file and write to disk."""

        if user_id not in self:
            return

        if enabled is not None:
            self[user_id]["enabled"] = enabled

        if presence is not None:
            self[user_id]["presence"] = presence

        if order is not None:
            self[user_id]["order"] = order

        if status_msg is not None:
            self[user_id]["status_msg"] = status_msg

        self.save()

    async def forget(self, user_id: str) -> None:
        """Delete an account from the config and write it on disk."""

        self.pop(user_id, None)
        self.save()

@dataclass
class Pre070Settings(ConfigFile):
    """Detect and warn about the presence of a pre-0.7.0 settings.json file."""

    filename: str = "settings.json"

    def __post_init__(self) -> None:
        if self.path.exists():
            Pre070SettingsDetected(self.path)


@dataclass
class Settings(ConfigFile, PCNFile):
    """General config file for UI and backend settings"""

    filename: str = "settings.py"

    @property
    def default_data(self) -> Section:
        root = Section.from_file("src/config/settings.py")
        edits = "{}"

        if self.write_path.exists():
            edits = self.write_path.read_text()

        root.deep_merge_edits(json.loads(edits))
        return root

    def deserialized(self, data: str) -> Tuple[Section, bool]:
        section, save = super().deserialized(data)

        if self and self.General.theme != section.General.theme:
            if hasattr(self.backend, "theme"):
                self.backend.theme.stop_watching()

            self.backend.theme = Theme(
                self.backend, section.General.theme,  # type: ignore
            )
            UserFileChanged(Theme, self.backend.theme.qml_data)

        # if self and self.General.new_theme != section.General.new_theme:
        #     self.backend.new_theme.stop_watching()
        #     self.backend.new_theme = NewTheme(
        #         self.backend, section.General.new_theme,  # type: ignore
        #     )
        #     UserFileChanged(Theme, self.backend.new_theme.qml_data)

        return (section, save)


@dataclass
class NewTheme(UserDataFile, PCNFile):
    """A theme file defining the look of QML components."""

    create_missing = False

    @property
    def path(self) -> Path:
        data_dir = Path(
            os.environ.get("MOMENT_DATA_DIR") or
            self.backend.appdirs.user_data_dir,
        )
        return data_dir / "themes" / self.filename

    @property
    def qml_data(self) -> Dict[str, Any]:
        return flatten_dict_keys(super().qml_data, last_level=False)


@dataclass
class UIState(UserDataFile, JSONFile):
    """File used to save and restore the state of QML components."""

    filename: str = "state.json"

    @property
    def default_data(self) -> dict:
        return {
            "collapseAccounts": {},
            "page": "Pages/Default.qml",
            "pageProperties": {},
        }

    def deserialized(self, data: str) -> Tuple[dict, bool]:
        dict_data, save = super().deserialized(data)

        for user_id, do in dict_data["collapseAccounts"].items():
            self.backend.models["all_rooms"].set_account_collapse(user_id, do)

        return (dict_data, save)


@dataclass
class History(UserDataFile, JSONFile):
    """File to save and restore lines typed by the user in QML components."""

    filename: str = "history.json"

    @property
    def default_data(self) -> dict:
        return {"console": []}


@dataclass
class Theme(UserDataFile):
    """A theme file defining the look of QML components."""

    # Since it currently breaks at every update and the file format will be
    # changed later, don't copy the theme to user data dir if it doesn't exist.
    create_missing = False

    @property
    def path(self) -> Path:
        data_dir = Path(
            os.environ.get("MOMENT_DATA_DIR") or
            self.backend.appdirs.user_data_dir,
        )
        return data_dir / "themes" / self.filename

    @property
    def default_data(self) -> str:
        if self.filename in ("Foliage.qpl", "Midnight.qpl", "Glass.qpl"):
            path = f"src/themes/{self.filename}"
        else:
            path = "src/themes/Foliage.qpl"

        try:
            byte_content = pyotherside.qrc_get_file_contents(path)
        except ValueError:
            # App was compiled without QRC
            return convert_to_qml(Path(path).read_text())
        else:
            return convert_to_qml(byte_content.decode())

    def deserialized(self, data: str) -> Tuple[str, bool]:
        return (convert_to_qml(data), False)
@@ -20,8 +20,8 @@ from pathlib import Path
from tempfile import NamedTemporaryFile
from types import ModuleType
from typing import (
    Any, AsyncIterator, Callable, Collection, Dict, Iterable, Mapping,
    Optional, Tuple, Type, Union,
)
from uuid import UUID
@@ -36,348 +36,348 @@ from .color import Color
from .pcn.section import Section

if sys.version_info >= (3, 7):
    from contextlib import asynccontextmanager
    current_task = asyncio.current_task
else:
    from async_generator import asynccontextmanager
    current_task = asyncio.Task.current_task

if sys.version_info >= (3, 10):
    import collections.abc as collections
else:
    import collections

Size = Tuple[int, int]
BytesOrPIL = Union[bytes, PILImage.Image]
auto = autostr

COMPRESSION_POOL = ProcessPoolExecutor()


class AutoStrEnum(Enum):
    """An Enum where auto() assigns the member's name instead of an integer.

    Example:
    >>> class Fruits(AutoStrEnum): apple = auto()
    >>> Fruits.apple.value
    "apple"
    """

    @staticmethod
    def _generate_next_value_(name, *_):
        return name


def dict_update_recursive(dict1: dict, dict2: dict) -> None:
    """Deep-merge `dict1` and `dict2`, recursive version of `dict.update()`."""
    # https://gist.github.com/angstwad/bf22d1822c38a92ec0a9

    for k in dict2:
        if (k in dict1 and isinstance(dict1[k], dict) and
                isinstance(dict2[k], collections.Mapping)):
            dict_update_recursive(dict1[k], dict2[k])
        else:
            dict1[k] = dict2[k]
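A tiny worked example of the merge semantics (the dict contents are made up): nested dicts are merged key by key, everything else is overwritten in place.

# Illustration, not part of the diff.
d1 = {"General": {"theme": "Midnight.qpl", "zoom": 1.0}, "console": []}
d2 = {"General": {"zoom": 1.5}}

dict_update_recursive(d1, d2)
# d1 is now {"General": {"theme": "Midnight.qpl", "zoom": 1.5}, "console": []}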

def flatten_dict_keys(
    source: Optional[Dict[str, Any]] = None,
    separator: str = ".",
    last_level: bool = True,
    _flat: Optional[Dict[str, Any]] = None,
    _prefix: str = "",
) -> Dict[str, Any]:
    """Return a flattened version of the ``source`` dict.

    Example:
    >>> dct
    {"content": {"body": "foo"}, "m.test": {"key": {"bar": 1}}}
    >>> flatten_dict_keys(dct)
    {"content.body": "foo", "m.test.key.bar": 1}
    >>> flatten_dict_keys(dct, last_level=False)
    {"content": {"body": "foo"}, "m.test.key": {"bar": 1}}
    """

    flat = {} if _flat is None else _flat

    for key, value in (source or {}).items():
        if isinstance(value, dict):
            prefix = f"{_prefix}{key}{separator}"
            flatten_dict_keys(value, separator, last_level, flat, prefix)
        elif last_level:
            flat[f"{_prefix}{key}"] = value
        else:
            prefix = _prefix[:-len(separator)]  # remove trailing separator
            flat.setdefault(prefix, {})[key] = value

    return flat


def config_get_account_room_rule(
    rules: Section, user_id: str, room_id: str,
) -> Any:
    """Return best matching rule value for an account/room PCN free Section."""

    for name, value in reversed(rules.children()):
        name = re.sub(r"\s+", " ", name.strip())

        if name in (user_id, room_id, f"{user_id} {room_id}"):
            return value

    return rules.default
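For example (with hypothetical rule names): if a rules Section declares children named "@alice:example.org" and "@alice:example.org !room:example.org" in that order, then config_get_account_room_rule(rules, "@alice:example.org", "!room:example.org") walks the children in reverse and returns the value of the later, account-plus-room entry; for a room or account with no matching child, rules.default is returned instead.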

async def is_svg(file: File) -> bool:
    """Return whether the file is a SVG (`lxml` is used for detection)."""

    chunks = [c async for c in async_generator_from_data(file)]

    with io.BytesIO(b"".join(chunks)) as file:
        try:
            _, element = next(xml_etree.iterparse(file, ("start",)))
            return element.tag == "{http://www.w3.org/2000/svg}svg"
        except (StopIteration, xml_etree.ParseError):
            return False


async def svg_dimensions(file: File) -> Size:
    """Return the width and height, or viewBox width and height for a SVG.

    If these properties are missing (broken file), ``(256, 256)`` is returned.
    """

    chunks = [c async for c in async_generator_from_data(file)]

    with io.BytesIO(b"".join(chunks)) as file:
        attrs = xml_etree.parse(file).getroot().attrib

    try:
        width = round(float(attrs.get("width", attrs["viewBox"].split()[3])))
    except (KeyError, IndexError, ValueError, TypeError):
        width = 256

    try:
        height = round(float(attrs.get("height", attrs["viewBox"].split()[4])))
    except (KeyError, IndexError, ValueError, TypeError):
        height = 256

    return (width, height)

async def guess_mime(file: File) -> str:
    """Return the file's mimetype, or `application/octet-stream` if unknown."""

    if isinstance(file, io.IOBase):
        file.seek(0, 0)
    elif isinstance(file, AsyncBufferedIOBase):
        await file.seek(0, 0)

    try:
        first_chunk: bytes
        async for first_chunk in async_generator_from_data(file):
            break
        else:
            return "inode/x-empty"  # empty file

        # TODO: plaintext
        mime = filetype.guess_mime(first_chunk)

        return mime or (
            "image/svg+xml" if await is_svg(file) else
            "application/octet-stream"
        )
    finally:
        if isinstance(file, io.IOBase):
            file.seek(0, 0)
        elif isinstance(file, AsyncBufferedIOBase):
            await file.seek(0, 0)


def plain2html(text: str) -> str:
    """Convert `\\n` into `<br>` tags and `\\t` into four spaces."""

    return html.escape(text)\
        .replace("\n", "<br>")\
        .replace("\t", " " * 4)


def strip_html_tags(text: str) -> str:
    """Remove HTML tags from text."""
    return re.sub(r"<\/?[^>]+(>|$)", "", text)

def serialize_value_for_qml(
    value: Any, json_list_dicts: bool = False, reject_unknown: bool = False,
) -> Any:
    """Convert a value to make it easier to use from QML.

    Returns:

    - For `bool`, `int`, `float`, `bytes`, `str`, `datetime`, `date`, `time`:
      the unchanged value (PyOtherSide handles these)

    - For `Collection` objects (includes `list` and `dict`):
      a JSON dump if `json_list_dicts` is `True`, else the unchanged value

    - If the value is an instantiated object and has a `serialized` attribute
      or property, return that

    - For `Enum` members, the actual value of the member

    - For `Path` objects, a `file://<path...>` string

    - For `UUID` object: the UUID in string form

    - For `timedelta` objects: the delta as a number of milliseconds `int`

    - For `Color` objects: the color's hexadecimal value

    - For class types: the class `__name__`

    - For anything else: raise a `TypeError` if `reject_unknown` is `True`,
      else return the unchanged value.
    """

    if isinstance(value, (bool, int, float, bytes, str, datetime, date, time)):
        return value

    if json_list_dicts and isinstance(value, Collection):
        if isinstance(value, set):
            value = list(value)
        return json.dumps(value)

    if not inspect.isclass(value) and hasattr(value, "serialized"):
        return value.serialized

    if isinstance(value, Iterable):
        return value

    if hasattr(value, "__class__") and issubclass(value.__class__, Enum):
        return value.value

    if isinstance(value, Path):
        return f"file://{value!s}"

    if isinstance(value, UUID):
        return str(value)

    if isinstance(value, timedelta):
        return value.total_seconds() * 1000

    if isinstance(value, Color):
        return value.hex

    if inspect.isclass(value):
        return value.__name__

    if reject_unknown:
        raise TypeError("Unknown type reject")

    return value
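A few concrete conversions, as a hedged illustration of the rules above (the `Color` case is left out since it needs the project's Color class):

# Illustration, not part of the diff.
from datetime import timedelta
from pathlib import Path
from uuid import uuid4

serialize_value_for_qml(timedelta(seconds=2))             # -> 2000.0 (milliseconds)
serialize_value_for_qml(Path("/tmp/avatar.png"))          # -> "file:///tmp/avatar.png"
serialize_value_for_qml(uuid4())                          # -> the UUID as a str
serialize_value_for_qml({"a": 1}, json_list_dicts=True)   # -> '{"a": 1}'
serialize_value_for_qml(object(), reject_unknown=True)    # -> raises TypeError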

def deep_serialize_for_qml(obj: Iterable) -> Union[list, dict]:
    """Recursively serialize lists and dict values for QML."""

    if isinstance(obj, Mapping):
        dct = {}

        for key, value in obj.items():
            if isinstance(value, Iterable) and not isinstance(value, str):
                # PyOtherSide only accepts dicts with string keys
                dct[str(key)] = deep_serialize_for_qml(value)
                continue

            with suppress(TypeError):
                dct[str(key)] = \
                    serialize_value_for_qml(value, reject_unknown=True)

        return dct

    lst = []

    for value in obj:
        if isinstance(value, Iterable) and not isinstance(value, str):
            lst.append(deep_serialize_for_qml(value))
            continue

        with suppress(TypeError):
            lst.append(serialize_value_for_qml(value, reject_unknown=True))

    return lst


def classes_defined_in(module: ModuleType) -> Dict[str, Type]:
    """Return a `{name: class}` dict of all the classes a module defines."""

    return {
        m[0]: m[1] for m in inspect.getmembers(module, inspect.isclass)
        if not m[0].startswith("_") and
        m[1].__module__.startswith(module.__name__)
    }


@asynccontextmanager
async def aiopen(*args, **kwargs) -> AsyncIterator[Any]:
    """Wrapper for `aiofiles.open()` that doesn't break mypy"""
    async with aiofiles.open(*args, **kwargs) as file:
        yield file


@asynccontextmanager
async def atomic_write(
    path: Union[Path, str], binary: bool = False, **kwargs,
) -> AsyncIterator[Tuple[Any, Callable[[], None]]]:
    """Write a file asynchronously (using aiofiles) and atomically.

    Yields a `(open_temporary_file, done_function)` tuple.
    The done function should be called after writing to the given file.
    When the context manager exits, the temporary file will either replace
    `path` if the function was called, or be deleted.

    Example:
    >>> async with atomic_write("foo.txt") as (file, done):
    >>>     await file.write("Sample text")
    >>>     done()
    """

    mode = "wb" if binary else "w"
    path = Path(path)
    temp = NamedTemporaryFile(dir=path.parent, delete=False)
    temp_path = Path(temp.name)

    can_replace = False

    def done() -> None:
        nonlocal can_replace
        can_replace = True

    try:
        async with aiopen(temp_path, mode, **kwargs) as out:
            yield (out, done)
    finally:
        if can_replace:
            temp_path.replace(path)
        else:
            temp_path.unlink()


def _compress(image: BytesOrPIL, fmt: str, optimize: bool) -> bytes:
    if isinstance(image, bytes):
        pil_image = PILImage.open(io.BytesIO(image))
    else:
        pil_image = image

    with io.BytesIO() as buffer:
        pil_image.save(buffer, fmt, optimize=optimize)
        return buffer.getvalue()


async def compress_image(
    image: BytesOrPIL, fmt: str = "PNG", optimize: bool = True,
) -> bytes:
    """Compress image in a separate process, without blocking event loop."""

    return await asyncio.get_event_loop().run_in_executor(
        COMPRESSION_POOL, _compress, image, fmt, optimize,
    )
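Not part of the diff: a short sketch of the intended call pattern, assuming it runs inside the existing event loop (`make_thumbnail` and its input are hypothetical):

# Hedged usage sketch; raw_bytes would come from e.g. a downloaded avatar.
async def make_thumbnail(raw_bytes: bytes) -> bytes:
    # Pillow work happens in COMPRESSION_POOL, so the event loop stays responsive
    return await compress_image(raw_bytes, fmt="PNG", optimize=True)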
File diff suppressed because it is too large