I don't know what I'm doing here
commit e3d3311ddb
parent 43c7460100
@@ -1,8 +0,0 @@
from .matrix import *
from . import listeners
real_listeners=dict(filter(lambda x:isinstance(x[1],Listener),listeners.__dict__.items())) # items() yields (name, value) pairs, so check the value

class MatrixClient(MatrixClient):
    def __init__(self,*args,**kwargs):
        super().__init__(*args,**kwargs)
        self.listeners=real_listeners
@@ -19,4 +19,5 @@ def room_f(client,event):
    rid=event.data['id']
    room=goc(lambda x:x.id==rid,client.account.rooms,models.Room(rid))
    event.data=room
    room.name=
room=_l('room','m.room',room_f)
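
For orientation, a minimal sketch of the pattern the hunks above rely on: module-level Listener objects that the (now deleted) collector scoops out of listeners.__dict__. Treating _l as a thin alias for Listener and the message_f handler/fields below as made up for illustration; none of this is in the diff.

# Hypothetical listeners.py entry, illustration only.
from .utils import Listener as _l

def message_f(client, event):
    # Reshape a raw m.room.message event into something handlers want,
    # mirroring what room_f does for room events above.
    event.data = event.raw_data.get('content', {}).get('body')

message = _l('message', 'm.room.message', message_f)
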
matrix.py
@@ -1,54 +0,0 @@
import aiohttp as ah
import time
import asyncio
import datetime
import traceback
from .utils import Event,Listener

tracing=False
def trace(*msg):
    if tracing: print(datetime.datetime.now(),*msg)

class MatrixClient():
    def __init__(self,homeserver,token):
        self.event_queue=asyncio.Queue() # Contains Events. Everything in here must be an Event.
        self.homeserver=homeserver
        self.token=token # Deal with login stuff later
        self.baseurl=f'https://{self.homeserver}/_matrix/client'
        self.session=ah.ClientSession()
        self.since=None
        self.listeners=[]

    def __del__(self):
        asyncio.get_event_loop().create_task(self.session.close()) # lol
        while not self.session.closed: time.sleep(0.01)

    async def request(self,endpoint='sync',method='GET',ver=0,params={},*args,**kwargs):
        async with self.session.request(method, f'{self.baseurl}/r{ver}/{endpoint}', params=params|{'access_token':self.token}|({'since':self.since} if self.since else {}), *args,**kwargs) as fetched:
            if fetched.status!=200: raise Exception('fix ur shit') # aiohttp exposes .status, not .status_code
            try: return await fetched.json()
            except ah.ContentTypeError: pass # what aiohttp raises when the body isn't JSON

    # This function just dumps /sync blobs in the event queue as a raw event.
    # ALL handling is deferred to handlers.
    # ... Except updating the since token. That's important to do here to guarantee that it never calls with the same token twice.
    async def sync(self):
        blob=await self.request(params={'timeout':30000})
        self.since=blob['next_batch']
        await self.event_queue.put(Event('m.sync',None,blob)) # Queue.put is a coroutine, so it has to be awaited

    def addlistener(self,name=None,match=None):
        func=type(lambda:1) # Gross, but I can't actually find it. (It's types.FunctionType.)
        # And yes, lambdas are <class 'function'> too.
        if isinstance(name,func) and match==None: match=name; name=None
        def __wrap__(funky):
            self.listeners.append(Listener(name,match,funky))
            return funky
        return __wrap__ # hand the wrapper back, otherwise decorating silently does nothing

    async def process_queue(self):
        while True:
            item=await self.event_queue.get()
            for listener in self.listeners:
                if listener==item.event_type:
                    listener(item)
                    #traceback.format_exc()
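
The comments above describe the split: sync() only long-polls /sync, stamps the since token, and queues the blob; process_queue() dispatches to listeners. A minimal sketch of how the two loops are presumably driven, with the homeserver, token, and main() scaffolding made up for illustration:

# Hypothetical driver, not part of this commit.
import asyncio
from matrix import MatrixClient  # matrix.py lives in a package; adjust the import to match

async def main():
    client = MatrixClient('matrix.example.org', 'syt_example_token')

    @client.addlistener(match='m.sync')
    def on_sync(event):
        # event.raw_data is the whole /sync blob; event.data is still None at this point.
        print('sync batch received, next_batch already stored as client.since')

    async def sync_forever():
        while True:
            await client.sync()  # long-poll, update since, queue Event('m.sync', None, blob)

    # Note: with the async Listener.__call__ from the new utils.py, process_queue
    # would need to await listener(item) for handlers to actually run.
    await asyncio.gather(sync_forever(), client.process_queue())

asyncio.run(main())
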
utils.py
@@ -1,56 +1,32 @@
from dataclasses import dataclass, field, _MISSING_TYPE as mt
from asyncio import iscoroutinefunction

@dataclass
class Event():
    def __init__(self,event_type,data,raw_data,outbound=False):
        self.event_type=event_type
        self.data=data
        self.raw_data=raw_data
        self.outbound=outbound
    event_type:str
    data:object=None
    raw_data:dict=None
    outbound:bool=False

class Listener():
    def __init__(self,name,match,function,):
    def __init__(self,name,match,function):
        self.name=name or function.__name__
        self.match=match
        self._matchstr=isinstance(match,str)
        self.function=function
        self._async=iscoroutinefunction(function)

    def __call__(self,*args,**kwargs):
        self.function(*args,**kwargs)
    async def __call__(self,*args,**kwargs):
        if not self._async: return self.function(*args,**kwargs)
        return await self.function(*args,**kwargs)

    def __eq__(self,other):
        if isinstance(other,str):
            if self._matchstr: return self.match==other
        if isinstance(other,Event):
            if self._matchstr: return self.match==other.event_type
            return self.match(other) # If it's not a string, assume it's a callable.
        else: return super().__eq__(other)
    def __str__(self): return self.name
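
A quick sketch of how the __eq__/__call__ pair above behaves: string matches compare against a bare event type or an Event's event_type, callable matches are consulted when compared against an Event, and __call__ awaits async handlers while calling sync ones directly. The handlers and payloads here are made up for illustration.

# Illustration only; assumes utils.py is importable on its own.
import asyncio
from utils import Event, Listener

async def demo():
    async def on_message(event):      # async handlers are awaited by __call__
        print('message:', event.raw_data)

    def on_anything_roomy(event):     # sync handlers are called directly
        print('roomy:', event.event_type)

    by_name = Listener(None, 'm.room.message', on_message)
    by_pred = Listener('roomy', lambda e: e.event_type.startswith('m.room.'), on_anything_roomy)

    ev = Event('m.room.message', None, {'body': 'hi'})
    print(by_name == 'm.room.message', by_name == ev)   # True True
    print(by_pred == ev)                                 # True (callable match gets the Event)
    await by_name(ev)                                    # __call__ is async, so await it

asyncio.run(demo())
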

# I HATE THE TYPEHINTS! I HATE THE TYPEHINTS!
# I REALLY fucking hate the typehints. This is the messiest fucking shit.
# WHY IS THIS NOT JUST A DEFAULT THING IN DATACLASS
# Please tell me as soon as this egregious oversight is corrected.
# I hate this function so much, I want to delete it ASAP.
def redc(classy):
    wrapped=dataclass(classy)
    def from_dict(somedict):
        # Sure would be nice if we could just ask for a list of required args.
        count=len(dict(filter(lambda x:isinstance(x[1].default,mt) and isinstance(x[1].default_factory,mt),wrapped.__dataclass_fields__.items())))
        p=wrapped(*[None]*count)
        for k,v in somedict.items():
            if k not in p.__dict__: continue
            t=p.__dataclass_fields__[k].type
            try: parsed=t(v)
            except TypeError:
                for n in t.__args__:
                    try: parsed=n(v)
                    except: continue
                    else: break
                # Everything failed so just leave it as default. 🤷
                # Watch that somehow generate an error one day anyway.
                else: parsed=p.__dict__[k]
            p.__dict__[k]=parsed
        return p # without the return, the populated instance would just be thrown away
    wrapped.from_dict=from_dict
    return wrapped
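
What redc buys, as a hedged sketch: a dataclass that can also be populated from a loosely-typed dict, coercing values through each field's annotation. The Room field set below is a guess based on how models.Room is used in the earlier hunk (constructed from an id, .name set later); member_count and the values are made up.

# Hypothetical model built with redc; illustration only.
from typing import Optional
from utils import redc  # assumes utils.py is importable on its own

@redc
class Room:
    id:str
    name:Optional[str]=None
    member_count:int=0

# from_dict tries the annotation first, falls back to each Union member,
# keeps the default if nothing applies, and skips unknown keys.
r = Room.from_dict({'id': '!abc:example.org', 'name': 'lobby', 'member_count': '3', 'junk': 'ignored'})
print(r)  # Room(id='!abc:example.org', name='lobby', member_count=3)
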

def get_or_create(needle,haystack,default):
    """
    This is a wrapper for filter that can add stuff. Nothing special. Needle is a function, default isn't. Haystack is a list. I might fix that later.