Fix sync returning deleted items from other collections.
Fix invalid type of last_modified (must be str, was datetime.datetime).
parent 072feed372
commit 1906897159
1 changed file with 32 additions and 25 deletions
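For context on the last_modified fix: the patch normalizes datetime values to the HTTP-date string format before handing them to radicale_item.Item. A minimal standalone sketch of that conversion (the helper name is mine, not part of the patch):

import datetime
import zoneinfo

def to_http_date(dt: datetime.datetime) -> str:
    # Render a datetime as an RFC 1123 style HTTP date in GMT, the same format
    # the new Item wrapper in this commit produces for last_modified.
    return dt.astimezone(tz=zoneinfo.ZoneInfo('GMT')).strftime('%a, %d %b %Y %H:%M:%S GMT')

print(to_http_date(datetime.datetime(2023, 5, 1, 12, 30, tzinfo=datetime.timezone.utc)))
# Mon, 01 May 2023 12:30:00 GMT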
@@ -5,6 +5,7 @@ import os
 import re
 import binascii
 import datetime
+import zoneinfo
 import uuid
 import string
 import itertools
@@ -36,6 +37,13 @@ PLUGIN_CONFIG_SCHEMA = {
     },
 }
 
+class Item(radicale_item.Item):
+    def __init__(self, *args, last_modified: Optional[Union[str, datetime.datetime]]=None, **kwargs):
+        if last_modified is not None and isinstance(last_modified, datetime.datetime):
+            last_modified = last_modified.astimezone(tz=zoneinfo.ZoneInfo('GMT')).strftime('%a, %d %b %Y %H:%M:%S GMT')
+        super().__init__(*args, last_modified=last_modified, **kwargs)
+
+
 class Collection(BaseCollection):
 
     def __init__(self, storage: "Storage", id: uuid.UUID, path: str):
@@ -50,8 +58,8 @@ class Collection(BaseCollection):
     def path(self) -> str:
         return self._path
 
-    def _row_to_item(self, row):
-        return radicale_item.Item(
+    def _row_to_item(self, row) -> "radicale_item.Item":
+        return Item(
             collection=self,
             href=row.name,
             last_modified=row.modified,
@@ -95,9 +103,8 @@ class Collection(BaseCollection):
         ).where(
             item_table.c.collection_id == self._id,
         )
-        with self._storage._engine.begin() as connection:
-            for row in connection.execute(select_stmt):
-                yield self._row_to_item(row)
+        for row in connection.execute(select_stmt):
+            yield self._row_to_item(row)
 
     def get_all(self) -> Iterator["radicale_item.Item"]:
         with self._storage._engine.begin() as c:
@@ -299,7 +306,10 @@ class Collection(BaseCollection):
                 isouter=True,
             ),
         ).where(
-            item_table.c.id == None,
+            sa.and_(
+                item_history_table.c.collection_id == self._id,
+                item_table.c.id == None,
+            ),
         )
         for row in connection.execute(select_stmt):
             yield row.name
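A note on the sync fix in the hunk above: without the collection filter, the outer join matched history rows of every collection whose live item row was gone, so sync reported deleted hrefs that never belonged to the collection being synced. A self-contained sketch of the corrected anti-join; the table definitions and join condition here are illustrative guesses, not the plugin's real schema:

import sqlalchemy as sa

meta = sa.MetaData()
item_table = sa.Table(
    'item', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('collection_id', sa.Integer),
    sa.Column('name', sa.String),
)
item_history_table = sa.Table(
    'item_history', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('collection_id', sa.Integer),
    sa.Column('name', sa.String),
)

def deleted_hrefs_stmt(collection_id):
    # History rows with no surviving item row (item.id IS NULL), restricted to
    # the collection being synced -- that restriction is what the fix adds.
    return sa.select(item_history_table.c.name).select_from(
        item_history_table.join(
            item_table,
            sa.and_(
                item_table.c.collection_id == item_history_table.c.collection_id,
                item_table.c.name == item_history_table.c.name,
            ),
            isouter=True,
        )
    ).where(
        sa.and_(
            item_history_table.c.collection_id == collection_id,
            item_table.c.id == None,  # noqa: E711 -- IS NULL in SQLAlchemy
        )
    )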
@@ -322,6 +332,7 @@ class Collection(BaseCollection):
         # https://github.com/Kozea/Radicale/blob/6a56a6026f6ec463d6eb77da29e03c48c0c736c6/radicale/storage/multifilesystem/sync.py
         _prefix = 'http://radicale.org/ns/sync/'
         collection_state_table = self._storage._meta.tables['collection_state']
+
         def check_token_name(token_name: str) -> bool:
             if len(token_name) != 64:
                 return False
@@ -436,19 +447,18 @@ class BdayCollection(Collection):
                 if r.match(v):
                     return datetime.datetime.strptime(v, f)
             raise ValueError(f'cannot parse specified string {v}')
 
         cal = vobject.iCalendar()
         if 'bday' not in o.contents:
             return None
-        name = o.fn.value
         date = vobj_str2date(o.bday)
         if date.year <= 1900:
             date = date.replace(year=datetime.datetime.now().year)
         date_end = date + datetime.timedelta(days=1)
 
         cal.add('vevent')
         cal.vevent_list[-1].add('uid').value = o.uid.value
         cal.vevent_list[-1].add('dtstamp').value = vobj_str2date(o.rev)
-        cal.vevent_list[-1].add('summary').value = name
+        cal.vevent_list[-1].add('summary').value = o.fn.value
         cal.vevent_list[-1].add('dtstart').value = date.date()
         cal.vevent_list[-1].add('dtend').value = date_end.date()
         cal.vevent_list[-1].add('rrule').value = 'FREQ=YEARLY'
@@ -459,7 +469,7 @@ class BdayCollection(Collection):
         if new_vobject is None:
             return None
         assert item.href is not None
-        return radicale_item.Item(
+        return Item(
             collection=self,
             href=item.href,
             #href=item.href + '.ics',
@@ -618,29 +628,15 @@ class Storage(BaseStorage):
             ).where(
                 aliases[-1].c.parent_id == None,
             )
-            select_sub_stmt = None
-            if depth != "0":
-                aliased = select_collection_or_item.alias('data_list')
-                select_sub_stmt = sa.select(
-                    aliased.c,
-                ).select_from(
-                    aliased.join(
-                        select_from,
-                        aliased.c.parent_id == aliases[0].c.id,
-                    ),
-                ).where(
-                    aliases[-1].c.parent_id == None,
-                )
-
             l = []
             self_collection = connection.execute(select_stmt).one_or_none()
 
             if self_collection is None:
                 # None found
                 return []
             if self_collection.type_ != 'collection':
                 # Item found
-                return [radicale_item.Item(
+                return [Item(
                     collection=self._get_collection(self_collection.parent_id, connection=connection),
                     href=self_collection.name,
                     last_modified=self_collection.modified,
@@ -652,6 +648,17 @@ class Storage(BaseStorage):
             l += [self_collection]
             # collection should list contents
             if depth != "0":
+                sub_stmt_select_from = select_collection_or_item.alias()
+                select_sub_stmt = sa.select(
+                    sub_stmt_select_from.c,
+                ).select_from(
+                    sub_stmt_select_from,
+                ).where(
+                    sa.and_(
+                        sub_stmt_select_from.c.parent_id == self_collection._id,
+                        sub_stmt_select_from.c.type_ == 'collection',
+                    ),
+                )
                 for row in connection.execute(select_sub_stmt):
                     path = '/'.join(path_parts)
                     path += '/'
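The BdayCollection hunks above build birthday events from vCards. A compact standalone sketch of that conversion with vobject; the helper and variable names are mine, only the vobject calls mirror the patched code:

import datetime
import vobject

def bday_event(uid, full_name, bday):
    # Build a yearly recurring all-day VEVENT for a contact's birthday.
    cal = vobject.iCalendar()
    cal.add('vevent')
    event = cal.vevent_list[-1]
    event.add('uid').value = uid
    event.add('dtstamp').value = datetime.datetime.now(datetime.timezone.utc)
    event.add('summary').value = full_name
    event.add('dtstart').value = bday                             # date -> all-day start
    event.add('dtend').value = bday + datetime.timedelta(days=1)  # exclusive end
    event.add('rrule').value = 'FREQ=YEARLY'                      # repeats every year
    return cal.serialize()

print(bday_event('1234-5678', 'Ada Lovelace', datetime.date(1990, 12, 10)))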