#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import functools
import os
import sys
import logging
import datetime

import yaml
from schema import Schema, And, Or, Use, Optional, Regex
import click

import adapters
from adapters import *


logger = logging.getLogger('wp_cal')
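
# The adapter protocol, as inferred from how adapters are used in main()
# below (a sketch, not the authoritative interface defined in adapters):
#
#   adapters.ADAPTERS        registry mapping adapter name -> adapter class
#   a.new(cfg)               construct an adapter from its config section
#   x.login() -> bool        authenticate against the backing service
#   Source.get_events(until=timedelta) -> list of event dicts (with 'status')
#   Sink.post_events(events, until=timedelta)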


def range_str_to_timedelta(value):
    """Parse a range string such as '1h30m' into a datetime.timedelta.

    Valid units are s(econds), m(inutes), h(ours), d(ays) and w(eeks);
    trailing digits without a unit are counted as seconds.
    """
    valid_units = 'smhdw'
    current_int = 0
    values = {}
    for c in value:
        if c in '0123456789':
            # accumulate multi-digit counts, e.g. '3' then '6' -> 36
            current_int *= 10
            current_int += int(c)
        elif c in valid_units:
            if c in values:
                logger.warning('unit %s already in values, overwriting', c)
            values[c] = current_int
            current_int = 0
    # trailing digits without a unit count as seconds
    if current_int != 0:
        if 's' in values:
            logger.warning('unit %s already in values, overwriting', 's')
        values['s'] = current_int
    for valid_unit in valid_units:
        values.setdefault(valid_unit, 0)
    return datetime.timedelta(
        seconds=values['s'],
        minutes=values['m'],
        hours=values['h'],
        days=values['d'],
        weeks=values['w'],
    )
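
# A quick sanity check of the parser (illustrative; values follow from the
# definition above):
#
#   >>> range_str_to_timedelta('1h30m')
#   datetime.timedelta(seconds=5400)
#   >>> range_str_to_timedelta('2w')
#   datetime.timedelta(days=14)
#   >>> range_str_to_timedelta('90')   # bare digits fall back to seconds
#   datetime.timedelta(seconds=90)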


# schema for the YAML configuration; 'range' accepts either a string such
# as '365d' (converted to a timedelta on validation) or a ready-made
# datetime.timedelta
config_schema = Schema({
    'sources': [dict],
    'sinks': [dict],
    Optional('logging', default={'level': ':WARNING,wp_cal:INFO'}): {
        Optional('level', default=':WARNING,wp_cal:INFO'): Use(str),
    },
    Optional('range', default='365d'): Or(
        And(
            Use(str),
            # anchored so that only whole count/unit pairs validate
            Regex(r'^([0-9]+[smhdw])+$'),
            Use(range_str_to_timedelta),
        ),
        lambda x: isinstance(x, datetime.timedelta),
    ),
})
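
# An illustrative configuration matching the schema above; the adapter
# names ('caldav', 'wordpress') are placeholders for whatever keys
# adapters.ADAPTERS actually registers:
#
#   sources:
#     - caldav: {...}
#   sinks:
#     - wordpress: {...}
#   logging:
#     level: ':WARNING,wp_cal:DEBUG'
#   range: 180d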


def load_config(file):
    """Load and validate the YAML configuration from a file, or stdin for '-'."""
    if file == '-':
        config = yaml.safe_load(sys.stdin)
    else:
        # the configuration may contain credentials, so refuse anything
        # more permissive than rw for the owner (0o600)
        if os.stat(file).st_mode & 0o777 & ~0o600:
            raise Exception('refusing to load insecure configuration file, file must have permission 0o600')
        with open(file) as fp:
            config = yaml.safe_load(fp)
    return config_schema.validate(config)


def init_logging():
    logging.getLogger().addHandler(
        logging.StreamHandler(),
    )


def set_logging_level(level: str):
    levels = {
        'NOTSET': logging.NOTSET,
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARNING': logging.WARNING,
        'WARN': logging.WARNING,
        'ERROR': logging.ERROR,
        'CRITICAL': logging.CRITICAL,
    }

    # the level string is a comma-separated list of 'module:LEVEL' pairs;
    # an empty module name addresses the root logger
    log_levels = [x.split(':') for x in level.split(',')]
    for module, module_level in log_levels:
        module = module.strip() if module else None
        module_level = module_level.strip().upper()
        if module_level not in levels:
            raise ValueError(f'invalid log level, allowed values: {repr(set(levels.keys()))}')
        logging.getLogger(module).setLevel(levels[module_level])
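
# For example, set_logging_level(':WARNING,wp_cal:DEBUG') sets the root
# logger to WARNING and the 'wp_cal' logger to DEBUG.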


@click.command()
@click.option('--config', '-c', envvar='WP_CAL_CONFIG', default='-', help='The configuration file')
@click.option('--dryrun', '-d', envvar='WP_CAL_DRYRUN', is_flag=True, help="Don't actually post any data, just show it")
@click.option('--level', '-l', envvar='WP_CAL_LEVEL', default=None, help='The log level for the application')
@click.option('--range', '-r', envvar='WP_CAL_RANGE', default=None, help='The time range from start to start + range to synchronize events')
def main(config, dryrun, level, range):
    init_logging()
    # stay verbose until the configured log level is known
    set_logging_level(':DEBUG')

    config = load_config(config)
    # command line options override the configuration file
    if level:
        config['logging']['level'] = level
    if range:
        config['range'] = range
    # re-validate so that an overriding range string is converted to a timedelta
    config = config_schema.validate(config)
    set_logging_level(config['logging']['level'])

    # instantiate one adapter per configured source/sink; each entry is a
    # single-key mapping of adapter name to adapter configuration
    sources = set()
    sinks = set()
    for source in config['sources']:
        assert len(source.keys()) == 1
        for aname, a in adapters.ADAPTERS.items():
            if aname in source:
                sources |= {a.new(source[aname])}
                break
        else:
            logger.error("couldn't find valid adapter for source configuration %s", source)
            return 1
    for sink in config['sinks']:
        assert len(sink.keys()) == 1
        for aname, a in adapters.ADAPTERS.items():
            if aname in sink:
                sinks |= {a.new(sink[aname])}
                break
        else:
            logger.error("couldn't find valid adapter for sink configuration %s", sink)
            return 1

    if not all([isinstance(x, adapters.Source) for x in sources]):
        logger.error('one or more source configurations do not implement being a source')
        return 1
    if not all([isinstance(x, adapters.Sink) for x in sinks]):
        logger.error('one or more sink configurations do not implement being a sink')
        return 1

    # log in (the list comprehension means every adapter attempts its login
    # before the results are checked)
    if not all([x.login() for x in sources | sinks]):
        logger.error('failed to log into one or more sinks or sources')
        return 1

    # gather events; tolerate individual failures, but bail out if every
    # source failed
    events = []
    source_results = []
    for source in sources:
        try:
            events += source.get_events(until=config['range'])
            source_results += [True]
        except Exception:
            logger.exception('failed to get events from source %s', source)
            source_results += [False]
    if not any(source_results):
        logger.error('event get failed for all sources')
        return 1

    # filter cancelled events
    logger.info('found %d events', len(events))
    logger.info('not syncing cancelled events')
    events = [e for e in events if e['status'] != 'cancelled']
    logger.info('syncing %d events', len(events))

    # post events
    if dryrun:
        logger.info("dryrun; would post events: %s", events)
    else:
        sink_results = []
        for sink in sinks:
            try:
                sink.post_events(events, until=config['range'])
                sink_results += [True]
            except Exception:
                logger.exception('failed to post to sink %s', sink)
                sink_results += [False]
        if not any(sink_results):
            logger.error('event post failed for all sinks')
            return 1

    logger.info("done")
    return 0


if __name__ == '__main__':
    main()
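
# Example invocations (the script name wp_cal.py is an assumption):
#
#   cat config.yaml | ./wp_cal.py                 # configuration from stdin
#   ./wp_cal.py -c config.yaml --dryrun           # show events without posting
#   WP_CAL_LEVEL=':INFO,wp_cal:DEBUG' ./wp_cal.py -c config.yaml -r 30d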