diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 25f3623c..55e97a97 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -45,7 +45,7 @@ jobs: run: | python -m pip install \ --no-deps --pre --upgrade \ - numpy; \ + "watchdog!=4.0.0" numpy; \ python -m pip install \ --no-deps --upgrade \ git+https://github.com/pytroll/posttroll \ diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 5caf386f..60100efa 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -22,7 +22,7 @@ dependencies: - paramiko - responses - netifaces - - watchdog + - watchdog!=4.0.0 - s3fs - pyinotify - requests diff --git a/pytroll_collectors/tests/test_trollstalker.py b/pytroll_collectors/tests/test_trollstalker.py index 5d882287..0264035d 100644 --- a/pytroll_collectors/tests/test_trollstalker.py +++ b/pytroll_collectors/tests/test_trollstalker.py @@ -1,13 +1,24 @@ """Tests for trollstalker.""" import os import time -from threading import Thread +import pytest from posttroll.message import Message -from pytroll_collectors.trollstalker import main, stop +from pytroll_collectors.trollstalker import start_observer, stop_observer -def create_config_file(dir_to_watch, tmp_path): +LAG_SECONDS = 0.02 + + +@pytest.fixture +def dir_to_watch(tmp_path): + """Define a dir to watch.""" + dir_to_watch = tmp_path / "to_watch" + return dir_to_watch + + +@pytest.fixture +def config_file(tmp_path, dir_to_watch): """Create a config file for trollstalker.""" config = """# This config is used in Trollstalker. @@ -16,10 +27,9 @@ def create_config_file(dir_to_watch, tmp_path): directory=""" + os.fspath(dir_to_watch) + """ filepattern={path}hrpt_{platform_name}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b instruments=avhrr/3,mhs,amsu-b,amsu-a,hirs/3,hirs/4 -#stalker_log_config=/usr/local/etc/pytroll/trollstalker_logging.ini -loglevel=DEBUG -event_names=IN_CLOSE_WRITE,IN_MOVED_TO -posttroll_port=0 +loglevel=WARNING +posttroll_port=12234 +nameservers=false alias_platform_name = noaa18:NOAA-18|noaa19:NOAA-19 history=10""" config_file = tmp_path / "config.ini" @@ -28,45 +38,59 @@ def create_config_file(dir_to_watch, tmp_path): return config_file -def test_trollstalker(tmp_path, caplog): - """Test trollstalker functionality.""" - dir_to_watch = tmp_path / "to_watch" - os.makedirs(dir_to_watch) +@pytest.fixture +def messages_from_observer(config_file): + """Create an observer and yield the messages it published.""" + from posttroll.testing import patched_publisher + with patched_publisher() as messages: + obs = start_observer(["-c", os.fspath(config_file), "-C", "noaa_hrpt"]) + time.sleep(LAG_SECONDS) + yield messages + stop_observer(obs) - config_file = create_config_file(dir_to_watch, tmp_path) - thread = Thread(target=main, args=[["-c", os.fspath(config_file), "-C", "noaa_hrpt"]]) - thread.start() - time.sleep(.5) - trigger_file = dir_to_watch / "hrpt_noaa18_20230524_1017_10101.l1b" +def test_trollstalker(messages_from_observer, dir_to_watch): + """Test trollstalker functionality.""" + subdir_to_watch = dir_to_watch / "new_dir" + os.mkdir(subdir_to_watch) + + trigger_file = subdir_to_watch / "hrpt_noaa18_20230524_1017_10101.l1b" with open(trigger_file, "w") as fd: fd.write("hej") - time.sleep(.5) - stop() - thread.join() - assert "Publishing message pytroll://HRPT/l1b/dev/mystation file " in caplog.text - for line in caplog.text.split("\n"): - if "Publishing message" in line: - message = 
Message(rawstr=line.split("Publishing message ")[1])
+    time.sleep(LAG_SECONDS)
+    message = messages_from_observer[0]
+    assert message.startswith("pytroll://HRPT/l1b/dev/mystation file ")
+    message = Message(rawstr=message)
     assert message.data['platform_name'] == "NOAA-18"
     assert message.data['uri'] == os.fspath(trigger_file)
 
 
-def test_trollstalker_directory_does_not_exist(tmp_path):
+def test_trollstalker_monitored_directory_is_created(messages_from_observer, dir_to_watch):
     """Test that monitored directories are created."""
-    dir_to_watch = tmp_path / "to_watch"
+    trigger_file = dir_to_watch / "hrpt_noaa18_20230524_1017_10101.l1b"
+    with open(trigger_file, "w") as fd:
+        fd.write("hej")
+    time.sleep(LAG_SECONDS)
+    assert os.path.exists(dir_to_watch)
 
-    config_file = create_config_file(dir_to_watch, tmp_path)
-    thread = Thread(target=main, args=[["-c", os.fspath(config_file), "-C", "noaa_hrpt"]])
-    thread.start()
-    time.sleep(.5)
-    trigger_file = dir_to_watch / "hrpt_noaa18_20230524_1017_10101.l1b"
+
+def test_trollstalker_handles_moved_files(messages_from_observer, dir_to_watch, tmp_path):
+    """Test that trollstalker detects moved files."""
+    filename = "hrpt_noaa18_20230524_1017_10101.l1b"
+    trigger_file = tmp_path / filename
     with open(trigger_file, "w") as fd:
         fd.write("hej")
-    time.sleep(.5)
-    stop()
-    thread.join()
+    os.rename(trigger_file, dir_to_watch / filename)
+    time.sleep(LAG_SECONDS)
+    assert len(messages_from_observer) == 1
+    assert messages_from_observer[0].startswith("pytroll://HRPT/l1b/dev/mystation file ")
 
-    assert os.path.exists(dir_to_watch)
+
+def test_event_names_are_deprecated(config_file):
+    """Test that the event_names config option is deprecated and ignored."""
+    with open(config_file, "a") as fd:
+        fd.write("\nevent_names=IN_CLOSE_WRITE,IN_MOVED_TO,IN_CREATE\n")
+    with pytest.deprecated_call():
+        obs = start_observer(["-c", os.fspath(config_file), "-C", "noaa_hrpt"])
+    stop_observer(obs)
diff --git a/pytroll_collectors/trollstalker.py b/pytroll_collectors/trollstalker.py
index 55af5bdd..11902318 100644
--- a/pytroll_collectors/trollstalker.py
+++ b/pytroll_collectors/trollstalker.py
@@ -7,11 +7,12 @@
 import time
 from collections import OrderedDict, deque
 from configparser import RawConfigParser
+import warnings
 
-import pyinotify
+from watchdog.observers import Observer
+from watchdog.events import FileSystemEventHandler
 from posttroll.message import Message
-from posttroll.publisher import NoisyPublisher
-from pyinotify import ProcessEvent, ThreadedNotifier, WatchManager
+from posttroll.publisher import create_publisher_from_dict_config
 from pytroll_collectors import helper_functions
 from trollsift import Parser, compose
@@ -27,318 +28,50 @@ def stop():
     RUNNING = False
 
 
-class EventHandler(ProcessEvent):
-    """Event handler class for inotify.
+def main(command_args=None):
+    """Run Trollstalker.
 
-    *topic* - topic of the published messages
-    *posttroll_port* - port number to publish the messages on
-    *filepattern* - filepattern for finding information from the filename
+    Commandline parsing and stalker startup.
""" + observer = start_observer(command_args) - def __init__(self, topic, instrument, config_item, posttroll_port=0, filepattern=None, - aliases=None, tbus_orbit=False, history=0, granule_length=0, - custom_vars=None, nameservers=[], watchManager=None): # noqa - """Set up the event handler.""" - super().__init__() - - self._pub = NoisyPublisher("trollstalker_" + config_item, posttroll_port, topic, - nameservers=nameservers) - self.pub = self._pub.start() - self.topic = topic - self.info = OrderedDict() - if filepattern is None: - filepattern = '{filename}' - self.file_parser = Parser(filepattern) - self.instrument = instrument - self.aliases = aliases - self.custom_vars = custom_vars - self.tbus_orbit = tbus_orbit - self.granule_length = granule_length - self._deque = deque([], history) - self._watchManager = watchManager - self._watched_dirs = dict() - - def stop(self): - """Stop publisher.""" - self._pub.stop() - - def __clean__(self): - """Clean instance attributes.""" - self.info = OrderedDict() - - def process_IN_CLOSE_WRITE(self, event): - """Process when a file is closed.""" - logger.debug("trigger: IN_CLOSE_WRITE") - self.process(event) - - def process_IN_CLOSE_NOWRITE(self, event): - """Process when a nonwritable file is closed.""" - logger.debug("trigger: IN_CREATE") - self.process(event) - - def process_IN_MOVED_TO(self, event): - """Process when a file is closed.""" - logger.debug("trigger: IN_MOVED_TO") - self.process(event) - - def process_IN_CREATE(self, event): - """Process when a file is created.""" - logger.debug("trigger: IN_CREATE") - self.process(event) - - def process_IN_CLOSE_MODIFY(self, event): - """Process when a file is modified and closed.""" - logger.debug("trigger: IN_MODIFY") - self.process(event) - - def process_IN_DELETE(self, event): - """On delete.""" - if (event.mask & pyinotify.IN_ISDIR): - try: - try: - self._watchManager.rm_watch(self._watched_dirs[event.pathname], quiet=False) - except pyinotify.WatchManagerError: - # As the directory is deleted prior removing the watch will cause a error message - # from pyinotify. This is ok, so just pass the exception. - logger.debug("Removed watch: {}".format(event.pathname)) - finally: - del self._watched_dirs[event.pathname] - except KeyError: - logger.warning("Dir {} not watched by inotify. Can not delete watch.".format(event.pathname)) - - def process(self, event): - """Process the event.""" - # New file created and closed - if not event.dir: - logger.debug("processing %s", event.pathname) - # parse information and create self.info OrderedDict{} - self.parse_file_info(event) - if len(self.info) > 0: - # Check if this file has been recently dealt with - if event.pathname not in self._deque: - self._deque.append(event.pathname) - message = self.create_message() - logger.info("Publishing message %s", str(message)) - self.pub.send(str(message)) - else: - logger.debug("Data has been published recently, skipping.") - self.__clean__() - elif (event.mask & pyinotify.IN_ISDIR): - tmask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO | - pyinotify.IN_CREATE | pyinotify.IN_DELETE) - try: - self._watched_dirs.update(self._watchManager.add_watch(event.pathname, tmask)) - logger.debug("Added watch on dir: {}".format(event.pathname)) - except AttributeError: - logger.error("No watchmanager given. 
Can not add watch on {}".format(event.pathname)) - - def create_message(self): - """Create broadcasted message.""" - return Message(self.topic, 'file', dict(self.info)) - - def parse_file_info(self, event): - """Parse satellite and orbit information from the filename. - - Message is sent, if a matching filepattern is found. - """ - try: - logger.debug("filter: %s\t event: %s", - self.file_parser.fmt, event.pathname) - pathname_join = os.path.basename(event.pathname) - if 'origin_inotify_base_dir_skip_levels' in self.custom_vars: - pathname_list = event.pathname.split('/') - pathname_join = "/".join(pathname_list[int(self.custom_vars['origin_inotify_base_dir_skip_levels']):]) - else: - logger.debug("No origin_inotify_base_dir_skip_levels in self.custom_vars") - - self.info = OrderedDict() - self.info.update(self.file_parser.parse( - pathname_join)) - logger.debug("Extracted: %s", str(self.info)) - except ValueError: - # Filename didn't match pattern, so empty the info dict - logger.debug("Couldn't extract any useful information from filename") - self.info = OrderedDict() - else: - self.info['uri'] = event.pathname - self.info['uid'] = os.path.basename(event.pathname) - self.info['sensor'] = self.instrument.split(',') - logger.debug("self.info['sensor']: " + str(self.info['sensor'])) - - if self.tbus_orbit and "orbit_number" in self.info: - logger.debug("Changing orbit number by -1!") - self.info["orbit_number"] -= 1 - - # replace values with corresponding aliases, if any are given - if self.aliases: - info = self.info.copy() - for key in info: - if key in self.aliases: - self.info['orig_' + key] = self.info[key] - self.info[key] = self.aliases[key][str(self.info[key])] - - # add start_time and end_time if not present - try: - base_time = self.info["time"] - except KeyError: - try: - base_time = self.info["nominal_time"] - except KeyError: - base_time = self.info["start_time"] - if "start_time" not in self.info: - self.info["start_time"] = base_time - if "start_date" in self.info: - self.info["start_time"] = \ - dt.datetime.combine(self.info["start_date"].date(), - self.info["start_time"].time()) - if "end_date" not in self.info: - self.info["end_date"] = self.info["start_date"] - del self.info["start_date"] - if "end_date" in self.info: - self.info["end_time"] = \ - dt.datetime.combine(self.info["end_date"].date(), - self.info["end_time"].time()) - del self.info["end_date"] - if "end_time" not in self.info and self.granule_length > 0: - self.info["end_time"] = base_time + \ - dt.timedelta(seconds=self.granule_length) - - if "end_time" in self.info: - while self.info["start_time"] > self.info["end_time"]: - self.info["end_time"] += dt.timedelta(days=1) - - if self.custom_vars is not None: - for var_name in self.custom_vars: - var_pattern = self.custom_vars[var_name] - var_val = None - if '%' in var_pattern: - var_val = helper_functions.create_aligned_datetime_var( - var_pattern, self.info) - if var_val is None: - var_val = compose(var_pattern, self.info) - self.info[var_name] = var_val + try: + while RUNNING: + time.sleep(1) + except KeyboardInterrupt: + logger.info("Stopping TrollStalker") + finally: + stop_observer(observer) -class NewThreadedNotifier(ThreadedNotifier): - """Threaded notifier class.""" +def start_observer(command_args): + """Start observing files and process them.""" + os.environ["TZ"] = "UTC" + time.tzset() - def stop(self, *args, **kwargs): - """Stop trollstalker.""" - self._default_proc_fun.stop() - ThreadedNotifier.stop(self, *args, **kwargs) + monitored_dirs, settings 
= get_settings(command_args) + event_processor = EventProcessor(**settings) -def create_notifier(topic, instrument, posttroll_port, filepattern, - event_names, monitored_dirs, config_item, aliases=None, - tbus_orbit=False, history=0, granule_length=0, - custom_vars=None, nameservers=[]): # noqa - """Create new notifier.""" - # Event handler observes the operations in defined folder - manager = WatchManager() + event_handler = WatchdogHandler(event_processor) + observer = Observer(generate_full_events=True) - # Collect mask for events that are monitored - if not isinstance(event_names, list): - event_names = event_names.split(',') - event_mask = 0 - for event in event_names: - try: - event_mask |= getattr(pyinotify, event) - except AttributeError: - logger.warning('Event %s not found in pyinotify', str(event)) - - event_handler = EventHandler(topic, instrument, - config_item, - posttroll_port=posttroll_port, - filepattern=filepattern, - aliases=aliases, - tbus_orbit=tbus_orbit, - history=history, - granule_length=granule_length, - custom_vars=custom_vars, - nameservers=nameservers, - watchManager=manager) - - notifier = NewThreadedNotifier(manager, event_handler) - - # Add directories and event masks to watch manager for monitored_dir in monitored_dirs: os.makedirs(monitored_dir, exist_ok=True) - manager.add_watch(monitored_dir, event_mask, rec=True) - - return notifier + observer.schedule(event_handler, monitored_dir, recursive=True) + observer.start() + return observer -def parse_vars(config): - """Parse custom variables from the config. - - Aliases are given in the config as: - - {'var_': 'value'} - - where is the name of the key which value will be - added to metadata. is a trollsift pattern. - - """ - variables = OrderedDict() - - for key in config: - if 'var_' in key: - new_key = key.replace('var_', '') - var = config[key] - variables[new_key] = var - return variables - - -def main(command_args=None): - """Run Trollstalker. - - Commandline parsing and stalker startup. 
- """ - print("Setting timezone to UTC") - os.environ["TZ"] = "UTC" - time.tzset() +def stop_observer(observer): + """Stop the observer.""" + observer.stop() + observer.join() - parser = argparse.ArgumentParser() - - parser.add_argument("-d", "--monitored_dirs", dest="monitored_dirs", - nargs='+', - type=str, - default=[], - help="Names of the monitored directories " - "separated by space") - parser.add_argument("-p", "--posttroll_port", dest="posttroll_port", - default=0, type=int, - help="Local port where messages are published") - parser.add_argument("-t", "--topic", dest="topic", - type=str, - default=None, - help="Topic of the sent messages") - parser.add_argument("-c", "--configuration_file", - type=str, - help="Name of the config.ini configuration file") - parser.add_argument("-C", "--config_item", - type=str, - help="Name of the configuration item to use") - parser.add_argument("-e", "--event_names", - type=str, default=None, - help="Name of the pyinotify events to monitor") - parser.add_argument("-f", "--filepattern", - type=str, - help="Filepattern used to parse " - "satellite/orbit/date/etc information") - parser.add_argument("-i", "--instrument", - type=str, default=None, - help="Instrument name in the satellite") - parser.add_argument("-n", "--nameservers", - type=str, default=None, - help="Posttroll nameservers to register own address," - " otherwise multicasting is used") - if len(sys.argv) <= 1: - parser.print_help() - sys.exit() - else: - args = parser.parse_args(command_args) +def get_settings(command_args): + """Get the trollstalker settings.""" + args = parse_args(command_args) # Parse commandline arguments. If args are given, they override # the configuration file. @@ -430,7 +163,9 @@ def main(command_args=None): else: logging.config.fileConfig(log_config) - event_names = event_names or 'IN_CLOSE_WRITE,IN_MOVED_TO' + if event_names: + warnings.warn("Event names is deprecated and is now ignored. 
Files are detected when they are closed after writing or moved into a monitored directory.",
+                      DeprecationWarning, stacklevel=2)
 
     logger.debug("Logger started")
 
@@ -438,23 +173,236 @@ def main(command_args=None):
         monitored_dirs = [monitored_dirs]
 
     if nameservers:
-        nameservers = nameservers.split(',')
+        if nameservers.lower() == "false":
+            nameservers = False
+        else:
+            nameservers = nameservers.split(',')
     else:
         nameservers = []
 
-    # Start watching for new files
-    notifier = create_notifier(topic, instrument, posttroll_port, filepattern,
-                               event_names, monitored_dirs, config_item, aliases=aliases,
-                               tbus_orbit=tbus_orbit, history=history,
-                               granule_length=granule_length,
-                               custom_vars=custom_vars,
-                               nameservers=nameservers)
-    notifier.start()
+    settings = dict()
+    settings["topic"] = topic
+    settings["instrument"] = instrument
+    settings["config_item"] = config_item
+    settings["posttroll_port"] = posttroll_port
+    settings["filepattern"] = filepattern
+    settings["aliases"] = aliases
+    settings["tbus_orbit"] = tbus_orbit
+    settings["history_length"] = history
+    settings["granule_length"] = granule_length
+    settings["custom_vars"] = custom_vars
+    settings["nameservers"] = nameservers
+    return monitored_dirs, settings
+
+
+def parse_args(command_args):
+    """Parse the command line arguments."""
+    parser = argparse.ArgumentParser()
 
-    try:
-        while RUNNING:
-            time.sleep(1)
-    except KeyboardInterrupt:
-        logger.info("Stopping TrollStalker")
-    finally:
-        notifier.stop()
+    parser.add_argument("-d", "--monitored_dirs", dest="monitored_dirs",
+                        nargs='+',
+                        type=str,
+                        default=[],
+                        help="Names of the monitored directories "
+                             "separated by space")
+    parser.add_argument("-p", "--posttroll_port", dest="posttroll_port",
+                        default=0, type=int,
+                        help="Local port where messages are published")
+    parser.add_argument("-t", "--topic", dest="topic",
+                        type=str,
+                        default=None,
+                        help="Topic of the sent messages")
+    parser.add_argument("-c", "--configuration_file",
+                        type=str,
+                        help="Name of the config.ini configuration file")
+    parser.add_argument("-C", "--config_item",
+                        type=str,
+                        help="Name of the configuration item to use")
+    parser.add_argument("-e", "--event_names",
+                        type=str, default=None,
+                        help="Name of the pyinotify events to monitor")
+    parser.add_argument("-f", "--filepattern",
+                        type=str,
+                        help="Filepattern used to parse "
+                             "satellite/orbit/date/etc information")
+    parser.add_argument("-i", "--instrument",
+                        type=str, default=None,
+                        help="Instrument name in the satellite")
+    parser.add_argument("-n", "--nameservers",
+                        type=str, default=None,
+                        help="Posttroll nameservers to register own address,"
+                             " otherwise multicasting is used")
+
+    args = parser.parse_args(command_args)
+    return args
+
+
+class WatchdogHandler(FileSystemEventHandler):
+    """A watchdog handler to detect incoming files."""
+
+    def __init__(self, processor):
+        """Set up the handler."""
+        self.processor = processor
+
+    def on_closed(self, event):
+        """Trigger processing on closed write."""
+        self.processor.process(event)
+        logger.debug("Processed closed file: %s", event.src_path)
+
+    def on_moved(self, event):
+        """Trigger processing on move."""
+        self.processor.process(event)
+
+
+class EventProcessor:
+    """A processor for events."""
+
+    def __init__(self, topic, instrument, config_item, posttroll_port=0, filepattern=None,
+                 aliases=None, tbus_orbit=False, history_length=0, granule_length=0,
+                 custom_vars=None, nameservers=[]):  # noqa
+        """Set up the event processor."""
+        pub_settings = dict(name="trollstalker_" + config_item,
+                            port=posttroll_port,
+                            nameservers=nameservers)
+        self.pub = 
create_publisher_from_dict_config(pub_settings)
+
+        self.pub.start()
+        self.topic = topic
+        if filepattern is None:
+            filepattern = '{filename}'
+        self.file_parser = Parser(filepattern)
+        self.instrument = instrument
+        self.aliases = aliases
+        self.custom_vars = custom_vars
+        self.tbus_orbit = tbus_orbit
+        self.granule_length = granule_length
+        self._history = deque([], history_length)
+
+    def process(self, event):
+        """Process the event."""
+        try:
+            pathname = event.dest_path or event.src_path
+        except AttributeError:
+            pathname = event.src_path
+        logger.debug("processing %s", pathname)
+        info = self.parse_file_info(pathname)
+        if len(info) > 0:
+            # Check if this file has been recently dealt with
+            if pathname not in self._history:
+                self._history.append(pathname)
+                message = self.create_message(info)
+                logger.info("Publishing message %s", str(message))
+                self.pub.send(str(message))
+            else:
+                logger.debug("Data has been published recently, skipping.")
+
+    def create_message(self, info):
+        """Create broadcasted message."""
+        return Message(self.topic, 'file', dict(info))
+
+    def parse_file_info(self, pathname):
+        """Parse satellite and orbit information from the filename.
+
+        A message is sent only if the filename matches the configured filepattern.
+        """
+        logger.debug("filter: %s\t event: %s", self.file_parser.fmt, pathname)
+        pathname_join = os.path.basename(pathname)
+        if 'origin_inotify_base_dir_skip_levels' in self.custom_vars:
+            # TODO: clarify/rework this option; it drops the first
+            # origin_inotify_base_dir_skip_levels path components before parsing
+            pathname_list = pathname.split('/')
+            pathname_join = "/".join(pathname_list[int(self.custom_vars['origin_inotify_base_dir_skip_levels']):])
+        else:
+            logger.debug("No origin_inotify_base_dir_skip_levels in self.custom_vars")
+
+        info = OrderedDict()
+
+        try:
+            info.update(self.file_parser.parse(pathname_join))
+            logger.debug("Extracted info from filename: %s", str(info))
+        except ValueError:
+            # Filename didn't match pattern, so empty the info dict
+            logger.debug("Couldn't extract any useful information from filename")
+        else:
+            info['uri'] = pathname
+            info['uid'] = os.path.basename(pathname)
+            info['sensor'] = self.instrument.split(',')
+            logger.debug("info['sensor']: " + str(info['sensor']))
+
+            if self.tbus_orbit and "orbit_number" in info:
+                logger.debug("Changing orbit number by -1!")
+                info["orbit_number"] -= 1
+
+            # replace values with corresponding aliases, if any are given
+            if self.aliases:
+                keys = info.copy().keys()
+                for key in keys:
+                    if key in self.aliases:
+                        info['orig_' + key] = info[key]
+                        info[key] = self.aliases[key][str(info[key])]
+
+            # add start_time and end_time if not present
+            try:
+                base_time = info["time"]
+            except KeyError:
+                try:
+                    base_time = info["nominal_time"]
+                except KeyError:
+                    base_time = info["start_time"]
+            if "start_time" not in info:
+                info["start_time"] = base_time
+            if "start_date" in info:
+                info["start_time"] = \
+                    dt.datetime.combine(info["start_date"].date(),
+                                        info["start_time"].time())
+                if "end_date" not in info:
+                    info["end_date"] = info["start_date"]
+                del info["start_date"]
+            if "end_date" in info:
+                info["end_time"] = \
+                    dt.datetime.combine(info["end_date"].date(),
+                                        info["end_time"].time())
+                del info["end_date"]
+            if "end_time" not in info and self.granule_length > 0:
+                info["end_time"] = base_time + \
+                    dt.timedelta(seconds=self.granule_length)
+
+            if "end_time" in info:
+                while info["start_time"] > info["end_time"]:
+                    info["end_time"] += dt.timedelta(days=1)
+
+            if self.custom_vars is not None:
+                for var_name in self.custom_vars:
+                    var_pattern = self.custom_vars[var_name]
+                    var_val = None
+                    if '%' in 
var_pattern:
+                        var_val = helper_functions.create_aligned_datetime_var(
+                            var_pattern, info)
+                    if var_val is None:
+                        var_val = compose(var_pattern, info)
+                    info[var_name] = var_val
+        return info
+
+    def stop(self):
+        """Stop the publisher."""
+        self.pub.stop()
+
+
+def parse_vars(config):
+    """Parse custom variables from the config.
+
+    Custom variables are given in the config as:
+
+    {'var_<name>': '<value>'}
+
+    where <name> is the name of the key whose value will be
+    added to the metadata, and <value> is a trollsift pattern.
+
+    """
+    variables = OrderedDict()
+
+    for key in config:
+        if 'var_' in key:
+            new_key = key.replace('var_', '')
+            var = config[key]
+            variables[new_key] = var
+    return variables
diff --git a/setup.py b/setup.py
index 5d1d69a2..ec9f0d89 100644
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,6 @@
         'pyresample',
         'pytroll-schedule',
         'watchdog',
-        'pyinotify',
     ],
     's3stalker': [
         's3fs',
@@ -46,7 +45,7 @@
         'netifaces',
     ],
     'trollstalker': [
-        'pyinotify',
+        'watchdog!=4.0.0',
    ],
     's3_segment_gatherer': [
         'fsspec'
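
For reference, a minimal usage sketch (not part of the patch) of the refactored start_observer/stop_observer entry points, mirroring how the updated tests drive them. The config path and the "noaa_hrpt" section name are illustrative placeholders, and patched_publisher (from posttroll, as used in the tests) is only needed to capture the messages instead of publishing them.

import time

from posttroll.testing import patched_publisher
from pytroll_collectors.trollstalker import start_observer, stop_observer

# "/path/to/config.ini" and "noaa_hrpt" are hypothetical; point them at a real
# trollstalker config file and section.
with patched_publisher() as messages:
    observer = start_observer(["-c", "/path/to/config.ini", "-C", "noaa_hrpt"])
    try:
        time.sleep(0.02)  # give the watchdog observer a moment to emit events
        # ... create or move files into the monitored directory here ...
    finally:
        stop_observer(observer)
    print(messages)  # raw posttroll message strings captured instead of published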