From e0f1d7948a33a4c8ba9dac25258eb42bd28fdf48 Mon Sep 17 00:00:00 2001
From: Philipp Schmidt <philipp.schmidt@xfel.eu>
Date: Fri, 15 Dec 2023 16:02:34 +0100
Subject: [PATCH] Minor text, comment and code formatting improvements

---
 src/exdf/data_reduction/builtins.py   |  5 ++++-
 src/exdf/data_reduction/red_writer.py | 18 ++++++++++--------
 2 files changed, 14 insertions(+), 9 deletions(-)

diff --git a/src/exdf/data_reduction/builtins.py b/src/exdf/data_reduction/builtins.py
index a9f3e88..965f401 100644
--- a/src/exdf/data_reduction/builtins.py
+++ b/src/exdf/data_reduction/builtins.py
@@ -199,7 +199,8 @@ class AgipdGain(ReductionMethod):
                                'explicit gain sources')
                 return
 
-            self.log.info(f'Found {domain}')
+            self.log.info(f'No detector specified, detected {domain} '
+                          f'automatically')
             agipd_sources = f'{domain}/DET/*CH0:xtdf'
 
         else:
@@ -214,6 +215,8 @@ class AgipdGain(ReductionMethod):
                 # Unfortunately HED uses a different domain for
                 # its AGIPD control devices, hardcode it here
                 # for convenience.
+                # TODO: This could also be solved via a CalCat
+                # query.
                 domain = domain.replace('_DET_', '_EXP_')
 
             control_source = f'{domain}/MDL/FPGA_COMP'
diff --git a/src/exdf/data_reduction/red_writer.py b/src/exdf/data_reduction/red_writer.py
index c2d1615..5f19fc6 100644
--- a/src/exdf/data_reduction/red_writer.py
+++ b/src/exdf/data_reduction/red_writer.py
@@ -33,8 +33,8 @@ def apply_by_key(op_name):
             assert isinstance(self, ReduceWriter)
             for source_glob, key_glob, *args in self._filter_ops(op_name):
                 for source in fnmatch.filter(self._sources, source_glob):
-                    keys = self._custom_keys.get(source,
-                                                 set(self._data[source].keys()))
+                    keys = self._custom_keys.get(
+                        source, set(self._data[source].keys()))
 
                     for key in fnmatch.filter(keys, key_glob):
                         op_func(self, source, key, *args)
@@ -116,7 +116,6 @@ class ReduceWriter(SourceDataWriter):
             raise ReduceInitError('Source may not be affected by both '
                                   'select-entries and select-xtdf operations')
 
-
         if self._rechunked_keys.keys() & self._compressed_keys.keys():
             raise ReduceInitError('Key may not be affected by both '
                                   'compress-keys and rechunk-keys')
@@ -303,10 +302,12 @@ class ReduceWriter(SourceDataWriter):
         # Keys are guaranteed to never use both custom chunking and
         # compression.
 
-        if (source, key) in self._rechunked_keys:
+        sourcekey = source, key
+
+        if sourcekey in self._rechunked_keys:
             orig_chunks = kwargs['chunks']
 
-            chunks = list(self._rechunked_keys[source, key])
+            chunks = list(self._rechunked_keys[sourcekey])
             assert len(chunks) == len(orig_chunks)
 
             for i, dim_len in enumerate(chunks):
@@ -319,14 +320,14 @@
 
             kwargs['chunks'] = tuple(chunks)
 
-        elif (source, key) in self._compressed_keys or orig_dset.compression:
+        elif sourcekey in self._compressed_keys or orig_dset.compression:
             # TODO: Maintain more of existing properties, for now it is
             # forced to use gzip and (1, *entry) chunking.
             kwargs['chunks'] = (1,) + kwargs['shape'][1:]
             kwargs['shuffle'] = True
             kwargs['compression'] = 'gzip'
             kwargs['compression_opts'] = self._compressed_keys.setdefault(
-                (source, key), orig_dset.compression_opts)
+                sourcekey, orig_dset.compression_opts)
 
         return kwargs
 
@@ -398,7 +399,8 @@ class ReduceWriter(SourceDataWriter):
     @apply_by_source('select-entries')
     def _handle_select_entries(self, source, idx_group, train_sel, entry_sel):
         if idx_group not in self._data[source].index_groups:
-            raise ReduceInitError(f'{idx_group} not index group of {source}')
+            raise ReduceInitError(
+                f'{idx_group} not an index group of {source}')
 
         self._custom_entry_masks.setdefault((source, idx_group), {}).update(
             self._get_entry_masks(source, idx_group, train_sel, entry_sel))
-- 
GitLab