Commit e0f1d794 authored by Philipp Schmidt

Minor text, comment and code formatting improvements

parent 0d9c18ea
1 merge request: !6 Simplify reduction operation implementations and error handling
@@ -199,7 +199,8 @@ class AgipdGain(ReductionMethod):
                            'explicit gain sources')
             return
 
-        self.log.info(f'Found {domain}')
+        self.log.info(f'No detector specified, detected {domain} '
+                      f'automatically')
         agipd_sources = f'{domain}/DET/*CH0:xtdf'
 
     else:
@@ -214,6 +215,8 @@ class AgipdGain(ReductionMethod):
         # Unfortunately HED uses a different domain for
         # its AGIPD control devices, hardcode it here
         # for convenience.
+        # TODO: This could also be solved via a CalCat
+        # query.
         domain = domain.replace('_DET_', '_EXP_')
         control_source = f'{domain}/MDL/FPGA_COMP'
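For context on the hunk above: the detector domain is used both to build the instrument source glob and, after the _DET_/_EXP_ substitution, the control source. A minimal sketch of that mapping; the domain value here is a hypothetical example, not taken from the commit:

# Illustrative only; the domain string is a made-up example.
domain = 'HED_DET_AGIPD500K2G'
agipd_sources = f'{domain}/DET/*CH0:xtdf'

# HED control devices live under an _EXP_ domain, hence the substitution.
control_domain = domain.replace('_DET_', '_EXP_')
control_source = f'{control_domain}/MDL/FPGA_COMP'
# control_source == 'HED_EXP_AGIPD500K2G/MDL/FPGA_COMP'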
@@ -33,8 +33,8 @@ def apply_by_key(op_name):
         assert isinstance(self, ReduceWriter)
         for source_glob, key_glob, *args in self._filter_ops(op_name):
             for source in fnmatch.filter(self._sources, source_glob):
-                keys = self._custom_keys.get(source,
-                                             set(self._data[source].keys()))
+                keys = self._custom_keys.get(
+                    source, set(self._data[source].keys()))
 
                 for key in fnmatch.filter(keys, key_glob):
                     op_func(self, source, key, *args)
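The call reformatted above falls back to all keys of a source when no custom key set is configured for it. A self-contained sketch of this glob dispatch pattern, with hypothetical stand-ins for the surrounding ReduceWriter state:

import fnmatch

def apply_ops(ops, sources, data, custom_keys, op_func):
    # ops: iterable of (source_glob, key_glob, *args) tuples.
    for source_glob, key_glob, *args in ops:
        for source in fnmatch.filter(sources, source_glob):
            # Prefer an explicitly configured key set, else all keys.
            keys = custom_keys.get(source, set(data[source].keys()))
            for key in fnmatch.filter(keys, key_glob):
                op_func(source, key, *args)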
......@@ -116,7 +116,6 @@ class ReduceWriter(SourceDataWriter):
raise ReduceInitError('Source may not be affected by both '
'select-entries and select-xtdf operations')
if self._rechunked_keys.keys() & self._compressed_keys.keys():
raise ReduceInitError('Key may not be affected by both '
'compress-keys and rechunk-keys')
@@ -303,10 +302,12 @@ class ReduceWriter(SourceDataWriter):
 
         # Keys are guaranteed to never use both custom chunking and
         # compression.
-        if (source, key) in self._rechunked_keys:
+        sourcekey = source, key
+
+        if sourcekey in self._rechunked_keys:
             orig_chunks = kwargs['chunks']
 
-            chunks = list(self._rechunked_keys[source, key])
+            chunks = list(self._rechunked_keys[sourcekey])
             assert len(chunks) == len(orig_chunks)
 
             for i, dim_len in enumerate(chunks):
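The loop truncated at the end of this hunk presumably resolves placeholder entries in the requested chunk shape against the original chunks. A sketch of one plausible convention (None keeps the original chunk size); this convention is an assumption, not taken from the commit:

def resolve_chunks(requested, orig_chunks):
    # Hypothetical helper; None in the requested shape is assumed to
    # mean 'keep the original chunk size for this dimension'.
    chunks = list(requested)
    assert len(chunks) == len(orig_chunks)
    for i, dim_len in enumerate(chunks):
        if dim_len is None:
            chunks[i] = orig_chunks[i]
    return tuple(chunks)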
@@ -319,14 +320,14 @@
             kwargs['chunks'] = tuple(chunks)
 
-        elif (source, key) in self._compressed_keys or orig_dset.compression:
+        elif sourcekey in self._compressed_keys or orig_dset.compression:
             # TODO: Maintain more of existing properties, for now it is
             # forced to use gzip and (1, *entry) chunking.
             kwargs['chunks'] = (1,) + kwargs['shape'][1:]
             kwargs['shuffle'] = True
             kwargs['compression'] = 'gzip'
             kwargs['compression_opts'] = self._compressed_keys.setdefault(
-                (source, key), orig_dset.compression_opts)
+                sourcekey, orig_dset.compression_opts)
 
         return kwargs
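The kwargs assembled in this branch correspond to HDF5 dataset creation options; the (1, *entry) chunking mentioned in the TODO makes each entry its own chunk. A sketch of how such kwargs map onto h5py, with illustrative shape and compression values:

import h5py
import numpy as np

kwargs = dict(
    shape=(100, 512, 128),   # illustrative dataset shape
    dtype=np.float32,
    chunks=(1, 512, 128),    # (1, *entry) chunking
    shuffle=True,            # byte shuffle improves gzip ratios
    compression='gzip',
    compression_opts=1,      # gzip level, e.g. carried over from the source
)

with h5py.File('reduced.h5', 'w') as f:
    f.create_dataset('data', **kwargs)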
......@@ -398,7 +399,8 @@ class ReduceWriter(SourceDataWriter):
@apply_by_source('select-entries')
def _handle_select_entries(self, source, idx_group, train_sel, entry_sel):
if idx_group not in self._data[source].index_groups:
raise ReduceInitError(f'{idx_group} not index group of {source}')
raise ReduceInitError(
f'{idx_group} not an index group of {source}')
self._custom_entry_masks.setdefault((source, idx_group), {}).update(
self._get_entry_masks(source, idx_group, train_sel, entry_sel))
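The setdefault/update idiom at the end of this hunk merges newly computed masks into any recorded earlier for the same (source, index group) pair. A minimal sketch with hypothetical names:

def record_masks(masks, source, idx_group, new_masks):
    # new_masks: mapping of train ID -> boolean entry mask; later calls
    # for the same (source, idx_group) pair extend the same dict.
    masks.setdefault((source, idx_group), {}).update(new_masks)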