diff --git a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
index 34038d771f0da0b1afb4991150e5aec85f8a1cb9..c0eb25b22b52d12adb081fa04e2228eb11a2df5b 100644
--- a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
+++ b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
@@ -124,7 +124,7 @@
     "import matplotlib\n",
     "import matplotlib.pyplot as plt\n",
     "import yaml\n",
-    "from extra_data import RunDirectory, stack_detector_data\n",
+    "from extra_data import RunDirectory, stack_detector_data, by_id\n",
     "from extra_geom import AGIPD_1MGeometry, AGIPD_500K2GGeometry\n",
     "from matplotlib import cm as colormap\n",
     "from matplotlib.colors import LogNorm\n",
@@ -279,6 +279,45 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
+   "source": [
+    "if use_ppu_device:\n",
+    "    # Obtain trains to process if using a pulse picker device.\n",
+    "    dc = RunDirectory(in_folder / f'r{run:04d}')\n",
+    "\n",
+    "    # Will throw an uncaught exception if the device is wrong.\n",
+    "    seq_start = dc[use_ppu_device, 'trainTrigger.sequenceStart.value'].ndarray()\n",
+    "\n",
+    "    # The trains picked are the unique values of trainTrigger.sequenceStart\n",
+    "    # minus the first (previous trigger before this run).\n",
+    "    train_ids = np.unique(seq_start)[1:] + ppu_train_offset\n",
+    "\n",
+    "    print(f'PPU device {use_ppu_device} triggered for {len(train_ids)} trains')\n",
+    "\n",
+    "    # Since we got the DataCollection already, narrow down the files we open.\n",
+    "    # This hardcodes the receiver_id and path_template parameters for now; the\n",
+    "    # hardcoding will go away once the entire notebook is moved to EXtra-data.\n",
+    "    subdc = dc.select_trains(by_id[train_ids]).select(f'{karabo_id}/DET/*CH0:xtdf')\n",
+    "    sequences = sorted({int(f.filename[-8:-3]) for f in subdc.files})\n",
+    "\n",
+    "elif train_ids != [-1]:\n",
+    "    # Specific trains passed by parameter, convert to ndarray.\n",
+    "    train_ids = np.array(train_ids)\n",
+    "\n",
+    "    print(f'Processing up to {len(train_ids)} manually selected trains')\n",
+    "else:\n",
+    "    # Process all trains.\n",
+    "    train_ids = None\n",
+    "\n",
+    "    print('Processing all valid trains')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
    "source": [
     "# set everything up filewise\n",
     "mapped_files, _, total_sequences, _, _ = cal_tools.tools.map_modules_from_folder(\n",
@@ -385,33 +424,6 @@
     "print(f\"• Photon Energy: {photon_energy}\")"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "if use_ppu_device:\n",
-    "    # Obtain trains to process if using a pulse picker device.\n",
-    "    run = RunDirectory(in_folder / f'r{run:04d}')\n",
-    "\n",
-    "    # Will throw an uncaught exception if the device is wrong.\n",
-    "    seq_start = run[use_ppu_device, 'trainTrigger.sequenceStart.value'].ndarray()\n",
-    "\n",
-    "    # The trains picked are the unique values of trainTrigger.sequenceStart\n",
-    "    # minus the first (previous trigger before this run).\n",
-    "    train_ids = np.unique(seq_start)[1:] + ppu_train_offset\n",
-    "\n",
-    "    print(f'PPU device {use_ppu_device} triggered for {len(train_ids)} trains')\n",
-    "\n",
-    "elif train_ids[0] != [-1]:\n",
-    "    # Specific trains passed by parameter, convert to ndarray.\n",
-    "    train_ids = np.array(train_ids)\n",
-    "else:\n",
-    "    # Process all trains.\n",
-    "    train_ids = None"
-   ]
-  },
   {
    "cell_type": "code",
    "execution_count": null,
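
For reference, a minimal sketch of the EXtra-data train-selection pattern the added cell relies on, runnable outside the notebook. The run path, PPU device name, and detector source pattern below are placeholder assumptions, not values taken from this diff:

    import numpy as np
    from extra_data import RunDirectory, by_id

    # Placeholder run directory and PPU device name -- hypothetical values.
    dc = RunDirectory('/gpfs/exfel/exp/SPB/202101/p900210/raw/r0010')
    ppu_device = 'SPB_RR_SYS/MDL/PULSE_PICKER'

    # Trains picked by the PPU: the unique sequenceStart values, dropping the
    # first one (the trigger left over from before this run).
    seq_start = dc[ppu_device, 'trainTrigger.sequenceStart.value'].ndarray()
    train_ids = np.unique(seq_start)[1:]

    # Narrow the DataCollection to those trains and to the detector modules,
    # then recover sequence numbers from file names such as
    # 'RAW-R0010-AGIPD00-S00003.h5' (characters [-8:-3] are the counter).
    subdc = dc.select_trains(by_id[train_ids]).select('SPB_DET_AGIPD1M-1/DET/*CH0:xtdf')
    sequences = sorted({int(f.filename[-8:-3]) for f in subdc.files})
    print(sequences)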