From eb739824818c71eeea49f294dbc3f8ab7d4ebb80 Mon Sep 17 00:00:00 2001 From: leloup314 Date: Thu, 27 Oct 2022 12:56:34 +0200 Subject: [PATCH 01/10] ENH: add rate method to calc particle rate for IrradIon --- irrad_control/ions/__init__.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/irrad_control/ions/__init__.py b/irrad_control/ions/__init__.py index 80c0bc48..c0ea80dc 100644 --- a/irrad_control/ions/__init__.py +++ b/irrad_control/ions/__init__.py @@ -4,6 +4,8 @@ import numpy as np from importlib import import_module +from irrad_control.analysis.constants import elementary_charge + class IrradIon(object): @@ -75,6 +77,24 @@ def _select_data(self, data_type, at_energy=None, at_index=None, as_dict=False, return _data + def rate(self, current): + """ + Returns the *rate* in particles / second, calculated from *current* in Ampere. + For IrradIons with n_charge = 1 current / elementary charge and rate are the same + + Parameters + ---------- + current : float + Ion beam current in Ampere + + Returns + ------- + ion rate + Number of ions per second + """ + # Ions per second + return current / (self.n_charge * elementary_charge) + def ekin_range(self): """ Return kinetic energy range as a tuple in MeV @@ -114,6 +134,14 @@ def hardness_factor(self, at_energy=None, at_index=None, as_dict=False, return_i # Generate all ions def get_ions(): + """ + Returns a dict with all available IrradIon.name, IrradIon key-value pairs + + Returns + ------- + dict + dict with IrradIon.names as keys and the respective IrradIon as value + """ ions = [] for ion in os.listdir(os.path.dirname(__file__)): try: From a4c24c9589fd0eb73d8a2ec30a5bd1bde2831391 Mon Sep 17 00:00:00 2001 From: leloup314 Date: Thu, 27 Oct 2022 16:02:08 +0200 Subject: [PATCH 02/10] ENH: make formulas valid for generic ions --- irrad_control/analysis/constants.py | 9 +++++ irrad_control/analysis/formulas.py | 54 +++++++++++++---------------- 2 files changed, 33 insertions(+), 30 deletions(-) diff --git a/irrad_control/analysis/constants.py b/irrad_control/analysis/constants.py index d8897785..cd4daec8 100644 --- a/irrad_control/analysis/constants.py +++ b/irrad_control/analysis/constants.py @@ -8,3 +8,12 @@ # nano prefix nano = 1e-9 + +# Conversion factor for MeV/g to Mrad, 1 eV = 1.602e-19 J, 1 rad = 0.01 J/kg +# -> MeV / g = 1e6 * 1.602e-19 J / 1e-3 kg +# -> MeV / g = 1e9 * 1.602e-19 J / kg +# -> MeV / g = 1e9 * 1.602e-19 * 1e2 rad +# -> MeV / g = 1e11 * 1.602e-19 rad +# -> Mev / g = 1e5 * 1.602e-19 Mrad +# -> Mev / g = 1e5 * elementary_charge * Mrad +MEV_PER_GRAM_TO_MRAD = 1e5 * elementary_charge diff --git a/irrad_control/analysis/formulas.py b/irrad_control/analysis/formulas.py index 888586fb..659a6eb7 100644 --- a/irrad_control/analysis/formulas.py +++ b/irrad_control/analysis/formulas.py @@ -1,51 +1,51 @@ """Collection of analysis functions""" -import irrad_control.analysis.constants as irrad_consts import numpy as np +import irrad_control.analysis.constants as irrad_consts -def tid_scan(proton_fluence, stopping_power): +def tid_per_scan(ion_fluence, stopping_power): """ Parameters ---------- - proton_fluence: float - Number of protons per square centimeter + ion_fluence: float + Number of ions per square centimeter stopping_power: - Total stopping power of the protons in MeV cm^2 / g + Total stopping power of the ions in MeV cm^2 / g Returns ------- Total ionizing dose in Mrad """ - return 1e5 * irrad_consts.elementary_charge * proton_fluence * stopping_power + return irrad_consts.MEV_PER_GRAM_TO_MRAD * 
ion_fluence * stopping_power -def tid_rate_scan(proton_flux, stopping_power): +def tid_rate(ion_rate, stopping_power): """ Parameters ---------- - proton_flux: float - Number of protons per square centimeter per second + ion_rate: float + Number of ions per second stopping_power: - Total stopping power of the protons in MeV cm^2 / g + Total stopping power of the ions in MeV cm^2 / g Returns ------- - Total ionizing dose in Mrad + Total ionizing dose rate in Mrad/s """ - return 1e5 * irrad_consts.elementary_charge * proton_flux * stopping_power + return irrad_consts.MEV_PER_GRAM_TO_MRAD * ion_rate * stopping_power -def proton_fluence_scan(proton_current, scan_step, scan_speed): +def fluence_per_scan(ion_current, ion_n_charge, scan_step, scan_speed): """ Parameters ---------- - proton_current: float - Proton beam current in A + current: float + Ion beam current in A scan_step: float Separation between scanned rows in mm @@ -55,34 +55,28 @@ def proton_fluence_scan(proton_current, scan_step, scan_speed): Returns ------- - Fluence in protons / cm^2 delivered. + Fluence in ions / cm^2 delivered. """ - return proton_current / (irrad_consts.elementary_charge * scan_speed * scan_step * 1e-2) + return ion_current / (ion_n_charge * irrad_consts.elementary_charge * scan_speed * scan_step * 1e-2) -def proton_flux_scan(proton_current, scan_step, scan_speed, scan_duration): +def niel_rate(ion_rate, hardness_factor): """ Parameters ---------- - proton_current: float - Proton beam current in A - - scan_step: float - Separation between scanned rows in mm / cm / m - - scan_speed: float - Speed with which rows are scanned in mm/s / cm/s / m/s + ion_rate: float + Ion rate in particles / s - scan_duration: - Duration which is needed to scan from first row to last row + hardness_factor: + Hardness factor to scale damage to neutron eqivalents Returns ------- - Proton flux in protons/cm^2/s + 1 MeV neutron eqivalent damages / s """ - return proton_fluence_scan(proton_current, scan_step, scan_speed) / scan_duration + return ion_rate * hardness_factor def time_scan(scan_area, scan_step, scan_speed): From 516be19c3e73b455c7fc33badcaa7527fd45cf76 Mon Sep 17 00:00:00 2001 From: leloup314 Date: Thu, 27 Oct 2022 16:35:57 +0200 Subject: [PATCH 03/10] WIP: renaming in favor of generic ions -> remove explicit proton --- irrad_control/analysis/damage.py | 30 ++++++++++---------- irrad_control/analysis/dtype.py | 18 ++++++------ irrad_control/analysis/formulas.py | 8 +++--- irrad_control/processes/converter.py | 42 ++++++++++++++-------------- irrad_control/processes/gui.py | 2 +- 5 files changed, 50 insertions(+), 50 deletions(-) diff --git a/irrad_control/analysis/damage.py b/irrad_control/analysis/damage.py index ab1b8635..b6ffa528 100644 --- a/irrad_control/analysis/damage.py +++ b/irrad_control/analysis/damage.py @@ -13,7 +13,7 @@ def analyse_radiation_damage(data, config=None): bins = (100, 100) # Dict that holds results and error maps; bin centers - results = {r: None for r in ('proton', 'neq', 'tid')} + results = {r: None for r in ('primary', 'neq', 'tid')} errors = {e: None for e in results} bin_centers = {'x': None, 'y': None} @@ -42,7 +42,7 @@ def analyse_radiation_damage(data, config=None): # Initialize damage and error maps if nfile == 0: - results['proton'], errors['proton'], bin_centers['x'], bin_centers['y'] = fluence.generate_fluence_map(beam_data=data_part[server]['Beam'], + results['primary'], errors['primary'], bin_centers['x'], bin_centers['y'] = 
fluence.generate_fluence_map(beam_data=data_part[server]['Beam'], scan_data=data_part[server]['Scan'], beam_sigma=beam_sigma, bins=bins) @@ -50,13 +50,13 @@ def analyse_radiation_damage(data, config=None): if server_config['daq']['kappa'] is None: del results['neq'] else: - results['neq'] = results['proton'] * server_config['daq']['kappa']['nominal'] + results['neq'] = results['primary'] * server_config['daq']['kappa']['nominal'] print(server_config['daq']['stopping_power'], type(server_config['daq']['stopping_power'])) if server_config['daq']['stopping_power'] is None: del results['tid'] else: - results['tid'] = formulas.tid_scan(proton_fluence=results['proton'], stopping_power=server_config['daq']['stopping_power']) + results['tid'] = formulas.tid_per_scan(primary_fluence=results['primary'], stopping_power=server_config['daq']['stopping_power']) continue @@ -65,23 +65,23 @@ def analyse_radiation_damage(data, config=None): beam_sigma=beam_sigma, bins=bins) # Add to overall map - results['proton'] += fluence_map_part - errors['proton'] = (errors['proton']**2 + fluence_map_part_error**2)**.5 + results['primary'] += fluence_map_part + errors['primary'] = (errors['primary']**2 + fluence_map_part_error**2)**.5 # Add to eqivalent fluence map if 'neq' in results: - results['neq'] += results['proton'] * server_config['daq']['kappa']['nominal'] - errors['neq'] = ((server_config['daq']['kappa']['nominal'] * errors['proton'])**2 + (results['proton'] * server_config['daq']['kappa']['sigma'])**2)**0.5 + results['neq'] += results['primary'] * server_config['daq']['kappa']['nominal'] + errors['neq'] = ((server_config['daq']['kappa']['nominal'] * errors['primary'])**2 + (results['primary'] * server_config['daq']['kappa']['sigma'])**2)**0.5 if 'tid' in results: - results['tid'] += formulas.tid_scan(proton_fluence=results['proton'], stopping_power=server_config['daq']['stopping_power']) - errors['tid'] = formulas.tid_scan(proton_fluence=errors['proton'], stopping_power=server_config['daq']['stopping_power']) + results['tid'] += formulas.tid_per_scan(primary_fluence=results['primary'], stopping_power=server_config['daq']['stopping_power']) + errors['tid'] = formulas.tid_per_scan(primary_fluence=errors['primary'], stopping_power=server_config['daq']['stopping_power']) else: server = config['name'] - results['proton'], errors['proton'], bin_centers['x'], bin_centers['y'] = fluence.generate_fluence_map(beam_data=data[server]['Beam'], + results['primary'], errors['primary'], bin_centers['x'], bin_centers['y'] = fluence.generate_fluence_map(beam_data=data[server]['Beam'], scan_data=data[server]['Scan'], beam_sigma=beam_sigma, bins=bins) @@ -89,14 +89,14 @@ def analyse_radiation_damage(data, config=None): if config['daq']['kappa'] is None: del results['neq'] else: - results['neq'] = results['proton'] * config['daq']['kappa']['nominal'] - errors['neq'] = ((config['daq']['kappa']['nominal'] * errors['proton'])**2 + (results['proton'] * config['daq']['kappa']['sigma'])**2)**.5 + results['neq'] = results['primary'] * config['daq']['kappa']['nominal'] + errors['neq'] = ((config['daq']['kappa']['nominal'] * errors['primary'])**2 + (results['primary'] * config['daq']['kappa']['sigma'])**2)**.5 if config['daq']['stopping_power'] is None: del results['tid'] else: - results['tid'] = formulas.tid_scan(proton_fluence=results['proton'], stopping_power=config['daq']['stopping_power']) - errors['tid'] = formulas.tid_scan(proton_fluence=errors['proton'], stopping_power=config['daq']['stopping_power']) + results['tid'] = 
formulas.tid_per_scan(primary_fluence=results['primary'], stopping_power=config['daq']['stopping_power']) + errors['tid'] = formulas.tid_per_scan(primary_fluence=errors['primary'], stopping_power=config['daq']['stopping_power']) if any(a is None for a in (list(bin_centers.values()) + list(results.values()))): raise ValueError('Uninitialized values! Something went wrong - maybe files not found?') diff --git a/irrad_control/analysis/dtype.py b/irrad_control/analysis/dtype.py index 0f0fcfb7..9e2fa951 100644 --- a/irrad_control/analysis/dtype.py +++ b/irrad_control/analysis/dtype.py @@ -37,8 +37,8 @@ ('row_stop_y', ' Date: Thu, 27 Oct 2022 17:02:41 +0200 Subject: [PATCH 04/10] WIP: renaming in favor of generic ions -> remove explicit proton --- irrad_control/analysis/damage.py | 11 ++++++---- irrad_control/analysis/fluence.py | 34 +++++++++++++++--------------- irrad_control/analysis/plotting.py | 18 ++++++++-------- 3 files changed, 33 insertions(+), 30 deletions(-) diff --git a/irrad_control/analysis/damage.py b/irrad_control/analysis/damage.py index b6ffa528..e46cf989 100644 --- a/irrad_control/analysis/damage.py +++ b/irrad_control/analysis/damage.py @@ -21,6 +21,7 @@ def analyse_radiation_damage(data, config=None): if config is None: server = None # Only allow files with exactly one server for multipart to avoid adding unrelated fluence maps + ion_name = None # Loop over generator and get partial data files for nfile, data_part, config_part, session_basename in data: @@ -35,6 +36,7 @@ def analyse_radiation_damage(data, config=None): # Only allow one fixed server for multipart if server is None: server = server_config['name'] + ion_name = server_config['daq']['ion'] if server not in data_part: raise KeyError(f"Server '{server}' not present in file {session_basename}!") @@ -80,6 +82,7 @@ def analyse_radiation_damage(data, config=None): else: server = config['name'] + ion_name = config['daq']['ion'] results['primary'], errors['primary'], bin_centers['x'], bin_centers['y'] = fluence.generate_fluence_map(beam_data=data[server]['Beam'], scan_data=data[server]['Scan'], @@ -123,16 +126,16 @@ def analyse_radiation_damage(data, config=None): is_dut = damage_map.shape == dut_map.shape - fig, _ = plotting.plot_damage_map_3d(damage_map=damage_map, map_centers_x=centers_x, map_centers_y=centers_y, contour=not is_dut, damage=damage, server=server, dut=is_dut) + fig, _ = plotting.plot_damage_map_3d(damage_map=damage_map, map_centers_x=centers_x, map_centers_y=centers_y, contour=not is_dut, damage=damage, ion_name=ion_name, server=server, dut=is_dut) figs.append(fig) - fig, _ = plotting.plot_damage_error_3d(damage_map=damage_map, error_map=errors[damage] if not is_dut else dut_error_map, map_centers_x=centers_x, map_centers_y=centers_y, contour=not is_dut, damage=damage, server=server, dut=is_dut) + fig, _ = plotting.plot_damage_error_3d(damage_map=damage_map, error_map=errors[damage] if not is_dut else dut_error_map, map_centers_x=centers_x, map_centers_y=centers_y, contour=not is_dut, damage=damage, ion_name=ion_name, server=server, dut=is_dut) figs.append(fig) - fig, _ = plotting.plot_damage_map_2d(damage_map=damage_map, map_centers_x=centers_x, map_centers_y=centers_y, damage=damage, server=server, dut=is_dut) + fig, _ = plotting.plot_damage_map_2d(damage_map=damage_map, map_centers_x=centers_x, map_centers_y=centers_y, damage=damage, ion_name=ion_name, server=server, dut=is_dut) figs.append(fig) - fig, _ = plotting.plot_damage_map_contourf(damage_map=damage_map, map_centers_x=centers_x, 
map_centers_y=centers_y, damage=damage, server=server, dut=is_dut) + fig, _ = plotting.plot_damage_map_contourf(damage_map=damage_map, map_centers_x=centers_x, map_centers_y=centers_y, damage=damage, ion_name=ion_name, server=server, dut=is_dut) figs.append(fig) logging.info("Finished plotting.") diff --git a/irrad_control/analysis/fluence.py b/irrad_control/analysis/fluence.py index dac07c3a..1a52915e 100644 --- a/irrad_control/analysis/fluence.py +++ b/irrad_control/analysis/fluence.py @@ -88,7 +88,7 @@ def generate_fluence_map(beam_data, scan_data, beam_sigma, bins=(100, 100)): # Take sqrt of error map squared fluence_map_error = np.sqrt(fluence_map_error) - # Scale from protons / mm² (intrinsic unit) to protons / cm² + # Scale from ions / mm² (intrinsic unit) to ions / cm² fluence_map *= 100 fluence_map_error *= 100 @@ -403,7 +403,7 @@ def _process_row_wait(row_data, wait_beam_data, fluence_map, fluence_map_error, wait_mu_y = row_data['row_start_y'] - scan_y_offset # Add variation to the uncertainty - wait_protons_std = np.std(wait_beam_data['beam_current']) + wait_ions_std = np.std(wait_beam_data['beam_current']) # Loop over currents and apply Gauss kernel at given position for i in range(wait_beam_data.shape[0] - 1): @@ -415,16 +415,16 @@ def _process_row_wait(row_data, wait_beam_data, fluence_map, fluence_map_error, # Calculate how many seconds this current was present while waiting wait_interval = wait_beam_data[i+1]['timestamp'] - wait_beam_data[i]['timestamp'] - # Integrate over *wait_interval* to obtain number of protons induced - wait_protons = wait_current * wait_interval / elementary_charge - wait_protons_error = wait_current_error * wait_interval / elementary_charge - wait_protons_error = (wait_protons_error**2 + wait_protons_std**2)**.5 + # Integrate over *wait_interval* to obtain number of ions induced + wait_ions = wait_current * wait_interval / elementary_charge + wait_ions_error = wait_current_error * wait_interval / elementary_charge + wait_ions_error = (wait_ions_error**2 + wait_ions_std**2)**.5 - # Apply Gaussian kernel for protons + # Apply Gaussian kernel for ions apply_gauss_2d_kernel(map_2d=fluence_map, map_2d_error=fluence_map_error, - amplitude=wait_protons, - amplitude_error=wait_protons_error, + amplitude=wait_ions, + amplitude_error=wait_ions_error, bin_centers_x=map_bin_centers_x, bin_centers_y=map_bin_centers_y, mu_x=wait_mu_x, @@ -479,23 +479,23 @@ def _process_row_scan(row_data, row_beam_data, fluence_map, fluence_map_error, r row_bin_center_currents = np.interp(row_bin_center_timestamps, row_beam_data['timestamp'], row_beam_data['beam_current']) row_bin_center_current_errors = np.interp(row_bin_center_timestamps, row_beam_data['timestamp'], row_beam_data['beam_current_error']) - # Integrate the current measurements with the times spent in each bin to calculate the amount of protons in the bin - row_bin_center_protons = (row_bin_center_currents * row_bin_transit_times) / elementary_charge - row_bin_center_proton_errors = (row_bin_center_current_errors * row_bin_transit_times) / elementary_charge - row_bin_center_proton_errors = (row_bin_center_proton_errors**2 + np.std(row_bin_center_protons)**2)**.5 + # Integrate the current measurements with the times spent in each bin to calculate the amount of ions in the bin + row_bin_center_ions = (row_bin_center_currents * row_bin_transit_times) / elementary_charge + row_bin_center_ion_errors = (row_bin_center_current_errors * row_bin_transit_times) / elementary_charge + row_bin_center_ion_errors = 
(row_bin_center_ion_errors**2 + np.std(row_bin_center_ions)**2)**.5 # Loop over row times - for i in range(row_bin_center_protons.shape[0]): + for i in range(row_bin_center_ions.shape[0]): # Update mean location of the distribution mu_x = map_bin_centers_x[(-(i+1) if row_data['row'] % 2 else i)] mu_y = row_data['row_start_y'] - scan_y_offset - # Apply Gaussian kernel for protons + # Apply Gaussian kernel for ions apply_gauss_2d_kernel(map_2d=fluence_map, map_2d_error=fluence_map_error, - amplitude=row_bin_center_protons[i], - amplitude_error=row_bin_center_proton_errors[i], + amplitude=row_bin_center_ions[i], + amplitude_error=row_bin_center_ion_errors[i], bin_centers_x=map_bin_centers_x, bin_centers_y=map_bin_centers_y, mu_x=mu_x, diff --git a/irrad_control/analysis/plotting.py b/irrad_control/analysis/plotting.py index 90b44517..b5174fb0 100644 --- a/irrad_control/analysis/plotting.py +++ b/irrad_control/analysis/plotting.py @@ -8,15 +8,15 @@ from irrad_control.analysis.formulas import lin_odr -def _get_damage_label_unit_target(damage, dut=False): - damage_unit = r'n$_\mathrm{eq}$ cm$^{-2}$' if damage == 'neq' else r'p cm$^{-2}$' if damage == 'proton' else 'Mrad' - damage_label = 'Fluence' if damage in ('neq', 'proton') else 'Total Ionizing Dose' +def _get_damage_label_unit_target(damage, ion_name, dut=False): + damage_unit = r'n$_\mathrm{eq}$ cm$^{-2}$' if damage == 'neq' else f'{ion_name}s' + r' cm$^{-2}$' if damage == 'primary' else 'Mrad' + damage_label = 'Fluence' if damage in ('neq', 'primary') else 'Total Ionizing Dose' damage_target = "DUT" if dut else "Scan" return damage_label, damage_unit, damage_target -def _apply_labels_damage_plots(ax, damage, server, cbar=None, dut=False, damage_map=None, uncertainty_map=False): +def _apply_labels_damage_plots(ax, damage, ion_name, server, cbar=None, dut=False, damage_map=None, uncertainty_map=False): - damage_label, damage_unit, damage_target = _get_damage_label_unit_target(damage=damage, dut=dut) + damage_label, damage_unit, damage_target = _get_damage_label_unit_target(damage=damage, ion_name=ion_name, dut=dut) ax.set_xlabel(f'{damage_target} area horizontal / mm') ax.set_ylabel(f'{damage_target} area vertical / mm') @@ -35,9 +35,9 @@ def _apply_labels_damage_plots(ax, damage, server, cbar=None, dut=False, damage_ cbar_label = f"{damage_label} / {damage_unit}" cbar.set_label(cbar_label) -def _make_cbar(fig, damage_map, damage, rel_error_lims=None): +def _make_cbar(fig, damage_map, damage, ion_name, rel_error_lims=None): - damage_label, damage_unit, _ = _get_damage_label_unit_target(damage=damage, dut=False) + damage_label, damage_unit, _ = _get_damage_label_unit_target(damage=damage, ion_name=ion_name, dut=False) # Make axis for cbar cbar_axis = plt.axes([0.85, 0.1, 0.033, 0.8]) @@ -70,7 +70,7 @@ def plot_damage_error_3d(damage_map, error_map, map_centers_x, map_centers_y, vi # Relative errors rel_damage_map = error_map / damage_map * 100.0 - _make_cbar(fig=fig, damage_map=surface_3d, damage=damage_label_kwargs.get('damage', 'neq'), rel_error_lims=(rel_damage_map.min(), rel_damage_map.max())) + _make_cbar(fig=fig, damage_map=surface_3d, damage=damage_label_kwargs.get('damage', 'neq'), ion_name=damage_label_kwargs['ion_name'], rel_error_lims=(rel_damage_map.min(), rel_damage_map.max())) # Apply labels _apply_labels_damage_plots(ax=ax, damage_map=damage_map, uncertainty_map=True, **damage_label_kwargs) @@ -99,7 +99,7 @@ def plot_damage_map_3d(damage_map, map_centers_x, map_centers_y, view_angle=(25, ax.view_init(*view_angle) 
ax.set_ylim(ax.get_ylim()[::-1]) # Inverty y axis in order to set origin to upper left - _make_cbar(fig=fig, damage_map=surface_3d, damage=damage_label_kwargs.get('damage', 'neq')) + _make_cbar(fig=fig, damage_map=surface_3d, damage=damage_label_kwargs.get('damage', 'neq'), ion_name=damage_label_kwargs['ion_name']) # Apply labels _apply_labels_damage_plots(ax=ax, damage_map=damage_map, **damage_label_kwargs) From 04b68805f52e0718567068ad03337c58d4bcfe5c Mon Sep 17 00:00:00 2001 From: leloup314 Date: Thu, 27 Oct 2022 17:21:57 +0200 Subject: [PATCH 05/10] WIP: renaming in favor of generic ions -> remove explicit proton --- irrad_control/processes/converter.py | 99 ++++++++++++++-------------- irrad_control/processes/gui.py | 2 +- 2 files changed, 49 insertions(+), 52 deletions(-) diff --git a/irrad_control/processes/converter.py b/irrad_control/processes/converter.py index 36e983d8..d4119303 100644 --- a/irrad_control/processes/converter.py +++ b/irrad_control/processes/converter.py @@ -10,6 +10,7 @@ import irrad_control.analysis as analysis import irrad_control.devices.readout as ro from irrad_control.processes.daq import DAQProcess +from irrad_control.ions import get_ions class IrradConverter(DAQProcess): @@ -29,6 +30,7 @@ def __init__(self, name=None): self.dtypes = analysis.dtype.IrradDtypes() self.hists = analysis.dtype.IrradHists() + self.ions = get_ions() # Call init of super class super(IrradConverter, self).__init__(name=name) @@ -231,6 +233,7 @@ def _setup_daq_parameters(self, server, server_setup): daq_setup = server_setup['daq'] + self._daq_params[server]['ion'] = self.ions[daq_setup['ion']] self._daq_params[server]['stopping_power'] = daq_setup['stopping_power'] or np.nan self._daq_params[server]['kappa'] = (np.nan, np.nan) if daq_setup['kappa'] is None else (daq_setup['kappa']['nominal'], daq_setup['kappa']['sigma']) self._daq_params[server]['lambda'] = (np.nan, np.nan) if daq_setup['lambda'] is None else (daq_setup['lambda']['nominal'], daq_setup['lambda']['sigma']) @@ -573,50 +576,43 @@ def _interpret_scan_data(self, server, data, meta): # Calculate mean row fluence and error row_mean_beam_current, row_mean_beam_current_err = self._calc_mean_and_error(data=self._scan_currents[server]) - row_proton_fluence = analysis.formulas.fluence_per_scan(ion_current=ufloat(row_mean_beam_current, row_mean_beam_current_err), - scan_step=self.data_arrays[server]['irrad']['row_separation'][0], - scan_speed=self.data_arrays[server]['scan']['row_scan_speed'][0]) + row_primary_fluence = analysis.formulas.fluence_per_scan(ion_current=ufloat(row_mean_beam_current, row_mean_beam_current_err), + ion_n_charge=self._daq_params[server]['ion'].n_charge, + scan_step=self.data_arrays[server]['irrad']['row_separation'][0], + scan_speed=self.data_arrays[server]['scan']['row_scan_speed'][0]) - row_proton_tid = analysis.formulas.tid_per_scan(primary_fluence=row_proton_fluence, stopping_power=self._daq_params[server]['stopping_power']) + row_tid = analysis.formulas.tid_per_scan(primary_fluence=row_primary_fluence, stopping_power=self._daq_params[server]['stopping_power']) self.data_arrays[server]['scan']['row_stop_timestamp'] = meta['timestamp'] self.data_arrays[server]['scan']['row_stop_x'] = data['x_stop'] self.data_arrays[server]['scan']['row_stop_y'] = data['y_stop'] self.data_arrays[server]['scan']['row_mean_beam_current'] = row_mean_beam_current self.data_arrays[server]['scan']['row_mean_beam_current_error'] = row_mean_beam_current_err - self.data_arrays[server]['scan']['row_primary_fluence'] = 
row_proton_fluence.n - self.data_arrays[server]['scan']['row_primary_fluence_error'] = row_proton_fluence.s - self.data_arrays[server]['scan']['row_tid'] = row_proton_tid.n - self.data_arrays[server]['scan']['row_tid_error'] = row_proton_tid.s - - # Log - logging.info("Row {} of scan {}: ({:.2E} +- {:.2E}) protons / cm^2 and ({:.2E} +- {:.2E}) Mrad".format(self.data_arrays[server]['scan']['row'][0], - self.data_arrays[server]['scan']['scan'][0], - row_proton_fluence.n, - row_proton_fluence.s, - row_proton_tid.n, - row_proton_tid.s)) + self.data_arrays[server]['scan']['row_primary_fluence'] = row_primary_fluence.n + self.data_arrays[server]['scan']['row_primary_fluence_error'] = row_primary_fluence.s + self.data_arrays[server]['scan']['row_tid'] = row_tid.n + self.data_arrays[server]['scan']['row_tid_error'] = row_tid.s # Add to overall fluence - self._row_fluence_hist[server][self.data_arrays[server]['scan']['row'][0]] += row_proton_fluence + self._row_fluence_hist[server][self.data_arrays[server]['scan']['row'][0]] += row_primary_fluence # Append data to table within this interpretation cycle self.data_flags[server]['scan'] = True # ETA time and n_scans - _mean_proton_fluence = np.mean(self._row_fluence_hist[server]).n + _mean_primary_fluence = np.mean(self._row_fluence_hist[server]).n row_scan_time = self.data_arrays[server]['scan']['row_stop_timestamp'][0] - self.data_arrays[server]['scan']['row_start_timestamp'][0] try: # Check damage type if self.data_arrays[server]['irrad']['aim_damage'][0] == bytes('NIEL', encoding='ascii'): - # Get remaining proton fluence - remainder_NIEL = self.data_arrays[server]['irrad']['aim_value'][0] / self._daq_params[server]['kappa'][0] - _mean_proton_fluence - eta_n_scans = int(remainder_NIEL / row_proton_fluence.n) + # Get remaining primary fluence + remainder_NIEL = self.data_arrays[server]['irrad']['aim_value'][0] / self._daq_params[server]['kappa'][0] - _mean_primary_fluence + eta_n_scans = int(remainder_NIEL / row_primary_fluence.n) else: - remainder_TID = self.data_arrays[server]['irrad']['aim_value'][0] - analysis.formulas.tid_per_scan(primary_fluence=_mean_proton_fluence, - stopping_power=self._daq_params[server]['stopping_power']) - eta_n_scans = int(remainder_TID / analysis.formulas.tid_per_scan(primary_fluence=row_proton_fluence.n, + remainder_TID = self.data_arrays[server]['irrad']['aim_value'][0] - analysis.formulas.tid_per_scan(primary_fluence=_mean_primary_fluence, + stopping_power=self._daq_params[server]['stopping_power']) + eta_n_scans = int(remainder_TID / analysis.formulas.tid_per_scan(primary_fluence=row_primary_fluence.n, stopping_power=self._daq_params[server]['stopping_power'])) eta_seconds = eta_n_scans * row_scan_time * self.data_arrays[server]['irrad']['n_rows'][0] @@ -627,71 +623,72 @@ def _interpret_scan_data(self, server, data, meta): scan_data = {'meta': {'timestamp': meta['timestamp'], 'name': server, 'type': 'scan'}, 'data': {'fluence_hist': unumpy.nominal_values(self._row_fluence_hist[server]).tolist(), 'fluence_hist_err': unumpy.std_devs(self._row_fluence_hist[server]).tolist(), - 'row_mean_proton_fluence': (row_proton_fluence.n, row_proton_fluence.s), - 'row_mean_tid': (row_proton_tid.n, row_proton_tid.s), + 'row_primary_fluence': (row_primary_fluence.n, row_primary_fluence.s), + 'row_tid': (row_tid.n, row_tid.s), 'row': int(self.data_arrays[server]['scan']['row'][0]), 'eta_seconds': eta_seconds, 'eta_n_scans': eta_n_scans, 'status': 'interpreted'}} elif data['status'] == 'scan_complete': - # Get scan proton fluence in 
each row - row_proton_fluences_last_scan = self.data_tables[server]['scan'].col('row_primary_fluence')[ + # Get scan primary fluence in each row + row_primary_fluences_last_scan = self.data_tables[server]['scan'].col('row_primary_fluence')[ -self.data_arrays[server]['irrad']['n_rows'][0]:] - # Get scan proton fluence error in each row - row_proton_fluences_last_scan_err = self.data_tables[server]['scan'].col('row_primary_fluence_error')[ + # Get scan primary fluence error in each row + row_primary_fluences_last_scan_err = self.data_tables[server]['scan'].col('row_primary_fluence_error')[ -self.data_arrays[server]['irrad']['n_rows'][0]:] - # Calculate mean proton fluence of last scan - mean_scan_proton_fluence, mean_scan_proton_fluence_err = self._calc_mean_and_error( - data=unumpy.uarray(row_proton_fluences_last_scan, - row_proton_fluences_last_scan_err)) + # Calculate mean primary fluence of last scan + mean_scan_primary_fluence, mean_scan_primary_fluence_err = self._calc_mean_and_error( + data=unumpy.uarray(row_primary_fluences_last_scan, + row_primary_fluences_last_scan_err)) # Calculate absolute delivered fluence with this scan - abs_proton_fluence = ufloat(mean_scan_proton_fluence, - mean_scan_proton_fluence_err) + ufloat( + abs_primary_fluence = ufloat(mean_scan_primary_fluence, + mean_scan_primary_fluence_err) + ufloat( self.data_arrays[server]['damage']['scan_primary_fluence'][0], self.data_arrays[server]['damage']['scan_primary_fluence_error'][0]) # Calculate absolute delivered TID with this scan - abs_tid = analysis.formulas.tid_per_scan(primary_fluence=abs_proton_fluence, + abs_tid = analysis.formulas.tid_per_scan(primary_fluence=abs_primary_fluence, stopping_power=self._daq_params[server]['stopping_power']) # Completed scan number and timestamp of completion self.data_arrays[server]['damage']['timestamp'] = meta['timestamp'] self.data_arrays[server]['damage']['scan'] = data['scan'] - self.data_arrays[server]['damage']['scan_primary_fluence'] = abs_proton_fluence.n - self.data_arrays[server]['damage']['scan_primary_fluence_error'] = abs_proton_fluence.s + self.data_arrays[server]['damage']['scan_primary_fluence'] = abs_primary_fluence.n + self.data_arrays[server]['damage']['scan_primary_fluence_error'] = abs_primary_fluence.s self.data_arrays[server]['damage']['scan_tid'] = abs_tid.n self.data_arrays[server]['damage']['scan_tid_error'] = abs_tid.s # Log logging.info( - "Scan {}: ({:.2E} +- {:.2E}) protons / cm^2 and ({:.2E} +- {:.2E}) Mrad".format(data['scan'], - abs_proton_fluence.n, - abs_proton_fluence.s, - abs_tid.n, - abs_tid.s)) + "Scan {}: ({:.2E} +- {:.2E}) {}s / cm^2 and ({:.2E} +- {:.2E}) Mrad".format(data['scan'], + abs_primary_fluence.n, + abs_primary_fluence.s, + self._daq_params[server]['ion'].name, + abs_tid.n, + abs_tid.s)) # Append data to table within this interpretation cycle self.data_flags[server]['damage'] = True scan_data = {'meta': {'timestamp': meta['timestamp'], 'name': server, 'type': 'damage'}, 'data': {'scan': data['scan'], - 'scan_primary_fluence': (abs_proton_fluence.n, abs_proton_fluence.s), + 'scan_primary_fluence': (abs_primary_fluence.n, abs_primary_fluence.s), 'scan_tid': (abs_tid.n, abs_tid.s)}} elif data['status'] == 'scan_finished': self.data_arrays[server]['result']['timestamp'] = meta['timestamp'] - mean_result_proton_fluence = np.mean(self._row_fluence_hist[server]) - mean_result_tid = analysis.formulas.tid_per_scan(primary_fluence=mean_result_proton_fluence, stopping_power=self._daq_params[server]['stopping_power']) - 
mean_result_neq_fluence = mean_result_proton_fluence * ufloat(*self._daq_params[server]['kappa']) + mean_result_primary_fluence = np.mean(self._row_fluence_hist[server]) + mean_result_tid = analysis.formulas.tid_per_scan(primary_fluence=mean_result_primary_fluence, stopping_power=self._daq_params[server]['stopping_power']) + mean_result_neq_fluence = mean_result_primary_fluence * ufloat(*self._daq_params[server]['kappa']) - self.data_arrays[server]['result']['primary_fluence'] = mean_result_proton_fluence.n - self.data_arrays[server]['result']['primary_fluence_error'] = mean_result_proton_fluence.s + self.data_arrays[server]['result']['primary_fluence'] = mean_result_primary_fluence.n + self.data_arrays[server]['result']['primary_fluence_error'] = mean_result_primary_fluence.s self.data_arrays[server]['result']['tid'] = mean_result_tid.n self.data_arrays[server]['result']['tid_error'] = mean_result_tid.s self.data_arrays[server]['result']['neq_fluence'] = mean_result_neq_fluence.n @@ -702,7 +699,7 @@ def _interpret_scan_data(self, server, data, meta): scan_data = {'meta': {'timestamp': meta['timestamp'], 'name': server, 'type': 'result'}, 'data': {'scan': int(self.data_arrays[server]['damage']['scan'][0]), - 'primary_fluence': (mean_result_proton_fluence.n, mean_result_proton_fluence.s), + 'primary_fluence': (mean_result_primary_fluence.n, mean_result_primary_fluence.s), 'tid': (mean_result_tid.n, mean_result_tid.s), 'neq_fluence': (mean_result_neq_fluence.n, mean_result_neq_fluence.s)}} diff --git a/irrad_control/processes/gui.py b/irrad_control/processes/gui.py index 6bd7e425..b529dfc9 100644 --- a/irrad_control/processes/gui.py +++ b/irrad_control/processes/gui.py @@ -515,7 +515,7 @@ def handle_data(self, data): # Check whether data is interpreted elif data['data']['status'] == 'interpreted': self.monitor_tab.plots[server]['fluence_plot'].set_data(data) - #self.control_tab.update_info(row=data['data']['row_mean_proton_fluence'][0], unit='p/cm^2') + #self.control_tab.update_info(row=data['data']['row_primary_fluence'][0], unit='p/cm^2') #self.control_tab.update_info(nscan=data['data']['eta_n_scans']) if data['data']['eta_n_scans'] >= 0: From 460f0a54e7faa690b54eac90b160f79f11044354 Mon Sep 17 00:00:00 2001 From: leloup314 Date: Sun, 30 Oct 2022 17:49:49 +0100 Subject: [PATCH 06/10] MAINT: change plotting label --- irrad_control/analysis/plotting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/irrad_control/analysis/plotting.py b/irrad_control/analysis/plotting.py index b5174fb0..d09e9136 100644 --- a/irrad_control/analysis/plotting.py +++ b/irrad_control/analysis/plotting.py @@ -24,7 +24,7 @@ def _apply_labels_damage_plots(ax, damage, ion_name, server, cbar=None, dut=Fals # 3D plot if hasattr(ax, 'set_zlabel'): - ax.set_zlabel(f"{damage_label} / {damage_unit}") + ax.set_zlabel(f"{damage_unit}") if damage_map is not None and dut and not uncertainty_map: mean, std = damage_map.mean(), damage_map.std() From cdd6c419073e3bb87c933769b65e8a71afe03ce5 Mon Sep 17 00:00:00 2001 From: leloup314 Date: Sun, 30 Oct 2022 18:23:54 +0100 Subject: [PATCH 07/10] ENH: allow to set aim damage to be either primary, neq or tid, depending on whether kappa / stopping power are available for respective ion --- irrad_control/analysis/dtype.py | 4 +- irrad_control/gui/tabs/control_tab.py | 2 +- irrad_control/gui/widgets/control_widgets.py | 65 ++++++++++++++++---- irrad_control/processes/converter.py | 22 ++++--- 4 files changed, 72 insertions(+), 21 deletions(-) diff --git 
a/irrad_control/analysis/dtype.py b/irrad_control/analysis/dtype.py index 9e2fa951..49fdf0f9 100644 --- a/irrad_control/analysis/dtype.py +++ b/irrad_control/analysis/dtype.py @@ -48,8 +48,8 @@ _irrad_dtype = [('timestamp', ' Date: Sun, 30 Oct 2022 19:49:01 +0100 Subject: [PATCH 08/10] ENH: add checkbox to enable/disable auto finishing of scan --- irrad_control/gui/tabs/control_tab.py | 6 ++++++ irrad_control/gui/widgets/control_widgets.py | 8 ++++++++ irrad_control/processes/gui.py | 5 ++--- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/irrad_control/gui/tabs/control_tab.py b/irrad_control/gui/tabs/control_tab.py index f99ec246..9a746f03 100644 --- a/irrad_control/gui/tabs/control_tab.py +++ b/irrad_control/gui/tabs/control_tab.py @@ -120,6 +120,12 @@ def check_no_beam(self, server, beam_current): if time.time() - self._beam_down_timer[server] > 1.0: self.send_cmd(hostname=server, target='__scan__', cmd='handle_event', cmd_data={'kwargs': {'event': 'beam_ok'}}) self._beam_down[server] = False + + def check_finish(self, server, eta_n_scans): + + if eta_n_scans == 0 and self.tab_widgets[server]['scan'].auto_finish_scan: + self.send_cmd(hostname=server, target='__scan__', cmd='handle_event', cmd_data={'kwargs': {'event': 'finish'}}) + def scan_status(self, server, status='started'): read_only_state = status == 'started' diff --git a/irrad_control/gui/widgets/control_widgets.py b/irrad_control/gui/widgets/control_widgets.py index 7fc009ca..d9800d02 100644 --- a/irrad_control/gui/widgets/control_widgets.py +++ b/irrad_control/gui/widgets/control_widgets.py @@ -411,6 +411,7 @@ def __init__(self, server, daq_setup, parent=None): self._after_scan_container = None self.n_rows = None + self.auto_finish_scan = True self._init_ui() @@ -554,6 +555,12 @@ def _init_ui(self): spx_end_x.valueChanged.connect(lambda v: self.update_scan_params(rel_end=[v, spx_end_y.value()])) spx_end_y.valueChanged.connect(lambda v: self.update_scan_params(rel_start=[spx_end_x.value(), v])) + # Auto finish scan + checkbox_auto_finish = QtWidgets.QCheckBox('Auto finish scan') + checkbox_auto_finish.setToolTip("Automatically finish scan procedure when target damage is reached.") + checkbox_auto_finish.stateChanged.connect(lambda state: setattr(self, 'auto_finish_scan', bool(state))) + checkbox_auto_finish.setChecked(True) + # Scan btn_start = QtWidgets.QPushButton('START') btn_start.setToolTip("Start scan.") @@ -596,6 +603,7 @@ def _init_ui(self): layout_scan.addWidget(btn_pause) layout_scan.addWidget(btn_finish) layout_scan.addWidget(btn_stop) + layout_scan.addWidget(checkbox_auto_finish) # Add to layout self.add_widget(widget=[label_row_sep, spx_row_sep]) diff --git a/irrad_control/processes/gui.py b/irrad_control/processes/gui.py index b529dfc9..4994bc81 100644 --- a/irrad_control/processes/gui.py +++ b/irrad_control/processes/gui.py @@ -523,9 +523,8 @@ def handle_data(self, data): # FIXME: more precise result would be helpful pass - # Finish the scan programatically - if data['data']['eta_n_scans'] == 0: - self.send_cmd(server, 'stage', 'finish') + # Finish the scan programatically, if wanted + self.control_tab.check_finish(server=server, eta_n_scans=data['data']['eta_n_scans']) elif data['meta']['type'] == 'temp_arduino': From 822da1cdafb0a0742971eb7c5218b3f10ce342c5 Mon Sep 17 00:00:00 2001 From: leloup314 Date: Thu, 10 Nov 2022 11:30:44 +0100 Subject: [PATCH 09/10] ENH: add energy degradation data from GEANT4 sim for protons, deuterons and alphas --- irrad_control/ions/alpha/dut_energy.dat | 18 
++++++++++++++++++
 irrad_control/ions/deuteron/dut_energy.dat | 18 ++++++++++++++++++
 irrad_control/ions/proton/dut_energy.dat   | 22 +++++++++++++++++++---
 3 files changed, 55 insertions(+), 3 deletions(-)
 create mode 100644 irrad_control/ions/alpha/dut_energy.dat
 create mode 100644 irrad_control/ions/deuteron/dut_energy.dat

diff --git a/irrad_control/ions/alpha/dut_energy.dat b/irrad_control/ions/alpha/dut_energy.dat
new file mode 100644
index 00000000..9de353f3
--- /dev/null
+++ b/irrad_control/ions/alpha/dut_energy.dat
@@ -0,0 +1,18 @@
+### This file contains energy degradation data at the DUT for alphas ###
+# Initial energy / MeV, energy at DUT mean / MeV, energy at DUT sigma / MeV
+# [('initial_energy', '
Date: Thu, 10 Nov 2022 14:13:18 +0100
Subject: [PATCH 10/10] MAINT: correctly adjust extracted beam current also for plotting

---
 irrad_control/processes/converter.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/irrad_control/processes/converter.py b/irrad_control/processes/converter.py
index b25715aa..20de4bdd 100644
--- a/irrad_control/processes/converter.py
+++ b/irrad_control/processes/converter.py
@@ -491,7 +491,8 @@ def _interpret_beam_data(self, server, data, meta):
 
                 extracted_current = self.data_arrays[server]['beam']['beam_current'][0] - blm_current
                 logging.warning("Correcting extracted beam current from {:.2E} A to {:.2E} A".format(self.data_arrays[server]['beam']['beam_current'][0], extracted_current))
-                self.data_arrays[server]['beam']['beam_current'] = extracted_current
+
+                self.data_arrays[server]['beam']['beam_current'] = beam_data['data']['current']['beam_current'] = extracted_current
 
             except ZeroDivisionError:
                 pass
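
Note (not part of the patch series): a minimal sketch of how the generic-ion helpers introduced above fit together. It assumes the signatures visible in the call sites after this series, i.e. IrradIon.rate(current), formulas.fluence_per_scan(ion_current, ion_n_charge, scan_step, scan_speed) and formulas.tid_per_scan(primary_fluence, stopping_power), and that get_ions() returns IrradIon objects keyed by the ion package directory name, as used in the converter. All numeric beam parameters and the stopping power below are invented for illustration.

    # Illustrative sketch only: beam parameters and stopping power are invented,
    # not taken from the patches.
    from irrad_control.ions import get_ions
    from irrad_control.analysis import formulas

    ion = get_ions()['proton']       # IrradIon with n_charge == 1

    beam_current = 1e-6              # A, hypothetical readout value
    scan_step = 1.0                  # mm, separation between scanned rows
    scan_speed = 80.0                # mm/s, row scan speed
    stopping_power = 35.0            # MeV cm^2 / g, hypothetical and energy-dependent

    # Particle rate from beam current (patch 01): current / (n_charge * elementary_charge)
    ion_rate = ion.rate(beam_current)                    # ions / s

    # Fluence delivered per scan (patches 02/03), in ions / cm^2
    primary_fluence = formulas.fluence_per_scan(ion_current=beam_current,
                                                ion_n_charge=ion.n_charge,
                                                scan_step=scan_step,
                                                scan_speed=scan_speed)

    # Total ionizing dose per scan in Mrad; uses MEV_PER_GRAM_TO_MRAD internally
    tid = formulas.tid_per_scan(primary_fluence=primary_fluence,
                                stopping_power=stopping_power)

    print(f"{ion_rate:.3e} ions/s, {primary_fluence:.3e} ions/cm^2 per scan, {tid:.3e} Mrad per scan")

Since the charge state enters only through n_charge, and the stopping power or hardness factor is looked up per ion, the same chain applies unchanged to protons, deuterons and alphas.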