Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add ambience interference simulation with sprinkling method #181

Draft
wants to merge 15 commits into
base: main
Choose a base branch
from
46 changes: 46 additions & 0 deletions fuse/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,52 @@ def full_chain_context(
return st


def full_chain_sprinkle_context(
    raw_records_st_module="fuse",
    raw_records_st_name="full_chain_context",
    raw_records_st_kwargs=None,
    raw_records_st_config=None,
    sprinkle_run_id="046477",
    output_folder="./fuse_data",
    corrections_version=None,
    simulation_config_file="fuse_config_nt_sr1_dev.json",
    corrections_run_id="046477",
    run_id_specific_config=None,
    run_without_proper_corrections=False,
):
    """Context for sprinkling.

    Mix the simulated raw_records with a random time range from
    sprinkle_run_id.

    Args:
        raw_records_st_module: Module providing the context factory used
            to load the real raw_records.
        raw_records_st_name: Name of the context factory function inside
            that module.
        raw_records_st_kwargs: Keyword arguments forwarded to the context
            factory. Defaults to an empty dict.
        raw_records_st_config: Config options applied to that context.
            Defaults to an empty dict.
        sprinkle_run_id: Run id of the real data to mix in.
        output_folder: Where the simulation context stores its data.
        corrections_version: Corrections version passed to
            full_chain_context.
        simulation_config_file: Simulation config file name.
        corrections_run_id: Run id used to load corrections.
        run_id_specific_config: Mapping of run-id-specific correction
            options. Defaults to the standard gain/elife/drift mapping.
        run_without_proper_corrections: Allow running without proper
            corrections (passed through to full_chain_context).

    Returns:
        A strax context with the simulated raw_records plugin replaced by
        fuse.pmt_and_daq.SprinkledRecords.
    """
    # Avoid mutable default arguments: a module-level shared dict would
    # leak state between calls if a caller (or this function) mutated it.
    if raw_records_st_kwargs is None:
        raw_records_st_kwargs = {}
    if raw_records_st_config is None:
        raw_records_st_config = {}
    if run_id_specific_config is None:
        run_id_specific_config = {
            "gain_model_mc": "gain_model",
            "electron_lifetime_liquid": "elife",
            "drift_velocity_liquid": "electron_drift_velocity",
            "drift_time_gate": "electron_drift_time_gate",
        }

    st = full_chain_context(
        output_folder,
        corrections_version,
        simulation_config_file,
        corrections_run_id,
        run_id_specific_config,
        run_without_proper_corrections,
    )
    # The purely simulated raw_records plugin is replaced by the
    # sprinkling plugin registered below.
    del st._plugin_class_registry["raw_records"]
    st.set_config(
        {
            "raw_records_st_module": raw_records_st_module,
            "raw_records_st_name": raw_records_st_name,
            "raw_records_st_kwargs": raw_records_st_kwargs,
            "raw_records_st_config": raw_records_st_config,
            "sprinkle_run_id": sprinkle_run_id,
        }
    )
    st.register(fuse.pmt_and_daq.SprinkledRecords)
    set_simulation_config_file(st, simulation_config_file)
    return st


def set_simulation_config_file(context, config_file_name):
"""Function to loop over the plugin config and replace
SIMULATION_CONFIG_FILE with the actual file name."""
Expand Down
78 changes: 13 additions & 65 deletions fuse/plugins/detector_physics/s2_photon_propagation.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,69 +347,7 @@ def compute(self, individual_electrons, interactions_in_roi, start, end):
last_start = start
if n_chunks > 1:
for electron_group in electron_chunks[:-1]:
unique_clusters_in_group = np.unique(electron_group["cluster_id"])
interactions_chunk = interactions_in_roi[mask][
np.isin(interactions_in_roi["cluster_id"][mask], unique_clusters_in_group)
]

# Sort both the interactions and the electrons by cluster_id
# We will later sort by time again when yielding the data.
sort_index_ic = np.argsort(interactions_chunk["cluster_id"])
sort_index_eg = np.argsort(electron_group["cluster_id"])
interactions_chunk = interactions_chunk[sort_index_ic]
electron_group = electron_group[sort_index_eg]

positions = np.array([interactions_chunk["x_obs"], interactions_chunk["y_obs"]]).T

_photon_channels = self.photon_channels(
interactions_chunk["n_electron_extracted"],
interactions_chunk["z_obs"],
positions,
interactions_chunk["drift_time_mean"],
interactions_chunk["sum_s2_photons"],
)

_photon_timings = self.photon_timings(
positions,
interactions_chunk["sum_s2_photons"],
_photon_channels,
).astype(np.int64)

# Repeat for n photons per electron # Should this be before adding delays?
_photon_timings += np.repeat(electron_group["time"], electron_group["n_s2_photons"])

_cluster_id = np.repeat(
interactions_chunk["cluster_id"], interactions_chunk["sum_s2_photons"]
)

# Do i want to save both -> timings with and without pmt transit time spread?
# Correct for PMT Transit Time Spread
_photon_timings = pmt_transit_time_spread(
_photon_timings=_photon_timings,
pmt_transit_time_mean=self.pmt_transit_time_mean,
pmt_transit_time_spread=self.pmt_transit_time_spread,
rng=self.rng,
)

_photon_gains, _photon_is_dpe = photon_gain_calculation(
_photon_channels=_photon_channels,
p_double_pe_emision=self.p_double_pe_emision,
gains=self.gains,
spe_scaling_factor_distributions=self.spe_scaling_factor_distributions,
rng=self.rng,
)

result = build_photon_propagation_output(
dtype=self.dtype,
_photon_timings=_photon_timings,
_photon_channels=_photon_channels,
_photon_gains=_photon_gains,
_photon_is_dpe=_photon_is_dpe,
_cluster_id=_cluster_id,
photon_type=2,
)

result = strax.sort_by_time(result)
result = self.compute_chunk(interactions_in_roi, mask, electron_group)

# Move the chunk bound 90% of the minimal gap length to
# the next photon to make space for afterpluses
Expand All @@ -422,11 +360,19 @@ def compute(self, individual_electrons, interactions_in_roi, start, end):

# And the last chunk
electron_group = electron_chunks[-1]
result = self.compute_chunk(interactions_in_roi, mask, electron_group)

chunk = self.chunk(start=last_start, end=end, data=result)
yield chunk

def compute_chunk(self, interactions_in_roi, mask, electron_group):
unique_clusters_in_group = np.unique(electron_group["cluster_id"])
interactions_chunk = interactions_in_roi[mask][
np.isin(interactions_in_roi["cluster_id"][mask], unique_clusters_in_group)
]

# Sort both the interactions and the electrons by cluster_id
# We will later sort by time again when yielding the data.
sort_index_ic = np.argsort(interactions_chunk["cluster_id"])
sort_index_eg = np.argsort(electron_group["cluster_id"])
interactions_chunk = interactions_chunk[sort_index_ic]
Expand All @@ -448,12 +394,15 @@ def compute(self, individual_electrons, interactions_in_roi, start, end):
_photon_channels,
).astype(np.int64)

# Repeat for n photons per electron # Should this be before adding delays?
_photon_timings += np.repeat(electron_group["time"], electron_group["n_s2_photons"])

_cluster_id = np.repeat(
interactions_chunk["cluster_id"], interactions_chunk["sum_s2_photons"]
)

# Do i want to save both -> timings with and without pmt transit time spread?
# Correct for PMT Transit Time Spread
_photon_timings = pmt_transit_time_spread(
_photon_timings=_photon_timings,
pmt_transit_time_mean=self.pmt_transit_time_mean,
Expand Down Expand Up @@ -481,8 +430,7 @@ def compute(self, individual_electrons, interactions_in_roi, start, end):

result = strax.sort_by_time(result)

chunk = self.chunk(start=last_start, end=end, data=result)
yield chunk
return result

def photon_channels(self, n_electron, z_obs, positions, drift_time_mean, n_photons):
channels = np.arange(self.n_tpc_pmts).astype(np.int64)
Expand Down
3 changes: 3 additions & 0 deletions fuse/plugins/pmt_and_daq/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,6 @@

from . import photon_pulses
from .photon_pulses import *

from . import sprinkled_records
from .sprinkled_records import *
44 changes: 10 additions & 34 deletions fuse/plugins/pmt_and_daq/pmt_response_and_daq.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,48 +248,26 @@ def compute(self, propagated_photons, pulse_windows, start, end):

if n_chunks > 1:
for pulse_group in pulse_window_chunks[:-1]:
# use an upper limit for the waveform buffer
length_waveform_buffer = np.int32(
np.sum(np.ceil(pulse_group["length"] / strax.DEFAULT_RECORD_LENGTH))
)
waveform_buffer = np.zeros(length_waveform_buffer, dtype=self.dtype)

buffer_level = build_waveform(
pulse_group,
_photons,
unique_photon_pulse_ids,
waveform_buffer,
self.dt,
self._pmt_current_templates,
self.current_2_adc,
self.noise_data["arr_0"],
self.enable_noise,
self.digitizer_reference_baseline,
self.thresholds,
self.trigger_window,
)

records = waveform_buffer[:buffer_level]

# Digitzier saturation
# Clip negative values to 0
records["data"][records["data"] < 0] = 0

records = strax.sort_by_time(records)

records = self.compute_chunk(_photons, unique_photon_pulse_ids, pulse_group)
chunk_end = np.max(strax.endtime(records))
chunk = self.chunk(start=last_start, end=chunk_end, data=records)
last_start = chunk_end
yield chunk

# And the last chunk
records = self.compute_chunk(_photons, unique_photon_pulse_ids, pulse_window_chunks[-1])
chunk = self.chunk(start=last_start, end=end, data=records)
yield chunk

def compute_chunk(self, _photons, unique_photon_pulse_ids, pulse_group):
# use an upper limit for the waveform buffer
length_waveform_buffer = np.int32(
np.sum(np.ceil(pulse_window_chunks[-1]["length"] / strax.DEFAULT_RECORD_LENGTH))
np.sum(np.ceil(pulse_group["length"] / strax.DEFAULT_RECORD_LENGTH))
)
waveform_buffer = np.zeros(length_waveform_buffer, dtype=self.dtype)

buffer_level = build_waveform(
pulse_window_chunks[-1],
pulse_group,
_photons,
unique_photon_pulse_ids,
waveform_buffer,
Expand All @@ -308,11 +286,9 @@ def compute(self, propagated_photons, pulse_windows, start, end):
# Digitzier saturation
# Clip negative values to 0
records["data"][records["data"] < 0] = 0

records = strax.sort_by_time(records)

chunk = self.chunk(start=last_start, end=end, data=records)
yield chunk
return records

def init_pmt_current_templates(self):
"""Create spe templates, for 10ns sample duration and 1ns rounding we
Expand Down
Loading
Loading