Skip to content

Commit

Permalink
First implementation of a different algorithm for veto signals. Also th…
Browse files Browse the repository at this point in the history
…e allowance of up to 2 greater neighbouring points during peak identification is removed; peaks can have no bigger neighbour.
  • Loading branch information
JPorron committed May 27, 2024
1 parent 3d6568a commit 9076ee8
Show file tree
Hide file tree
Showing 3 changed files with 118 additions and 6 deletions.
1 change: 1 addition & 0 deletions inc/TRestRawSignal.h
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,7 @@ class TRestRawSignal {
/// Peaks are defined as the points that are above the threshold and are separated by a minimum distance
/// in time bin units. The threshold must be set in absolute value (regardless of the baseline)
std::vector<std::pair<UShort_t, double>> GetPeaks(double threshold, UShort_t distance = 5) const;
/// Veto-flavoured peak search: operates on a smoothed copy of the signal and accepts a bin
/// as a peak only when no smoothed neighbour within `distance` bins is greater or equal
/// (peaks can have no bigger neighbour); the position is refined with a Gaussian fit.
/// The threshold is absolute (regardless of the baseline), as for GetPeaks.
std::vector<std::pair<UShort_t, double>> GetPeaksVeto(double threshold, UShort_t distance = 5) const;

TRestRawSignal();
TRestRawSignal(Int_t nBins);
Expand Down
24 changes: 18 additions & 6 deletions src/TRestRawPeaksFinderProcess.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,25 @@ TRestEvent* TRestRawPeaksFinderProcess::ProcessEvent(TRestEvent* inputEvent) {
continue;
}

signal->CalculateBaseLine(20, 200);
const auto peaks =
signal->GetPeaks(signal->GetBaseLine() + 5 * signal->GetBaseLineSigma(), fDistance);
// Choose the peak-finding algorithm appropriate for the channel type.
if (channelType == "tpc") {
    signal->CalculateBaseLine(20, 200);
    const auto peaks =
        signal->GetPeaks(signal->GetBaseLine() + 5 * signal->GetBaseLineSigma(), fDistance);

    for (const auto& [time, amplitude] : peaks) {
        eventPeaks.emplace_back(signalId, time, amplitude);
    }
} else if (channelType == "veto") {
    signal->CalculateBaseLine(20, 200);
    const auto peaks =
        signal->GetPeaksVeto(signal->GetBaseLine() + 5 * signal->GetBaseLineSigma(), fDistance);

    // Single pass over the found peaks. NOTE(review): the previous revision nested
    // this loop inside a duplicate of itself, inserting each peak N+1 times into
    // eventPeaks instead of once.
    for (const auto& [time, amplitude] : peaks) {
        eventPeaks.emplace_back(signalId, time, amplitude);
    }

/*
cout << "Signal ID: " << channelId << " Name: " << channelName << endl;
for (const auto& [time, amplitude] : peaks) {
Expand Down Expand Up @@ -196,4 +208,4 @@ void TRestRawPeaksFinderProcess::PrintMetadata() {
RESTMetadata << "Window: " << fWindow << RESTendl;

EndPrintProcess();
}
}
99 changes: 99 additions & 0 deletions src/TRestRawSignal.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -1008,3 +1008,102 @@ vector<pair<UShort_t, double>> TRestRawSignal::GetPeaks(double threshold, UShort

return peaks;
}

/// \brief Peak finder tailored for veto signals.
///
/// The raw data is first boxcar-smoothed. A bin is accepted as a peak only when
/// no smoothed neighbour within `distance` bins is greater or equal ("peaks can
/// have no bigger neighbour"), its smoothed value exceeds `threshold` (absolute
/// value, baseline included), and it lies at least `distance` bins after the
/// previously accepted peak. The position is then refined with a Gaussian fit of
/// the raw data around the candidate; the returned amplitude is the raw value at
/// the fitted position.
///
/// \param threshold Absolute amplitude a smoothed candidate must exceed.
/// \param distance Minimum separation (in time-bin units) between peaks; also the
///        half-width of the neighbour-comparison window.
/// \return Vector of (position, raw amplitude) pairs, ordered by position.
vector<pair<UShort_t, double>> TRestRawSignal::GetPeaksVeto(double threshold, UShort_t distance) const {
    vector<pair<UShort_t, double>> peaks;

    // Region to compare for peak/no-peak classification. 10 means 5 bins to each side.
    const UShort_t smoothingWindow = 10;
    const int halfWindow = smoothingWindow / 2;
    const int numPoints = static_cast<int>(GetNumberOfPoints());

    if (numPoints == 0) return peaks;

    // Boxcar smoothing with the window truncated at the signal edges. The average
    // always uses the number of bins actually summed (the previous implementation
    // averaged the first bin over the full window size even when fewer bins fit,
    // and used an asymmetric [0, windowSize) window for bin 0 only).
    vector<double> smoothedValues(numPoints, 0.0);
    for (int i = 0; i < numPoints; ++i) {
        const int first = std::max(0, i - halfWindow);
        const int last = std::min(numPoints - 1, i + halfWindow);
        double sum = 0.0;
        for (int j = first; j <= last; ++j) {
            sum += GetRawData(j);
        }
        smoothedValues[i] = sum / (last - first + 1);
    }

    // Scan interior bins only, as before: edge bins carry truncated-window averages.
    for (int i = halfWindow; i < numPoints - halfWindow; ++i) {
        const double smoothedValue = smoothedValues[i];
        if (smoothedValue <= threshold) continue;

        // The candidate must strictly dominate every neighbour within `distance`
        // bins. The comparison window is clamped to the signal range (previously
        // j could run out of bounds whenever distance > smoothingWindow / 2).
        bool isPeak = true;
        const int jFirst = std::max(0, i - static_cast<int>(distance));
        const int jLast = std::min(numPoints - 1, i + static_cast<int>(distance));
        for (int j = jFirst; j <= jLast; ++j) {
            if (j != i && smoothedValue <= smoothedValues[j]) {
                isPeak = false;
                break;
            }
        }
        if (!isPeak) continue;

        // Enforce the minimum separation from the previously accepted peak. Signed
        // arithmetic avoids UShort_t wrap-around when the fitted position of the
        // previous peak happens to lie beyond i.
        if (!peaks.empty() && i - static_cast<int>(peaks.back().first) < static_cast<int>(distance))
            continue;

        // Refine the peak position with a Gaussian fit of the raw data in a
        // +-20 bin window, clamped so GetRawData is never read out of range.
        const int fitFirst = std::max(0, i - 20);
        const int fitLast = std::min(numPoints - 1, i + 20);
        const int nBins = fitLast - fitFirst + 1;

        // Unit-width bins centred on the integer sample indices (the previous
        // implementation filled 41 samples into a 40-bin histogram, so the last
        // sample fell into the overflow bin and bin centres were shifted).
        TH1D histogram("hist", "hist", nBins, fitFirst - 0.5, fitLast + 0.5);
        for (int k = fitFirst; k <= fitLast; ++k) {
            histogram.SetBinContent(k - fitFirst + 1, GetRawData(k));
        }

        TF1 fitFunction("gaussianFit", "gaus", fitFirst, fitLast);
        histogram.Fit(&fitFunction, "RQ");

        // Clamp the fitted mean into the valid sample range before reading the raw
        // amplitude at that position.
        const double peakPosition = fitFunction.GetParameter(1);
        const int clampedPosition = std::max(0, std::min(numPoints - 1, static_cast<int>(peakPosition)));
        const auto formattedPeakPosition = static_cast<UShort_t>(clampedPosition);
        peaks.emplace_back(formattedPeakPosition, GetRawData(formattedPeakPosition));
    }

    return peaks;
}

0 comments on commit 9076ee8

Please sign in to comment.