Bug Summary

File: libraries/FCAL/DFCALCluster_factory_Island.cc
Location: line 714, column 10
Description: Value stored to 'chisq_old' during its initialization is never read

Annotated Source Code

1// $Id$
2//
3// File: DFCALCluster_factory_Island.cc
4// Created: Fri Dec 4 08:25:47 EST 2020
5// Creator: staylor (on Linux ifarm1802.jlab.org 3.10.0-1062.4.1.el7.x86_64 x86_64)
6//
7
8
9#include <iostream>
10#include <iomanip>
11using namespace std;
12
13#include "DFCALCluster_factory_Island.h"
14using namespace jana;
15
16inline bool FCALHit_E_cmp(const DFCALHit *a,const DFCALHit *b){
17 return (a->E>b->E);
18}
19
20inline bool peak_E_cmp(DFCALCluster_factory_Island::PeakInfo a,
21 DFCALCluster_factory_Island::PeakInfo b){
22 return (a.E>b.E);
23}
24
25//------------------
26// init
27//------------------
28jerror_t DFCALCluster_factory_Island::init(void)
29{
30 TIME_CUT=15.;
31 gPARMS->SetDefaultParameter("FCAL:TIME_CUT",TIME_CUT,"time cut for associating FCAL hits together into a cluster");
32
33 MAX_HITS_FOR_CLUSTERING = 250;
34 gPARMS->SetDefaultParameter("FCAL:MAX_HITS_FOR_CLUSTERING", MAX_HITS_FOR_CLUSTERING);
35
36 MIN_CLUSTER_SEED_ENERGY=35.*k_MeV;
37 gPARMS->SetDefaultParameter("FCAL:MIN_CLUSTER_SEED_ENERGY",
38 MIN_CLUSTER_SEED_ENERGY);
39 SHOWER_ENERGY_THRESHOLD = 50*k_MeV;
40 gPARMS->SetDefaultParameter("FCAL:SHOWER_ENERGY_THRESHOLD", SHOWER_ENERGY_THRESHOLD);
41
42 SHOWER_WIDTH_PARAMETER=0.675;
43 gPARMS->SetDefaultParameter("FCAL:SHOWER_WIDTH_PARAMETER",
44 SHOWER_WIDTH_PARAMETER);
45 INSERT_SHOWER_WIDTH_PARAMETER=0.315;
46 gPARMS->SetDefaultParameter("FCAL:INSERT_SHOWER_WIDTH_PARAMETER",
47 INSERT_SHOWER_WIDTH_PARAMETER);
48 MIN_CUTDOWN_FRACTION=0.25;
49 gPARMS->SetDefaultParameter("FCAL:MIN_CUTDOWN_FRACTION",
50 MIN_CUTDOWN_FRACTION);
51
52 DEBUG_HISTS=false;
53 gPARMS->SetDefaultParameter("FCAL:DEBUG_HISTS",DEBUG_HISTS);
54
55 CHISQ_MARGIN=5.;
56 gPARMS->SetDefaultParameter("FCAL:CHISQ_MARGIN",CHISQ_MARGIN);
57
58 MASS_CUT=0.0001;
59 gPARMS->SetDefaultParameter("FCAL:MASS_CUT",MASS_CUT);
60 ENERGY_SHARING_CUTOFF=0.9;
61 gPARMS->SetDefaultParameter("FCAL:ENERGY_SHARING_CUTOFF",ENERGY_SHARING_CUTOFF);
62 ENERGY_SHARING_CUTOFF=0.9;
63 gPARMS->SetDefaultParameter("FCAL:ENERGY_SHARING_CUTOFF",ENERGY_SHARING_CUTOFF);
64
65 HistdE=new TH2D("HistdE",";E [GeV];#deltaE [GeV]",100,0,10,201,-0.25,0.25);
66 HistProb=new TH1D("HistProb",";CL",100,0,1);
67
68 return NOERROR;
69}
70
71//------------------
72// brun
73//------------------
jerror_t DFCALCluster_factory_Island::brun(jana::JEventLoop *eventLoop, int32_t runnumber)
{
  // Fetch run-dependent geometry through the application framework
  DApplication *dapp = dynamic_cast<DApplication*>(eventLoop->GetJApplication());
  const DGeometry *geom = dapp->GetDGeometry(runnumber);

  // Cache the distance from the target z position to the FCAL front face;
  // used by CheckPeak to form photon direction vectors
  double targetZ=0.;
  geom->GetTargetZ(targetZ);
  eventLoop->GetSingle(dFCALGeom);
  m_zdiff=dFCALGeom->fcalFrontZ()-targetZ;

  // Coefficients of the per-block energy-variance parametrization
  // var(E) = p0 + p1*E + p2*E^2, separately for insert and lead-glass blocks
  // (values presumably from calibration studies -- TODO confirm provenance)
  m_insert_Eres[0]=0.0003;
  m_insert_Eres[1]=0.00025;
  m_insert_Eres[2]=4.4e-5;
  m_Eres[0]=0.0005;
  m_Eres[1]=0.001225;
  m_Eres[2]=9.0e-4;

  return NOERROR;
}
93
94//------------------
95// evnt
96//------------------
97jerror_t DFCALCluster_factory_Island::evnt(JEventLoop *loop, uint64_t eventnumber)
98{
99 vector<const DFCALHit*>fcal_hits;
100 loop->Get(fcal_hits);
101 if (fcal_hits.size()==0) return OBJECT_NOT_AVAILABLE;
102
103 // LED events will have hits in nearly every channel. Do NOT
104 // try clusterizing if more than 250 hits in FCAL
105 if(fcal_hits.size() > MAX_HITS_FOR_CLUSTERING) return VALUE_OUT_OF_RANGE;
106
107 // Sort the hits according to energy.
108 stable_sort(fcal_hits.begin(),fcal_hits.end(),FCALHit_E_cmp);
109
110 // Associate groups of adjacent hits into cluster candidates
111 vector<vector<const DFCALHit*>>clusterCandidates;
112 FindClusterCandidates(fcal_hits,clusterCandidates);
113
114 for (unsigned int i=0;i<clusterCandidates.size();i++){
115 // Skip over single hit "clusterCandidates"
116 if (clusterCandidates[i].size()==1) continue;
117
118 // Mininum number of hits to make a shower = 2
119 if (clusterCandidates[i].size()<3){
120 // Create a new DFCALCluster object and add it to the _data list
121 DFCALCluster *myCluster= new DFCALCluster(0);
122 vector<const DFCALHit*>clusterCandidate=clusterCandidates[i];
123
124 double Etot=0.,t=0,x=0,y=0;
125 for (unsigned int k=0;k<clusterCandidate.size();k++){
126 double E=clusterCandidate[k]->E;
127 Etot+=E;
128 t+=E*clusterCandidate[k]->t;
129 x+=E*clusterCandidate[k]->x;
130 y+=E*clusterCandidate[k]->y;
131
132 myCluster->AddAssociatedObject(clusterCandidate[k]);
133 }
134 x/=Etot;
135 y/=Etot;
136 t/=Etot;
137
138 myCluster->setEnergy(Etot);
139 myCluster->setTimeEWeight(t);
140
141 int channel=dFCALGeom->channel(clusterCandidate[0]->row,
142 clusterCandidate[0]->column);
143 myCluster->setChannelEmax(channel);
144 myCluster->setCentroid(x,y);
145
146 _data.push_back(myCluster);
147
148 continue;
149 }
150
151 //------------------------------------------------------------------------
152 // Handle cluster candidates containing more than 3 hits
153 //------------------------------------------------------------------------
154 vector<const DFCALHit*>clusterHits=clusterCandidates[i];
155 vector<PeakInfo>peaks;
156
157 // Create weight matrix for hits
158 unsigned int num_hits=clusterHits.size();
159 TMatrixD W(num_hits,num_hits);
160 double Esum=0.;
161 for (unsigned int i=0;i<num_hits;i++){
162 const DFCALHit *hit=clusterHits[i];
163 double E=hit->E;
164 double varE=0.;
165 if (dFCALGeom->isInsertBlock(hit->row,hit->column)){
166 varE=m_insert_Eres[0]+m_insert_Eres[1]*E+m_insert_Eres[2]*E*E;
167 }
168 else{
169 varE=m_Eres[0]+m_Eres[1]*E+m_Eres[2]*E*E;
170 }
171 W(i,i)=1./varE;
172 Esum+=E;
173 }
174
175 int min_row=1000,min_col=1000,max_row=0,max_col=0;
176 for (unsigned int j=0;j<num_hits;j++){
177 if (clusterHits[j]->row<min_row) min_row=clusterHits[j]->row;
178 if (clusterHits[j]->column<min_col) min_col=clusterHits[j]->column;
179 if (clusterHits[j]->row>max_row) max_row=clusterHits[j]->row;
180 if (clusterHits[j]->column>max_col) max_col=clusterHits[j]->column;
181 }
182 // Create arrays to represent the cluster of hits to aid in peak search
183 int num_rows=max_row-min_row+3;
184 int num_cols=max_col-min_col+3;
185 vector<vector<double>>Emap(num_cols,vector<double>(num_rows));
186 vector<vector<unsigned int>>imap(num_cols,vector<unsigned int>(num_rows));
187 for (unsigned int j=0;j<num_hits;j++){
188 int ic=clusterHits[j]->column-min_col+1;
189 int ir=clusterHits[j]->row-min_row+1;
190 Emap[ic][ir]=clusterHits[j]->E;
191 imap[ic][ir]=j;
192 }
193
194 double chisq=1e6,chisq_old=1e6;
195
196 // Loop over hits looking for peaks.
197 vector<PeakInfo>peak_candidates;
198 for (int ic=1;ic<num_cols-1;ic++){
199 for (int ir=1;ir<num_rows-1;ir++){
200 double E=Emap[ic][ir];
201 if (E>MIN_CLUSTER_SEED_ENERGY){
202 double x=E*clusterHits[imap[ic][ir]]->x;
203 double y=E*clusterHits[imap[ic][ir]]->y;
204 int nhits_in_peak=1;
205
206 bool got_peak=true;
207 int lo_col=ic-1;
208 int hi_col=ic+1;
209 int lo_row=ir-1;
210 int hi_row=ir+1;
211 for (int j=lo_col;j<=hi_col;j++){
212 for (int k=lo_row;k<=hi_row;k++){
213 if (j==ic && k==ir) continue;
214
215 double Ejk=Emap[j][k];
216 if (Ejk<0.001) continue;
217
218 got_peak=(E-Ejk>0)&&got_peak;
219 if (got_peak){
220 // Accumulate energy and energy weighted position variables
221 E+=Ejk;
222 x+=Ejk*clusterHits[imap[j][k]]->x;
223 y+=Ejk*clusterHits[imap[j][k]]->y;
224
225 nhits_in_peak++;
226 }
227 }
228 }
229 if (got_peak){
230 x/=E;
231 y/=E;
232 peak_candidates.push_back(PeakInfo(E,x,y,ic,ir,nhits_in_peak));
233 }
234 }// cut on minimum energy of central block
235 } // loop over rows
236 } // loop over columns
237
238 // Sort peak candidates by energy
239 if (peak_candidates.size()>0){
240 sort(peak_candidates.begin(),peak_candidates.end(),peak_E_cmp);
241 }
242
243 // Loop over peak candidates to perform the peak fit and add good candidates
244 // to the output list.
245 for (size_t i=0;i<peak_candidates.size();i++){
246 PeakInfo myPeak=peak_candidates[i];
247 if (peaks.size()>0){
248 int ir=myPeak.ir;
249 int ic=myPeak.ic;
250 int lo_col=ic-1;
251 int hi_col=ic+1;
252 int lo_row=ir-1;
253 int hi_row=ir+1;
254
255 // Subtract background due to the set of peaks already in the
256 // list from the current peak
257 for (unsigned int m=0;m<peaks.size();m++){
258 double frac=0.;
259 for (int j=lo_col;j<=hi_col;j++){
260 for (int k=lo_row;k<=hi_row;k++){
261 if (Emap[j][k]>0.) {
262 frac+=CalcClusterEDeriv(clusterHits[imap[j][k]],peaks[m]);
263 }
264 }
265 }
266 myPeak.E-=peaks[m].E*frac;
267 }
268 }
269
270 if (myPeak.E>MIN_CLUSTER_SEED_ENERGY){
271 // Save the current peak list
272 vector<PeakInfo>saved_peaks=peaks;
273 PeakInfo peak_guess=myPeak;
274 if (myPeak.nhits>2){
275 // Fit the data to find the best current guesses for the shower
276 // parameters for each peak within this group of FCAL hits.
277 chisq_old=chisq;
278 bool good_fit=FitPeaks(W,clusterHits,peaks,myPeak,chisq);
279 if (good_fit && chisq<chisq_old){
280 // Check that the difference between this candidate and the peaks
281 // in the list is physical (i.e., could be compatible with a pi0)
282 bool keep_photon_candidate=CheckPeak(peaks,myPeak);
283 if (keep_photon_candidate){
284 peaks.push_back(myPeak);
285 }
286 else {
287 // Difference between this photon and the others in the list is
288 // not physical. Restore the old list.
289 peaks=saved_peaks;
290 chisq=chisq_old;
291 }
292 }
293 else if (peaks.size()==0){
294 // Always add the first peak, even if the fit failed.
295 // Use the initial guess for the peak info.
296 peaks.push_back(peak_guess);
297 }
298 else{
299 // No improvement from adding the new peak. Restore the old
300 // list
301 peaks=saved_peaks;
302 chisq=chisq_old;
303 }
304 } // check number of hits in peak candidate
305 } // check threshold
306 } // loop over peak candidates
307
308 if (DEBUG_HISTS&&num_hits>3&&peaks.size()==1){
309 HistProb->Fill(TMath::Prob(chisq,num_hits-3));
310
311 PeakInfo myPeakInfo=peaks[0];
312 for (unsigned int k=0;k<clusterHits.size();k++){
313 double Ecalc=0.,E=clusterHits[k]->E;
314 Ecalc+=myPeakInfo.E*CalcClusterEDeriv(clusterHits[k],myPeakInfo);
315 HistdE->Fill(E,E-Ecalc);
316 }
317 }
318
319 if (num_hits>4){
320 SplitPeaks(W,clusterHits,peaks,chisq);
321 }
322
323 // Estimate fraction of "seen" energy for each peak
324 size_t npeaks=peaks.size();
325 vector<double>peak_fractions(npeaks);
326 double Ecalc_sum=0.;
327 for (unsigned int k=0;k<npeaks;k++){
328 Ecalc_sum+=peaks[k].E;
329 }
330 for (unsigned int k=0;k<npeaks;k++){
331 peak_fractions[k]=peaks[k].E/Ecalc_sum;
332 }
333
334 // Add the clusters to the output of the factory
335 for (unsigned int k=0;k<npeaks;k++){
336 // using the measured energy gives slightly better energy resolution
337 double E=peak_fractions[k]*Esum;
338 //cout << "E " << E << " " << peaks[k].E << endl;
339 if (E>SHOWER_ENERGY_THRESHOLD){
340 DFCALCluster *myCluster= new DFCALCluster(0);
341 myCluster->setEnergy(E);
342
343 // Compute energy-weighted time and find channel corresponding to
344 // peak position and add hits to the cluster object
345 unsigned int jmax=0.;
346 double fsum=0.,fmax=0.,t=0.;
347 for (unsigned int j=0;j<clusterHits.size();j++){
348 double f=CalcClusterEDeriv(clusterHits[j],peaks[k]);
349 t+=f*clusterHits[j]->t;
350 fsum+=f;
351 if (f>fmax){
352 fmax=f;
353 jmax=j;
354 }
355 }
356
357 myCluster->setTimeEWeight(clusterHits[jmax]->t);
358 int channel=dFCALGeom->channel(clusterHits[jmax]->row,
359 clusterHits[jmax]->column);
360 myCluster->setChannelEmax(channel);
361 myCluster->setCentroid(peaks[k].x,peaks[k].y);
362
363 // Add hits to the cluster object as associated objects
364 for (unsigned int j=0;j<clusterHits.size();j++){
365 if (npeaks==1){
366 myCluster->AddAssociatedObject(clusterHits[j]);
367 }
368 else{
369 // Output hits surrounding peak position
370 double dx=clusterHits[j]->x-clusterHits[jmax]->x;
371 double dy=clusterHits[j]->y-clusterHits[jmax]->y;
372 double d=dFCALGeom->blockSize();
373 if (dFCALGeom->inInsert(channel)){
374 d=dFCALGeom->insertBlockSize();
375 }
376 double dcut=2.5*d;
377 if (fabs(dx)<dcut && fabs(dy)<dcut){
378 myCluster->AddAssociatedObject(clusterHits[j]);
379 }
380 }
381 }
382
383 _data.push_back(myCluster);
384 }
385 }
386 }
387
388 return NOERROR;
389}
390
391//------------------
392// erun
393//------------------
// erun: called at the end of each run; nothing to clean up here.
jerror_t DFCALCluster_factory_Island::erun(void)
{
  return NOERROR;
}
398
399//------------------
400// fini
401//------------------
// fini: called once at program exit; histograms are owned by ROOT, so
// nothing to delete here.
jerror_t DFCALCluster_factory_Island::fini(void)
{
  return NOERROR;
}
406
407// Make a list of potential clusters, each consisting of a "central" block
408// and all adjacent blocks, starting with the block with the highest energy.
409// Each successive cluster candidate starts with the block with the next
410// highest energy that has not already been included in another cluster.
void DFCALCluster_factory_Island::FindClusterCandidates(vector<const DFCALHit*>&fcal_hits,
							vector<vector<const DFCALHit*>>&clusterCandidates) const {
  // Seed the first candidate with the highest-energy hit (fcal_hits is
  // energy-sorted by the caller).
  vector<const DFCALHit*>clusterCandidate;
  clusterCandidate.push_back(fcal_hits[0]);

  vector<bool>used_hits(fcal_hits.size());
  used_hits[0]=true;

  int hits_left=fcal_hits.size()-1;
  int begin_i=1;
  while (hits_left>0){
    // Attach every unused hit adjacent (in row/column) and in time with the
    // current candidate's seed hit.
    for (size_t i=begin_i;i<fcal_hits.size();i++){
      if (used_hits[i]==true) continue;

      double dt=clusterCandidate[0]->t-fcal_hits[i]->t;
      if (fabs(dt)>TIME_CUT) continue;

      int drow=clusterCandidate[0]->row-fcal_hits[i]->row;
      int dcol=clusterCandidate[0]->column-fcal_hits[i]->column;
      if (abs(drow)<=1 && abs(dcol)<=1){
	clusterCandidate.push_back(fcal_hits[i]);

	used_hits[i]=true;
	hits_left--;
      }
    }

    // Close out this candidate and seed the next one with the next unused hit
    clusterCandidates.push_back(clusterCandidate);
    clusterCandidate.clear();
    for (size_t i=begin_i;i<fcal_hits.size();i++){
      if (used_hits[i]==false){
	begin_i=i+1;
	clusterCandidate.push_back(fcal_hits[i]);

	used_hits[i]=true;
	hits_left--;

	break;
      }
    }
  }
  if (clusterCandidate.size()>0) clusterCandidates.push_back(clusterCandidate);

  // At this point we do not necessarily have complete clusters.
  // Merge cluster candidates that appear to belong to a single cluster, again
  // looking for adjacent hits.
  double borderCut=0.5*dFCALGeom->blockSize()+dFCALGeom->insertBlockSize();
  vector<vector<const DFCALHit*>>::iterator iter=clusterCandidates.begin();
  while (iter!=clusterCandidates.end()){
    bool matched=false;
    vector<vector<const DFCALHit*>>::iterator iter2=iter+1;
    for (;iter2!=clusterCandidates.end();iter2++){
      for (unsigned int i=0;i<(*iter).size();i++){
	for (unsigned int j=0;j<(*iter2).size();j++){
	  double dt=(*iter)[i]->t-(*iter2)[j]->t;
	  if (fabs(dt)>TIME_CUT) continue;

	  int row1=(*iter)[i]->row;
	  int col1=(*iter)[i]->column;
	  int row2=(*iter2)[j]->row;
	  int col2=(*iter2)[j]->column;

	  if (abs(row1-row2)<=1 && abs(col1-col2)<=1){
	    matched=true;
	    break;
	  }

	  // look for adjacent clusters between the lead glass and the insert,
	  // if present; row/column adjacency is not meaningful across the
	  // boundary, so compare physical positions instead
	  bool isInsert1=dFCALGeom->isInsertBlock(row1,col1);
	  bool isInsert2=dFCALGeom->isInsertBlock(row2,col2);
	  if ((isInsert1&&!isInsert2) || (isInsert2&&!isInsert1)){
	    double dx=(*iter)[i]->x-(*iter2)[j]->x;
	    double dy=(*iter)[i]->y-(*iter2)[j]->y;
	    if (fabs(dx)<borderCut && fabs(dy)<borderCut){
	      matched=true;
	      break;
	    }
	  }
	}
	if (matched){
	  // Add the hits from this cluster to the cluster to which it is
	  // matched and erase it from the list. NOTE: erase() invalidates
	  // iterators; the loops below break out and restart from begin().
	  for (unsigned int j=0;j<(*iter2).size();j++){
	    (*iter).push_back((*iter2)[j]);
	  }
	  clusterCandidates.erase(iter2);
	  break;
	}
      }
      if (matched==true){
	// Restart the outer scan from the beginning after a merge
	iter=clusterCandidates.begin();
	break;
      }
    }
    if (matched==false) iter++;
  }
}
509
510// Fit peaks within a cluster containing a list of hits contained in hitList
// Fit peaks within a cluster containing a list of hits contained in hitList.
// Iterative linearized least-squares fit of (E, x, y) for each peak in
// "peaks" plus the trial peak "myNewPeak", against the measured block
// energies with weight matrix W. On success returns true with the best-fit
// parameters in peaks/myNewPeak and the best chi^2 in "chisq"; returns false
// if any peak becomes unphysical (negative energy, outside fiducial area, or
// centered on a block with no hit).
bool DFCALCluster_factory_Island::FitPeaks(const TMatrixD &W,
					   vector<const DFCALHit*>&hitList,
					   vector<PeakInfo>&peaks,
					   PeakInfo &myNewPeak,double &chisq
					   ) const {
  size_t nhits=hitList.size();
  size_t npeaks=peaks.size();
  // Save the current peak info so the best iterate can be restored at the end
  vector<PeakInfo>saved_peaks=peaks;
  PeakInfo saved_new_peak=myNewPeak;

  // Iterate to find best shower energy and position
  chisq=1e6;
  unsigned int max_iter=100;
  double min_frac=MIN_CUTDOWN_FRACTION;
  double cutdown_scale=(1.-min_frac)/double(max_iter); //for cut-down for parameter adjustment
  for (unsigned int k=0;k<max_iter;k++){
    // Make sure the energy is positive!
    if (myNewPeak.E<0){
      return false;
    }
    //Check that the new peak positions are still within the fiducial area of
    // the detector
    if (dFCALGeom->isFiducial(myNewPeak.x,myNewPeak.y)==false){
      return false;
    }

    // Matrix of per-block differences between measured and calculated energies
    TMatrixD dE(nhits,1);
    // Matrix containing partial derivatives of the shower profile function
    // with respect to the three parameters (E, xc, yc): Jacobian matrix
    TMatrixD A(nhits,3*npeaks+3);
    // Loop over all the hits to fill dE and A
    for (unsigned int i=0;i<nhits;i++){
      double Ecalc=0.,df_dE=0;
      for (unsigned int j=0;j<npeaks;j++){
	PeakInfo myPeakInfo=peaks[j];

	// Make sure the energy is positive!
	if (myPeakInfo.E<0){
	  return false;
	}

	//Check that the new peak positions are still within the fiducial
	//area of the detector
	if (dFCALGeom->isFiducial(myPeakInfo.x,myPeakInfo.y)==false){
	  return false;
	}

	// Compute the Jacobian matrix elements
	df_dE=CalcClusterEDeriv(hitList[i],myPeakInfo);
	A(i,3*j+0)=df_dE;
	A(i,3*j+1)=CalcClusterXYDeriv(true,hitList[i],myPeakInfo);
	A(i,3*j+2)=CalcClusterXYDeriv(false,hitList[i],myPeakInfo);

	Ecalc+=myPeakInfo.E*df_dE;
      }
      // Add contributions from the peak we wish to add
      df_dE=CalcClusterEDeriv(hitList[i],myNewPeak);
      A(i,3*npeaks+0)=df_dE;
      A(i,3*npeaks+1)=CalcClusterXYDeriv(true,hitList[i],myNewPeak);
      A(i,3*npeaks+2)=CalcClusterXYDeriv(false,hitList[i],myNewPeak);

      Ecalc+=myNewPeak.E*df_dE;

      double Ediff=hitList[i]->E-Ecalc;
      dE(i,0)=Ediff;
    }
    // Compute chi^2 for this iteration
    double chisq_new=0.;
    for (unsigned int i=0;i<nhits;i++) chisq_new+=W(i,i)*dE(i,0)*dE(i,0);
    // Stop when converged (relative change < 1e-4) or when chi^2 worsens
    if (fabs(chisq_new-chisq)/chisq<0.0001) break;
    if (chisq_new>chisq) break;

    // Save the current value of chisq
    chisq=chisq_new;

    // Save the current best peak values
    saved_new_peak=myNewPeak;
    saved_peaks.assign(peaks.begin(),peaks.end());

    // Determine the set of corrections needed to bring the computed shower
    // shape closer to the measurements: dPar = frac * (A^T W A)^-1 A^T W dE
    TMatrixD A_T(TMatrixD::kTransposed,A);
    TMatrixD AT_A=A_T*W*A;
    TMatrixD InvATA(TMatrixD::kInverted,AT_A);
    // The function describing the cluster profile is rather nastily non-linear,
    // so we perform a cut-down on the maximum shift vector; the cut-down
    // fraction relaxes from min_frac toward 1 as the iterations proceed.
    double frac=min_frac+cutdown_scale*double(k);
    TMatrixD dPar=frac*(InvATA*A_T*W*dE);

    // Update the peak parameters
    for (unsigned int i=0;i<npeaks;i++){
      peaks[i].E+=dPar(3*i+0,0);
      peaks[i].x+=dPar(3*i+1,0);
      peaks[i].y+=dPar(3*i+2,0);
    }
    myNewPeak.E+=dPar(3*npeaks+0,0);
    myNewPeak.x+=dPar(3*npeaks+1,0);
    myNewPeak.y+=dPar(3*npeaks+2,0);
  }

  // Peak info for output of the routine. At this stage myNewPeak, for example,
  // has been adjusted away from the solution with the best chisq value.
  myNewPeak=saved_new_peak;
  peaks.assign(saved_peaks.begin(),saved_peaks.end());

  // Sanity check for peak position: does it correspond to one of the hit
  // blocks?
  vector<int>peak_channels;
  peak_channels.push_back(dFCALGeom->channel(myNewPeak.x,myNewPeak.y));
  for (unsigned int i=0;i<peaks.size();i++){
    peak_channels.push_back(dFCALGeom->channel(peaks[i].x,peaks[i].y));
  }
  vector<int>hit_channels(nhits);
  for (unsigned int i=0;i<nhits;i++){
    hit_channels[i]=dFCALGeom->channel(hitList[i]->row,hitList[i]->column);
  }
  for (unsigned int j=0;j<peak_channels.size();j++){
    int my_peak_channel=peak_channels[j];
    if (find(hit_channels.begin(),hit_channels.end(),my_peak_channel)==hit_channels.end()){
      return false;
    }
  }

  return true;
}
645
646// Routine to compute the partial derivatives of the shower profile function
647// with respect to the peak position parameters (xpeak,ypeak)
648double DFCALCluster_factory_Island::CalcClusterXYDeriv(bool isXDeriv,
649 const DFCALHit *hit,
650 const PeakInfo &myPeakInfo) const {
651 double sign1[4]={1,1,-1,-1};
652 double sign2[4]={1,-1,-1,1};
653 double half_block=0.5*dFCALGeom->blockSize();
654 double b=SHOWER_WIDTH_PARAMETER; // cm
655 if (dFCALGeom->isInsertBlock(hit->row,hit->column)){
656 half_block=0.5*dFCALGeom->insertBlockSize();
657 b=INSERT_SHOWER_WIDTH_PARAMETER;
658 }
659 double b2=b*b;
660 double f=0.;
661 double dxc=hit->x-myPeakInfo.x;
662 double dyc=hit->y-myPeakInfo.y;
663 double Ec=myPeakInfo.E;
664 for (int i=0;i<4;i++){
665 double dx=dxc+sign1[i]*half_block;
666 double dy=dyc+sign2[i]*half_block;
667 double dx2=dx*dx;
668 double dy2=dy*dy;
669 double factor=-sign1[i]*sign2[i]*Ec*b/(2*M_PI3.14159265358979323846)/sqrt(b2*dx2+dy2);
670 if (isXDeriv){
671 f+=factor*dy/(b2+dx2);
672 }
673 else {
674 f+=factor*dx/(b2+dy2);
675 }
676 }
677 return f;
678}
679
680// Shower profile function from Bland, et al., Instruments and Experimental
681// Techniques, 2008, Vol. 51, No. 3, pp. 342-350, eq. 6. Note that the
682// normalization of this function in this equation does not appear to be
683// correct (it's off by 1/sqrt(2pi)).
684double DFCALCluster_factory_Island::CalcClusterEDeriv(const DFCALHit *hit,
685 const PeakInfo &myPeakInfo) const {
686 double sign1[4]={1,1,-1,-1};
687 double sign2[4]={1,-1,-1,1};
688 double half_block=0.5*dFCALGeom->blockSize();
689 double b=SHOWER_WIDTH_PARAMETER; // cm
690 if (dFCALGeom->isInsertBlock(hit->row,hit->column)){
691 half_block=0.5*dFCALGeom->insertBlockSize();
692 b=INSERT_SHOWER_WIDTH_PARAMETER;
693 }
694 double f=0.;
695 double dxc=hit->x-myPeakInfo.x;
696 double dyc=hit->y-myPeakInfo.y;
697 for (int i=0;i<4;i++){
698 double dx=dxc+sign1[i]*half_block;
699 double dy=dyc+sign2[i]*half_block;
700 f+=sign1[i]*sign2[i]/(2*M_PI3.14159265358979323846)*atan(dx*dy/(b*sqrt(b*b+dx*dx+dy*dy)));
701 }
702 return f;
703}
704
705// Try to split peaks into two following the algorithm (barely) described in
706// Lednev, IHEP 93-153.
707void DFCALCluster_factory_Island::SplitPeaks(const TMatrixD &W,
708 vector<const DFCALHit*>&hits,
709 vector<PeakInfo>&peaks,
710 double &chisq) const{
711 unsigned int npeaks=peaks.size(),nhits=hits.size();
712 vector<PeakInfo>saved_peaks=peaks;
713
714 double chisq_old=chisq;
Value stored to 'chisq_old' during its initialization is never read
715 // Find the centroid of the hits for each peak region
716 for (unsigned int i=0;i<npeaks;i++){
717 double E0=0.,x0=0.,y0=0.;
718 vector<double>Elist(nhits);
719 for (unsigned int j=0;j<nhits;j++){
720 double Ecalc=0.;
721 for (unsigned int k=0;k<peaks.size();k++){
722 if (i==k) continue;
723 Ecalc+=peaks[k].E*CalcClusterEDeriv(hits[j],peaks[k]);
724 }
725 Elist[j]=hits[j]->E-Ecalc;
726 E0+=Elist[j];
727 x0+=Elist[j]*hits[j]->x;
728 y0+=Elist[j]*hits[j]->y;
729 }
730 x0/=E0;
731 y0/=E0;
732
733 // Find the second moments about the center of the cluster of hits
734 double xx=0.,yy=0.,yx=0.;
735 for (unsigned int j=0;j<nhits;j++){
736 double dx=hits[j]->x-x0;
737 double dy=hits[j]->y-y0;
738 xx+=Elist[j]*dx*dx;
739 yy+=Elist[j]*dy*dy;
740 yx+=Elist[j]*dx*dy;
741 }
742
743 // Determine the parameter alpha=(E1-E2)/E0 specifying the energy sharing
744 // between the two candidate peaks
745 double dxy=xx-yy;
746 double rsq2 = dxy*dxy + 4.*yx*yx;
747 if( rsq2 < 1.e-20 ) rsq2 = 1.e-20;
748 double rsq = sqrt(rsq2);
749 double dxc = -sqrt((rsq+dxy)*2.);
750 double dyc = sqrt((rsq-dxy)*2.);
751 if( yx >= 0. ) dyc = -dyc;
752 double r = sqrt(dxc*dxc + dyc*dyc);
753 double alpha = 0.;
754 for(unsigned int i=0;i<nhits;i++) {
755 double u=(hits[i]->x-x0)*dxc/r + (hits[i]->y-y0)*dyc/r;
756 alpha-=Elist[i]*u*fabs(u);
757 }
758 alpha/=E0*rsq;
759 // Make sure alpha is not too small or too big
760 if (alpha<-ENERGY_SHARING_CUTOFF) alpha=-ENERGY_SHARING_CUTOFF;
761 if (alpha> ENERGY_SHARING_CUTOFF) alpha= ENERGY_SHARING_CUTOFF;
762
763 // Find the first guess for the energies of the two potential peaks
764 double alpha_plus_factor=0.5*(1.+alpha);
765 double alpha_minus_factor=0.5*(1.-alpha);
766 double E1=E0*alpha_plus_factor;
767 double E2=E0*alpha_minus_factor;
768 if (E1<SHOWER_ENERGY_THRESHOLD || E2<SHOWER_ENERGY_THRESHOLD){
769 // Bail if either of the energies is below the shower threshold
770 continue;
771 }
772
773 // Scale dxc and dyc to find estimates for the peak separation in x and y
774 double scale=1./sqrt(1.-alpha*alpha);
775 dxc*=scale;
776 dyc*=scale;
777
778 // Estimates for peak positions
779 double x1=x0+dxc*alpha_plus_factor;
780 double y1=y0+dyc*alpha_plus_factor;
781 double x2=x0+dxc*alpha_minus_factor;
782 double y2=y0+dyc*alpha_minus_factor;
783
784 PeakInfo myNewPeak(E2,x2,y2,0,0,0);
785 peaks[i].E=E1;
786 peaks[i].x=x1;
787 peaks[i].y=y1;
788
789 // Refit with the split peaks
790 chisq_old=chisq;
791 bool good_fit=FitPeaks(W,hits,peaks,myNewPeak,chisq);
792 if (good_fit && chisq+CHISQ_MARGIN<chisq_old){
793 // Check that the difference between this candidate and the peaks in the
794 // list is physical (i.e., could be compatible with a pi0)
795 bool keep_photon_candidate=CheckPeak(peaks,myNewPeak);
796 if (keep_photon_candidate){
797 peaks.push_back(myNewPeak);
798 }
799 else {
800 // Difference between this photon and the others in the list is not
801 // physical. Restore the old list.
802 peaks=saved_peaks;
803 chisq=chisq_old;
804 }
805 }
806 else {
807 // No improvement from adding the new peak. Restore the old list.
808 peaks=saved_peaks;
809 chisq=chisq_old;
810 }
811 }
812}
813
814// Check that the difference between this candidate and the peaks in the list
815//is physical (i.e., could be compatible with a pi0)
816bool DFCALCluster_factory_Island::CheckPeak(const vector<PeakInfo>&peaks,const PeakInfo &myNewPeak) const{
817 DVector3 dir1(myNewPeak.x,myNewPeak.y,m_zdiff);
818 dir1.SetMag(1.);
819 for (unsigned int m=0;m<peaks.size();m++){
820 DVector3 dir2(peaks[m].x,peaks[m].y,m_zdiff);
821 dir2.SetMag(1.);
822 double m2=peaks[m].E*myNewPeak.E*(1.-dir1.Dot(dir2));
823 if (m2<MASS_CUT){
824 return false;
825 }
826 }
827
828 return true;
829}