From 3235856d9bc6958d9ae0149a95e7add5ddfbc6a8 Mon Sep 17 00:00:00 2001 From: Tongtong Cao Date: Mon, 24 Mar 2025 17:12:20 -0400 Subject: [PATCH 1/5] Update DC reconstruction at the denoisinng level (#499) * cancel SNR and change limit of total DC hits from raw hits to hits after denoising * remove unused codes --- .../java/org/jlab/rec/dc/banks/HitReader.java | 62 +++---------------- .../org/jlab/service/dc/DCHBClustering.java | 29 +++------ .../java/org/jlab/service/dc/DCHBEngine.java | 45 +++++--------- 3 files changed, 30 insertions(+), 106 deletions(-) diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java index faa907a8cf..02162955c5 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java @@ -205,65 +205,24 @@ private int getJitter(int sector, int layer, int wire, int order) { } return jitter; } - - public void fetch_DCHits(DataEvent event, Clas12NoiseAnalysis noiseAnalysis, - NoiseReductionParameters parameters, - Clas12NoiseResult results) { - this.initialize(event); - this.fetch_DCHits(noiseAnalysis, parameters, results); - } - + /** * reads the hits using clas-io methods to get the EvioBank for the DC and * fill the values to instantiate the DChit and MChit classes.This methods * fills the DChit list of hits. - * - * @param noiseAnalysis - * @param parameters - * @param results */ - private void fetch_DCHits(Clas12NoiseAnalysis noiseAnalysis, - NoiseReductionParameters parameters, - Clas12NoiseResult results) { - - _DCHits = new ArrayList<>(); - - IndexedList noise = new IndexedList<>(4); + public void fetch_DCHits(DataEvent event) { + this.initialize(event); - RawDataBank bankDGTZ = new RawDataBank(bankNames.getTdcBank(), OrderGroups.NODENOISE); - bankDGTZ.read(event); - - // event selection, including cut on max number of hits - if( run <= 0 || - tiTimeStamp < 0 || - bankDGTZ.rows()==0 || bankDGTZ.rows()>Constants.MAXHITS ) { - return; - } - else { - int rows = bankDGTZ.rows(); - int[] sector = new int[rows]; - int[] layer = new int[rows]; - int[] superlayer = new int[rows]; - int[] wire = new int[rows]; - for (int i = 0; i < rows; i++) { - sector[i] = bankDGTZ.getByte("sector", i); - layer[i] = (bankDGTZ.getByte("layer", i)-1)%6 + 1; - superlayer[i] = (bankDGTZ.getByte("layer", i)-1)/6 + 1; - wire[i] = bankDGTZ.getShort("component", i); - } - results.clear(); - noiseAnalysis.clear(); - noiseAnalysis.findNoise(sector, superlayer, layer, wire, results); - for(int i=0; i(); this.getDCRBJitters(Constants.getInstance().isSWAPDCRBBITS()); RawDataBank bankFiltered = new RawDataBank(bankNames.getTdcBank(), rawBankOrders); bankFiltered.read(event); + + if(run <= 0 || tiTimeStamp < 0 || bankFiltered.rows() > Constants.MAXHITS) return; + this.set_NumTDCBankRows(bankFiltered.rows()); for (int i = 0; i < bankFiltered.rows(); i++) { int sector = bankFiltered.getByte("sector", i); @@ -279,12 +238,7 @@ private void fetch_DCHits(Clas12NoiseAnalysis noiseAnalysis, if (wirestat != null) { if (wirestat.getIntValue("status", sector, layer+(superlayer-1)*6, wire) != 0) passHit = false; - } - - if(noise.hasItem(sector, superlayer, layer, wire)) { - if(noise.getItem(sector, superlayer, layer, wire)) - passHit = false; - } + } if (passHit && wire != -1 && !(superlayer == 0)) { diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java 
b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java index 978ae61e2f..f6d0e5f7ab 100644 --- a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java +++ b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java @@ -1,9 +1,5 @@ package org.jlab.service.dc; -import cnuphys.snr.NoiseReductionParameters; -import cnuphys.snr.clas12.Clas12NoiseAnalysis; -import cnuphys.snr.clas12.Clas12NoiseResult; - import java.util.List; import org.jlab.clas.swimtools.Swim; import org.jlab.io.base.DataEvent; @@ -45,27 +41,16 @@ public boolean processDataEvent(DataEvent event) { // get Field Swim dcSwim = new Swim(); /* 2 */ - // init SNR - Clas12NoiseResult results = new Clas12NoiseResult(); + ClusterFitter cf = new ClusterFitter(); /* 3 */ - Clas12NoiseAnalysis noiseAnalysis = new Clas12NoiseAnalysis(); + ClusterCleanerUtilities ct = new ClusterCleanerUtilities(); /* 4 */ - NoiseReductionParameters parameters = - new NoiseReductionParameters( - 2, - Constants.SNR_LEFTSHIFTS, - Constants.SNR_RIGHTSHIFTS); + RecoBankWriter rbc = new RecoBankWriter(this.getBanks()); /* 5 */ - ClusterFitter cf = new ClusterFitter(); + HitReader hitRead = new HitReader(this.getBanks(), this.getRawBankOrders(), super.getConstantsManager(), Constants.getInstance().dcDetector); /* 6 */ - ClusterCleanerUtilities ct = new ClusterCleanerUtilities(); + hitRead.fetch_DCHits(event); /* 7 */ - RecoBankWriter rbc = new RecoBankWriter(this.getBanks()); - /* 8 */ - HitReader hitRead = new HitReader(this.getBanks(), this.getRawBankOrders(), super.getConstantsManager(), Constants.getInstance().dcDetector); - /* 9 */ - hitRead.fetch_DCHits(event, noiseAnalysis, parameters, results); - /* 10 */ //I) get the hits List hits = hitRead.get_DCHits(Constants.getInstance().SECTORSELECT); //II) process the hits @@ -73,7 +58,7 @@ public boolean processDataEvent(DataEvent event) { if (hits.isEmpty()) { return true; } - /* 11 */ + /* 8 */ //2) find the clusters from these hits ClusterFinder clusFinder = new ClusterFinder(); List clusters = clusFinder.FindHitBasedClusters(hits, @@ -84,7 +69,7 @@ public boolean processDataEvent(DataEvent event) { return true; } else { List fhits = rbc.createRawHitList(hits); - /* 13 */ + /* 9 */ rbc.updateListsWithClusterInfo(fhits, clusters); event.appendBanks(rbc.fillHitsBank(event, fhits), rbc.fillHBClustersBank(event, clusters) diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java index eae796665e..c25bc4f479 100644 --- a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java +++ b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java @@ -1,9 +1,5 @@ package org.jlab.service.dc; -import cnuphys.snr.NoiseReductionParameters; -import cnuphys.snr.clas12.Clas12NoiseAnalysis; -import cnuphys.snr.clas12.Clas12NoiseResult; - import java.util.ArrayList; import java.util.List; @@ -69,28 +65,17 @@ public boolean processDataEvent(DataEvent event) { // get Field Swim dcSwim = new Swim(); /* 2 */ - // init SNR - Clas12NoiseResult results = new Clas12NoiseResult(); - /* 3 */ - Clas12NoiseAnalysis noiseAnalysis = new Clas12NoiseAnalysis(); - /* 4 */ - NoiseReductionParameters parameters = - new NoiseReductionParameters( - 2, - Constants.SNR_LEFTSHIFTS, - Constants.SNR_RIGHTSHIFTS); - /* 5 */ ClusterFitter cf = new ClusterFitter(); - /* 6 */ + /* 3 */ ClusterCleanerUtilities ct = new ClusterCleanerUtilities(); - /* 7 */ + /* 4 */ RecoBankWriter rbc = new 
RecoBankWriter(this.getBanks()); - /* 8 */ + /* 5 */ HitReader hitRead = new HitReader(this.getBanks(), this.getRawBankOrders(), super.getConstantsManager(), Constants.getInstance().dcDetector); - /* 9 */ - hitRead.fetch_DCHits(event, noiseAnalysis, parameters, results); + /* 6 */ + hitRead.fetch_DCHits(event); - /* 10 */ + /* 7 */ //I) get the hits List hits = hitRead.get_DCHits(); //II) process the hits @@ -98,7 +83,7 @@ public boolean processDataEvent(DataEvent event) { if (hits.isEmpty()) { return true; } - /* 11 */ + /* 8 */ //2) find the clusters from these hits ClusterFinder clusFinder = new ClusterFinder(); List clusters = clusFinder.FindHitBasedClusters(hits, @@ -108,17 +93,17 @@ public boolean processDataEvent(DataEvent event) { if (clusters.isEmpty()) { return true; } - /* 12 */ + /* 9 */ List fhits = rbc.createRawHitList(hits); - /* 13 : assign cluster IDs to hits: if hit is associated to two clusters, the second survives*/ + /* 10 : assign cluster IDs to hits: if hit is associated to two clusters, the second survives*/ rbc.updateListsWithClusterInfo(fhits, clusters); - /* 14 */ + /* 11 */ //3) find the segments from the fitted clusters SegmentFinder segFinder = new SegmentFinder(); List segments = segFinder.get_Segments(clusters, event, Constants.getInstance().dcDetector, false); - /* 15 */ + /* 12 */ // need 6 segments to make a trajectory if (segments.isEmpty()) { rbc.fillAllHBBanks(event, @@ -142,7 +127,7 @@ public boolean processDataEvent(DataEvent event) { } } segments.removeAll(rmSegs); - /* 16 */ + /* 13 */ CrossMaker crossMake = new CrossMaker(); List crosses = crossMake.find_Crosses(segments, Constants.getInstance().dcDetector); if (crosses.isEmpty()) { @@ -154,7 +139,7 @@ public boolean processDataEvent(DataEvent event) { null); return true; } - /* 17 */ + /* 14 */ CrossListFinder crossLister = new CrossListFinder(); CrossList crosslist = crossLister.candCrossLists(event, crosses, @@ -163,14 +148,14 @@ public boolean processDataEvent(DataEvent event) { Constants.getInstance().dcDetector, null, dcSwim, false); - /* 18 */ + /* 15 */ //6) find the list of track candidates TrackCandListFinder trkcandFinder = new TrackCandListFinder(Constants.HITBASE); List trkcands = trkcandFinder.getTrackCands(crosslist, Constants.getInstance().dcDetector, Swimmer.getTorScale(), dcSwim, false); - /* 19 */ + /* 16 */ // track found int trkId = 1; From c8510a2c8adab3c6c99e1098aa2b36d5af8b00f2 Mon Sep 17 00:00:00 2001 From: Tongtong Cao Date: Mon, 24 Mar 2025 17:13:39 -0400 Subject: [PATCH 2/5] reset limit for prob in DC clustering and cancel requirement that no skipped layer for cluster candidates from splitter (#500) --- .../dc/cluster/ClusterCleanerUtilities.java | 40 ++++++------------- .../jlab/rec/dc/cluster/ClusterFinder.java | 3 +- 2 files changed, 15 insertions(+), 28 deletions(-) diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java index b2b37c20f1..6ab72f997e 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java @@ -188,34 +188,20 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI } } } - //no gaps - List contigArrayOfHits = new ArrayList<>(); //contiguous cluster - - boolean passCluster = true; - for (int l = 1; l <= Constants.NLAYR; l++) { - for (int i = 0; i < newClus.size(); i++) { - if 
(newClus.get(i).get_Layer() == l) { - contigArrayOfHits.add(newClus.get(i)); - } - } - } - for (int i = 0; i < contigArrayOfHits.size() - 1; i++) { //if there is a gap do not include in list - if (contigArrayOfHits.get(i + 1).get_Layer() - contigArrayOfHits.get(i).get_Layer() > 1) { - passCluster = false; + + //Limits for cluster candiates + boolean passCluster = false; + int nLayers = count_nlayers_in_cluster(newClus); + if((!isExceptionalCluster(newClus) && nLayers >= Constants.DC_MIN_NLAYERS) + || (isExceptionalCluster(newClus) && nLayers >= Constants.DC_MIN_NLAYERS - 1)) { + //require consistency with line + cf.SetFitArray(newClus, "LC"); + cf.Fit(newClus, true); + if ((nLayers == 6 && newClus.get_fitProb() > 0.9) || (nLayers == 5 && newClus.get_fitProb() > 0.85) + || (nLayers == 4 && newClus.get_fitProb() > 0.75) || (nLayers == 3 && newClus.get_fitProb() > 0.65)) { + passCluster = true; } } - //require 4 layers to make a cluster - if ((!isExceptionalCluster(contigArrayOfHits) && count_nlayers_in_cluster(contigArrayOfHits) < Constants.DC_MIN_NLAYERS) - || (isExceptionalCluster(contigArrayOfHits) && count_nlayers_in_cluster(contigArrayOfHits) < Constants.DC_MIN_NLAYERS - 1)) { - passCluster = false; - } - - //require consistency with line - cf.SetFitArray(newClus, "LC"); - cf.Fit(newClus, true); - if (newClus.get_fitProb() < 0.9) { - passCluster = false; - } if (!(splitclusters.contains(newClus)) && passCluster) { splitclusters.add(newClus); @@ -314,7 +300,7 @@ public int count_nlayers_hit(Hit[] hits_inlayer) { * @param hitsInClus the hits in a cluster * @return the number of layers in a cluster */ - int count_nlayers_in_cluster(List hitsInClus) { + int count_nlayers_in_cluster(List hitsInClus) { // count hits in each layer int nlayr = 6; int[] nlayers = new int[nlayr]; diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java index d9b8eb2ab8..a2d1b735fe 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java @@ -236,7 +236,8 @@ public List FindHitBasedClusters(List allhits, ClusterCleane int idSharedHits = numTDCBankRows + 10000; for (FittedCluster clus : fittedClusList) { if (clus != null && ((!ct.isExceptionalFittedCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS) || - (ct.isExceptionalFittedCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS-1)) && clus.get_fitProb()>Constants.HITBASEDTRKGMINFITHI2PROB) { + (ct.isExceptionalFittedCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS-1)) + && ((ct.count_nlayers_in_cluster(clus) < Constants.DC_MIN_NLAYERS && clus.get_fitProb() > 0.4) || (ct.count_nlayers_in_cluster(clus) >= Constants.DC_MIN_NLAYERS && clus.get_fitProb()>Constants.HITBASEDTRKGMINFITHI2PROB))) { // update the hits for (FittedHit fhit : clus) { From c4b11bac9d8a2b56da676a58939b1c72e0f10bad Mon Sep 17 00:00:00 2001 From: Tongtong Cao Date: Thu, 17 Apr 2025 11:25:53 -0400 Subject: [PATCH 3/5] Add conventional tracking into ai-assisted tracking as complementary (#543) * As supplementary, take conventional tracking with remaining cluters after AI-assisted tracking * add one more bit into track status to tell if track is from AI-asssisted tracking --- .../java/org/jlab/rec/dc/track/Track.java | 13 ++ .../jlab/service/dc/DCHBPostClusterAI.java | 197 ++++++++++++++++-- .../java/org/jlab/service/dc/DCTBEngine.java | 5 + 3 files changed, 192 
insertions(+), 23 deletions(-) diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/track/Track.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/track/Track.java index 5105388bc9..d9c8d75825 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/track/Track.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/track/Track.java @@ -66,6 +66,8 @@ public void setFinalStateVec(StateVec finalStateVec) { private Segment _singleSuperlayer ; private int _fitConvergenceStatus; private StateVec finalStateVec ; + + private boolean isAITrack = false; public Track() { } @@ -98,6 +100,14 @@ public Segment getSingleSuperlayer() { public void setSingleSuperlayer(Segment _singleSuperlayer) { this._singleSuperlayer = _singleSuperlayer; } + + public void setIsAITrack(boolean isAITrack){ + this.isAITrack = isAITrack; + } + + public boolean getIsAITrack(){ + return isAITrack; + } public int getBitStatus() { int status = 0; @@ -119,6 +129,9 @@ public int getBitStatus() { for(int isl = 0; isl <6; isl++) { status |= segmentStatus[isl] << isl*2; } + + status |= (this.isAITrack ? 1 : 0) << 12; // The 13th bit tells if track is from AI-assisted trcking; 1: yes; 0: no + return status; } diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java index 0b63a3310b..4651716770 100644 --- a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java +++ b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.logging.Level; import org.jlab.clas.swimtools.Swim; import org.jlab.clas.swimtools.Swimmer; @@ -20,6 +21,12 @@ import org.jlab.rec.dc.segment.Segment; import org.jlab.rec.dc.track.Track; import org.jlab.rec.dc.track.TrackCandListFinder; +import org.jlab.rec.dc.cluster.ClusterFinder; +import org.jlab.rec.dc.cluster.ClusterFitter; +import org.jlab.rec.dc.segment.SegmentFinder; +import org.jlab.rec.dc.cross.CrossMaker; +import org.jlab.rec.dc.trajectory.Road; +import org.jlab.rec.dc.trajectory.RoadFinder; /** * @@ -52,18 +59,15 @@ public boolean processDataEvent(DataEvent event) { return true; } + ////// AI-assisted tracking /* IO */ HitReader reader = new HitReader(this.getBanks(), Constants.getInstance().dcDetector); reader.initialize(event); RecoBankWriter writer = new RecoBankWriter(this.getBanks()); // get Field Swim dcSwim = new Swim(); - /* 2 */ - - /* 5 */ LOGGER.log(Level.FINE, "HB AI process event"); - /* 7 */ - /* 8 */ + //AI List trkcands = null; List crosses = null; @@ -77,7 +81,7 @@ public boolean processDataEvent(DataEvent event) { List hits = reader.get_DCHits(); fhits = new ArrayList<>(); //II) process the hits - //1) exit if hit list is empty + // Exit if hit list is empty if (hits.isEmpty()) { return true; } @@ -128,35 +132,182 @@ public boolean processDataEvent(DataEvent event) { // remove overlaps trkcandFinder.removeOverlappingTracks(trkcands); for (Track trk : trkcands) { + trk.setIsAITrack(true); + // reset the id trk.set_Id(trkId); trkcandFinder.matchHits(trk.getStateVecs(), trk, Constants.getInstance().dcDetector, dcSwim); - for (Cross c : trk) { - c.set_CrossDirIntersSegWires(); - clusters.add(c.get_Segment1().get_fittedCluster()); - clusters.add(c.get_Segment2().get_fittedCluster()); - trkcandFinder.setHitDoubletsInfo(c.get_Segment1()); - trkcandFinder.setHitDoubletsInfo(c.get_Segment2()); - for 
(FittedHit h1 : c.get_Segment1()) { - h1.set_AssociatedHBTrackID(trkId); - //if(h1.get_AssociatedHBTrackID()>0) - fhits.add(h1); + trkId++; + } + } + + ////// Find tracks by rest of clusters using conventional tracking + List clustersConv = null; + List segmentsConv = null; + List crossesConv = null; + List trkcandsConv = null; + + //1) read hits from the banks + Map> hitsConv = reader.read_Hits(event); + + //2) find clusters from these hits + ClusterFinder clusFinder = new ClusterFinder(); + ClusterFitter cf = new ClusterFitter(); + clustersConv = clusFinder.RecomposeClusters(hitsConv, Constants.getInstance().dcDetector, cf); + + //3) remove clusters which are on tracks + List removedClustersConv = new ArrayList(); + for(FittedCluster cls : clustersConv){ + boolean flag = false; + for(Track trk : trkcands){ + if(flag) break; + for(Cross crs : trk){ + if(cls.get_Id() == crs.get_Segment1().get_Id() || cls.get_Id() == crs.get_Segment2().get_Id()) { + removedClustersConv.add(cls); + flag = true; + break; + } + } + } + } + clustersConv.removeAll(removedClustersConv); + clusters.addAll(clustersConv); + + //4) find segments from clusters + SegmentFinder segFinder = new SegmentFinder(); + segmentsConv = segFinder.get_Segments(clustersConv, + event, + Constants.getInstance().dcDetector, false); + List rmSegsConv = new ArrayList<>(); + // clean up hit-based segments + double trkDocOverCellSize; + for (Segment se : segmentsConv) { + trkDocOverCellSize = 0; + for (FittedHit fh : se.get_fittedCluster()) { + trkDocOverCellSize += fh.get_ClusFitDoca() / fh.get_CellSize(); + } + if (trkDocOverCellSize / se.size() > 1.1) { + rmSegsConv.add(se); + } + } + segmentsConv.removeAll(rmSegsConv); + segments.addAll(segmentsConv); + + //5) find crosses from segments + CrossMaker crossMake = new CrossMaker(); + crossesConv = crossMake.find_Crosses(segmentsConv, Constants.getInstance().dcDetector); + crosses.addAll(crossesConv); + + //6) find cross lists from crosses + CrossList crosslistConv = crossLister.candCrossLists(event, crossesConv, + false, + null, + Constants.getInstance().dcDetector, + null, + dcSwim, false); + + //7) find track candidates with 5 or 6 clusters + // track candidates with 6 clusters + trkcandsConv = trkcandFinder.getTrackCands(crosslistConv, + Constants.getInstance().dcDetector, + Swimmer.getTorScale(), + dcSwim, false); + + // track candidates with 5 clusters + RoadFinder rf = new RoadFinder(); + List allRoadsConv = rf.findRoads(segmentsConv, Constants.getInstance().dcDetector); + List Segs2RoadConv = new ArrayList<>(); + List psegmentsConv = new ArrayList<>(); + for (Road r : allRoadsConv) { + Segs2RoadConv.clear(); + int missingSL = -1; + for (int ri = 0; ri < 3; ri++) { + if (r.get(ri).associatedCrossId == -1) { + if (r.get(ri).get_Superlayer() % 2 == 1) { + missingSL = r.get(ri).get_Superlayer() + 1; + } else { + missingSL = r.get(ri).get_Superlayer() - 1; } - for (FittedHit h2 : c.get_Segment2()) { - h2.set_AssociatedHBTrackID(trkId); - //if(h2.get_AssociatedHBTrackID()>0) - fhits.add(h2); + } + } + if(missingSL==-1) + continue; + for (int ri = 0; ri < 3; ri++) { + for (Segment s : segmentsConv) { + if (s.get_Sector() == r.get(ri).get_Sector() && + s.get_Region() == r.get(ri).get_Region() && + s.associatedCrossId == r.get(ri).associatedCrossId && + r.get(ri).associatedCrossId != -1) { + if (s.get_Superlayer() % 2 == missingSL % 2) + Segs2RoadConv.add(s); } } - trk.calcTrajectory(trk.getId(), dcSwim, trk.get_Vtx0(), trk.get_pAtOrig(), trk.get_Q()); - trkId++; + } + if 
(Segs2RoadConv.size() == 2) { + Segment pSegmentConv = rf.findRoadMissingSegment(Segs2RoadConv, + Constants.getInstance().dcDetector, + r.a); + if (pSegmentConv != null) + psegmentsConv.add(pSegmentConv); } } + + segmentsConv.addAll(psegmentsConv); + List pcrossesConv = crossMake.find_Crosses(segmentsConv, Constants.getInstance().dcDetector); + CrossList pcrosslistConv = crossLister.candCrossLists(event, pcrossesConv, + false, + null, + Constants.getInstance().dcDetector, + null, + dcSwim, true); + List mistrkcandsConv = trkcandFinder.getTrackCands(pcrosslistConv, + Constants.getInstance().dcDetector, + Swimmer.getTorScale(), + dcSwim, false); - // no candidate found, stop here and save the hits, + //8) Select overlapping tracks from all track candidates with 5 or 6 clusters, and update hits in tracks + trkcandsConv.addAll(mistrkcandsConv); + if (!trkcandsConv.isEmpty()) { + // remove overlaps + trkcandFinder.removeOverlappingTracks(trkcandsConv); + for (Track trk : trkcandsConv) { + // reset the id + trk.set_Id(trkId); + trkcandFinder.matchHits(trk.getStateVecs(), + trk, + Constants.getInstance().dcDetector, + dcSwim); + trkId++; + } + } + + //////gather all the hits for pointer bank creation + trkcands.addAll(trkcandsConv); + trkId=1; + for (Track trk : trkcands) { + trk.calcTrajectory(trk.getId(), dcSwim, trk.get_Vtx0(), trk.get_pAtOrig(), trk.get_Q()); + for (Cross c : trk) { + c.set_CrossDirIntersSegWires(); + trkcandFinder.setHitDoubletsInfo(c.get_Segment1()); + trkcandFinder.setHitDoubletsInfo(c.get_Segment2()); + for (FittedHit h1 : c.get_Segment1()) { + h1.set_AssociatedHBTrackID(trkId); + //if(h1.get_AssociatedHBTrackID()>0) + fhits.add(h1); + } + for (FittedHit h2 : c.get_Segment2()) { + h2.set_AssociatedHBTrackID(trkId); + //if(h2.get_AssociatedHBTrackID()>0) + fhits.add(h2); + } + } + trkId++; + } + + // no candidate found, stop here and save the hits, // the clusters, the segments, the crosses if (trkcands.isEmpty()) { event.appendBanks( diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCTBEngine.java b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCTBEngine.java index 2bd9d1e9f9..ee5952aa32 100644 --- a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCTBEngine.java +++ b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCTBEngine.java @@ -189,6 +189,11 @@ public boolean processDataEvent(DataEvent event) { trkbank.getFloat("tx", i), trkbank.getFloat("ty", i)); HBFinalSV.setZ(trkbank.getFloat("z", i)); HBtrk.setFinalStateVec(HBFinalSV); + + int status = trkbank.getShort("status", i); + int isAITrack = (status >> 12) & 1; + HBtrk.setIsAITrack((isAITrack == 1)); + TrackArray[HBtrk.get_Id()-1] = HBtrk; // TrackArray[HBtrk.get_Id()-1].set_Status(0); } From 3c36b6740f8b47c539a0385b5cc3446495fc10da Mon Sep 17 00:00:00 2001 From: Tongtong Cao Date: Fri, 25 Apr 2025 20:08:30 -0400 Subject: [PATCH 4/5] fix an issue in DCHBPostClusterAI (#589) * fix an issue in DCHBPostClusterAI.java to save all necessary clusters in HB cluster bank * add clusters in tracks from AI-assisted tracking into cluster list --- .../java/org/jlab/service/dc/DCHBPostClusterAI.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java index 4651716770..c60e14f4e0 100644 --- a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java +++ 
b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBPostClusterAI.java @@ -30,7 +30,7 @@ /** * - * @author ziegler + * @author ziegler, tongtong */ public class DCHBPostClusterAI extends DCEngine { @@ -71,7 +71,7 @@ public boolean processDataEvent(DataEvent event) { //AI List trkcands = null; List crosses = null; - List clusters = null; + List clusters = new ArrayList<>(); List segments = null; List fhits = null; @@ -103,7 +103,6 @@ public boolean processDataEvent(DataEvent event) { LOGGER.log(Level.FINE, "Pass Cross"+c.printInfo()); } if (crosses.isEmpty()) { - clusters = new ArrayList<>(); for(Segment seg : segments) { clusters.add(seg.get_fittedCluster()); } @@ -126,7 +125,6 @@ public boolean processDataEvent(DataEvent event) { dcSwim, true); // track found - clusters = new ArrayList<>(); int trkId = 1; if (trkcands.size() > 0) { // remove overlaps @@ -134,6 +132,11 @@ public boolean processDataEvent(DataEvent event) { for (Track trk : trkcands) { trk.setIsAITrack(true); + for (Cross c : trk) { + clusters.add(c.get_Segment1().get_fittedCluster()); + clusters.add(c.get_Segment2().get_fittedCluster()); + } + // reset the id trk.set_Id(trkId); trkcandFinder.matchHits(trk.getStateVecs(), @@ -312,6 +315,7 @@ public boolean processDataEvent(DataEvent event) { if (trkcands.isEmpty()) { event.appendBanks( writer.fillHBHitsBank(event, fhits), + writer.fillHBClustersBank(event, clusters), writer.fillHBSegmentsBank(event, segments), writer.fillHBCrossesBank(event, crosses)); } From d08cb18c9a336d802de41e8a83b74c724ad1e39a Mon Sep 17 00:00:00 2001 From: tongtongcao Date: Mon, 5 May 2025 11:41:28 -0400 Subject: [PATCH 5/5] ci: re-trigger
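
A note on the status-word convention introduced in patch 3 above: Track.getBitStatus() packs the per-superlayer segment status into bits 0-11 and now also sets bit 12 when the track was found by AI-assisted tracking, and DCTBEngine recovers that flag from the bank with (status >> 12) & 1. The sketch below only restates that convention in one place; it is not code from the patch, and the class, method, and constant names are illustrative.

    // Illustrative sketch of the DC track status word after patch 3.
    // AI_TRACK_BIT and the helper names are assumptions, not part of the patch.
    public final class TrackStatusBits {
        // Bits 0-11: per-superlayer segment status (2 bits x 6 superlayers).
        // Bit 12: 1 if the track comes from AI-assisted tracking, 0 if conventional.
        private static final int AI_TRACK_BIT = 12;

        static int encode(int segmentStatusBits, boolean isAITrack) {
            int status = segmentStatusBits & 0xFFF;          // keep the 12 segment-status bits
            status |= (isAITrack ? 1 : 0) << AI_TRACK_BIT;   // set the 13th bit for AI tracks
            return status;
        }

        static boolean isAITrack(short bankStatus) {
            return ((bankStatus >> AI_TRACK_BIT) & 1) == 1;  // same decoding as in DCTBEngine
        }
    }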
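
A note on the cluster acceptance change in patch 2 above: ClusterSplitter no longer requires a gap-free candidate with a flat fit probability above 0.9; instead, after the layer count clears the DC_MIN_NLAYERS requirement (relaxed by one layer for exceptional clusters) and an "LC" linear fit is performed, the probability threshold scales with the number of layers: 0.9 for 6 layers, 0.85 for 5, 0.75 for 4, and 0.65 for 3. A minimal sketch of that threshold table, with an assumed helper name; the standalone method does not exist in the patch.

    // Illustrative: layer-dependent fit-probability cut used by the cluster splitter after patch 2.
    static boolean passesSplitterProbCut(int nLayers, double fitProb) {
        switch (nLayers) {
            case 6:  return fitProb > 0.90;
            case 5:  return fitProb > 0.85;
            case 4:  return fitProb > 0.75;
            case 3:  return fitProb > 0.65;
            default: return false;   // candidates outside 3-6 layers never reach the probability cut
        }
    }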