Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
b625208
add convenience method
baltzell Apr 22, 2026
9ddeb04
remove unused stuff
baltzell Apr 22, 2026
8abc9f9
add decoding engine
baltzell Apr 22, 2026
7fc741b
add it to the clara yaml
baltzell Apr 22, 2026
d28d112
use a pool
baltzell Apr 23, 2026
c483a65
hmm
baltzell Apr 23, 2026
086a9fe
allow decoder instances to share ConstantsManagers
baltzell Apr 23, 2026
68e6b2f
share ConstantsManagers
baltzell Apr 23, 2026
2fa3ad7
kludge test
baltzell Apr 23, 2026
adfab5c
cleanup
baltzell Apr 23, 2026
fa21298
try this
baltzell Apr 23, 2026
dd5ea34
inherit ConstantsManagers
baltzell Apr 23, 2026
e8ad4a1
cleanup
baltzell Apr 23, 2026
dfdff95
only check tables if not shared
baltzell Apr 23, 2026
2e24c77
higher ports on macos, cleanup process dpe process
baltzell Apr 23, 2026
7c4d726
remove ineffective pid trap
baltzell Apr 23, 2026
5bfeb49
Revert "higher ports on macos, cleanup process dpe process"
baltzell Apr 24, 2026
74d70ff
Revert "fix job name"
baltzell Apr 24, 2026
3789826
Revert "decouple ubuntu/macos builds to reduce wait"
baltzell Apr 24, 2026
8c939a1
Revert "remove unnecessary reader class"
baltzell Apr 24, 2026
64a202c
Revert "remove example engine"
baltzell Apr 24, 2026
03fdb87
restore reported data type
baltzell Apr 25, 2026
c89a8cc
rename class
baltzell Apr 27, 2026
04cb650
undo
baltzell Apr 27, 2026
9f5708b
fix rebase oops
baltzell Apr 27, 2026
1aa6e0a
rename class
baltzell Apr 27, 2026
e62ab27
fix rebase oops
baltzell Apr 27, 2026
17c659d
cleanup
baltzell Apr 27, 2026
731ffc2
cleanup
baltzell Apr 27, 2026
0793a15
stf
baltzell Apr 27, 2026
2aacbc1
just events, not per thread
baltzell Apr 28, 2026
af5e72f
reduce constants sharing
baltzell Apr 28, 2026
be194f3
bugfix
baltzell Apr 28, 2026
1021c02
share one RCDBManager
baltzell Apr 28, 2026
137d3f8
bugfix
baltzell Apr 28, 2026
f108b4e
restore
baltzell May 9, 2026
236ce28
DecoderEngine: just pass along HIPO events
baltzell May 11, 2026
4bd0a9d
add Clas12Reader, no decoding
baltzell May 11, 2026
bea8f32
dummy initialization value
baltzell May 11, 2026
1e3e062
bugfix
baltzell May 11, 2026
dda612e
split I/O service into separate PR
baltzell May 12, 2026
f93d36a
split pull requests
baltzell May 12, 2026
7e03830
cleanup, avoid class variable
baltzell May 12, 2026
bc73d3f
remove debugging leftover
baltzell May 12, 2026
4f5da7a
revert to hard-coded byte order
baltzell May 13, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public int readEventCount() throws EventReaderException {

@Override
public ByteOrder readByteOrder() throws EventReaderException {
return reader.getFileByteOrder();
return ByteOrder.LITTLE_ENDIAN; //reader.getFileByteOrder();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@

import org.jlab.io.base.DataEvent;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.io.hipo.HipoDataSync;

import org.jlab.jnp.hipo4.data.Bank;
import org.jlab.jnp.hipo4.data.Event;
Expand All @@ -38,27 +36,28 @@ public class CLASDecoder {
protected SchemaFactory schemaFactory = new SchemaFactory();
private CodaEventDecoder codaDecoder = null;
private List<DetectorDataDgtz> dataList = new ArrayList<>();
private HipoDataSync writer = null;
private HipoDataEvent hipoEvent = null;
private boolean isRunNumberFixed = false;
private int decoderDebugMode = 0;
private ModeAHDC ahdcExtractor = new ModeAHDC();
private RCDBManager rcdbManager = new RCDBManager();
private static RCDBManager rcdbManager = new RCDBManager();

public CLASDecoder(boolean development){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder(development);
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

public CLASDecoder(){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder();
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

/**
 * Sharing constructor: builds a new decoder that reuses the given decoder's
 * constants managers (via the DetectorEventDecoder copy constructor) instead
 * of loading its own, while still creating a fresh CODA decoder and schema
 * factory.
 *
 * @param d decoder whose constants managers are shared
 *          -- NOTE(review): assumes shared managers are safe for concurrent
 *          read access; confirm against manager implementations
 */
public CLASDecoder(CLASDecoder d) {
codaDecoder = new CodaEventDecoder();
// reuse d's translation/fitter/scaler managers rather than re-initializing
detectorDecoder = new DetectorEventDecoder(d.detectorDecoder);
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}
Expand Down Expand Up @@ -792,4 +791,8 @@ public Event getDecodedEvent(EvioDataEvent rawEvent, int run, int counter, Doubl

return decodedEvent;
}

/**
 * Convenience overload: decode with default arguments (run = -1,
 * counter = -1, and null for the two remaining parameters — presumably
 * field-scale overrides; verify against the five-argument signature).
 *
 * @param rawEvent the raw EVIO event to decode
 * @return the decoded event
 */
public Event getDecodedEvent(EvioDataEvent rawEvent) {
return getDecodedEvent(rawEvent, -1, -1, null, null);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,17 @@ public DetectorEventDecoder(boolean development){
}
}

/**
 * Default constructor: runs the full decoder initialization, which loads
 * the translation/fitter/scaler tables.
 */
public DetectorEventDecoder(){
this.initDecoder();
}

/**
 * Sharing constructor: inherits the given decoder's constants managers and
 * skips their (re)initialization — initDecoder(false) sets up only the
 * key/table lists without calling init() or checkTables() on the managers.
 *
 * @param d decoder whose already-initialized managers are inherited
 */
public DetectorEventDecoder(DetectorEventDecoder d) {
translationManager = d.translationManager;
fitterManager = d.fitterManager;
scalerManager = d.scalerManager;
// false: managers were initialized by d, do not init or check tables again
initDecoder(false);
}

public void setTimestamp(String timestamp) {
translationManager.setTimeStamp(timestamp);
fitterManager.setTimeStamp(timestamp);
Expand Down Expand Up @@ -80,10 +91,6 @@ public float getRcdbSolenoidScale() {
getValue()).floatValue();
}

public DetectorEventDecoder(){
this.initDecoder();
}

public final void initDecoderDev(){
keysTrans = Arrays.asList(new DetectorType[]{ DetectorType.HTCC,DetectorType.BST,DetectorType.RTPC} );
tablesTrans = Arrays.asList(new String[]{ "/daq/tt/clasdev/htcc","/daq/tt/clasdev/svt","/daq/tt/clasdev/rtpc" });
Expand All @@ -95,7 +102,11 @@ public final void initDecoderDev(){
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
}

public final void initDecoder(){
public final void initDecoder() {
initDecoder(true);
}

public final void initDecoder(boolean initializeManagers){

// Detector translation table
keysTrans = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.LTCC,DetectorType.ECAL,DetectorType.FTOF,
Expand All @@ -109,7 +120,6 @@ public final void initDecoder(){
"/daq/tt/rf","/daq/tt/bmt","/daq/tt/fmt","/daq/tt/rich2","/daq/tt/hel","/daq/tt/band","/daq/tt/rtpc",
"/daq/tt/raster","/daq/tt/atof","/daq/tt/ahdc"
});
translationManager.init(tablesTrans);

// ADC waveform fitter translation table
keysFitter = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.FTOF,DetectorType.LTCC,
Expand All @@ -122,17 +132,20 @@ public final void initDecoder(){
"/daq/config/fmt","/daq/fadc/hel","/daq/fadc/rf","/daq/fadc/band","/daq/fadc/raster",
"/daq/config/ahdc"
});
fitterManager.init(tablesFitter);

// Data filter list
keysFilter = Arrays.asList(new DetectorType[]{DetectorType.DC});

scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));


keysMicromega = Arrays.asList(new DetectorType[]{DetectorType.BMT,DetectorType.FMT,DetectorType.FTTRK});

checkTables();
if (initializeManagers) {
translationManager.init(tablesTrans);
fitterManager.init(tablesFitter);
scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
checkTables();
}

}

public void checkTables() {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
package org.jlab.clas.reco;

import java.util.Set;
import java.util.HashSet;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.jlab.clara.base.ClaraUtil;
import org.jlab.clara.engine.Engine;
import org.jlab.clara.engine.EngineData;
import org.jlab.clara.engine.EngineDataType;
import org.jlab.clara.engine.EngineStatus;
import org.jlab.detector.decode.CLASDecoder;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.jnp.hipo4.data.SchemaFactory;
import org.json.JSONObject;

/**
*
* @author baltzell
*/
public class DecoderEngine implements Engine {

static final int POOL_SIZE = 64;
static final Set<EngineDataType> ED_TYPES = ClaraUtil.buildDataTypes(
Clas12Types.EVIO,Clas12Types.HIPO,EngineDataType.JSON,EngineDataType.STRING);

SchemaFactory schema;
BlockingQueue<CLASDecoder> pool;
int constantsShared = 16;

public DecoderEngine() {
schema = new SchemaFactory();
schema.initFromDirectory(System.getenv("CLAS12DIR") + "/etc/bankdefs/hipo4");
Comment thread
baltzell marked this conversation as resolved.
}

@Override
public Set<EngineDataType> getInputDataTypes() { return ED_TYPES; }
@Override
public Set<EngineDataType> getOutputDataTypes() { return ED_TYPES; }
@Override
public EngineData executeGroup(Set<EngineData> set) { return null; }
@Override
public Set<String> getStates() { return new HashSet<>(); }
@Override
public String getDescription() { return "decoder engine"; }
@Override
public String getVersion() { return "1.0"; }
@Override
public String getAuthor() { return "baltzell"; }
@Override
public void reset() {}
@Override
public void destroy() {}

@Override
public EngineData configure(EngineData ed) {
JSONObject json = new JSONObject(ed.getData());
pool = new ArrayBlockingQueue<>(POOL_SIZE);
CLASDecoder d0 = null;
for (int i=0; i<POOL_SIZE; i++) {
CLASDecoder d;
if (i % constantsShared == 0) {
d0 = new CLASDecoder();
if (json.has("variation")) d0.setVariation(json.getString("variation"));
if (json.has("timestamp")) d0.setVariation(json.getString("timestamp"));
d = d0;
}
else {
d = new CLASDecoder(d0);
}
pool.add(d);
}
return ed;
}

@Override
public EngineData execute(EngineData input) {

EngineData output = input;

// if it's EVIO, decode it, otherwise just pass it along
if (input.getMimeType().equals("binary/data-evio")) {
EvioDataEvent evio;
try {
ByteBuffer bb = (ByteBuffer) input.getData();
//evio = new EvioDataEvent(bb.array(), bb.order());
evio = new EvioDataEvent(bb.array(), ByteOrder.LITTLE_ENDIAN);
} catch (Exception e) {
String msg = String.format("Error reading input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
HipoDataEvent hipo;
try {
CLASDecoder d = pool.take();
hipo = new HipoDataEvent(d.getDecodedEvent(evio),schema);
pool.put(d);
output.setData("binary/data-hipo", hipo.getHipoEvent());
} catch (Exception e) {
String msg = String.format("Error processing input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
}

return output;
}
}
6 changes: 4 additions & 2 deletions etc/services/rgd-clarode.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,14 @@ configuration:
outputBankPrefix: "HB"
io-services:
reader:
class: org.jlab.io.clara.DecoderReader
name: DecoderReader
class: org.jlab.io.clara.EvioToEvioReader
name: EvioToEvioReader
writer:
class: org.jlab.io.clara.DecoderWriter
name: DecoderWriter
services:
- class: org.jlab.clas.reco.DecoderEngine
name: DECO
- class: org.jlab.service.ai.DCDenoiseEngine
name: DCDN
- class: org.jlab.clas.swimtools.MagFieldsEngine
Expand Down