Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
22d148b
add convenience method
baltzell Apr 22, 2026
11762ce
remove unused stuff
baltzell Apr 22, 2026
4c319c4
add decoding engine
baltzell Apr 22, 2026
09c09e5
add it to the clara yaml
baltzell Apr 22, 2026
a460adf
use a pool
baltzell Apr 23, 2026
11874a9
hmm
baltzell Apr 23, 2026
11f17a8
allow decoder instances to share ConstantsManagers
baltzell Apr 23, 2026
3d741af
share ConstantsManagers
baltzell Apr 23, 2026
27d8cc4
kludge test
baltzell Apr 23, 2026
adb6567
cleanup
baltzell Apr 23, 2026
3a91417
try this
baltzell Apr 23, 2026
064fc5c
inherit ConstantsManagers
baltzell Apr 23, 2026
38c124a
cleanup
baltzell Apr 23, 2026
54cefae
only check tables if not shared
baltzell Apr 23, 2026
0d8d854
higher ports on macos, cleanup process dpe process
baltzell Apr 23, 2026
41f3216
remove ineffective pid trap
baltzell Apr 23, 2026
b800a37
Revert "higher ports on macos, cleanup process dpe process"
baltzell Apr 24, 2026
746c196
Revert "fix job name"
baltzell Apr 24, 2026
acc7d41
Revert "decouple ubuntu/macos builds to reduce wait"
baltzell Apr 24, 2026
2c61171
Revert "remove example engine"
baltzell Apr 24, 2026
37fd575
restore reported data type
baltzell Apr 25, 2026
2f4cde4
rename class
baltzell Apr 27, 2026
900b8b3
undo
baltzell Apr 27, 2026
1d32a0d
fix rebase oops
baltzell Apr 27, 2026
3f9994c
rename class
baltzell Apr 27, 2026
a1b5e4a
fix rebase oops
baltzell Apr 27, 2026
c56a514
stf
baltzell Apr 27, 2026
d8c4f2f
just events, not per thread
baltzell Apr 28, 2026
5380c80
reduce constants sharing
baltzell Apr 28, 2026
91d7a92
bugfix
baltzell Apr 28, 2026
b791b24
share one RCDBManager
baltzell Apr 28, 2026
ed10100
bugfix
baltzell Apr 28, 2026
aec8922
restore
baltzell May 9, 2026
e15903b
DecoderEngine: just pass along HIPO events
baltzell May 11, 2026
70da57d
cleanup, avoid class variable
baltzell May 12, 2026
968d675
remove debugging leftover
baltzell May 12, 2026
cb67bba
revert to hard-coded byte order
baltzell May 13, 2026
8178612
default to no sharing
baltzell May 16, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public int readEventCount() throws EventReaderException {

@Override
public ByteOrder readByteOrder() throws EventReaderException {
return reader.getFileByteOrder();
return ByteOrder.LITTLE_ENDIAN; //reader.getFileByteOrder();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@

import org.jlab.io.base.DataEvent;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.io.hipo.HipoDataSync;

import org.jlab.jnp.hipo4.data.Bank;
import org.jlab.jnp.hipo4.data.Event;
Expand All @@ -38,27 +36,28 @@ public class CLASDecoder {
protected SchemaFactory schemaFactory = new SchemaFactory();
private CodaEventDecoder codaDecoder = null;
private List<DetectorDataDgtz> dataList = new ArrayList<>();
private HipoDataSync writer = null;
private HipoDataEvent hipoEvent = null;
private boolean isRunNumberFixed = false;
private int decoderDebugMode = 0;
private ModeAHDC ahdcExtractor = new ModeAHDC();
private RCDBManager rcdbManager = new RCDBManager();
private static RCDBManager rcdbManager = new RCDBManager();

// Constructor selecting the "development" CCDB translation tables
// (forwarded to DetectorEventDecoder(boolean)).
public CLASDecoder(boolean development){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder(development);
// NOTE(review): this diff view interleaves removed lines without markers;
// the writer/hipoEvent assignments below appear to be deleted by this
// change ("remove unused stuff" commit) -- confirm against the real file.
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
// Load the HIPO bank schemas from the standard CLAS12 resource directory.
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

// Default constructor: production translation tables, fresh constants managers.
public CLASDecoder(){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder();
// NOTE(review): scraped diff interleaves removed lines; writer/hipoEvent
// look deleted by this change ("remove unused stuff" commit) -- confirm.
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
// Load the HIPO bank schemas from the standard CLAS12 resource directory.
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

/**
 * Sharing constructor: builds a decoder that reuses the constants managers
 * of an existing decoder (via the DetectorEventDecoder copy constructor),
 * avoiding a redundant reload of the CCDB translation tables.
 *
 * @param d an already-initialized decoder whose constants are shared
 */
public CLASDecoder(CLASDecoder d) {
    this.codaDecoder = new CodaEventDecoder();
    this.detectorDecoder = new DetectorEventDecoder(d.detectorDecoder);
    this.schemaFactory.initFromDirectory(
            ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4"));
}
Expand Down Expand Up @@ -792,4 +791,8 @@ public Event getDecodedEvent(EvioDataEvent rawEvent, int run, int counter, Doubl

return decodedEvent;
}

/**
 * Convenience overload of {@link #getDecodedEvent(EvioDataEvent,int,int,Double,Double)}
 * using default arguments: run/counter of -1 and no torus/solenoid overrides.
 *
 * @param rawEvent the raw EVIO event to decode
 * @return the decoded HIPO event
 */
public Event getDecodedEvent(EvioDataEvent rawEvent) {
    return this.getDecodedEvent(rawEvent, -1, -1, null, null);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,17 @@ public DetectorEventDecoder(boolean development){
}
}

/**
 * Default constructor: runs the full initialization, including loading the
 * translation/fitter/scaler constants managers.
 */
public DetectorEventDecoder() {
    initDecoder();
}

/**
 * Sharing constructor: adopts the constants managers of an existing,
 * already-initialized decoder instead of loading its own, then performs
 * only the table-key setup (initDecoder(false) skips manager loading).
 *
 * @param d the decoder whose constants managers are shared
 */
public DetectorEventDecoder(DetectorEventDecoder d) {
    this.translationManager = d.translationManager;
    this.fitterManager = d.fitterManager;
    this.scalerManager = d.scalerManager;
    this.initDecoder(false);
}

public void setTimestamp(String timestamp) {
translationManager.setTimeStamp(timestamp);
fitterManager.setTimeStamp(timestamp);
Expand Down Expand Up @@ -80,10 +91,6 @@ public float getRcdbSolenoidScale() {
getValue()).floatValue();
}

public DetectorEventDecoder(){
this.initDecoder();
}

public final void initDecoderDev(){
keysTrans = Arrays.asList(new DetectorType[]{ DetectorType.HTCC,DetectorType.BST,DetectorType.RTPC} );
tablesTrans = Arrays.asList(new String[]{ "/daq/tt/clasdev/htcc","/daq/tt/clasdev/svt","/daq/tt/clasdev/rtpc" });
Expand All @@ -95,7 +102,11 @@ public final void initDecoderDev(){
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
}

public final void initDecoder(){
/**
 * Full initialization: sets up the table keys and also loads the constants
 * managers (delegates with initializeManagers = true).
 */
public final void initDecoder() {
    this.initDecoder(true);
}

public final void initDecoder(boolean initializeManagers){

// Detector translation table
keysTrans = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.LTCC,DetectorType.ECAL,DetectorType.FTOF,
Expand All @@ -109,7 +120,6 @@ public final void initDecoder(){
"/daq/tt/rf","/daq/tt/bmt","/daq/tt/fmt","/daq/tt/rich2","/daq/tt/hel","/daq/tt/band","/daq/tt/rtpc",
"/daq/tt/raster","/daq/tt/atof","/daq/tt/ahdc"
});
translationManager.init(tablesTrans);

// ADC waveform fitter translation table
keysFitter = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.FTOF,DetectorType.LTCC,
Expand All @@ -122,17 +132,20 @@ public final void initDecoder(){
"/daq/config/fmt","/daq/fadc/hel","/daq/fadc/rf","/daq/fadc/band","/daq/fadc/raster",
"/daq/config/ahdc"
});
fitterManager.init(tablesFitter);

// Data filter list
keysFilter = Arrays.asList(new DetectorType[]{DetectorType.DC});

scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));


keysMicromega = Arrays.asList(new DetectorType[]{DetectorType.BMT,DetectorType.FMT,DetectorType.FTTRK});

checkTables();
if (initializeManagers) {
translationManager.init(tablesTrans);
fitterManager.init(tablesFitter);
scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
checkTables();
}

}

public void checkTables() {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
package org.jlab.clas.reco;

import java.util.Set;
import java.util.HashSet;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.jlab.clara.base.ClaraUtil;
import org.jlab.clara.engine.Engine;
import org.jlab.clara.engine.EngineData;
import org.jlab.clara.engine.EngineDataType;
import org.jlab.clara.engine.EngineStatus;
import org.jlab.detector.decode.CLASDecoder;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.jnp.hipo4.data.SchemaFactory;
import org.json.JSONObject;

/**
*
* @author baltzell
*/
public class DecoderEngine implements Engine {

static final int POOL_SIZE = 64;
static final Set<EngineDataType> ED_TYPES = ClaraUtil.buildDataTypes(
Clas12Types.EVIO,Clas12Types.HIPO,EngineDataType.JSON,EngineDataType.STRING);

SchemaFactory schema;
BlockingQueue<CLASDecoder> pool;
int constantsShared = 64;

public DecoderEngine() {
schema = new SchemaFactory();
schema.initFromDirectory(System.getenv("CLAS12DIR") + "/etc/bankdefs/hipo4");
Comment thread
baltzell marked this conversation as resolved.
}

@Override
public Set<EngineDataType> getInputDataTypes() { return ED_TYPES; }
@Override
public Set<EngineDataType> getOutputDataTypes() { return ED_TYPES; }
@Override
public EngineData executeGroup(Set<EngineData> set) { return null; }
@Override
public Set<String> getStates() { return new HashSet<>(); }
@Override
public String getDescription() { return "decoder engine"; }
@Override
public String getVersion() { return "1.0"; }
@Override
public String getAuthor() { return "baltzell"; }
@Override
public void reset() {}
@Override
public void destroy() {}

@Override
public EngineData configure(EngineData ed) {
JSONObject json = new JSONObject(ed.getData());
pool = new ArrayBlockingQueue<>(POOL_SIZE);
CLASDecoder d0 = null;
for (int i=0; i<POOL_SIZE; i++) {
CLASDecoder d;
if (i % constantsShared == 0) {
d0 = new CLASDecoder();
if (json.has("variation")) d0.setVariation(json.getString("variation"));
if (json.has("timestamp")) d0.setVariation(json.getString("timestamp"));
d = d0;
}
else {
d = new CLASDecoder(d0);
}
pool.add(d);
}
return ed;
}

@Override
public EngineData execute(EngineData input) {

EngineData output = input;

// if it's EVIO, decode it, otherwise just pass it along
if (input.getMimeType().equals("binary/data-evio")) {
EvioDataEvent evio;
try {
ByteBuffer bb = (ByteBuffer) input.getData();
//evio = new EvioDataEvent(bb.array(), bb.order());
evio = new EvioDataEvent(bb.array(), ByteOrder.LITTLE_ENDIAN);
} catch (Exception e) {
String msg = String.format("Error reading input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
HipoDataEvent hipo;
try {
CLASDecoder d = pool.take();
hipo = new HipoDataEvent(d.getDecodedEvent(evio),schema);
pool.put(d);
output.setData("binary/data-hipo", hipo.getHipoEvent());
} catch (Exception e) {
String msg = String.format("Error processing input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
}

return output;
}
}
6 changes: 4 additions & 2 deletions etc/services/rgd-clarode.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,14 @@ configuration:
outputBankPrefix: "HB"
io-services:
reader:
class: org.jlab.io.clara.DecoderReader
name: DecoderReader
class: org.jlab.io.clara.EvioToEvioReader
name: EvioReader
writer:
class: org.jlab.io.clara.DecoderWriter
name: DecoderWriter
services:
- class: org.jlab.clas.reco.DecoderEngine
name: DECO
- class: org.jlab.service.ai.DCDenoiseEngine
name: DCDN
- class: org.jlab.clas.swimtools.MagFieldsEngine
Expand Down