adds support for compressed FTR
This commit is contained in: parent a737f5588d, commit 3b57ec029b
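
For orientation before the diff: an FTR file may now carry LZ4 block compressed chunks. Each such chunk stores its payload as a CBOR byte string containing an LZ4 block, which has to be inflated with Apache Commons Compress before the normal CBOR parsing continues. A minimal sketch of that decode step, assuming an input stream positioned at such a byte string (the wrapping class and method name are illustrative only, not part of the loader's API):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;

    import jacob.CborDecoder;

    class CompressedChunkSketch {
        /** Reads one CBOR byte string from 'in' and returns its LZ4-block-decompressed content. */
        static byte[] readCompressedChunk(InputStream in) throws IOException {
            CborDecoder parser = new CborDecoder(in);
            byte[] compressed = parser.readByteString();
            try (BlockLZ4CompressorInputStream decomp =
                    new BlockLZ4CompressorInputStream(new ByteArrayInputStream(compressed))) {
                return decomp.readAllBytes();
            }
        }
    }
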
@@ -17,15 +17,13 @@ import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

+import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;
import org.eclipse.collections.impl.map.mutable.UnifiedMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -42,7 +40,6 @@ import com.minres.scviewer.database.RelationType;
import com.minres.scviewer.database.RelationTypeFactory;
import com.minres.scviewer.database.tx.ITx;
import com.minres.scviewer.database.tx.ITxAttribute;
-import com.minres.scviewer.database.RelationType;

import jacob.CborDecoder;
import jacob.CborType;
@@ -97,9 +94,8 @@ public class FtrDbLoader implements IWaveformDbLoader {
    /** The pcs. */
    protected PropertyChangeSupport pcs = new PropertyChangeSupport(this);

    /** The Constant x. */
    static final byte[] x = "scv_tr_stream".getBytes();

+   long time_scale_factor = 1000l;

    /**
     * Adds the property change listener.
     *
@@ -203,9 +199,17 @@ public class FtrDbLoader implements IWaveformDbLoader {
        try(FileInputStream fis = new FileInputStream(file)) {
            FileChannel fc = fis.getChannel();
            for (Long offset : fileOffsets) {
-               fc.position(offset);
-               CborDecoder parser = new CborDecoder(fis);
-               ret.add(parser.readByteString());
+               if(offset>=0) {
+                   fc.position(offset);
+                   CborDecoder parser = new CborDecoder(fis);
+                   ret.add(parser.readByteString());
+               } else {
+                   fc.position(-offset);
+                   CborDecoder parser = new CborDecoder(fis);
+                   BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(new ByteArrayInputStream(parser.readByteString()));
+                   ret.add(decomp.readAllBytes());
+                   decomp.close();
+               }
            }
        } catch (Exception e) {
            LOG.error("Error parsing file "+file.getName(), e);
@@ -318,16 +322,40 @@ public class FtrDbLoader implements IWaveformDbLoader {
            while(next != null && !break_type.isEqualType(next)) {
                long tag = readTag();
                switch((int)tag) {
-               case 6: // info
+               case 6: { // info
+                   CborDecoder cbd = new CborDecoder(new ByteArrayInputStream(readByteString()));
+                   long sz = cbd.readArrayLength();
+                   assert(sz==3);
+                   long time_numerator=cbd.readInt();
+                   long time_denominator=cbd.readInt();
+                   loader.time_scale_factor = 1000000000000000l*time_numerator/time_denominator;
+                   long epoch_tag = cbd.readTag();
+                   assert(epoch_tag==1);
+                   cbd.readInt(); // epoch
                    break;
+               }
                case 8: { // dictionary uncompressed
                    parseDict(new CborDecoder(new ByteArrayInputStream(readByteString())));
                    break;
                }
+               case 9: { // dictionary compressed
+                   long sz = readArrayLength();
+                   assert(sz==2);
+                   readInt(); // uncompressed size
+                   parseDict(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
+                   break;
+               }
                case 10: { // directory uncompressed
                    parseDir(new CborDecoder(new ByteArrayInputStream(readByteString())));
                    break;
                }
+               case 11: { // directory compressed
+                   long sz = readArrayLength();
+                   assert(sz==2);
+                   readInt(); // uncompressed size
+                   parseDir(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
+                   break;
+               }
                case 12: { //tx chunk uncompressed
                    long len = readArrayLength();
                    assert(len==2);
@@ -337,10 +365,29 @@ public class FtrDbLoader implements IWaveformDbLoader {
                    parseTx(txStream, txStream.fileOffsets.size()-1, readByteString());
                    break;
                }
+               case 13: { //tx chunk compressed
+                   long len = readArrayLength();
+                   assert(len==3);
+                   long stream_id = readInt();
+                   readInt(); // uncompressed size
+                   TxStream txStream = loader.txStreams.get(stream_id);
+                   txStream.fileOffsets.add(0-inputStream.getChannel().position());
+                   BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()));
+                   parseTx(txStream, txStream.fileOffsets.size()-1, decomp.readAllBytes());
+                   decomp.close();
+                   break;
+               }
                case 14: { // relations uncompressed
                    parseRel(new CborDecoder(new ByteArrayInputStream(readByteString())));
                    break;
                }
+               case 15: { // relations compressed
+                   long sz = readArrayLength();
+                   assert(sz==2);
+                   readInt(); // uncompressed size
+                   parseRel(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
+                   break;
+               }
                }
                next = peekType();
            }
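
A detail that ties this hunk to the chunk reader earlier in the file: a compressed tx chunk (tag 13) records its file offset negated (0 - position), and the reader later uses the sign alone to decide whether the byte string at that offset is parsed directly or run through the LZ4 inflater first. A self-contained sketch of just that dispatch; readByteStringAt and lz4Inflate are hypothetical stand-ins for the channel-positioning plus CborDecoder.readByteString() step and the BlockLZ4CompressorInputStream pass, respectively:

    import java.util.function.LongFunction;
    import java.util.function.UnaryOperator;

    class OffsetSignSketch {
        // Illustrative only: the real loader does this inline with FileChannel and CborDecoder.
        static byte[] loadChunk(long storedOffset,
                                LongFunction<byte[]> readByteStringAt,
                                UnaryOperator<byte[]> lz4Inflate) {
            if (storedOffset >= 0) {
                return readByteStringAt.apply(storedOffset);                 // uncompressed chunk
            }
            return lz4Inflate.apply(readByteStringAt.apply(-storedOffset));  // compressed chunk
        }
    }
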
@@ -413,8 +460,8 @@ public class FtrDbLoader implements IWaveformDbLoader {
            assert(len==4);
            long txId = cborDecoder.readInt();
            long genId = cborDecoder.readInt();
-           long startTime = cborDecoder.readInt()*1000; //TODO: scale based on info
-           long endTime = cborDecoder.readInt()*1000; //TODO: scale based on info
+           long startTime = cborDecoder.readInt()*loader.time_scale_factor;
+           long endTime = cborDecoder.readInt()*loader.time_scale_factor;
            TxGenerator gen = loader.txGenerators.get(genId);
            FtrTx scvTx = new FtrTx(txId, gen.stream.getId(), genId, startTime, endTime, blockId, blockOffset);
            loader.maxTime = loader.maxTime > scvTx.endTime ? loader.maxTime : scvTx.endTime;
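
The two changed lines above replace the hard-coded *1000 scaling with the factor derived from the info record (time_scale_factor = 1000000000000000l * time_numerator / time_denominator). A quick sanity check of the arithmetic, assuming the trace was written with a 1 ps time unit and that the 10^15 constant reflects a femtosecond-based database resolution (both assumptions, not stated in the diff):

    // illustrative arithmetic only; the concrete values are assumed, not taken from the commit
    long time_numerator = 1;
    long time_denominator = 1_000_000_000_000L;    // numerator/denominator = 1/1e12 s = 1 ps
    long time_scale_factor = 1_000_000_000_000_000L * time_numerator / time_denominator; // = 1000
    long rawEndTime = 280000;                      // hypothetical raw timestamp from a tx chunk
    long endTime = rawEndTime * time_scale_factor; // = 280000000, the end time testCFtr expects
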
BIN  tests/com.minres.scviewer.database.test/inputs/my_db_c.ftr  Normal file
Binary file not shown.
@@ -148,4 +148,38 @@ public class DatabaseServicesTest {
            assertEquals(3, attr.size());
        });
    }
+   @Test
+   public void testCFtr() throws Exception {
+       File f = new File("inputs/my_db_c.ftr").getAbsoluteFile();
+       assertTrue(f.exists());
+       waveformDb.load(f);
+       assertNotNull(waveformDb);
+       List<IWaveform> waveforms = waveformDb.getAllWaves();
+       assertEquals(8, waveforms.size());
+       assertEquals(1, waveformDb.getChildNodes().size());
+       for(IWaveform w:waveforms) {
+           if(w.getId()==1) {
+               assertEquals(2, w.getRowCount());
+           } else if(w.getId()==2l) {
+               assertEquals(1, w.getRowCount());
+           } else if(w.getId()==3l) {
+               assertEquals(1, w.getRowCount());
+           }
+       }
+       //waveforms.stream().filter(s -> s.getId()==1).collect(Collectors.toList());
+       waveforms.stream().filter(s -> s.getId()==1).forEach(s -> {
+           assertEquals(27, s.getEvents().size());
+       });
+       waveforms.stream().filter(s -> s.getId()==1).map(s -> s.getEventsAtTime(0)).forEach(el -> {
+           assertEquals(1, el.length);
+           IEvent evt = el[0];
+           assertTrue(evt instanceof ITxEvent);
+           ITx tx = ((ITxEvent)evt).getTransaction();
+           assertNotNull(tx);
+           assertEquals(0, tx.getBeginTime());
+           assertEquals(280000000, tx.getEndTime());
+           List<ITxAttribute> attr = tx.getAttributes();
+           assertEquals(3, attr.size());
+       });
+   }
    }