adds support for compressed FTR
parent a737f5588d
commit 3b57ec029b
@@ -17,15 +17,13 @@ import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
 
+import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;
 import org.eclipse.collections.impl.map.mutable.UnifiedMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
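
The only new third-party import is Apache Commons Compress: BlockLZ4CompressorInputStream decodes raw LZ4 block data (no frame header), which is the form the compressed FTR sections are wrapped in below. A minimal round-trip sketch, independent of the loader and assuming only that commons-compress is on the classpath:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;
import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorOutputStream;

public class Lz4BlockRoundTrip {
    public static void main(String[] args) throws Exception {
        byte[] original = "compressed FTR payload".getBytes(StandardCharsets.UTF_8);

        // Compress into the raw LZ4 block format (no frame header).
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (BlockLZ4CompressorOutputStream lz4Out = new BlockLZ4CompressorOutputStream(bos)) {
            lz4Out.write(original);
        }

        // Decompress again, mirroring how the loader wraps a CBOR byte string.
        byte[] decoded;
        try (BlockLZ4CompressorInputStream lz4In =
                new BlockLZ4CompressorInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            decoded = lz4In.readAllBytes();
        }
        System.out.println(new String(decoded, StandardCharsets.UTF_8));
    }
}
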
@@ -42,7 +40,6 @@ import com.minres.scviewer.database.RelationType;
 import com.minres.scviewer.database.RelationTypeFactory;
 import com.minres.scviewer.database.tx.ITx;
 import com.minres.scviewer.database.tx.ITxAttribute;
-import com.minres.scviewer.database.RelationType;
 
 import jacob.CborDecoder;
 import jacob.CborType;
@@ -97,8 +94,7 @@ public class FtrDbLoader implements IWaveformDbLoader {
     /** The pcs. */
     protected PropertyChangeSupport pcs = new PropertyChangeSupport(this);
 
-    /** The Constant x. */
-    static final byte[] x = "scv_tr_stream".getBytes();
+    long time_scale_factor = 1000l;
 
     /**
      * Adds the property change listener.
@@ -203,9 +199,17 @@ public class FtrDbLoader implements IWaveformDbLoader {
         try(FileInputStream fis = new FileInputStream(file)) {
             FileChannel fc = fis.getChannel();
             for (Long offset : fileOffsets) {
-                fc.position(offset);
-                CborDecoder parser = new CborDecoder(fis);
-                ret.add(parser.readByteString());
+                if(offset>=0) {
+                    fc.position(offset);
+                    CborDecoder parser = new CborDecoder(fis);
+                    ret.add(parser.readByteString());
+                } else {
+                    fc.position(-offset);
+                    CborDecoder parser = new CborDecoder(fis);
+                    BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(new ByteArrayInputStream(parser.readByteString()));
+                    ret.add(decomp.readAllBytes());
+                    decomp.close();
+                }
             }
         } catch (Exception e) {
             LOG.error("Error parsing file "+file.getName(), e);
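
The hunk above sets the convention the rest of the change relies on: a non-negative entry in fileOffsets points at a plain CBOR byte string, while a negative entry is the negated file position of an LZ4-compressed byte string that has to be inflated on read. A condensed sketch of that convention (ChunkReader and readChunk are illustrative names, not part of the commit; CborDecoder is the jacob decoder the loader already uses):

import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;

import jacob.CborDecoder;

class ChunkReader {
    // Resolve one chunk for a signed offset, following the loop in the hunk above.
    static byte[] readChunk(FileInputStream fis, long signedOffset) throws IOException {
        FileChannel fc = fis.getChannel();
        fc.position(Math.abs(signedOffset));          // the magnitude is the file position
        CborDecoder parser = new CborDecoder(fis);
        byte[] raw = parser.readByteString();
        if (signedOffset >= 0) {
            return raw;                               // stored verbatim
        }
        try (BlockLZ4CompressorInputStream decomp =
                new BlockLZ4CompressorInputStream(new ByteArrayInputStream(raw))) {
            return decomp.readAllBytes();             // stored LZ4 block-compressed
        }
    }
}
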
@@ -318,16 +322,40 @@ public class FtrDbLoader implements IWaveformDbLoader {
         while(next != null && !break_type.isEqualType(next)) {
             long tag = readTag();
             switch((int)tag) {
-            case 6: // info
+            case 6: { // info
+                CborDecoder cbd = new CborDecoder(new ByteArrayInputStream(readByteString()));
+                long sz = cbd.readArrayLength();
+                assert(sz==3);
+                long time_numerator=cbd.readInt();
+                long time_denominator=cbd.readInt();
+                loader.time_scale_factor = 1000000000000000l*time_numerator/time_denominator;
+                long epoch_tag = cbd.readTag();
+                assert(epoch_tag==1);
+                cbd.readInt(); // epoch
                 break;
+            }
             case 8: { // dictionary uncompressed
                 parseDict(new CborDecoder(new ByteArrayInputStream(readByteString())));
                 break;
             }
+            case 9: { // dictionary compressed
+                long sz = readArrayLength();
+                assert(sz==2);
+                readInt(); // uncompressed size
+                parseDict(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
+                break;
+            }
             case 10: { // directory uncompressed
                 parseDir(new CborDecoder(new ByteArrayInputStream(readByteString())));
                 break;
             }
+            case 11: { // directory compressed
+                long sz = readArrayLength();
+                assert(sz==2);
+                readInt(); // uncompressed size
+                parseDir(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
+                break;
+            }
             case 12: { //tx chunk uncompressed
                 long len = readArrayLength();
                 assert(len==2);
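
Case 6 now consumes the info record, a three-element CBOR array carrying the time unit as a numerator/denominator pair plus a tagged epoch, and derives time_scale_factor from it; cases 9 and 11 accept LZ4-compressed dictionary and directory records wrapped as [uncompressed size, bytes]. A hedged sanity check of the scale-factor arithmetic (the femtosecond reading of the 10^15 constant is an assumption; the formula is the one in the hunk, and the field's default of 1000l from the earlier hunk matches a 1 ps unit under this reading):

// 1 ps time unit: numerator = 1, denominator = 10^12  ->  factor = 1000
long psFactor = 1000000000000000L * 1L / 1000000000000L;
// 1 ns time unit: numerator = 1, denominator = 10^9   ->  factor = 1000000
long nsFactor = 1000000000000000L * 1L / 1000000000L;
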
@@ -337,10 +365,29 @@ public class FtrDbLoader implements IWaveformDbLoader {
                 parseTx(txStream, txStream.fileOffsets.size()-1, readByteString());
                 break;
             }
+            case 13: { //tx chunk compressed
+                long len = readArrayLength();
+                assert(len==3);
+                long stream_id = readInt();
+                readInt(); // uncompressed size
+                TxStream txStream = loader.txStreams.get(stream_id);
+                txStream.fileOffsets.add(0-inputStream.getChannel().position());
+                BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()));
+                parseTx(txStream, txStream.fileOffsets.size()-1, decomp.readAllBytes());
+                decomp.close();
+                break;
+            }
             case 14: { // relations uncompressed
                 parseRel(new CborDecoder(new ByteArrayInputStream(readByteString())));
                 break;
             }
+            case 15: { // relations compressed
+                long sz = readArrayLength();
+                assert(sz==2);
+                readInt(); // uncompressed size
+                parseRel(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
+                break;
+            }
             }
             next = peekType();
         }
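
The compressed tx chunk (tag 13) and relation (tag 15) records mirror their uncompressed counterparts, with the payload wrapped as stream id (tx chunks only), uncompressed size, and LZ4 bytes. For tx chunks the loader additionally stores the negated channel position, so the chunk reader from the earlier hunk knows to decompress on access. Illustrative values only, showing how the sign round-trips:

long filePos = 4096L;                        // hypothetical position of a compressed chunk
long stored = 0 - filePos;                   // what case 13 appends to txStream.fileOffsets
boolean compressed = stored < 0;             // the check the chunk reader performs
long seekTo = compressed ? -stored : stored; // position handed back to fc.position()
assert seekTo == filePos;
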
@@ -413,8 +460,8 @@ public class FtrDbLoader implements IWaveformDbLoader {
         assert(len==4);
         long txId = cborDecoder.readInt();
         long genId = cborDecoder.readInt();
-        long startTime = cborDecoder.readInt()*1000; //TODO: scale based on info
-        long endTime = cborDecoder.readInt()*1000; //TODO: scale based on info
+        long startTime = cborDecoder.readInt()*loader.time_scale_factor;
+        long endTime = cborDecoder.readInt()*loader.time_scale_factor;
         TxGenerator gen = loader.txGenerators.get(genId);
         FtrTx scvTx = new FtrTx(txId, gen.stream.getId(), genId, startTime, endTime, blockId, blockOffset);
         loader.maxTime = loader.maxTime > scvTx.endTime ? loader.maxTime : scvTx.endTime;
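
Transaction begin and end times are now scaled by the info-derived factor instead of a hard-coded 1000. A hedged example of the effect (the raw value and the 1 ps unit are assumptions, not values read from the test file):

long timeScaleFactor = 1000L;                  // e.g. a 1 ps trace, per the earlier sketch
long rawEndTime = 280000L;                     // hypothetical raw CBOR end time
long endTime = rawEndTime * timeScaleFactor;   // 280000000, the magnitude testCFtr asserts below
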
Binary file not shown.
@@ -148,4 +148,38 @@ public class DatabaseServicesTest {
             assertEquals(3, attr.size());
         });
     }
+    @Test
+    public void testCFtr() throws Exception {
+        File f = new File("inputs/my_db_c.ftr").getAbsoluteFile();
+        assertTrue(f.exists());
+        waveformDb.load(f);
+        assertNotNull(waveformDb);
+        List<IWaveform> waveforms = waveformDb.getAllWaves();
+        assertEquals(8, waveforms.size());
+        assertEquals(1, waveformDb.getChildNodes().size());
+        for(IWaveform w:waveforms) {
+            if(w.getId()==1) {
+                assertEquals(2, w.getRowCount());
+            } else if(w.getId()==2l) {
+                assertEquals(1, w.getRowCount());
+            } else if(w.getId()==3l) {
+                assertEquals(1, w.getRowCount());
+            }
+        }
+        //waveforms.stream().filter(s -> s.getId()==1).collect(Collectors.toList());
+        waveforms.stream().filter(s -> s.getId()==1).forEach(s -> {
+            assertEquals(27, s.getEvents().size());
+        });
+        waveforms.stream().filter(s -> s.getId()==1).map(s -> s.getEventsAtTime(0)).forEach(el -> {
+            assertEquals(1, el.length);
+            IEvent evt = el[0];
+            assertTrue(evt instanceof ITxEvent);
+            ITx tx = ((ITxEvent)evt).getTransaction();
+            assertNotNull(tx);
+            assertEquals(0, tx.getBeginTime());
+            assertEquals(280000000, tx.getEndTime());
+            List<ITxAttribute> attr = tx.getAttributes();
+            assertEquals(3, attr.size());
+        });
+    }
 }