/*******************************************************************************
 * Copyright (c) 2023 MINRES Technologies GmbH
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IT Just working - initial API and implementation
 *******************************************************************************/
package com.minres.scviewer.database.ftr;

import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;
import org.eclipse.collections.impl.map.mutable.UnifiedMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.HashMultimap;
import com.minres.scviewer.database.AssociationType;
import com.minres.scviewer.database.DataType;
import com.minres.scviewer.database.EventKind;
import com.minres.scviewer.database.IWaveform;
import com.minres.scviewer.database.IWaveformDb;
import com.minres.scviewer.database.IWaveformDbLoader;
import com.minres.scviewer.database.InputFormatException;
import com.minres.scviewer.database.RelationType;
import com.minres.scviewer.database.RelationTypeFactory;
import com.minres.scviewer.database.tx.ITx;
import com.minres.scviewer.database.tx.ITxAttribute;

import jacob.CborDecoder;
import jacob.CborType;

/**
 * The Class FtrDbLoader.
 *
 * Database loader for FTR (CBOR based transaction recording) files: parses the
 * file via the nested {@code CborDbParser} and keeps the resulting streams,
 * generators, transactions and relations for the waveform database.
 */
public class FtrDbLoader implements IWaveformDbLoader {

	// NOTE(review): FileType is not referenced anywhere in the visible code —
	// confirm whether it is still needed.
	enum FileType { NONE, PLAIN, GZIP, LZ4};

	/** The max time: largest event time seen while loading. */
	private Long maxTime = 0L;

	// NOTE(review): the generic type parameter of this list (and of most other
	// declarations in this file) appears to have been stripped by markup
	// processing; recover the originals from version control.
	// String dictionary; entries are looked up by integer id in parseAtrributes().
	ArrayList strDict = new ArrayList<>();

	/** The attr values.
 */
	final List attrValues = new ArrayList<>();

	// NOTE(review): the generic type parameters of the collections below were
	// lost to markup stripping — the raw types as written will not compile.
	// Recover the exact key/value types from version control.

	/** The relation types. */
	final Map relationTypes = UnifiedMap.newMap();

	/** The tx streams. */
	final Map txStreams = UnifiedMap.newMap();

	/** The tx generators. */
	final Map txGenerators = UnifiedMap.newMap();

	/** The transactions, keyed by transaction id. */
	final Map transactions = UnifiedMap.newMap();

	/** The attribute types. */
	final Map attributeTypes = UnifiedMap.newMap();

	/** The relations in. */
	final HashMultimap relationsIn = HashMultimap.create();

	/** The relations out. */
	final HashMultimap relationsOut = HashMultimap.create();

	/** The tx cache: lazily created Tx wrappers, see getTransaction(). */
	final Map txCache = UnifiedMap.newMap();

	/** The threads. */
	List threads = new ArrayList<>();

	/** The file currently being loaded; set by load(). */
	File file;

	private static final Logger LOG = LoggerFactory.getLogger(FtrDbLoader.class);

	/** The pcs: property change support notifying registered listeners. */
	protected PropertyChangeSupport pcs = new PropertyChangeSupport(this);

	/** Time scale factor; overwritten from the file's 'info' chunk by CborDbParser. */
	long time_scale_factor = 1000l;

	/**
	 * Adds the property change listener.
	 *
	 * @param l the listener to add
	 */
	@Override
	public void addPropertyChangeListener(PropertyChangeListener l) {
		pcs.addPropertyChangeListener(l);
	}

	/**
	 * Removes the property change listener.
	 *
	 * @param l the listener to remove
	 */
	@Override
	public void removePropertyChangeListener(PropertyChangeListener l) {
		pcs.removePropertyChangeListener(l);
	}

	/**
	 * Gets the max time.
	 *
	 * @return the largest event time seen while loading
	 */
	@Override
	public long getMaxTime() {
		return maxTime;
	}

	/**
	 * Gets the transaction with the given id, creating and caching the wrapper
	 * object on first access.
	 *
	 * @param txId the tx id
	 * @return the transaction
	 * @throws IllegalArgumentException if no transaction with this id exists
	 */
	public ITx getTransaction(long txId) {
		if (txCache.containsKey(txId))
			return txCache.get(txId);
		if(transactions.containsKey(txId)) {
			Tx tx = new Tx(this, transactions.get(txId));
			txCache.put(txId, tx);
			return tx;
		} else {
			throw new IllegalArgumentException();
		}
	}

	/**
	 * Gets the raw parsed transaction with the given id.
	 *
	 * @param id the tx id
	 * @return the parsed transaction
	 * @throws IllegalArgumentException if no transaction with this id exists
	 */
	public FtrTx getScvTx(long id) {
		if(transactions.containsKey(id))
			return transactions.get(id);
		else
			throw new IllegalArgumentException();
	}

	/**
	 * Gets the all waves.
	 *
	 * @return all waveforms of this database: transaction streams followed by
	 *         their generators
	 */
	@Override
	public Collection getAllWaves() {
		ArrayList ret = new ArrayList<>(txStreams.values());
		ret.addAll(txGenerators.values());
		return ret;
	}

	/**
	 * Gets the all relation types.
	 *
	 * @return the relation types collected so far
	 */
	public Collection getAllRelationTypes() {
		return relationTypes.values();
	}

	/**
	 * Load the database from the given FTR file.
	 *
	 * @param db   the db to load into
	 * @param file the file to parse
	 * @throws InputFormatException if the file cannot be parsed; partially
	 *         loaded transactions are cleared first
	 */
	@Override
	public void load(IWaveformDb db, File file) throws InputFormatException {
		dispose();
		this.file=file;
		try(FileInputStream fis = new FileInputStream(file)) {
			new CborDbParser(this, fis);
		} catch (Exception e) {
			LOG.error("Error parsing file "+file.getName(), e);
			transactions.clear();
			throw new InputFormatException(e.toString());
		}
		// concurrency rows can only be computed once all transactions are known
		txStreams.values().parallelStream().forEach(TxStream::calculateConcurrency);
	}

	/**
	 * Reads the raw transaction chunks stored at the given offsets of the
	 * loaded file. A negative offset marks a block-LZ4 compressed chunk: its
	 * absolute value is the file position and the chunk is decompressed before
	 * being returned.
	 *
	 * @param fileOffsets the chunk offsets recorded while parsing
	 * @return one byte array per requested offset
	 * @throws InputFormatException if the file cannot be read
	 */
	public List getChunksAtOffsets(ArrayList fileOffsets) throws InputFormatException {
		List ret = new ArrayList<>();
		try(FileInputStream fis = new FileInputStream(file)) {
			FileChannel fc = fis.getChannel();
			for (Long offset : fileOffsets) {
				if(offset>=0) {
					fc.position(offset);
					CborDecoder parser = new CborDecoder(fis);
					ret.add(parser.readByteString());
				} else {
					// negative offset: the chunk was stored LZ4 compressed
					fc.position(-offset);
					CborDecoder parser = new CborDecoder(fis);
					BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(new ByteArrayInputStream(parser.readByteString()));
					ret.add(decomp.readAllBytes());
					decomp.close();
				}
			}
		} catch (Exception e) {
			LOG.error("Error parsing file "+file.getName(), e);
			transactions.clear();
			throw new InputFormatException(e.toString());
		}
		return ret;
	}

	/**
	 * Parses the attributes of a transaction from a raw chunk.
	 *
	 * @param chunk       the raw chunk data
	 * @param blockOffset the offset of the transaction within the chunk
	 * @return the decoded attributes
	 */
	public List parseAtrributes(byte[] chunk, long blockOffset) {
		List ret = new ArrayList<>();
		ByteArrayInputStream bais = new ByteArrayInputStream(chunk);
		// NOTE(review): skip()'s return value is ignored — assumes the full
		// offset is always skipped; confirm or loop until done.
		bais.skip(blockOffset);
		CborDecoder cborDecoder = new CborDecoder(bais);
		try {
			long tx_size = cborDecoder.readArrayLength();
			// NOTE(review): the following line is corrupted — everything between
			// the '<' of the loop condition and a later '>' (the loop header
			// tail, the attribute-descriptor decoding, the switch header and the
			// BOOLEAN case) was lost, apparently to markup stripping. The code as
			// written cannot compile; recover the original from version control.
			for(long i = 0; i0?"True":"False"); ret.add(b); break;
			case 2: // INTEGER
			case 3: // UNSIGNED
				ITxAttribute a = new TxAttribute(attrType, String.valueOf(cborDecoder.readInt()));
				ret.add(a);
				break;
			case 4: // FLOATING_POINT_NUMBER
			case 7: // FIXED_POINT_INTEGER
			case 8: // UNSIGNED_FIXED_POINT_INTEGER
				ITxAttribute v = new TxAttribute(attrType, String.valueOf(cborDecoder.readFloat()));
				ret.add(v);
				break;
			case 1: // ENUMERATION
			case 5: // BIT_VECTOR
			case 6: // LOGIC_VECTOR
			case 12: // STRING
				// string-like values are stored as ids into the string dictionary
				ITxAttribute s = new TxAttribute(attrType, strDict.get((int)cborDecoder.readInt()));
				ret.add(s);
				break;
			} } } // NOTE(review): brace balance reflects the lost text above
		} catch (IOException e) {
			LOG.error("Error parsing file "+file.getName(), e);
		}
		return ret;
	}

	/**
	 * Dispose: clears the loader's cached state.
	 * NOTE(review): txCache, strDict, threads and maxTime are not reset here —
	 * confirm whether that is intentional.
	 */
	@Override
	public void dispose() {
		attrValues.clear();
		relationTypes.clear();
		txStreams.clear();
		txGenerators.clear();
		transactions.clear();
		attributeTypes.clear();
		relationsIn.clear();
		relationsOut.clear();
	}

	/**
	 * The Class CborDbParser: decodes the FTR CBOR container and populates the
	 * enclosing loader.
	 */
	static class CborDbParser extends CborDecoder {

		// CBOR 'break' (0xff), terminating the indefinite-length top-level array
		static final private CborType break_type = CborType.valueOf(0xff);

		/** The loader being populated. */
		final FtrDbLoader loader;

		/**
		 * Instantiates a new text db parser.
* * @param loader the loader */ public CborDbParser(FtrDbLoader loader, FileInputStream inputStream) { super(inputStream); this.loader = loader; try { long cbor_tag = readTag(); assert(cbor_tag == 55799); long array_len = readArrayLength(); assert(array_len==-1); CborType next = peekType(); while(next != null && !break_type.isEqualType(next)) { long tag = readTag(); switch((int)tag) { <<<<<<< HEAD case 6: // info ======= case 6: { // info >>>>>>> refs/heads/release/2.17.1 CborDecoder cbd = new CborDecoder(new ByteArrayInputStream(readByteString())); long sz = cbd.readArrayLength(); assert(sz==3); long time_numerator=cbd.readInt(); long time_denominator=cbd.readInt(); loader.time_scale_factor = 1000000000000000l*time_numerator/time_denominator; long epoch_tag = cbd.readTag(); assert(epoch_tag==1); cbd.readInt(); // epoch break; } case 8: { // dictionary uncompressed parseDict(new CborDecoder(new ByteArrayInputStream(readByteString()))); break; } case 9: { // dictionary compressed long sz = readArrayLength(); assert(sz==2); readInt(); // uncompressed size parseDict(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString())))); break; } case 10: { // directory uncompressed parseDir(new CborDecoder(new ByteArrayInputStream(readByteString()))); break; } case 11: { // directory compressed long sz = readArrayLength(); assert(sz==2); readInt(); // uncompressed size parseDir(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString())))); break; } case 12: { //tx chunk uncompressed long len = readArrayLength(); assert(len==2); long stream_id = readInt(); TxStream txStream = loader.txStreams.get(stream_id); txStream.fileOffsets.add(inputStream.getChannel().position()); parseTx(txStream, txStream.fileOffsets.size()-1, readByteString()); break; } case 13: { //tx chunk compressed long len = readArrayLength(); assert(len==3); long stream_id = readInt(); readInt(); // uncompressed size TxStream txStream = 
loader.txStreams.get(stream_id); txStream.fileOffsets.add(0-inputStream.getChannel().position()); BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString())); parseTx(txStream, txStream.fileOffsets.size()-1, decomp.readAllBytes()); decomp.close(); break; } case 14: { // relations uncompressed parseRel(new CborDecoder(new ByteArrayInputStream(readByteString()))); break; } case 15: { // relations uncompressed long sz = readArrayLength(); assert(sz==2); readInt(); // uncompressed size parseRel(new CborDecoder(new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString())))); break; } } next = peekType(); } } catch(IOException e) { LOG.error("Error parsing file input stream", e); } } private void parseDict(CborDecoder cborDecoder) throws IOException { long size = cborDecoder.readMapLength(); ArrayList lst = new ArrayList<>((int)size); for(long i = 0; i scvTx.endTime ? loader.maxTime : scvTx.endTime; loader.transactions.put(txId, scvTx); TxStream stream = loader.txStreams.get(gen.stream.getId()); if (scvTx.beginTime == scvTx.endTime) { stream.addEvent(new TxEvent(loader, EventKind.SINGLE, txId, scvTx.beginTime)); gen.addEvent(new TxEvent(loader, EventKind.SINGLE, txId, scvTx.beginTime)); } else { stream.addEvent(new TxEvent(loader, EventKind.BEGIN, txId, scvTx.beginTime)); gen.addEvent(new TxEvent(loader, EventKind.BEGIN, txId, scvTx.beginTime)); stream.addEvent(new TxEvent(loader, EventKind.END, txId, scvTx.endTime)); gen.addEvent(new TxEvent(loader, EventKind.END, txId, scvTx.endTime)); } break; default: { // skip over 7:begin attr, 8:record attr, 9:end attr long sz = cborDecoder.readArrayLength(); assert(sz==3); for(long j = 0; j