/*******************************************************************************
 * Copyright (c) 2023 MINRES Technologies GmbH
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IT Just working - initial API and implementation
 *******************************************************************************/
package com.minres.scviewer.database.ftr;

import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;
import org.eclipse.collections.impl.map.mutable.UnifiedMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.HashMultimap;
import com.minres.scviewer.database.AssociationType;
import com.minres.scviewer.database.DataType;
import com.minres.scviewer.database.EventKind;
import com.minres.scviewer.database.IWaveform;
import com.minres.scviewer.database.IWaveformDb;
import com.minres.scviewer.database.IWaveformDbLoader;
import com.minres.scviewer.database.InputFormatException;
import com.minres.scviewer.database.RelationType;
import com.minres.scviewer.database.RelationTypeFactory;
import com.minres.scviewer.database.tx.ITx;
import com.minres.scviewer.database.tx.ITxAttribute;

import jacob.CborDecoder;
import jacob.CborType;

/**
 * The Class FtrDbLoader. Loads transaction recordings from CBOR-encoded FTR files.
 */
public class FtrDbLoader implements IWaveformDbLoader {

	enum FileType { NONE, PLAIN, GZIP, LZ4 };

	/** The max time. */
	private Long maxTime = 0L;

	/** The string dictionary, indexed by dictionary id. */
	ArrayList<String> strDict = new ArrayList<>();

	/** The attr values. */
	final List attrValues = new ArrayList<>();

	/** The relation types. */
	final Map<String, RelationType> relationTypes = UnifiedMap.newMap();

	/** The tx streams. */
	final Map<Long, TxStream> txStreams = UnifiedMap.newMap();

	/** The tx generators. */
	final Map<Long, TxGenerator> txGenerators = UnifiedMap.newMap();

	/** The transactions. */
	final Map<Long, FtrTx> transactions = UnifiedMap.newMap();

	/** The attribute types. */
	final Map attributeTypes = UnifiedMap.newMap();

	/** The relations in. */
	final HashMultimap<Long, FtrRelation> relationsIn = HashMultimap.create();

	/** The relations out. */
	final HashMultimap<Long, FtrRelation> relationsOut = HashMultimap.create();

	/** The tx cache. */
	final Map<Long, Tx> txCache = UnifiedMap.newMap();

	/** The threads. */
	List threads = new ArrayList<>();

	File file;

	private static final Logger LOG = LoggerFactory.getLogger(FtrDbLoader.class);

	/** The pcs. */
	protected PropertyChangeSupport pcs = new PropertyChangeSupport(this);

	/** Scale factor from FTR time units to database time units, set from the info record. */
	long time_scale_factor = 1000L;

	/**
	 * Adds the property change listener.
	 *
	 * @param l the listener
	 */
	@Override
	public void addPropertyChangeListener(PropertyChangeListener l) {
		pcs.addPropertyChangeListener(l);
	}

	/**
	 * Removes the property change listener.
	 *
	 * @param l the listener
	 */
	@Override
	public void removePropertyChangeListener(PropertyChangeListener l) {
		pcs.removePropertyChangeListener(l);
	}

	/**
	 * Gets the max time.
	 *
	 * @return the max time
	 */
	@Override
	public long getMaxTime() {
		return maxTime;
	}
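	// Transactions are materialized lazily: parsing stores only lightweight FtrTx records plus
	// the file offsets of their chunks, and a full Tx view object is created and cached on first
	// access (see getTransaction() below). Attribute data is not kept in memory during parsing;
	// getChunksAtOffsets() and parseAtrributes() allow it to be re-read from the file on demand.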
	/**
	 * Gets the transaction.
	 *
	 * @param txId the tx id
	 * @return the transaction
	 */
	public synchronized ITx getTransaction(long txId) {
		if (txCache.containsKey(txId))
			return txCache.get(txId);
		if (transactions.containsKey(txId)) {
			Tx tx = new Tx(this, transactions.get(txId));
			txCache.put(txId, tx);
			return tx;
		} else {
			throw new IllegalArgumentException();
		}
	}

	public FtrTx getScvTx(long id) {
		if (transactions.containsKey(id))
			return transactions.get(id);
		else
			throw new IllegalArgumentException();
	}

	/**
	 * Gets the all waves.
	 *
	 * @return the all waves
	 */
	@Override
	public Collection<IWaveform> getAllWaves() {
		ArrayList<IWaveform> ret = new ArrayList<>(txStreams.values());
		ret.addAll(txGenerators.values());
		return ret;
	}

	/**
	 * Gets the all relation types.
	 *
	 * @return the all relation types
	 */
	public Collection<RelationType> getAllRelationTypes() {
		return relationTypes.values();
	}

	/**
	 * Load the database from the given file.
	 *
	 * @param db   the database to fill
	 * @param file the file to load
	 * @throws InputFormatException if the file cannot be parsed
	 */
	@Override
	public void load(IWaveformDb db, File file) throws InputFormatException {
		dispose();
		this.file = file;
		try (FileInputStream fis = new FileInputStream(file)) {
			new CborDbParser(this, fis);
		} catch (IOException e) {
			LOG.warn("Problem parsing file " + file.getName() + ": ", e);
		} catch (Exception e) {
			LOG.error("Error parsing file " + file.getName() + ": ", e);
			transactions.clear();
			throw new InputFormatException(e.toString());
		}
		txStreams.values().parallelStream().forEach(TxStream::calculateConcurrency);
	}

	/**
	 * Reads the chunks stored at the given file offsets. A negative offset marks an
	 * LZ4-compressed chunk which is decompressed on the fly.
	 *
	 * @param fileOffsets the chunk offsets as recorded by the parser
	 * @return the raw (decompressed) chunk contents
	 * @throws InputFormatException if the file cannot be read
	 */
	public List<byte[]> getChunksAtOffsets(ArrayList<Long> fileOffsets) throws InputFormatException {
		List<byte[]> ret = new ArrayList<>();
		try (FileInputStream fis = new FileInputStream(file)) {
			FileChannel fc = fis.getChannel();
			for (Long offset : fileOffsets) {
				if (offset >= 0) {
					fc.position(offset);
					CborDecoder parser = new CborDecoder(fis);
					ret.add(parser.readByteString());
				} else {
					fc.position(-offset);
					CborDecoder parser = new CborDecoder(fis);
					BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(
							new ByteArrayInputStream(parser.readByteString()));
					ret.add(decomp.readAllBytes());
					decomp.close();
				}
			}
		} catch (Exception e) {
			LOG.error("Error parsing file " + file.getName(), e);
			transactions.clear();
			throw new InputFormatException(e.toString());
		}
		return ret;
	}

	/**
	 * Parses the attributes of the transaction starting at blockOffset within the given chunk.
	 */
	public List<ITxAttribute> parseAtrributes(byte[] chunk, long blockOffset) {
		List<ITxAttribute> ret = new ArrayList<>();
		ByteArrayInputStream bais = new ByteArrayInputStream(chunk);
		bais.skip(blockOffset);
		CborDecoder cborDecoder = new CborDecoder(bais);
		try {
			long tx_size = cborDecoder.readArrayLength();
			// NOTE: the loop body below is a reconstruction sketch; the original lines were lost
			// in this copy of the file (only merge-conflict residue of release/2.17.1 remained).
			// It mirrors the record layout skipped over in CborDbParser.parseTx(): tag 6 is the
			// begin/end record, tags 7/8/9 carry begin/record/end attributes of the form
			// [name_id, data_type, value]. The TxAttribute constructor used here is an assumption.
			for (long i = 0; i < tx_size; i++) {
				long tag = cborDecoder.readTag();
				long sz = cborDecoder.readArrayLength();
				if (tag == 6) { // begin/end record: skip [tx_id, generator_id, begin_time, end_time]
					assert (sz == 4);
					for (int j = 0; j < 4; j++)
						cborDecoder.readInt();
				} else { // 7: begin attr, 8: record attr, 9: end attr
					assert (sz == 3);
					String name = strDict.get((int) cborDecoder.readInt());
					DataType dataType = DataType.values()[(int) cborDecoder.readInt()];
					AssociationType assoc = tag == 7 ? AssociationType.BEGIN
							: tag == 9 ? AssociationType.END : AssociationType.RECORD;
					Object value = null;
					switch (dataType) {
					case BOOLEAN:
						value = cborDecoder.readBoolean();
						break;
					case FLOATING_POINT_NUMBER:
					case FIXED_POINT_INTEGER:
					case UNSIGNED_FIXED_POINT_INTEGER:
						value = cborDecoder.readFloat();
						break;
					case NONE:
						LOG.warn("Unsupported data type: " + dataType);
						break;
					default:
						value = cborDecoder.readInt();
					}
					ret.add(new TxAttribute(name, dataType, assoc, value)); // assumed helper class
				}
			}
		} catch (IOException e) {
			LOG.error("Error parsing attributes of chunk at offset " + blockOffset, e);
		}
		return ret;
	}

	/**
	 * Dispose. NOTE: reconstructed; the original body was lost in this copy of the file.
	 * Resetting the collections filled during loading is an assumption.
	 */
	public void dispose() {
		maxTime = 0L;
		strDict.clear();
		attrValues.clear();
		relationTypes.clear();
		txStreams.clear();
		txGenerators.clear();
		transactions.clear();
		attributeTypes.clear();
		relationsIn.clear();
		relationsOut.clear();
		txCache.clear();
	}

	/**
	 * The CBOR parser creating the database content from the FTR file structure.
	 */
	static class CborDbParser extends CborDecoder {

		/** The CBOR 'break' marker (0xff) terminating indefinite-length items (reconstructed). */
		private static final CborType break_type = CborType.valueOf(0xff);

		/** The loader to fill. */
		final FtrDbLoader loader;

		/** The underlying file stream, needed to record chunk file offsets. */
		final FileInputStream inputStream;

		public CborDbParser(FtrDbLoader loader, FileInputStream inputStream) {
			super(inputStream);
			this.loader = loader;
			this.inputStream = inputStream;
			try {
				// NOTE: the lines from here up to the info record were lost in this copy of the
				// file and are reconstructed. The top level is assumed to be a sequence of tagged
				// CBOR items; the tag value of the info record (6) is an assumption as well.
				CborType next = peekType();
				while (next != null && !break_type.isEqualType(next)) {
					long tag = readTag();
					switch ((int) tag) {
					case 6: { // info: time scale and epoch
						CborDecoder cbd = new CborDecoder(new ByteArrayInputStream(readByteString()));
						long sz = cbd.readArrayLength();
						assert (sz == 3);
						long time_numerator = cbd.readInt();
						long time_denominator = cbd.readInt();
						loader.time_scale_factor = 1000000000000000L * time_numerator / time_denominator;
						long epoch_tag = cbd.readTag();
						assert (epoch_tag == 1);
						cbd.readInt(); // epoch
						break;
					}
					case 8: { // dictionary uncompressed
						parseDict(new CborDecoder(new ByteArrayInputStream(readByteString())));
						break;
					}
					case 9: { // dictionary compressed
						long sz = readArrayLength();
						assert (sz == 2);
						readInt(); // uncompressed size
						parseDict(new CborDecoder(
								new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
						break;
					}
					case 10: { // directory uncompressed
						parseDir(new CborDecoder(new ByteArrayInputStream(readByteString())));
						break;
					}
					case 11: { // directory compressed
						long sz = readArrayLength();
						assert (sz == 2);
						readInt(); // uncompressed size
						parseDir(new CborDecoder(
								new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
						break;
					}
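					// For transaction chunks (tags 12/13) only the chunk's position in the file is
					// recorded (negative offsets mark LZ4-compressed chunks), so that attribute data
					// can later be re-read lazily via getChunksAtOffsets(). parseTx() below extracts
					// the begin/end events and skips over the attribute records.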
					case 12: { // tx chunk uncompressed
						long len = readArrayLength();
						assert (len == 2);
						long stream_id = readInt();
						TxStream txStream = loader.txStreams.get(stream_id);
						txStream.fileOffsets.add(inputStream.getChannel().position());
						parseTx(txStream, txStream.fileOffsets.size() - 1, readByteString());
						break;
					}
					case 13: { // tx chunk compressed
						long len = readArrayLength();
						assert (len == 3);
						long stream_id = readInt();
						readInt(); // uncompressed size
						TxStream txStream = loader.txStreams.get(stream_id);
						txStream.fileOffsets.add(0 - inputStream.getChannel().position());
						BlockLZ4CompressorInputStream decomp = new BlockLZ4CompressorInputStream(
								new ByteArrayInputStream(readByteString()));
						parseTx(txStream, txStream.fileOffsets.size() - 1, decomp.readAllBytes());
						decomp.close();
						break;
					}
					case 14: { // relations uncompressed
						parseRel(new CborDecoder(new ByteArrayInputStream(readByteString())));
						break;
					}
					case 15: { // relations compressed
						long sz = readArrayLength();
						assert (sz == 2);
						readInt(); // uncompressed size
						parseRel(new CborDecoder(
								new BlockLZ4CompressorInputStream(new ByteArrayInputStream(readByteString()))));
						break;
					}
					}
					next = peekType();
				}
			} catch (IOException e) {
				long pos = 0;
				try {
					pos = inputStream.getChannel().position();
				} catch (Exception ee) {
				}
				LOG.error("Error parsing file input stream at position " + pos, e);
			}
		}

		private void parseDict(CborDecoder cborDecoder) throws IOException {
			long size = cborDecoder.readMapLength();
			ArrayList<String> lst = new ArrayList<>((int) size);
			// NOTE: loop body reconstructed (lost in this copy of the file); dictionary ids are
			// assumed to be consecutive so the values can simply be appended to the string dictionary.
			for (long i = 0; i < size; i++) {
				cborDecoder.readInt(); // dictionary id
				lst.add(cborDecoder.readTextString());
			}
			loader.strDict.addAll(lst);
		}

		private void parseDir(CborDecoder cborDecoder) throws IOException {
			// NOTE: reconstruction sketch; the original body was lost in this copy of the file.
			// Directory entries are assumed to be tagged arrays (tag 16: stream [id, name_id,
			// kind_id], tag 17: generator [id, name_id, stream_id]); the tag values and the
			// TxStream/TxGenerator constructor signatures are assumptions as well.
			CborType next = cborDecoder.peekType();
			while (next != null && !break_type.isEqualType(next)) {
				long tag = cborDecoder.readTag();
				if (tag == 16) { // stream
					long sz = cborDecoder.readArrayLength();
					assert (sz == 3);
					long stream_id = cborDecoder.readInt();
					String name = loader.strDict.get((int) cborDecoder.readInt());
					String kind = loader.strDict.get((int) cborDecoder.readInt());
					add(stream_id, new TxStream(loader, stream_id, name, kind));
				} else if (tag == 17) { // generator
					long sz = cborDecoder.readArrayLength();
					assert (sz == 3);
					long gen_id = cborDecoder.readInt();
					String name = loader.strDict.get((int) cborDecoder.readInt());
					long stream_id = cborDecoder.readInt();
					add(gen_id, new TxGenerator(loader, gen_id, name, loader.txStreams.get(stream_id)));
				}
				next = cborDecoder.peekType();
			}
		}

		private void parseTx(TxStream txStream, long blockId, byte[] chunk) throws IOException {
			// NOTE: the head of this method (up to the maxTime update) is a reconstruction sketch;
			// the original lines were lost in this copy of the file. The record layout
			// (tag 6: [tx_id, generator_id, begin_time, end_time]), the offset bookkeeping and
			// the FtrTx constructor arguments are assumptions.
			ByteArrayInputStream bais = new ByteArrayInputStream(chunk);
			CborDecoder cborDecoder = new CborDecoder(bais);
			CborType next = cborDecoder.peekType();
			while (next != null && !break_type.isEqualType(next)) {
				// offset of this tx within the chunk (assumes the decoder reads the stream unbuffered)
				long blockOffset = chunk.length - bais.available();
				long tx_size = cborDecoder.readArrayLength();
				for (long i = 0; i < tx_size; i++) {
					long tag = cborDecoder.readTag();
					switch ((int) tag) {
					case 6: // begin/end of the transaction
						long rec_len = cborDecoder.readArrayLength();
						assert (rec_len == 4);
						long txId = cborDecoder.readInt();
						long genId = cborDecoder.readInt();
						long beginTime = cborDecoder.readInt() * loader.time_scale_factor;
						long endTime = cborDecoder.readInt() * loader.time_scale_factor;
						TxGenerator gen = loader.txGenerators.get(genId);
						FtrTx scvTx = new FtrTx(txId, genId, gen.stream.getId(), beginTime, endTime, blockId, blockOffset);
						loader.maxTime = loader.maxTime > scvTx.endTime ? loader.maxTime : scvTx.endTime;
						loader.transactions.put(txId, scvTx);
						TxStream stream = loader.txStreams.get(gen.stream.getId());
						if (scvTx.beginTime == scvTx.endTime) {
							stream.addEvent(new TxEvent(loader, EventKind.SINGLE, txId, scvTx.beginTime));
							gen.addEvent(new TxEvent(loader, EventKind.SINGLE, txId, scvTx.beginTime));
						} else {
							stream.addEvent(new TxEvent(loader, EventKind.BEGIN, txId, scvTx.beginTime));
							gen.addEvent(new TxEvent(loader, EventKind.BEGIN, txId, scvTx.beginTime));
							stream.addEvent(new TxEvent(loader, EventKind.END, txId, scvTx.endTime));
							gen.addEvent(new TxEvent(loader, EventKind.END, txId, scvTx.endTime));
						}
						break;
					default: { // skip over attribute records (7: begin attr, 8: record attr, 9: end attr)
						long sz = cborDecoder.readArrayLength();
						assert (sz == 3);
						cborDecoder.readInt(); // attribute name id
						long type_id = cborDecoder.readInt();
						switch (DataType.values()[(int) type_id]) {
						case BOOLEAN:
							cborDecoder.readBoolean();
							break;
						case FLOATING_POINT_NUMBER:
						case FIXED_POINT_INTEGER:
						case UNSIGNED_FIXED_POINT_INTEGER:
							cborDecoder.readFloat();
							break;
						case NONE:
							LOG.warn("Unsupported data type: " + type_id);
							break;
						default:
							cborDecoder.readInt();
						}
					}
					}
				}
				next = cborDecoder.peekType();
			}
		}

		private void parseRel(CborDecoder cborDecoder) throws IOException {
			long size = cborDecoder.readArrayLength();
			assert (size == -1); // indefinite-length array
			CborType next = cborDecoder.peekType();
			while (next != null && !break_type.isEqualType(next)) {
				long sz = cborDecoder.readArrayLength();
				assert (sz == 3);
				long type_id = cborDecoder.readInt();
				long from_id = cborDecoder.readInt();
				long to_id = cborDecoder.readInt();
				String rel_name = loader.strDict.get((int) type_id);
				FtrRelation ftrRel = new FtrRelation(
						loader.relationTypes.getOrDefault(rel_name, RelationTypeFactory.create(rel_name)), from_id, to_id);
				loader.relationsOut.put(from_id, ftrRel);
				loader.relationsIn.put(to_id, ftrRel);
				next = cborDecoder.peekType();
			}
		}
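		// New streams and generators are published to registered listeners through the loader's
		// PropertyChangeSupport (STREAM_ADDED / GENERATOR_ADDED events), presumably so that a UI
		// can pick them up while the file is still being parsed.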
		private void add(Long id, TxStream stream) {
			loader.txStreams.put(id, stream);
			loader.pcs.firePropertyChange(IWaveformDbLoader.STREAM_ADDED, null, stream);
		}

		private void add(Long id, TxGenerator generator) {
			loader.txGenerators.put(id, generator);
			loader.pcs.firePropertyChange(IWaveformDbLoader.GENERATOR_ADDED, null, generator);
		}
	}
}