2012-06-17 20:34:50 +02:00
|
|
|
/*******************************************************************************
|
|
|
|
* Copyright (c) 2012 IT Just working.
|
|
|
|
* All rights reserved. This program and the accompanying materials
|
|
|
|
* are made available under the terms of the Eclipse Public License v1.0
|
|
|
|
* which accompanies this distribution, and is available at
|
|
|
|
* http://www.eclipse.org/legal/epl-v10.html
|
|
|
|
*
|
|
|
|
* Contributors:
|
|
|
|
* IT Just working - initial API and implementation
|
|
|
|
*******************************************************************************/
|
2015-01-03 16:34:32 +01:00
|
|
|
package com.minres.scviewer.database.text;
|
2012-06-17 19:53:05 +02:00
|
|
|
|
2017-01-23 21:23:43 +01:00
|
|
|
import java.nio.charset.CharsetDecoder;
|
2015-11-15 22:15:37 +01:00
|
|
|
import java.util.Collection;
|
2017-01-23 21:23:43 +01:00
|
|
|
import java.util.zip.GZIPInputStream
|
2018-11-06 08:24:26 +01:00
|
|
|
|
|
|
|
import org.codehaus.groovy.ast.stmt.CatchStatement
|
|
|
|
import org.mapdb.DB
|
|
|
|
import org.mapdb.DBMaker
|
|
|
|
|
2017-01-23 21:23:43 +01:00
|
|
|
import groovy.io.FileType
|
2015-11-15 22:15:37 +01:00
|
|
|
|
2015-01-20 18:50:15 +01:00
|
|
|
import com.minres.scviewer.database.AssociationType
|
|
|
|
import com.minres.scviewer.database.DataType
|
|
|
|
import com.minres.scviewer.database.ITxGenerator
|
|
|
|
import com.minres.scviewer.database.ITxStream
|
2015-01-10 00:23:46 +01:00
|
|
|
import com.minres.scviewer.database.IWaveform
|
2015-01-20 18:50:15 +01:00
|
|
|
import com.minres.scviewer.database.IWaveformDb
|
|
|
|
import com.minres.scviewer.database.IWaveformDbLoader
|
2015-01-03 16:34:32 +01:00
|
|
|
import com.minres.scviewer.database.RelationType
|
2019-12-05 16:25:43 +01:00
|
|
|
import com.minres.scviewer.database.DataType
|
2012-06-17 19:53:05 +02:00
|
|
|
|
2015-01-10 00:23:46 +01:00
|
|
|
/**
 * Database loader for textual SCV transaction recordings.
 * Parses an scv_tr text log (optionally gzip compressed) and populates an
 * IWaveformDb with transaction streams, generators, transactions and relations.
 */
public class TextDbLoader implements IWaveformDbLoader{

// largest begin/end timestamp (in fs) seen while parsing; null until the first transaction
private Long maxTime;

// target database being populated by load()
IWaveformDb db;

// all TxStream instances created while parsing the input
def streams = []

// relation type name -> RelationType, filled lazily when tx_relation lines are seen
def relationTypes=[:]

// file-backed MapDB store used by the streams as transaction backing storage
DB mapDb

public TextDbLoader() {
}
|
|
|
|
|
|
|
|
/**
 * @return the largest timestamp (in fs) encountered while loading,
 *         or null if nothing has been loaded yet
 */
@Override
public Long getMaxTime() {
	maxTime
}
|
|
|
|
|
2015-01-10 00:23:46 +01:00
|
|
|
/**
 * @return a fresh list holding every waveform (stream) discovered so far;
 *         callers may mutate the returned list without affecting the loader
 */
@Override
public List<IWaveform> getAllWaves() {
	def snapshot = new LinkedList<IWaveform>(streams)
	return snapshot
}
|
|
|
|
|
2015-01-06 17:14:16 +01:00
|
|
|
/**
 * Collects the transaction generators of all parsed streams.
 *
 * @return map from generator id to the generator itself, sorted by id
 */
public Map<Long, ITxGenerator> getGeneratorsById() {
	TreeMap<Long, ITxGenerator> res = new TreeMap<Long, ITxGenerator>();
	// BUG FIX: the original used res.put(it.id, id) where 'id' is undefined in this
	// scope — the value must be the generator itself, keyed by its own id
	streams.each{TxStream stream -> stream.generators.each{res.put(it.id, it)} }
	return res;
}
|
|
|
|
|
2015-01-10 00:23:46 +01:00
|
|
|
/** Magic header bytes every SCV text database starts with ("scv_tr_stream"). */
static final byte[] x = "scv_tr_stream".bytes
|
|
|
|
|
|
|
|
/**
 * Loads an SCV text database (plain or gzip compressed) into the given waveform database.
 *
 * @param db   target waveform database to populate
 * @param file the text log file to parse
 * @return true on success (an EOF while parsing counts as success — everything read
 *         so far is kept), false if the file is not a tx file or parsing failed
 * @throws Exception declared by the interface; parse errors are caught and reported here
 */
@Override
boolean load(IWaveformDb db, File file) throws Exception {
	this.db=db
	this.streams=[]
	try {
		def gzipped = isGzipped(file)
		if(isTxfile(gzipped?new GZIPInputStream(new FileInputStream(file)):new FileInputStream(file))){
			// backing store for parsed transactions: a temporary MapDB file next to the input
			def mapDbFile = File.createTempFile("."+file.name, "tmp", file.parentFile)
			mapDbFile.delete()
			mapDbFile.deleteOnExit()
			this.mapDb = DBMaker
					.fileDB(mapDbFile)
					.fileMmapEnableIfSupported()
					.fileMmapPreclearDisable()
					.cleanerHackEnable()
					.allocateStartSize(64*1024*1024)
					.allocateIncrement(64*1024*1024)
					.make()
			// RESOURCE FIX: the input stream was previously opened inline and never
			// closed; close it even when parseInput throws
			def input = gzipped?new GZIPInputStream(new FileInputStream(file)):new FileInputStream(file)
			try {
				parseInput(input)
			} finally {
				input.close()
			}
			calculateConcurrencyIndicees()
			return true
		}
	} catch(EOFException e) {
		// truncated input: keep whatever was parsed up to this point
		return true;
	} catch(Exception e) {
		System.out.println("---->>> Exception caught while loading database. StackTrace following... ");
		e.printStackTrace()
	}
	return false;
}
|
|
|
|
|
|
|
|
/**
 * Checks whether the stream begins with the "scv_tr_stream" magic header.
 * The stream is consumed and closed by this call.
 *
 * @param istream stream positioned at the start of the file
 * @return true if the header matches, false otherwise (including too-short input)
 */
private static boolean isTxfile(InputStream istream) {
	byte[] buffer = new byte[x.size()]
	def readCnt = istream.read(buffer, 0, x.size())
	istream.close()
	// BUG FIX: a file shorter than the magic header cannot be a tx file; the
	// previous implementation fell through to 'return true' in that case
	if(readCnt!=x.size()) return false
	for(int i=0; i<x.size(); i++)
		if(buffer[i]!=x[i]) return false
	return true
}
|
2015-01-03 16:34:32 +01:00
|
|
|
|
2017-01-23 21:23:43 +01:00
|
|
|
/**
 * Probes the two-byte gzip signature (0x1f 0x8b) at the beginning of the file.
 *
 * @param f file to probe
 * @return true if the file looks gzip compressed, false on short read or I/O error
 */
private static boolean isGzipped(File f) {
	InputStream is = null;
	try {
		is = new FileInputStream(f);
		byte [] signature = new byte[2];
		int nread = is.read( signature ); //read the gzip signature
		return nread == 2 && signature[ 0 ] == (byte) 0x1f && signature[ 1 ] == (byte) 0x8b;
	} catch (IOException e) {
		return false;
	} finally {
		// BUG FIX: when the FileInputStream constructor itself threw, 'is' was
		// still null and the unconditional close() raised an NPE from finally
		if(is!=null) is.close()
	}
}
|
|
|
|
|
2015-01-20 18:50:15 +01:00
|
|
|
/**
 * Converts a time-unit suffix ("fs".."s") into its multiplier in femtoseconds.
 * Leading/trailing whitespace is ignored; an unknown unit yields null, exactly
 * like the original switch that fell off the end.
 */
private stringToScale(String scale){
	def multipliers = [
		fs: 1L,
		ps: 1000L,
		ns: 1000000L,
		us: 1000000000L,
		ms: 1000000000000L,
		s : 1000000000000000L
	]
	return multipliers[scale.trim()]
}
|
2017-01-23 21:23:43 +01:00
|
|
|
/**
 * Parses the SCV text log line by line and builds streams, generators,
 * transactions, attributes and relations.
 *
 * The parser is stateful across lines: 'generator' tracks the generator whose
 * attribute declarations are currently being read, 'transaction' the most
 * recently begun/ended transaction, and 'endTransaction' decides whether a
 * bare "a <value>" line attaches to the begin or the end attribute list.
 * NOTE(review): input is assumed UTF-8 encoded — confirm against the writer side.
 */
private def parseInput(InputStream inputStream){
// lookup tables: numeric ids in the log -> objects built so far
def streamsById = [:]
def generatorsById = [:]
def transactionsById = [:]
// parse state shared between consecutive lines (see method comment)
TxGenerator generator
Tx transaction
boolean endTransaction=false
def matcher
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
long lineCnt=0;
reader.eachLine { line ->
def tokens = line.split(/\s+/) as ArrayList
// dispatch on the first token; declaration lines are re-matched with full
// regexes because names may contain whitespace
switch(tokens[0]){
case "scv_tr_stream":
case "scv_tr_generator":
case "begin_attribute":
case "end_attribute":
// header section: stream, generator and attribute-type declarations
if ((matcher = line =~ /^scv_tr_stream\s+\(ID (\d+),\s+name\s+"([^"]+)",\s+kind\s+"([^"]+)"\)$/)) {
def id = Integer.parseInt(matcher[0][1])
def stream = new TxStream(this, id, matcher[0][2], matcher[0][3])
streams<<stream
streamsById[id]=stream
} else if ((matcher = line =~ /^scv_tr_generator\s+\(ID\s+(\d+),\s+name\s+"([^"]+)",\s+scv_tr_stream\s+(\d+),$/)) {
// generator declarations are open-ended: following begin_/end_attribute
// lines belong to this generator until the closing ")" line
def id = Integer.parseInt(matcher[0][1])
ITxStream stream=streamsById[Integer.parseInt(matcher[0][3])]
generator=new TxGenerator(id, stream, matcher[0][2])
stream.generators<<generator
generatorsById[id]=generator
} else if ((matcher = line =~ /^begin_attribute \(ID (\d+), name "([^"]+)", type "([^"]+)"\)$/)) {
generator.begin_attrs << TxAttributeType.getAttrType(matcher[0][2], DataType.valueOf(matcher[0][3]), AssociationType.BEGIN)
} else if ((matcher = line =~ /^end_attribute \(ID (\d+), name "([^"]+)", type "([^"]+)"\)$/)) {
generator.end_attrs << TxAttributeType.getAttrType(matcher[0][2], DataType.valueOf(matcher[0][3]), AssociationType.END)
}
break;
case ")":
// closes the currently open scv_tr_generator declaration
generator=null
break
case "tx_begin"://matcher = line =~ /^tx_begin\s+(\d+)\s+(\d+)\s+(\d+)\s+([munpf]?s)/
// tokens: tx_begin <tx id> <generator id> <time> <unit>
def id = Integer.parseInt(tokens[1])
TxGenerator gen=generatorsById[Integer.parseInt(tokens[2])]
transaction = new Tx(id, gen.stream, gen, Long.parseLong(tokens[3])*stringToScale(tokens[4]))
gen.transactions << transaction
transactionsById[id]= transaction
// reset the cursor for subsequent "a" value lines (begin attributes)
gen.begin_attrs_idx=0;
maxTime = maxTime>transaction.beginTime?maxTime:transaction.beginTime
endTransaction=false
break
case "tx_end"://matcher = line =~ /^tx_end\s+(\d+)\s+(\d+)\s+(\d+)\s+([munpf]?s)/
// tokens: tx_end <tx id> <generator id> <time> <unit>
def id = Integer.parseInt(tokens[1])
transaction = transactionsById[id]
assert Integer.parseInt(tokens[2])==transaction.generator.id
transaction.endTime = Long.parseLong(tokens[3])*stringToScale(tokens[4])
// reset the cursor for subsequent "a" value lines (end attributes)
transaction.generator.end_attrs_idx=0;
maxTime = maxTime>transaction.endTime?maxTime:transaction.endTime
endTransaction=true
break
case "tx_record_attribute"://matcher = line =~ /^tx_record_attribute\s+(\d+)\s+"([^"]+)"\s+(\S+)\s*=\s*(.+)$/
// ad-hoc attribute attached directly to a transaction
def id = Integer.parseInt(tokens[1])
// strip the surrounding quotes from the attribute name
def name = tokens[2][1..-2]
def type = tokens[3] as DataType
// everything after the "=" token is the (possibly space-containing) value
def remaining = tokens.size()>5?tokens[5..-1].join(' '):""
transactionsById[id].attributes<<new TxAttribute(name, type, AssociationType.RECORD, remaining)
break
case "a"://matcher = line =~ /^a\s+(.+)$/
// positional attribute value: consumed against the declared begin or end
// attribute list of the current transaction's generator, in order
if(endTransaction){
transaction.attributes << new TxAttribute(transaction.generator.end_attrs[transaction.generator.end_attrs_idx], tokens[1])
transaction.generator.end_attrs_idx++
} else {
transaction.attributes << new TxAttribute(transaction.generator.begin_attrs[transaction.generator.begin_attrs_idx], tokens[1])
transaction.generator.begin_attrs_idx++
}
break
case "tx_relation"://matcher = line =~ /^tx_relation\s+\"(\S+)\"\s+(\d+)\s+(\d+)$/
// tokens: tx_relation "<type>" <target tx> <source tx>
Tx tr2= transactionsById[Integer.parseInt(tokens[2])]
Tx tr1= transactionsById[Integer.parseInt(tokens[3])]
// strip the surrounding quotes from the relation type name
def relType=tokens[1][1..-2]
if(!relationTypes.containsKey(relType)) relationTypes[relType]=RelationType.create(relType)
def rel = new TxRelation(relationTypes[relType], tr1, tr2)
tr1.outgoingRelations<<rel
tr2.incomingRelations<<rel
break
default:
println "Don't know what to do with: '$line'"
}
lineCnt++
}
}
|
|
|
|
|
2019-12-05 16:25:43 +01:00
|
|
|
/**
 * Maps a type name from the log file onto the DataType enum,
 * falling back to INTEGER for anything unknown (including null).
 */
private def toDataType(String str){
	def byName = [
		BOOLEAN                      : DataType.BOOLEAN,
		ENUMERATION                  : DataType.ENUMERATION,
		INTEGER                      : DataType.INTEGER,
		UNSIGNED                     : DataType.UNSIGNED,
		FLOATING_POINT_NUMBER        : DataType.FLOATING_POINT_NUMBER,
		BIT_VECTOR                   : DataType.BIT_VECTOR,
		LOGIC_VECTOR                 : DataType.LOGIC_VECTOR,
		FIXED_POINT_INTEGER          : DataType.FIXED_POINT_INTEGER,
		UNSIGNED_FIXED_POINT_INTEGER : DataType.UNSIGNED_FIXED_POINT_INTEGER,
		RECORD                       : DataType.RECORD,
		POINTER                      : DataType.POINTER,
		ARRAY                        : DataType.ARRAY,
		STRING                       : DataType.STRING
	]
	return byName[str] ?: DataType.INTEGER
}
|
|
|
|
|
2015-01-20 18:50:15 +01:00
|
|
|
/**
 * Touches getMaxConcurrency() on every stream so each one computes and caches
 * its concurrency layout after parsing has finished.
 */
private def calculateConcurrencyIndicees(){
	streams.each{ TxStream s ->
		s.getMaxConcurrency()
	}
}
|
2017-01-23 21:23:43 +01:00
|
|
|
|
|
|
|
|
2015-11-15 22:15:37 +01:00
|
|
|
/** @return every relation type encountered while loading the database */
public Collection<RelationType> getAllRelationTypes(){
	relationTypes.values()
}
|
|
|
|
|
2012-06-17 19:53:05 +02:00
|
|
|
}
|
2017-01-23 21:23:43 +01:00
|
|
|
|