fix max concurrency handling
		| @@ -78,31 +78,39 @@ public class TextDbLoader implements IWaveformDbLoader{ | ||||
| 		if(file.isDirectory() || !file.exists()) return false; | ||||
| 		this.db=db; | ||||
| 		this.streams = new ArrayList<>(); | ||||
| 		TextDbParser parser = new TextDbParser(this); | ||||
| 		boolean gzipped = isGzipped(file); | ||||
| 		try { | ||||
| 			boolean gzipped = isGzipped(file); | ||||
| 			if(isTxfile(gzipped?new GZIPInputStream(new FileInputStream(file)):new FileInputStream(file))){ | ||||
| 				File mapDbFile = File.createTempFile("."+file.getName(), null /*"tmp"*/, null /*file.parentFile*/); | ||||
| 				mapDbFile.delete(); | ||||
| 				mapDbFile.deleteOnExit(); | ||||
| 				this.mapDb = DBMaker | ||||
| 						.fileDB(mapDbFile) | ||||
| 						.fileMmapEnableIfSupported() | ||||
| 						.fileMmapPreclearDisable() | ||||
| 						.cleanerHackEnable() | ||||
| 						.allocateStartSize(64*1024*1024) | ||||
| 						.allocateIncrement(64*1024*1024) | ||||
| 						.make(); | ||||
| 				// NPE here ---> | ||||
| 				parseInput(gzipped?new GZIPInputStream(new FileInputStream(file)):new FileInputStream(file)); | ||||
| 				for(IWaveform stream: streams){ stream.getWidth(); } | ||||
| 				return true; | ||||
| 			} else  | ||||
| 				return false; | ||||
| 		if(!isTxfile(gzipped?new GZIPInputStream(new FileInputStream(file)):new FileInputStream(file))) | ||||
| 			return false; | ||||
| 		} catch(Throwable e) { | ||||
| 			throw new InputFormatException(); | ||||
| 		} | ||||
| 		File mapDbFile; | ||||
| 		try { | ||||
| 			mapDbFile = File.createTempFile("."+file.getName(), null /*"tmp"*/, null /*file.parentFile*/); | ||||
| 		} catch (IOException e1) { | ||||
| 			return false; | ||||
| 		} | ||||
| 		mapDbFile.delete(); // we just need a file name | ||||
| 		mapDbFile.deleteOnExit(); | ||||
| 		this.mapDb = DBMaker | ||||
| 				.fileDB(mapDbFile) | ||||
| 				.fileMmapEnableIfSupported() | ||||
| 				.fileMmapPreclearDisable() | ||||
| 				.cleanerHackEnable() | ||||
| 				.allocateStartSize(64*1024*1024) | ||||
| 				.allocateIncrement(64*1024*1024) | ||||
| 				.make(); | ||||
| 		try { | ||||
| 				parser.parseInput(gzipped?new GZIPInputStream(new FileInputStream(file)):new FileInputStream(file)); | ||||
| 		} catch(IllegalArgumentException|ArrayIndexOutOfBoundsException e) { | ||||
| 		} catch(Throwable e) { | ||||
| 			System.out.println("---->>> Exception "+e.toString()+" caught while loading database"); | ||||
| 			e.printStackTrace(); | ||||
| 			return false; | ||||
| 		} | ||||
| 		streams.addAll(parser.streamsById.values()); | ||||
| 		return true; | ||||
| 	} | ||||
|  | ||||
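The reworked load() above separates the cheap checks (gzip detection, text-format probe) from the expensive setup (temp-file-backed MapDB, full parse), so a file that is not a text transaction database is rejected before any temp file or MapDB instance is created. Below is a minimal, self-contained sketch of that detection step; the bodies of isGzipped and isTxfile are illustrative guesses, not the project's actual implementations.

```java
// Hypothetical sketch of the detection half of load(): check the GZIP magic
// bytes, peek at the first line to decide whether this looks like an SCV text
// database, and only open the real parse stream afterwards.
import java.io.*;
import java.util.zip.GZIPInputStream;

class LoadFlowSketch {
    static boolean isGzipped(File f) {
        try (InputStream is = new FileInputStream(f)) {
            byte[] sig = new byte[2];
            int n = is.read(sig);
            // GZIP files start with the two magic bytes 0x1f 0x8b
            return n == 2 && (sig[0] & 0xff) == 0x1f && (sig[1] & 0xff) == 0x8b;
        } catch (IOException e) {
            return false;
        }
    }

    static boolean isTxfile(InputStream is) {
        // Assumed heuristic: an SCV text database begins with an scv_tr_stream record
        try (BufferedReader r = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
            String first = r.readLine();
            return first != null && first.startsWith("scv_tr_stream");
        } catch (IOException e) {
            return false;
        }
    }

    static InputStream open(File file, boolean gzipped) throws IOException {
        // same pattern the diff uses for both the probe and the real parse
        return gzipped ? new GZIPInputStream(new FileInputStream(file)) : new FileInputStream(file);
    }
}
```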
| @@ -131,131 +139,141 @@ public class TextDbLoader implements IWaveformDbLoader{ | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	private long stringToScale(String scale){ | ||||
| 		String cmp = scale.trim(); | ||||
| 		if("fs".equals(cmp)) return 1L; | ||||
| 		if("ps".equals(cmp)) return 1000L; | ||||
| 		if("ns".equals(cmp)) return 1000000L; | ||||
| 		if("us".equals(cmp)) return 1000000000L; | ||||
| 		if("ms".equals(cmp)) return 1000000000000L; | ||||
| 		if("s".equals(cmp) ) return 1000000000000000L; | ||||
| 		return 1L; | ||||
| 	} | ||||
|  | ||||
| 	static final Pattern scv_tr_stream = Pattern.compile("^scv_tr_stream\\s+\\(ID (\\d+),\\s+name\\s+\"([^\"]+)\",\\s+kind\\s+\"([^\"]+)\"\\)$"); | ||||
| 	static final Pattern scv_tr_generator = Pattern.compile("^scv_tr_generator\\s+\\(ID\\s+(\\d+),\\s+name\\s+\"([^\"]+)\",\\s+scv_tr_stream\\s+(\\d+),$"); | ||||
| 	static final Pattern begin_attribute = Pattern.compile("^begin_attribute \\(ID (\\d+), name \"([^\"]+)\", type \"([^\"]+)\"\\)$"); | ||||
| 	static final Pattern end_attribute = Pattern.compile("^end_attribute \\(ID (\\d+), name \"([^\"]+)\", type \"([^\"]+)\"\\)$"); | ||||
|  | ||||
| 	HashMap<Long, TxStream> streamsById = new HashMap<Long, TxStream>(); | ||||
| 	HashMap<Long, TxGenerator> generatorsById = new HashMap<Long, TxGenerator>(); | ||||
| 	HashMap<Long, Tx> transactionsById = new HashMap<Long, Tx>(); | ||||
| 	TxGenerator generator = null; | ||||
| 	Tx transaction = null; | ||||
| 	boolean endTransaction=false; | ||||
| 	BufferedReader reader =null; | ||||
| 	 | ||||
| 	private void parseInput(InputStream inputStream) throws IOException{ | ||||
| 		reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8")); | ||||
| 		String curLine = reader.readLine(); | ||||
| 		String nextLine = null; | ||||
| 		while((nextLine=reader.readLine())!=null && curLine!=null) { | ||||
| 			curLine=parseLine(curLine, nextLine); | ||||
| 		} | ||||
| 		if(curLine!=null) | ||||
| 			parseLine(curLine, nextLine); | ||||
| 	} | ||||
|  | ||||
| 	private String parseLine(String curLine, String nextLine) throws IOException{ | ||||
| 		String[] tokens = curLine.split("\\s+"); | ||||
| 		if("tx_record_attribute".equals(tokens[0])){//matcher = line =~ /^tx_record_attribute\s+(\d+)\s+"([^"]+)"\s+(\S+)\s*=\s*(.+)$/ | ||||
| 			Long id = Long.parseLong(tokens[1]); | ||||
| 			String name = tokens[2].substring(1, tokens[2].length()); | ||||
| 			DataType type = DataType.valueOf(tokens[3]); | ||||
| 			String remaining = tokens.length>5?String.join(" ", Arrays.copyOfRange(tokens, 5, tokens.length-1)):""; | ||||
| 			transactionsById.get(id).getAttributes().add(new TxAttribute(name, type, AssociationType.RECORD, remaining)); | ||||
| 		} else if("tx_begin".equals(tokens[0])){//matcher = line =~ /^tx_begin\s+(\d+)\s+(\d+)\s+(\d+)\s+([munpf]?s)/ | ||||
| 			Long id = Long.parseLong(tokens[1]); | ||||
| 			TxGenerator gen=generatorsById.get(Long.parseLong(tokens[2])); | ||||
| 			transaction = new Tx(id, gen.getStream(), gen, Long.parseLong(tokens[3])*stringToScale(tokens[4])); | ||||
| 			gen.getTransactions().add(transaction); | ||||
| 			transactionsById.put(id, transaction); | ||||
| 			maxTime = maxTime>transaction.getBeginTime()?maxTime:transaction.getBeginTime(); | ||||
| 			if(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 				int idx=0; | ||||
| 				while(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 					String[] attrTokens=nextLine.split("\\s+"); | ||||
| 					TxAttribute attr = new TxAttribute(gen.getBeginAttrs().get(idx), attrTokens[1]); | ||||
| 					transaction.getAttributes().add(attr); | ||||
| 					idx++; | ||||
| 					nextLine=reader.readLine(); | ||||
| 				} | ||||
| 			} | ||||
| 		} else if("tx_end".equals(tokens[0])){//matcher = line =~ /^tx_end\s+(\d+)\s+(\d+)\s+(\d+)\s+([munpf]?s)/ | ||||
| 			Long id = Long.parseLong(tokens[1]); | ||||
| 			transaction = transactionsById.get(id); | ||||
| 			assert Integer.parseInt(tokens[2])==transaction.getGenerator().getId(); | ||||
| 			transaction.setEndTime(Long.parseLong(tokens[3])*stringToScale(tokens[4])); | ||||
| 			maxTime = maxTime>transaction.getEndTime()?maxTime:transaction.getEndTime(); | ||||
| 			if(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 				TxGenerator gen = (TxGenerator) transaction.getGenerator(); | ||||
| 				int idx=0; | ||||
| 				while(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 					String[] attrTokens=nextLine.split("\\s+"); | ||||
| 					TxAttribute attr = new TxAttribute(gen.getEndAttrs().get(idx), attrTokens[1]); | ||||
| 					transaction.getAttributes().add(attr); | ||||
| 					idx++; | ||||
| 					nextLine=reader.readLine(); | ||||
| 				} | ||||
| 			} | ||||
| 		} else if("tx_relation".equals(tokens[0])){//matcher = line =~ /^tx_relation\s+\"(\S+)\"\s+(\d+)\s+(\d+)$/ | ||||
| 			Tx tr2= transactionsById.get(Long.parseLong(tokens[2])); | ||||
| 			Tx tr1= transactionsById.get(Long.parseLong(tokens[3])); | ||||
| 			String relType=tokens[1].substring(1, tokens[1].length()-2); | ||||
| 			if(!relationTypes.containsKey(relType)) | ||||
| 				relationTypes.put(relType, RelationType.create(relType)); | ||||
| 			TxRelation rel = new TxRelation(relationTypes.get(relType), tr1, tr2); | ||||
| 			tr1.getOutgoingRelations().add(rel); | ||||
| 			tr2.getIncomingRelations().add(rel); | ||||
| 		} else if("scv_tr_stream".equals(tokens[0])){ | ||||
| 			Matcher matcher = scv_tr_stream.matcher(curLine); | ||||
| 			if (matcher.matches()) { | ||||
| 				Long id = Long.parseLong(matcher.group(1)); | ||||
| 				TxStream stream = new TxStream(this, id, matcher.group(2), matcher.group(3)); | ||||
| 				streams.add(stream); | ||||
| 				streamsById.put(id, stream); | ||||
| 			} | ||||
| 		} else if("scv_tr_generator".equals(tokens[0])){ | ||||
| 			Matcher matcher = scv_tr_generator.matcher(curLine); | ||||
| 			if ((matcher.matches())) { | ||||
| 				Long id = Long.parseLong(matcher.group(1)); | ||||
| 				TxStream stream=streamsById.get(Long.parseLong(matcher.group(3))); | ||||
| 				generator=new TxGenerator(id, stream, matcher.group(2)); | ||||
| 				stream.getGenerators().add(generator); | ||||
| 				generatorsById.put(id,  generator); | ||||
| 			} | ||||
| 		} else if("begin_attribute".equals(tokens[0])){ | ||||
| 			Matcher matcher = begin_attribute.matcher(curLine); | ||||
| 			if ((matcher.matches())) { | ||||
| 				generator.getBeginAttrs().add(TxAttributeType.getAttrType(matcher.group(2), DataType.valueOf(matcher.group(3)), AssociationType.BEGIN)); | ||||
| 			} | ||||
| 		} else if("end_attribute".equals(tokens[0])){ | ||||
| 			Matcher matcher = end_attribute.matcher(curLine); | ||||
| 			if ((matcher.matches())) { | ||||
| 				generator.getEndAttrs().add(TxAttributeType.getAttrType(matcher.group(2), DataType.valueOf(matcher.group(3)), AssociationType.END)); | ||||
| 			} | ||||
| 		} else if(")".equals(tokens[0])){ | ||||
| 			generator=null; | ||||
| 		} else if("a".equals(tokens[0])){//matcher = line =~ /^a\s+(.+)$/ | ||||
| 			System.out.println("Don't know what to do with: '"+curLine+"'"); | ||||
| 		} else | ||||
| 			System.out.println("Don't know what to do with: '"+curLine+"'"); | ||||
| 		return nextLine; | ||||
| 	} | ||||
|  | ||||
| 	public Collection<RelationType> getAllRelationTypes(){ | ||||
| 		return relationTypes.values(); | ||||
| 	} | ||||
|  | ||||
| 	static class TextDbParser { | ||||
| 		static final Pattern scv_tr_stream = Pattern.compile("^scv_tr_stream\\s+\\(ID (\\d+),\\s+name\\s+\"([^\"]+)\",\\s+kind\\s+\"([^\"]+)\"\\)$"); | ||||
| 		static final Pattern scv_tr_generator = Pattern.compile("^scv_tr_generator\\s+\\(ID\\s+(\\d+),\\s+name\\s+\"([^\"]+)\",\\s+scv_tr_stream\\s+(\\d+),$"); | ||||
| 		static final Pattern begin_attribute = Pattern.compile("^begin_attribute \\(ID (\\d+), name \"([^\"]+)\", type \"([^\"]+)\"\\)$"); | ||||
| 		static final Pattern end_attribute = Pattern.compile("^end_attribute \\(ID (\\d+), name \"([^\"]+)\", type \"([^\"]+)\"\\)$"); | ||||
|  | ||||
| 		HashMap<Long, TxStream> streamsById = new HashMap<Long, TxStream>(); | ||||
| 		HashMap<Long, TxGenerator> generatorsById = new HashMap<Long, TxGenerator>(); | ||||
| 		HashMap<Long, Tx> transactionsById = new HashMap<Long, Tx>(); | ||||
| 		TxGenerator generator = null; | ||||
| 		Tx transaction = null; | ||||
| 		boolean endTransaction=false; | ||||
| 		final TextDbLoader loader; | ||||
| 		BufferedReader reader =null; | ||||
|  | ||||
| 		public TextDbParser(TextDbLoader loader) { | ||||
| 			super(); | ||||
| 			this.loader = loader; | ||||
| 		} | ||||
|  | ||||
| 		void parseInput(InputStream inputStream) throws IOException{ | ||||
| 			reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8")); | ||||
| 			String curLine = reader.readLine(); | ||||
| 			String nextLine = null; | ||||
| 			while((nextLine=reader.readLine())!=null && curLine!=null) { | ||||
| 				curLine=parseLine(curLine, nextLine); | ||||
| 			} | ||||
| 			if(curLine!=null) | ||||
| 				parseLine(curLine, nextLine); | ||||
| 		} | ||||
|  | ||||
| 		private String parseLine(String curLine, String nextLine) throws IOException{ | ||||
| 			String[] tokens = curLine.split("\\s+"); | ||||
| 			if("tx_record_attribute".equals(tokens[0])){//matcher = line =~ /^tx_record_attribute\s+(\d+)\s+"([^"]+)"\s+(\S+)\s*=\s*(.+)$/ | ||||
| 				Long id = Long.parseLong(tokens[1]); | ||||
| 				String name = tokens[2].substring(1, tokens[2].length()); | ||||
| 				DataType type = DataType.valueOf(tokens[3]); | ||||
| 				String remaining = tokens.length>5?String.join(" ", Arrays.copyOfRange(tokens, 5, tokens.length-1)):""; | ||||
| 				transactionsById.get(id).getAttributes().add(new TxAttribute(name, type, AssociationType.RECORD, remaining)); | ||||
| 			} else if("tx_begin".equals(tokens[0])){ | ||||
| 				Long id = Long.parseLong(tokens[1]); | ||||
| 				TxGenerator gen=generatorsById.get(Long.parseLong(tokens[2])); | ||||
| 				TxStream stream = (TxStream) gen.getStream(); | ||||
| 				stream.setConcurrency(stream.getConcurrency()+1); | ||||
| 				transaction = new Tx(id, gen.getStream(), gen, Long.parseLong(tokens[3])*stringToScale(tokens[4])); | ||||
| 				gen.getTransactions().add(transaction); | ||||
| 				transactionsById.put(id, transaction); | ||||
| 				loader.maxTime = loader.maxTime>transaction.getBeginTime()?loader.maxTime:transaction.getBeginTime(); | ||||
| 				if(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 					int idx=0; | ||||
| 					while(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 						String[] attrTokens=nextLine.split("\\s+"); | ||||
| 						TxAttribute attr = new TxAttribute(gen.getBeginAttrs().get(idx), attrTokens[1]); | ||||
| 						transaction.getAttributes().add(attr); | ||||
| 						idx++; | ||||
| 						nextLine=reader.readLine(); | ||||
| 					} | ||||
| 				} | ||||
| 			} else if("tx_end".equals(tokens[0])){//matcher = line =~ /^tx_end\s+(\d+)\s+(\d+)\s+(\d+)\s+([munpf]?s)/ | ||||
| 				Long id = Long.parseLong(tokens[1]); | ||||
| 				transaction = transactionsById.get(id); | ||||
| 				assert Integer.parseInt(tokens[2])==transaction.getGenerator().getId(); | ||||
| 				transaction.setEndTime(Long.parseLong(tokens[3])*stringToScale(tokens[4])); | ||||
| 				loader.maxTime = loader.maxTime>transaction.getEndTime()?loader.maxTime:transaction.getEndTime(); | ||||
| 				TxGenerator gen = (TxGenerator) transaction.getGenerator(); | ||||
| 				TxStream stream = (TxStream) gen.getStream(); | ||||
| 				stream.setConcurrency(stream.getConcurrency()-1); | ||||
| 				if(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 					int idx=0; | ||||
| 					while(nextLine!=null && nextLine.charAt(0)=='a') { | ||||
| 						String[] attrTokens=nextLine.split("\\s+"); | ||||
| 						TxAttribute attr = new TxAttribute(gen.getEndAttrs().get(idx), attrTokens[1]); | ||||
| 						transaction.getAttributes().add(attr); | ||||
| 						idx++; | ||||
| 						nextLine=reader.readLine(); | ||||
| 					} | ||||
| 				} | ||||
| 			} else if("tx_relation".equals(tokens[0])){//matcher = line =~ /^tx_relation\s+\"(\S+)\"\s+(\d+)\s+(\d+)$/ | ||||
| 				Tx tr2= transactionsById.get(Long.parseLong(tokens[2])); | ||||
| 				Tx tr1= transactionsById.get(Long.parseLong(tokens[3])); | ||||
| 				String relType=tokens[1].substring(1, tokens[1].length()-2); | ||||
| 				if(!loader.relationTypes.containsKey(relType)) | ||||
| 					loader.relationTypes.put(relType, RelationType.create(relType)); | ||||
| 				TxRelation rel = new TxRelation(loader.relationTypes.get(relType), tr1, tr2); | ||||
| 				tr1.getOutgoingRelations().add(rel); | ||||
| 				tr2.getIncomingRelations().add(rel); | ||||
| 			} else if("scv_tr_stream".equals(tokens[0])){ | ||||
| 				Matcher matcher = scv_tr_stream.matcher(curLine); | ||||
| 				if (matcher.matches()) { | ||||
| 					Long id = Long.parseLong(matcher.group(1)); | ||||
| 					TxStream stream = new TxStream(loader, id, matcher.group(2), matcher.group(3)); | ||||
| 					streamsById.put(id, stream); | ||||
| 				} | ||||
| 			} else if("scv_tr_generator".equals(tokens[0])){ | ||||
| 				Matcher matcher = scv_tr_generator.matcher(curLine); | ||||
| 				if ((matcher.matches())) { | ||||
| 					Long id = Long.parseLong(matcher.group(1)); | ||||
| 					TxStream stream=streamsById.get(Long.parseLong(matcher.group(3))); | ||||
| 					generator=new TxGenerator(id, stream, matcher.group(2)); | ||||
| 					stream.getGenerators().add(generator); | ||||
| 					generatorsById.put(id,  generator); | ||||
| 				} | ||||
| 			} else if("begin_attribute".equals(tokens[0])){ | ||||
| 				Matcher matcher = begin_attribute.matcher(curLine); | ||||
| 				if ((matcher.matches())) { | ||||
| 					generator.getBeginAttrs().add(TxAttributeType.getAttrType(matcher.group(2), DataType.valueOf(matcher.group(3)), AssociationType.BEGIN)); | ||||
| 				} | ||||
| 			} else if("end_attribute".equals(tokens[0])){ | ||||
| 				Matcher matcher = end_attribute.matcher(curLine); | ||||
| 				if ((matcher.matches())) { | ||||
| 					generator.getEndAttrs().add(TxAttributeType.getAttrType(matcher.group(2), DataType.valueOf(matcher.group(3)), AssociationType.END)); | ||||
| 				} | ||||
| 			} else if(")".equals(tokens[0])){ | ||||
| 				generator=null; | ||||
| 			} else if("a".equals(tokens[0])){//matcher = line =~ /^a\s+(.+)$/ | ||||
| 				System.out.println("Don't know what to do with: '"+curLine+"'"); | ||||
| 			} else | ||||
| 				System.out.println("Don't know what to do with: '"+curLine+"'"); | ||||
| 			return nextLine; | ||||
| 		} | ||||
| 		 | ||||
| 		private long stringToScale(String scale){ | ||||
| 			String cmp = scale.trim(); | ||||
| 			if("fs".equals(cmp)) return 1L; | ||||
| 			if("ps".equals(cmp)) return 1000L; | ||||
| 			if("ns".equals(cmp)) return 1000000L; | ||||
| 			if("us".equals(cmp)) return 1000000000L; | ||||
| 			if("ms".equals(cmp)) return 1000000000000L; | ||||
| 			if("s".equals(cmp) ) return 1000000000000000L; | ||||
| 			return 1L; | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
|   | ||||
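stringToScale() normalizes every timestamp to femtoseconds before it is stored, so all begin/end times share one base unit. The enum below is an illustrative reformulation of that mapping, not code from the commit; unknown units fall back to a factor of 1, matching the original.

```java
// Sketch of the femtosecond-based time scaling: each unit maps to its factor
// in femtoseconds, and a timestamp is multiplied by that factor on parse.
import java.util.Arrays;

enum TimeScale {
    FS(1L), PS(1_000L), NS(1_000_000L), US(1_000_000_000L),
    MS(1_000_000_000_000L), S(1_000_000_000_000_000L);

    final long femtos;
    TimeScale(long femtos) { this.femtos = femtos; }

    // "10", "ns" -> 10_000_000 fs; an unrecognized unit leaves the value unscaled
    static long toFemtos(long value, String unit) {
        return Arrays.stream(values())
                .filter(u -> u.name().equalsIgnoreCase(unit.trim()))
                .mapToLong(u -> value * u.femtos)
                .findFirst().orElse(value);
    }
}
```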
| @@ -40,8 +40,20 @@ class TxStream extends HierNode implements IWaveform, Serializable { | ||||
| 			 | ||||
| 	private ArrayList<ITxGenerator> generators = new ArrayList<ITxGenerator>(); | ||||
| 	 | ||||
| 	private int maxConcurrency; | ||||
| 	private int maxConcurrency = 0; | ||||
| 	 | ||||
| 	private int concurrency = 0; | ||||
|  | ||||
| 	void setConcurrency(int concurrency) { | ||||
| 		this.concurrency = concurrency; | ||||
| 		if(concurrency>maxConcurrency) | ||||
| 			maxConcurrency = concurrency; | ||||
| 	} | ||||
|  | ||||
| 	int getConcurrency() { | ||||
| 		return this.concurrency; | ||||
| 	} | ||||
|  | ||||
| 	private BTreeMap<Long, IEvent[]> events; | ||||
| 	 | ||||
| 	@SuppressWarnings("unchecked") | ||||
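The new concurrency/maxConcurrency pair turns width computation into simple bookkeeping during parsing: every tx_begin raises the stream's live count, every tx_end lowers it, and the high-water mark is the number of rows the stream needs. A hypothetical stand-alone version of that counter, with names that stand in for TxStream's new fields:

```java
// Illustrative counter pattern behind setConcurrency()/getConcurrency():
// track the number of currently open transactions and remember the maximum.
class ConcurrencyCounter {
    private int current = 0;   // transactions currently open on this stream
    private int maxSeen = 0;   // high-water mark, i.e. the required row count

    void begin() {             // called for every tx_begin record
        current++;
        if (current > maxSeen) maxSeen = current;
    }

    void end() {               // called for every tx_end record
        current--;
    }

    int width() {              // what getWidth() can now return directly
        return maxSeen;
    }
}
```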
| @@ -59,45 +71,9 @@ class TxStream extends HierNode implements IWaveform, Serializable { | ||||
|  | ||||
| 	@Override | ||||
| 	public int getWidth() { | ||||
| 		if(maxConcurrency==0){ | ||||
| 			for(ITxGenerator generator:getGenerators()) { | ||||
| 				for(ITx tx:generator.getTransactions()){ | ||||
| 					putEvent(new TxEvent(EventKind.BEGIN, tx)); | ||||
| 					putEvent(new TxEvent(EventKind.END, tx)); | ||||
| 				} | ||||
| 			} | ||||
| 			ArrayList<Long> rowendtime = new ArrayList<Long>(); | ||||
| 			rowendtime.add(0l); | ||||
| 			for(Long time: events.keySet()){ | ||||
| 				IEvent[] value=events.get(time); | ||||
| 				Arrays.asList(value).stream().filter(event -> event.getKind()==EventKind.BEGIN).forEach(event -> { | ||||
| 					ITx tx = ((ITxEvent)event).getTransaction(); | ||||
| 					int rowIdx = 0; | ||||
| 					for(rowIdx=0; rowIdx<rowendtime.size() && rowendtime.get(rowIdx)>tx.getBeginTime(); rowIdx++); | ||||
| 					if(rowendtime.size()<=rowIdx) | ||||
| 						rowendtime.add(tx.getEndTime()); | ||||
| 					else | ||||
| 						rowendtime.set(rowIdx, tx.getEndTime()); | ||||
| 					((Tx)tx).setConcurrencyIndex(rowIdx); | ||||
| 					 | ||||
| 				}); | ||||
| 			} | ||||
| 			maxConcurrency=rowendtime.size(); | ||||
| 		} | ||||
| 		return maxConcurrency; | ||||
| 	} | ||||
|  | ||||
| 	private void putEvent(ITxEvent event){ | ||||
| 		if(!events.containsKey(event.getTime()))  | ||||
| 			events.put(event.getTime(), new ITxEvent[]{event} ); | ||||
| 		else { | ||||
| 			IEvent[] entries = events.get(event.getTime()); | ||||
| 			IEvent[] newEntries = Arrays.copyOf(entries, entries.length+1); | ||||
| 			newEntries[entries.length]=event; | ||||
| 			events.put(event.getTime(), newEntries); | ||||
| 		} | ||||
| 	} | ||||
| 	 | ||||
| 	@Override | ||||
| 	public NavigableMap<Long, IEvent[]> getEvents() { | ||||
| 		return (NavigableMap<Long, IEvent[]>)events; | ||||
|   | ||||
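For contrast, the getWidth() logic removed above computed the row count lazily: it walked the BEGIN events in time order and greedily packed each transaction into the first row that was already free. A simplified sketch of that packing, operating on plain {begin, end} pairs instead of ITx objects stored in the MapDB BTreeMap:

```java
// Sketch of the removed greedy row-packing: reuse the first row whose last
// end time does not overlap the new transaction, otherwise open a new row.
import java.util.ArrayList;
import java.util.List;

class RowPackingSketch {
    // intervals must be sorted by begin time; returns the number of rows needed
    static int assignRows(List<long[]> intervals /* {begin, end} pairs */) {
        List<Long> rowEndTime = new ArrayList<>();
        for (long[] tx : intervals) {
            int row = 0;
            // find the first row that is free at the transaction's begin time
            while (row < rowEndTime.size() && rowEndTime.get(row) > tx[0]) row++;
            if (row == rowEndTime.size()) rowEndTime.add(tx[1]);
            else rowEndTime.set(row, tx[1]);
            // the original also recorded the chosen row on the transaction
            // via setConcurrencyIndex(row)
        }
        return rowEndTime.size();
    }
}
```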