/*******************************************************************************
 * Copyright (c) 2012, 2013 Ericsson
 * Copyright (c) 2010, 2011 École Polytechnique de Montréal
 * Copyright (c) 2010, 2011 Alexandre Montplaisir <alexandre.montplaisir@gmail.com>
 *
 * All rights reserved. This program and the accompanying materials are
 * made available under the terms of the Eclipse Public License v1.0 which
 * accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *******************************************************************************/
package org.eclipse.linuxtools.internal.tmf.core.statesystem.backends.historytree;

import java.io.IOException;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.eclipse.linuxtools.tmf.core.exceptions.TimeRangeException;
import org.eclipse.linuxtools.tmf.core.interval.ITmfStateInterval;
import org.eclipse.linuxtools.tmf.core.statevalue.TmfStateValue;
29 * The base class for all the types of nodes that go in the History Tree.
33 abstract class HTNode
{
36 * Size of an entry in the data section.
38 * 16 2 x Timevalue/long (interval start + end)
41 * + 4 int (valueOffset)
43 protected static final int DATA_ENTRY_SIZE
= 25;
45 /* Reference to the History Tree to whom this node belongs */
46 private final HistoryTree ownerTree
;
48 /* Time range of this node */
49 private final long nodeStart
;
52 /* Sequence number = position in the node section of the file */
53 private final int sequenceNumber
;
54 private int parentSequenceNumber
; /* = -1 if this node is the root node */
56 /* Where the Strings section begins (from the start of the node */
57 private int stringSectionOffset
;
59 /* True if this node is closed (and to be committed to disk) */
60 private boolean isDone
;
62 /* Vector containing all the intervals contained in this node */
63 private final List
<HTInterval
> intervals
;
65 HTNode(HistoryTree tree
, int seqNumber
, int parentSeqNumber
, long start
) {
66 this.ownerTree
= tree
;
67 this.nodeStart
= start
;
68 this.sequenceNumber
= seqNumber
;
69 this.parentSequenceNumber
= parentSeqNumber
;
71 this.stringSectionOffset
= ownerTree
.getConfig().getBlockSize();
73 this.intervals
= new ArrayList
<HTInterval
>();
77 * Reader factory constructor. Build a Node object (of the right type) by
78 * reading a block in the file.
81 * Reference to the HT which will own this node
83 * FileChannel to the history file, ALREADY SEEKED at the start
87 static final HTNode
readNode(HistoryTree tree
, FileChannel fc
)
89 HTNode newNode
= null;
92 ByteBuffer buffer
= ByteBuffer
.allocate(tree
.getConfig().getBlockSize());
93 buffer
.order(ByteOrder
.LITTLE_ENDIAN
);
95 res
= fc
.read(buffer
);
96 assert (res
== tree
.getConfig().getBlockSize());
99 /* Read the common header part */
100 byte type
= buffer
.get();
101 long start
= buffer
.getLong();
102 long end
= buffer
.getLong();
103 int seqNb
= buffer
.getInt();
104 int parentSeqNb
= buffer
.getInt();
105 int intervalCount
= buffer
.getInt();
106 int stringSectionOffset
= buffer
.getInt();
107 boolean done
= byteToBool(buffer
.get());
109 /* Now the rest of the header depends on the node type */
113 newNode
= new CoreNode(tree
, seqNb
, parentSeqNb
, start
);
114 newNode
.readSpecificHeader(buffer
);
117 // TODO implement other node types
124 // /* "Claudette" (extended) nodes */
128 /* Unrecognized node type */
129 throw new IOException();
133 * At this point, we should be done reading the header and 'buffer'
134 * should only have the intervals left
136 for (i
= 0; i
< intervalCount
; i
++) {
137 newNode
.intervals
.add(HTInterval
.readFrom(buffer
));
140 /* Assign the node's other information we have read previously */
141 newNode
.nodeEnd
= end
;
142 newNode
.stringSectionOffset
= stringSectionOffset
;
143 newNode
.isDone
= done
;
148 final void writeSelf(FileChannel fc
) throws IOException
{
149 final int blockSize
= ownerTree
.getConfig().getBlockSize();
150 int curStringsEntryEndPos
= blockSize
;
152 ByteBuffer buffer
= ByteBuffer
.allocate(blockSize
);
153 buffer
.order(ByteOrder
.LITTLE_ENDIAN
);
156 /* Write the common header part */
157 buffer
.put(this.getNodeType());
158 buffer
.putLong(nodeStart
);
159 buffer
.putLong(nodeEnd
);
160 buffer
.putInt(sequenceNumber
);
161 buffer
.putInt(parentSequenceNumber
);
162 buffer
.putInt(intervals
.size());
163 buffer
.putInt(stringSectionOffset
);
164 buffer
.put(boolToByte(isDone
));
166 /* Now call the inner method to write the specific header part */
167 this.writeSpecificHeader(buffer
);
169 /* Back to us, we write the intervals */
170 for (HTInterval interval
: intervals
) {
171 int size
= interval
.writeInterval(buffer
, curStringsEntryEndPos
);
172 curStringsEntryEndPos
-= size
;
176 * Write padding between the end of the Data section and the start of
177 * the Strings section (needed to fill the node in case there is no
180 while (buffer
.position() < stringSectionOffset
) {
181 buffer
.put((byte) 0);
185 * If the offsets were right, the size of the Strings section should be
186 * == to the expected size
188 assert (curStringsEntryEndPos
== stringSectionOffset
);
190 /* Finally, write everything in the Buffer to disk */
192 // if we don't do this, flip() will lose what's after.
193 buffer
.position(blockSize
);
196 int res
= fc
.write(buffer
);
197 assert (res
== blockSize
);
200 // ------------------------------------------------------------------------
202 // ------------------------------------------------------------------------
204 protected HistoryTree
getTree() {
208 long getNodeStart() {
219 int getSequenceNumber() {
220 return sequenceNumber
;
223 int getParentSequenceNumber() {
224 return parentSequenceNumber
;
228 * Change this node's parent. Used when we create a new root node for
231 void setParentSequenceNumber(int newParent
) {
232 parentSequenceNumber
= newParent
;
240 * Add an interval to this node
244 void addInterval(HTInterval newInterval
) {
245 /* Just in case, but should be checked before even calling this function */
246 assert (newInterval
.getIntervalSize() <= this.getNodeFreeSpace());
248 intervals
.add(newInterval
);
250 /* Update the in-node offset "pointer" */
251 stringSectionOffset
-= (newInterval
.getStringsEntrySize());
255 * We've received word from the containerTree that newest nodes now exist to
256 * our right. (Puts isDone = true and sets the endtime)
259 * The nodeEnd time that the node will have
260 * @throws TimeRangeException
262 void closeThisNode(long endtime
) {
263 assert (endtime
>= this.nodeStart
);
265 if (intervals
.size() > 0) {
267 * Sort the intervals by ascending order of their end time. This
268 * speeds up lookups a bit
270 Collections
.sort(intervals
);
273 * Make sure there are no intervals in this node with their EndTime
274 * > the one requested. Only need to check the last one since they
277 assert (endtime
>= intervals
.get(intervals
.size() - 1).getEndTime());
281 this.nodeEnd
= endtime
;
286 * The method to fill up the stateInfo (passed on from the Current State
287 * Tree when it does a query on the SHT). We'll replace the data in that
288 * vector with whatever relevant we can find from this node
291 * The same stateInfo that comes from SHT's doQuery()
293 * The timestamp for which the query is for. Only return
294 * intervals that intersect t.
295 * @throws TimeRangeException
297 void writeInfoFromNode(List
<ITmfStateInterval
> stateInfo
, long t
)
298 throws TimeRangeException
{
299 assert (this.isDone
); // not sure this will always be the case...
302 if (intervals
.size() == 0) {
305 startIndex
= getStartIndexFor(t
);
307 for (int i
= startIndex
; i
< intervals
.size(); i
++) {
309 * Now we only have to compare the Start times, since we now the End
310 * times necessarily fit
312 if (intervals
.get(i
).getStartTime() <= t
) {
313 stateInfo
.set(intervals
.get(i
).getAttribute(), intervals
.get(i
));
320 * Get a single Interval from the information in this node If the
321 * key/timestamp pair cannot be found, we return null.
325 * @return The Interval containing the information we want, or null if it
327 * @throws TimeRangeException
329 HTInterval
getRelevantInterval(int key
, long t
) throws TimeRangeException
{
330 assert (this.isDone
);
332 HTInterval curInterval
;
334 if (intervals
.size() == 0) {
338 startIndex
= getStartIndexFor(t
);
340 for (int i
= startIndex
; i
< intervals
.size(); i
++) {
341 curInterval
= intervals
.get(i
);
342 if (curInterval
.getAttribute() == key
343 && curInterval
.getStartTime() <= t
344 && curInterval
.getEndTime() >= t
) {
348 /* We didn't find the relevant information in this node */
352 private int getStartIndexFor(long t
) throws TimeRangeException
{
357 * Since the intervals are sorted by end time, we can skip all the ones
358 * at the beginning whose end times are smaller than 't'. Java does
359 * provides a .binarySearch method, but its API is quite weird...
361 dummy
= new HTInterval(0, t
, 0, TmfStateValue
.nullValue());
362 index
= Collections
.binarySearch(intervals
, dummy
);
366 * .binarySearch returns a negative number if the exact value was
367 * not found. Here we just want to know where to start searching, we
368 * don't care if the value is exact or not.
374 /* Sometimes binarySearch yields weird stuff... */
378 if (index
>= intervals
.size()) {
379 index
= intervals
.size() - 1;
383 * Another API quirkiness, the returned index is the one of the *last*
384 * element of a series of equal endtimes, which happens sometimes. We
385 * want the *first* element of such a series, to read through them
389 && intervals
.get(index
- 1).compareTo(intervals
.get(index
)) == 0) {
397 * @return The offset, within the node, where the Data section ends
399 private int getDataSectionEndOffset() {
400 return this.getTotalHeaderSize() + HTNode
.DATA_ENTRY_SIZE
* intervals
.size();
404 * Returns the free space in the node, which is simply put, the
405 * stringSectionOffset - dataSectionOffset
407 int getNodeFreeSpace() {
408 return stringSectionOffset
- this.getDataSectionEndOffset();
412 * Returns the current space utilisation of this node, as a percentage.
413 * (used space / total usable space, which excludes the header)
415 long getNodeUsagePRC() {
416 final int blockSize
= ownerTree
.getConfig().getBlockSize();
417 float freePercent
= (float) this.getNodeFreeSpace()
418 / (float) (blockSize
- this.getTotalHeaderSize())
420 return (long) (100L - freePercent
);
423 protected static final byte boolToByte(boolean thebool
) {
430 static final boolean byteToBool(byte thebyte
) {
431 return (thebyte
== (byte) 1);
435 * @name Debugging functions
438 @SuppressWarnings("nls")
440 public String
toString() {
441 /* Only used for debugging, shouldn't be externalized */
442 StringBuffer buf
= new StringBuffer("Node #" + sequenceNumber
+ ", ");
443 buf
.append(this.toStringSpecific());
444 buf
.append(intervals
.size() + " intervals (" + this.getNodeUsagePRC()
447 buf
.append("[" + this.nodeStart
+ " - ");
449 buf
= buf
.append("" + this.nodeEnd
+ "]");
451 buf
= buf
.append("...]");
453 return buf
.toString();
457 * Debugging function that prints out the contents of this node
460 * PrintWriter in which we will print the debug output
462 @SuppressWarnings("nls")
463 void debugPrintIntervals(PrintWriter writer
) {
464 /* Only used for debugging, shouldn't be externalized */
465 writer
.println("Node #" + sequenceNumber
+ ":");
467 /* Array of children */
468 if (this.getNodeType() == 1) { /* Only Core Nodes can have children */
469 CoreNode thisNode
= (CoreNode
) this;
470 writer
.print(" " + thisNode
.getNbChildren() + " children");
471 if (thisNode
.getNbChildren() >= 1) {
472 writer
.print(": [ " + thisNode
.getChild(0));
473 for (int i
= 1; i
< thisNode
.getNbChildren(); i
++) {
474 writer
.print(", " + thisNode
.getChild(i
));
481 /* List of intervals in the node */
482 writer
.println(" Intervals contained:");
483 for (int i
= 0; i
< intervals
.size(); i
++) {
484 writer
.println(intervals
.get(i
).toString());
486 writer
.println('\n');
492 * 16 - 2x long (start time, end time)
494 * 16 - 4x int (seq number, parent seq number, intervalcount, strings
497 * 1 - byte (done or not)
499 protected static final int COMMON_HEADER_SIZE
= 34;
501 // ------------------------------------------------------------------------
503 // ------------------------------------------------------------------------
505 protected abstract byte getNodeType();
507 protected abstract int getTotalHeaderSize();
509 protected abstract void readSpecificHeader(ByteBuffer buffer
);
511 protected abstract void writeSpecificHeader(ByteBuffer buffer
);
513 protected abstract String
toStringSpecific();