</attributes>
</classpathentry>
<classpathentry kind="src" path="src"/>
+ <classpathentry kind="src" path="stubs"/>
<classpathentry kind="output" path="bin"/>
</classpath>
Bundle-RequiredExecutionEnvironment: JavaSE-1.8
Require-Bundle: org.junit;bundle-version="4.0.0"
Export-Package: org.eclipse.tracecompass.internal.datastore.core.condition;x-internal:=true,
+ org.eclipse.tracecompass.internal.datastore.core.historytree;x-internal:=true,
org.eclipse.tracecompass.internal.datastore.core.serialization;x-internal:=true,
- org.eclipse.tracecompass.internal.provisional.datastore.core.condition;x-internal:=true
+ org.eclipse.tracecompass.internal.provisional.datastore.core.condition;x-internal:=true,
+ org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;x-internal:=true
Import-Package: com.google.common.collect
# http://www.eclipse.org/legal/epl-v10.html
###############################################################################
-source.. = src/
+source.. = src/,\
+ stubs/
output.. = bin/
bin.includes = META-INF/,\
.,\
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.datastore.core.historytree;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.eclipse.jdt.annotation.Nullable;
+import org.eclipse.tracecompass.internal.datastore.core.historytree.HtIo;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HistoryTreeStub;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HtTestUtils;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode.NodeType;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test the {@link HtIo} class
+ *
+ * @author Geneviève Bastien
+ */
+public class HtIoTest {
+
+ /** Tree configuration shared by every test: block size and max children */
+ private static final int BLOCKSIZE = HtTestUtils.BLOCKSIZE;
+ private static final int NB_CHILDREN = 3;
+
+ /** Object under test; created fresh in setUp(), file deleted in cleanUp() */
+ private @Nullable HtIo<HTInterval, HTNode<HTInterval>> fHtIo;
+ private @Nullable File fStateFile;
+
+ /**
+ * Construct the tree IO Object
+ *
+ * @throws IOException
+ * Exception with the file
+ */
+ @Before
+ public void setUp() throws IOException {
+ File file = File.createTempFile("tmp", null);
+ assertNotNull(file);
+ fStateFile = file;
+
+ // NOTE(review): the boolean presumably means "create a new file"
+ // (reader constructors below pass 'false') — confirm with HtIo javadoc.
+ // Use the local BLOCKSIZE alias for consistency with the other two
+ // HtIo constructions in this class.
+ fHtIo = new HtIo<>(file,
+ BLOCKSIZE,
+ NB_CHILDREN,
+ true,
+ HtTestUtils.READ_FACTORY,
+ HistoryTreeStub.NODE_FACTORY);
+ }
+
+ /**
+ * Delete the file after test
+ */
+ @After
+ public void cleanUp() {
+ HtIo<HTInterval, HTNode<HTInterval>> htIo = fHtIo;
+ if (htIo != null) {
+ htIo.deleteFile();
+ }
+ }
+
+ /** Create a CORE node with the given sequence number and parent, start time 0 */
+ private static HTNode<HTInterval> createCoreNode(int seqNum, int parentNum) {
+ HTNode<HTInterval> node = HistoryTreeStub.NODE_FACTORY.createNode(NodeType.CORE,
+ BLOCKSIZE, NB_CHILDREN, seqNum, parentNum, 0L);
+ return node;
+ }
+
+ /** Create a LEAF node with the given sequence number and parent, start time 0 */
+ private static HTNode<HTInterval> createLeafNode(int seqNum, int parentNum) {
+ HTNode<HTInterval> node = HistoryTreeStub.NODE_FACTORY.createNode(NodeType.LEAF,
+ BLOCKSIZE, NB_CHILDREN, seqNum, parentNum, 0L);
+ return node;
+ }
+
+ /**
+ * Test reading and writing nodes
+ *
+ * @throws IOException
+ * Exception thrown by the file
+ */
+ @Test
+ public void testReadWriteNode() throws IOException {
+ HtIo<HTInterval, HTNode<HTInterval>> htio = fHtIo;
+ assertNotNull(htio);
+
+ int coreNodeSeqNum = 0;
+ int leafNodeSeqNum = 1;
+
+ // Add a core node and a leaf node
+ HTNode<HTInterval> coreNode = createCoreNode(coreNodeSeqNum, -1);
+ assertFalse(HtIo.isInCache(htio, coreNodeSeqNum));
+ htio.writeNode(coreNode);
+ assertTrue(HtIo.isInCache(htio, coreNodeSeqNum));
+
+ HTNode<HTInterval> leafNode = createLeafNode(leafNodeSeqNum, coreNodeSeqNum);
+ assertFalse(HtIo.isInCache(htio, leafNodeSeqNum));
+ htio.writeNode(leafNode);
+ assertTrue(HtIo.isInCache(htio, leafNodeSeqNum));
+
+ // Now read the nodes from the same htio object, they should be in cache
+ HTNode<HTInterval> coreRead = htio.readNode(coreNodeSeqNum);
+ assertEquals(coreNode, coreRead);
+ HTNode<HTInterval> leafRead = htio.readNode(leafNodeSeqNum);
+ assertEquals(leafNode, leafRead);
+
+ // Invalidate the cache (static, so it affects all HtIo instances)
+ HtIo.clearCache();
+
+ // Re-read the nodes, they should now be read from disk and be in the
+ // cache after
+ assertFalse(HtIo.isInCache(htio, coreNodeSeqNum));
+ coreRead = htio.readNode(coreNodeSeqNum);
+ assertEquals(coreNode, coreRead);
+ assertTrue(HtIo.isInCache(htio, coreNodeSeqNum));
+
+ // Read the leaf node from disk
+ assertFalse(HtIo.isInCache(htio, leafNodeSeqNum));
+ leafRead = htio.readNode(leafNodeSeqNum);
+ assertEquals(leafNode, leafRead);
+ assertTrue(HtIo.isInCache(htio, leafNodeSeqNum));
+
+ // Close the file and reopen a new htio object in "reader" mode
+ htio.closeFile();
+
+ assertNotNull(fStateFile);
+ htio = new HtIo<>(fStateFile,
+ BLOCKSIZE,
+ NB_CHILDREN,
+ false,
+ HtTestUtils.READ_FACTORY,
+ HistoryTreeStub.NODE_FACTORY);
+
+ fHtIo = htio;
+
+ // Read the core node from the disk
+ assertFalse(HtIo.isInCache(htio, coreNodeSeqNum));
+ coreRead = htio.readNode(coreNodeSeqNum);
+ assertEquals(coreNode, coreRead);
+
+ // Read the leaf node from the disk
+ assertFalse(HtIo.isInCache(htio, leafNodeSeqNum));
+ leafRead = htio.readNode(leafNodeSeqNum);
+ assertEquals(leafNode, leafRead);
+
+ // Re-read the nodes, they should have been read from the cache
+ assertTrue(HtIo.isInCache(htio, coreNodeSeqNum));
+ coreRead = htio.readNode(coreNodeSeqNum);
+ assertEquals(coreNode, coreRead);
+
+ // Read the leaf node from cache
+ assertTrue(HtIo.isInCache(htio, leafNodeSeqNum));
+ leafRead = htio.readNode(leafNodeSeqNum);
+ assertEquals(leafNode, leafRead);
+ }
+
+ /**
+ * Test that the section at the end of the file where extra data can be
+ * written works well
+ *
+ * @throws IOException
+ * Exception thrown by the file
+ */
+ @Test
+ public void testExtraDataSave() throws IOException {
+ // Offset 2 is the first node position after the 2 written nodes
+ writeBufferAtNodePos(2);
+ }
+
+ /**
+ * Test that writing at the beginning of the file works well
+ *
+ * @throws IOException
+ * Exception thrown by the file
+ */
+ @Test
+ public void testHeaderDataSave() throws IOException {
+ // Offset -1 presumably addresses the file header, before node 0 —
+ // confirm with HtIo.getFileWriter javadoc
+ writeBufferAtNodePos(-1);
+ }
+
+ /**
+ * Test that writing data far beyond the node section end works.
+ *
+ * @throws IOException
+ * Exception thrown by the file
+ */
+ @Test
+ public void testTooFarData() throws IOException {
+ // Only 2 nodes are written, so offset 6 is well past the node section
+ writeBufferAtNodePos(6);
+ }
+
+ /**
+ * Write 3 integers at the given node position, reopen the file and verify
+ * they can be read back from the same position.
+ *
+ * @param nodeOffset
+ * Position, in node slots, at which to write (-1 for the header)
+ */
+ private void writeBufferAtNodePos(int nodeOffset) throws IOException {
+ HtIo<HTInterval, HTNode<HTInterval>> htio = fHtIo;
+ assertNotNull(htio);
+
+ int coreNodeSeqNum = 0;
+ int leafNodeSeqNum = 1;
+
+ // Add a core node and a leaf node
+ HTNode<HTInterval> coreNode = createCoreNode(coreNodeSeqNum, -1);
+ assertFalse(HtIo.isInCache(htio, coreNodeSeqNum));
+ htio.writeNode(coreNode);
+ assertTrue(HtIo.isInCache(htio, coreNodeSeqNum));
+
+ HTNode<HTInterval> leafNode = createLeafNode(leafNodeSeqNum, coreNodeSeqNum);
+ assertFalse(HtIo.isInCache(htio, leafNodeSeqNum));
+ htio.writeNode(leafNode);
+ assertTrue(HtIo.isInCache(htio, leafNodeSeqNum));
+
+ // Write 3 integers at some position of the file
+ ByteBuffer buffer = ByteBuffer.allocate(12);
+ buffer.putInt(32);
+ buffer.putInt(33);
+ buffer.putInt(232);
+ buffer.flip();
+
+ try (FileOutputStream fcOut = htio.getFileWriter(nodeOffset)) {
+ fcOut.write(buffer.array());
+ }
+
+ // Close the file and reopen a new htio object
+ htio.closeFile();
+ assertNotNull(fStateFile);
+ htio = new HtIo<>(fStateFile,
+ BLOCKSIZE,
+ NB_CHILDREN,
+ false,
+ HtTestUtils.READ_FACTORY,
+ HistoryTreeStub.NODE_FACTORY);
+
+ fHtIo = htio;
+
+ // Read the same 3 integers at the same position in the file
+ byte[] bytes = new byte[12];
+ htio.supplyATReader(nodeOffset).read(bytes);
+ buffer = ByteBuffer.wrap(bytes);
+ assertEquals(32, buffer.getInt());
+ assertEquals(33, buffer.getInt());
+ assertEquals(232, buffer.getInt());
+ }
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.channels.ClosedChannelException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.eclipse.jdt.annotation.Nullable;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * This is the base class for all history tree implementation tests. Specific
+ * tree's tests can extend this class to have some basic tests.
+ *
+ * It tests the {@link AbstractHistoryTree} class. This test class will fill
+ * nodes only with sequential objects, so that each extending test for trees
+ * will have to add the tests and filling methods that correspond to their own
+ * use cases.
+ *
+ * @author Geneviève Bastien
+ * @param <E>
+ * The type of objects that will be saved in the tree
+ * @param <N>
+ * The base type of the nodes of this tree
+ */
+public abstract class AbstractHistoryTreeTestBase<E extends IHTInterval, N extends HTNode<E>> {
+
+ /** Temporary history file, created before each test and deleted after */
+ private @Nullable File fTempFile;
+
+ /**
+ * Create the temporary file for this history tree
+ */
+ @Before
+ public void setupTest() {
+ try {
+ fTempFile = File.createTempFile("tmpStateSystem", null);
+ } catch (IOException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ /**
+ * Delete the temporary history tree file after the test
+ */
+ @After
+ public void cleanup() {
+ if (fTempFile != null) {
+ fTempFile.delete();
+ }
+ }
+
+ /**
+ * Setup a history tree.
+ *
+ * @param maxChildren
+ * The max number of children per node in the tree (tree config
+ * option)
+ * @param treeStart
+ * The start of the tree
+ * @return The new history tree
+ */
+ protected AbstractHistoryTree<E, N> setupSmallTree(int maxChildren, long treeStart) {
+ AbstractHistoryTree<E, N> ht = null;
+ try {
+ File newFile = fTempFile;
+ assertNotNull(newFile);
+
+ // FIX: pass the 'treeStart' parameter instead of the hard-coded 1
+ // that previously ignored it (all existing callers pass 1, so
+ // behavior of current tests is unchanged)
+ ht = createHistoryTree(newFile,
+ HtTestUtils.BLOCKSIZE,
+ maxChildren, /* Number of children */
+ 1, /* Provider version */
+ treeStart); /* Start time */
+
+ } catch (IOException e) {
+ fail(e.getMessage());
+ }
+
+ assertNotNull(ht);
+ return ht;
+ }
+
+ /**
+ * Create the history tree to test
+ *
+ * @param stateHistoryFile
+ * The name of the history file
+ * @param blockSize
+ * The size of each "block" on disk in bytes. One node will
+ * always fit in one block. It should be at least 4096.
+ * @param maxChildren
+ * The maximum number of children allowed per core (non-leaf)
+ * node.
+ * @param providerVersion
+ * The version of the state provider. If a file already exists,
+ * and their versions match, the history file will not be rebuilt
+ * uselessly.
+ * @param treeStart
+ * The start time of the history
+ * @return The history tree stub
+ * @throws IOException
+ * Any exception thrown by the history tree
+ */
+ protected abstract AbstractHistoryTree<E, N> createHistoryTree(File stateHistoryFile,
+ int blockSize,
+ int maxChildren,
+ int providerVersion,
+ long treeStart) throws IOException;
+
+ /**
+ * "Reader" constructor : instantiate a history tree from an existing tree
+ * file on disk
+ *
+ * @param existingStateFile
+ * Path/filename of the history-file we are to open
+ * @param expProviderVersion
+ * The expected version of the state provider
+ * @return The history tree stub
+ * @throws IOException
+ * If an error happens reading the file
+ */
+ protected abstract AbstractHistoryTree<E, N> createHistoryTree(
+ File existingStateFile,
+ int expProviderVersion) throws IOException;
+
+ /**
+ * Create an interval that fits in the tree with the given start/end time
+ *
+ * @param start
+ * The start time
+ * @param end
+ * The end time
+ * @return The object
+ */
+ protected abstract E createInterval(long start, long end);
+
+ /**
+ * Create a reader history tree
+ *
+ * @return The history tree stub
+ * @throws IOException
+ * If an error happened reading the file
+ */
+ protected final AbstractHistoryTree<E, N> createHistoryTreeReader() throws IOException {
+ File tempFile = fTempFile;
+ assertNotNull(tempFile);
+ // Provider version 1 matches what setupSmallTree used to write the file
+ return createHistoryTree(tempFile, 1);
+ }
+
+ /**
+ * Setup a history tree with config MAX_CHILDREN = 3 and start time of 1
+ *
+ * @return A new history tree
+ */
+ protected AbstractHistoryTree<E, N> setupSmallTree() {
+ return setupSmallTree(3, 1);
+ }
+
+ /**
+ * Add sequential elements to the history up to a certain size. Any element
+ * that would go above the fixed limit should not be added
+ *
+ * @param ht
+ * The tree to which to add values
+ * @param fillSize
+ * The limit on the size of the elements to add
+ * @param start
+ * The start time of the values
+ * @return The latest end time
+ */
+ protected abstract long fillValues(AbstractHistoryTree<E, N> ht, int fillSize, long start);
+
+ /**
+ * Insert objects in the tree to fill the current leaf node to capacity,
+ * without exceeding it.
+ *
+ * This guarantees that the following insertion will create new nodes.
+ *
+ * @param ht
+ * The history tree in which to insert
+ * @param leafNodeStart
+ * The start time of the values to insert; should be greater than
+ * or equal to the current leaf node's start so the intervals go
+ * in the leaf node.
+ * @return The latest end time of the inserted values (from fillValues)
+ */
+ private long fillNextLeafNode(AbstractHistoryTree<E, N> ht, long leafNodeStart) {
+ int prevCount = ht.getNodeCount();
+ int prevDepth = ht.getDepth();
+
+ /* Fill the following leaf node */
+ N node = ht.getLatestLeaf();
+ long ret = fillValues(ht, node.getNodeFreeSpace(), leafNodeStart);
+
+ /* Make sure we haven't changed the depth or node count */
+ assertEquals(prevCount, ht.getNodeCount());
+ assertEquals(prevDepth, ht.getDepth());
+
+ return ret;
+ }
+
+ /**
+ * Test that nodes are filled
+ *
+ * It fills nodes with sequential elements, so that leafs should be filled.
+ */
+ @Test
+ public void testSequentialFill() {
+ AbstractHistoryTree<E, N> ht = setupSmallTree();
+
+ // Make sure it is empty
+ N node = ht.getLatestLeaf();
+ assertEquals(0, node.getNodeUsagePercent());
+
+ /* Fill ~10% of the node with elements */
+ int initialFreeSpace = node.getNodeFreeSpace();
+ int limit = node.getNodeFreeSpace() / 10;
+ long start = fillValues(ht, limit, 1);
+ assertTrue(node.getNodeFreeSpace() > initialFreeSpace - limit);
+ assertTrue(node.getNodeFreeSpace() < initialFreeSpace);
+
+ /* Add elements up to ~20% */
+ start = fillValues(ht, limit, start);
+ assertTrue(node.getNodeFreeSpace() > initialFreeSpace - 2 * limit);
+ assertTrue(node.getNodeFreeSpace() < initialFreeSpace - limit);
+
+ /* Add elements up to ~30% */
+ start = fillValues(ht, limit, start);
+ assertTrue(node.getNodeFreeSpace() > initialFreeSpace - 3 * limit);
+ assertTrue(node.getNodeFreeSpace() < initialFreeSpace - 2 * limit);
+
+ /* Add elements up to ~40% */
+ start = fillValues(ht, limit, start);
+ assertTrue(node.getNodeFreeSpace() > initialFreeSpace - 4 * limit);
+ assertTrue(node.getNodeFreeSpace() < initialFreeSpace - 3 * limit);
+
+ // Assert the integrity of the tree
+ ht.closeTree(ht.getTreeEnd());
+ HtTestUtils.assertTreeIntegrity(ht);
+
+ }
+
+ /**
+ * Test the addition of new nodes to the tree and make sure the tree is
+ * built with the right structure
+ */
+ @Test
+ public void testDepth() {
+ AbstractHistoryTree<E, N> ht = setupSmallTree();
+
+ /* Fill a first node */
+ N node = ht.getLatestLeaf();
+ long start = 1;
+ long time = fillValues(ht, node.getNodeFreeSpace(), start);
+
+ /*
+ * Add intervals that should add a sibling to the node and a new root
+ * node
+ */
+ assertEquals(1, ht.getNodeCount());
+ assertEquals(1, ht.getDepth());
+ ht.insert(createInterval(time, time + 1));
+ time += 1;
+ assertEquals(3, ht.getNodeCount());
+ assertEquals(2, ht.getDepth());
+
+ /* Fill the latest leaf node (2nd child) */
+ node = ht.getLatestLeaf();
+ time = fillValues(ht, node.getNodeFreeSpace(), time);
+
+ /*
+ * Add an interval that should add another sibling to the previous nodes
+ */
+ ht.insert(createInterval(time, time + 1));
+ time += 1;
+ assertEquals(4, ht.getNodeCount());
+ assertEquals(2, ht.getDepth());
+
+ /* Fill the latest leaf node (3rd and last child) */
+ node = ht.getLatestLeaf();
+ time = fillValues(ht, node.getNodeFreeSpace(), time);
+
+ /* The new node created here should generate a new branch */
+ ht.insert(createInterval(time, time + 1));
+ time += 1;
+ assertEquals(7, ht.getNodeCount());
+ assertEquals(3, ht.getDepth());
+
+ /*
+ * Completely fill the second level, such that there will be a 4th level
+ * added
+ */
+ while (ht.getDepth() < 4) {
+ time = fillNextLeafNode(ht, time);
+ ht.insert(createInterval(time, time + 1));
+ }
+ assertEquals(4, ht.getDepth());
+
+ // Assert the integrity of the tree
+ ht.closeTree(ht.getTreeEnd());
+ HtTestUtils.assertTreeIntegrity(ht);
+ }
+
+ /**
+ * Make sure the node sequence numbers and parent pointers are set correctly
+ * when new nodes are created.
+ *
+ * <p>
+ * We are building a tree whose node sequence numbers will look like this at
+ * the end:
+ * </p>
+ *
+ * <pre>
+ * 3
+ * / \
+ * 1 4
+ * / \ \
+ * 0 2 5
+ * </pre>
+ *
+ * <p>
+ * However while building, the parent pointers may be different.
+ * </p>
+ *
+ * @throws ClosedChannelException
+ * If the file channel is closed
+ */
+ @Test
+ public void testNodeSequenceNumbers() throws ClosedChannelException {
+
+ long time = 1;
+
+ /* maxChildren = 2 makes branches split quickly */
+ AbstractHistoryTree<E, N> ht = setupSmallTree(2, time);
+ time = fillNextLeafNode(ht, time);
+
+ /* There is only one node in the tree at this point, with no parent */
+ List<N> branch = ht.getLatestBranch();
+ assertEquals(1, branch.size());
+ assertEquals(0, branch.get(0).getSequenceNumber());
+ assertEquals(-1, branch.get(0).getParentSequenceNumber());
+
+ /* Create a new branch */
+ ht.insert(createInterval(time, time + 1));
+ time = fillNextLeafNode(ht, time + 1);
+ assertEquals(3, ht.getNodeCount());
+ assertEquals(2, ht.getDepth());
+
+ /* Make sure the first node's parent was updated */
+ N node = ht.getNode(0);
+ assertEquals(0, node.getSequenceNumber());
+ assertEquals(1, node.getParentSequenceNumber());
+
+ /* Make sure the new branch is all right */
+ branch = ht.getLatestBranch();
+ assertEquals(2, branch.size());
+ assertEquals(1, branch.get(0).getSequenceNumber());
+ assertEquals(-1, branch.get(0).getParentSequenceNumber());
+ assertEquals(2, branch.get(1).getSequenceNumber());
+ assertEquals(1, branch.get(1).getParentSequenceNumber());
+
+ /* Create a third branch */
+ ht.insert(createInterval(time, time + 1));
+ time = fillNextLeafNode(ht, time + 1);
+ assertEquals(6, ht.getNodeCount());
+ assertEquals(3, ht.getDepth());
+
+ /* Make sure all previous nodes are still correct */
+ node = ht.getNode(0);
+ assertEquals(0, node.getSequenceNumber());
+ assertEquals(1, node.getParentSequenceNumber());
+ node = ht.getNode(1);
+ assertEquals(1, node.getSequenceNumber());
+ assertEquals(3, node.getParentSequenceNumber());
+ node = ht.getNode(2);
+ assertEquals(2, node.getSequenceNumber());
+ assertEquals(1, node.getParentSequenceNumber());
+
+ /* Verify the contents of the new latest branch */
+ branch = ht.getLatestBranch();
+ assertEquals(3, branch.size());
+ assertEquals(3, branch.get(0).getSequenceNumber());
+ assertEquals(-1, branch.get(0).getParentSequenceNumber());
+ assertEquals(4, branch.get(1).getSequenceNumber());
+ assertEquals(3, branch.get(1).getParentSequenceNumber());
+ assertEquals(5, branch.get(2).getSequenceNumber());
+ assertEquals(4, branch.get(2).getParentSequenceNumber());
+
+ // Assert the integrity of the tree
+ ht.closeTree(ht.getTreeEnd());
+ HtTestUtils.assertTreeIntegrity(ht);
+ }
+
+ /**
+ * Test reading a tree and make sure it is identical to the original one
+ *
+ * <p>
+ * We are building a tree whose node sequence numbers will look like this at
+ * the end:
+ * </p>
+ *
+ * <pre>
+ * 4
+ * / \
+ * 1 5
+ * / | \ \
+ * 0 2 3 6
+ * </pre>
+ *
+ * @throws IOException
+ * Exceptions with the HT file
+ *
+ */
+ @Test
+ public void testReadTree() throws IOException {
+
+ long time = 1;
+
+ // Build the tree for the test
+ AbstractHistoryTree<E, N> ht = setupSmallTree();
+ time = fillNextLeafNode(ht, time);
+
+ /* Create a new branch */
+ ht.insert(createInterval(time, time + 1));
+ time = fillNextLeafNode(ht, time + 1);
+
+ /* Fill the third child */
+ ht.insert(createInterval(time, time + 1));
+ time = fillNextLeafNode(ht, time + 1);
+
+ /* Make sure the tree has the expected structure at this point */
+ assertEquals(4, ht.getNodeCount());
+ assertEquals(2, ht.getDepth());
+
+ /* Add the third branch */
+ ht.insert(createInterval(time, time + 1));
+ time = fillNextLeafNode(ht, time + 1);
+
+ /* Make sure the tree has the expected structure */
+ assertEquals(7, ht.getNodeCount());
+ assertEquals(3, ht.getDepth());
+
+ // Close the tree and save the nodes for later
+ ht.closeTree(time + 1);
+
+ List<N> expectedNodes = new ArrayList<>(ht.getNodeCount());
+ for (int i = 0; i < ht.getNodeCount(); i++) {
+ expectedNodes.add(ht.getNode(i));
+ }
+ ht.closeFile();
+
+ // Create a reader history tree
+ ht = createHistoryTreeReader();
+
+ // Make sure the number of nodes and depth is as expected
+ assertEquals(7, ht.getNodeCount());
+ assertEquals(3, ht.getDepth());
+
+ // Each node read from disk must equal the one that was written
+ for (int i = 0; i < ht.getNodeCount(); i++) {
+ assertEquals(expectedNodes.get(i), ht.readNode(i));
+ }
+
+ // Assert the integrity of the read tree
+ HtTestUtils.assertTreeIntegrity(ht);
+ }
+
+ /**
+ * Test the tree end time
+ */
+ @Test
+ public void testTreeEnd() {
+ long time = 1;
+
+ // Check the end time at the start
+ AbstractHistoryTree<E, N> ht = setupSmallTree();
+ assertEquals(time, ht.getTreeEnd());
+
+ // Fill a node and check the end
+ time = fillNextLeafNode(ht, time);
+ assertEquals(time, ht.getTreeEnd());
+
+ // Add an object that ends before the current tree end; the end time
+ // should not move backwards
+ E object = createInterval(time - 10, time - 5);
+ ht.insert(object);
+ assertEquals(time, ht.getTreeEnd());
+
+ // Assert the tree integrity
+ ht.closeTree(ht.getTreeEnd());
+ HtTestUtils.assertTreeIntegrity(ht);
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree.IHTNodeFactory;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HistoryTreeStub;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode.NodeType;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTIntervalReader;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Basic test class for core nodes. These tests will test nodes with children.
+ * We are testing only the current node, so it does not matter if children are
+ * core or leaf nodes (here they would be core). Specific implementations of
+ * core nodes can extend this method to add specific tests for their use cases.
+ *
+ * @author Geneviève Bastien
+ * @param <E>
+ * The type of element to add in the nodes
+ * @param <N>
+ * The type of node to test
+ */
+@RunWith(Parameterized.class)
+public class HTCoreNodeTest<E extends IHTInterval, N extends HTNode<E>> extends HTNodeTest<E, N> {
+
+ /**
+ * @return The arrays of parameters
+ */
+ @Parameters(name = "{index}: {0}")
+ public static Iterable<Object[]> getParameters() {
+ return Arrays.asList(new Object[][] {
+ { "Core node",
+ // Header = common header + child count + one int per child slot
+ HTNode.COMMON_HEADER_SIZE + Integer.BYTES + Integer.BYTES * NB_CHILDREN,
+ HistoryTreeStub.NODE_FACTORY,
+ HtTestUtils.READ_FACTORY,
+ HTNodeTest.BASE_OBJ_FACTORY
+ },
+ });
+ }
+
+ /**
+ * Constructor
+ *
+ * @param name
+ * The name of the test
+ * @param headerSize
+ * The size of the header for this node type
+ * @param factory
+ * The node factory to use
+ * @param readFactory
+ * The factory to read element data from disk
+ * @param objFactory
+ * The factory to create objects for this tree
+ * @throws IOException
+ * Any exception occurring with the file
+ */
+ public HTCoreNodeTest(String name, int headerSize, IHTNodeFactory<E, N> factory,
+ IHTIntervalReader<E> readFactory, ObjectFactory<E> objFactory) throws IOException {
+ super(name, headerSize, NodeType.CORE, factory, readFactory, objFactory);
+ }
+
+ /**
+ * Test getting existing children
+ */
+ @Test
+ public void testGetChild2() {
+ long start = 10L;
+ // Parent node: sequence number 0, no parent
+ HTNode<E> node = newNode(0, -1, start);
+ assertEquals(0, node.getNbChildren());
+
+ // Add the child node to the parent and make sure it can be retrieved
+ HTNode<E> childNode = newNode(1, 0, start);
+ node.linkNewChild(childNode);
+ assertEquals(1, node.getNbChildren());
+ assertEquals(childNode.getSequenceNumber(), node.getChild(0));
+
+ // Add a second child and retrieve both children
+ HTNode<E> childNode2 = newNode(2, 0, start);
+ node.linkNewChild(childNode2);
+ assertEquals(2, node.getNbChildren());
+ assertEquals(childNode.getSequenceNumber(), node.getChild(0));
+ assertEquals(childNode2.getSequenceNumber(), node.getChild(1));
+
+ // Add a third child with a non-sequential number and retrieve all
+ // children
+ HTNode<E> childNode3 = newNode(10, 0, start);
+ node.linkNewChild(childNode3);
+ assertEquals(3, node.getNbChildren());
+ assertEquals(childNode.getSequenceNumber(), node.getChild(0));
+ assertEquals(childNode2.getSequenceNumber(), node.getChild(1));
+ assertEquals(childNode3.getSequenceNumber(), node.getChild(2));
+ }
+
+ /**
+ * Test getting the latest child from a childless node
+ */
+ @Test(expected = IndexOutOfBoundsException.class)
+ @Override
+ public void testGetLatestChild() {
+ // For a core node with no children, the base-class scenario is
+ // expected to throw instead of returning a value
+ super.testGetLatestChild();
+ }
+
+ /**
+ * Test the {@link HTNode#getLatestChild()} method with children
+ */
+ @Test
+ public void testGetLatestChild2() {
+ long start = 10L;
+ // Parent node: sequence number 0, no parent
+ HTNode<E> node = newNode(0, -1, start);
+ assertEquals(0, node.getNbChildren());
+
+ // Add the child node to the parent and make sure it can be retrieved
+ HTNode<E> childNode = newNode(1, 0, start);
+ node.linkNewChild(childNode);
+ assertEquals(1, node.getNbChildren());
+ assertEquals(childNode.getSequenceNumber(), node.getLatestChild());
+
+ // Add a second child that becomes the latest child
+ HTNode<E> childNode2 = newNode(2, 0, start);
+ node.linkNewChild(childNode2);
+ assertEquals(2, node.getNbChildren());
+ assertEquals(childNode2.getSequenceNumber(), node.getLatestChild());
+
+ // Add a third child that becomes the latest child
+ HTNode<E> childNode3 = newNode(10, 0, start);
+ node.linkNewChild(childNode3);
+ assertEquals(3, node.getNbChildren());
+ assertEquals(childNode3.getSequenceNumber(), node.getLatestChild());
+ }
+
+ @Test
+ @Override
+ public void testLinkNewChild() throws IOException {
+
+ E object = createObject(10L, 11L);
+ long time = 10L;
+
+ // Create and fill 2 nodes
+ int nbObjects = (HtTestUtils.BLOCKSIZE - getHeaderSize()) / object.getSizeOnDisk();
+ HTNode<E> node = newNode(0, -1, time);
+ fillNode(node, nbObjects, time);
+ HTNode<E> childNode = newNode(1, 0, time);
+ fillNode(childNode, nbObjects, time);
+
+ // Add the child node to the parent and make sure it was added
+ node.linkNewChild(childNode);
+ assertEquals(0, childNode.getNbChildren());
+ assertEquals(1, node.getNbChildren());
+
+ // Close the first child, then fill and add a second one
+ time = time + nbObjects + 1;
+ childNode.closeThisNode(time);
+
+ HTNode<E> childNode2 = newNode(2, 0, time);
+ fillNode(childNode2, nbObjects, time);
+ node.linkNewChild(childNode2);
+ assertEquals(0, childNode2.getNbChildren());
+ assertEquals(2, node.getNbChildren());
+
+ // Close the second child, then fill and add a third one
+ time = time + nbObjects + 1;
+ childNode2.closeThisNode(time);
+
+ HTNode<E> childNode3 = newNode(3, 0, time);
+ fillNode(childNode3, nbObjects, time);
+ node.linkNewChild(childNode3);
+ assertEquals(0, childNode3.getNbChildren());
+ assertEquals(3, node.getNbChildren());
+
+ time = time + nbObjects + 1;
+
+ // Try to add a 4th child, there should be no place for it
+ // (NB_CHILDREN is 3)
+ HTNode<E> noPlace = newNode(4, 0, time);
+ Exception exception = null;
+ try {
+ node.linkNewChild(noPlace);
+ } catch (IllegalStateException e) {
+ exception = e;
+ }
+ assertNotNull(exception);
+
+ // Close the last child and first node
+ childNode3.closeThisNode(time);
+ node.closeThisNode(time);
+
+ // Write them all to disk
+ write(node);
+ write(childNode);
+ write(childNode2);
+ write(childNode3);
+ assertTrue(node.isOnDisk());
+ assertTrue(childNode.isOnDisk());
+ assertTrue(childNode2.isOnDisk());
+ assertTrue(childNode3.isOnDisk());
+
+ // Read the node and make sure its data is equal to that of the original
+ // node
+ HTNode<E> readNode = read(0);
+ HTNode<E> readChildNode = read(1);
+ HTNode<E> readChildNode2 = read(2);
+ HTNode<E> readChildNode3 = read(3);
+ assertTrue(readNode.isOnDisk());
+ assertEquals(node, readNode);
+
+ assertTrue(readChildNode.isOnDisk());
+ assertEquals(childNode, readChildNode);
+
+ assertTrue(readChildNode2.isOnDisk());
+ assertEquals(childNode2, readChildNode2);
+
+ assertTrue(readChildNode3.isOnDisk());
+ assertEquals(childNode3, readChildNode3);
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.tracecompass.internal.datastore.core.historytree.HtIo;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree.IHTNodeFactory;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HistoryTreeStub;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode.NodeType;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTIntervalReader;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Test the {@link HTNode} base class for nodes. This class has the different
+ * node types as parameter. It tests specifically the leaf node methods.
+ *
+ * @author Geneviève Bastien
+ * @param <E>
+ * The type of element to add in the nodes
+ * @param <N>
+ * The type of node to test
+ */
+@RunWith(Parameterized.class)
+public class HTNodeTest<E extends IHTInterval, N extends HTNode<E>> {
+
+ /**
+ * Factory to create new objects to insert in the nodes
+ *
+ * @param <T>
+ * The type of object to create
+ */
+ protected interface ObjectFactory<T extends IHTInterval> {
+ /**
+ * Create an object that fits in the tree with the given start/end time
+ *
+ * @param start
+ * The start time
+ * @param end
+ * The end time
+ * @return The object
+ */
+ T createObject(long start, long end);
+ }
+
+ /**
+ * A factory to create base objects for test
+ */
+ protected static final ObjectFactory<HTInterval> BASE_OBJ_FACTORY = (s, e) -> new HTInterval(s, e);
+
+ /** The nodes' block size */
+ protected static final int BLOCKSIZE = HtTestUtils.BLOCKSIZE;
+ /** The maximum number of children for the nodes */
+ protected static final int NB_CHILDREN = 3;
+ /** The start time used for the nodes under test */
+ private static final long TREE_START = 10L;
+
+ /**
+ * @return The arrays of parameters
+ */
+ @Parameters(name = "{index}: {0}")
+ public static Iterable<Object[]> getParameters() {
+ return Arrays.asList(new Object[][] {
+ { "Leaf node",
+ HTNode.COMMON_HEADER_SIZE,
+ HistoryTreeStub.NODE_FACTORY,
+ HtTestUtils.READ_FACTORY, BASE_OBJ_FACTORY
+ },
+ });
+ }
+
+ /** IO object used to read/write the nodes to a temporary file */
+ private final HtIo<E, N> fHtIo;
+ /** Header size of the node type under test */
+ private final int fHeaderSize;
+ /** The node type under test */
+ private final NodeType fType;
+ /** Reader for the elements stored in the nodes */
+ private final IHTIntervalReader<E> fHtObjectReader;
+ /** Factory creating the nodes under test */
+ private final IHTNodeFactory<E, N> fNodeFactory;
+ /** Factory creating the elements to store in the nodes */
+ private final ObjectFactory<E> fObjectFactory;
+
+ /**
+ * Constructor
+ *
+ * @param name
+ * The name of the test
+ * @param headerSize
+ * The size of the header for this node type
+ * @param factory
+ * The node factory to use
+ * @param objReader
+ * The factory to read element data from disk
+ * @param objFactory
+ * The factory to create objects for this tree
+ * @throws IOException
+ * Any exception occurring with the file
+ */
+ public HTNodeTest(String name,
+ int headerSize,
+ IHTNodeFactory<E, N> factory,
+ IHTIntervalReader<E> objReader,
+ ObjectFactory<E> objFactory) throws IOException {
+ this(name, headerSize, NodeType.LEAF, factory, objReader, objFactory);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param name
+ * The name of the test
+ * @param headerSize
+ * The size of the header for this node
+ * @param type
+ * The node type
+ * @param nodeFactory
+ * The node factory to use
+ * @param objReader
+ * The factory to read element data from disk
+ * @param objFactory
+ * The factory to create objects for this tree
+ * @throws IOException
+ * Any exception occurring with the file
+ */
+ protected HTNodeTest(String name,
+ int headerSize,
+ NodeType type,
+ IHTNodeFactory<E, N> nodeFactory,
+ IHTIntervalReader<E> objReader,
+ ObjectFactory<E> objFactory) throws IOException {
+ File file = File.createTempFile("tmp", null);
+ assertNotNull(file);
+ fHtObjectReader = objReader;
+ fNodeFactory = nodeFactory;
+
+ fHtIo = new HtIo<>(file,
+ HtTestUtils.BLOCKSIZE,
+ NB_CHILDREN,
+ true,
+ objReader,
+ nodeFactory);
+
+ fHeaderSize = headerSize;
+ fType = type;
+ fObjectFactory = objFactory;
+ }
+
+ /**
+ * Get a new node
+ *
+ * @param seqNb
+ * The sequence number
+ * @param parentNb
+ * The parent sequence number
+ * @param nodeStart
+ * The node start
+ * @return A new node, created with the factory sent in parameter in the
+ * constructor
+ */
+ public N newNode(int seqNb, int parentNb, long nodeStart) {
+ return fNodeFactory.createNode(fType, BLOCKSIZE, NB_CHILDREN, seqNb, parentNb, nodeStart);
+ }
+
+ /**
+ * Delete the file after test
+ */
+ @After
+ public void cleanUp() {
+ fHtIo.deleteFile();
+ }
+
+ /**
+ * Fills a node with objects of length 1, going incrementally
+ *
+ * @param node
+ * The node to fill
+ * @param nbObjects
+ * The number of objects to add
+ * @param start
+ * The start time of the objects
+ */
+ protected void fillNode(HTNode<E> node, int nbObjects, long start) {
+ for (int i = 0; i < nbObjects; i++) {
+ node.add(fObjectFactory.createObject(i + start, i + start + 1));
+ }
+ }
+
+ /**
+ * Get the header size of this node
+ *
+ * @return The header size
+ */
+ protected int getHeaderSize() {
+ return fHeaderSize;
+ }
+
+ /**
+ * Create a new object for this type of node
+ *
+ * @param start
+ * The start of the object
+ * @param end
+ * The end of the object
+ * @return The new object
+ */
+ protected E createObject(long start, long end) {
+ return fObjectFactory.createObject(start, end);
+ }
+
+ /**
+ * Write a node to the file
+ *
+ * @param node
+ * Node to write to disk
+ * @throws IOException
+ * Exceptions while writing to file
+ */
+ protected void write(HTNode<E> node) throws IOException {
+ HtIo<E, N> htIo = fHtIo;
+
+ // Write the node at its position in the file (this does NOT close it)
+ node.writeSelf(htIo.getFileWriter(node.getSequenceNumber()).getChannel());
+ }
+
+ /**
+ * Reads a node from the history tree file
+ *
+ * @param seqNb
+ * The sequence number of the node to get
+ * @return The read node
+ * @throws IOException
+ * Exceptions while reading the node
+ */
+ protected HTNode<E> read(int seqNb) throws IOException {
+ HtIo<E, N> htIo = fHtIo;
+
+ return HTNode.readNode(BLOCKSIZE,
+ NB_CHILDREN,
+ htIo.supplyATReader(seqNb).getChannel(),
+ fHtObjectReader,
+ fNodeFactory);
+
+ }
+
+ /**
+ * Test the leaf node methods without adding the node to disk
+ */
+ @Test
+ public void testNodeData() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+
+ // Test the values at the beginning
+ assertFalse(node.isOnDisk());
+ assertEquals(TREE_START, node.getNodeStart());
+ assertEquals(Long.MAX_VALUE, node.getNodeEnd());
+ assertEquals(0, node.getSequenceNumber());
+ assertEquals(-1, node.getParentSequenceNumber());
+ assertEquals(fHeaderSize, node.getTotalHeaderSize());
+ assertTrue(node.isEmpty());
+ assertEquals(fType, node.getNodeType());
+ assertEquals(HtTestUtils.BLOCKSIZE - fHeaderSize, node.getNodeFreeSpace());
+ assertEquals(0, node.getNodeUsagePercent());
+
+ // Add an element. It is possible to add an element outside the
+ // boundaries of the node
+ E object = fObjectFactory.createObject(0L, 10L);
+ node.add(object);
+ assertEquals(HtTestUtils.BLOCKSIZE - fHeaderSize - object.getSizeOnDisk(), node.getNodeFreeSpace());
+
+ // Fill the node with objects
+ int nbObjects = (HtTestUtils.BLOCKSIZE - fHeaderSize) / object.getSizeOnDisk();
+ fillNode(node, nbObjects - 1, TREE_START);
+ // Check the free space and sizes: the remaining free space is smaller
+ // than one object's size, so usage percent is just under 100
+ int expectedSize = HtTestUtils.BLOCKSIZE - fHeaderSize - object.getSizeOnDisk() * nbObjects;
+ assertEquals(expectedSize, node.getNodeFreeSpace());
+ assertEquals(99, node.getNodeUsagePercent());
+ assertEquals(nbObjects, node.getIntervals().size());
+
+ }
+
+ /**
+ * Test adding an element to a full leaf node
+ */
+ @Test(expected = IllegalArgumentException.class)
+ public void testNodeInvalidAdd() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ // Fill the node with objects
+ E object = fObjectFactory.createObject(0L, 10L);
+ int nbObjects = (HtTestUtils.BLOCKSIZE - fHeaderSize) / object.getSizeOnDisk();
+ fillNode(node, nbObjects, TREE_START);
+
+ // Add a new object
+ node.add(object);
+ }
+
+ /**
+ * Test closing a node at an invalid end time
+ */
+ @Test(expected = IllegalArgumentException.class)
+ public void testNodeInvalidEnd() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ // Fill the node with objects
+ E object = fObjectFactory.createObject(0L, 10L);
+ int nbObjects = (HtTestUtils.BLOCKSIZE - fHeaderSize) / object.getSizeOnDisk();
+ fillNode(node, nbObjects, TREE_START);
+
+ // Close the node at a wrong time
+ node.closeThisNode(TREE_START);
+ }
+
+ /**
+ * Test adding an element to a closed node
+ */
+ @Test
+ public void testAddToCloseNode() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ // Fill the node with objects
+ E object = fObjectFactory.createObject(TREE_START, TREE_START + 10);
+ int nbObjects = (HtTestUtils.BLOCKSIZE - fHeaderSize) / object.getSizeOnDisk();
+ fillNode(node, nbObjects - 1, TREE_START);
+
+ // Close the node, then add a new object to it
+ node.closeThisNode(TREE_START + nbObjects);
+ // FIXME: shouldn't this fail?
+ node.add(object);
+
+ }
+
+ /**
+ * test closing, writing and reading a leaf node
+ *
+ * @throws IOException
+ * Exception while writing/reading the file
+ */
+ @Test
+ public void testCloseNode() throws IOException {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ // Fill the node with objects
+ E object = fObjectFactory.createObject(0L, 10L);
+ int nbObjects = (HtTestUtils.BLOCKSIZE - fHeaderSize) / object.getSizeOnDisk();
+ fillNode(node, nbObjects, TREE_START);
+ assertEquals(nbObjects, node.getIntervals().size());
+
+ // Close the node and write it to disk
+ node.closeThisNode(TREE_START + nbObjects + 1);
+ write(node);
+ assertTrue(node.isOnDisk());
+
+ // Read the node and make sure its data is equal to that of the original
+ // node
+ HTNode<E> readNode = read(0);
+ assertTrue(readNode.isOnDisk());
+ assertEquals(node, readNode);
+ }
+
+ /**
+ * Test the {@link HTNode#getNbChildren()} method
+ */
+ @Test
+ public void testNbChildren() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ assertEquals(0, node.getNbChildren());
+ }
+
+ /**
+ * Test the {@link HTNode#getChild(int)} method
+ */
+ @Test(expected = IndexOutOfBoundsException.class)
+ public void testGetChild() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ node.getChild(0);
+ }
+
+ /**
+ * Test the {@link HTNode#getLatestChild()} method
+ */
+ @Test(expected = UnsupportedOperationException.class)
+ public void testGetLatestChild() {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ node.getLatestChild();
+ }
+
+ /**
+ * Test the {@link HTNode#linkNewChild(IHTNode)} method
+ *
+ * @throws IOException
+ * Exceptions thrown when reading/writing
+ */
+ @SuppressWarnings("unused")
+ @Test(expected = UnsupportedOperationException.class)
+ public void testLinkNewChild() throws IOException {
+ HTNode<E> node = newNode(0, -1, TREE_START);
+ HTNode<E> childNode = newNode(1, 0, TREE_START);
+ node.linkNewChild(childNode);
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HistoryTreeStub;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+
+/**
+ * Test the default abstract implementation of the tree, with a concrete stub.
+ * The stub tree uses the default base node implementation for all its nodes
+ *
+ * @author Geneviève Bastien
+ */
+public class HistoryTreeStubTest extends AbstractHistoryTreeTestBase<HTInterval, HTNode<HTInterval>> {
+
+ /** Reference interval, used only to compute the on-disk size of one object */
+ private static final HTInterval DEFAULT_OBJECT = new HTInterval(10, 20);
+
+ @Override
+ protected HistoryTreeStub createHistoryTree(File stateHistoryFile,
+ int blockSize,
+ int maxChildren,
+ int providerVersion,
+ long treeStart) throws IOException {
+
+ return new HistoryTreeStub(stateHistoryFile,
+ blockSize,
+ maxChildren,
+ providerVersion,
+ treeStart);
+ }
+
+ @Override
+ protected HistoryTreeStub createHistoryTree(File existingStateFile, int expProviderVersion)
+ throws IOException {
+ return new HistoryTreeStub(existingStateFile, expProviderVersion);
+ }
+
+ @Override
+ protected long fillValues(AbstractHistoryTree<HTInterval, HTNode<HTInterval>> ht, int sizeLimit, long start) {
+ int objectSize = DEFAULT_OBJECT.getSizeOnDisk();
+ int nbValues = sizeLimit / objectSize;
+ // Insert contiguous 1-unit-long intervals until the size limit is reached
+ for (int i = 0; i < nbValues; i++) {
+ ht.insert(new HTInterval(start + i, start + i + 1));
+ }
+ return start + nbValues;
+ }
+
+ @Override
+ protected HTInterval createInterval(long start, long end) {
+ return new HTInterval(start, end);
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.PrintStream;
+import java.nio.channels.ClosedChannelException;
+
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode.NodeType;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTIntervalReader;
+
+/**
+ * Some utility classes and methods for the history tree tests
+ *
+ * @author Geneviève Bastien
+ */
+public class HtTestUtils {
+
+ /**
+ * Default block size for the tests
+ */
+ public static final int BLOCKSIZE = 4096;
+
+ /**
+ * The factory to read BaseHtObject nodes
+ */
+ public static final IHTIntervalReader<HTInterval> READ_FACTORY = HTInterval.INTERVAL_READER;
+
+ // NOTE(review): utility class; consider adding a private constructor to
+ // prevent instantiation.
+
+ /**
+ * Helper method to check the integrity of a tree. For each node of the
+ * tree, it makes sure that elements in nodes are within the node's range
+ * and for core nodes, it makes sure that the children's data is OK.
+ *
+ * @param tree
+ * The history to check
+ */
+ public static final <E extends IHTInterval, N extends HTNode<E>>
+ void assertTreeIntegrity(AbstractHistoryTree<E, N> tree) {
+
+ try {
+ for (int i = 0; i < tree.getNodeCount(); i++) {
+ assertNodeIntegrity(tree, tree.getNode(i));
+ }
+ } catch (ClosedChannelException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ private static <E extends IHTInterval, N extends HTNode<E>>
+ void assertNodeIntegrity(AbstractHistoryTree<E, N> tree, N node) {
+
+ if (node.getNodeType() == NodeType.CORE) {
+ assertChildrenIntegrity(tree, node);
+ }
+
+ /* Check that all intervals are within the node's range */
+ for (E object : node.getIntervals()) {
+ assertTrue(String.format("Object start (%d) >= node start (%d)", object.getStart(), node.getNodeStart()), object.getStart() >= node.getNodeStart());
+ assertTrue(String.format("Object start (%d) <= node end (%d)", object.getStart(), node.getNodeEnd()), object.getStart() <= node.getNodeEnd());
+ assertTrue(String.format("Object end (%d) >= node start (%d)", object.getEnd(), node.getNodeStart()), object.getEnd() >= node.getNodeStart());
+ assertTrue(String.format("Object end (%d) <= node end (%d)", object.getEnd(), node.getNodeEnd()), object.getEnd() <= node.getNodeEnd());
+ }
+
+ }
+
+ private static <E extends IHTInterval, N extends HTNode<E>>
+ void assertChildrenIntegrity(AbstractHistoryTree<E, N> tree, N node) {
+
+ try {
+ /*
+ * Test that this node's start and end times match the start of the
+ * first child and the end of the last child, respectively
+ */
+ if (node.getNbChildren() > 0) {
+ N childNode = tree.getNode(node.getChild(0));
+ assertEquals("Equals start time of parent " + node.getSequenceNumber() + " and first child " + childNode.getSequenceNumber(),
+ node.getNodeStart(), childNode.getNodeStart());
+ // The end time is only fixed once the node has been closed and
+ // written to disk
+ if (node.isOnDisk()) {
+ childNode = tree.getNode(node.getLatestChild());
+ assertEquals("Equals end time of parent " + node.getSequenceNumber() + " and last child " + childNode.getSequenceNumber(),
+ node.getNodeEnd(), childNode.getNodeEnd());
+ }
+ }
+
+ /*
+ * Test that children range is within the parent's range
+ */
+ for (int i = 0; i < node.getNbChildren(); i++) {
+ N childNode = tree.getNode(node.getChild(i));
+ assertTrue("Child at index " + i + " of parent " + node.getSequenceNumber() + " has valid start time",
+ node.getNodeStart() <= childNode.getNodeStart());
+ if (node.isOnDisk() && childNode.isOnDisk()) {
+ assertTrue("Child at index " + i + " of parent " + node.getSequenceNumber() + " has valid end time",
+ childNode.getNodeEnd() <= node.getNodeEnd());
+ }
+ assertTrue("Child at index " + i + " of parent " + node.getSequenceNumber() + " specific children", tree.verifyChildrenSpecific(node, i, childNode));
+ assertTrue("Child at index " + i + " of parent " + node.getSequenceNumber() + " intersecting children", tree.verifyIntersectingChildren(node, childNode));
+ }
+
+ } catch (ClosedChannelException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ /**
+ * Print out the full tree for debugging purposes
+ *
+ * @param writer
+ * PrintStream in which to write the output
+ * @param tree
+ * The history tree to print
+ * @param printIntervals
+ * Flag to enable full output of the interval information
+ * @param ts
+ * The timestamp that nodes have to intersect for intervals to be
+ * printed. A value of zero or less will print intervals for all
+ * nodes. The timestamp only applies if printIntervals is true.
+ */
+ public static <E extends IHTInterval, N extends HTNode<E>> void debugPrintFullTree(
+ PrintStream writer,
+ AbstractHistoryTree<E, N> tree,
+ boolean printIntervals,
+ long ts) {
+
+ preOrderPrint(writer, tree, false, tree.getLatestBranch().get(0), 0, ts);
+
+ if (printIntervals) {
+ preOrderPrint(writer, tree, true, tree.getLatestBranch().get(0), 0, ts);
+ }
+ writer.println('\n');
+ }
+
+ private static <E extends IHTInterval, N extends HTNode<E>> void preOrderPrint(
+ PrintStream writer,
+ AbstractHistoryTree<E, N> tree,
+ boolean printIntervals,
+ N node,
+ int curDepth,
+ long ts) {
+
+ writer.println(node.toString());
+ /*
+ * Print intervals only if timestamp is zero or negative, or within the
+ * range of the node
+ */
+ if (printIntervals &&
+ (ts <= 0 ||
+ (ts >= node.getNodeStart() && ts <= node.getNodeEnd()))) {
+ node.debugPrintIntervals(writer);
+ }
+
+ switch (node.getNodeType()) {
+ case LEAF:
+ /* Stop if it's the leaf node */
+ return;
+
+ case CORE:
+ try {
+ /* Print the child nodes */
+ for (int i = 0; i < node.getNbChildren(); i++) {
+ N nextNode = tree.readNode(node.getChild(i));
+ for (int j = 0; j < curDepth; j++) {
+ writer.print(" ");
+ }
+ writer.print("+-");
+ preOrderPrint(writer, tree, printIntervals, nextNode, curDepth + 1, ts);
+ }
+ } catch (ClosedChannelException e) {
+ throw new RuntimeException(e);
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.eclipse.jdt.annotation.NonNull;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+
+/**
+ * A stub history tree for the base abstract history tree
+ *
+ * @author Geneviève Bastien
+ */
+public class HistoryTreeStub extends AbstractHistoryTree<HTInterval, HTNode<HTInterval>> {
+
+ /**
+ * A factory to create leaf and core nodes based on the BaseHtObject object
+ */
+ public static final IHTNodeFactory<HTInterval, HTNode<HTInterval>> NODE_FACTORY =
+ (t, b, m, seq, p, start) -> new HTNode<>(t, b, m, seq, p, start);
+
+ /**
+ * Create a new State History from scratch, specifying all configuration
+ * parameters.
+ *
+ * @param stateHistoryFile
+ * The name of the history file
+ * @param blockSize
+ * The size of each "block" on disk in bytes. One node will
+ * always fit in one block. It should be at least 4096.
+ * @param maxChildren
+ * The maximum number of children allowed per core (non-leaf)
+ * node.
+ * @param providerVersion
+ * The version of the state provider. If a file already exists,
+ * and their versions match, the history file will not be rebuilt
+ * uselessly.
+ * @param treeStart
+ * The start time of the history
+ * @throws IOException
+ * If an error happens trying to open/write to the file
+ * specified in the config
+ */
+ public HistoryTreeStub(File stateHistoryFile,
+ int blockSize,
+ int maxChildren,
+ int providerVersion,
+ long treeStart) throws IOException {
+
+ super(stateHistoryFile,
+ blockSize,
+ maxChildren,
+ providerVersion,
+ treeStart,
+ HTInterval.INTERVAL_READER);
+ }
+
+ /**
+ * "Reader" constructor : instantiate a SHTree from an existing tree file on
+ * disk
+ *
+ * @param existingStateFile
+ * Path/filename of the history-file we are to open
+ * @param expProviderVersion
+ * The expected version of the state provider
+ * @throws IOException
+ * If an error happens reading the file
+ */
+ public HistoryTreeStub(File existingStateFile,
+ int expProviderVersion) throws IOException {
+ super(existingStateFile, expProviderVersion, HTInterval.INTERVAL_READER);
+ }
+
+ @Override
+ protected int getMagicNumber() {
+ // Arbitrary magic number identifying the stub's history files
+ return 12345;
+ }
+
+ @Override
+ protected int getFileVersion() {
+ // Stub file format version
+ return 0;
+ }
+
+ @Override
+ protected IHTNodeFactory<HTInterval, HTNode<HTInterval>> getNodeFactory() {
+ return NODE_FACTORY;
+ }
+
+ @Override
+ protected long getNewBranchStart(int depth, @NonNull HTInterval interval) {
+ // New sibling branches simply start at the current end of the tree
+ return getTreeEnd();
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are made
+ * available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+@org.eclipse.jdt.annotation.NonNullByDefault
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.LinkedList;
* @return The immutable latest branch
*/
@VisibleForTesting
- protected List<N> getLatestBranch() {
+ List<N> getLatestBranch() {
return ImmutableList.copyOf(fLatestBranch);
}
* closed
*/
@VisibleForTesting
- protected @NonNull N getNode(int seqNum) throws ClosedChannelException {
+ @NonNull N getNode(int seqNum) throws ClosedChannelException {
// First, check in the latest branch if the node is there
for (N node : fLatestBranch) {
if (node.getSequenceNumber() == seqNum) {
* @return The TreeIO
*/
@VisibleForTesting
- protected HtIo<E, N> getTreeIO() {
+ HtIo<E, N> getTreeIO() {
return fTreeIO;
}
+ fLatestBranch.get(fLatestBranch.size() - 1).getSequenceNumber();
}
+
+ // ------------------------------------------------------------------------
+ // Test-specific methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Get the current depth of the tree, i.e. the number of node levels in
+ * the latest branch.
+ *
+ * @return The current depth
+ */
+ @VisibleForTesting
+ protected int getDepth() {
+ return getLatestBranch().size();
+ }
+
+ /**
+ * Get the leaf (bottom-most) node of the latest branch, i.e. the node
+ * currently receiving new intervals.
+ *
+ * @return The latest leaf
+ */
+ @VisibleForTesting
+ protected N getLatestLeaf() {
+ List<N> latestBranch = getLatestBranch();
+ return latestBranch.get(latestBranch.size() - 1);
+ }
+
+ /**
+ * Verify a node's specific information about a child. The default
+ * implementation keeps no extra per-child data, so it always returns
+ * {@code true}; subclasses that store node-specific child data should
+ * override this.
+ *
+ * @param parent
+ * The parent node
+ * @param index
+ * The index of the child in the parent's extra data
+ * @param child
+ * The child node to verify
+ * @return False if a problem was found, true otherwise
+ */
+ @VisibleForTesting
+ protected boolean verifyChildrenSpecific(N parent,
+ int index,
+ N child) {
+ // Nothing to do for the default implementation
+ return true;
+ }
+
+ /**
+ * This method should verify, over the whole time range of the parent node,
+ * that the child node appears (or not) among the next children selected
+ * for each timestamp.
+ *
+ * @param parent
+ * The parent node
+ * @param child
+ * The child node
+ * @return False if a problem was found, true otherwise
+ */
+ @VisibleForTesting
+ protected boolean verifyIntersectingChildren(N parent, N child) {
+ int childSequence = child.getSequenceNumber();
+ boolean shouldBeInCollection;
+ Collection<Integer> nextChildren;
+ // NOTE(review): this walks every single timestamp in the parent's
+ // range, which is O(range) and can be very slow for wide nodes. Also,
+ // shouldBeInCollection is always true here — presumably a hook for
+ // subclasses to refine; confirm against overriding implementations.
+ for (long t = parent.getNodeStart(); t < parent.getNodeEnd(); t++) {
+ shouldBeInCollection = true;
+ nextChildren = parent.selectNextChildren(RangeCondition.singleton(t));
+ if (shouldBeInCollection != nextChildren.contains(childSequence)) {
+ return false;
+ }
+ }
+ return true;
+ }
}
package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;
import java.io.IOException;
+import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
* </pre>
*/
@VisibleForTesting
- protected static final int COMMON_HEADER_SIZE = Byte.BYTES
+ public static final int COMMON_HEADER_SIZE = Byte.BYTES
+ 2 * Long.BYTES
+ 3 * Integer.BYTES
+ Byte.BYTES;
return childList;
}
+ @Override
+ public int hashCode() {
+ /*
+ * Do not consider "fNode", since the node's equals/hashCode already
+ * consider us.
+ */
+ /*
+ * Use Arrays.hashCode for the children array: equals() compares it
+ * with Arrays.equals, and passing the array directly to Objects.hash
+ * would use the array's identity hash code, breaking the
+ * equals/hashCode contract (equal objects with different hashes).
+ */
+ return Objects.hash(fNbChildren, Arrays.hashCode(fChildren));
+ }
+
+ @Override
+ public boolean equals(@Nullable Object obj) {
+ /*
+ * As in hashCode(), "fNode" is deliberately not compared here. Note
+ * that the getClass() comparison means an instance is never equal to
+ * an instance of a subclass.
+ */
+ if (obj == null) {
+ return false;
+ }
+ if (obj.getClass() != getClass()) {
+ return false;
+ }
+ CoreNodeData other = (CoreNodeData) obj;
+ return (fNbChildren == other.fNbChildren
+ && Arrays.equals(fChildren, other.fChildren));
+ }
+
}
/**
@Override
public int hashCode() {
- return Objects.hash(fBlockSize, fMaxChildren, fNodeStart, fNodeEnd, fSequenceNumber, fParentSequenceNumber);
+ return Objects.hash(fBlockSize, fMaxChildren, fNodeStart, fNodeEnd, fSequenceNumber, fParentSequenceNumber, fExtraData);
}
@Override
fNodeStart == other.fNodeStart &&
fNodeEnd == other.fNodeEnd &&
fSequenceNumber == other.fSequenceNumber &&
- fParentSequenceNumber == other.fParentSequenceNumber);
+ fParentSequenceNumber == other.fParentSequenceNumber &&
+ Objects.equals(fExtraData, other.fExtraData));
}
+ // -----------------------------------------
+ // Test-specific methods
+ // -----------------------------------------
+
+ /**
+ * Print all current intervals into the given writer, one per line, for
+ * debugging purposes.
+ *
+ * @param writer
+ * The print stream to write to
+ */
+ @VisibleForTesting
+ public void debugPrintIntervals(PrintStream writer) {
+ getIntervals().forEach(writer::println);
+ }
}