analysis.io: Provide information from the IO model
author Geneviève Bastien <gbastien+lttng@versatic.net>
Thu, 31 Mar 2016 18:45:47 +0000 (14:45 -0400)
committer Genevieve Bastien <gbastien+lttng@versatic.net>
Fri, 8 Apr 2016 16:02:05 +0000 (12:02 -0400)
This extracts a base Disk class from the DiskWriteModel, which can be used
by outputs and views to get data from the analysis.

It also adds an information provider for the input/output analysis module.

Both come with unit tests.
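
A rough usage sketch (not part of this patch; the class and method names below
are illustrative only) of how an output or view could consume the new API,
given the trace's InputOutputAnalysisModule. Note that getDisks() schedules the
analysis and waits for its initialization, so callers do not have to:

    import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.Disk;
    import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.InputOutputAnalysisModule;
    import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.InputOutputInformationProvider;
    import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.IoOperationType;

    /** Illustrative example only, not shipped with this change */
    public class DiskUsageExample {

        /** Print per-disk sector counts at a given timestamp */
        public static void printDiskActivity(InputOutputAnalysisModule module, long ts) {
            for (Disk disk : InputOutputInformationProvider.getDisks(module)) {
                if (!disk.hasActivity()) {
                    continue;
                }
                // Cumulative sectors up to ts; requests still in the driver queue are interpolated
                long read = disk.getSectorsAt(ts, IoOperationType.READ);
                long written = disk.getSectorsAt(ts, IoOperationType.WRITE);
                System.out.println(disk.getDiskName() + " (" + disk.getDeviceIdString() + "): "
                        + read + " sectors read, " + written + " sectors written");
            }
        }
    }

The interpolation in Disk#getSectorsAt is linear: a request that is halfway
through its time in the driver queue contributes half of its sectors to the
count at that timestamp.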

Change-Id: Iffb8428eb161bbd5cfcb834deebfa55d812f4d00
Signed-off-by: Geneviève Bastien <gbastien+lttng@versatic.net>
Reviewed-on: https://git.eclipse.org/r/69669
Reviewed-by: Hudson CI
Reviewed-by: Matthew Khouzam <matthew.khouzam@ericsson.com>
Tested-by: Matthew Khouzam <matthew.khouzam@ericsson.com>
analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/src/org/eclipse/tracecompass/analysis/os/linux/core/tests/inputoutput/InputOutputDiskTest.java [new file with mode: 0644]
analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/src/org/eclipse/tracecompass/analysis/os/linux/core/tests/inputoutput/InputOutputInformationProviderTest.java [new file with mode: 0644]
analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/stubs/org/eclipse/tracecompass/analysis/os/linux/core/tests/stubs/inputoutput/IoTestCase.java [new file with mode: 0644]
analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/stubs/org/eclipse/tracecompass/analysis/os/linux/core/tests/stubs/inputoutput/IoTestFactory.java
analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/testfiles/io_analysis/io_two_devices.xml
analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/analysis/os/linux/core/inputoutput/Disk.java [new file with mode: 0644]
analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/analysis/os/linux/core/inputoutput/InputOutputInformationProvider.java [new file with mode: 0644]
analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/internal/analysis/os/linux/core/inputoutput/DiskWriteModel.java

diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/src/org/eclipse/tracecompass/analysis/os/linux/core/tests/inputoutput/InputOutputDiskTest.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/src/org/eclipse/tracecompass/analysis/os/linux/core/tests/inputoutput/InputOutputDiskTest.java
new file mode 100644 (file)
index 0000000..2569542
--- /dev/null
@@ -0,0 +1,124 @@
+/*******************************************************************************
+ * Copyright (c) 2016 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+package org.eclipse.tracecompass.analysis.os.linux.core.tests.inputoutput;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Arrays;
+import java.util.Map.Entry;
+
+import org.eclipse.jdt.annotation.NonNull;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.Disk;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.InputOutputAnalysisModule;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.InputOutputInformationProvider;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.IoOperationType;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput.IoTestCase;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput.IoTestCase.DiskInfo;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput.IoTestCase.SectorCountInfo;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput.IoTestFactory;
+import org.eclipse.tracecompass.tmf.core.tests.shared.TmfTestHelper;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Test the {@link Disk} class
+ *
+ * @author Geneviève Bastien
+ */
+@RunWith(Parameterized.class)
+public class InputOutputDiskTest extends AbstractTestInputOutput {
+
+    private final IoTestCase fTestCase;
+
+    /**
+     * Constructor
+     *
+     * @param testName
+     *            A name for the test, to display in the header
+     * @param test
+     *            A test case parameter for this test
+     */
+    public InputOutputDiskTest(String testName, IoTestCase test) {
+        super();
+        fTestCase = test;
+
+    }
+
+    /**
+     * Clean up
+     */
+    @After
+    public void tearDown() {
+        super.deleteTrace();
+    }
+
+    @Override
+    protected @NonNull InputOutputAnalysisModule setUp(String fileName) {
+        InputOutputAnalysisModule module = super.setUp(fileName);
+        TmfTestHelper.executeAnalysis(module);
+        return module;
+    }
+
+    /**
+     * @return The arrays of parameters
+     */
+    @Parameters(name = "{index}: {0}")
+    public static Iterable<Object[]> getParameters() {
+        return Arrays.asList(new Object[][] {
+                { IoTestFactory.SIMPLE_REQUESTS.getTraceFileName(), IoTestFactory.SIMPLE_REQUESTS },
+                { IoTestFactory.SIMPLE_NO_STATEDUMP.getTraceFileName(), IoTestFactory.SIMPLE_NO_STATEDUMP },
+                { IoTestFactory.TWO_DEVICES.getTraceFileName(), IoTestFactory.TWO_DEVICES }
+        });
+    }
+
+    private static Disk getDisk(@NonNull InputOutputAnalysisModule module, Integer deviceId) {
+        return InputOutputInformationProvider.getDisks(module).stream().filter(d -> d.getDeviceId().equals(deviceId)).findFirst().get();
+    }
+
+    /**
+     * Test the {@link Disk#getSectorsAt(long, IoOperationType)}
+     * method
+     */
+    @Test
+    public void testSectorsAt() {
+        InputOutputAnalysisModule module = setUp(fTestCase.getTraceFileName());
+
+        for (Integer deviceId : fTestCase.getSectorCount().keySet()) {
+            Disk disk = getDisk(module, deviceId);
+            assertNotNull(disk);
+            for (SectorCountInfo info : fTestCase.getSectorCount().get(deviceId)) {
+                long sectorsAt = disk.getSectorsAt(info.getTimestamp(), info.getType());
+                assertEquals("Sectors at " + info.getTimestamp() + " for type " + info.getType(), info.getNbSectors(), sectorsAt);
+            }
+        }
+    }
+
+    /**
+     * Test the {@link Disk#getDeviceIdString()}, {@link Disk#getDiskName()}
+     * and {@link Disk#hasActivity()} methods
+     */
+    @Test
+    public void testDeviceStrings() {
+        InputOutputAnalysisModule module = setUp(fTestCase.getTraceFileName());
+
+        for (Entry<Integer, DiskInfo> deviceInfo : fTestCase.getDiskInfo().entrySet()) {
+            Integer deviceId = deviceInfo.getKey();
+            DiskInfo diskInfo = deviceInfo.getValue();
+            Disk disk = getDisk(module, deviceId);
+            assertNotNull(disk);
+            assertEquals("Device ID string for " + deviceId, diskInfo.getDeviceString(), disk.getDeviceIdString());
+            assertEquals("Disk name string for " + deviceId, diskInfo.getDeviceName(), disk.getDiskName());
+            assertEquals("Disk activity for " + deviceId, diskInfo.hasActivity(), disk.hasActivity());
+        }
+    }
+}
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/src/org/eclipse/tracecompass/analysis/os/linux/core/tests/inputoutput/InputOutputInformationProviderTest.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/src/org/eclipse/tracecompass/analysis/os/linux/core/tests/inputoutput/InputOutputInformationProviderTest.java
new file mode 100644 (file)
index 0000000..8f28b9c
--- /dev/null
@@ -0,0 +1,95 @@
+/*******************************************************************************
+ * Copyright (c) 2016 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+package org.eclipse.tracecompass.analysis.os.linux.core.tests.inputoutput;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.eclipse.jdt.annotation.NonNull;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.Disk;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.InputOutputAnalysisModule;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.InputOutputInformationProvider;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.LinuxTestCase;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput.IoTestFactory;
+import org.eclipse.tracecompass.tmf.core.tests.shared.TmfTestHelper;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Test the {@link InputOutputInformationProvider} class
+ *
+ * @author Geneviève Bastien
+ */
+@RunWith(Parameterized.class)
+public class InputOutputInformationProviderTest extends AbstractTestInputOutput {
+
+    private final LinuxTestCase fTestCase;
+    private final int fDiskCount;
+
+    /**
+     * Constructor
+     *
+     * @param testName
+     *            A name for the test, to display in the header
+     * @param test
+     *            A test case parameter for this test
+     * @param diskCount
+     *            The number of disks
+     */
+    public InputOutputInformationProviderTest(String testName, LinuxTestCase test, int diskCount) {
+        super();
+        fTestCase = test;
+        fDiskCount = diskCount;
+    }
+
+    /**
+     * Clean up
+     */
+    @After
+    public void tearDown() {
+        super.deleteTrace();
+    }
+
+    @Override
+    protected @NonNull InputOutputAnalysisModule setUp(String fileName) {
+        InputOutputAnalysisModule module = super.setUp(fileName);
+        TmfTestHelper.executeAnalysis(module);
+        return module;
+    }
+
+    /**
+     * @return The arrays of parameters
+     */
+    @Parameters(name = "{index}: {0}")
+    public static Iterable<Object[]> getParameters() {
+        return Arrays.asList(new Object[][] {
+                { IoTestFactory.SIMPLE_REQUESTS.getTraceFileName(), IoTestFactory.SIMPLE_REQUESTS, 1 },
+                { IoTestFactory.SIMPLE_NO_STATEDUMP.getTraceFileName(), IoTestFactory.SIMPLE_NO_STATEDUMP, 1 },
+                { IoTestFactory.TWO_DEVICES.getTraceFileName(), IoTestFactory.TWO_DEVICES, 3 }
+        });
+    }
+
+    /**
+     * Test the
+     * {@link InputOutputInformationProvider#getDisks(InputOutputAnalysisModule)}
+     * method
+     */
+    @Test
+    public void testGetDisks() {
+        InputOutputAnalysisModule module = setUp(fTestCase.getTraceFileName());
+        Collection<Disk> disks = InputOutputInformationProvider.getDisks(module);
+        assertEquals(fDiskCount, disks.size());
+    }
+
+}
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/stubs/org/eclipse/tracecompass/analysis/os/linux/core/tests/stubs/inputoutput/IoTestCase.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/stubs/org/eclipse/tracecompass/analysis/os/linux/core/tests/stubs/inputoutput/IoTestCase.java
new file mode 100644 (file)
index 0000000..9ea9a6e
--- /dev/null
@@ -0,0 +1,160 @@
+/*******************************************************************************
+ * Copyright (c) 2016 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+package org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput;
+
+import java.util.Collections;
+import java.util.Map;
+
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.IoOperationType;
+import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.LinuxTestCase;
+
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+
+/**
+ * Test case with additional information for IO test cases
+ *
+ * @author Geneviève Bastien
+ */
+public class IoTestCase extends LinuxTestCase {
+
+    /**
+     * Constructor
+     *
+     * @param filename
+     *            The Name of the file containing the trace for this test case
+     */
+    public IoTestCase(String filename) {
+        super(filename);
+    }
+
+    /**
+     * Class to store sector count information
+     */
+    public static class SectorCountInfo {
+        private final long fTs;
+        private final IoOperationType fType;
+        private final long fSectors;
+
+        /**
+         * Constructor
+         *
+         * @param ts
+         *            The timestamp at which to test
+         * @param type
+         *            The type of IO operation to test
+         * @param nbSectors
+         *            The expected number of sectors at this timestamp
+         */
+        public SectorCountInfo(long ts, IoOperationType type, long nbSectors) {
+            fTs = ts;
+            fType = type;
+            fSectors = nbSectors;
+        }
+
+        /**
+         * Get the timestamp to test
+         *
+         * @return The timestamp at which to test
+         */
+        public long getTimestamp() {
+            return fTs;
+        }
+
+        /**
+         * Get the type of IO operation
+         *
+         * @return The type of IO operation
+         */
+        public IoOperationType getType() {
+            return fType;
+        }
+
+        /**
+         * Get the expected number of sectors at this timestamp
+         *
+         * @return The expected number of sectors
+         */
+        public long getNbSectors() {
+            return fSectors;
+        }
+    }
+
+    /**
+     * Class to contain information on a disk
+     */
+    public static class DiskInfo {
+        private final String fDeviceString;
+        private final String fDiskName;
+        private final boolean fActive;
+
+        /**
+         * Constructor
+         *
+         * @param deviceString
+         *            The device ID string, as obtained with ls -al /dev
+         * @param diskname
+         *            The real human-readable name of the disk. If a name is not
+         *            available, this value should be equal to the deviceString
+         * @param active
+         *            Whether there is activity on this disk
+         */
+        public DiskInfo(String deviceString, String diskname, boolean active) {
+            fDeviceString = deviceString;
+            fDiskName = diskname;
+            fActive = active;
+        }
+
+        /**
+         * Get the device ID string for this disk
+         *
+         * @return The device ID string
+         */
+        public String getDeviceString() {
+            return fDeviceString;
+        }
+
+        /**
+         * Get the device name of the disk
+         *
+         * @return The device name
+         */
+        public String getDeviceName() {
+            return fDiskName;
+        }
+
+        /**
+         * Return whether the disk had activity during the trace
+         *
+         * @return Whether the disk had activity
+         */
+        public boolean hasActivity() {
+            return fActive;
+        }
+    }
+
+    /**
+     * Get a multimap of device ID to sector count information to test
+     *
+     * @return A multimap of device ID to sector count information
+     */
+    public Multimap<Integer, SectorCountInfo> getSectorCount() {
+        return HashMultimap.create();
+    }
+
+    /**
+     * Get a mapping of device ID to disk information for the disks to test
+     *
+     * @return A mapping of device ID to disk information
+     */
+    public Map<Integer, DiskInfo> getDiskInfo() {
+        return Collections.emptyMap();
+    }
+
+}
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/stubs/org/eclipse/tracecompass/analysis/os/linux/core/tests/stubs/inputoutput/IoTestFactory.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/stubs/org/eclipse/tracecompass/analysis/os/linux/core/tests/stubs/inputoutput/IoTestFactory.java
index 0f72d905f1cc6a2f06e33e493e61e143540e3ae2..4f94f8fcf08ceb94b4892c0f52c8e7b1485c77d3 100644 (file)
 package org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.inputoutput;
 
 import java.util.HashSet;
+import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.Attributes;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.IoOperationType;
 import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.StateValues;
 import org.eclipse.tracecompass.analysis.os.linux.core.tests.stubs.LinuxTestCase;
 import org.eclipse.tracecompass.statesystem.core.interval.ITmfStateInterval;
 import org.eclipse.tracecompass.statesystem.core.statevalue.TmfStateValue;
+import org.eclipse.tracecompass.statesystem.core.tests.shared.utils.StateIntervalStub;
 import org.eclipse.tracecompass.statesystem.core.tests.shared.utils.StateSystemTestUtils;
 
+import com.google.common.collect.HashMultimap;
 import com.google.common.collect.ImmutableList;
-
-import org.eclipse.tracecompass.statesystem.core.tests.shared.utils.StateIntervalStub;
+import com.google.common.collect.Multimap;
 
 /**
  * Factory of test for the input output analysis
@@ -48,7 +52,7 @@ public final class IoTestFactory {
      * - Read request inserted at 40L, issued at 50L and completed at 60L
      * </pre>
      */
-    public final static LinuxTestCase SIMPLE_REQUESTS = new LinuxTestCase("io_analysis.xml") {
+    public final static IoTestCase SIMPLE_REQUESTS = new IoTestCase("io_analysis.xml") {
 
         @Override
         public Set<IntervalInfo> getTestIntervals() {
@@ -232,6 +236,34 @@ public final class IoTestFactory {
             return info;
         }
 
+        @Override
+        public Multimap<Integer, SectorCountInfo> getSectorCount() {
+            Integer deviceId = Integer.parseInt(DEVICE_ID);
+            Multimap<Integer, SectorCountInfo> map = HashMultimap.create();
+            map.put(deviceId, new SectorCountInfo(5, IoOperationType.READ, 0));
+            map.put(deviceId, new SectorCountInfo(50, IoOperationType.READ, 0));
+            map.put(deviceId, new SectorCountInfo(55, IoOperationType.READ, 128));
+            map.put(deviceId, new SectorCountInfo(60, IoOperationType.READ, 256));
+            map.put(deviceId, new SectorCountInfo(5, IoOperationType.WRITE, 0));
+            map.put(deviceId, new SectorCountInfo(15, IoOperationType.WRITE, 4));
+            map.put(deviceId, new SectorCountInfo(20, IoOperationType.WRITE, 8));
+            map.put(deviceId, new SectorCountInfo(27, IoOperationType.WRITE, 8));
+            map.put(deviceId, new SectorCountInfo(28, IoOperationType.WRITE, 10));
+            map.put(deviceId, new SectorCountInfo(29, IoOperationType.WRITE, 13));
+            map.put(deviceId, new SectorCountInfo(30, IoOperationType.WRITE, 17));
+            map.put(deviceId, new SectorCountInfo(35, IoOperationType.WRITE, 24));
+            map.put(deviceId, new SectorCountInfo(60, IoOperationType.WRITE, 24));
+            return map;
+        }
+
+        @Override
+        public Map<Integer, DiskInfo> getDiskInfo() {
+            Map<Integer, DiskInfo> map = new TreeMap<>();
+            Integer deviceId = Integer.parseInt(DEVICE_ID);
+            map.put(deviceId, new DiskInfo("8,16", DEVICE_NAME, true));
+            return map;
+        }
+
     };
 
     /**
@@ -242,7 +274,7 @@ public final class IoTestFactory {
      * - Write Request inserted at 5L, issued at 10L and completed at 20L
      * </pre>
      */
-    public final static LinuxTestCase SIMPLE_NO_STATEDUMP = new LinuxTestCase("io_nostatedump.xml") {
+    public final static IoTestCase SIMPLE_NO_STATEDUMP = new IoTestCase("io_nostatedump.xml") {
 
         @Override
         public Set<IntervalInfo> getTestIntervals() {
@@ -265,6 +297,14 @@ public final class IoTestFactory {
         public Set<PunctualInfo> getPunctualTestData() {
             return new HashSet<>();
         }
+
+        @Override
+        public Map<Integer, DiskInfo> getDiskInfo() {
+            Map<Integer, DiskInfo> map = new TreeMap<>();
+            Integer deviceId = Integer.parseInt(DEVICE_ID);
+            map.put(deviceId, new DiskInfo("8,16", "8,16", true));
+            return map;
+        }
     };
 
     /**
@@ -607,7 +647,7 @@ public final class IoTestFactory {
      * - read request on sdb inserted at 8L (before statedump), issued at 12L and completed at 18L
      * </pre>
      */
-    public final static LinuxTestCase TWO_DEVICES = new LinuxTestCase("io_two_devices.xml") {
+    public final static IoTestCase TWO_DEVICES = new IoTestCase("io_two_devices.xml") {
 
         @Override
         public Set<IntervalInfo> getTestIntervals() {
@@ -660,6 +700,17 @@ public final class IoTestFactory {
         public Set<PunctualInfo> getPunctualTestData() {
             return new HashSet<>();
         }
+
+        @Override
+        public Map<Integer, DiskInfo> getDiskInfo() {
+            Map<Integer, DiskInfo> map = new TreeMap<>();
+            Integer deviceId = Integer.parseInt(DEVICE_ID);
+            map.put(deviceId, new DiskInfo("8,16", DEVICE_NAME, true));
+            deviceId = Integer.parseInt(SECOND_DEVICE_ID);
+            map.put(deviceId, new DiskInfo("8,0", "sdb", true));
+            map.put(8388609, new DiskInfo("8,1", "sdb1", false));
+            return map;
+        }
     };
 
     /**
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/testfiles/io_analysis/io_two_devices.xml b/analysis/org.eclipse.tracecompass.analysis.os.linux.core.tests/testfiles/io_analysis/io_two_devices.xml
index 4da42cec514dd00b1b36f232d16ace34f36aadf3..504f949368c806a4ce0dc43297922473b6f93c98 100644 (file)
@@ -7,6 +7,11 @@
 <field name="dev" value="8388624" type="long" />
 <field name="diskname" value="sda" type="string" />
 </event>
+<event timestamp="4" name="statedump_block_device">
+<field name="cpu" value="1" type="int" />
+<field name="dev" value="8388609" type="long" />
+<field name="diskname" value="sdb1" type="string" />
+</event>
 <event timestamp="5" name="block_rq_insert">
 <field name="cpu" value="0" type="int" />
 <field name="dev" value="8388624" type="long" />
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/analysis/os/linux/core/inputoutput/Disk.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/analysis/os/linux/core/inputoutput/Disk.java
new file mode 100644 (file)
index 0000000..be80c80
--- /dev/null
@@ -0,0 +1,252 @@
+/*******************************************************************************
+ * Copyright (c) 2016 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+package org.eclipse.tracecompass.analysis.os.linux.core.inputoutput;
+
+import java.util.List;
+
+import org.eclipse.jdt.annotation.Nullable;
+import org.eclipse.tracecompass.common.core.NonNullUtils;
+import org.eclipse.tracecompass.internal.analysis.os.linux.core.Activator;
+import org.eclipse.tracecompass.statesystem.core.ITmfStateSystem;
+import org.eclipse.tracecompass.statesystem.core.StateSystemUtils;
+import org.eclipse.tracecompass.statesystem.core.exceptions.AttributeNotFoundException;
+import org.eclipse.tracecompass.statesystem.core.exceptions.StateSystemDisposedException;
+import org.eclipse.tracecompass.statesystem.core.interval.ITmfStateInterval;
+
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+
+/**
+ * This class represents a storage device in the system that behaves like a disk
+ * from the operating system's point of view. Concretely, it can be an HDD, an
+ * SSD, a USB key, etc.
+ *
+ * @author Geneviève Bastien
+ * @since 2.0
+ */
+public class Disk {
+
+    private static final HashFunction HF = NonNullUtils.checkNotNull(Hashing.goodFastHash(32));
+
+    private static final Integer MINORBITS = 20;
+    private static final Integer MINORMASK = ((1 << MINORBITS) - 1);
+
+    private final Integer fDev;
+    private final int fDiskQuark;
+    private final ITmfStateSystem fSs;
+    private @Nullable String fDiskName = null;
+
+    /**
+     * Constructor
+     *
+     * @param dev
+     *            The device number of the disk
+     * @param ss
+     *            The state system this disk will be saved to
+     * @param diskQuark
+     *            The quark of this disk in the state system
+     */
+    public Disk(Integer dev, ITmfStateSystem ss, int diskQuark) {
+        fDev = dev;
+        fSs = ss;
+        fDiskQuark = diskQuark;
+        ITmfStateInterval diskNameInterval = StateSystemUtils.queryUntilNonNullValue(ss, diskQuark, ss.getStartTime(), ss.getCurrentEndTime());
+        if (diskNameInterval != null) {
+            fDiskName = diskNameInterval.getStateValue().unboxStr();
+        }
+    }
+
+    /**
+     * Get the device ID of this disk
+     *
+     * @return The device ID of this disk
+     */
+    public Integer getDeviceId() {
+        return fDev;
+    }
+
+    /**
+     * Get the disk name if available. If the disk name is not set, this method
+     * will return the string corresponding to the major, minor value of the
+     * disk's ID, i.e. the return value of {@link #getDeviceIdString()}.
+     *
+     * @return The disk name or the value returned by
+     *         {@link #getDeviceIdString()}
+     */
+    public String getDiskName() {
+        String diskName = fDiskName;
+        if (diskName == null) {
+            return getDeviceIdString();
+        }
+        return diskName;
+    }
+
+    /**
+     * Get the quark
+     *
+     * @return The quark of this disk in the state system
+     */
+    public int getQuark() {
+        return fDiskQuark;
+    }
+
+    /**
+     * Set the human readable disk name of this device
+     *
+     * @param diskname
+     *            The human readable name of the disk
+     */
+    public void setDiskName(String diskname) {
+        fDiskName = diskname;
+    }
+
+    /**
+     * Return the disk's device ID as a major,minor string. These major and minor
+     * numbers correspond to those shown for the disk when listing devices with
+     * ls -al /dev or using lsblk on Linux.
+     *
+     * @return The device ID string as major,minor
+     */
+    public String getDeviceIdString() {
+        Integer major = fDev >> MINORBITS;
+        Integer minor = fDev & MINORMASK;
+        return major.toString() + ',' + minor.toString();
+    }
+
+    /**
+     * Get the total number of sectors either read or written up to a given
+     * timestamp. This method will interpolate the requests that are in
+     * progress at that time.
+     *
+     * @param ts
+     *            The timestamp at which to query
+     * @param type
+     *            The type of IO operation to query
+     * @return The number of sectors read or written by operations of that type
+     *         up to the requested timestamp
+     */
+    public long getSectorsAt(long ts, IoOperationType type) {
+
+        ITmfStateSystem ss = fSs;
+        long currentCount = 0;
+
+        /* Get the quark for the number of sectors for the requested operation */
+        int rwSectorQuark = ITmfStateSystem.INVALID_ATTRIBUTE;
+        if (type == IoOperationType.READ) {
+            rwSectorQuark = ss.optQuarkRelative(fDiskQuark, Attributes.SECTORS_READ);
+        } else if (type == IoOperationType.WRITE) {
+            rwSectorQuark = ss.optQuarkRelative(fDiskQuark, Attributes.SECTORS_WRITTEN);
+        }
+        if (rwSectorQuark == ITmfStateSystem.INVALID_ATTRIBUTE) {
+            return currentCount;
+        }
+
+        int rw = type == IoOperationType.READ ? StateValues.READING_REQUEST : StateValues.WRITING_REQUEST;
+
+        long time = Math.max(ts, ss.getStartTime());
+        time = Math.min(time, ss.getCurrentEndTime());
+
+        try {
+            List<ITmfStateInterval> states = ss.queryFullState(time);
+            long count = states.get(rwSectorQuark).getStateValue().unboxLong();
+            if (count == -1) {
+                count = 0;
+            }
+            Integer driverQ = ss.getQuarkRelative(fDiskQuark, Attributes.DRIVER_QUEUE);
+
+            /*
+             * Interpolate the portion of the requests in progress at the
+             * requested time
+             */
+            for (Integer driverSlotQuark : ss.getSubAttributes(driverQ, false)) {
+                int sizeQuark = ss.getQuarkRelative(driverSlotQuark, Attributes.REQUEST_SIZE);
+                ITmfStateInterval interval = states.get(sizeQuark);
+                if (!interval.getStateValue().isNull()) {
+                    if (states.get(driverSlotQuark).getStateValue().unboxInt() == rw) {
+                        /*
+                         * The request is fully completed (and included in the
+                         * r/w sectors) at interval end time + 1, so at interval
+                         * end time, the interpolated count is not yet the full size
+                         */
+                        long runningTime = interval.getEndTime() - interval.getStartTime() + 1;
+                        long runningEnd = interval.getEndTime() + 1;
+                        long startsize = interval.getStateValue().unboxLong();
+                        count = interpolateCount(count, time, runningEnd, runningTime, startsize);
+                    }
+                }
+            }
+            currentCount = count;
+        } catch (StateSystemDisposedException | AttributeNotFoundException e) {
+            Activator.getDefault().logError("Error getting disk IO Activity", e); //$NON-NLS-1$
+        }
+        return currentCount;
+    }
+
+    private static long interpolateCount(long count, long ts, long runningEnd, long runningTime, long size) {
+
+        long newCount = count;
+        if (runningTime > 0) {
+            long runningStart = runningEnd - runningTime;
+            if (ts < runningStart) {
+                return newCount;
+            }
+            double interpolation = (double) (ts - runningStart) * (double) size / (runningTime);
+            /* Will truncate the decimal part */
+            newCount += (long) interpolation;
+        }
+        return newCount;
+    }
+
+    /**
+     * Return whether requests were made on this disk during the trace or not
+     *
+     * @return {@code true} if there were requests on this disk, {@code false}
+     *         otherwise
+     */
+    public boolean hasActivity() {
+        try {
+            int wqQuark = fSs.getQuarkRelative(fDiskQuark, Attributes.WAITING_QUEUE);
+            if (fSs.getSubAttributes(wqQuark, false).size() > 0) {
+                return true;
+            }
+            int dqQuark = fSs.getQuarkRelative(fDiskQuark, Attributes.DRIVER_QUEUE);
+            if (fSs.getSubAttributes(dqQuark, false).size() > 0) {
+                return true;
+            }
+        } catch (AttributeNotFoundException e) { /* Queue attributes absent: no requests on this disk */
+        }
+        return false;
+    }
+
+    // ----------------------------------------------------
+    // Object methods
+    // ----------------------------------------------------
+
+    @Override
+    public String toString() {
+        return "Disk: [" + getDeviceIdString() + ',' + fDiskName + ']'; //$NON-NLS-1$
+    }
+
+    @Override
+    public int hashCode() {
+        return HF.newHasher().putInt(fDev).hash().asInt();
+    }
+
+    @Override
+    public boolean equals(@Nullable Object o) {
+        if (o instanceof Disk) {
+            Disk disk = (Disk) o;
+            if (fDev.equals(disk.fDev)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/analysis/os/linux/core/inputoutput/InputOutputInformationProvider.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/analysis/os/linux/core/inputoutput/InputOutputInformationProvider.java
new file mode 100644 (file)
index 0000000..ced9330
--- /dev/null
@@ -0,0 +1,57 @@
+/*******************************************************************************
+ * Copyright (c) 2016 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.analysis.os.linux.core.inputoutput;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.eclipse.tracecompass.statesystem.core.ITmfStateSystem;
+
+/**
+ * Utility methods to return data from an {@link InputOutputAnalysisModule}
+ * analysis.
+ *
+ * @author Geneviève Bastien
+ * @author Houssem Daoud
+ * @since 2.0
+ */
+public final class InputOutputInformationProvider {
+
+    private InputOutputInformationProvider() {
+
+    }
+
+    /**
+     * Get the disks for an input/output analysis module
+     *
+     * @param module
+     *            The analysis module
+     * @return A collection of disks from this analysis
+     */
+    public static Collection<Disk> getDisks(InputOutputAnalysisModule module) {
+        module.schedule();
+        if (!module.waitForInitialization()) {
+            return Collections.emptySet();
+        }
+        ITmfStateSystem ss = module.getStateSystem();
+        if (ss == null) {
+            throw new IllegalStateException("The state system should not be null at this point"); //$NON-NLS-1$
+        }
+
+        Set<Disk> disks = new HashSet<>();
+        for (Integer diskQuark : ss.getQuarks(Attributes.DISKS, "*")) { //$NON-NLS-1$
+            String devName = ss.getAttributeName(diskQuark);
+            disks.add(new Disk(Integer.parseInt(devName), ss, diskQuark));
+        }
+        return disks;
+    }
+}
diff --git a/analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/internal/analysis/os/linux/core/inputoutput/DiskWriteModel.java b/analysis/org.eclipse.tracecompass.analysis.os.linux.core/src/org/eclipse/tracecompass/internal/analysis/os/linux/core/inputoutput/DiskWriteModel.java
index 31f4e0d817e0f1f5248d9f7fb9df448235d1dcf9..a1309fef83f3a6adfe0f5105aab3d672df3c9527 100644 (file)
@@ -14,9 +14,9 @@ import java.util.Map;
 
 import org.eclipse.jdt.annotation.Nullable;
 import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.Attributes;
+import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.Disk;
 import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.IoOperationType;
 import org.eclipse.tracecompass.analysis.os.linux.core.inputoutput.StateValues;
-import org.eclipse.tracecompass.common.core.NonNullUtils;
 import org.eclipse.tracecompass.internal.analysis.os.linux.core.Activator;
 import org.eclipse.tracecompass.statesystem.core.ITmfStateSystem;
 import org.eclipse.tracecompass.statesystem.core.ITmfStateSystemBuilder;
@@ -29,9 +29,6 @@ import org.eclipse.tracecompass.tmf.core.statesystem.TmfAttributePool;
 import org.eclipse.tracecompass.tmf.core.statesystem.TmfAttributePool.QueueType;
 import org.eclipse.tracecompass.tmf.core.util.Pair;
 
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
-
 /**
  * Class that represents a disk on a system. This class provides operations to
  * save the analysis data in a state system.
@@ -39,18 +36,12 @@ import com.google.common.hash.Hashing;
  * @author Houssem Daoud
  * @since 2.0
  */
-public class DiskWriteModel {
-
-    private static final HashFunction HF = NonNullUtils.checkNotNull(Hashing.goodFastHash(32));
-
-    private final Integer fDev;
+public class DiskWriteModel extends Disk {
     private final Map<Long, Pair<Request, Integer>> fDriverQueue = new HashMap<>();
     private final Map<Long, Pair<Request, Integer>> fWaitingQueue = new HashMap<>();
     private final ITmfStateSystemBuilder fSs;
-    private final int fDiskQuark;
     private final TmfAttributePool fWaitingQueueAttrib;
     private final TmfAttributePool fDriverQueueAttrib;
-    private @Nullable String fDiskname = null;
 
     /**
      * Constructor
@@ -60,30 +51,26 @@ public class DiskWriteModel {
      * @param ss
      *            The state system this disk will be saved to
      */
-    public DiskWriteModel(Integer dev, ITmfStateSystemBuilder ss) {
-        fDev = dev;
+   public DiskWriteModel(Integer dev, ITmfStateSystemBuilder ss) {
+        super(dev, ss, ss.getQuarkAbsoluteAndAdd(Attributes.DISKS, String.valueOf(dev)));
         fSs = ss;
+        int diskQuark = getQuark();
         /* Initialize the state system for this disk */
-        fDiskQuark = fSs.getQuarkAbsoluteAndAdd(Attributes.DISKS, String.valueOf(dev));
-        fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.SECTORS_WRITTEN);
-        fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.SECTORS_READ);
-        int wqQuark = fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.WAITING_QUEUE);
+        fSs.getQuarkRelativeAndAdd(diskQuark, Attributes.SECTORS_WRITTEN);
+        fSs.getQuarkRelativeAndAdd(diskQuark, Attributes.SECTORS_READ);
+        int wqQuark = fSs.getQuarkRelativeAndAdd(diskQuark, Attributes.WAITING_QUEUE);
         fWaitingQueueAttrib = new TmfAttributePool(fSs, wqQuark, QueueType.PRIORITY);
-        fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.WAITING_QUEUE_LENGTH);
-        int dqQuark = fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.DRIVER_QUEUE);
+        fSs.getQuarkRelativeAndAdd(diskQuark, Attributes.WAITING_QUEUE_LENGTH);
+        int dqQuark = fSs.getQuarkRelativeAndAdd(diskQuark, Attributes.DRIVER_QUEUE);
         fDriverQueueAttrib = new TmfAttributePool(fSs, dqQuark, QueueType.PRIORITY);
-        fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.DRIVER_QUEUE_LENGTH);
+        fSs.getQuarkRelativeAndAdd(diskQuark, Attributes.DRIVER_QUEUE_LENGTH);
     }
 
-    /**
-     * Set the human readable disk name of this device
-     *
-     * @param diskname
-     *            The human readable name of the disk
-     */
+    @Override
     public void setDiskName(String diskname) {
+        super.setDiskName(diskname);
         try {
-            fSs.modifyAttribute(fSs.getCurrentEndTime(), TmfStateValue.newValueString(diskname), fDiskQuark);
+            fSs.modifyAttribute(fSs.getCurrentEndTime(), TmfStateValue.newValueString(diskname), getQuark());
         } catch (StateValueTypeException | AttributeNotFoundException e) {
             Activator.getDefault().logError("Cannot set the diskname for disk " + diskname, e); //$NON-NLS-1$
         }
@@ -312,11 +299,11 @@ public class DiskWriteModel {
         try {
             switch (request.getType()) {
             case READ:
-                int readQuark = fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.SECTORS_READ);
+                int readQuark = fSs.getQuarkRelativeAndAdd(getQuark(), Attributes.SECTORS_READ);
                 StateSystemBuilderUtils.incrementAttributeInt(fSs, ts, readQuark, request.getNrSector());
                 break;
             case WRITE:
-                int writtenQuark = fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.SECTORS_WRITTEN);
+                int writtenQuark = fSs.getQuarkRelativeAndAdd(getQuark(), Attributes.SECTORS_WRITTEN);
                 StateSystemBuilderUtils.incrementAttributeInt(fSs, ts, writtenQuark, request.getNrSector());
                 break;
             default:
@@ -370,49 +357,15 @@ public class DiskWriteModel {
         return fDriverQueue.size();
     }
 
-    /**
-     * Get the quark corresponding to this disk
-     *
-     * @return The quark in the state system of this disk
-     */
-    public int getDiskQuark() {
-        return fDiskQuark;
-    }
-
     private void updateQueuesLength(long ts) {
         try {
-            int fDriverQueueLength = fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.DRIVER_QUEUE_LENGTH);
+            int fDriverQueueLength = fSs.getQuarkRelativeAndAdd(getQuark(), Attributes.DRIVER_QUEUE_LENGTH);
             fSs.modifyAttribute(ts, TmfStateValue.newValueInt(getDriverQueueSize()), fDriverQueueLength);
-            int fWaitinQueueLength = fSs.getQuarkRelativeAndAdd(fDiskQuark, Attributes.WAITING_QUEUE_LENGTH);
+            int fWaitinQueueLength = fSs.getQuarkRelativeAndAdd(getQuark(), Attributes.WAITING_QUEUE_LENGTH);
             fSs.modifyAttribute(ts, TmfStateValue.newValueInt(getWaitingQueueSize()), fWaitinQueueLength);
         } catch (StateValueTypeException | AttributeNotFoundException e) {
             Activator.getDefault().logError("Error updating queues lengths", e); //$NON-NLS-1$
         }
     }
 
-    // ----------------------------------------------------
-    // Object methods
-    // ----------------------------------------------------
-
-    @Override
-    public int hashCode() {
-        return HF.newHasher().putInt(fDev).hash().asInt();
-    }
-
-    @Override
-    public boolean equals(@Nullable Object o) {
-        if (o instanceof DiskWriteModel) {
-            DiskWriteModel disk = (DiskWriteModel) o;
-            if (fDev.equals(disk.fDev)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public String toString() {
-        return "Disk: [" + fDev + ',' + fDiskname + ']'; //$NON-NLS-1$
-    }
-
 }