/*******************************************************************************
 * Copyright (c) 2011, 2013 Ericsson
 *
 * All rights reserved. This program and the accompanying materials are
 * made available under the terms of the Eclipse Public License v1.0 which
 * accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *   Francois Chouinard - Initial API and implementation
 *   Bernd Hufmann - Adapt to junit.framework.TestCase
 *   Alexandre Montplaisir - Port to JUnit4
 *   Patrick Tasse - Support selection range
 *******************************************************************************/
package org.eclipse.linuxtools.tmf.ui.tests.histogram;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import org.eclipse.linuxtools.tmf.core.timestamp.ITmfTimestamp;
import org.eclipse.linuxtools.tmf.core.timestamp.TmfTimeRange;
import org.eclipse.linuxtools.tmf.core.timestamp.TmfTimestamp;
import org.eclipse.linuxtools.tmf.ui.views.histogram.HistogramDataModel;
import org.eclipse.linuxtools.tmf.ui.views.histogram.HistogramScaledData;
import org.eclipse.linuxtools.tmf.ui.views.histogram.IHistogramModelListener;
import org.junit.Test;
31 * Unit tests for the HistogramDataModel class.
33 public class HistogramDataModelTest
{
35 private static final double DELTA
= 1e-15;
38 * Test method for {@link HistogramDataModel#HistogramDataModel()}.
41 public void testHistogramDataModel() {
42 HistogramDataModel model
= new HistogramDataModel();
43 testModelConsistency(model
, HistogramDataModel
.DEFAULT_NUMBER_OF_BUCKETS
,0, 1, 0 , 0 , 0 , HistogramDataModel
.DEFAULT_NUMBER_OF_BUCKETS
);
47 * Test method for {@link HistogramDataModel#HistogramDataModel(int)}.
50 public void testHistogramDataModelInt() {
51 final int nbBuckets
= 5 * 1000;
52 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
53 testModelConsistency(model
, nbBuckets
, 0, 1, 0, 0, 0, nbBuckets
);
57 * Test methods for {@link HistogramDataModel#countEvent(long,long)}.
60 public void testClear() {
61 final int nbBuckets
= 100;
62 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
63 model
.countEvent(0, -1);
65 testModelConsistency(model
, nbBuckets
, 0, 1, 0, 0, 0, nbBuckets
);
69 * Test methods for {@link HistogramDataModel#countEvent(long,long)}.
72 public void testCountEvent_0() {
73 final int nbBuckets
= 100;
74 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
75 model
.countEvent(0, -1);
77 testModelConsistency(model
, nbBuckets
, 0, 1, 0, 0, 0, nbBuckets
);
81 * Test methods for {@link HistogramDataModel#countEvent(long,long)} and
82 * {@link HistogramDataModel#scaleTo(int,int,int)}.
85 public void testCountEvent_1() {
86 final int nbBuckets
= 100;
87 final int maxHeight
= 10;
89 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
91 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
93 for (int i
= 0; i
< result
.fData
.length
; i
++) {
94 assertEquals(0, result
.fData
[i
]);
97 testModelConsistency(model
, nbBuckets
, 0, 1, 0, 0, 0, nbBuckets
);
101 * Test methods for {@link HistogramDataModel#countEvent(long,long)} and
102 * {@link HistogramDataModel#scaleTo(int,int,int)}.
105 public void testCountEvent_2() {
106 final int nbBuckets
= 100;
107 final int maxHeight
= 10;
109 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
110 model
.countEvent(0, 1);
112 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
113 assertEquals(1, result
.fData
[0]);
115 assertArrayEqualsInt(0, result
.fData
,1);
117 testModelConsistency(model
, nbBuckets
, 1, 1, 1, 1, 1, nbBuckets
+ 1);
121 * Test methods for {@link HistogramDataModel#countEvent(long,long)} and
122 * {@link HistogramDataModel#scaleTo(int,int,int)}.
125 public void testCountEvent_3() {
126 final int nbBuckets
= 100;
127 final int maxHeight
= 10;
129 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
130 countEventsInModel(nbBuckets
, model
);
132 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
134 assertArrayEqualsInt(1, result
.fData
);
136 testModelConsistency(model
, nbBuckets
, nbBuckets
, 1, 0, 0, nbBuckets
- 1, nbBuckets
);
140 * Test methods for {@link HistogramDataModel#countEvent(long,long)} and
141 * {@link HistogramDataModel#scaleTo(int,int,int)}.
144 public void testCountEvent_4() {
145 final int nbBuckets
= 100;
146 final int maxHeight
= 10;
148 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
149 // to different to call elsewhere
150 for (int i
= 0; i
< nbBuckets
; i
++) {
151 model
.countEvent(i
, i
);
152 model
.countEvent(i
+ 1, i
);
155 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
157 assertArrayEqualsInt(2, result
.fData
);
159 testModelConsistency(model
, nbBuckets
, 2 * nbBuckets
, 1, 0, 0, nbBuckets
- 1, nbBuckets
);
164 * Test methods for {@link HistogramDataModel#countEvent(long,long)} and
165 * {@link HistogramDataModel#scaleTo(int,int,int)}.
168 public void testCountEvent_5() {
169 final int nbBuckets
= 100;
170 final int startTime
= 25;
171 final int maxHeight
= 10;
173 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
174 for (int i
= startTime
; i
< startTime
+ nbBuckets
; i
++) {
175 model
.countEvent(i
, i
);
178 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
180 assertArrayEqualsInt(1, result
.fData
);
182 testModelConsistency(model
, nbBuckets
, nbBuckets
, 1, startTime
, startTime
, startTime
+ nbBuckets
- 1, startTime
+ nbBuckets
);
186 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
189 public void testScaleTo_0() {
190 HistogramDataModel model
= new HistogramDataModel(10);
192 model
.scaleTo(10, 0, 1);
193 } catch (AssertionError e1
) {
195 model
.scaleTo(0, 10, 1);
196 } catch (AssertionError e2
) {
198 model
.scaleTo(0, 0, 1);
199 } catch (AssertionError e3
) {
204 fail("Uncaught assertion error");
208 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
211 public void testScaleTo_1() {
212 final int nbBuckets
= 10;
213 final int maxHeight
= 10;
214 final int nbEvents
= nbBuckets
/ 2;
215 final int[] expectedResult
= new int[] { 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
217 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
218 countEventsInModel(nbEvents
, model
);
220 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
222 assertArrayEquals( expectedResult
, result
.fData
);
224 testModelConsistency(model
, nbBuckets
, nbEvents
, 1, 0, 0, nbEvents
- 1, nbBuckets
);
228 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
231 public void testScaleTo_2() {
232 final int nbBuckets
= 10;
233 final int maxHeight
= 10;
234 final int nbEvents
= nbBuckets
;
235 final int[] expectedResult
= new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
237 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
238 countEventsInModel(nbEvents
, model
);
240 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
242 assertArrayEquals(expectedResult
, result
.fData
);
244 testModelConsistency(model
, nbBuckets
, nbEvents
, 1, 0, 0, nbEvents
- 1, nbBuckets
);
248 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
251 public void testScaleTo_3() {
252 final int nbBuckets
= 10;
253 final int maxHeight
= 10;
254 final int nbEvents
= 2 * nbBuckets
;
255 final int[] expectedResult
= new int[] { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 };
257 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
258 countEventsInModel(nbEvents
, model
);
260 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
262 assertArrayEquals(expectedResult
, result
.fData
);
264 testModelConsistency(model
, nbBuckets
, nbEvents
, 2, 0, 0, nbEvents
- 1, 2 * nbBuckets
);
268 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
271 public void testScaleTo_4() {
272 final int nbBuckets
= 10;
273 final int maxHeight
= 10;
274 final int nbEvents
= 3 * nbBuckets
;
275 final int[] expectedResult
= new int[] { 4, 4, 4, 4, 4, 4, 4, 2, 0, 0 };
277 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
278 countEventsInModel(nbEvents
, model
);
280 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
282 assertArrayEquals(expectedResult
, result
.fData
);
284 testModelConsistency(model
, nbBuckets
, nbEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
288 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
291 public void testScaleTo_5() {
292 final int nbBuckets
= 100;
293 final int maxHeight
= 20;
294 final int nbEvents
= 2 * nbBuckets
;
295 final int[] expectedResult
= new int[] { 20, 20, 20, 20, 20, 20, 20, 20, 20, 20 };
297 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
298 countEventsInModel(nbEvents
, model
);
300 HistogramScaledData result
= model
.scaleTo(10, maxHeight
, 1);
302 assertArrayEquals(expectedResult
, result
.fData
);
304 testModelConsistency(model
, nbBuckets
, nbEvents
, 2, 0, 0, nbEvents
- 1, 2 * nbBuckets
);
308 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
311 public void testScaleTo_6() {
312 final int nbBuckets
= 100;
313 final int maxHeight
= 24;
314 final int nbEvents
= 2 * nbBuckets
+ 1;
315 final int[] expectedResult
= new int[] { 24, 24, 24, 24, 24, 24, 24, 24, 9, 0 };
317 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
318 countEventsInModel(nbEvents
, model
);
320 HistogramScaledData result
= model
.scaleTo(10, maxHeight
, 1);
322 assertArrayEquals(expectedResult
, result
.fData
);
324 testModelConsistency(model
, nbBuckets
, nbEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
328 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
331 public void testScaleTo_7() {
332 // verify scaleTo with barWidth > 1
333 final int nbBuckets
= 100;
334 final int maxHeight
= 24;
335 final int width
= 10;
336 final int barWidth
= 4;
337 final int nbEvents
= 2 * nbBuckets
+ 1;
339 // (int)(width / barWith) = 2
340 // -> 2 bars -> expected result needs two buckets (scaled data)
342 // buckets (in model) per bar = last bucket id / nbBars + 1 (plus 1 to
343 // cover all used buckets)
344 // -> buckets per bar = 50 / 2 + 1 = 26
345 // -> first entry in expected result is 26 * 4 = 104
346 // -> second entry in expected result is 22 * 4 + 9 = 97
347 final int[] expectedResult
= new int[] { 104, 97 };
349 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
350 countEventsInModel(nbEvents
, model
);
352 // verify scaled data
353 HistogramScaledData result
= model
.scaleTo(width
, maxHeight
, barWidth
);
355 assertEquals(4 * 26, result
.fBucketDuration
);
356 assertEquals(0, result
.fSelectionBeginBucket
);
357 assertEquals(0, result
.fSelectionEndBucket
);
358 assertEquals(0, result
.fFirstBucketTime
);
359 assertEquals(0, result
.fFirstEventTime
);
360 assertEquals(1, result
.fLastBucket
);
361 assertEquals(104, result
.fMaxValue
);
362 assertEquals((double) maxHeight
/ 104, result
.fScalingFactor
, DELTA
);
363 assertEquals(maxHeight
, result
.fHeight
);
364 assertEquals(width
, result
.fWidth
);
365 assertEquals(barWidth
, result
.fBarWidth
);
367 assertArrayEquals(expectedResult
, result
.fData
);
370 testModelConsistency(model
, nbBuckets
, nbEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
374 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
377 public void testScaleToReverse_1() {
378 final int nbBuckets
= 100;
379 final int maxHeight
= 24;
380 final int width
= 10;
381 final int barWidth
= 1;
382 final int nbEvents
= 2 * nbBuckets
+ 1;
384 // (int)(width / barWith) = 10
385 // -> 10 bars -> expected result needs 10 buckets (scaled data)
387 // buckets in (model) per bar = last bucket id / nbBars + 1 (plus 1 to
388 // cover all used buckets)
389 // -> buckets per bar = 50 / 10 + 1 = 6
390 final int[] expectedResult
= new int[] { 21, 24, 24, 24, 24, 24, 24, 24, 12, 0 };
392 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
393 countInvertedEvents(nbEvents
, model
);
395 // verify scaled data
396 HistogramScaledData result
= model
.scaleTo(width
, maxHeight
, barWidth
);
398 assertEquals(4 * 6, result
.fBucketDuration
);
399 assertEquals(0, result
.fSelectionBeginBucket
);
400 assertEquals(0, result
.fSelectionEndBucket
);
401 assertEquals(-3, result
.fFirstBucketTime
); // negative is correct, can
402 // happen when reverse
403 assertEquals(0, result
.fFirstEventTime
);
404 assertEquals(9, result
.fLastBucket
);
405 assertEquals(24, result
.fMaxValue
);
406 assertEquals((double) maxHeight
/ 24, result
.fScalingFactor
, DELTA
);
407 assertEquals(maxHeight
, result
.fHeight
);
408 assertEquals(width
, result
.fWidth
);
409 assertEquals(barWidth
, result
.fBarWidth
);
411 assertArrayEquals(expectedResult
, result
.fData
);
414 testModelConsistency(model
, nbBuckets
, nbEvents
, 4, -3, 0, nbEvents
- 1, -3 + 4 * nbBuckets
);
417 private static void countInvertedEvents(final int nbEvents
, HistogramDataModel model
) {
418 for (int i
= nbEvents
- 1; i
>= 0; i
--) {
419 model
.countEvent(i
, i
);
424 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
427 public void testScaleToReverse_2() {
428 final int nbBuckets
= 100;
429 final int maxHeight
= 24;
430 final int width
= 10;
431 final int barWidth
= 1;
433 final int nbEvents
= 2 * nbBuckets
;
435 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
436 countEventsInModel(nbEvents
, model
);
438 HistogramScaledData result
= model
.scaleTo(width
, maxHeight
, barWidth
);
442 countInvertedEvents(nbEvents
, model
);
444 HistogramScaledData revResult
= model
.scaleTo(width
, maxHeight
, barWidth
);
446 testModelConsistency(model
, nbBuckets
, nbEvents
, 2, 0, 0, nbEvents
- 1, 2 * nbBuckets
);
448 // For the above number of events, result and revResult are exactly the same.
450 assertEquals(result
.fBucketDuration
, revResult
.fBucketDuration
);
451 assertEquals(result
.fSelectionBeginBucket
, revResult
.fSelectionBeginBucket
);
452 assertEquals(result
.fSelectionEndBucket
, revResult
.fSelectionEndBucket
);
453 assertEquals(result
.fFirstBucketTime
, revResult
.fFirstBucketTime
);
454 assertEquals(result
.fMaxValue
, revResult
.fMaxValue
);
455 assertEquals(result
.fScalingFactor
, revResult
.fScalingFactor
, DELTA
);
456 assertEquals(result
.fLastBucket
, revResult
.fLastBucket
);
457 assertEquals(result
.getBucketEndTime(0), revResult
.getBucketEndTime(0));
458 assertEquals(result
.getBucketStartTime(0), revResult
.getBucketStartTime(0));
460 assertArrayEquals(revResult
.fData
, result
.fData
);
464 * Test method for testing model listener.
467 public void testModelListener() {
468 final int nbBuckets
= 2000;
469 final int nbEvents
= 10 * nbBuckets
+ 256;
470 final int[] count
= new int[1];
473 // Test add listener and call of listener
474 IHistogramModelListener listener
= new IHistogramModelListener() {
476 public void modelUpdated() {
481 // Test that the listener interface is called every 16000 events.
482 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
483 model
.addHistogramListener(listener
);
485 countEventsInModel(nbEvents
, model
, 1);
487 assertEquals(1, count
[0]);
489 // Test that the listener interface is called when complete is called.
491 assertEquals(2, count
[0]);
493 // Test that clear triggers call of listener interface
495 assertEquals(3, count
[0]);
497 // Test remove listener
499 model
.removeHistogramListener(listener
);
501 countEventsInModel(nbEvents
, model
);
503 assertEquals(0, count
[0]);
507 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
510 public void testLostEventsScaleTo_0() {
511 final int nbBuckets
= 10;
512 final int maxHeight
= 10;
513 final int nbEvents
= 3 * nbBuckets
;
514 final int nbLostEvents_0
= 4;
515 final int nbLostEvents_1
= 9;
516 final int nbCombinedEvents
= nbEvents
+ 2;
517 final int[] expectedResult
= new int[] { 4, 4, 4, 4, 4, 4, 4, 2, 0, 0 };
518 final int[] expectedLostEventsResult
= new int[] { 0, 2, 2, 0, 3, 3, 3, 0, 0, 0 };
520 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
521 countEventsInModel(nbEvents
, model
);
523 final TmfTimeRange timeRange_0
= new TmfTimeRange(
524 new TmfTimestamp(5L, ITmfTimestamp
.NANOSECOND_SCALE
),
525 new TmfTimestamp(10L, ITmfTimestamp
.NANOSECOND_SCALE
));
526 model
.countLostEvent(timeRange_0
, nbLostEvents_0
, false);
528 final TmfTimeRange timeRange_1
= new TmfTimeRange(
529 new TmfTimestamp(18L, ITmfTimestamp
.NANOSECOND_SCALE
),
530 new TmfTimestamp(27L, ITmfTimestamp
.NANOSECOND_SCALE
));
531 model
.countLostEvent(timeRange_1
, nbLostEvents_1
, false);
533 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
535 assertArrayEquals(expectedResult
, result
.fData
);
537 assertArrayEquals(expectedLostEventsResult
, result
.fLostEventsData
);
539 testModelConsistency(model
, nbBuckets
, nbCombinedEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
540 assertEquals(7, result
.fMaxCombinedValue
);
544 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
547 public void testLostEventsScaleTo_1() {
548 final int nbBuckets
= 10;
549 final int maxHeight
= 10;
550 final int nbEvents
= 3 * nbBuckets
;
551 final int nbLostEvents_0
= 4;
552 final int nbLostEvents_1
= 9;
553 final int nbCombinedEvents
= nbEvents
+ 2;
554 final int[] expectedLostEventsResult
= new int[] { 0, 2, 5, 3, 3, 0, 0, 0, 0, 0 };
556 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
557 countEventsInModel(nbEvents
, model
);
559 final TmfTimeRange timeRange_0
= new TmfTimeRange(
560 new TmfTimestamp(5L, ITmfTimestamp
.NANOSECOND_SCALE
),
561 new TmfTimestamp(10L, ITmfTimestamp
.NANOSECOND_SCALE
));
562 model
.countLostEvent(timeRange_0
, nbLostEvents_0
, false);
564 final TmfTimeRange timeRange_1
= new TmfTimeRange(
565 new TmfTimestamp(11L, ITmfTimestamp
.NANOSECOND_SCALE
),
566 new TmfTimestamp(18L, ITmfTimestamp
.NANOSECOND_SCALE
));
567 model
.countLostEvent(timeRange_1
, nbLostEvents_1
, false);
569 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
571 assertArrayEquals(expectedLostEventsResult
, result
.fLostEventsData
);
573 testModelConsistency(model
, nbBuckets
, nbCombinedEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
574 assertEquals(9, result
.fMaxCombinedValue
);
578 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
581 public void testLostEventsScaleTo_2() {
582 final int nbBuckets
= 10;
583 final int maxHeight
= 10;
584 final int nbEvents
= 3 * nbBuckets
;
585 final int nbLostEvents_0
= 5;
586 final int nbLostEvents_1
= 15;
587 final int nbLostEvents_2
= 2;
588 final int nbCombinedEvents
= nbEvents
+ 3;
589 final int[] expectedLostEventsResult
= new int[] { 0, 0, 3, 3, 6, 5, 3, 2, 0, 0 };
591 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
592 countEventsInModel(nbEvents
, model
);
594 final TmfTimeRange timeRange_0
= new TmfTimeRange(
595 new TmfTimestamp(18L, ITmfTimestamp
.NANOSECOND_SCALE
),
596 new TmfTimestamp(22L, ITmfTimestamp
.NANOSECOND_SCALE
));
597 model
.countLostEvent(timeRange_0
, nbLostEvents_0
, false);
599 final TmfTimeRange timeRange_2
= new TmfTimeRange(
600 new TmfTimestamp(28L, ITmfTimestamp
.NANOSECOND_SCALE
),
601 new TmfTimestamp(29L, ITmfTimestamp
.NANOSECOND_SCALE
));
602 model
.countLostEvent(timeRange_2
, nbLostEvents_2
, false);
604 final TmfTimeRange timeRange_1
= new TmfTimeRange(
605 new TmfTimestamp(11L, ITmfTimestamp
.NANOSECOND_SCALE
),
606 new TmfTimestamp(26L, ITmfTimestamp
.NANOSECOND_SCALE
));
607 model
.countLostEvent(timeRange_1
, nbLostEvents_1
, false);
609 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
611 assertArrayEquals(expectedLostEventsResult
, result
.fLostEventsData
);
613 testModelConsistency(model
, nbBuckets
, nbCombinedEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
614 assertEquals(10, result
.fMaxCombinedValue
);
618 * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}.
621 public void testLostEventsScaleTo_3() {
622 final int nbBuckets
= 10;
623 final int maxHeight
= 10;
624 final int nbEvents
= 3 * nbBuckets
;
625 final int nbLostEvents_0
= 23;
626 final int nbCombinedEvents
= nbEvents
+ 1;
627 final int[] expectedLostEventsResult
= new int[] { 0, 0, 5, 5, 5, 5, 3, 0, 0, 0 };
629 HistogramDataModel model
= new HistogramDataModel(nbBuckets
);
630 countEventsInModel(nbEvents
, model
);
632 final TmfTimeRange timeRange_0
= new TmfTimeRange(
633 new TmfTimestamp(11L, ITmfTimestamp
.NANOSECOND_SCALE
),
634 new TmfTimestamp(26L, ITmfTimestamp
.NANOSECOND_SCALE
));
635 model
.countLostEvent(timeRange_0
, nbLostEvents_0
, false);
637 HistogramScaledData result
= model
.scaleTo(nbBuckets
, maxHeight
, 1);
639 assertArrayEquals(expectedLostEventsResult
, result
.fLostEventsData
);
641 testModelConsistency(model
, nbBuckets
, nbCombinedEvents
, 4, 0, 0, nbEvents
- 1, 4 * nbBuckets
);
642 assertEquals(9, result
.fMaxCombinedValue
);
649 private static void countEventsInModel(final int nbEvents
, HistogramDataModel model
) {
650 countEventsInModel(nbEvents
, model
, 0);
653 private static void countEventsInModel(final int nbEvents
, HistogramDataModel model
, int offset
) {
654 countEventsInModel(nbEvents
, model
, offset
, 0);
657 private static void countEventsInModel(final int nbEvents
, HistogramDataModel model
, int offset
, int startTime
) {
658 for (int i
= startTime
; i
< nbEvents
+ startTime
; i
++) {
659 model
.countEvent(i
+ offset
, i
);
663 private static void testModelConsistency(HistogramDataModel model
, int numberOfBuckets
,int nbEvents
, int bucketduration
,int firstBucketTime
, int startTime
, int endTime
, int timeLimit
) {
664 assertEquals(numberOfBuckets
, model
.getNbBuckets());
665 assertEquals(nbEvents
, model
.getNbEvents());
666 assertEquals(bucketduration
, model
.getBucketDuration());
667 assertEquals(firstBucketTime
, model
.getFirstBucketTime());
668 assertEquals(startTime
, model
.getStartTime());
669 assertEquals(endTime
, model
.getEndTime());
670 assertEquals(timeLimit
, model
.getTimeLimit());
673 private static void assertArrayEqualsInt(final int val
, int[] result
) {
674 assertArrayEqualsInt(val
, result
, 0);
677 private static void assertArrayEqualsInt(final int val
, int[] result
, int startVal
) {
678 for (int i
= startVal
; i
< result
.length
; i
++) {
679 assertEquals(val
, result
[i
]);