storage.test.cc
1/*
2 * Copyright (c) 2021 Daniel R. Carvalho
3 * All rights reserved
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are
7 * met: redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer;
9 * redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution;
12 * neither the name of the copyright holders nor the names of its
13 * contributors may be used to endorse or promote products derived from
14 * this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29#include <gtest/gtest-spi.h>
30#include <gtest/gtest.h>
31
32#include <cmath>
33
34#include "base/gtest/cur_tick_fake.hh"
35#include "base/gtest/logging.hh"
36#include "base/stats/storage.hh"
37
38using namespace gem5;
39
40// Instantiate the fake class to have a valid curTick of 0
41GTestTickHandler tickHandler;
42
43/** Increases the current tick by one. */
44void increaseTick() { tickHandler.setCurTick(curTick() + 1); }
45
46/** A pair of value and its number of samples, used for sampling. */
47struct ValueSamples
48{
49 statistics::Counter value;
50 statistics::Counter numSamples;
51
52 ValueSamples(statistics::Counter value, statistics::Counter num_samples)
53 : value(value), numSamples(num_samples)
54 {
55 }
56};
57
58/** Test setting and getting a value to the storage. */
59TEST(StatsStatStorTest, SetValueResult)
60{
61 statistics::StatStor stor(nullptr);
62 statistics::Counter val = 0;
63
64 val = 10;
65 stor.set(val);
66 ASSERT_EQ(stor.value(), val);
67 ASSERT_EQ(stor.result(), statistics::Result(val));
68
69 val = 1234;
70 stor.set(val);
71 ASSERT_EQ(stor.value(), val);
72 ASSERT_EQ(stor.result(), statistics::Result(val));
73}
74
76TEST(StatsStatStorTest, Prepare)
77{
78 statistics::StatStor stor(nullptr);
79 statistics::Counter val = 0;
80
81 val = 10;
82 stor.set(val);
83 stor.prepare(nullptr);
84 ASSERT_EQ(stor.value(), val);
85 ASSERT_EQ(stor.result(), statistics::Result(val));
86}
87
89TEST(StatsStatStorTest, IncDec)
90{
91 statistics::StatStor stor(nullptr);
92 statistics::Counter diff_val = 10;
93 statistics::Counter val = 0;
94
95 stor.inc(diff_val);
96 val += diff_val;
97 ASSERT_EQ(stor.value(), val);
98
99 stor.inc(diff_val);
100 val += diff_val;
101 ASSERT_EQ(stor.value(), val);
102
103 stor.dec(diff_val);
104 val -= diff_val;
105 ASSERT_EQ(stor.value(), val);
106
107 stor.dec(diff_val);
108 val -= diff_val;
109 ASSERT_EQ(stor.value(), val);
110}
111
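112/**
113 * Test that a simple scalar storage is zero on creation and after a reset,
114 * and that it stops being zero once a value is added.
115 */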
117TEST(StatsStatStorTest, ZeroReset)
118{
119 statistics::StatStor stor(nullptr);
120 statistics::Counter val = 10;
121
122 ASSERT_TRUE(stor.zero());
123
124 stor.reset(nullptr);
125 ASSERT_TRUE(stor.zero());
126
127 stor.reset(nullptr);
128 stor.inc(val);
129 ASSERT_FALSE(stor.zero());
130}
131
133TEST(StatsAvgStorTest, SetValueResult)
134{
135 statistics::AvgStor stor(nullptr);
136 statistics::Counter val = 0;
137 statistics::Result total = 0;
138 Tick last_reset = 0;
139 Tick last_tick = 0;
140
141 val = 10;
142 stor.set(val);
143 last_tick = curTick();
144 ASSERT_EQ(stor.value(), val);
145 ASSERT_EQ(stor.result(), statistics::Result(total + val) /
146 statistics::Result(curTick() - last_reset + 1));
147 increaseTick();
148
149 total += val * (curTick() - last_tick);
150 val = 1234;
151 stor.set(val);
152 last_tick = curTick();
153 ASSERT_EQ(stor.value(), val);
154 ASSERT_EQ(stor.result(), statistics::Result(total + val) /
155 statistics::Result(curTick() - last_reset + 1));
156 increaseTick();
157}
158
159#if TRACING_ON
163TEST(StatsAvgStorDeathTest, Result)
164{
165 statistics::AvgStor stor(nullptr);
166 increaseTick();
167 ASSERT_DEATH(stor.result(), ".+");
168}
169#endif
170
175TEST(StatsAvgStorTest, Prepare)
176{
177 statistics::AvgStor stor(nullptr);
178 statistics::Counter val = 0;
179 statistics::Result total = 0;
180 Tick last_reset = 0;
181 Tick last_tick = 0;
182
183 val = 10;
184 stor.set(val);
185 last_tick = curTick();
186 ASSERT_EQ(stor.value(), val);
187 ASSERT_EQ(stor.result(), statistics::Result(total + val) /
188 statistics::Result(curTick() - last_reset + 1));
189 increaseTick();
190
191 total += val * (curTick() - last_tick);
192 stor.prepare(nullptr);
193 last_tick = curTick();
194 ASSERT_EQ(stor.value(), val);
195 ASSERT_EQ(stor.result(), statistics::Result(total + val) /
196 statistics::Result(curTick() - last_reset + 1));
197 increaseTick();
198}
199
201TEST(StatsAvgStorTest, IncDec)
202{
203 statistics::AvgStor stor(nullptr);
204 statistics::Counter diff_val = 10;
205 statistics::Counter val = 0;
206
207 stor.set(diff_val);
208 val += diff_val;
209 ASSERT_EQ(stor.value(), val);
210
211 stor.inc(diff_val);
212 val += diff_val;
213 ASSERT_EQ(stor.value(), val);
214
215 stor.inc(diff_val);
216 val += diff_val;
217 ASSERT_EQ(stor.value(), val);
218
219 stor.dec(diff_val);
220 val -= diff_val;
221 ASSERT_EQ(stor.value(), val);
222
223 stor.dec(diff_val);
224 val -= diff_val;
225 ASSERT_EQ(stor.value(), val);
226}
227
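228/**
229 * Test that the per-tick average storage is zero on creation and after a
230 * reset, and becomes non-zero once a value is added on a later tick.
231 */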
233TEST(StatsAvgStorTest, ZeroReset)
234{
235 statistics::AvgStor stor(nullptr);
236 statistics::Counter val = 10;
237
238 ASSERT_TRUE(stor.zero());
239
240 stor.reset(nullptr);
241 ASSERT_TRUE(stor.zero());
242
243 // Set current value to val, reset total and increase tick, so that the
244 // next call to set will update the total to be different from zero
245 stor.inc(val);
246 stor.reset(nullptr);
247 increaseTick();
248 stor.inc(val);
249 ASSERT_FALSE(stor.zero());
250}
251
252#if TRACING_ON
254TEST(StatsDistStorDeathTest, BucketSize0)
255{
256 EXPECT_ANY_THROW(statistics::DistStor::Params params(0, 5, 0));
257}
258#endif
259
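260/**
261 * Test that the distribution storage is zero on creation, non-zero after
262 * sampling, and zero again after a reset.
263 */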
265TEST(StatsDistStorTest, ZeroReset)
266{
267 statistics::DistStor::Params params(0, 99, 10);
268 statistics::DistStor stor(&params);
269 statistics::Counter val = 10;
270 statistics::Counter num_samples = 5;
271
272 ASSERT_TRUE(stor.zero());
273
274 stor.reset(&params);
275 stor.sample(val, num_samples);
276 ASSERT_FALSE(stor.zero());
277
278 stor.reset(&params);
279 ASSERT_TRUE(stor.zero());
280}
281
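282/**
283 * Test that size() always matches the number of buckets implied by the
284 * params (min 0, max 19, bucket size 1), regardless of other operations.
285 */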
286TEST(StatsDistStorTest, Size)
287{
288 statistics::Counter val = 10;
289 statistics::Counter num_samples = 5;
290 statistics::Counter size = 20;
291 statistics::DistData data;
292
293 statistics::DistStor::Params params(0, 19, 1);
294 statistics::DistStor stor(&params);
295
296 ASSERT_EQ(stor.size(), size);
297 stor.sample(val, num_samples);
298 ASSERT_EQ(stor.size(), size);
299 stor.prepare(&params, data);
300 ASSERT_EQ(stor.size(), size);
301 stor.reset(&params);
302 ASSERT_EQ(stor.size(), size);
303 stor.zero();
304 ASSERT_EQ(stor.size(), size);
305}
306
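307/**
308 * Compare both dist datas to see if their contents match.
309 *
310 * @param data The data being tested.
311 * @param expected_data The data that should be seen.
312 * @param no_log Whether comparison of the logs field should be skipped.
313 */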
314void
315checkExpectedDistData(const statistics::DistData& data,
316 const statistics::DistData& expected_data, bool no_log = true)
317{
318 ASSERT_EQ(data.type, expected_data.type);
319 ASSERT_EQ(data.min, expected_data.min);
320 ASSERT_EQ(data.max, expected_data.max);
321 ASSERT_EQ(data.bucket_size, expected_data.bucket_size);
322 ASSERT_EQ(data.min_val, expected_data.min_val);
323 ASSERT_EQ(data.max_val, expected_data.max_val);
324 ASSERT_EQ(data.sum, expected_data.sum);
325 ASSERT_EQ(data.squares, expected_data.squares);
326 if (!no_log) {
327 ASSERT_EQ(data.logs, expected_data.logs);
328 }
329 ASSERT_EQ(data.samples, expected_data.samples);
330 ASSERT_EQ(data.cvec.size(), expected_data.cvec.size());
331 for (int i = 0; i < expected_data.cvec.size(); i++) {
332 ASSERT_EQ(data.cvec[i], expected_data.cvec[i]);
333 }
334}
335
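336/**
337 * Auxiliary function that finishes preparing the DistStor's expected
338 * values, performs the calls to sample and prepare, and then compares the
339 * resulting data against the expected data.
340 */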
346void
347prepareCheckDistStor(statistics::DistStor::Params& params,
348 ValueSamples* values, int num_values, statistics::DistData& expected_data)
349{
350 statistics::DistStor stor(&params);
351
352 statistics::Counter val;
353 statistics::DistData data;
354
355 expected_data.min = params.min;
356 expected_data.max = params.max;
357 expected_data.sum = 0;
358 expected_data.squares = 0;
359 expected_data.logs = 0;
360 expected_data.samples = 0;
361
362 // Populate storage with more data
363 for (int i = 0; i < num_values; i++) {
364 stor.sample(values[i].value, values[i].numSamples);
365
366 val = values[i].value * values[i].numSamples;
367 expected_data.sum += val;
368 expected_data.squares += values[i].value * val;
369 expected_data.samples += values[i].numSamples;
370 }
371 stor.prepare(&params, data);
372
373 // DistStor does not use log
374 checkExpectedDistData(data, expected_data, true);
375}
376
378TEST(StatsDistStorTest, SamplePrepareSingle)
379{
380 statistics::DistStor::Params params(0, 99, 5);
381
382 ValueSamples values[] = {{10, 5}};
383 int num_values = sizeof(values) / sizeof(ValueSamples);
384
385 // Setup expected data
386 statistics::DistData expected_data;
387 expected_data.type = statistics::Dist;
388 expected_data.bucket_size = params.bucket_size;
389 expected_data.underflow = 0;
390 expected_data.overflow = 0;
391 expected_data.min_val = 10;
392 expected_data.max_val = 10;
393 expected_data.cvec.clear();
394 expected_data.cvec.resize(params.buckets);
395 expected_data.cvec[2] = 5;
396
397 prepareCheckDistStor(params, values, num_values, expected_data);
398}
399
401TEST(StatsDistStorTest, SamplePrepareMultiple)
402{
403 statistics::DistStor::Params params(0, 99, 5);
404
405 // There are 20 buckets: [0,5[, [5,10[, [10,15[, ..., [95,100[.
406 // We test that values that pass the maximum bucket value (1234, 12345678,
407 // 100) are added to the overflow counter, and that the ones below the
408 // minimum bucket value (-10, -1) are added to the underflow counter.
409 // The extremes (0 and 99) are added to check if they go to the first and
410 // last buckets.
411 ValueSamples values[] = {{10, 5}, {1234, 2}, {12345678, 99}, {-10, 4},
412 {17, 17}, {52, 63}, {18, 11}, {0, 1}, {99, 15}, {-1, 200}, {100, 50}};
413 int num_values = sizeof(values) / sizeof(ValueSamples);
414
415 // Setup variables that should always match params' values
416 statistics::DistData expected_data;
417 expected_data.type = statistics::Dist;
418 expected_data.min_val = -10;
419 expected_data.max_val = 12345678;
420 expected_data.bucket_size = params.bucket_size;
421 expected_data.underflow = 204;
422 expected_data.overflow = 151;
423 expected_data.sum = 0;
424 expected_data.squares = 0;
425 expected_data.samples = 0;
426 expected_data.cvec.clear();
427 expected_data.cvec.resize(params.buckets);
428 expected_data.cvec[0] = 1;
429 expected_data.cvec[2] = 5;
430 expected_data.cvec[3] = 17+11;
431 expected_data.cvec[10] = 63;
432 expected_data.cvec[19] = 15;
433
434 prepareCheckDistStor(params, values, num_values, expected_data);
435}
436
438TEST(StatsDistStorTest, Reset)
439{
440 statistics::DistStor::Params params(0, 99, 5);
441 statistics::DistStor stor(&params);
442
443 // Populate storage with random samples
444 ValueSamples values[] = {{10, 5}, {1234, 2}, {12345678, 99}, {-10, 4},
445 {17, 17}, {52, 63}, {18, 11}, {0, 1}, {99, 15}, {-1, 200}, {100, 50}};
446 int num_values = sizeof(values) / sizeof(ValueSamples);
447 for (int i = 0; i < num_values; i++) {
448 stor.sample(values[i].value, values[i].numSamples);
449 }
450
451 // Reset storage, and make sure all data has been cleared
452 stor.reset(&params);
453 statistics::DistData data;
454 stor.prepare(&params, data);
455
456 statistics::DistData expected_data;
457 expected_data.type = statistics::Dist;
458 expected_data.bucket_size = params.bucket_size;
459 expected_data.underflow = 0;
460 expected_data.overflow = 0;
461 expected_data.min = params.min;
462 expected_data.max = params.max;
463 expected_data.min_val = 0;
464 expected_data.max_val = 0;
465 expected_data.sum = 0;
466 expected_data.squares = 0;
467 expected_data.samples = 0;
468 expected_data.cvec.clear();
469 expected_data.cvec.resize(params.buckets);
470
471 checkExpectedDistData(data, expected_data, true);
472}
473
474#if TRACING_ON
476TEST(StatsHistStorDeathTest, NotEnoughBuckets0)
477{
478 EXPECT_ANY_THROW(statistics::HistStor::Params params(0));
479}
480
482TEST(StatsHistStorDeathTest, NotEnoughBuckets1)
483{
484 EXPECT_ANY_THROW(statistics::HistStor::Params params(1));
485}
486#endif
487
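488/**
489 * Test that the histogram storage is zero on creation, non-zero after
490 * sampling, and zero again after a reset.
491 */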
493TEST(StatsHistStorTest, ZeroReset)
494{
495 statistics::HistStor::Params params(4); // any valid bucket count works here
496 statistics::HistStor stor(&params);
497 statistics::Counter val = 10;
498 statistics::Counter num_samples = 5;
499
500 ASSERT_TRUE(stor.zero());
501
502 stor.reset(&params);
503 stor.sample(val, num_samples);
504 ASSERT_FALSE(stor.zero());
505
506 stor.reset(&params);
507 ASSERT_TRUE(stor.zero());
508}
509
514TEST(StatsHistStorTest, Size)
515{
516 statistics::Counter val = 10;
517 statistics::Counter num_samples = 5;
518 statistics::DistData data;
519 statistics::size_type sizes[] = {2, 10, 1234};
520
521 for (int i = 0; i < (sizeof(sizes) / sizeof(statistics::size_type)); i++) {
522 statistics::HistStor::Params params(sizes[i]);
523 statistics::HistStor stor(&params);
524
525 ASSERT_EQ(stor.size(), sizes[i]);
526 stor.sample(val, num_samples);
527 ASSERT_EQ(stor.size(), sizes[i]);
528 stor.prepare(&params, data);
529 ASSERT_EQ(stor.size(), sizes[i]);
530 stor.reset(&params);
531 ASSERT_EQ(stor.size(), sizes[i]);
532 stor.zero();
533 ASSERT_EQ(stor.size(), sizes[i]);
534 }
535}
536
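537/**
538 * Auxiliary function that finishes preparing the HistStor's expected
539 * values, performs the calls to sample and prepare, and then compares the
540 * resulting data against the expected data. The log check is skipped when
541 * negative values are sampled, since they have no valid logarithm.
542 */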
547void
548prepareCheckHistStor(statistics::HistStor::Params& params,
549 ValueSamples* values, int num_values, statistics::DistData& expected_data)
550{
551 statistics::HistStor stor(&params);
552
553 statistics::Counter val;
554 statistics::DistData data;
555 bool no_log = false;
556
557 expected_data.min_val = expected_data.min;
558 expected_data.max = expected_data.max_val + expected_data.bucket_size - 1;
559 expected_data.sum = 0;
560 expected_data.squares = 0;
561 expected_data.logs = 0;
562 expected_data.samples = 0;
563
564 // Populate storage with more data
565 for (int i = 0; i < num_values; i++) {
566 stor.sample(values[i].value, values[i].numSamples);
567
568 val = values[i].value * values[i].numSamples;
569 expected_data.sum += val;
570 expected_data.squares += values[i].value * val;
571 if (values[i].value < 0) {
572 // Negative values don't have log, so mark log check to be skipped
573 no_log = true;
574 } else {
575 expected_data.logs +=
576 std::log(values[i].value) * values[i].numSamples;
577 }
578 expected_data.samples += values[i].numSamples;
579 }
580 stor.prepare(&params, data);
581 checkExpectedDistData(data, expected_data, no_log);
582}
583
588TEST(StatsHistStorTest, SamplePrepareFit)
589{
590 statistics::HistStor::Params params(4);
591
592 // Setup expected data for the hand-carved values given. The final buckets
593 // will be divided at:
594 // Bkt0=[0,1[ , Bkt1=[1,2[, Bkt2=[2,3[, Bkt3=[3,4[
595 ValueSamples values[] = {{0, 5}, {1, 2}, {2, 99}, {3, 4}};
596 const int num_values = sizeof(values) / sizeof(ValueSamples);
597 statistics::DistData expected_data;
598 expected_data.type = statistics::Hist;
599 expected_data.bucket_size = 1;
600 expected_data.min = 0;
601 expected_data.max_val = 3;
602 expected_data.cvec.clear();
603 expected_data.cvec.resize(params.buckets);
604 expected_data.cvec[0] = 5;
605 expected_data.cvec[1] = 2;
606 expected_data.cvec[2] = 99;
607 expected_data.cvec[3] = 4;
608
609 prepareCheckHistStor(params, values, num_values, expected_data);
610}
611
616TEST(StatsHistStorTest, SamplePrepareSingleGrowUp)
617{
618 statistics::HistStor::Params params(4);
619
620 // Setup expected data for the hand-carved values given. Since there
621 // are four buckets, and the highest value is 4, the bucket size will
622 // grow to be 2. The final buckets will be divided at:
623 // Bkt0=[0,2[ , Bkt1=[2,4[, Bkt2=[4,6[, Bkt3=[6,8[
624 ValueSamples values[] = {{0, 5}, {1, 2}, {2, 99}, {4, 4}};
625 const int num_values = sizeof(values) / sizeof(ValueSamples);
626 statistics::DistData expected_data;
627 expected_data.type = statistics::Hist;
628 expected_data.bucket_size = 2;
629 expected_data.min = 0;
630 expected_data.max_val = 6;
631 expected_data.cvec.clear();
632 expected_data.cvec.resize(params.buckets);
633 expected_data.cvec[0] = 5+2;
634 expected_data.cvec[1] = 99;
635 expected_data.cvec[2] = 4;
636 expected_data.cvec[3] = 0;
637
638 prepareCheckHistStor(params, values, num_values, expected_data);
639}
640
645TEST(StatsHistStorTest, SamplePrepareMultipleGrowUp)
646{
647 statistics::HistStor::Params params(4);
648
649 // Setup expected data for the hand-carved values given. Since there
650 // are four buckets, and the highest value is 16, the bucket size will
651 // grow thrice to become 8. The final buckets will be divided at:
652 // Bkt0=[0,8[ , Bkt1=[8,16[, Bkt2=[16,24[, Bkt3=[24,32[
653 ValueSamples values[] = {{0, 5}, {1, 2}, {2, 99}, {16, 4}};
654 const int num_values = sizeof(values) / sizeof(ValueSamples);
655 statistics::DistData expected_data;
656 expected_data.type = statistics::Hist;
657 expected_data.bucket_size = 8;
658 expected_data.min = 0;
659 expected_data.max_val = 24;
660 expected_data.cvec.clear();
661 expected_data.cvec.resize(params.buckets);
662 expected_data.cvec[0] = 5+2+99;
663 expected_data.cvec[1] = 0;
664 expected_data.cvec[2] = 4;
665 expected_data.cvec[3] = 0;
666
667 prepareCheckHistStor(params, values, num_values, expected_data);
668}
669
675TEST(StatsHistStorTest, SamplePrepareGrowDownOddBuckets)
676{
677 statistics::HistStor::Params params(5);
678
679 // Setup expected data for the hand-carved values given. Since there
680 // is a negative value, the min bucket will change, and the bucket size
681 // will grow to be 2. The final buckets will be divided at:
682 // Bkt0=[-4,-2[ , Bkt1=[-2,0[, Bkt2=[0,2[, Bkt3=[2,4[, Bkt4=[4,6[
683 ValueSamples values[] =
684 {{0, 5}, {1, 2}, {2, 99}, {3, 12}, {4, 33}, {-1, 4}};
685 const int num_values = sizeof(values) / sizeof(ValueSamples);
686 statistics::DistData expected_data;
687 expected_data.type = statistics::Hist;
688 expected_data.bucket_size = 2;
689 expected_data.min = -4;
690 expected_data.max_val = 4;
691 expected_data.cvec.clear();
692 expected_data.cvec.resize(params.buckets);
693 expected_data.cvec[0] = 0;
694 expected_data.cvec[1] = 4;
695 expected_data.cvec[2] = 5+2;
696 expected_data.cvec[3] = 99+12;
697 expected_data.cvec[4] = 33;
698
699 prepareCheckHistStor(params, values, num_values, expected_data);
700}
701
707TEST(StatsHistStorTest, SamplePrepareGrowDownEvenBuckets)
708{
709 statistics::HistStor::Params params(4);
710
711 // Setup expected data for the hand-carved values given. Since there
712 // is a negative value, the min bucket will change, and the bucket size
713 // will grow to be 2. The final buckets will be divided at:
714 // Bkt0=[-4,-2[ , Bkt1=[-2,0[, Bkt2=[0,2[, Bkt3=[2,4[
715 ValueSamples values[] = {{0, 5}, {1, 2}, {2, 99}, {-1, 4}};
716 const int num_values = sizeof(values) / sizeof(ValueSamples);
717 statistics::DistData expected_data;
718 expected_data.type = statistics::Hist;
719 expected_data.bucket_size = 2;
720 expected_data.min = -4;
721 expected_data.max_val = 2;
722 expected_data.cvec.clear();
723 expected_data.cvec.resize(params.buckets);
724 expected_data.cvec[0] = 0;
725 expected_data.cvec[1] = 4;
726 expected_data.cvec[2] = 5+2;
727 expected_data.cvec[3] = 99;
728
729 prepareCheckHistStor(params, values, num_values, expected_data);
730}
731
737TEST(StatsHistStorTest, SamplePrepareGrowDownGrowOutOddBuckets)
738{
739 statistics::HistStor::Params params(5);
740
741 // Setup expected data for the hand-carved values given. Since there
742 // is a negative value, the min bucket will change, and the bucket size
743 // will grow to be 8. The final buckets will be divided at:
744 // Bkt0=[-16,-8[ , Bkt1=[-8,0[, Bkt2=[0,8[, Bkt3=[8,16[, Bkt4=[16,24[
745 ValueSamples values[] =
746 {{0, 5}, {1, 2}, {2, 99}, {3, 12}, {4, 33}, {-12, 4}};
747 const int num_values = sizeof(values) / sizeof(ValueSamples);
748 statistics::DistData expected_data;
749 expected_data.type = statistics::Hist;
750 expected_data.bucket_size = 8;
751 expected_data.min = -16;
752 expected_data.max_val = 16;
753 expected_data.cvec.clear();
754 expected_data.cvec.resize(params.buckets);
755 expected_data.cvec[0] = 4;
756 expected_data.cvec[1] = 0;
757 expected_data.cvec[2] = 5+2+99+12+33;
758 expected_data.cvec[3] = 0;
759 expected_data.cvec[4] = 0;
760
761 prepareCheckHistStor(params, values, num_values, expected_data);
762}
763
769TEST(StatsHistStorTest, SamplePrepareGrowDownGrowOutEvenBuckets)
770{
771 statistics::HistStor::Params params(4);
772
773 // Setup expected data for the hand-carved values given. Since there
774 // is a negative value, the min bucket will change, and the bucket size
775 // will grow to be 8. The final buckets will be divided at:
776 // Bkt0=[-16,-8[ , Bkt1=[-8,0[, Bkt2=[0,8[, Bkt3=[8,16[
777 ValueSamples values[] =
778 {{0, 5}, {1, 2}, {2, 99}, {3, 12}, {-12, 4}};
779 const int num_values = sizeof(values) / sizeof(ValueSamples);
780 statistics::DistData expected_data;
781 expected_data.type = statistics::Hist;
782 expected_data.bucket_size = 8;
783 expected_data.min = -16;
784 expected_data.max_val = 8;
785 expected_data.cvec.clear();
786 expected_data.cvec.resize(params.buckets);
787 expected_data.cvec[0] = 4;
788 expected_data.cvec[1] = 0;
789 expected_data.cvec[2] = 5+2+99+12;
790 expected_data.cvec[3] = 0;
791
792 prepareCheckHistStor(params, values, num_values, expected_data);
793}
794
800TEST(StatsHistStorTest, SamplePrepareMultipleGrowOddBuckets)
801{
802 statistics::HistStor::Params params(5);
803
804 // Setup expected data for the hand-carved values given. This adds quite
805 // a few positive and negative samples, and the bucket size will grow to
806 // be 64. The final buckets will be divided at:
807 // Bkt0=[-128,-64[ , Bkt1=[-64,0[, Bkt2=[0,64[, Bkt3=[64,128[,
808 // Bkt4=[128,192[
809 ValueSamples values[] =
810 {{0, 5}, {7, 2}, {31, 99}, {-8, 12}, {127, 4}, {-120, 53}, {-50, 1}};
811 const int num_values = sizeof(values) / sizeof(ValueSamples);
812 statistics::DistData expected_data;
813 expected_data.type = statistics::Hist;
814 expected_data.bucket_size = 64;
815 expected_data.min = -128;
816 expected_data.max_val = 128;
817 expected_data.cvec.clear();
818 expected_data.cvec.resize(params.buckets);
819 expected_data.cvec[0] = 53;
820 expected_data.cvec[1] = 12+1;
821 expected_data.cvec[2] = 5+2+99;
822 expected_data.cvec[3] = 4;
823 expected_data.cvec[4] = 0;
824
825 prepareCheckHistStor(params, values, num_values, expected_data);
826}
827
833TEST(StatsHistStorTest, SamplePrepareMultipleGrowEvenBuckets)
834{
835 statistics::HistStor::Params params(4);
836
837 // Setup expected data for the hand-carved values given. This adds quite
838 // a few positive and negative samples, and the bucket size will grow to
839 // be 64. The final buckets will be divided at:
840 // Bkt0=[-128,-64[ , Bkt1=[-64,0[, Bkt2=[0,64[, Bkt3=[64,128[
841 ValueSamples values[] =
842 {{0, 5}, {7, 2}, {31, 99}, {-8, 12}, {127, 4}, {-120, 53}, {-50, 1}};
843 const int num_values = sizeof(values) / sizeof(ValueSamples);
844 statistics::DistData expected_data;
845 expected_data.type = statistics::Hist;
846 expected_data.bucket_size = 64;
847 expected_data.min = -128;
848 expected_data.max_val = 64;
849 expected_data.cvec.clear();
850 expected_data.cvec.resize(params.buckets);
851 expected_data.cvec[0] = 53;
852 expected_data.cvec[1] = 12+1;
853 expected_data.cvec[2] = 5+2+99;
854 expected_data.cvec[3] = 4;
855
856 prepareCheckHistStor(params, values, num_values, expected_data);
857}
858
860TEST(StatsHistStorTest, Reset)
861{
862 statistics::HistStor::Params params(4);
863 statistics::HistStor stor(&params);
864
865 // Setup expected data for the hand-carved values given. This adds quite
866 // a few positive and negative samples, and the bucket size will grow to
867 // be 64. The final buckets will be divided at:
868 // Bkt0=[-128,-64[ , Bkt1=[-64,0[, Bkt2=[0,64[, Bkt3=[64,128[
869 ValueSamples values[] =
870 {{0, 5}, {7, 2}, {31, 99}, {-8, 12}, {127, 4}, {-120, 53}, {-50, 1}};
871 const int num_values = sizeof(values) / sizeof(ValueSamples);
872 for (int i = 0; i < num_values; i++) {
873 stor.sample(values[i].value, values[i].numSamples);
874 }
875
876 // Reset storage, and make sure all data has been cleared:
877 // Bkt0=[0,1[ , Bkt1=[1,2[, Bkt2=[2,3[, Bkt3=[3,4[
878 stor.reset(&params);
879 statistics::DistData expected_data;
880 expected_data.type = statistics::Hist;
881 expected_data.bucket_size = 1;
882 expected_data.min = 0;
883 expected_data.max_val = 3;
884 expected_data.cvec.clear();
885 expected_data.cvec.resize(params.buckets);
886 prepareCheckHistStor(params, values, 0, expected_data);
887}
888
889#if TRACING_ON
891TEST(StatsHistStorDeathTest, AddDifferentSize)
892{
893 statistics::HistStor::Params params(4);
894 statistics::HistStor stor(&params);
895
896 statistics::HistStor::Params params2(5); // only needs to differ in size from params
897 statistics::HistStor stor2(&params2);
898
899 ASSERT_DEATH(stor.add(&stor2), ".+");
900}
901
903TEST(StatsHistStorDeathTest, AddDifferentMin)
904{
905 statistics::HistStor::Params params(4);
906 statistics::HistStor stor(&params);
907 stor.sample(-1, 3);
908
909 // On creation, the storage's min is zero
910 statistics::HistStor::Params params2(4); // same bucket count as params
911 statistics::HistStor stor2(&params2);
912
913 ASSERT_DEATH(stor.add(&stor2), ".+");
914}
915#endif
916
918TEST(StatsHistStorTest, Add)
919{
920 statistics::HistStor::Params params(4);
921
922 // Setup first storage. Buckets are:
923 // Bkt0=[0,16[, Bkt1=[16,32[, Bkt2=[32,48[, Bkt3=[48,64[
924 statistics::HistStor stor(&params);
925 ValueSamples values[] = {{0, 5}, {3, 2}, {20, 37}, {32, 18}};
926 int num_values = sizeof(values) / sizeof(ValueSamples);
927 for (int i = 0; i < num_values; i++) {
928 stor.sample(values[i].value, values[i].numSamples);
929 }
930 statistics::DistData data;
931 stor.prepare(&params, data);
932
933 // Setup second storage. Buckets are:
934 // Bkt0=[0,32[, Bkt1=[32,64[, Bkt2=[64,96[, Bkt3=[96,128[
935 statistics::HistStor stor2(&params);
936 ValueSamples values2[] = {{10, 10}, {0, 1}, {80, 4}, {17, 100}, {95, 79}};
937 int num_values2 = sizeof(values2) / sizeof(ValueSamples);
938 for (int i = 0; i < num_values2; i++) {
939 stor2.sample(values2[i].value, values2[i].numSamples);
940 }
941 statistics::DistData data2;
942 stor2.prepare(&params, data2);
943
944 // Perform the merge
945 stor.add(&stor2);
946 statistics::DistData merge_data;
947 stor.prepare(&params, merge_data);
948
949 // Setup expected data. Buckets are:
950 // Bkt0=[0,32[, Bkt1=[32,64[, Bkt2=[64,96[, Bkt3=[96,128[
951 statistics::DistData expected_data;
952 expected_data.type = statistics::Hist;
953 expected_data.bucket_size = 32;
954 expected_data.min = 0;
955 expected_data.max = 127;
956 expected_data.min_val = 0;
957 expected_data.max_val = 96;
958 expected_data.cvec.clear();
959 expected_data.cvec.resize(params.buckets);
960 expected_data.cvec[0] = 5+2+37+10+1+100;
961 expected_data.cvec[1] = 18;
962 expected_data.cvec[2] = 4+79;
963 expected_data.cvec[3] = 0;
964 expected_data.sum = data.sum + data2.sum;
965 expected_data.squares = data.squares + data2.squares;
966 expected_data.logs = data.logs + data2.logs;
967 expected_data.samples = data.samples + data2.samples;
968
969 // Compare results
970 checkExpectedDistData(merge_data, expected_data, false);
971}
972
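973/**
974 * Test that the mean-and-variance storage is zero on creation, non-zero
975 * after sampling, and zero again after a reset.
976 */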
978TEST(StatsSampleStorTest, ZeroReset)
979{
980 statistics::SampleStor stor(nullptr);
981 statistics::Counter val = 10;
982 statistics::Counter num_samples = 5;
983
984 ASSERT_TRUE(stor.zero());
985
986 stor.reset(nullptr);
987 stor.sample(val, num_samples);
988 ASSERT_FALSE(stor.zero());
989
990 stor.reset(nullptr);
991 ASSERT_TRUE(stor.zero());
992}
993
995TEST(StatsSampleStorTest, SamplePrepare)
996{
997 statistics::SampleStor stor(nullptr);
998 ValueSamples values[] = {{10, 5}, {1234, 2}, {0xFFFFFFFF, 18}};
999 int num_values = sizeof(values) / sizeof(ValueSamples);
1000 statistics::Counter val;
1001 statistics::DistData data;
1002 statistics::DistData expected_data;
1003 statistics::DistParams params(statistics::Deviation); // assumed: Deviation-type dist params
1004
1005 // Simple test with one value being sampled
1006 stor.sample(values[0].value, values[0].numSamples);
1007 stor.prepare(&params, data);
1008 val = values[0].value * values[0].numSamples;
1009 expected_data.type = statistics::Deviation;
1010 expected_data.sum = val;
1011 expected_data.squares = values[0].value * val;
1012 expected_data.samples = values[0].numSamples;
1013 ASSERT_EQ(data.type, expected_data.type);
1014 ASSERT_EQ(data.sum, expected_data.sum);
1015 ASSERT_EQ(data.squares, expected_data.squares);
1016 ASSERT_EQ(data.samples, expected_data.samples);
1017
1018 // Reset storage, and make sure all data has been cleared
1019 expected_data.sum = 0;
1020 expected_data.squares = 0;
1021 expected_data.samples = 0;
1022 stor.reset(nullptr);
1023 stor.prepare(&params, data);
1024 ASSERT_EQ(data.type, expected_data.type);
1025 ASSERT_EQ(data.sum, expected_data.sum);
1026 ASSERT_EQ(data.squares, expected_data.squares);
1027 ASSERT_EQ(data.samples, expected_data.samples);
1028
1029 // Populate storage with more data
1030 for (int i = 0; i < num_values; i++) {
1031 stor.sample(values[i].value, values[i].numSamples);
1032
1033 val = values[i].value * values[i].numSamples;
1034 expected_data.sum += val;
1035 expected_data.squares += values[i].value * val;
1036 expected_data.samples += values[i].numSamples;
1037 }
1038 stor.prepare(&params, data);
1039 ASSERT_EQ(data.type, expected_data.type);
1040 ASSERT_EQ(data.sum, expected_data.sum);
1041 ASSERT_EQ(data.squares, expected_data.squares);
1042 ASSERT_EQ(data.samples, expected_data.samples);
1043}
1044
1046TEST(StatsSampleStorTest, Size)
1047{
1048 statistics::SampleStor stor(nullptr);
1049 statistics::Counter val = 10;
1050 statistics::Counter num_samples = 5;
1051 statistics::DistData data;
1052 statistics::DistParams params(statistics::Deviation); // assumed: Deviation-type dist params
1053
1054 ASSERT_EQ(stor.size(), 1);
1055 stor.sample(val, num_samples);
1056 ASSERT_EQ(stor.size(), 1);
1057 stor.prepare(&params, data);
1058 ASSERT_EQ(stor.size(), 1);
1059 stor.reset(nullptr);
1060 ASSERT_EQ(stor.size(), 1);
1061 stor.zero();
1062 ASSERT_EQ(stor.size(), 1);
1063}
1064
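1065/**
1066 * Test that the per-tick mean-and-variance storage is zero on creation,
1067 * non-zero after sampling, and zero again after a reset.
1068 */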
1070TEST(StatsAvgSampleStorTest, ZeroReset)
1071{
1072 statistics::AvgSampleStor stor(nullptr);
1073 statistics::Counter val = 10;
1074 statistics::Counter num_samples = 5;
1075
1076 ASSERT_TRUE(stor.zero());
1077
1078 stor.reset(nullptr);
1079 stor.sample(val, num_samples);
1080 ASSERT_FALSE(stor.zero());
1081
1082 stor.reset(nullptr);
1083 ASSERT_TRUE(stor.zero());
1084}
1085
1087TEST(StatsAvgSampleStorTest, SamplePrepare)
1088{
1089 statistics::AvgSampleStor stor(nullptr);
1090 ValueSamples values[] = {{10, 5}, {1234, 2}, {0xFFFFFFFF, 18}};
1091 int num_values = sizeof(values) / sizeof(ValueSamples);
1092 statistics::Counter val;
1093 statistics::DistData data;
1094 statistics::DistData expected_data;
1095 statistics::DistParams params(statistics::Deviation); // assumed: Deviation-type dist params
1096
1097 // Simple test with one value being sampled
1098 stor.sample(values[0].value, values[0].numSamples);
1099 stor.prepare(&params, data);
1100 val = values[0].value * values[0].numSamples;
1101 expected_data.type = statistics::Deviation;
1102 expected_data.sum = val;
1103 expected_data.squares = values[0].value * val;
1104 ASSERT_EQ(data.type, expected_data.type);
1105 ASSERT_EQ(data.sum, expected_data.sum);
1106 ASSERT_EQ(data.squares, expected_data.squares);
1107 ASSERT_EQ(data.samples, curTick());
1108
1109 increaseTick();
1110
1111 // Reset storage, and make sure all data has been cleared
1112 expected_data.sum = 0;
1113 expected_data.squares = 0;
1114 stor.reset(nullptr);
1115 stor.prepare(&params, data);
1116 ASSERT_EQ(data.type, expected_data.type);
1117 ASSERT_EQ(data.sum, expected_data.sum);
1118 ASSERT_EQ(data.squares, expected_data.squares);
1119 ASSERT_EQ(data.samples, curTick());
1120
1121 increaseTick();
1122
1123 // Populate storage with more data
1124 for (int i = 0; i < num_values; i++) {
1125 stor.sample(values[i].value, values[i].numSamples);
1126
1127 val = values[i].value * values[i].numSamples;
1128 expected_data.sum += val;
1129 expected_data.squares += values[i].value * val;
1130 }
1131 stor.prepare(&params, data);
1132 ASSERT_EQ(data.type, expected_data.type);
1133 ASSERT_EQ(data.sum, expected_data.sum);
1134 ASSERT_EQ(data.squares, expected_data.squares);
1135 ASSERT_EQ(data.samples, curTick());
1136}
1137
1139TEST(StatsAvgSampleStorTest, Size)
1140{
1141 statistics::AvgSampleStor stor(nullptr);
1142 statistics::Counter val = 10;
1143 statistics::Counter num_samples = 5;
1144 statistics::DistData data;
1145 statistics::DistParams params(statistics::Deviation); // assumed: Deviation-type dist params
1146
1147 ASSERT_EQ(stor.size(), 1);
1148 stor.sample(val, num_samples);
1149 ASSERT_EQ(stor.size(), 1);
1150 stor.prepare(&params, data);
1151 ASSERT_EQ(stor.size(), 1);
1152 stor.reset(nullptr);
1153 ASSERT_EQ(stor.size(), 1);
1154 stor.zero();
1155 ASSERT_EQ(stor.size(), 1);
1156}
1157
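1158/**
1159 * Test that the sparse histogram storage is zero on creation, non-zero
1160 * after sampling, and zero again after a reset.
1161 */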
1163TEST(StatsSparseHistStorTest, ZeroReset)
1164{
1165 statistics::SparseHistStor stor(nullptr);
1166 statistics::Counter val = 10;
1167 statistics::Counter num_samples = 5;
1168
1169 ASSERT_TRUE(stor.zero());
1170
1171 stor.reset(nullptr);
1172 stor.sample(val, num_samples);
1173 ASSERT_FALSE(stor.zero());
1174
1175 stor.reset(nullptr);
1176 ASSERT_TRUE(stor.zero());
1177}
1178
1180TEST(StatsSparseHistStorTest, SamplePrepare)
1181{
1182 statistics::SparseHistStor stor(nullptr);
1183 ValueSamples values[] = {{10, 5}, {1234, 2}, {0xFFFFFFFF, 18}};
1184 int num_values = sizeof(values) / sizeof(ValueSamples);
1185 statistics::Counter total_samples;
1186 statistics::SparseHistData data;
1187
1188 // Simple test with one value being sampled
1189 stor.sample(values[0].value, values[0].numSamples);
1190 stor.prepare(nullptr, data);
1191 ASSERT_EQ(stor.size(), 1);
1192 ASSERT_EQ(data.cmap.size(), 1);
1193 ASSERT_EQ(data.cmap[values[0].value], values[0].numSamples);
1194 ASSERT_EQ(data.samples, values[0].numSamples);
1195
1196 // Reset storage, and make sure all data has been cleared
1197 stor.reset(nullptr);
1198 stor.prepare(nullptr, data);
1199 ASSERT_EQ(stor.size(), 0);
1200 ASSERT_EQ(data.cmap.size(), 0);
1201 ASSERT_EQ(data.samples, 0);
1202
1203 // Populate storage with more data
1204 for (int i = 0; i < num_values; i++) {
1205 stor.sample(values[i].value, values[i].numSamples);
1206 }
1207 stor.prepare(nullptr, data);
1208 total_samples = 0;
1209 ASSERT_EQ(stor.size(), num_values);
1210 ASSERT_EQ(data.cmap.size(), num_values);
1211 for (int i = 0; i < num_values; i++) {
1212 ASSERT_EQ(data.cmap[values[i].value], values[i].numSamples);
1213 total_samples += values[i].numSamples;
1214 }
1215 ASSERT_EQ(data.samples, total_samples);
1216}