Monitoring System 0.1.0
System resource monitoring with pluggable collectors and alerting
Loading...
Searching...
No Matches
kcenon::monitoring::performance_benchmark Class Reference

Performance benchmark utility. More...

#include <performance_monitor.h>

Collaboration diagram for kcenon::monitoring::performance_benchmark:
Collaboration graph

Public Member Functions

 performance_benchmark (const std::string &name)
 
void set_iterations (std::uint32_t iterations)
 Set number of benchmark iterations.
 
void set_warmup_iterations (std::uint32_t warmup)
 Set number of warmup iterations.
 
template<typename Func >
common::Result< performance_metrics > run (const std::string &operation_name, Func &&func)
 Run a benchmark.
 
template<typename Func1 , typename Func2 >
common::Result< std::pair< performance_metrics, performance_metrics > > compare (const std::string &operation1_name, Func1 &&func1, const std::string &operation2_name, Func2 &&func2)
 Compare two operations.
 

Private Attributes

performance_profiler profiler_
 
std::string name_
 
std::uint32_t iterations_ {1000}
 
std::uint32_t warmup_iterations_ {100}
 

Detailed Description

Performance benchmark utility.

Definition at line 627 of file performance_monitor.h.

Constructor & Destructor Documentation

◆ performance_benchmark()

kcenon::monitoring::performance_benchmark::performance_benchmark ( const std::string & name)
inline explicit

Member Function Documentation

◆ compare()

template<typename Func1 , typename Func2 >
common::Result< std::pair< performance_metrics, performance_metrics > > kcenon::monitoring::performance_benchmark::compare ( const std::string & operation1_name,
Func1 && func1,
const std::string & operation2_name,
Func2 && func2 )
inline

Compare two operations.

Examples
/home/runner/work/monitoring_system/monitoring_system/include/kcenon/monitoring/core/performance_monitor.h.

Definition at line 697 of file performance_monitor.h.

702 {
703 auto result1 = run(operation1_name, std::forward<Func1>(func1));
704 if (result1.is_err()) {
705 return common::Result<std::pair<performance_metrics, performance_metrics>>::err(result1.error());
706 }
707
708 auto result2 = run(operation2_name, std::forward<Func2>(func2));
709 if (result2.is_err()) {
710 return common::Result<std::pair<performance_metrics, performance_metrics>>::err(result2.error());
711 }
712
713 return common::ok(std::make_pair(result1.value(), result2.value()));
714 }
common::Result< performance_metrics > run(const std::string &operation_name, Func &&func)
Run a benchmark.

Referenced by TEST_F().

Here is the caller graph for this function:

◆ run()

template<typename Func >
common::Result< performance_metrics > kcenon::monitoring::performance_benchmark::run ( const std::string & operation_name,
Func && func )
inline

Run a benchmark.

Examples
/home/runner/work/monitoring_system/monitoring_system/include/kcenon/monitoring/core/performance_monitor.h.

Definition at line 658 of file performance_monitor.h.

661 {
662 // Warmup
663 for (std::uint32_t i = 0; i < warmup_iterations_; ++i) {
664 func();
665 }
666
667 // Actual benchmark
668 for (std::uint32_t i = 0; i < iterations_; ++i) {
669 auto start = std::chrono::high_resolution_clock::now();
670
671 try {
672 func();
673 } catch (...) {
674 // Record error
675 auto end = std::chrono::high_resolution_clock::now();
676 auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(
677 end - start
678 );
679 profiler_.record_sample(operation_name, duration, false);
680 continue;
681 }
682
683 auto end = std::chrono::high_resolution_clock::now();
684 auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(
685 end - start
686 );
687 profiler_.record_sample(operation_name, duration, true);
688 }
689
690 return profiler_.get_metrics(operation_name);
691 }
common::Result< bool > record_sample(const std::string &operation_name, std::chrono::nanoseconds duration, bool success=true)
Record a performance sample.
common::Result< performance_metrics > get_metrics(const std::string &operation_name) const
Get performance metrics for an operation.

References kcenon::monitoring::performance_profiler::get_metrics(), and kcenon::monitoring::performance_profiler::record_sample().

Referenced by TEST_F().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ set_iterations()

void kcenon::monitoring::performance_benchmark::set_iterations ( std::uint32_t iterations)
inline

Set number of benchmark iterations.

Parameters
iterations — Number of iterations to run during the benchmark
Examples
/home/runner/work/monitoring_system/monitoring_system/include/kcenon/monitoring/core/performance_monitor.h.

Definition at line 642 of file performance_monitor.h.

642 {
643 iterations_ = iterations;
644 }

Referenced by TEST_F(), and TEST_F().

Here is the caller graph for this function:

◆ set_warmup_iterations()

void kcenon::monitoring::performance_benchmark::set_warmup_iterations ( std::uint32_t warmup)
inline

Set number of warmup iterations.

Parameters
warmup — Number of warmup iterations to run before the benchmark
Examples
/home/runner/work/monitoring_system/monitoring_system/include/kcenon/monitoring/core/performance_monitor.h.

Definition at line 650 of file performance_monitor.h.

650 {
651 warmup_iterations_ = warmup;
652 }

Referenced by TEST_F(), and TEST_F().

Here is the caller graph for this function:

Member Data Documentation

◆ iterations_

std::uint32_t kcenon::monitoring::performance_benchmark::iterations_ {1000}
private

◆ name_

std::string kcenon::monitoring::performance_benchmark::name_
private

◆ profiler_

performance_profiler kcenon::monitoring::performance_benchmark::profiler_
private

◆ warmup_iterations_

std::uint32_t kcenon::monitoring::performance_benchmark::warmup_iterations_ {100}
private

The documentation for this class was generated from the following file: