From: Ondrej Lichtner <olichtne@redhat.com>
Refactoring the Perf and PerfResult modules into a separate package lnst.RecipeCommon.Perf that will host everything related to the Perf recipe template.
I'm also considering moving this into the lnst.Recipes package later, where it might make more sense as an actual recipe, with an example test method that shows off the basic usage of the template.
Changes summary:
* moved lnst/RecipeCommon/Perf.py to lnst/RecipeCommon/Perf/Recipe.py
* renamed the PerfTestAndEvaluate class to just Recipe, since the "Perf"
  part is obvious from the namespace
* renamed the PerfConf class to RecipeConf
* RecipeConf now only contains configuration for the Recipe - the list of
  measurements to run and the number of iterations to repeat them for
* removed PerfMeasurementTool, it will be replaced by the Measurements
  class hierarchy added in the following commit (a sketch of the interface
  it needs to satisfy follows the diffstat below)
* added the RecipeResults class to store aggregated measurement results
  associated with the current Recipe configuration
* moved lnst/RecipeCommon/PerfResult.py to lnst/RecipeCommon/Perf/Results.py
* removed StreamPerf, MultiStreamPerf and MultiRunPerf and replaced them
  with SequentialPerfResult and ParallelPerfResult to improve code reuse
  (see the composition sketch below)
* added the PerfResult base class
* set the PerfInterval string formatting precision to 2 decimal places
* improved code reuse for item validation in the PerfList class
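To illustrate how the new result classes are meant to compose (an
illustrative sketch only, not part of the patch; it assumes the classes
as defined in Results.py below): a SequentialPerfResult collects results
measured one after another, e.g. the consecutive intervals of a single
stream, while a ParallelPerfResult collects results measured at the same
time, e.g. multiple streams of one test run. Nesting the two replaces the
old fixed StreamPerf/MultiStreamPerf/MultiRunPerf hierarchy:

    # illustrative sketch only, not part of this patch
    from lnst.RecipeCommon.Perf.Results import (
        PerfInterval, SequentialPerfResult, ParallelPerfResult)

    # a single stream measured over three consecutive 1 second intervals
    stream1 = SequentialPerfResult([PerfInterval(1000, 1, "bits"),
                                    PerfInterval(1200, 1, "bits"),
                                    PerfInterval(1100, 1, "bits")])
    stream2 = SequentialPerfResult([PerfInterval(900, 1, "bits"),
                                    PerfInterval(1000, 1, "bits"),
                                    PerfInterval(1150, 1, "bits")])

    # two streams running at the same time form one test run
    run = ParallelPerfResult([stream1, stream2])

    # value and unit are aggregated recursively over the nesting
    print("{} {}".format(run.value, run.unit))

All items of a PerfList must share the same unit, otherwise the shared
validation code raises LnstError.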
Signed-off-by: Ondrej Lichtner <olichtne@redhat.com>
---
 lnst/RecipeCommon/Perf.py               | 120 ------------------
 lnst/RecipeCommon/Perf/Recipe.py        |  73 +++++++++++
 .../{PerfResult.py => Perf/Results.py}  |  65 ++++------
 lnst/RecipeCommon/Perf/__init__.py      |   0
 lnst/Recipes/ENRT/BaseEnrtRecipe.py     |  20 +--
 5 files changed, 106 insertions(+), 172 deletions(-)
 delete mode 100644 lnst/RecipeCommon/Perf.py
 create mode 100644 lnst/RecipeCommon/Perf/Recipe.py
 rename lnst/RecipeCommon/{PerfResult.py => Perf/Results.py} (72%)
 create mode 100644 lnst/RecipeCommon/Perf/__init__.py
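Note: since the Measurements hierarchy only lands in the following commit,
here is a hypothetical stub (not part of this patch) spelling out the
interface that Recipe.perf_test(), perf_report() and perf_evaluate() below
assume of a measurement object; the method names are taken from the calls
in Recipe.py:

    # hypothetical stub, not part of this patch
    class ExampleMeasurement(object):
        def start(self):
            # start the measurement; all measurements are started first
            pass

        def finish(self):
            # stop the measurement; finished in reverse order of starting
            pass

        def collect_results(self):
            # return the results of the last start()/finish() cycle
            pass

        def aggregate_results(self, old_results, new_results):
            # fold new results into the aggregate; old_results is None
            # on the first iteration
            pass

        def report_results(self, recipe, results):
            # report aggregated results through recipe.add_result()
            pass

        def evaluate_results(self, recipe, results):
            # evaluate aggregated results through recipe.add_result()
            pass

Recipe.perf_test() runs this cycle RecipeConf.iterations times and uses
RecipeResults.add_measurement_results() to aggregate each iteration's
results per measurement.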
diff --git a/lnst/RecipeCommon/Perf.py b/lnst/RecipeCommon/Perf.py
deleted file mode 100644
index 97aa0f1..0000000
--- a/lnst/RecipeCommon/Perf.py
+++ /dev/null
@@ -1,120 +0,0 @@
-from lnst.Controller.Recipe import BaseRecipe
-from lnst.RecipeCommon.PerfResult import MultiRunPerf
-
-class PerfConf(object):
-    def __init__(self,
-                 perf_tool,
-                 test_type,
-                 generator, generator_bind,
-                 receiver, receiver_bind,
-                 msg_size, duration, iterations, streams):
-        self._perf_tool = perf_tool
-        self._test_type = test_type
-
-        self._generator = generator
-        self._generator_bind = generator_bind
-        self._receiver = receiver
-        self._receiver_bind = receiver_bind
-
-        self._msg_size = msg_size
-        self._duration = duration
-        self._iterations = iterations
-        self._streams = streams
-
-    @property
-    def perf_tool(self):
-        return self._perf_tool
-
-    @property
-    def generator(self):
-        return self._generator
-
-    @property
-    def generator_bind(self):
-        return self._generator_bind
-
-    @property
-    def receiver(self):
-        return self._receiver
-
-    @property
-    def receiver_bind(self):
-        return self._receiver_bind
-
-    @property
-    def test_type(self):
-        return self._test_type
-
-    @property
-    def msg_size(self):
-        return self._msg_size
-
-    @property
-    def duration(self):
-        return self._duration
-
-    @property
-    def iterations(self):
-        return self._iterations
-
-    @property
-    def streams(self):
-        return self._streams
-
-class PerfMeasurementTool(object):
-    @staticmethod
-    def perf_measure(perf_conf):
-        raise NotImplementedError
-
-class PerfTestAndEvaluate(BaseRecipe):
-    def perf_test(self, perf_conf):
-        generator_measurements = MultiRunPerf()
-        receiver_measurements = MultiRunPerf()
-        for i in range(perf_conf.iterations):
-            tx, rx = perf_conf.perf_tool.perf_measure(perf_conf)
-
-            if tx:
-                generator_measurements.append(tx)
-            if rx:
-                receiver_measurements.append(rx)
-
-        return generator_measurements, receiver_measurements
-
-    def perf_evaluate_and_report(self, perf_conf, results, baseline):
-        self.perf_evaluate(perf_conf, results, baseline)
-
-        self.perf_report(perf_conf, results, baseline)
-
-    def perf_evaluate(self, perf_conf, results, baseline):
-        generator, receiver = results
-
-        if generator.average > 0:
-            self.add_result(True, "Generator reported non-zero throughput")
-        else:
-            self.add_result(False, "Generator reported zero throughput")
-
-        if receiver.average > 0:
-            self.add_result(True, "Receiver reported non-zero throughput")
-        else:
-            self.add_result(False, "Receiver reported zero throughput")
-
-
-    def perf_report(self, perf_conf, results, baseline):
-        generator, receiver = results
-
-        self.add_result(
-            True,
-            "Generator measured throughput: {tput} +-{deviation}({percentage:.2}%) {unit} per second"
-            .format(tput=generator.average,
-                    deviation=generator.std_deviation,
-                    percentage=(generator.std_deviation/generator.average) * 100,
-                    unit=generator.unit),
-            data = generator)
-        self.add_result(
-            True,
-            "Receiver measured throughput: {tput} +-{deviation}({percentage:.2}%) {unit} per second"
-            .format(tput=receiver.average,
-                    deviation=receiver.std_deviation,
-                    percentage=(receiver.std_deviation/receiver.average) * 100,
-                    unit=receiver.unit),
-            data = receiver)
diff --git a/lnst/RecipeCommon/Perf/Recipe.py b/lnst/RecipeCommon/Perf/Recipe.py
new file mode 100644
index 0000000..e305310
--- /dev/null
+++ b/lnst/RecipeCommon/Perf/Recipe.py
@@ -0,0 +1,73 @@
+from lnst.Controller.Recipe import BaseRecipe
+from lnst.RecipeCommon.Perf.Results import SequentialPerfResult
+from lnst.RecipeCommon.Perf.Results import ParallelPerfResult
+
+class RecipeConf(object):
+    def __init__(self, measurements, iterations):
+        self._measurements = measurements
+        self._iterations = iterations
+
+    @property
+    def measurements(self):
+        return self._measurements
+
+    @property
+    def iterations(self):
+        return self._iterations
+
+class RecipeResults(object):
+    def __init__(self, perf_conf):
+        self._perf_conf = perf_conf
+        self._results = {}
+
+    @property
+    def perf_conf(self):
+        return self._perf_conf
+
+    @property
+    def results(self):
+        return self._results
+
+    def add_measurement_results(self, measurement, new_results):
+        aggregated_results = self._results.get(measurement, None)
+        aggregated_results = measurement.aggregate_results(
+                aggregated_results, new_results)
+        self._results[measurement] = aggregated_results
+
+class Recipe(BaseRecipe):
+    def perf_test(self, recipe_conf):
+        results = RecipeResults(recipe_conf)
+
+        for i in range(recipe_conf.iterations):
+            run_results = []
+            for measurement in recipe_conf.measurements:
+                measurement.start()
+            for measurement in reversed(recipe_conf.measurements):
+                measurement.finish()
+            for measurement in recipe_conf.measurements:
+                measurement_results = measurement.collect_results()
+                results.add_measurement_results(
+                        measurement, measurement_results)
+
+        return results
+
+    def perf_report_and_evaluate(self, results):
+        self.perf_report(results)
+
+        self.perf_evaluate(results)
+
+    def perf_report(self, recipe_results):
+        if not recipe_results:
+            self.add_result(False, "No results available to report.")
+            return
+
+        for measurement, results in recipe_results.results.items():
+            measurement.report_results(self, results)
+
+    def perf_evaluate(self, recipe_results):
+        if not recipe_results:
+            self.add_result(False, "No results available to evaluate.")
+            return
+
+        for measurement, results in recipe_results.results.items():
+            measurement.evaluate_results(self, results)
diff --git a/lnst/RecipeCommon/PerfResult.py b/lnst/RecipeCommon/Perf/Results.py
similarity index 72%
rename from lnst/RecipeCommon/PerfResult.py
rename to lnst/RecipeCommon/Perf/Results.py
index f48fd0a..4591447 100644
--- a/lnst/RecipeCommon/PerfResult.py
+++ b/lnst/RecipeCommon/Perf/Results.py
@@ -10,7 +10,20 @@ class PerfStatMixin(object):
     def std_deviation(self):
         return std_deviation([i.average for i in self])
 
-class PerfInterval(PerfStatMixin):
+class PerfResult(PerfStatMixin):
+    @property
+    def value(self):
+        raise NotImplementedError()
+
+    @property
+    def duration(self):
+        raise NotImplementedError()
+
+    @property
+    def unit(self):
+        raise NotImplementedError()
+
+class PerfInterval(PerfResult):
     def __init__(self, value, duration, unit):
         self._value = value
         self._duration = duration
@@ -33,20 +46,13 @@ class PerfInterval(PerfStatMixin):
         return 0
 
     def __str__(self):
-        return "{} {} in {} seconds".format(
-            self.value, self.unit, self.duration)
+        return "{:.2f} {} in {:.2f} seconds".format(
+            float(self.value), self.unit, float(self.duration))
 
 class PerfList(list):
-    _sub_type = None
-
     def __init__(self, iterable=[]):
         unit = None
         for i, item in enumerate(iterable):
-            if not isinstance(item, self._sub_type):
-                raise LnstError("{} only accepts {} objects."
-                                .format(self.__class__.__name__,
-                                        self._sub_type.__name__))
+            self._validate_item_type(item)
 
             if i == 0:
                 unit = item.unit
@@ -57,14 +63,17 @@ class PerfList(list):
         super(PerfList, self).__init__(iterable)
 
     def _validate_item(self, item):
-        if not isinstance(item, self._sub_type):
-            raise LnstError("{} only accepts {} objects."
-                            .format(self.__class__.__name__,
-                                    self._sub_type.__name__))
+        self._validate_item_type(item)
 
         if len(self) > 0 and item.unit != self[0].unit:
            raise LnstError("PerfList items must have the same unit.")
 
+    def _validate_item_type(self, item):
+        if (not isinstance(item, PerfInterval) and
+            not isinstance(item, PerfList)):
+            raise LnstError("{} only accepts PerfInterval or PerfList objects."
+                            .format(self.__class__.__name__))
+
     def append(self, item):
         self._validate_item(item)
 
@@ -104,9 +113,7 @@ class PerfList(list):
 
         super(PerfList, self).__setslice__(i, j, iterable)
 
-class StreamPerf(PerfList, PerfStatMixin):
-    _sub_type = PerfInterval
-
+class SequentialPerfResult(PerfResult, PerfList):
     @property
     def value(self):
         return sum([i.value for i in self])
@@ -122,9 +129,7 @@ class StreamPerf(PerfList, PerfStatMixin):
         else:
             return None
 
-class MultiStreamPerf(PerfList, PerfStatMixin):
-    _sub_type = StreamPerf
-
+class ParallelPerfResult(PerfResult, PerfList):
     @property
     def value(self):
         return sum([i.value for i in self])
@@ -139,21 +144,3 @@ class MultiStreamPerf(PerfList, PerfStatMixin):
             return self[0].unit
         else:
             return None
-
-class MultiRunPerf(PerfList, PerfStatMixin):
-    _sub_type = MultiStreamPerf
-
-    @property
-    def value(self):
-        return sum([i.value for i in self])
-
-    @property
-    def duration(self):
-        return sum([i.duration for i in self])
-
-    @property
-    def unit(self):
-        if len(self) > 0:
-            return self[0].unit
-        else:
-            return None
diff --git a/lnst/RecipeCommon/Perf/__init__.py b/lnst/RecipeCommon/Perf/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lnst/Recipes/ENRT/BaseEnrtRecipe.py b/lnst/Recipes/ENRT/BaseEnrtRecipe.py
index 9e2b674..a26d999 100644
--- a/lnst/Recipes/ENRT/BaseEnrtRecipe.py
+++ b/lnst/Recipes/ENRT/BaseEnrtRecipe.py
@@ -6,7 +6,8 @@ from lnst.Common.IpAddress import AF_INET, AF_INET6
 from lnst.Controller.Recipe import BaseRecipe
 from lnst.RecipeCommon.Ping import PingTestAndEvaluate, PingConf
-from lnst.RecipeCommon.Perf import PerfTestAndEvaluate, PerfConf
+from lnst.RecipeCommon.Perf.Recipe import Recipe as PerfRecipe
+from lnst.RecipeCommon.Perf.Recipe import RecipeConf as PerfRecipeConf
 from lnst.RecipeCommon.IperfMeasurementTool import IperfMeasurementTool
 
 class EnrtConfiguration(object):
@@ -61,7 +62,7 @@ class EnrtSubConfiguration(object):
     def offload_settings(self, value):
         self._offload_settings = value
 
-class BaseEnrtRecipe(PingTestAndEvaluate, PerfTestAndEvaluate):
+class BaseEnrtRecipe(PingTestAndEvaluate, PerfRecipe):
     ip_versions = Param(default=("ipv4", "ipv6"))
     perf_tests = Param(default=("tcp_stream", "udp_stream", "sctp_stream"))
@@ -101,7 +102,7 @@ class BaseEnrtRecipe(PingTestAndEvaluate, PerfTestAndEvaluate):
                 for perf_config in self.generate_perf_configurations(main_config,
                                                                      sub_config):
                     result = self.perf_test(perf_config)
-                    self.perf_evaluate_and_report(perf_config, result, baseline=None)
+                    self.perf_report_and_evaluate(result)
 
             self.remove_sub_configuration(main_config, sub_config)
@@ -187,16 +188,9 @@ class BaseEnrtRecipe(PingTestAndEvaluate, PerfTestAndEvaluate):
             server_bind = server_nic.ips_filter(family=family)[0]
 
         for perf_test in self.params.perf_tests:
-            yield PerfConf(perf_tool = self.params.perf_tool,
-                           test_type = perf_test,
-                           generator = client_netns,
-                           generator_bind = client_bind,
-                           receiver = server_netns,
-                           receiver_bind = server_bind,
-                           msg_size = self.params.perf_msg_size,
-                           duration = self.params.perf_duration,
-                           iterations = self.params.perf_iterations,
-                           streams = self.params.perf_streams)
+            yield PerfRecipeConf(
+                measurements=[ ],
+                iterations=self.params.perf_iterations)
 
     def _pin_dev_interrupts(self, dev, cpu):
         netns = dev.netns