Scheduler-test: more precise accounting for expected concurrency

It turns out not to be correct to use all of the divergence in concurrency
as a form factor, since it is quite common that not all cores can be active
at every level, given the structural constraints dictated by the load graph.

On the other hand, if the empirical work (non wait-time) concurrency
systematically differs from the simple model used for establishing the schedule,
then this should indeed be considered a form factor and deducted from
the effective stress factor, since it is not a reserve available for speed-up.

The solution entertained here is to derive an effective compounded sum
of weights from the calculation used to build the schedule. This compounded
weight sum is typically lower than the plain sum of all node weights, which
is precisely due to the theoretical amount of expense reduction assumed
in the schedule generation. So this gives us a handle on the theoretically
expected expense, and through the plain weight sum we may draw conclusions
about the effective concurrency expected in this schedule.

Taking only this part as the base for the empirical deviations yields search results
very close to stressFactor ~1 -- implying that the test setup now
observes what it was intended to observe...
This commit is contained in:
Fischlurch 2024-02-19 17:36:46 +01:00
parent 2d1bd2b765
commit 93729e5667
4 changed files with 59 additions and 10 deletions

View file

@ -350,15 +350,14 @@ namespace test {
{
usec LOAD_BASE = 500us;
uint CONCURRENCY = 4;
bool SCHED_DEPENDS = true;
bool showRuns = true;
auto testLoad() { return TestChainLoad<>{64}.configureShape_chain_loadBursts(); }
};
auto [stress,delta,time] = StressRig::with<Setup>().searchBreakingPoint();
CHECK (delta > 2.0);
CHECK (0.55 > stress and stress > 0.4);
CHECK (delta > 2.5);
CHECK (1.15 > stress and stress > 0.9);
}

View file

@ -183,7 +183,7 @@ namespace test {
runTime[i] = testSetup.launch_and_wait() / 1000;
avgT += runTime[i];
testSetup.adaptEmpirically (stressFac, CONF::CONCURRENCY);
this->adjustmentFac = testSetup.getStressFac() / stressFac;
this->adjustmentFac = 1 / (testSetup.getStressFac() / stressFac);
}
expT = testSetup.getExpectedEndTime() / 1000;
avgT /= CONF::REPETITIONS;
@ -249,7 +249,7 @@ namespace test {
_Fmt fmtRun_ {"....·%-2d: Δ=%4.1f t=%4.1f %s %s"}; // i % Δ % t % t>avg? % fail?
_Fmt fmtStep_{ "%4.2f| : ∅Δ=%4.1f±%-4.2f ∅t=%4.1f %s %%%3.1f -- expect:%4.1fms"}; // stress % ∅Δ % σ % ∅t % fail % pecentOff % t-expect
_Fmt fmtStep_{ "%4.2f| : ∅Δ=%4.1f±%-4.2f ∅t=%4.1f %s %%%-3.0f -- expect:%4.1fms"};// stress % ∅Δ % σ % ∅t % fail % pecentOff % t-expect
_Fmt fmtResSDv_{"%9s= %5.2f ±%4.2f%s"};
_Fmt fmtResVal_{"%9s: %5.2f%s"};
@ -267,7 +267,7 @@ namespace test {
if (CONF::showStep)
cout << fmtStep_ % res.stressFac % res.avgDelta % res.stdDev % res.avgTime
% (decideBreakPoint(res)? "—◆—":"—◇—")
% res.percentOff % res.expTime
% (100*res.percentOff) % res.expTime
<< endl;
}

View file

@ -834,7 +834,7 @@ namespace test {
/** overall sum of configured node weights **/
size_t
getWeightSum()
calcWeightSum()
{
return allNodes()
.transform([](Node& n){ return n.weight; })
@ -870,6 +870,15 @@ namespace test {
});
}
/** Calculate the simplified/theoretic reduction of compounded weight through concurrency.
 * Sums, over all levels of the load graph, the per-level weight after applying
 * computeWeightFactor(), i.e. the weight remaining once the assumed concurrent
 * execution within each level has been factored in.
 * @param concurrency number of workers assumed to process a level's nodes in parallel
 *                    (defaults to 1, which yields no reduction)
 * @return the compounded weight sum; typically lower than the plain sum of all
 *         node weights, the difference representing the theoretically expected
 *         speed-up built into the schedule
 * @note relies on allLevelWeights() and computeWeightFactor() — presumably the same
 *       per-level grouping used when building the schedule; confirm against their
 *       definitions elsewhere in this file.
 */
double
calcExpectedCompoundedWeight (uint concurrency =1)
{
return allLevelWeights()
.transform([concurrency](LevelWeight const& lw){ return computeWeightFactor (lw, concurrency); })
.resultSum();
}
Statistic computeGraphStatistics();
@ -1969,8 +1978,11 @@ namespace test {
concurrency = defaultConcurrency();
double worktimeRatio = 1 - stat.timeAtConc(0) / stat.coveredTime;
double workConcurrency = stat.avgConcurrency / worktimeRatio;
double formFac = concurrency / workConcurrency;
double expectedNodeTime = _uSec(compuLoad_->timeBase) * chainLoad_.getWeightSum() / chainLoad_.size();
double weightSum = chainLoad_.calcWeightSum();
double expectedCompoundedWeight = chainLoad_.calcExpectedCompoundedWeight(concurrency);
double expectedConcurrency = weightSum / expectedCompoundedWeight;
double formFac = 1 / (workConcurrency / expectedConcurrency);
double expectedNodeTime = _uSec(compuLoad_->timeBase) * weightSum / chainLoad_.size();
double realAvgNodeTime = stat.activeTime / stat.activationCnt;
formFac *= realAvgNodeTime / expectedNodeTime;
return withAdaptedSchedule (stressFac, concurrency, formFac);

View file

@ -111634,7 +111634,7 @@ Date:&#160;&#160;&#160;Thu Apr 20 18:53:17 2023 +0200<br/>
<icon BUILTIN="button_ok"/>
</node>
</node>
<node BACKGROUND_COLOR="#f0d5c5" COLOR="#990033" CREATED="1708353494162" ID="ID_1417741022" MODIFIED="1708353524083" TEXT="Fraglich ob Einrechnen der Concurrency sinnvoll ist">
<node COLOR="#435e98" CREATED="1708353494162" ID="ID_1417741022" MODIFIED="1708359948264" TEXT="Fraglich ob Einrechnen der Concurrency sinnvoll ist">
<icon BUILTIN="messagebox_warning"/>
<node CREATED="1708353529389" ID="ID_1264074130" MODIFIED="1708353581342">
<richcontent TYPE="NODE"><html>
@ -111665,6 +111665,44 @@ Date:&#160;&#160;&#160;Thu Apr 20 18:53:17 2023 +0200<br/>
<node CREATED="1708353589901" ID="ID_753673138" MODIFIED="1708353635590" TEXT="was dann mithin nicht als Leistungsreserve gedeutet werden darf">
<icon BUILTIN="messagebox_warning"/>
</node>
<node COLOR="#338800" CREATED="1708354706383" ID="ID_160832329" MODIFIED="1708359924650" TEXT="kann man eine erwartete effektive Concurrency berechnen?">
<icon BUILTIN="help"/>
<node CREATED="1708355316244" ID="ID_1132423890" MODIFIED="1708355357499" TEXT="l&#xe4;uft auf eine Berechnung hinaus analog zum Schedule"/>
<node CREATED="1708355455934" ID="ID_1644260527" MODIFIED="1708355549750" TEXT="mu&#xdf; es aber from scratch berechnen">
<richcontent TYPE="NOTE"><html>
<head>
</head>
<body>
<p>
...die Berechnung l&#228;uft zwar genauso, n&#228;mlich &#252;ber eine Gruppierung per Level, jedoch m&#252;ssen dann nur die reinen Nodes pro Level ber&#252;cksichtigt werden
</p>
</body>
</html>
</richcontent>
</node>
<node CREATED="1708357060445" ID="ID_1779085910" MODIFIED="1708357108807" TEXT="mu&#xdf; dabei aber die Gewichte beachten">
<richcontent TYPE="NOTE"><html>
<head>
</head>
<body>
<p>
weil die Gewichte entsprechend proportional auch in die durchschnittliche empirische Concurrency eingehen
</p>
</body>
</html>
</richcontent>
</node>
<node CREATED="1708357112590" ID="ID_823184827" MODIFIED="1708357137110" TEXT="Idee: den f&#xfc;r&apos;s Schedule berechneten Level-WeightFactor ins Verh&#xe4;ltnis setzen"/>
<node CREATED="1708357576600" ID="ID_630915220" MODIFIED="1708357610199" TEXT="die Reduktion im Vergleich zur reinen weightSum wird als effektive Concurrency gedeutet"/>
</node>
<node COLOR="#338800" CREATED="1708359933075" ID="ID_559935495" MODIFIED="1708359945343" TEXT="nur die Abweichung von der erwarteten Concurrency in den Form-Faktor">
<icon BUILTIN="button_ok"/>
</node>
</node>
<node COLOR="#338800" CREATED="1708359949377" ID="ID_3066237" MODIFIED="1708359973277" TEXT="damit kommt ein Stre&#xdf;-Faktor ganz nah an 1 heraus!!">
<icon BUILTIN="ksmiletris"/>
</node>
</node>
</node>