source: doc/theses/mike_brooks_MMath/plots/ListCommon.py@ 5a95560

stuck-waitfor-destruct
Last change on this file since 5a95560 was fb7c9168, checked in by Michael Brooks <mlbrooks@…>, 4 months ago

Data and analysis associated with the benchmark harness of commit 78bc398830.

Analysis change narrows the sweet-spot zone to where CFA is not misbehaving on queues and where general noise is reduced.

Revised plots are going into thesis doc, but commentary/focus has not shifted accordingly. Notably, the CFA-attribution analysis is now mostly obviated.

  • Property mode set to 100644
File size: 6.8 KB
Line 
1# Based on crunch1
2# updates for run-scenario columns not seen back then
3# result eyeballs okay
4
5import pandas as pd
6import numpy as np
7import sys
8import os
9from subprocess import Popen, PIPE
10
def getDataset( infile ):
    """Load one benchmark timing CSV and derive baseline-relative measures.

    Parameters
    ----------
    infile : str
        Path to the raw timing CSV produced by the benchmark harness.

    Returns
    -------
    pandas.DataFrame
        One row per run: the raw columns, the fields parsed out of the
        'Args' and 'Program' strings, and the derived relative columns
        'OpDurRelFx' and 'OpDurRelIntrl'.
    """
    # grep to remove lines that end in comma; these were error runs.
    # Pass arguments as a list (no shell) so the filename is handed to
    # grep verbatim -- safe for paths with spaces or shell metacharacters.
    with Popen(["grep", "[^,]$", infile], stdout=PIPE) as process:
        timings = pd.read_csv(
            process.stdout,
            names=['RunMoment', 'RunIdx', 'Args', 'Program', 'expt_ops_completed', 'expt_elapsed_sec', 'mean_op_dur_ns'],
            dtype={'RunMoment': str,
                   'RunIdx': np.int64,
                   'Args': str,
                   'Program': str,
                   'expt_ops_completed': np.int64,
                   'expt_elapsed_sec': np.float64,
                   'mean_op_dur_ns': np.float64},
            parse_dates=['RunMoment']
        )
    # print(timings.head())

    ## parse executable name and args

    timings[['ExperimentDurSec',
             'CheckDonePeriod',
             'NumNodes',
             'ExperimentDurOpCount',
             'Seed',
             'InterleaveFrac']] = timings['Args'].str.strip().str.split(expand=True)
    timings["NumNodes"] = pd.to_numeric(timings["NumNodes"])
    timings["InterleaveFrac"] = pd.to_numeric(timings["InterleaveFrac"]).round(3)

    # program name appears to be <prefix>--<fx>--<op>  -- TODO confirm against harness
    timings[['__ProgramPrefix',
             'fx',
             'op']] = timings['Program'].str.split('--', expand=True)

    # op appears to be <movement>-<polarity>-<accessor>
    timings[['movement',
             'polarity',
             'accessor']] = timings['op'].str.split('-', expand=True)

    ## calculate relative to baselines
    baseline_fx = 'lq-tailq'
    baseline_intrl = 0.0

    # chose calc "FineCrossRun" from labpc:crunch3
    # relative to the baseline implementation within the same scenario
    byPeer = timings.groupby(['NumNodes', 'op', 'InterleaveFrac'])
    for [NumNodes, op, intrlFrac], peerGroup in byPeer:
        grpfx = peerGroup.groupby(['fx'])
        if baseline_fx in grpfx.groups:
            baselineRows = grpfx.get_group(baseline_fx)
            baselineDur = meanNoOutlr( baselineRows['mean_op_dur_ns'] )
        else:
            # no baseline run in this scenario: leave durations unscaled
            baselineDur = 1.0
        timings.loc[peerGroup.index, 'BaselineFxOpDurNs'] = baselineDur
    timings['OpDurRelFx'] = timings['mean_op_dur_ns'] / timings['BaselineFxOpDurNs']

    # relative to same fx, no interleave
    byPeer = timings.groupby(['NumNodes', 'op', 'fx'])
    for [NumNodes, op, fx], peerGroup in byPeer:
        baselineRows = peerGroup.groupby(['InterleaveFrac']).get_group(baseline_intrl)
        baselineDur = meanNoOutlr( baselineRows['mean_op_dur_ns'] )
        timings.loc[peerGroup.index, 'BaselineIntrlOpDurNs'] = baselineDur
    timings['OpDurRelIntrl'] = timings['mean_op_dur_ns'] / timings['BaselineIntrlOpDurNs']

    return timings
72
def getSingleResults(infileLocal, *,
                     tgtMovement = 'all',
                     tgtPolarity = 'all',
                     tgtAccessor = 'all',
                     tgtInterleave = 0.0 ):
    """Load one benchmark dataset and narrow it to a single scenario.

    Parameters
    ----------
    infileLocal : str
        File name relative to the ../benchmarks/list/ directory that
        sits beside this script.
    tgtMovement, tgtPolarity, tgtAccessor : str
        Operation components to keep; 'all' leaves that dimension
        unfiltered.
    tgtInterleave : float or 'all'
        InterleaveFrac value to keep; 'all' keeps every interleave level.

    Returns
    -------
    pandas.DataFrame
        The rows of getDataset(...) matching the requested scenario.

    Raises
    ------
    KeyError
        If a non-'all' target names a group absent from the data
        (groupby.get_group semantics).
    """
    # benchmark outputs live in the repo, relative to this script
    infile = os.path.dirname(os.path.abspath(__file__)) + '/../benchmarks/list/' + infileLocal

    timings = getDataset( infile )

    # (removed dead code that collected the unique movement/polarity/
    # accessor/interleave values and appended 'all', but never used them)

    if (tgtMovement != 'all'):
        timings = timings.groupby('movement').get_group(tgtMovement)
    if (tgtPolarity != 'all'):
        timings = timings.groupby('polarity').get_group(tgtPolarity)
    if (tgtAccessor != 'all'):
        timings = timings.groupby('accessor').get_group(tgtAccessor)
    if (tgtInterleave != 'all'):
        timings = timings[ timings['InterleaveFrac'] == float(tgtInterleave) ]

    return timings
108
def getSummaryMeta(metaFileCore):
    """Read the plot-metadata file pairing operation indices with labels.

    The file <metaFileCore>-meta.dat sits beside this script and is
    tab-separated: an operation index, then a gnuplot-style label whose
    three parts are joined by literal backslash-n sequences.

    Returns a DataFrame with columns OpIx, Op, movement, polarity and
    accessor, where a '*' component has been normalized to 'all'.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    metadata = pd.read_csv(
        here + "/" + metaFileCore + '-meta.dat',
        names=['OpIx', 'Op'],
        delimiter='\t'
    )
    # the label embeds literal "\n" separators between its three parts
    metadata[['movement',
              'polarity',
              'accessor']] = metadata['Op'].str.split('\\\\n', expand=True)
    metadata = metadata.replace('*', 'all')
    return metadata
121
def printManySummary(*,
                     infileLocal,
                     metafileCore,
                     fxs,
                     sizeQual = (lambda x: x > 16 and x < 150), # for Swift
                     #sizeQual = (lambda x: x >= 2 and x <= 32), # for java
                     tgtInterleave = 0.0,
                     measure = 'OpDurRelFx') :
    """Print one tab-separated summary row per (operation, fx) pair.

    For every operation listed in the metadata file, loads the matching
    runs, keeps only the implementations in `fxs` at sizes accepted by
    `sizeQual`, and emits mean/std/min/max/count plus five percentiles
    of `measure`.
    """
    ops = getSummaryMeta(metafileCore)

    print("# op_num\tfx_num\tfx\tmean\tstdev\tmin\tmax\tcount\tpl95\tpl68\tp50\tph68\tph95")

    quantileLevels = (0.025, 0.16, 0.5, 0.84, 0.975)

    for op in ops.itertuples():
        runs = getSingleResults(infileLocal,
                                tgtMovement = op.movement,
                                tgtPolarity = op.polarity,
                                tgtAccessor = op.accessor,
                                tgtInterleave = tgtInterleave )

        # keep only the requested implementations at qualifying list sizes
        runs = runs[ runs['fx'].isin(fxs) ]
        runs = runs[ runs['NumNodes'].apply(sizeQual) ]

        # 1-based position of each fx within the caller's list
        fxIndex = runs['fx'].apply(lambda name: fxs.index(name) + 1)
        runs.insert(loc=0, column='fx_num', value=fxIndex)
        runs.insert(loc=0, column='op_num', value=op.OpIx)

        stats = runs.groupby(['op_num', 'fx_num', 'fx'])[measure].agg(
            ["mean", "std", "min", "max", "count"]
            # bind q as a default so each lambda keeps its own level
            + [ (lambda x, q=q: x.quantile(q)) for q in quantileLevels ]
        )

        print(stats.to_csv(header=False, index=True, sep='\t'), end='')
164
def printSingleDetail(infileLocal, *,
                      tgtMovement = 'all',
                      tgtPolarity = 'all',
                      tgtAccessor = 'all',
                      tgtInterleave = 0.0,
                      measure = 'mean_op_dur_ns' ):
    """For each implementation (fx), print per-list-size aggregates.

    Emits one quoted fx header followed by a tab-separated table keyed
    by NumNodes: mean, std, min, max, count, sum and a trimmed mean of
    `measure`, with two blank lines between implementations (gnuplot
    dataset separator).
    """
    runs = getSingleResults(infileLocal,
                            tgtMovement = tgtMovement,
                            tgtPolarity = tgtPolarity,
                            tgtAccessor = tgtAccessor,
                            tgtInterleave = tgtInterleave)

    for fx, rows in runs.groupby('fx'):
        # print(rows.head())
        bySize = rows.groupby(['NumNodes']) # , 'fx', 'op'
        stats = bySize[measure].agg(
            ["mean", "std", "min", "max", "count", "sum"]
        )
        # trimmed mean: drop the single fastest and slowest run
        stats['mean_no_outlr'] = (
            ( stats['sum'] - stats['min'] - stats['max'] )
            /
            ( stats['count'] - 2 )
        )

        #print(stats.head())

        print('"{header}"'.format(header=fx))
        print(stats.to_csv(header=False, index=True, sep='\t'))
        print()
        print()
198
def meanNoOutlr(values):
    """Return the mean of *values* with the single min and max discarded.

    Parameters
    ----------
    values : pandas.Series
        Numeric samples (NaNs are ignored by sum/min/max/count).

    Returns
    -------
    float
        The trimmed mean; with fewer than three samples the plain mean,
        since dropping both extremes would leave nothing to average
        (the old code divided by zero at exactly two samples).
    """
    # NOTE: parameter renamed from 'range', which shadowed the builtin
    n = values.count()
    if n <= 2:
        return values.mean()
    return ( values.sum() - values.min() - values.max() ) / ( n - 2 )
Note: See TracBrowser for help on using the repository browser.