# Copyright (c) 2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
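
# StatOutput pretty-prints a single statistic for every job in a job file
# and, via graph(), renders the same data as bar charts (one image per
# option combination) together with an HTML index page for the images.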

from chart import ChartOptions

class StatOutput(ChartOptions):
    def __init__(self, jobfile, info, stat=None, binstats=None):
        super(StatOutput, self).__init__()
        self.jobfile = jobfile
        self.stat = stat
        self.binstats = binstats
        self.invert = False
        self.info = info

    def printdata(self, name, bin = None, printmode = 'G'):
        import info

        if bin:
            print '%s %s stats' % (name, bin)

        if self.binstats:
            for stat in self.binstats:
                stat.bins = bin

        for job in self.jobfile.jobs():
            value = self.info.get(job, self.stat)
            if value is None:
                return

            if not isinstance(value, list):
                value = [ value ]

            if self.invert:
                for i,val in enumerate(value):
                    if val != 0.0:
                        value[i] = 1 / val

            # pick a format per value: '%g' in the default mode, otherwise
            # fixed point unless the value is large enough that scientific
            # notation is more readable
            valstrings = []
            for val in value:
                if printmode == 'G':
                    valformat = '%g'
                elif printmode != 'F' and val > 1e6:
                    valformat = '%0.5e'
                else:
                    valformat = '%f'
                valstrings.append(valformat % val)

            valstring = ', '.join(valstrings)
            print '%-50s %s' % (job.name + ':', valstring)

    def display(self, name, binned = False, printmode = 'G'):
        if binned and self.binstats:
            self.printdata(name, 'kernel', printmode)
            self.printdata(name, 'idle', printmode)
            self.printdata(name, 'user', printmode)
            self.printdata(name, 'interrupt', printmode)

            print '%s total stats' % name
        self.printdata(name, printmode=printmode)

    def graph(self, name, graphdir, proxy=None):
        from os.path import expanduser, isdir, join as joinpath
        from barchart import BarChart
        from matplotlib.numerix import Float, array, zeros
        import os, re, urllib
        from jobfile import crossproduct

        confgroups = self.jobfile.groups()
        ngroups = len(confgroups)
        skiplist = [ False ] * ngroups
        groupopts = []
        baropts = []
        groups = []
        for i,group in enumerate(confgroups):
            if group.flags.graph_group:
                groupopts.append(group.subopts())
                skiplist[i] = True
            elif group.flags.graph_bars:
                baropts.append(group.subopts())
                skiplist[i] = True
            else:
                groups.append(group)
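
        # Groups flagged graph_group become the chart's x-axis groups and
        # groups flagged graph_bars become the bars within each group; all
        # remaining groups are iterated over below, producing one chart per
        # combination of their options.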

        if not groupopts:
            raise AttributeError, 'No group selected for graph group'

        if not baropts:
            raise AttributeError, 'No group selected for graph bars'

        groupopts = [ group for group in crossproduct(groupopts) ]
        baropts = [ bar for bar in crossproduct(baropts) ]

        directory = expanduser(graphdir)
        if not isdir(directory):
            os.mkdir(directory)
        html = file(joinpath(directory, '%s.html' % name), 'w')
        print >>html, '<html>'
        print >>html, '<title>Graphs for %s</title>' % name
        print >>html, '<body>'
        html.flush()

        for options in self.jobfile.options(groups):
            chart = BarChart(self)

            data = [ [ None ] * len(baropts) for i in xrange(len(groupopts)) ]
            enabled = False
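
            # collect one value per (group, bar) job; vector-valued stats
            # set 'stacked' so the chart can draw stacked bars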
            stacked = 0
            for g,gopt in enumerate(groupopts):
                for b,bopt in enumerate(baropts):
                    job = self.jobfile.job(options + gopt + bopt)
                    if not job:
                        continue

                    if proxy:
                        import db
                        proxy.dict['system'] = self.info[job.system]
                    val = self.info.get(job, self.stat)
                    if val is None:
                        print 'stat "%s" for job "%s" not found' % \
                              (self.stat, job)

                    if isinstance(val, (list, tuple)):
                        if len(val) == 1:
                            val = val[0]
                        else:
                            stacked = len(val)

                    data[g][b] = val

            if stacked == 0:
                for i in xrange(len(groupopts)):
                    for j in xrange(len(baropts)):
                        if data[i][j] is None:
                            data[i][j] = 0.0
            else:
                for i in xrange(len(groupopts)):
                    for j in xrange(len(baropts)):
                        val = data[i][j]
                        if val is None:
                            data[i][j] = [ 0.0 ] * stacked
                        elif len(val) != stacked:
                            raise ValueError, "some stats stacked, some not"

            data = array(data)
            if data.sum() == 0:
                continue
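
            # prune rows (groups) and columns (bars) whose values are all
            # zero so the chart only shows populated configurations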
            x = data.shape[0]
            y = data.shape[1]
            xkeep = [ i for i in xrange(x) if data[i].sum() != 0 ]
            ykeep = [ i for i in xrange(y) if data[:,i].sum() != 0 ]
            data = data.take(xkeep, axis=0)
            data = data.take(ykeep, axis=1)
            chart.data = data

            gopts = [ groupopts[i] for i in xkeep ]
            bopts = [ baropts[i] for i in ykeep ]

            bdescs = [ ' '.join([o.desc for o in opt]) for opt in bopts]
            gdescs = [ ' '.join([o.desc for o in opt]) for opt in gopts]

            if chart.legend is None:
                if stacked:
                    try:
                        chart.legend = self.info.rcategories
                    except:
                        chart.legend = [ str(i) for i in xrange(stacked) ]
                else:
                    chart.legend = bdescs

            if chart.xticks is None:
                chart.xticks = gdescs
            chart.graph()

            names = [ opt.name for opt in options ]
            descs = [ opt.desc for opt in options ]

            if names[0] == 'run':
                names = names[1:]
                descs = descs[1:]

            basename = '%s-%s' % (name, ':'.join(names))
            desc = ' '.join(descs)

            pngname = '%s.png' % basename
            psname = '%s.ps' % re.sub(':', '-', basename)
            epsname = '%s.eps' % re.sub(':', '-', basename)
            chart.savefig(joinpath(directory, pngname))
            chart.savefig(joinpath(directory, epsname))
            chart.savefig(joinpath(directory, psname))
            html_name = urllib.quote(pngname)
            print >>html, '''%s<br><img src="%s"><br>''' % (desc, html_name)
            html.flush()

        print >>html, '</body>'
        print >>html, '</html>'
        html.close()
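
# Example use (a sketch with hypothetical names; the jobfile and info
# objects are built elsewhere, e.g. by the jobfile and info/db modules):
#
#     output = StatOutput(myjobfile, myinfo, stat=mystat)
#     output.display('ipc', binned=False, printmode='G')  # print a text table
#     output.graph('ipc', '~/graphs')                     # write charts + HTML index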