util: update checkpoint aggregation script
The checkpoint aggregation script had become outdated due to the numerous changes made to checkpoints over the past couple of years. This patch updates the script: it now supports aggregation for the x86 architecture instead of Alpha, and it adds two new options, one specifying the size of the memory file to be created and one controlling whether or not the memory file should be compressed.
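For reference, a minimal sketch of how the reworked aggregate() entry point is driven once the new command-line options have been parsed. The import path is hypothetical (the changed file's name is not shown on this page), and the output directory, checkpoint directory names, and 512 MB memory size are made-up example values:

    # Hypothetical module name; the changed file's name is not shown on this page.
    from checkpoint_aggregator import aggregate

    # Combine two per-program checkpoints into one multi-programmed checkpoint.
    # Directory names and the memory size are example values only.
    aggregate(output_dir="agg-cpt",                # receives m5.cpt and system.physmem.store0.pmem
              cpts=["cpt.prog0", "cpt.prog1"],     # checkpoint directories to merge, in CPU order
              no_compress=False,                   # keep the merged memory image gzip-compressed
              memory_size=512 * 1024 * 1024)       # pad the memory file up to this many bytes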
parent c033ead992
commit 59a041c5e7
1 changed file with 80 additions and 96 deletions
@@ -1,5 +1,6 @@
 # Copyright (c) 2009 The Regents of The University of Michigan
 # Copyright (c) 2011 Advanced Micro Devices, Inc.
+# Copyright (c) 2013 Mark D. Hill and David A. Wood
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -26,11 +27,12 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
 # Authors: Lisa Hsu
+#          Nilay Vaish
 
 from ConfigParser import ConfigParser
 import gzip
 
-import sys, re, optparse, os
+import sys, re, os
 
 class myCP(ConfigParser):
     def __init__(self):
@@ -39,75 +41,43 @@ class myCP(ConfigParser):
     def optionxform(self, optionstr):
         return optionstr
 
-def aggregate(options, args):
-    merged = myCP()
+def aggregate(output_dir, cpts, no_compress, memory_size):
+    merged_config = None
     page_ptr = 0
 
-    allfiles = os.listdir(os.getcwd())
-    cpts = []
-    for arg in args:
-        found = False
-        for f in allfiles:
-            if re.compile("cpt." + arg + ".\d+").search(f):
-                found = True
-                cpts.append(f)
-                break
-        if not found:
-            print "missing checkpoint: ", arg
-            sys.exit(1)
+    output_path = output_dir
+    if not os.path.isdir(output_path):
+        os.system("mkdir -p " + output_path)
 
-    dirname = "-".join([options.prefix, "cpt"])
-    agg_name = "-".join(args)
-    print agg_name
-    fullpath = os.path.join("..", dirname, "cpt." + agg_name + ".10000")
-    if not os.path.isdir(fullpath):
-        os.system("mkdir -p " + fullpath)
-    elif os.path.isfile(fullpath + "/system.physmem.physmem"):
-        if os.path.isfile(fullpath + "/m5.cpt"):
-            print fullpath, " already done"
-            return
+    agg_mem_file = open(output_path + "/system.physmem.store0.pmem", "wb+")
+    agg_config_file = open(output_path + "/m5.cpt", "wb+")
 
-    myfile = open(fullpath + "/system.physmem.physmem", "wb+")
-    merged_mem = gzip.GzipFile(fileobj=myfile, mode="wb")
+    if not no_compress:
+        merged_mem = gzip.GzipFile(fileobj= agg_mem_file, mode="wb")
 
     max_curtick = 0
-    when = 0
-    for (i, arg) in enumerate(args):
+    num_digits = len(str(len(cpts)-1))
+
+    for (i, arg) in enumerate(cpts):
         print arg
+        merged_config = myCP()
         config = myCP()
         config.readfp(open(cpts[i] + "/m5.cpt"))
 
         for sec in config.sections():
             if re.compile("cpu").search(sec):
-                newsec = re.sub("cpu", "cpu" + str(i), sec)
-                merged.add_section(newsec)
-                if re.compile("workload$").search(sec):
-                    merged.set(newsec, "M5_pid", i)
+                newsec = re.sub("cpu", "cpu" + str(i).zfill(num_digits), sec)
+                merged_config.add_section(newsec)
 
                 items = config.items(sec)
-                if options.alpha:
-                    for item in items:
-                        if item[0] == "ppn":
-                            if config.getint(sec, "tag") != 0:
-                                merged.set(newsec, item[0], int(item[1]) + page_ptr)
-                                continue
-                        elif item[0] == "asn":
-                            tmp = re.compile("(.*).Entry(\d+)").search(sec).groups()
-                            if config.has_option(tmp[0], "nlu"):
-                                size = config.getint(tmp[0], "nlu")
-                                if int(tmp[1]) < size:
-                                    merged.set(newsec, item[0], i)
-                                    continue
-                            else:
-                                merged.set(newsec, item[0], i)
-                                continue
-                        merged.set(newsec, item[0], item[1])
-                else: #x86
-                    for item in items:
-                        if item[0] == "paddr":
-                            merged.set(newsec, item[0], int(item[1]) + (page_ptr << 12))
-                            continue
-                        merged.set(newsec, item[0], item[1])
+                for item in items:
+                    if item[0] == "paddr":
+                        merged_config.set(newsec, item[0], int(item[1]) + (page_ptr << 12))
+                        continue
+                    merged_config.set(newsec, item[0], item[1])
+
+                if re.compile("workload.FdMap256$").search(sec):
+                    merged_config.set(newsec, "M5_pid", i)
 
             elif sec == "system":
                 pass
@@ -115,68 +85,82 @@ def aggregate(options, args):
                 tick = config.getint(sec, "curTick")
                 if tick > max_curtick:
                     max_curtick = tick
-                    when = config.getint("system.cpu.tickEvent", "_when")
             else:
-                if i == 0:
-                    merged.add_section(sec)
+                if i == len(cpts)-1:
+                    merged_config.add_section(sec)
                     for item in config.items(sec):
-                        merged.set(sec, item[0], item[1])
-                        if item[0] == "curtick":
-                            merged.optionxform(str("curTick"))
-                        elif item[0] == "numevents":
-                            merged.optionxform(str("numEvents"))
+                        merged_config.set(sec, item[0], item[1])
 
-        page_ptr = page_ptr + int(config.get("system", "pagePtr"))
+        if i != len(cpts)-1:
+            merged_config.write(agg_config_file)
 
         ### memory stuff
-        f = open(cpts[i] + "/system.physmem.physmem", "rb")
-        gf = gzip.GzipFile(fileobj=f, mode="rb")
         pages = int(config.get("system", "pagePtr"))
+        page_ptr = page_ptr + pages
         print "pages to be read: ", pages
 
+        f = open(cpts[i] + "/system.physmem.store0.pmem", "rb")
+        gf = gzip.GzipFile(fileobj=f, mode="rb")
+
         x = 0
         while x < pages:
-            if options.alpha:
-                bytesRead = gf.read(1 << 13)
-            else: #x86
-                bytesRead = gf.read(1 << 12)
-            merged_mem.write(bytesRead)
+            bytesRead = gf.read(1 << 12)
+            if not no_compress:
+                merged_mem.write(bytesRead)
+            else:
+                agg_mem_file.write(bytesRead)
             x += 1
 
         gf.close()
         f.close()
 
-    merged.add_section("system")
-    merged.set("system", "pagePtr", page_ptr)
-    merged.set("system", "nextPID", len(args))
+    merged_config.add_section("system")
+    merged_config.set("system", "pagePtr", page_ptr)
+    merged_config.set("system", "nextPID", len(cpts))
 
+    file_size = page_ptr * 4 * 1024
+    dummy_data = "".zfill(4096)
+    while file_size < memory_size:
+        if not no_compress:
+            merged_mem.write(dummy_data)
+        else:
+            agg_mem_file.write(dummy_data)
+        file_size += 4 * 1024
+        page_ptr += 1
+
     print "WARNING: "
     print "Make sure the simulation using this checkpoint has at least ",
-    if options.alpha:
-        print page_ptr, "x 8K of memory"
-    else: # assume x86
-        print page_ptr, "x 4K of memory"
+    print page_ptr, "x 4K of memory"
+    merged_config.set("system.physmem.store0", "range_size", page_ptr * 4 * 1024)
 
-    merged.add_section("Globals")
-    merged.set("Globals", "curTick", max_curtick)
+    merged_config.add_section("Globals")
+    merged_config.set("Globals", "curTick", max_curtick)
 
-    for i in xrange(len(args)):
-        merged.set("system.cpu" + str(i) + ".tickEvent", "_when", when)
+    merged_config.write(agg_config_file)
 
-    merged.write(file(fullpath + "/m5.cpt", "wb"))
-    merged_mem.close()
-    myfile.close()
+    if not no_compress:
+        merged_mem.close()
+        agg_mem_file.close()
+    else:
+        agg_mem_file.close()
 
 if __name__ == "__main__":
-    parser = optparse.OptionParser()
-    parser.add_option("--prefix", type="string", default="agg")
-    # If not alpha, then assume x86. Any other ISAs would need
-    # extra stuff in this script to appropriately parse their page tables
-    # and understand page sizes.
-    parser.add_option("--alpha", action="store_true")
-    (options, args) = parser.parse_args()
+    from argparse import ArgumentParser
+    parser = ArgumentParser("usage: %prog [options] <directory names which "\
+                            "hold the checkpoints to be combined>")
+    parser.add_argument("-o", "--output-dir", action="store",
+                        help="Output directory")
+    parser.add_argument("-c", "--no-compress", action="store_true")
+    parser.add_argument("--cpts", nargs='+')
+    parser.add_argument("--memory-size", action="store", type=int)
+
+    # Assume x86 ISA. Any other ISAs would need extra stuff in this script
+    # to appropriately parse their page tables and understand page sizes.
+    options = parser.parse_args()
+    print options.cpts, len(options.cpts)
+    if len(options.cpts) <= 1:
+        parser.error("You must specify atleast two checkpoint files that "\
+                     "need to be combined.")
 
-    aggregate(options, args)
+    aggregate(options.output_dir, options.cpts, options.no_compress,
+              options.memory_size)