mem: Add explicit Cache subclass and make BaseCache abstract

Open BaseCache up to other subclasses and transition existing users to the
explicit Cache subclass.

--HG--
rename : src/mem/cache/BaseCache.py => src/mem/cache/Cache.py
Andreas Hansson 2015-08-21 07:03:23 -04:00
parent d71a0d790d
commit ddfa96cf45
12 changed files with 39 additions and 34 deletions
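
For configuration scripts, the practical effect of this change is mechanical: cache classes that previously derived from BaseCache now derive from the concrete Cache SimObject, and BaseCache itself is no longer meant to be instantiated directly since it is marked abstract. Below is a minimal sketch of the new pattern, modelled on the proto_l1 definitions further down in this diff; the class name MyL1Cache is hypothetical, and the exact error raised when instantiating an abstract SimObject depends on gem5's SimObject machinery.

    from m5.objects import *

    # Derive from the concrete Cache subclass rather than the now-abstract BaseCache.
    class MyL1Cache(Cache):
        size = '32kB'
        assoc = 4
        hit_latency = 1
        response_latency = 1
        tgts_per_mshr = 8

    l1 = MyL1Cache()        # fine: Cache has type = 'Cache' and a CacheParams::create()
    # base = BaseCache()    # expected to be rejected: abstract = True, no create() any more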


@@ -46,7 +46,7 @@ from m5.objects import *
 # starting point, and specific parameters can be overridden in the
 # specific instantiations.
 
-class L1Cache(BaseCache):
+class L1Cache(Cache):
     assoc = 2
     hit_latency = 2
     response_latency = 2
@@ -59,7 +59,7 @@ class L1_ICache(L1Cache):
 class L1_DCache(L1Cache):
     pass
 
-class L2Cache(BaseCache):
+class L2Cache(Cache):
     assoc = 8
     hit_latency = 20
     response_latency = 20
@@ -67,7 +67,7 @@ class L2Cache(BaseCache):
     tgts_per_mshr = 12
     write_buffers = 8
 
-class IOCache(BaseCache):
+class IOCache(Cache):
     assoc = 8
     hit_latency = 50
     response_latency = 50
@@ -76,7 +76,7 @@ class IOCache(BaseCache):
     tgts_per_mshr = 12
     forward_snoops = False
 
-class PageTableWalkerCache(BaseCache):
+class PageTableWalkerCache(Cache):
     assoc = 2
     hit_latency = 2
     response_latency = 2


@@ -142,7 +142,7 @@ class O3_ARM_v7a_3(DerivO3CPU):
     branchPred = O3_ARM_v7a_BP()
 
 # Instruction Cache
-class O3_ARM_v7a_ICache(BaseCache):
+class O3_ARM_v7a_ICache(Cache):
     hit_latency = 1
     response_latency = 1
     mshrs = 2
@@ -153,7 +153,7 @@ class O3_ARM_v7a_ICache(BaseCache):
     is_read_only = True
 
 # Data Cache
-class O3_ARM_v7a_DCache(BaseCache):
+class O3_ARM_v7a_DCache(Cache):
     hit_latency = 2
     response_latency = 2
     mshrs = 6
@@ -164,7 +164,7 @@ class O3_ARM_v7a_DCache(BaseCache):
 
 # TLB Cache
 # Use a cache as a L2 TLB
-class O3_ARM_v7aWalkCache(BaseCache):
+class O3_ARM_v7aWalkCache(Cache):
     hit_latency = 4
     response_latency = 4
     mshrs = 6
@@ -176,7 +176,7 @@ class O3_ARM_v7aWalkCache(BaseCache):
     is_read_only = True
 
 # L2 Cache
-class O3_ARM_v7aL2(BaseCache):
+class O3_ARM_v7aL2(Cache):
     hit_latency = 12
     response_latency = 12
     mshrs = 16


@@ -152,9 +152,9 @@ for t, m in zip(testerspec, multiplier):
     numtesters += t * m
 
 # Define a prototype L1 cache that we scale for all successive levels
-proto_l1 = BaseCache(size = '32kB', assoc = 4,
-                     hit_latency = 1, response_latency = 1,
-                     tgts_per_mshr = 8)
+proto_l1 = Cache(size = '32kB', assoc = 4,
+                 hit_latency = 1, response_latency = 1,
+                 tgts_per_mshr = 8)
 
 if options.blocking:
     proto_l1.mshrs = 1


@@ -175,9 +175,9 @@ else:
     sys.exit(1)
 
 # Define a prototype L1 cache that we scale for all successive levels
-proto_l1 = BaseCache(size = '32kB', assoc = 4,
-                     hit_latency = 1, response_latency = 1,
-                     tgts_per_mshr = 8)
+proto_l1 = Cache(size = '32kB', assoc = 4,
+                 hit_latency = 1, response_latency = 1,
+                 tgts_per_mshr = 8)
 
 if options.blocking:
     proto_l1.mshrs = 1


@@ -137,7 +137,7 @@ class Water_spatial(LiveProcess):
 # Base L1 Cache Definition
 # ====================
 
-class L1(BaseCache):
+class L1(Cache):
     latency = options.l1latency
     mshrs = 12
     tgts_per_mshr = 8
@@ -146,7 +146,7 @@ class L1(BaseCache):
 # Base L2 Cache Definition
 # ----------------------
 
-class L2(BaseCache):
+class L2(Cache):
     latency = options.l2latency
     mshrs = 92
     tgts_per_mshr = 16


@@ -158,7 +158,7 @@ class Water_spatial(LiveProcess):
 # Base L1 Cache Definition
 # ====================
 
-class L1(BaseCache):
+class L1(Cache):
     latency = options.l1latency
     mshrs = 12
     tgts_per_mshr = 8
@@ -167,7 +167,7 @@ class L1(BaseCache):
 # Base L2 Cache Definition
 # ----------------------
 
-class L2(BaseCache):
+class L2(Cache):
     latency = options.l2latency
     mshrs = 92
     tgts_per_mshr = 16


@@ -37,6 +37,7 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
 # Authors: Nathan Binkert
+#          Andreas Hansson
 
 from m5.params import *
 from m5.proxy import *
@@ -46,6 +47,7 @@ from Tags import *
 
 class BaseCache(MemObject):
     type = 'BaseCache'
+    abstract = True
     cxx_header = "mem/cache/base.hh"
 
     size = Param.MemorySize("Capacity")
@@ -81,3 +83,7 @@ class BaseCache(MemObject):
         "Address range for the CPU-side port (to allow striping)")
 
     system = Param.System(Parent.any, "System we belong to")
+
+class Cache(BaseCache):
+    type = 'Cache'
+    cxx_header = 'mem/cache/cache.hh'


@@ -30,7 +30,7 @@
 
 Import('*')
 
-SimObject('BaseCache.py')
+SimObject('Cache.py')
 
 Source('base.cc')
 Source('cache.cc')

src/mem/cache/base.cc

@@ -65,13 +65,13 @@ BaseCache::CacheSlavePort::CacheSlavePort(const std::string &_name,
 {
 }
 
-BaseCache::BaseCache(const Params *p)
+BaseCache::BaseCache(const BaseCacheParams *p, unsigned blk_size)
     : MemObject(p),
       cpuSidePort(nullptr), memSidePort(nullptr),
       mshrQueue("MSHRs", p->mshrs, 4, p->demand_mshr_reserve, MSHRQueue_MSHRs),
       writeBuffer("write buffer", p->write_buffers, p->mshrs+1000, 0,
                   MSHRQueue_WriteBuffer),
-      blkSize(p->system->cacheLineSize()),
+      blkSize(blk_size),
       lookupLatency(p->hit_latency),
       forwardLatency(p->hit_latency),
       fillLatency(p->response_latency),
@@ -774,11 +774,3 @@ BaseCache::regStats()
         ;
 }
 
-BaseCache *
-BaseCacheParams::create()
-{
-    assert(tags);
-    return new Cache(this);
-}


@@ -473,8 +473,7 @@ class BaseCache : public MemObject
     virtual void regStats();
 
   public:
-    typedef BaseCacheParams Params;
-    BaseCache(const Params *p);
+    BaseCache(const BaseCacheParams *p, unsigned blk_size);
     ~BaseCache() {}
 
     virtual void init();


@@ -63,8 +63,8 @@
 #include "mem/cache/prefetch/base.hh"
 #include "sim/sim_exit.hh"
 
-Cache::Cache(const Params *p)
-    : BaseCache(p),
+Cache::Cache(const CacheParams *p)
+    : BaseCache(p, p->system->cacheLineSize()),
       tags(p->tags),
       prefetcher(p->prefetcher),
       doFastWrites(true),
@@ -2382,6 +2382,13 @@ CpuSidePort::CpuSidePort(const std::string &_name, Cache *_cache,
 {
 }
 
+Cache*
+CacheParams::create()
+{
+    assert(tags);
+    return new Cache(this);
+}
+
 ///////////////
 //
 // MemSidePort


@@ -57,6 +57,7 @@
 #include "mem/cache/blk.hh"
 #include "mem/cache/mshr.hh"
 #include "mem/cache/tags/base.hh"
+#include "params/Cache.hh"
 #include "sim/eventq.hh"
 
 //Forward decleration
@@ -419,7 +420,7 @@ class Cache : public BaseCache
   public:
     /** Instantiates a basic cache object. */
-    Cache(const Params *p);
+    Cache(const CacheParams *p);
 
     /** Non-default destructor is needed to deallocate memory. */
     virtual ~Cache();