prefetcher: Make prefetcher a sim object instead of it being a parameter on cache
parent b7cf64398f
commit 7e104a1af2
15 changed files with 110 additions and 94 deletions

@@ -190,10 +190,7 @@ class O3_ARM_v7aL2(BaseCache):
     size = '1MB'
     assoc = 16
     write_buffers = 8
-    # Simple stride prefetcher
-    prefetch_policy = 'stride'
-    prefetch_on_access = 'true'
-    prefetch_latency = '1.0ns'
-    prefetch_degree = 8
+    # Simple stride prefetcher
+    prefetcher = StridePrefetcher(degree=8, latency='1.0ns')

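The hunk above shows the user-visible effect of this change: the flat prefetch_* cache parameters are replaced by a prefetcher SimObject attached to the cache. As a rough before/after sketch of the config syntax (the L2 class names below are illustrative; only the StridePrefetcher values come from this changeset):

    # Old style: the prefetcher is chosen through string/scalar cache parameters
    class L2Old(BaseCache):
        size = '1MB'
        assoc = 16
        prefetch_policy = 'stride'
        prefetch_degree = 8
        prefetch_latency = '1.0ns'

    # New style: the prefetcher is a SimObject assigned to the cache
    class L2New(BaseCache):
        size = '1MB'
        assoc = 16
        prefetcher = StridePrefetcher(degree=8, latency='1.0ns')
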
src/mem/cache/BaseCache.py (19 changed lines)

@@ -29,8 +29,8 @@
 from m5.params import *
 from m5.proxy import Self
 from MemObject import MemObject
+from Prefetcher import BasePrefetcher

-class Prefetch(Enum): vals = ['none', 'tagged', 'stride', 'ghb']

 class BaseCache(MemObject):
     type = 'BaseCache'

@@ -58,22 +58,7 @@ class BaseCache(MemObject):
     write_buffers = Param.Int(8, "number of write buffers")
     prefetch_on_access = Param.Bool(False,
         "notify the hardware prefetcher on every access (not just misses)")
-    prefetcher_size = Param.Int(100,
-        "Number of entries in the hardware prefetch queue")
-    prefetch_past_page = Param.Bool(False,
-        "Allow prefetches to cross virtual page boundaries")
-    prefetch_serial_squash = Param.Bool(False,
-        "Squash prefetches with a later time on a subsequent miss")
-    prefetch_degree = Param.Int(1,
-        "Degree of the prefetch depth")
-    prefetch_latency = Param.Latency(10 * Self.latency,
-        "Latency of the prefetcher")
-    prefetch_policy = Param.Prefetch('none',
-        "Type of prefetcher to use")
-    prefetch_use_cpu_id = Param.Bool(True,
-        "Use the CPU ID to separate calculations of prefetches")
-    prefetch_data_accesses_only = Param.Bool(False,
-        "Only prefetch on data not on instruction accesses")
+    prefetcher = Param.BasePrefetcher(NULL,"Prefetcher attached to cache")
     cpu_side = Port("Port on side closer to CPU")
     mem_side = Port("Port on side closer to MEM")
     addr_range = Param.AddrRange(AllMemory, "The address range for the CPU-side port")

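Note that the new prefetcher parameter above defaults to NULL, so a cache with no explicit assignment simply runs without a hardware prefetcher. A minimal sketch of how a config script might attach one conditionally (the options flag and the dcache variable are hypothetical, and other required cache parameters are omitted):

    dcache = BaseCache(size='32kB', assoc=2)   # other required cache params omitted
    if options.l1_prefetcher:                  # hypothetical command-line flag
        dcache.prefetcher = TaggedPrefetcher(degree=2)
    # leaving dcache.prefetcher unset keeps the NULL default: no prefetching
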
src/mem/cache/builder.cc (20 changed lines)

@@ -37,7 +37,6 @@
 #include <vector>

 #include "config/the_isa.hh"
-#include "enums/Prefetch.hh"
 #include "mem/cache/base.hh"
 #include "mem/cache/cache.hh"
 #include "mem/config/cache.hh"

@@ -57,30 +56,13 @@
 #include "mem/cache/tags/iic.hh"
 #endif

-//Prefetcher Headers
-#include "mem/cache/prefetch/ghb.hh"
-#include "mem/cache/prefetch/stride.hh"
-#include "mem/cache/prefetch/tagged.hh"

 using namespace std;

 #define BUILD_CACHE(TAGS, tags) \
     do { \
-        BasePrefetcher *pf; \
-        if (prefetch_policy == Enums::tagged) { \
-            pf = new TaggedPrefetcher(this); \
-        } \
-        else if (prefetch_policy == Enums::stride) { \
-            pf = new StridePrefetcher(this); \
-        } \
-        else if (prefetch_policy == Enums::ghb) { \
-            pf = new GHBPrefetcher(this); \
-        } \
-        else { \
-            pf = NULL; \
-        } \
         Cache<TAGS> *retval = \
-            new Cache<TAGS>(this, tags, pf); \
+            new Cache<TAGS>(this, tags); \
         return retval; \
     } while (0)

src/mem/cache/cache.hh (2 changed lines)

@@ -211,7 +211,7 @@ class Cache : public BaseCache

   public:
     /** Instantiates a basic cache object. */
-    Cache(const Params *p, TagStore *tags, BasePrefetcher *prefetcher);
+    Cache(const Params *p, TagStore *tags);

     virtual Port *getPort(const std::string &if_name, int idx = -1);

src/mem/cache/cache_impl.hh (6 changed lines)

@@ -63,10 +63,10 @@
 #include "sim/sim_exit.hh"

 template<class TagStore>
-Cache<TagStore>::Cache(const Params *p, TagStore *tags, BasePrefetcher *pf)
+Cache<TagStore>::Cache(const Params *p, TagStore *tags)
     : BaseCache(p),
       tags(tags),
-      prefetcher(pf),
+      prefetcher(p->prefetcher),
       doFastWrites(true),
       prefetchOnAccess(p->prefetch_on_access)
 {

@@ -89,8 +89,6 @@ Cache<TagStore>::regStats()
 {
     BaseCache::regStats();
     tags->regStats(name());
-    if (prefetcher)
-        prefetcher->regStats(name());
 }

 template<class TagStore>

src/mem/cache/prefetch/Prefetcher.py (new file, 35 lines)

@@ -0,0 +1,35 @@
+from m5.SimObject import SimObject
+from m5.params import *
+
+class BasePrefetcher(SimObject):
+    type = 'BasePrefetcher'
+    abstract = True
+    size = Param.Int(100,
+        "Number of entries in the hardware prefetch queue")
+    cross_pages = Param.Bool(False,
+        "Allow prefetches to cross virtual page boundaries")
+    serial_squash = Param.Bool(False,
+        "Squash prefetches with a later time on a subsequent miss")
+    degree = Param.Int(1,
+        "Degree of the prefetch depth")
+    latency = Param.Latency('10t',
+        "Latency of the prefetcher")
+    use_cpu_id = Param.Bool(True,
+        "Use the CPU ID to separate calculations of prefetches")
+    data_accesses_only = Param.Bool(False,
+        "Only prefetch on data not on instruction accesses")
+
+class GHBPrefetcher(BasePrefetcher):
+    type = 'GHBPrefetcher'
+    cxx_class = 'GHBPrefetcher'
+
+class StridePrefetcher(BasePrefetcher):
+    type = 'StridePrefetcher'
+    cxx_class = 'StridePrefetcher'
+
+class TaggedPrefetcher(BasePrefetcher):
+    type = 'TaggedPrefetcher'
+    cxx_class = 'TaggedPrefetcher'

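Because the prefetchers are now ordinary SimObjects, a configuration can subclass them and override the parameters declared above, the same way cache classes are specialized. A small sketch under that assumption (the class name, values, and the l2 variable are illustrative, not part of this changeset):

    class AggressiveStride(StridePrefetcher):
        degree = 4              # issue four prefetches per trigger
        latency = '2ns'         # delay before each prefetch is sent
        cross_pages = False     # keep the default page-boundary restriction

    l2.prefetcher = AggressiveStride(size=200)  # per-instance override of the queue size
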
src/mem/cache/prefetch/SConscript (1 changed line)

@@ -32,6 +32,7 @@ Import('*')

 if env['TARGET_ISA'] == 'no':
     Return()
+SimObject('Prefetcher.py')

 Source('base.cc')
 Source('ghb.cc')

src/mem/cache/prefetch/base.cc (31 changed lines)

@@ -43,10 +43,10 @@
 #include "mem/cache/base.hh"
 #include "mem/request.hh"

-BasePrefetcher::BasePrefetcher(const BaseCacheParams *p)
-    : size(p->prefetcher_size), pageStop(!p->prefetch_past_page),
-      serialSquash(p->prefetch_serial_squash),
-      onlyData(p->prefetch_data_accesses_only)
+BasePrefetcher::BasePrefetcher(const Params *p)
+    : SimObject(p), size(p->size), latency(p->latency), degree(p->degree),
+      useContextId(p->use_cpu_id), pageStop(!p->cross_pages),
+      serialSquash(p->serial_squash), onlyData(p->data_accesses_only)
 {
 }

@@ -55,54 +55,53 @@ BasePrefetcher::setCache(BaseCache *_cache)
 {
     cache = _cache;
     blkSize = cache->getBlockSize();
-    _name = cache->name() + "-pf";
 }

 void
-BasePrefetcher::regStats(const std::string &name)
+BasePrefetcher::regStats()
 {
     pfIdentified
-        .name(name + ".prefetcher.num_hwpf_identified")
+        .name(name() + ".prefetcher.num_hwpf_identified")
         .desc("number of hwpf identified")
         ;

     pfMSHRHit
-        .name(name + ".prefetcher.num_hwpf_already_in_mshr")
+        .name(name() + ".prefetcher.num_hwpf_already_in_mshr")
         .desc("number of hwpf that were already in mshr")
         ;

     pfCacheHit
-        .name(name + ".prefetcher.num_hwpf_already_in_cache")
+        .name(name() + ".prefetcher.num_hwpf_already_in_cache")
        .desc("number of hwpf that were already in the cache")
         ;

     pfBufferHit
-        .name(name + ".prefetcher.num_hwpf_already_in_prefetcher")
+        .name(name() + ".prefetcher.num_hwpf_already_in_prefetcher")
         .desc("number of hwpf that were already in the prefetch queue")
         ;

     pfRemovedFull
-        .name(name + ".prefetcher.num_hwpf_evicted")
+        .name(name() + ".prefetcher.num_hwpf_evicted")
         .desc("number of hwpf removed due to no buffer left")
         ;

     pfRemovedMSHR
-        .name(name + ".prefetcher.num_hwpf_removed_MSHR_hit")
+        .name(name() + ".prefetcher.num_hwpf_removed_MSHR_hit")
         .desc("number of hwpf removed because MSHR allocated")
         ;

     pfIssued
-        .name(name + ".prefetcher.num_hwpf_issued")
+        .name(name() + ".prefetcher.num_hwpf_issued")
         .desc("number of hwpf issued")
         ;

     pfSpanPage
-        .name(name + ".prefetcher.num_hwpf_span_page")
+        .name(name() + ".prefetcher.num_hwpf_span_page")
         .desc("number of hwpf spanning a virtual page")
         ;

     pfSquashed
-        .name(name + ".prefetcher.num_hwpf_squashed_from_miss")
+        .name(name() + ".prefetcher.num_hwpf_squashed_from_miss")
         .desc("number of hwpf that got squashed due to a miss "
               "aborting calculation time")
         ;

@@ -276,3 +275,5 @@ BasePrefetcher::samePage(Addr a, Addr b)
 {
     return roundDown(a, TheISA::VMPageSize) == roundDown(b, TheISA::VMPageSize);
 }

src/mem/cache/prefetch/base.hh (30 changed lines)

@@ -41,10 +41,11 @@
 #include "base/statistics.hh"
 #include "mem/packet.hh"
 #include "params/BaseCache.hh"
+#include "sim/sim_object.hh"

 class BaseCache;

-class BasePrefetcher
+class BasePrefetcher : public SimObject
 {
   protected:

@@ -62,6 +63,14 @@ class BasePrefetcher
     /** The block size of the parent cache. */
     int blkSize;

+    /** The latency before a prefetch is issued */
+    Tick latency;
+
+    /** The number of prefetches to issue */
+    unsigned degree;
+
+    /** If patterns should be found per context id */
+    bool useContextId;
     /** Do we prefetch across page boundaries. */
     bool pageStop;

@@ -71,8 +80,6 @@ class BasePrefetcher
     /** Do we prefetch on only data reads, or on inst reads as well. */
     bool onlyData;

-    std::string _name;
-
   public:

     Stats::Scalar pfIdentified;

@@ -85,16 +92,14 @@ class BasePrefetcher
     Stats::Scalar pfSpanPage;
     Stats::Scalar pfSquashed;

-    void regStats(const std::string &name);
+    void regStats();

   public:

-    BasePrefetcher(const BaseCacheParams *p);
+    typedef BasePrefetcherParams Params;
+    BasePrefetcher(const Params *p);

     virtual ~BasePrefetcher() {}

-    const std::string name() const { return _name; }
-
     void setCache(BaseCache *_cache);

     /**

@@ -130,7 +135,12 @@ class BasePrefetcher
      * Utility function: are addresses a and b on the same VM page?
      */
     bool samePage(Addr a, Addr b);
+
+  public:
+    const Params*
+    params() const
+    {
+        return dynamic_cast<const Params *>(_params);
+    }
 };

 #endif //__MEM_CACHE_PREFETCH_BASE_PREFETCHER_HH__

src/mem/cache/prefetch/ghb.cc (7 changed lines)

@@ -71,3 +71,10 @@ GHBPrefetcher::calculatePrefetch(PacketPtr &pkt, std::list<Addr> &addresses,
         }
     }
 }
+
+
+GHBPrefetcher*
+GHBPrefetcherParams::create()
+{
+    return new GHBPrefetcher(this);
+}

src/mem/cache/prefetch/ghb.hh (11 changed lines)

@@ -37,6 +37,7 @@
 #define __MEM_CACHE_PREFETCH_GHB_PREFETCHER_HH__

 #include "mem/cache/prefetch/base.hh"
+#include "params/GHBPrefetcher.hh"

 class GHBPrefetcher : public BasePrefetcher
 {

@@ -47,15 +48,9 @@ class GHBPrefetcher : public BasePrefetcher
     Addr secondLastMissAddr[Max_Contexts];
     Addr lastMissAddr[Max_Contexts];

-    Tick latency;
-    int degree;
-    bool useContextId;
-
   public:

-    GHBPrefetcher(const BaseCacheParams *p)
-        : BasePrefetcher(p), latency(p->prefetch_latency),
-          degree(p->prefetch_degree), useContextId(p->prefetch_use_cpu_id)
+    GHBPrefetcher(const Params *p)
+        : BasePrefetcher(p)
     {
     }

src/mem/cache/prefetch/stride.cc (7 changed lines)

@@ -132,3 +132,10 @@ StridePrefetcher::calculatePrefetch(PacketPtr &pkt, std::list<Addr> &addresses,
         tab.push_back(new_entry);
     }
 }
+
+
+StridePrefetcher*
+StridePrefetcherParams::create()
+{
+    return new StridePrefetcher(this);
+}

src/mem/cache/prefetch/stride.hh (9 changed lines)

@@ -39,6 +39,7 @@
 #include <climits>

 #include "mem/cache/prefetch/base.hh"
+#include "params/StridePrefetcher.hh"

 class StridePrefetcher : public BasePrefetcher
 {

@@ -63,15 +64,11 @@ class StridePrefetcher : public BasePrefetcher
     Addr *lastMissAddr[Max_Contexts];

     std::list<StrideEntry*> table[Max_Contexts];
-    Tick latency;
-    int degree;
-    bool useContextId;

   public:

-    StridePrefetcher(const BaseCacheParams *p)
-        : BasePrefetcher(p), latency(p->prefetch_latency),
-          degree(p->prefetch_degree), useContextId(p->prefetch_use_cpu_id)
+    StridePrefetcher(const Params *p)
+        : BasePrefetcher(p)
     {
     }

src/mem/cache/prefetch/tagged.cc (10 changed lines)

@@ -35,9 +35,8 @@

 #include "mem/cache/prefetch/tagged.hh"

-TaggedPrefetcher::TaggedPrefetcher(const BaseCacheParams *p)
-    : BasePrefetcher(p),
-      latency(p->prefetch_latency), degree(p->prefetch_degree)
+TaggedPrefetcher::TaggedPrefetcher(const Params *p)
+    : BasePrefetcher(p)
 {
 }

@@ -62,3 +61,8 @@ calculatePrefetch(PacketPtr &pkt, std::list<Addr> &addresses,
 }


+TaggedPrefetcher*
+TaggedPrefetcherParams::create()
+{
+    return new TaggedPrefetcher(this);
+}

src/mem/cache/prefetch/tagged.hh (9 changed lines)

@@ -37,17 +37,14 @@
 #define __MEM_CACHE_PREFETCH_TAGGED_PREFETCHER_HH__

 #include "mem/cache/prefetch/base.hh"
+#include "params/TaggedPrefetcher.hh"


 class TaggedPrefetcher : public BasePrefetcher
 {
   protected:

-    Tick latency;
-    int degree;
-
   public:

-    TaggedPrefetcher(const BaseCacheParams *p);
+    TaggedPrefetcher(const Params *p);

     ~TaggedPrefetcher() {}