X86: Fix address size handling so real mode works properly.
With this change, virtual (pre-segmentation) addresses are truncated based on the address size, and any non-64-bit linear address is truncated to 32 bits. As a result, real mode addresses are no longer truncated down to 16 bits after their segment bases are added in.
This commit is contained in:
parent 74043c4f5c
commit a7859f7e45
2 changed files with 4 additions and 1 deletion
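
For context, a minimal standalone sketch (not gem5 code) of why the order of truncation matters in real mode. The bits() helper is a stand-in for the bits(val, hi, lo) bitfield extraction used in the ISA description below, and the segment and offset values are made up for illustration.

#include <cstdint>
#include <cstdio>

// Stand-in for the ISA description's bits(val, hi, lo): extract bits hi..lo.
// (Assumes hi - lo < 63, which holds for the 16-bit case shown here.)
static uint64_t bits(uint64_t val, int hi, int lo)
{
    return (val >> lo) & ((1ULL << (hi - lo + 1)) - 1);
}

int main()
{
    // Real mode, 16-bit address size: segment base = selector << 4.
    const int addressSize  = 2;               // bytes
    const uint64_t segBase = 0xF000ULL << 4;  // 0xF0000
    const uint64_t offset  = 0x1234;          // pre-segmentation address

    // Fixed behavior: truncate the pre-segmentation offset, then add the base.
    uint64_t fixed = segBase + bits(offset, addressSize * 8 - 1, 0);

    // Old behavior: add the base first, then truncate the sum to 16 bits,
    // which throws the segment base away entirely.
    uint64_t broken = bits(segBase + offset, addressSize * 8 - 1, 0);

    printf("fixed EA:  %#llx\n", (unsigned long long)fixed);   // 0xf1234
    printf("broken EA: %#llx\n", (unsigned long long)broken);  // 0x1234
    return 0;
}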
@@ -361,7 +361,7 @@ let {{
     exec_output = ""
 
     calculateEA = '''
-        EA = bits(SegBase + scale * Index + Base + disp, addressSize * 8 - 1, 0);
+        EA = SegBase + bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);
     '''
 
     def defineMicroLoadOp(mnemonic, code, bigCode='',
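
Read as arithmetic, the new expression truncates only the pre-segmentation sum (scale * Index + Base + disp) to the current address size and then adds the segment base untouched. Below is a hedged sketch of that behaviour across the three x86 address sizes; calcEA() is an illustrative stand-in, not the code the microop assembler actually generates.

#include <cstdint>

// Illustrative stand-in for the generated calculateEA code.
static uint64_t calcEA(uint64_t segBase, uint64_t scale, uint64_t index,
                       uint64_t base, uint64_t disp, int addressSize)
{
    int nbits = addressSize * 8;
    // Guard the 64-bit case: shifting a 64-bit value by 64 is undefined in C++.
    uint64_t mask = nbits >= 64 ? ~0ULL : (1ULL << nbits) - 1;
    // Truncate the pre-segmentation address, then apply segmentation.
    return segBase + ((scale * index + base + disp) & mask);
}

With addressSize == 8 the masking is a no-op, so 64-bit effective addresses pass through unchanged; keeping non-64-bit linear addresses within 32 bits is handled separately in the TLB, in the second hunk below.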
@@ -281,6 +281,9 @@ TLB::translate(RequestPtr req, ThreadContext *tc, Translation *translation,
             return new GeneralProtection(0);
         }
     }
+    if (m5Reg.mode != LongMode ||
+            (flags & (AddrSizeFlagBit << FlagShift)))
+        vaddr &= mask(32);
     // If paging is enabled, do the translation.
     if (m5Reg.paging) {
         DPRINTF(TLB, "Paging enabled.\n");
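
In words: unless the CPU is in long mode and the instruction uses the full 64-bit address size, the post-segmentation (linear) address is masked down to 32 bits before paging. A minimal sketch of that rule, under those assumptions; longMode and addrSizeOverride are hypothetical stand-ins for the m5Reg.mode and AddrSizeFlagBit checks, and mask32() stands in for gem5's mask(32).

#include <cstdint>

// Stand-in for gem5's mask(32): the low 32 bits set.
static inline uint64_t mask32() { return 0xFFFFFFFFULL; }

// Truncate a linear address to 32 bits whenever the mode or the instruction's
// address-size override means it cannot be a full 64-bit address.
static uint64_t truncateLinear(uint64_t vaddr, bool longMode, bool addrSizeOverride)
{
    if (!longMode || addrSizeOverride)
        vaddr &= mask32();
    return vaddr;
}

For example, the real-mode address 0xF000:0x1234 becomes the linear address 0xF1234 and already fits in 32 bits, while a long-mode access without the override passes through untruncated.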