612f8f074f
Note: AArch64 and AArch32 interworking is not supported. If you use an AArch64 kernel you are restricted to AArch64 user-mode binaries. This will be addressed in a later patch.

Note: Virtualization is only supported in AArch32 mode. This will also be fixed in a later patch.

Contributors:
  Giacomo Gabrielli (TrustZone, LPAE, system-level AArch64, AArch64 NEON, validation)
  Thomas Grocutt (AArch32 Virtualization, AArch64 FP, validation)
  Mbou Eyole (AArch64 NEON, validation)
  Ali Saidi (AArch64 Linux support, code integration, validation)
  Edmund Grimley-Evans (AArch64 FP)
  William Wang (AArch64 Linux support)
  Rene De Jong (AArch64 Linux support, performance opt.)
  Matt Horsnell (AArch64 MP, validation)
  Matt Evans (device models, code integration, validation)
  Chris Adeniyi-Jones (AArch64 syscall-emulation)
  Prakash Ramrakhyani (validation)
  Dam Sunwoo (validation)
  Chander Sudanthi (validation)
  Stephan Diestelhorst (validation)
  Andreas Hansson (code integration, performance opt.)
  Eric Van Hensbergen (performance opt.)
  Gabe Black
269 lines
10 KiB
C++
269 lines
10 KiB
C++
// -*- mode:c++ -*-
|
|
|
|
// Copyright (c) 2010-2013 ARM Limited
|
|
// All rights reserved
|
|
//
|
|
// The license below extends only to copyright in the software and shall
|
|
// not be construed as granting a license to any other intellectual
|
|
// property including but not limited to intellectual property relating
|
|
// to a hardware implementation of the functionality of the software
|
|
// licensed hereunder. You may use the software subject to the license
|
|
// terms below provided that you ensure that this notice is replicated
|
|
// unmodified and in its entirety in all distributions of the software,
|
|
// modified or unmodified, in source code or in binary form.
|
|
//
|
|
// Redistribution and use in source and binary forms, with or without
|
|
// modification, are permitted provided that the following conditions are
|
|
// met: redistributions of source code must retain the above copyright
|
|
// notice, this list of conditions and the following disclaimer;
|
|
// redistributions in binary form must reproduce the above copyright
|
|
// notice, this list of conditions and the following disclaimer in the
|
|
// documentation and/or other materials provided with the distribution;
|
|
// neither the name of the copyright holders nor the names of its
|
|
// contributors may be used to endorse or promote products derived from
|
|
// this software without specific prior written permission.
|
|
//
|
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
//
|
|
// Authors: Gabe Black
|
|
|
|
let {{
    # C++ fragment inlined into AArch32 VFP/NEON instruction execute()
    # methods. Checks that floating-point/SIMD access is enabled (via the
    # vfpNeonEnabled() helper, which consults HCPTR, NSACR, CPACR, CPSR and
    # FPEXC) and returns the appropriate fault if it is not. When access is
    # enabled but a hypervisor trap is configured (trapEnCheck), the check
    # raises UndefinedInstruction in Hyp mode, or HypervisorTrap from
    # non-secure state, with EC_TRAPPED_HCPTR as the exception class.
    vfpEnabledCheckCode = '''
        uint32_t issEnCheck;
        bool trapEnCheck;
        uint32_t seq;
        if (!vfpNeonEnabled(seq,Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
                            trapEnCheck, xc->tcBase(), Fpexc))
            {return disabledFault();}
        if (trapEnCheck) {
            CPSR cpsrEnCheck = Cpsr;
            if (cpsrEnCheck.mode == MODE_HYP) {
                return new UndefinedInstruction(machInst, issEnCheck,
                                                EC_TRAPPED_HCPTR);
            } else {
                if (!inSecureState(Scr, Cpsr)) {
                    return new HypervisorTrap(machInst, issEnCheck,
                                              EC_TRAPPED_HCPTR);
                }
            }
        }
    '''

    # AArch64 variant of the enable check. Walks the exception-level trap
    # hierarchy in priority order: CPACR_EL1 (SupervisorTrap), then
    # CPTR_EL2.TFP when virtualization is implemented and EL <= EL2
    # (HypervisorTrap), then CPTR_EL3.TFP when security is implemented
    # (SecureMonitorTrap). 0x1E00000 is the ISS value used for these
    # SIMD/FP trap syndromes; EC_TRAPPED_SIMD_FP is the exception class.
    vfp64EnabledCheckCode = '''
        CPSR cpsrEnCheck = Cpsr;
        ExceptionLevel el = (ExceptionLevel) (uint8_t) cpsrEnCheck.el;
        if (!vfpNeon64Enabled(Cpacr64, el))
            return new SupervisorTrap(machInst, 0x1E00000,
                                      EC_TRAPPED_SIMD_FP);

        if (ArmSystem::haveVirtualization(xc->tcBase()) && el <= EL2) {
            HCPTR cptrEnCheck = xc->tcBase()->readMiscReg(MISCREG_CPTR_EL2);
            if (cptrEnCheck.tfp)
                return new HypervisorTrap(machInst, 0x1E00000,
                                          EC_TRAPPED_SIMD_FP);
        }

        if (ArmSystem::haveSecurity(xc->tcBase())) {
            HCPTR cptrEnCheck = xc->tcBase()->readMiscReg(MISCREG_CPTR_EL3);
            if (cptrEnCheck.tfp)
                return new SecureMonitorTrap(machInst, 0x1E00000,
                                             EC_TRAPPED_SIMD_FP);
        }
    '''

    # Enable check for VMSR (write to a VFP system register). Note that,
    # unlike vfpEnabledCheckCode, vfpNeonEnabled() is called without the
    # Fpexc argument: FPEXC and FPSID remain writable even when FP is
    # otherwise disabled. FPSCR additionally requires privileged mode.
    vmsrEnabledCheckCode = '''
        uint32_t issEnCheck;
        bool trapEnCheck;
        uint32_t seq;
        if (!vfpNeonEnabled(seq,Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
                            trapEnCheck, xc->tcBase()))
            if (dest != (int)MISCREG_FPEXC && dest != (int)MISCREG_FPSID)
                {return disabledFault();}
        if (!inPrivilegedMode(Cpsr))
            if (dest != (int)MISCREG_FPSCR)
                return disabledFault();
        if (trapEnCheck) {
            CPSR cpsrEnCheck = Cpsr;
            if (cpsrEnCheck.mode == MODE_HYP) {
                return new UndefinedInstruction(machInst, issEnCheck,
                                                EC_TRAPPED_HCPTR);
            } else {
                if (!inSecureState(Scr, Cpsr)) {
                    return new HypervisorTrap(machInst, issEnCheck,
                                              EC_TRAPPED_HCPTR);
                }
            }
        }
    '''

    # Enable check for VMRS (read of a VFP system register). The source
    # register is in op1; FPEXC, FPSID, MVFR0 and MVFR1 stay readable with
    # FP disabled, and FPSCR is the only register readable unprivileged.
    vmrsEnabledCheckCode = '''
        uint32_t issEnCheck;
        bool trapEnCheck;
        uint32_t seq;
        if (!vfpNeonEnabled(seq,Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
                            trapEnCheck, xc->tcBase()))
            if (op1 != (int)MISCREG_FPEXC && op1 != (int)MISCREG_FPSID &&
                op1 != (int)MISCREG_MVFR0 && op1 != (int)MISCREG_MVFR1)
                {return disabledFault();}
        if (!inPrivilegedMode(Cpsr))
            if (op1 != (int)MISCREG_FPSCR)
                return disabledFault();
        if (trapEnCheck) {
            CPSR cpsrEnCheck = Cpsr;
            if (cpsrEnCheck.mode == MODE_HYP) {
                return new UndefinedInstruction(machInst, issEnCheck,
                                                EC_TRAPPED_HCPTR);
            } else {
                if (!inSecureState(Scr, Cpsr)) {
                    return new HypervisorTrap(machInst, issEnCheck,
                                              EC_TRAPPED_HCPTR);
                }
            }
        }
    '''

    # Enable check for "VMRS APSR_nzcv, FPSCR" (flag transfer). No
    # per-register exemptions apply here: FP access must simply be enabled.
    vmrsApsrEnabledCheckCode = '''
        uint32_t issEnCheck;
        bool trapEnCheck;
        uint32_t seq;
        if (!vfpNeonEnabled(seq,Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
                            trapEnCheck, xc->tcBase()))
            {return disabledFault();}
        if (trapEnCheck) {
            CPSR cpsrEnCheck = Cpsr;
            if (cpsrEnCheck.mode == MODE_HYP) {
                return new UndefinedInstruction(machInst, issEnCheck,
                                                EC_TRAPPED_HCPTR);
            } else {
                if (!inSecureState(Scr, Cpsr)) {
                    return new HypervisorTrap(machInst, issEnCheck,
                                              EC_TRAPPED_HCPTR);
                }
            }
        }
    '''
}};
|
|
|
|
// Class declaration for a two-operand (dest, op1) FP register instruction.
def template FpRegRegOpDeclare {{
class %(class_name)s : public %(base_class)s
{
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1,
                   VfpMicroMode mode = VfpNotAMicroop);
    %(BasicExecDeclare)s
};
}};
|
|
|
|
// Constructor for the two-operand (dest, op1) FP register instruction.
def template FpRegRegOpConstructor {{
    inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _op1,
            VfpMicroMode mode)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, mode)
    {
        %(constructor)s;
        // For conditional (predicated) instructions, the destination
        // registers are also sources: if the condition fails, their old
        // values must flow through unchanged.
        if (!(condCode == COND_AL || condCode == COND_UC)) {
            for (int x = 0; x < _numDestRegs; x++) {
                _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
            }
        }
    }
}};
|
|
|
|
// Class declaration for an FP instruction taking a destination register
// and an immediate operand.
def template FpRegImmOpDeclare {{
class %(class_name)s : public %(base_class)s
{
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                   uint64_t _imm, VfpMicroMode mode = VfpNotAMicroop);
    %(BasicExecDeclare)s
};
}};
|
|
|
|
// Constructor for the register + immediate FP instruction.
def template FpRegImmOpConstructor {{
    inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, uint64_t _imm, VfpMicroMode mode)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _imm, mode)
    {
        %(constructor)s;
        // For conditional (predicated) instructions, the destination
        // registers are also sources: if the condition fails, their old
        // values must flow through unchanged.
        if (!(condCode == COND_AL || condCode == COND_UC)) {
            for (int x = 0; x < _numDestRegs; x++) {
                _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
            }
        }
    }
}};
|
|
|
|
// Class declaration for an FP instruction taking a destination register,
// one source register, and an immediate operand.
def template FpRegRegImmOpDeclare {{
class %(class_name)s : public %(base_class)s
{
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1,
                   uint64_t _imm, VfpMicroMode mode = VfpNotAMicroop);
    %(BasicExecDeclare)s
};
}};
|
|
|
|
// Constructor for the register + register + immediate FP instruction.
def template FpRegRegImmOpConstructor {{
    inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest,
            IntRegIndex _op1,
            uint64_t _imm,
            VfpMicroMode mode)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, _imm, mode)
    {
        %(constructor)s;
        // For conditional (predicated) instructions, the destination
        // registers are also sources: if the condition fails, their old
        // values must flow through unchanged.
        if (!(condCode == COND_AL || condCode == COND_UC)) {
            for (int x = 0; x < _numDestRegs; x++) {
                _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
            }
        }
    }
}};
|
|
|
|
// Class declaration for a three-register (dest, op1, op2) FP instruction.
def template FpRegRegRegOpDeclare {{
class %(class_name)s : public %(base_class)s
{
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2,
                   VfpMicroMode mode = VfpNotAMicroop);
    %(BasicExecDeclare)s
};
}};
|
|
|
|
// Constructor for the three-register (dest, op1, op2) FP instruction.
def template FpRegRegRegOpConstructor {{
    inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest,
            IntRegIndex _op1,
            IntRegIndex _op2,
            VfpMicroMode mode)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, _op2, mode)
    {
        %(constructor)s;
        // For conditional (predicated) instructions, the destination
        // registers are also sources: if the condition fails, their old
        // values must flow through unchanged.
        if (!(condCode == COND_AL || condCode == COND_UC)) {
            for (int x = 0; x < _numDestRegs; x++) {
                _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
            }
        }
    }
}};
|