Note: AArch64 and AArch32 interworking is not supported. If you use an
AArch64 kernel you are restricted to AArch64 user-mode binaries. This
will be addressed in a later patch.

Note: Virtualization is only supported in AArch32 mode. This will also
be fixed in a later patch.

Contributors:
  Giacomo Gabrielli (TrustZone, LPAE, system-level AArch64, AArch64 NEON, validation)
  Thomas Grocutt (AArch32 Virtualization, AArch64 FP, validation)
  Mbou Eyole (AArch64 NEON, validation)
  Ali Saidi (AArch64 Linux support, code integration, validation)
  Edmund Grimley-Evans (AArch64 FP)
  William Wang (AArch64 Linux support)
  Rene De Jong (AArch64 Linux support, performance opt.)
  Matt Horsnell (AArch64 MP, validation)
  Matt Evans (device models, code integration, validation)
  Chris Adeniyi-Jones (AArch64 syscall-emulation)
  Prakash Ramrakhyani (validation)
  Dam Sunwoo (validation)
  Chander Sudanthi (validation)
  Stephan Diestelhorst (validation)
  Andreas Hansson (code integration, performance opt.)
  Eric Van Hensbergen (performance opt.)
  Gabe Black
// -*- mode:c++ -*-

// Copyright (c) 2011-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black
let {{
    # C++ snippet spliced into the EA code of AArch64 loads/stores whose
    # base register may be SP: if the base is SP, its low four bits are
    # nonzero (not 16-byte aligned), and SP alignment checking is enabled
    # for the current context, raise an SP alignment fault.
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return new SPAlignmentFault();
        }
    '''
}};
def template Load64Execute {{
    // Atomic-mode execute() for 64-bit loads: compute the effective
    // address, read memory in one shot, then write back the destination
    // register(s).
    Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template Store64Execute {{
    // Atomic-mode execute() for 64-bit stores: compute the effective
    // address, assemble the store data (memacc_code), write it to memory,
    // then perform any register writeback (e.g. base update).
    Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template Store64InitiateAcc {{
    // Timing-mode initiateAcc() for 64-bit stores: compute the address,
    // assemble the data, and send the write request. Any remaining work
    // happens in the matching completeAcc() template.
    Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};
def template StoreEx64Execute {{
    // Atomic-mode execute() for 64-bit store-exclusive: like a normal
    // store, but writeMemAtomic reports the exclusive pass/fail status
    // through writeResult, which postacc_code moves into the result
    // register before writeback.
    Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        // Status of the exclusive access, filled in by the memory system.
        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template StoreEx64InitiateAcc {{
    // Timing-mode initiateAcc() for store-exclusive: send the write
    // request; the exclusive status is recovered from the packet in
    // StoreEx64CompleteAcc.
    Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};
def template Load64InitiateAcc {{
    // Timing-mode initiateAcc() for 64-bit loads: only source operands
    // are declared (op_src_decl) since the destination is written later
    // in completeAcc() when the data returns.
    Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemTiming(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};
def template Load64CompleteAcc {{
    // Timing-mode completeAcc() for 64-bit loads: extract the returned
    // data from the packet and write back the destination register(s).
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      %(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template Store64CompleteAcc {{
    // Plain stores have no work left once the write has been sent.
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      %(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
def template StoreEx64CompleteAcc {{
    // Timing-mode completeAcc() for store-exclusive: recover the
    // exclusive pass/fail status carried back in the request's extra
    // data, then let postacc_code write it to the result register.
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      %(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // Exclusive-store result reported by the memory system.
        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template DCStore64Declare {{
    // Class declaration for DC (data-cache maintenance) operations that
    // are modelled as stores.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_base, _dest, _imm)
    {
        %(constructor)s;
        // DC ops are never split into microops.
        assert(!%(use_uops)d);
    }
}};
def template DCStore64Execute {{
    // Atomic-mode execute() for DC ops: issue a data-less write (NULL
    // data pointer) of op_size bytes to the computed address.
    Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template DCStore64InitiateAcc {{
    // Timing-mode initiateAcc() for DC ops: send the data-less write
    // request; nothing further is needed on completion.
    Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};
def template LoadStoreImm64Declare {{
    // Declaration for a 64-bit load/store with an immediate offset.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreImmU64Declare {{
    // Immediate-offset variant that additionally carries no-allocate,
    // exclusive, and acquire/release flags.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreImmDU64Declare {{
    // Register-pair (two destination registers) immediate-offset variant
    // with no-allocate / exclusive / acquire-release flags.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    // Store-exclusive pair declaration: _result receives the exclusive
    // pass/fail status, _dest/_dest2 supply the data pair.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
def template LoadStoreReg64Declare {{
    // Declaration for a load/store with a register offset, extended and
    // optionally shifted (_type / _shiftAmt).
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreRegU64Declare {{
    // Register-offset variant that additionally carries no-allocate,
    // exclusive, and acquire/release flags.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreRaw64Declare {{
    // Declaration for base-register-only ("raw") addressing, with no
    // offset operand.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreEx64Declare {{
    // Declaration for exclusive accesses: base-register addressing plus
    // a result register for the exclusive status.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreLit64Declare {{
    // Declaration for PC-relative (literal) loads: only a destination
    // register and an immediate offset.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreLitU64Declare {{
    // Literal (PC-relative) variant that additionally carries
    // no-allocate, exclusive, and acquire/release flags.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        // Writeback forms are microcoded: uop 0 does the memory access
        // (delayed commit), uop 1 performs the base-register writeback.
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _imm)
    {
        %(constructor)s;
        // These forms are never microcoded.
        assert(!%(use_uops)d);
        // Apply exclusive / acquire-release access semantics.
        // NOTE(review): noAlloc is accepted but unused here — confirm
        // whether a no-allocate hint should be applied as well.
        setExcAcRel(exclusive, acrel);
    }
}};
def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        // Pair forms are never microcoded.
        assert(!%(use_uops)d);
        // Apply exclusive / acquire-release access semantics.
        // NOTE(review): noAlloc is accepted but unused here — confirm.
        setExcAcRel(exclusive, acrel);
    }
}};
def template StoreImmDEx64Constructor {{
    // Constructor for store-exclusive pair instructions; _result is the
    // status register written on completion.
    inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        // Never microcoded.
        assert(!%(use_uops)d);
    }
}};
def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        // Microcoded forms: uop 0 does the access (delayed commit),
        // uop 1 performs the writeback.
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        // Never microcoded.
        assert(!%(use_uops)d);
        // Apply exclusive / acquire-release access semantics.
        // NOTE(review): noAlloc is accepted but unused here — confirm.
        setExcAcRel(exclusive, acrel);
    }
}};
def template LoadStoreRaw64Constructor {{
    // Constructor for base-register-only ("raw") addressing forms.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};
def template LoadStoreEx64Constructor {{
    // Constructor for exclusive-access forms; _result receives the
    // exclusive status on completion.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _result)
    {
        %(constructor)s;
    }
}};
def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        // Microcoded forms: uop 0 does the access (delayed commit),
        // uop 1 performs the writeback.
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        // Never microcoded.
        assert(!%(use_uops)d);
        // Apply exclusive / acquire-release access semantics.
        // NOTE(review): noAlloc is accepted but unused here — confirm.
        setExcAcRel(exclusive, acrel);
    }
}};
|