WIP 2 SingleDataTransfer

Former-commit-id: 8a103161f34eb1a6c731c63ae65ca1056117ec55
Former-commit-id: 74c8158e7354253f6bd4ad50488d34de34e3ad70
Michel Heily 2021-06-30 09:11:02 +03:00 committed by MichelOS
parent b3c3c70bce
commit 0665ff7451
3 changed files with 66 additions and 22 deletions


@@ -162,7 +162,17 @@ fn arm_decode(i: u32) -> String {
}
0b01 => {
match (i.bit(25), i.bit(4)) {
(_, F) | (F, T) => String::from("exec_arm_ldr_str"),
(_, F) | (F, T) => format!(
"exec_arm_ldr_str::<{LOAD}, {WRITEBACK}, {PRE_INDEX}, {BYTE}, {SHIFT}, {ADD}, {BS_OP}, {SHIFT_BY_REG}>",
LOAD = i.bit(20),
WRITEBACK = i.bit(21),
BYTE = i.bit(22),
ADD = i.bit(23),
PRE_INDEX = i.bit(24),
SHIFT = i.bit(25),
BS_OP = i.bit_range(5..7) as u8,
SHIFT_BY_REG = i.bit(4),
),
(T, T) => String::from("arm_undefined"), /* Possibly ARM11, but we don't implement these */
}
}
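The decoder now emits the fully instantiated handler name as a string. A minimal sketch of how such a string might be consumed, assuming a build-time generator that pastes each decoded name into a function-pointer lookup table (the generator, the table, and example_lut_entry are assumptions; only Core, CpuAction and MemoryInterface come from the surrounding code):

// Hypothetical sketch: one monomorphized entry of an assumed ARM dispatch table.
// Const arguments follow <LOAD, WRITEBACK, PRE_INDEX, BYTE, SHIFT, ADD, BS_OP, SHIFT_BY_REG>;
// here a pre-indexed LDR with an LSL-shifted register offset and no writeback.
fn example_lut_entry<I: MemoryInterface>() -> fn(&mut Core<I>, u32) -> CpuAction {
    Core::<I>::exec_arm_ldr_str::<true, false, true, false, true, true, 0, false>
}
// The table would be indexed by the instruction's decode bits and the chosen
// entry called as handler(self, insn).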


@@ -261,6 +261,28 @@ impl<I: MemoryInterface> Core<I> {
self.barrel_shift_op(bs_op, val, amount, carry, false)
}
pub fn register_shift_const<const BS_OP: u8, const SHIFT_BY_REG: bool>(
&mut self,
offset: u32,
reg: usize,
carry: &mut bool,
) -> u32 {
let op = match BS_OP {
0 => BarrelShiftOpCode::LSL,
1 => BarrelShiftOpCode::LSR,
2 => BarrelShiftOpCode::ASR,
3 => BarrelShiftOpCode::ROR,
_ => unreachable!(),
};
if SHIFT_BY_REG {
let rs = offset.bit_range(8..12) as usize;
self.shift_by_register(op, reg, rs, carry)
} else {
let amount = offset.bit_range(7..12) as u32;
self.barrel_shift_op(op, self.get_reg(reg), amount, carry, true)
}
}
pub fn register_shift(&mut self, shift: &ShiftedRegister, carry: &mut bool) -> u32 {
match shift.shift_by {
ShiftRegisterBy::ByAmount(amount) => {

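Because BS_OP and SHIFT_BY_REG are const parameters, the match and the shift-by-register branch in register_shift_const resolve at compile time. A minimal usage sketch, assuming insn and self are in scope as in the handler further below (const arguments chosen for illustration):

// Hypothetical call site: with BS_OP = 0 and SHIFT_BY_REG = false the match
// resolves to LSL and the shift-by-Rs branch is compiled out entirely.
let offset = insn.bit_range(0..12);
let rm = (offset & 0xf) as usize;
let mut carry = self.cpsr.C();
let shifted = self.register_shift_const::<0, false>(offset, rm, &mut carry);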

@@ -185,8 +185,6 @@ impl<I: MemoryInterface> Core<I> {
let opcode =
AluOpCode::from_u8(OP).unwrap_or_else(|| unsafe { std::hint::unreachable_unchecked() });
// println!("{:?} {} {} {}, {:?} {} {} {}", insn.opcode(), insn.bit(25), insn.set_cond_flags(), insn.bit(4), opcode, IMM, SET_FLAGS, SHIFT_BY_REG);
let mut carry = self.cpsr.C();
let op2 = if IMM {
let immediate = insn & 0xff;
@@ -288,37 +286,51 @@ impl<I: MemoryInterface> Core<I> {
/// STR{cond}{B}{T} Rd,<Address> | 2N | ---- | [Rn+/-<offset>]=Rd
/// ------------------------------------------------------------------------------
/// For LDR, add y=1S+1N if Rd=R15.
pub fn exec_arm_ldr_str(&mut self, insn: u32) -> CpuAction {
pub fn exec_arm_ldr_str<
const LOAD: bool,
const WRITEBACK: bool,
const PRE_INDEX: bool,
const BYTE: bool,
const SHIFT: bool,
const ADD: bool,
const BS_OP: u8,
const SHIFT_BY_REG: bool,
>(
&mut self,
insn: u32,
) -> CpuAction {
let mut result = CpuAction::AdvancePC(NonSeq);
let load = insn.load_flag();
let pre_index = insn.pre_index_flag();
let writeback = insn.write_back_flag();
let base_reg = insn.bit_range(16..20) as usize;
let dest_reg = insn.bit_range(12..16) as usize;
let mut addr = self.get_reg(base_reg);
if base_reg == REG_PC {
addr = self.pc_arm() + 8; // prefetching
}
let mut carry = self.cpsr.C();
let offset = self.get_barrel_shifted_value(&insn.ldr_str_offset(), &mut carry); // TODO: wrong to use in here
drop(carry);
let mut offset = insn.bit_range(0..12);
if SHIFT {
let mut carry = self.cpsr.C();
let rm = offset & 0xf;
offset =
self.register_shift_const::<BS_OP, SHIFT_BY_REG>(offset, rm as usize, &mut carry);
}
let offset = if ADD {
offset as u32
} else {
(-(offset as i32)) as u32
};
let effective_addr = (addr as i32).wrapping_add(offset as i32) as Addr;
// TODO - confirm this
let old_mode = self.cpsr.mode();
if !pre_index && writeback {
if !PRE_INDEX && WRITEBACK {
self.change_mode(old_mode, CpuMode::User);
}
addr = if insn.pre_index_flag() {
effective_addr
} else {
addr
};
addr = if PRE_INDEX { effective_addr } else { addr };
if load {
let data = if insn.transfer_size() == 1 {
if LOAD {
let data = if BYTE {
self.load_8(addr, NonSeq) as u32
} else {
self.ldr_word(addr, NonSeq)
@@ -346,15 +358,15 @@ impl<I: MemoryInterface> Core<I> {
};
}
if !load || base_reg != dest_reg {
if !pre_index {
if !LOAD || base_reg != dest_reg {
if !PRE_INDEX {
self.set_reg(base_reg, effective_addr);
} else if insn.write_back_flag() {
} else if WRITEBACK {
self.set_reg(base_reg, effective_addr);
}
}
if !pre_index && insn.write_back_flag() {
if !PRE_INDEX && WRITEBACK {
self.change_mode(self.cpsr.mode(), old_mode);
}
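On the offset sign handling above: with ADD = false the 12-bit offset is negated in two's complement, so the wrapping add effectively subtracts it from the base register. A standalone illustration with assumed values:

// Illustration only; the base address and offset are arbitrary.
fn main() {
    let base: u32 = 0x0300_0000;
    let offset: u32 = 4;
    let down = (-(offset as i32)) as u32;               // 0xFFFF_FFFC
    assert_eq!(base.wrapping_add(down), 0x02FF_FFFC);   // ADD = false: base - 4
    assert_eq!(base.wrapping_add(offset), 0x0300_0004); // ADD = true:  base + 4
}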