1 /***************************************************************************
2 * Copyright (C) 2006 by Dominic Rath *
3 * Dominic.Rath@gmx.de *
5 * Copyright (C) 2008 by Hongtao Zheng *
8 * This program is free software; you can redistribute it and/or modify *
9 * it under the terms of the GNU General Public License as published by *
10 * the Free Software Foundation; either version 2 of the License, or *
11 * (at your option) any later version. *
13 * This program is distributed in the hope that it will be useful, *
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
16 * GNU General Public License for more details. *
18 * You should have received a copy of the GNU General Public License *
19 * along with this program; if not, write to the *
20 * Free Software Foundation, Inc., *
21 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
22 ***************************************************************************/
#include <stdint.h>

29 #include "arm_disassembler.h"
30 #include "arm_simulator.h"
32 #include "binarybuffer.h"
36 u32 arm_shift(u8 shift, u32 Rm, u32 shift_amount, u8 *carry)
41 if (shift == 0x0) /* LSL */
43 if ((shift_amount > 0) && (shift_amount <= 32))
45 return_value = Rm << shift_amount;
46 *carry = Rm >> (32 - shift_amount);
48 else if (shift_amount > 32)
53 else /* (shift_amount == 0) */
58 else if (shift == 0x1) /* LSR */
60 if ((shift_amount > 0) && (shift_amount <= 32))
62 return_value = Rm >> shift_amount;
63 *carry = (Rm >> (shift_amount - 1)) & 1;
65 else if (shift_amount > 32)
70 else /* (shift_amount == 0) */
75 else if (shift == 0x2) /* ASR */
77 if ((shift_amount > 0) && (shift_amount <= 32))
79 /* right shifts of unsigned values are guaranteed to be logical (shift in zeroes)
80 * simulate an arithmetic shift (shift in signed-bit) by adding the signed-bit manually */
81 return_value = Rm >> shift_amount;
83 return_value |= 0xffffffff << (32 - shift_amount);
85 else if (shift_amount > 32)
89 return_value = 0xffffffff;
98 else /* (shift_amount == 0) */
103 else if (shift == 0x3) /* ROR */
105 if (shift_amount == 0)
111 shift_amount = shift_amount % 32;
112 return_value = (Rm >> shift_amount) | (Rm << (32 - shift_amount));
113 *carry = (return_value >> 31) & 0x1;
116 else if (shift == 0x4) /* RRX */
118 return_value = Rm >> 1;
127 u32 arm_shifter_operand(armv4_5_common_t *armv4_5, int variant, union arm_shifter_operand shifter_operand, u8 *shifter_carry_out)
130 int instruction_size;
132 if (armv4_5->core_state == ARMV4_5_STATE_ARM)
133 instruction_size = 4;
135 instruction_size = 2;
137 *shifter_carry_out = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
139 if (variant == 0) /* 32-bit immediate */
141 return_value = shifter_operand.immediate.immediate;
143 else if (variant == 1) /* immediate shift */
145 u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.immediate_shift.Rm).value, 0, 32);
147 /* adjust RM in case the PC is being read */
148 if (shifter_operand.immediate_shift.Rm == 15)
149 Rm += 2 * instruction_size;
151 return_value = arm_shift(shifter_operand.immediate_shift.shift, Rm, shifter_operand.immediate_shift.shift_imm, shifter_carry_out);
153 else if (variant == 2) /* register shift */
155 u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.register_shift.Rm).value, 0, 32);
156 u32 Rs = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.register_shift.Rs).value, 0, 32);
158 /* adjust RM in case the PC is being read */
159 if (shifter_operand.register_shift.Rm == 15)
160 Rm += 2 * instruction_size;
162 return_value = arm_shift(shifter_operand.immediate_shift.shift, Rm, Rs, shifter_carry_out);
166 LOG_ERROR("BUG: shifter_operand.variant not 0, 1 or 2");
167 return_value = 0xffffffff;
173 int pass_condition(u32 cpsr, u32 opcode)
175 switch ((opcode & 0xf0000000) >> 28)
178 if (cpsr & 0x40000000)
183 if (!(cpsr & 0x40000000))
188 if (cpsr & 0x20000000)
193 if (!(cpsr & 0x20000000))
198 if (cpsr & 0x80000000)
203 if (!(cpsr & 0x80000000))
208 if (cpsr & 0x10000000)
213 if (!(cpsr & 0x10000000))
218 if ((cpsr & 0x20000000) && !(cpsr & 0x40000000))
223 if (!(cpsr & 0x20000000) || (cpsr & 0x40000000))
228 if (((cpsr & 0x80000000) && (cpsr & 0x10000000))
229 || (!(cpsr & 0x80000000) && !(cpsr & 0x10000000)))
234 if (((cpsr & 0x80000000) && !(cpsr & 0x10000000))
235 || (!(cpsr & 0x80000000) && (cpsr & 0x10000000)))
240 if (!(cpsr & 0x40000000) &&
241 (((cpsr & 0x80000000) && (cpsr & 0x10000000))
242 || (!(cpsr & 0x80000000) && !(cpsr & 0x10000000))))
247 if ((cpsr & 0x40000000) &&
248 (((cpsr & 0x80000000) && !(cpsr & 0x10000000))
249 || (!(cpsr & 0x80000000) && (cpsr & 0x10000000))))
259 LOG_ERROR("BUG: should never get here");
263 int thumb_pass_branch_condition(u32 cpsr, u16 opcode)
265 return pass_condition(cpsr, (opcode & 0x0f00) << 20);
268 /* simulate a single step (if possible)
269 * if the dry_run_pc argument is provided, no state is changed,
270 * but the new pc is stored in the variable pointed at by the argument
 */
272 int arm_simulate_step(target_t *target, u32 *dry_run_pc)
274 armv4_5_common_t *armv4_5 = target->arch_info;
275 u32 current_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
276 arm_instruction_t instruction;
277 int instruction_size;
278 int retval = ERROR_OK;
280 if (armv4_5->core_state == ARMV4_5_STATE_ARM)
284 /* get current instruction, and identify it */
285 if((retval = target_read_u32(target, current_pc, &opcode)) != ERROR_OK)
289 if((retval = arm_evaluate_opcode(opcode, current_pc, &instruction)) != ERROR_OK)
293 instruction_size = 4;
295 /* check condition code (for all instructions) */
296 if (!pass_condition(buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32), opcode))
300 *dry_run_pc = current_pc + instruction_size;
304 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
314 if((retval = target_read_u16(target, current_pc, &opcode)) != ERROR_OK)
318 if((retval = thumb_evaluate_opcode(opcode, current_pc, &instruction)) != ERROR_OK)
322 instruction_size = 2;
324 /* check condition code (only for branch instructions) */
325 if ((!thumb_pass_branch_condition(buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32), opcode)) &&
326 (instruction.type == ARM_B))
330 *dry_run_pc = current_pc + instruction_size;
334 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
341 /* examine instruction type */
343 /* branch instructions */
344 if ((instruction.type >= ARM_B) && (instruction.type <= ARM_BLX))
348 if (instruction.info.b_bl_bx_blx.reg_operand == -1)
350 target = instruction.info.b_bl_bx_blx.target_address;
354 target = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.b_bl_bx_blx.reg_operand).value, 0, 32);
355 if(instruction.info.b_bl_bx_blx.reg_operand == 15)
357 target += 2 * instruction_size;
363 *dry_run_pc = target;
368 if (instruction.type == ARM_B)
370 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target);
372 else if (instruction.type == ARM_BL)
374 u32 old_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
375 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 14).value, 0, 32, old_pc + 4);
376 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target);
378 else if (instruction.type == ARM_BX)
382 armv4_5->core_state = ARMV4_5_STATE_THUMB;
386 armv4_5->core_state = ARMV4_5_STATE_ARM;
388 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target & 0xfffffffe);
390 else if (instruction.type == ARM_BLX)
392 u32 old_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
393 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 14).value, 0, 32, old_pc + 4);
397 armv4_5->core_state = ARMV4_5_STATE_THUMB;
401 armv4_5->core_state = ARMV4_5_STATE_ARM;
403 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target & 0xfffffffe);
409 /* data processing instructions, except compare instructions (CMP, CMN, TST, TEQ) */
410 else if (((instruction.type >= ARM_AND) && (instruction.type <= ARM_RSC))
411 || ((instruction.type >= ARM_ORR) && (instruction.type <= ARM_MVN)))
413 u32 Rd, Rn, shifter_operand;
414 u8 C = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
418 /* ARM_MOV and ARM_MVN does not use Rn */
419 if ((instruction.type != ARM_MOV) && (instruction.type != ARM_MVN))
420 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.data_proc.Rn).value, 0, 32);
424 shifter_operand = arm_shifter_operand(armv4_5, instruction.info.data_proc.variant, instruction.info.data_proc.shifter_operand, &carry_out);
426 /* adjust Rn in case the PC is being read */
427 if (instruction.info.data_proc.Rn == 15)
428 Rn += 2 * instruction_size;
430 if (instruction.type == ARM_AND)
431 Rd = Rn & shifter_operand;
432 else if (instruction.type == ARM_EOR)
433 Rd = Rn ^ shifter_operand;
434 else if (instruction.type == ARM_SUB)
435 Rd = Rn - shifter_operand;
436 else if (instruction.type == ARM_RSB)
437 Rd = shifter_operand - Rn;
438 else if (instruction.type == ARM_ADD)
439 Rd = Rn + shifter_operand;
440 else if (instruction.type == ARM_ADC)
441 Rd = Rn + shifter_operand + (C & 1);
442 else if (instruction.type == ARM_SBC)
443 Rd = Rn - shifter_operand - (C & 1) ? 0 : 1;
444 else if (instruction.type == ARM_RSC)
445 Rd = shifter_operand - Rn - (C & 1) ? 0 : 1;
446 else if (instruction.type == ARM_ORR)
447 Rd = Rn | shifter_operand;
448 else if (instruction.type == ARM_BIC)
449 Rd = Rn & ~(shifter_operand);
450 else if (instruction.type == ARM_MOV)
451 Rd = shifter_operand;
452 else if (instruction.type == ARM_MVN)
453 Rd = ~shifter_operand;
455 LOG_WARNING("unhandled instruction type");
459 if (instruction.info.data_proc.Rd == 15)
466 *dry_run_pc = current_pc + instruction_size;
473 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.data_proc.Rd).value, 0, 32, Rd);
474 LOG_WARNING("no updating of flags yet");
476 if (instruction.info.data_proc.Rd == 15)
480 /* compare instructions (CMP, CMN, TST, TEQ) */
481 else if ((instruction.type >= ARM_TST) && (instruction.type <= ARM_CMN))
485 *dry_run_pc = current_pc + instruction_size;
490 LOG_WARNING("no updating of flags yet");
493 /* load register instructions */
494 else if ((instruction.type >= ARM_LDR) && (instruction.type <= ARM_LDRSH))
496 u32 load_address = 0, modified_address = 0, load_value;
497 u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rn).value, 0, 32);
499 /* adjust Rn in case the PC is being read */
500 if (instruction.info.load_store.Rn == 15)
501 Rn += 2 * instruction_size;
503 if (instruction.info.load_store.offset_mode == 0)
505 if (instruction.info.load_store.U)
506 modified_address = Rn + instruction.info.load_store.offset.offset;
508 modified_address = Rn - instruction.info.load_store.offset.offset;
510 else if (instruction.info.load_store.offset_mode == 1)
513 u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.offset.reg.Rm).value, 0, 32);
514 u8 shift = instruction.info.load_store.offset.reg.shift;
515 u8 shift_imm = instruction.info.load_store.offset.reg.shift_imm;
516 u8 carry = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
518 offset = arm_shift(shift, Rm, shift_imm, &carry);
520 if (instruction.info.load_store.U)
521 modified_address = Rn + offset;
523 modified_address = Rn - offset;
527 LOG_ERROR("BUG: offset_mode neither 0 (offset) nor 1 (scaled register)");
530 if (instruction.info.load_store.index_mode == 0)
533 * we load from the modified address, but don't change the base address register */
534 load_address = modified_address;
535 modified_address = Rn;
537 else if (instruction.info.load_store.index_mode == 1)
540 * we load from the modified address, and write it back to the base address register */
541 load_address = modified_address;
543 else if (instruction.info.load_store.index_mode == 2)
546 * we load from the unmodified address, and write the modified address back */
550 if((!dry_run_pc) || (instruction.info.load_store.Rd == 15))
552 if((retval = target_read_u32(target, load_address, &load_value)) != ERROR_OK)
560 if (instruction.info.load_store.Rd == 15)
562 *dry_run_pc = load_value;
567 *dry_run_pc = current_pc + instruction_size;
574 if ((instruction.info.load_store.index_mode == 1) ||
575 (instruction.info.load_store.index_mode == 2))
577 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rn).value, 0, 32, modified_address);
579 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rd).value, 0, 32, load_value);
581 if (instruction.info.load_store.Rd == 15)
585 /* load multiple instruction */
586 else if (instruction.type == ARM_LDM)
589 u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32);
593 for (i = 0; i < 16; i++)
595 if (instruction.info.load_store_multiple.register_list & (1 << i))
599 switch (instruction.info.load_store_multiple.addressing_mode)
601 case 0: /* Increment after */
604 case 1: /* Increment before */
607 case 2: /* Decrement after */
608 Rn = Rn - (bits_set * 4) + 4;
610 case 3: /* Decrement before */
611 Rn = Rn - (bits_set * 4);
615 for (i = 0; i < 16; i++)
617 if (instruction.info.load_store_multiple.register_list & (1 << i))
619 if((!dry_run_pc) || (i == 15))
621 target_read_u32(target, Rn, &load_values[i]);
629 if (instruction.info.load_store_multiple.register_list & 0x8000)
631 *dry_run_pc = load_values[15];
637 enum armv4_5_mode mode = armv4_5->core_mode;
640 if (instruction.info.load_store_multiple.S)
642 if (instruction.info.load_store_multiple.register_list & 0x8000)
645 mode = ARMV4_5_MODE_USR;
648 for (i = 0; i < 16; i++)
650 if (instruction.info.load_store_multiple.register_list & (1 << i))
652 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, mode, i).value, 0, 32, load_values[i]);
658 u32 spsr = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 16).value, 0, 32);
659 buf_set_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32, spsr);
662 /* base register writeback */
663 if (instruction.info.load_store_multiple.W)
664 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32, Rn);
666 if (instruction.info.load_store_multiple.register_list & 0x8000)
670 /* store multiple instruction */
671 else if (instruction.type == ARM_STM)
677 /* STM wont affect PC (advance by instruction size */
681 u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32);
683 enum armv4_5_mode mode = armv4_5->core_mode;
685 for (i = 0; i < 16; i++)
687 if (instruction.info.load_store_multiple.register_list & (1 << i))
691 if (instruction.info.load_store_multiple.S)
693 mode = ARMV4_5_MODE_USR;
696 switch (instruction.info.load_store_multiple.addressing_mode)
698 case 0: /* Increment after */
701 case 1: /* Increment before */
704 case 2: /* Decrement after */
705 Rn = Rn - (bits_set * 4) + 4;
707 case 3: /* Decrement before */
708 Rn = Rn - (bits_set * 4);
712 for (i = 0; i < 16; i++)
714 if (instruction.info.load_store_multiple.register_list & (1 << i))
716 target_write_u32(target, Rn, buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, i).value, 0, 32));
721 /* base register writeback */
722 if (instruction.info.load_store_multiple.W)
723 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32, Rn);
727 else if (!dry_run_pc)
729 /* the instruction wasn't handled, but we're supposed to simulate it
731 return ERROR_ARM_SIMULATOR_NOT_IMPLEMENTED;
736 *dry_run_pc = current_pc + instruction_size;
741 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);