/***************************************************************************
 *   Copyright (C) 2006 by Dominic Rath                                    *
 *   Dominic.Rath@gmx.de                                                   *
 *                                                                         *
 *   Copyright (C) 2008 by Hongtao Zheng                                   *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 *   This program is distributed in the hope that it will be useful,       *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
 *   GNU General Public License for more details.                          *
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program; if not, write to the                         *
 *   Free Software Foundation, Inc.,                                       *
 *   59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.              *
 ***************************************************************************/
#include "arm_disassembler.h"
#include "arm_simulator.h"
#include "armv4_5.h"
#include "binarybuffer.h"
#include "log.h"
#include "target.h"
34 u32 arm_shift(u8 shift, u32 Rm, u32 shift_amount, u8 *carry)
39 if (shift == 0x0) /* LSL */
41 if ((shift_amount > 0) && (shift_amount <= 32))
43 return_value = Rm << shift_amount;
44 *carry = Rm >> (32 - shift_amount);
46 else if (shift_amount > 32)
51 else /* (shift_amount == 0) */
56 else if (shift == 0x1) /* LSR */
58 if ((shift_amount > 0) && (shift_amount <= 32))
60 return_value = Rm >> shift_amount;
61 *carry = (Rm >> (shift_amount - 1)) & 1;
63 else if (shift_amount > 32)
68 else /* (shift_amount == 0) */
73 else if (shift == 0x2) /* ASR */
75 if ((shift_amount > 0) && (shift_amount <= 32))
77 /* right shifts of unsigned values are guaranteed to be logical (shift in zeroes)
78 * simulate an arithmetic shift (shift in signed-bit) by adding the signed-bit manually */
79 return_value = Rm >> shift_amount;
81 return_value |= 0xffffffff << (32 - shift_amount);
83 else if (shift_amount > 32)
87 return_value = 0xffffffff;
96 else /* (shift_amount == 0) */
101 else if (shift == 0x3) /* ROR */
103 if (shift_amount == 0)
109 shift_amount = shift_amount % 32;
110 return_value = (Rm >> shift_amount) | (Rm << (32 - shift_amount));
111 *carry = (return_value >> 31) & 0x1;
114 else if (shift == 0x4) /* RRX */
116 return_value = Rm >> 1;
125 u32 arm_shifter_operand(armv4_5_common_t *armv4_5, int variant, union arm_shifter_operand shifter_operand, u8 *shifter_carry_out)
128 int instruction_size;
130 if (armv4_5->core_state == ARMV4_5_STATE_ARM)
131 instruction_size = 4;
133 instruction_size = 2;
135 *shifter_carry_out = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
137 if (variant == 0) /* 32-bit immediate */
139 return_value = shifter_operand.immediate.immediate;
141 else if (variant == 1) /* immediate shift */
143 u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.immediate_shift.Rm).value, 0, 32);
145 /* adjust RM in case the PC is being read */
146 if (shifter_operand.immediate_shift.Rm == 15)
147 Rm += 2 * instruction_size;
149 return_value = arm_shift(shifter_operand.immediate_shift.shift, Rm, shifter_operand.immediate_shift.shift_imm, shifter_carry_out);
151 else if (variant == 2) /* register shift */
153 u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.register_shift.Rm).value, 0, 32);
154 u32 Rs = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.register_shift.Rs).value, 0, 32);
156 /* adjust RM in case the PC is being read */
157 if (shifter_operand.register_shift.Rm == 15)
158 Rm += 2 * instruction_size;
160 return_value = arm_shift(shifter_operand.immediate_shift.shift, Rm, Rs, shifter_carry_out);
164 LOG_ERROR("BUG: shifter_operand.variant not 0, 1 or 2");
165 return_value = 0xffffffff;
171 int pass_condition(u32 cpsr, u32 opcode)
173 switch ((opcode & 0xf0000000) >> 28)
176 if (cpsr & 0x40000000)
181 if (!(cpsr & 0x40000000))
186 if (cpsr & 0x20000000)
191 if (!(cpsr & 0x20000000))
196 if (cpsr & 0x80000000)
201 if (!(cpsr & 0x80000000))
206 if (cpsr & 0x10000000)
211 if (!(cpsr & 0x10000000))
216 if ((cpsr & 0x20000000) && !(cpsr & 0x40000000))
221 if (!(cpsr & 0x20000000) || (cpsr & 0x40000000))
226 if (((cpsr & 0x80000000) && (cpsr & 0x10000000))
227 || (!(cpsr & 0x80000000) && !(cpsr & 0x10000000)))
232 if (((cpsr & 0x80000000) && !(cpsr & 0x10000000))
233 || (!(cpsr & 0x80000000) && (cpsr & 0x10000000)))
238 if (!(cpsr & 0x40000000) &&
239 (((cpsr & 0x80000000) && (cpsr & 0x10000000))
240 || (!(cpsr & 0x80000000) && !(cpsr & 0x10000000))))
245 if ((cpsr & 0x40000000) &&
246 (((cpsr & 0x80000000) && !(cpsr & 0x10000000))
247 || (!(cpsr & 0x80000000) && (cpsr & 0x10000000))))
257 LOG_ERROR("BUG: should never get here");
261 int thumb_pass_branch_condition(u32 cpsr, u16 opcode)
263 return pass_condition(cpsr, (opcode & 0x0f00) << 20);
266 /* simulate a single step (if possible)
267 * if the dry_run_pc argument is provided, no state is changed,
268 * but the new pc is stored in the variable pointed at by the argument
270 int arm_simulate_step(target_t *target, u32 *dry_run_pc)
272 armv4_5_common_t *armv4_5 = target->arch_info;
273 u32 current_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
274 arm_instruction_t instruction;
275 int instruction_size;
276 int retval = ERROR_OK;
278 if (armv4_5->core_state == ARMV4_5_STATE_ARM)
282 /* get current instruction, and identify it */
283 if((retval = target_read_u32(target, current_pc, &opcode)) != ERROR_OK)
287 if((retval = arm_evaluate_opcode(opcode, current_pc, &instruction)) != ERROR_OK)
291 instruction_size = 4;
293 /* check condition code (for all instructions) */
294 if (!pass_condition(buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32), opcode))
298 *dry_run_pc = current_pc + instruction_size;
302 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
312 if((retval = target_read_u16(target, current_pc, &opcode)) != ERROR_OK)
316 if((retval = thumb_evaluate_opcode(opcode, current_pc, &instruction)) != ERROR_OK)
320 instruction_size = 2;
322 /* check condition code (only for branch instructions) */
323 if ((!thumb_pass_branch_condition(buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32), opcode)) &&
324 (instruction.type == ARM_B))
328 *dry_run_pc = current_pc + instruction_size;
332 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
339 /* examine instruction type */
341 /* branch instructions */
342 if ((instruction.type >= ARM_B) && (instruction.type <= ARM_BLX))
346 if (instruction.info.b_bl_bx_blx.reg_operand == -1)
348 target = instruction.info.b_bl_bx_blx.target_address;
352 target = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.b_bl_bx_blx.reg_operand).value, 0, 32);
353 if(instruction.info.b_bl_bx_blx.reg_operand == 15)
355 target += 2 * instruction_size;
361 *dry_run_pc = target;
366 if (instruction.type == ARM_B)
368 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target);
370 else if (instruction.type == ARM_BL)
372 u32 old_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
373 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 14).value, 0, 32, old_pc + 4);
374 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target);
376 else if (instruction.type == ARM_BX)
380 armv4_5->core_state = ARMV4_5_STATE_THUMB;
384 armv4_5->core_state = ARMV4_5_STATE_ARM;
386 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target & 0xfffffffe);
388 else if (instruction.type == ARM_BLX)
390 u32 old_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
391 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 14).value, 0, 32, old_pc + 4);
395 armv4_5->core_state = ARMV4_5_STATE_THUMB;
399 armv4_5->core_state = ARMV4_5_STATE_ARM;
401 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target & 0xfffffffe);
407 /* data processing instructions, except compare instructions (CMP, CMN, TST, TEQ) */
408 else if (((instruction.type >= ARM_AND) && (instruction.type <= ARM_RSC))
409 || ((instruction.type >= ARM_ORR) && (instruction.type <= ARM_MVN)))
411 u32 Rd, Rn, shifter_operand;
412 u8 C = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
416 /* ARM_MOV and ARM_MVN does not use Rn */
417 if ((instruction.type != ARM_MOV) && (instruction.type != ARM_MVN))
418 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.data_proc.Rn).value, 0, 32);
422 shifter_operand = arm_shifter_operand(armv4_5, instruction.info.data_proc.variant, instruction.info.data_proc.shifter_operand, &carry_out);
424 /* adjust Rn in case the PC is being read */
425 if (instruction.info.data_proc.Rn == 15)
426 Rn += 2 * instruction_size;
428 if (instruction.type == ARM_AND)
429 Rd = Rn & shifter_operand;
430 else if (instruction.type == ARM_EOR)
431 Rd = Rn ^ shifter_operand;
432 else if (instruction.type == ARM_SUB)
433 Rd = Rn - shifter_operand;
434 else if (instruction.type == ARM_RSB)
435 Rd = shifter_operand - Rn;
436 else if (instruction.type == ARM_ADD)
437 Rd = Rn + shifter_operand;
438 else if (instruction.type == ARM_ADC)
439 Rd = Rn + shifter_operand + (C & 1);
440 else if (instruction.type == ARM_SBC)
441 Rd = Rn - shifter_operand - (C & 1) ? 0 : 1;
442 else if (instruction.type == ARM_RSC)
443 Rd = shifter_operand - Rn - (C & 1) ? 0 : 1;
444 else if (instruction.type == ARM_ORR)
445 Rd = Rn | shifter_operand;
446 else if (instruction.type == ARM_BIC)
447 Rd = Rn & ~(shifter_operand);
448 else if (instruction.type == ARM_MOV)
449 Rd = shifter_operand;
450 else if (instruction.type == ARM_MVN)
451 Rd = ~shifter_operand;
453 LOG_WARNING("unhandled instruction type");
457 if (instruction.info.data_proc.Rd == 15)
464 *dry_run_pc = current_pc + instruction_size;
471 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.data_proc.Rd).value, 0, 32, Rd);
472 LOG_WARNING("no updating of flags yet");
474 if (instruction.info.data_proc.Rd == 15)
478 /* compare instructions (CMP, CMN, TST, TEQ) */
479 else if ((instruction.type >= ARM_TST) && (instruction.type <= ARM_CMN))
483 *dry_run_pc = current_pc + instruction_size;
488 LOG_WARNING("no updating of flags yet");
491 /* load register instructions */
492 else if ((instruction.type >= ARM_LDR) && (instruction.type <= ARM_LDRSH))
494 u32 load_address = 0, modified_address = 0, load_value;
495 u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rn).value, 0, 32);
497 /* adjust Rn in case the PC is being read */
498 if (instruction.info.load_store.Rn == 15)
499 Rn += 2 * instruction_size;
501 if (instruction.info.load_store.offset_mode == 0)
503 if (instruction.info.load_store.U)
504 modified_address = Rn + instruction.info.load_store.offset.offset;
506 modified_address = Rn - instruction.info.load_store.offset.offset;
508 else if (instruction.info.load_store.offset_mode == 1)
511 u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.offset.reg.Rm).value, 0, 32);
512 u8 shift = instruction.info.load_store.offset.reg.shift;
513 u8 shift_imm = instruction.info.load_store.offset.reg.shift_imm;
514 u8 carry = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
516 offset = arm_shift(shift, Rm, shift_imm, &carry);
518 if (instruction.info.load_store.U)
519 modified_address = Rn + offset;
521 modified_address = Rn - offset;
525 LOG_ERROR("BUG: offset_mode neither 0 (offset) nor 1 (scaled register)");
528 if (instruction.info.load_store.index_mode == 0)
531 * we load from the modified address, but don't change the base address register */
532 load_address = modified_address;
533 modified_address = Rn;
535 else if (instruction.info.load_store.index_mode == 1)
538 * we load from the modified address, and write it back to the base address register */
539 load_address = modified_address;
541 else if (instruction.info.load_store.index_mode == 2)
544 * we load from the unmodified address, and write the modified address back */
548 if((!dry_run_pc) || (instruction.info.load_store.Rd == 15))
550 if((retval = target_read_u32(target, load_address, &load_value)) != ERROR_OK)
558 if (instruction.info.load_store.Rd == 15)
560 *dry_run_pc = load_value;
565 *dry_run_pc = current_pc + instruction_size;
572 if ((instruction.info.load_store.index_mode == 1) ||
573 (instruction.info.load_store.index_mode == 2))
575 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rn).value, 0, 32, modified_address);
577 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rd).value, 0, 32, load_value);
579 if (instruction.info.load_store.Rd == 15)
583 /* load multiple instruction */
584 else if (instruction.type == ARM_LDM)
587 u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32);
591 for (i = 0; i < 16; i++)
593 if (instruction.info.load_store_multiple.register_list & (1 << i))
597 switch (instruction.info.load_store_multiple.addressing_mode)
599 case 0: /* Increment after */
602 case 1: /* Increment before */
605 case 2: /* Decrement after */
606 Rn = Rn - (bits_set * 4) + 4;
608 case 3: /* Decrement before */
609 Rn = Rn - (bits_set * 4);
613 for (i = 0; i < 16; i++)
615 if (instruction.info.load_store_multiple.register_list & (1 << i))
617 if((!dry_run_pc) || (i == 15))
619 target_read_u32(target, Rn, &load_values[i]);
627 if (instruction.info.load_store_multiple.register_list & 0x8000)
629 *dry_run_pc = load_values[15];
635 enum armv4_5_mode mode = armv4_5->core_mode;
638 if (instruction.info.load_store_multiple.S)
640 if (instruction.info.load_store_multiple.register_list & 0x8000)
643 mode = ARMV4_5_MODE_USR;
646 for (i = 0; i < 16; i++)
648 if (instruction.info.load_store_multiple.register_list & (1 << i))
650 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, mode, i).value, 0, 32, load_values[i]);
656 u32 spsr = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 16).value, 0, 32);
657 buf_set_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32, spsr);
660 /* base register writeback */
661 if (instruction.info.load_store_multiple.W)
662 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32, Rn);
664 if (instruction.info.load_store_multiple.register_list & 0x8000)
668 /* store multiple instruction */
669 else if (instruction.type == ARM_STM)
675 /* STM wont affect PC (advance by instruction size */
679 u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32);
681 enum armv4_5_mode mode = armv4_5->core_mode;
683 for (i = 0; i < 16; i++)
685 if (instruction.info.load_store_multiple.register_list & (1 << i))
689 if (instruction.info.load_store_multiple.S)
691 mode = ARMV4_5_MODE_USR;
694 switch (instruction.info.load_store_multiple.addressing_mode)
696 case 0: /* Increment after */
699 case 1: /* Increment before */
702 case 2: /* Decrement after */
703 Rn = Rn - (bits_set * 4) + 4;
705 case 3: /* Decrement before */
706 Rn = Rn - (bits_set * 4);
710 for (i = 0; i < 16; i++)
712 if (instruction.info.load_store_multiple.register_list & (1 << i))
714 target_write_u32(target, Rn, buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, i).value, 0, 32));
719 /* base register writeback */
720 if (instruction.info.load_store_multiple.W)
721 buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32, Rn);
725 else if (!dry_run_pc)
727 /* the instruction wasn't handled, but we're supposed to simulate it
729 return ERROR_ARM_SIMULATOR_NOT_IMPLEMENTED;
734 *dry_run_pc = current_pc + instruction_size;
739 buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);