Arm64Assembler.cpp

/*
 * Copyright (C) 2013 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#define LOG_TAG "ArmToArm64Assembler"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <cutils/properties.h>
#include <log/log.h>
#include <private/pixelflinger/ggl_context.h>

#include "codeflinger/Arm64Assembler.h"
#include "codeflinger/Arm64Disassembler.h"
#include "codeflinger/CodeCache.h"

/*
** --------------------------------------------
** Support for Arm64 in GGLAssembler JIT
** --------------------------------------------
**
** Approach
** - GGLAssembler and associated files are largely unchanged.
** - A translator class maps ArmAssemblerInterface calls to
**   generate Arm64 instructions.
**
** ----------------------
** ArmToArm64Assembler
** ----------------------
**
** - Subclassed from ArmAssemblerInterface
**
** - Translates each ArmAssemblerInterface call to generate
**   one or more Arm64 instructions as necessary.
**
** - Does not implement the portions of ArmAssemblerInterface that are
**   unused by GGLAssembler. It calls NOT_IMPLEMENTED() for such cases,
**   which in turn logs a fatal message.
**
** - Uses the A64_... series of functions to generate instruction machine
**   code for Arm64 instructions. These functions also log the instruction
**   to the log if the ARM64_ASM_DEBUG define is set to 1.
**
** - Dumps machine code and the equivalent assembly if the
**   "debug.pf.disasm" property is set. It uses arm64_disassemble to
**   perform the disassembly.
**
** - Uses registers 13 (SP in ARM), 15 (PC in ARM), 16 and 17 for storing
**   intermediate results. GGLAssembler does not use SP and PC, as these
**   registers are marked as reserved. The temporary registers are not
**   saved/restored on the stack, as they are caller-saved registers in
**   Arm64.
**
** - Uses the CSEL instruction to support conditional execution. The
**   result is stored in a temporary register and then copied to the
**   target register if the condition is true.
**
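**   As an illustrative sketch (not verbatim generated output), an ARM
**   conditional add such as
**       ADDNE r0, r1, r2
**   is translated roughly as
**       ADD  w15, w1, w2        // compute into a temporary (mTmpReg1 = 15)
**       CSEL w0, w15, w0, NE    // commit only if the condition holds
**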
** - In the case of conditional data transfer instructions, a conditional
**   branch is used to skip over the instruction if the condition is
**   false.
**
** - Wherever possible, immediate values are moved to a temporary
**   register prior to processing. This simplifies the overall
**   implementation, as instructions requiring immediate values are
**   converted to a move-immediate sequence followed by a
**   register-register instruction.
**
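**   For example, an ARM "ADD r0, r1, #0x12345" becomes, in sketch form:
**       MOVZ w16, #0x2345, LSL #0   // low half into a temporary (mTmpReg2 = 16)
**       MOVK w16, #0x1, LSL #16     // high half
**       ADD  w0, w1, w16
**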
** --------------------------------------------
** ArmToArm64Assembler unit test bench
** --------------------------------------------
**
** - Tests the ArmToArm64Assembler interface for all the ways in which
**   GGLAssembler uses the ArmAssemblerInterface.
**
** - Uses a test jacket (written in assembly) to set the registers and
**   condition flags prior to calling the generated instruction. It also
**   copies registers and flags at the end of execution. The caller then
**   checks whether the generated code performed the correct operation,
**   based on the output registers and flags.
**
** - Broadly contains three types of tests: (i) data operation tests,
**   (ii) data transfer tests and (iii) LDM/STM tests.
**
** ----------------------
** Arm64 disassembler
** ----------------------
** - This disassembler disassembles only those machine codes that can be
**   generated by ArmToArm64Assembler. It has a unit test bench which
**   tests all the instructions supported by the disassembler.
**
** ------------------------------------------------------------------
** ARMAssembler/ARMAssemblerInterface/ARMAssemblerProxy changes
** ------------------------------------------------------------------
**
** - In the existing code, addresses were being handled as 32-bit values
**   in certain places.
**
** - Added a new set of functions for address load/store/manipulation.
**   These are ADDR_LDR, ADDR_STR, ADDR_ADD, ADDR_SUB, and they map to
**   the default 32-bit implementations in ARMAssemblerInterface.
**
** - ArmToArm64Assembler maps these functions to the appropriate 64-bit
**   functions.
**
** ----------------------
** GGLAssembler changes
** ----------------------
** - Since ArmToArm64Assembler can generate up to 4 Arm64 instructions
**   for each call in the worst case, the required memory is set to 4
**   times the ARM memory.
**
** - Address load/store/manipulation were changed to use the new
**   functions added in the ARMAssemblerInterface.
**
*/
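
// A minimal usage sketch (illustrative only; the code buffer and register
// numbers below are hypothetical, and in practice the assembler is driven
// by GGLAssembler through a CodeCache-backed Assembly):
//
//     ArmToArm64Assembler assm(codeBuffer);   // codeBuffer: writable, executable memory
//     assm.prolog();
//     assm.ADD(AL, 0, 2, 0, 1);               // r2 = r0 + r1
//     assm.epilog(0);
//     assm.generate("example");               // patches branches, optionally disassembles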

#define NOT_IMPLEMENTED()  LOG_FATAL("Arm instruction %s not yet implemented\n", __func__)

#define ARM64_ASM_DEBUG 0

#if ARM64_ASM_DEBUG
#define LOG_INSTR(...) ALOGD("\t" __VA_ARGS__)
#define LOG_LABEL(...) ALOGD(__VA_ARGS__)
#else
#define LOG_INSTR(...) ((void)0)
#define LOG_LABEL(...) ((void)0)
#endif

namespace android {

static __unused const char* shift_codes[] =
{
    "LSL", "LSR", "ASR", "ROR"
};
static __unused const char *cc_codes[] =
{
    "EQ", "NE", "CS", "CC", "MI",
    "PL", "VS", "VC", "HI", "LS",
    "GE", "LT", "GT", "LE", "AL", "NV"
};

ArmToArm64Assembler::ArmToArm64Assembler(const sp<Assembly>& assembly)
    :   ARMAssemblerInterface(),
        mAssembly(assembly)
{
    mBase = mPC = (uint32_t *)assembly->base();
    mDuration = ggl_system_time();
    mZeroReg = 13;
    mTmpReg1 = 15;
    mTmpReg2 = 16;
    mTmpReg3 = 17;
}

ArmToArm64Assembler::ArmToArm64Assembler(void *base)
    :   ARMAssemblerInterface(), mAssembly(NULL)
{
    mBase = mPC = (uint32_t *)base;
    mDuration = ggl_system_time();
    // Regs 13, 15, 16, 17 are used as temporary registers
    mZeroReg = 13;
    mTmpReg1 = 15;
    mTmpReg2 = 16;
    mTmpReg3 = 17;
}

ArmToArm64Assembler::~ArmToArm64Assembler()
{
}

uint32_t* ArmToArm64Assembler::pc() const
{
    return mPC;
}

uint32_t* ArmToArm64Assembler::base() const
{
    return mBase;
}

void ArmToArm64Assembler::reset()
{
    if(mAssembly == NULL)
        mPC = mBase;
    else
        mBase = mPC = (uint32_t *)mAssembly->base();
    mBranchTargets.clear();
    mLabels.clear();
    mLabelsInverseMapping.clear();
    mComments.clear();
#if ARM64_ASM_DEBUG
    ALOGI("RESET\n");
#endif
}

int ArmToArm64Assembler::getCodegenArch()
{
    return CODEGEN_ARCH_ARM64;
}

// ----------------------------------------------------------------------------

void ArmToArm64Assembler::disassemble(const char* name)
{
    if(name)
    {
        printf("%s:\n", name);
    }
    size_t count = pc()-base();
    uint32_t* i = base();
    while (count--)
    {
        ssize_t label = mLabelsInverseMapping.indexOfKey(i);
        if (label >= 0)
        {
            printf("%s:\n", mLabelsInverseMapping.valueAt(label));
        }
        ssize_t comment = mComments.indexOfKey(i);
        if (comment >= 0)
        {
            printf("; %s\n", mComments.valueAt(comment));
        }
        printf("%p: %08x ", i, uint32_t(i[0]));
        {
            char instr[256];
            ::arm64_disassemble(*i, instr);
            printf("%s\n", instr);
        }
        i++;
    }
}

void ArmToArm64Assembler::comment(const char* string)
{
    mComments.add(mPC, string);
    LOG_INSTR("//%s\n", string);
}

void ArmToArm64Assembler::label(const char* theLabel)
{
    mLabels.add(theLabel, mPC);
    mLabelsInverseMapping.add(mPC, theLabel);
    LOG_LABEL("%s:\n", theLabel);
}

void ArmToArm64Assembler::B(int cc, const char* label)
{
    mBranchTargets.add(branch_target_t(label, mPC));
    LOG_INSTR("B%s %s\n", cc_codes[cc], label);
    *mPC++ = (0x54 << 24) | cc;
}

void ArmToArm64Assembler::BL(int /*cc*/, const char* /*label*/)
{
    NOT_IMPLEMENTED(); //Not Required
}

// ----------------------------------------------------------------------------
// Prolog/Epilog & Generate...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::prolog()
{
    // write prolog code
    mPrologPC = mPC;
    *mPC++ = A64_MOVZ_X(mZeroReg, 0, 0);
}

void ArmToArm64Assembler::epilog(uint32_t /*touched*/)
{
    // write epilog code
    static const int XLR = 30;
    *mPC++ = A64_RET(XLR);
}

int ArmToArm64Assembler::generate(const char* name)
{
    // fixup all the branches
    size_t count = mBranchTargets.size();
    while (count--)
    {
        const branch_target_t& bt = mBranchTargets[count];
        uint32_t* target_pc = mLabels.valueFor(bt.label);
        LOG_ALWAYS_FATAL_IF(!target_pc,
                "error resolving branch targets, target_pc is null");
        int32_t offset = int32_t(target_pc - bt.pc);
        *bt.pc |= (offset & 0x7FFFF) << 5;
    }

    if(mAssembly != NULL)
        mAssembly->resize(int(pc()-base())*4);

    // the instruction cache is flushed by CodeCache
    const int64_t duration = ggl_system_time() - mDuration;
    const char * const format = "generated %s (%d ins) at [%p:%p] in %ld ns\n";
    ALOGI(format, name, int(pc()-base()), base(), pc(), duration);

    char value[PROPERTY_VALUE_MAX];
    property_get("debug.pf.disasm", value, "0");
    if (atoi(value) != 0)
    {
        printf(format, name, int(pc()-base()), base(), pc(), duration);
        disassemble(name);
    }
    return OK;
}

uint32_t* ArmToArm64Assembler::pcForLabel(const char* label)
{
    return mLabels.valueFor(label);
}

// ----------------------------------------------------------------------------
// Data Processing...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::dataProcessingCommon(int opcode,
        int s, int Rd, int Rn, uint32_t Op2)
{
    if(opcode != opSUB && s == 1)
    {
        NOT_IMPLEMENTED(); //Not required
        return;
    }

    if(opcode != opSUB && opcode != opADD && opcode != opAND &&
       opcode != opORR && opcode != opMVN)
    {
        NOT_IMPLEMENTED(); //Not required
        return;
    }

    if(Op2 == OPERAND_REG_IMM && mAddrMode.reg_imm_shift > 31)
    {
        NOT_IMPLEMENTED();
        return;
    }

    //Store immediate in temporary register and convert
    //immediate operation into register operation
    if(Op2 == OPERAND_IMM)
    {
        int imm = mAddrMode.immediate;
        *mPC++ = A64_MOVZ_W(mTmpReg2, imm & 0x0000FFFF, 0);
        *mPC++ = A64_MOVK_W(mTmpReg2, (imm >> 16) & 0x0000FFFF, 16);
        Op2 = mTmpReg2;
    }

    {
        uint32_t shift;
        uint32_t amount;
        uint32_t Rm;

        if(Op2 == OPERAND_REG_IMM)
        {
            shift  = mAddrMode.reg_imm_type;
            amount = mAddrMode.reg_imm_shift;
            Rm     = mAddrMode.reg_imm_Rm;
        }
        else if(Op2 < OPERAND_REG)
        {
            shift  = 0;
            amount = 0;
            Rm     = Op2;
        }
        else
        {
            NOT_IMPLEMENTED(); //Not required
            return;
        }

        switch(opcode)
        {
        case opADD: *mPC++ = A64_ADD_W(Rd, Rn, Rm, shift, amount); break;
        case opAND: *mPC++ = A64_AND_W(Rd, Rn, Rm, shift, amount); break;
        case opORR: *mPC++ = A64_ORR_W(Rd, Rn, Rm, shift, amount); break;
        case opMVN: *mPC++ = A64_ORN_W(Rd, Rn, Rm, shift, amount); break;
        case opSUB: *mPC++ = A64_SUB_W(Rd, Rn, Rm, shift, amount, s); break;
        }
    }
}

void ArmToArm64Assembler::dataProcessing(int opcode, int cc,
        int s, int Rd, int Rn, uint32_t Op2)
{
    uint32_t Wd;

    if(cc != AL)
        Wd = mTmpReg1;
    else
        Wd = Rd;

    if(opcode == opADD || opcode == opAND || opcode == opORR || opcode == opSUB)
    {
        dataProcessingCommon(opcode, s, Wd, Rn, Op2);
    }
    else if(opcode == opCMP)
    {
        dataProcessingCommon(opSUB, 1, mTmpReg3, Rn, Op2);
    }
    else if(opcode == opRSB)
    {
        dataProcessingCommon(opSUB, s, Wd, Rn, Op2);
        dataProcessingCommon(opSUB, s, Wd, mZeroReg, Wd);
    }
    else if(opcode == opMOV)
    {
        dataProcessingCommon(opORR, 0, Wd, mZeroReg, Op2);
        if(s == 1)
        {
            dataProcessingCommon(opSUB, 1, mTmpReg3, Wd, mZeroReg);
        }
    }
    else if(opcode == opMVN)
    {
        dataProcessingCommon(opMVN, s, Wd, mZeroReg, Op2);
    }
    else if(opcode == opBIC)
    {
        dataProcessingCommon(opMVN, s, mTmpReg3, mZeroReg, Op2);
        dataProcessingCommon(opAND, s, Wd, Rn, mTmpReg3);
    }
    else
    {
        NOT_IMPLEMENTED();
        return;
    }
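
    // For conditional variants the result was computed into mTmpReg1;
    // CSEL commits it to Rd only when the condition holds, otherwise Rd
    // keeps its previous value.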
    if(cc != AL)
    {
        *mPC++ = A64_CSEL_W(Rd, mTmpReg1, Rd, cc);
    }
}

// ----------------------------------------------------------------------------
// Address Processing...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::ADDR_ADD(int cc,
        int s, int Rd, int Rn, uint32_t Op2)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required
    if(s  != 0) { NOT_IMPLEMENTED(); return; } //Not required

    if(Op2 == OPERAND_REG_IMM && mAddrMode.reg_imm_type == LSL)
    {
        int Rm = mAddrMode.reg_imm_Rm;
        int amount = mAddrMode.reg_imm_shift;
        *mPC++ = A64_ADD_X_Wm_SXTW(Rd, Rn, Rm, amount);
    }
    else if(Op2 < OPERAND_REG)
    {
        int Rm = Op2;
        int amount = 0;
        *mPC++ = A64_ADD_X_Wm_SXTW(Rd, Rn, Rm, amount);
    }
    else if(Op2 == OPERAND_IMM)
    {
        int imm = mAddrMode.immediate;
        *mPC++ = A64_MOVZ_W(mTmpReg1, imm & 0x0000FFFF, 0);
        *mPC++ = A64_MOVK_W(mTmpReg1, (imm >> 16) & 0x0000FFFF, 16);

        int Rm = mTmpReg1;
        int amount = 0;
        *mPC++ = A64_ADD_X_Wm_SXTW(Rd, Rn, Rm, amount);
    }
    else
    {
        NOT_IMPLEMENTED(); //Not required
    }
}

void ArmToArm64Assembler::ADDR_SUB(int cc,
        int s, int Rd, int Rn, uint32_t Op2)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required
    if(s  != 0) { NOT_IMPLEMENTED(); return; } //Not required

    if(Op2 == OPERAND_REG_IMM && mAddrMode.reg_imm_type == LSR)
    {
        *mPC++ = A64_ADD_W(mTmpReg1, mZeroReg, mAddrMode.reg_imm_Rm,
                           LSR, mAddrMode.reg_imm_shift);
        *mPC++ = A64_SUB_X_Wm_SXTW(Rd, Rn, mTmpReg1, 0);
    }
    else
    {
        NOT_IMPLEMENTED(); //Not required
    }
}

// ----------------------------------------------------------------------------
// multiply...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::MLA(int cc, int s, int Rd, int Rm, int Rs, int Rn)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required

    *mPC++ = A64_MADD_W(Rd, Rm, Rs, Rn);
    if(s == 1)
        dataProcessingCommon(opSUB, 1, mTmpReg1, Rd, mZeroReg);
}

void ArmToArm64Assembler::MUL(int cc, int s, int Rd, int Rm, int Rs)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required
    if(s  != 0) { NOT_IMPLEMENTED(); return; } //Not required

    *mPC++ = A64_MADD_W(Rd, Rm, Rs, mZeroReg);
}

void ArmToArm64Assembler::UMULL(int /*cc*/, int /*s*/,
        int /*RdLo*/, int /*RdHi*/, int /*Rm*/, int /*Rs*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::UMUAL(int /*cc*/, int /*s*/,
        int /*RdLo*/, int /*RdHi*/, int /*Rm*/, int /*Rs*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::SMULL(int /*cc*/, int /*s*/,
        int /*RdLo*/, int /*RdHi*/, int /*Rm*/, int /*Rs*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::SMUAL(int /*cc*/, int /*s*/,
        int /*RdLo*/, int /*RdHi*/, int /*Rm*/, int /*Rs*/)
{
    NOT_IMPLEMENTED(); //Not required
}

// ----------------------------------------------------------------------------
// branches relative to PC...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::B(int /*cc*/, uint32_t* /*pc*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::BL(int /*cc*/, uint32_t* /*pc*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::BX(int /*cc*/, int /*Rn*/)
{
    NOT_IMPLEMENTED(); //Not required
}

// ----------------------------------------------------------------------------
// data transfer...
// ----------------------------------------------------------------------------

enum dataTransferOp
{
    opLDR, opLDRB, opLDRH, opSTR, opSTRB, opSTRH
};

void ArmToArm64Assembler::dataTransfer(int op, int cc,
        int Rd, int Rn, uint32_t op_type, uint32_t size)
{
    const int XSP = 31;
    if(Rn == SP)
        Rn = XSP;

    if(op_type == OPERAND_IMM)
    {
        int addrReg;
        int imm = mAddrMode.immediate;
        if(imm >= 0 && imm < (1<<12))
            *mPC++ = A64_ADD_IMM_X(mTmpReg1, mZeroReg, imm, 0);
        else if(imm < 0 && -imm < (1<<12))
            *mPC++ = A64_SUB_IMM_X(mTmpReg1, mZeroReg, -imm, 0);
        else
        {
            NOT_IMPLEMENTED();
            return;
        }

        addrReg = Rn;
        if(mAddrMode.preindex == true || mAddrMode.postindex == true)
        {
            *mPC++ = A64_ADD_X(mTmpReg2, addrReg, mTmpReg1);
            if(mAddrMode.preindex == true)
                addrReg = mTmpReg2;
        }
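
        // cc^1 flips the low bit of the A64 condition code, which inverts
        // it; the B.cond then skips the single following load/store
        // (offset 8 bytes) when the original condition is false.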
        if(cc != AL)
            *mPC++ = A64_B_COND(cc^1, 8);
        *mPC++ = A64_LDRSTR_Wm_SXTW_0(op, size, Rd, addrReg, mZeroReg);

        if(mAddrMode.writeback == true)
            *mPC++ = A64_CSEL_X(Rn, mTmpReg2, Rn, cc);
    }
    else if(op_type == OPERAND_REG_OFFSET)
    {
        if(cc != AL)
            *mPC++ = A64_B_COND(cc^1, 8);
        *mPC++ = A64_LDRSTR_Wm_SXTW_0(op, size, Rd, Rn, mAddrMode.reg_offset);
    }
    else if(op_type > OPERAND_UNSUPPORTED)
    {
        if(cc != AL)
            *mPC++ = A64_B_COND(cc^1, 8);
        *mPC++ = A64_LDRSTR_Wm_SXTW_0(op, size, Rd, Rn, mZeroReg);
    }
    else
    {
        NOT_IMPLEMENTED(); // Not required
    }
    return;
}

void ArmToArm64Assembler::ADDR_LDR(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opLDR, cc, Rd, Rn, op_type, 64);
}

void ArmToArm64Assembler::ADDR_STR(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opSTR, cc, Rd, Rn, op_type, 64);
}

void ArmToArm64Assembler::LDR(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opLDR, cc, Rd, Rn, op_type);
}

void ArmToArm64Assembler::LDRB(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opLDRB, cc, Rd, Rn, op_type);
}

void ArmToArm64Assembler::STR(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opSTR, cc, Rd, Rn, op_type);
}

void ArmToArm64Assembler::STRB(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opSTRB, cc, Rd, Rn, op_type);
}

void ArmToArm64Assembler::LDRH(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opLDRH, cc, Rd, Rn, op_type);
}

void ArmToArm64Assembler::LDRSB(int /*cc*/, int /*Rd*/, int /*Rn*/, uint32_t /*offset*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::LDRSH(int /*cc*/, int /*Rd*/, int /*Rn*/, uint32_t /*offset*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::STRH(int cc, int Rd, int Rn, uint32_t op_type)
{
    return dataTransfer(opSTRH, cc, Rd, Rn, op_type);
}

// ----------------------------------------------------------------------------
// block data transfer...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::LDM(int cc, int dir,
        int Rn, int W, uint32_t reg_list)
{
    const int XSP = 31;
    if(cc != AL || dir != IA || W == 0 || Rn != SP)
    {
        NOT_IMPLEMENTED();
        return;
    }
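
    // Pop each register from its own 16-byte stack slot so that SP stays
    // 16-byte aligned, as AArch64 requires.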
    for(int i = 0; i < 32; ++i)
    {
        if((reg_list & (1 << i)))
        {
            int reg = i;
            int size = 16;
            *mPC++ = A64_LDR_IMM_PostIndex(reg, XSP, size);
        }
    }
}

void ArmToArm64Assembler::STM(int cc, int dir,
        int Rn, int W, uint32_t reg_list)
{
    const int XSP = 31;
    if(cc != AL || dir != DB || W == 0 || Rn != SP)
    {
        NOT_IMPLEMENTED();
        return;
    }
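
    // Push in descending register order, one 16-byte slot per register,
    // mirroring the LDM layout above.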
    for(int i = 31; i >= 0; --i)
    {
        if((reg_list & (1 << i)))
        {
            int size = -16;
            int reg = i;
            *mPC++ = A64_STR_IMM_PreIndex(reg, XSP, size);
        }
    }
}

// ----------------------------------------------------------------------------
// special...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::SWP(int /*cc*/, int /*Rn*/, int /*Rd*/, int /*Rm*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::SWPB(int /*cc*/, int /*Rn*/, int /*Rd*/, int /*Rm*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::SWI(int /*cc*/, uint32_t /*comment*/)
{
    NOT_IMPLEMENTED(); //Not required
}

// ----------------------------------------------------------------------------
// DSP instructions...
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::PLD(int /*Rn*/, uint32_t /*offset*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::CLZ(int /*cc*/, int /*Rd*/, int /*Rm*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::QADD(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::QDADD(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::QSUB(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
{
    NOT_IMPLEMENTED(); //Not required
}

void ArmToArm64Assembler::QDSUB(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
{
    NOT_IMPLEMENTED(); //Not required
}

// ----------------------------------------------------------------------------
// 16 x 16 multiplication
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::SMUL(int cc, int xy,
        int Rd, int Rm, int Rs)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required

    if (xy & xyTB)
        *mPC++ = A64_SBFM_W(mTmpReg1, Rm, 16, 31);
    else
        *mPC++ = A64_SBFM_W(mTmpReg1, Rm, 0, 15);

    if (xy & xyBT)
        *mPC++ = A64_SBFM_W(mTmpReg2, Rs, 16, 31);
    else
        *mPC++ = A64_SBFM_W(mTmpReg2, Rs, 0, 15);

    *mPC++ = A64_MADD_W(Rd, mTmpReg1, mTmpReg2, mZeroReg);
}

// ----------------------------------------------------------------------------
// 32 x 16 multiplication
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::SMULW(int cc, int y, int Rd, int Rm, int Rs)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required
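
    // Sign-extend the selected 16-bit half of Rs, copy Rm, form the
    // 64-bit product with SMADDL, then extract bits [47:16] to match
    // ARM SMULW semantics.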
  712. if (y & yT)
  713. *mPC++ = A64_SBFM_W(mTmpReg1, Rs, 16, 31);
  714. else
  715. *mPC++ = A64_SBFM_W(mTmpReg1, Rs, 0, 15);
  716. *mPC++ = A64_SBFM_W(mTmpReg2, Rm, 0, 31);
  717. *mPC++ = A64_SMADDL(mTmpReg3,mTmpReg1,mTmpReg2, mZeroReg);
  718. *mPC++ = A64_UBFM_X(Rd,mTmpReg3, 16, 47);
  719. }
  720. // ----------------------------------------------------------------------------
  721. // 16 x 16 multiplication and accumulate
  722. // ----------------------------------------------------------------------------
  723. void ArmToArm64Assembler::SMLA(int cc, int xy, int Rd, int Rm, int Rs, int Rn)
  724. {
  725. if(cc != AL){ NOT_IMPLEMENTED(); return;} //Not required
  726. if(xy != xyBB) { NOT_IMPLEMENTED(); return;} //Not required
  727. *mPC++ = A64_SBFM_W(mTmpReg1, Rm, 0, 15);
  728. *mPC++ = A64_SBFM_W(mTmpReg2, Rs, 0, 15);
  729. *mPC++ = A64_MADD_W(Rd, mTmpReg1, mTmpReg2, Rn);
  730. }
  731. void ArmToArm64Assembler::SMLAL(int /*cc*/, int /*xy*/,
  732. int /*RdHi*/, int /*RdLo*/, int /*Rs*/, int /*Rm*/)
  733. {
  734. NOT_IMPLEMENTED(); //Not required
  735. return;
  736. }
  737. void ArmToArm64Assembler::SMLAW(int /*cc*/, int /*y*/,
  738. int /*Rd*/, int /*Rm*/, int /*Rs*/, int /*Rn*/)
  739. {
  740. NOT_IMPLEMENTED(); //Not required
  741. return;
  742. }
  743. // ----------------------------------------------------------------------------
  744. // Byte/half word extract and extend
  745. // ----------------------------------------------------------------------------
  746. void ArmToArm64Assembler::UXTB16(int cc, int Rd, int Rm, int rotate)
  747. {
  748. if(cc != AL){ NOT_IMPLEMENTED(); return;} //Not required
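    // Rotate Rm right by rotate*8 via EXTR, then keep only bytes 0 and 2
    // by ANDing with 0x00FF00FF built in a temporary via MOVZ/MOVK.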
    *mPC++ = A64_EXTR_W(mTmpReg1, Rm, Rm, rotate * 8);

    uint32_t imm = 0x00FF00FF;
    *mPC++ = A64_MOVZ_W(mTmpReg2, imm & 0xFFFF, 0);
    *mPC++ = A64_MOVK_W(mTmpReg2, (imm >> 16) & 0x0000FFFF, 16);
    *mPC++ = A64_AND_W(Rd, mTmpReg1, mTmpReg2);
}

// ----------------------------------------------------------------------------
// Bit manipulation
// ----------------------------------------------------------------------------

void ArmToArm64Assembler::UBFX(int cc, int Rd, int Rn, int lsb, int width)
{
    if(cc != AL){ NOT_IMPLEMENTED(); return; } //Not required
    *mPC++ = A64_UBFM_W(Rd, Rn, lsb, lsb + width - 1);
}

// ----------------------------------------------------------------------------
// Shifters...
// ----------------------------------------------------------------------------

int ArmToArm64Assembler::buildImmediate(
        uint32_t immediate, uint32_t& rot, uint32_t& imm)
{
    rot = 0;
    imm = immediate;
    return 0; // Always true
}

bool ArmToArm64Assembler::isValidImmediate(uint32_t immediate)
{
    uint32_t rot, imm;
    return buildImmediate(immediate, rot, imm) == 0;
}

uint32_t ArmToArm64Assembler::imm(uint32_t immediate)
{
    mAddrMode.immediate = immediate;
    mAddrMode.writeback = false;
    mAddrMode.preindex  = false;
    mAddrMode.postindex = false;
    return OPERAND_IMM;
}

uint32_t ArmToArm64Assembler::reg_imm(int Rm, int type, uint32_t shift)
{
    mAddrMode.reg_imm_Rm    = Rm;
    mAddrMode.reg_imm_type  = type;
    mAddrMode.reg_imm_shift = shift;
    return OPERAND_REG_IMM;
}

uint32_t ArmToArm64Assembler::reg_rrx(int /*Rm*/)
{
    NOT_IMPLEMENTED();
    return OPERAND_UNSUPPORTED;
}

uint32_t ArmToArm64Assembler::reg_reg(int /*Rm*/, int /*type*/, int /*Rs*/)
{
    NOT_IMPLEMENTED(); //Not required
    return OPERAND_UNSUPPORTED;
}

// ----------------------------------------------------------------------------
// Addressing modes...
// ----------------------------------------------------------------------------

uint32_t ArmToArm64Assembler::immed12_pre(int32_t immed12, int W)
{
    mAddrMode.immediate = immed12;
    mAddrMode.writeback = W;
    mAddrMode.preindex  = true;
    mAddrMode.postindex = false;
    return OPERAND_IMM;
}

uint32_t ArmToArm64Assembler::immed12_post(int32_t immed12)
{
    mAddrMode.immediate = immed12;
    mAddrMode.writeback = true;
    mAddrMode.preindex  = false;
    mAddrMode.postindex = true;
    return OPERAND_IMM;
}

uint32_t ArmToArm64Assembler::reg_scale_pre(int Rm, int type,
        uint32_t shift, int W)
{
    if(type != 0 || shift != 0 || W != 0)
    {
        NOT_IMPLEMENTED(); //Not required
        return OPERAND_UNSUPPORTED;
    }
    else
    {
        mAddrMode.reg_offset = Rm;
        return OPERAND_REG_OFFSET;
    }
}

uint32_t ArmToArm64Assembler::reg_scale_post(int /*Rm*/, int /*type*/, uint32_t /*shift*/)
{
    NOT_IMPLEMENTED(); //Not required
    return OPERAND_UNSUPPORTED;
}

uint32_t ArmToArm64Assembler::immed8_pre(int32_t immed8, int W)
{
    mAddrMode.immediate = immed8;
    mAddrMode.writeback = W;
    mAddrMode.preindex  = true;
    mAddrMode.postindex = false;
    return OPERAND_IMM;
}

uint32_t ArmToArm64Assembler::immed8_post(int32_t immed8)
{
    mAddrMode.immediate = immed8;
    mAddrMode.writeback = true;
    mAddrMode.preindex  = false;
    mAddrMode.postindex = true;
    return OPERAND_IMM;
}

uint32_t ArmToArm64Assembler::reg_pre(int Rm, int W)
{
    if(W != 0)
    {
        NOT_IMPLEMENTED(); //Not required
        return OPERAND_UNSUPPORTED;
    }
    else
    {
        mAddrMode.reg_offset = Rm;
        return OPERAND_REG_OFFSET;
    }
}

uint32_t ArmToArm64Assembler::reg_post(int /*Rm*/)
{
    NOT_IMPLEMENTED(); //Not required
    return OPERAND_UNSUPPORTED;
}

// ----------------------------------------------------------------------------
// A64 instructions
// ----------------------------------------------------------------------------

static __unused const char * dataTransferOpName[] =
{
    "LDR", "LDRB", "LDRH", "STR", "STRB", "STRH"
};

static const uint32_t dataTransferOpCode [] =
{
    ((0xB8u << 24) | (0x3 << 21) | (0x6 << 13) | (0x0 << 12) | (0x1 << 11)),
    ((0x38u << 24) | (0x3 << 21) | (0x6 << 13) | (0x1 << 12) | (0x1 << 11)),
    ((0x78u << 24) | (0x3 << 21) | (0x6 << 13) | (0x0 << 12) | (0x1 << 11)),
    ((0xB8u << 24) | (0x1 << 21) | (0x6 << 13) | (0x0 << 12) | (0x1 << 11)),
    ((0x38u << 24) | (0x1 << 21) | (0x6 << 13) | (0x1 << 12) | (0x1 << 11)),
    ((0x78u << 24) | (0x1 << 21) | (0x6 << 13) | (0x0 << 12) | (0x1 << 11))
};
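
// In the encodings above, bit 22 distinguishes loads (1) from stores (0),
// bit 21 selects the register-offset form, and bits [15:13] hold the
// extend option (0b110 = SXTW). The 0xB8/0x38/0x78 prefixes select the
// 32-bit, byte and halfword size fields respectively.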

uint32_t ArmToArm64Assembler::A64_LDRSTR_Wm_SXTW_0(uint32_t op,
        uint32_t size, uint32_t Rt, uint32_t Rn, uint32_t Rm)
{
    if(size == 32)
    {
        LOG_INSTR("%s W%d, [X%d, W%d, SXTW #0]\n",
                  dataTransferOpName[op], Rt, Rn, Rm);
        return (dataTransferOpCode[op] | (Rm << 16) | (Rn << 5) | Rt);
    }
    else
    {
        LOG_INSTR("%s X%d, [X%d, W%d, SXTW #0]\n",
                  dataTransferOpName[op], Rt, Rn, Rm);
        return (dataTransferOpCode[op] | (0x1 << 30) | (Rm << 16) | (Rn << 5) | Rt);
    }
}

uint32_t ArmToArm64Assembler::A64_STR_IMM_PreIndex(uint32_t Rt,
        uint32_t Rn, int32_t simm)
{
    if(Rn == 31)
        LOG_INSTR("STR W%d, [SP, #%d]!\n", Rt, simm);
    else
        LOG_INSTR("STR W%d, [X%d, #%d]!\n", Rt, Rn, simm);

    uint32_t imm9 = (unsigned)(simm) & 0x01FF;
    return (0xB8 << 24) | (imm9 << 12) | (0x3 << 10) | (Rn << 5) | Rt;
}

uint32_t ArmToArm64Assembler::A64_LDR_IMM_PostIndex(uint32_t Rt,
        uint32_t Rn, int32_t simm)
{
    if(Rn == 31)
        LOG_INSTR("LDR W%d, [SP], #%d\n", Rt, simm);
    else
        LOG_INSTR("LDR W%d, [X%d], #%d\n", Rt, Rn, simm);

    uint32_t imm9 = (unsigned)(simm) & 0x01FF;
    return (0xB8 << 24) | (0x1 << 22) |
           (imm9 << 12) | (0x1 << 10) | (Rn << 5) | Rt;
}

uint32_t ArmToArm64Assembler::A64_ADD_X_Wm_SXTW(uint32_t Rd,
        uint32_t Rn, uint32_t Rm, uint32_t amount)
{
    LOG_INSTR("ADD X%d, X%d, W%d, SXTW #%d\n", Rd, Rn, Rm, amount);
    return ((0x8B << 24) | (0x1 << 21) | (Rm << 16) |
            (0x6 << 13) | (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_SUB_X_Wm_SXTW(uint32_t Rd,
        uint32_t Rn, uint32_t Rm, uint32_t amount)
{
    LOG_INSTR("SUB X%d, X%d, W%d, SXTW #%d\n", Rd, Rn, Rm, amount);
    return ((0xCB << 24) | (0x1 << 21) | (Rm << 16) |
            (0x6 << 13) | (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_B_COND(uint32_t cc, uint32_t offset)
{
    LOG_INSTR("B.%s #.+%d\n", cc_codes[cc], offset);
    return (0x54 << 24) | ((offset/4) << 5) | (cc);
}

uint32_t ArmToArm64Assembler::A64_ADD_X(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t shift, uint32_t amount)
{
    LOG_INSTR("ADD X%d, X%d, X%d, %s #%d\n",
              Rd, Rn, Rm, shift_codes[shift], amount);
    return ((0x8B << 24) | (shift << 22) | (Rm << 16) |
            (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_ADD_IMM_X(uint32_t Rd, uint32_t Rn,
        uint32_t imm, uint32_t shift)
{
    LOG_INSTR("ADD X%d, X%d, #%d, LSL #%d\n", Rd, Rn, imm, shift);
    return (0x91 << 24) | ((shift/12) << 22) | (imm << 10) | (Rn << 5) | Rd;
}

uint32_t ArmToArm64Assembler::A64_SUB_IMM_X(uint32_t Rd, uint32_t Rn,
        uint32_t imm, uint32_t shift)
{
    LOG_INSTR("SUB X%d, X%d, #%d, LSL #%d\n", Rd, Rn, imm, shift);
    return (0xD1 << 24) | ((shift/12) << 22) | (imm << 10) | (Rn << 5) | Rd;
}

uint32_t ArmToArm64Assembler::A64_ADD_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t shift, uint32_t amount)
{
    LOG_INSTR("ADD W%d, W%d, W%d, %s #%d\n",
              Rd, Rn, Rm, shift_codes[shift], amount);
    return ((0x0B << 24) | (shift << 22) | (Rm << 16) |
            (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_SUB_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t shift, uint32_t amount,
        uint32_t setflag)
{
    if(setflag == 0)
    {
        LOG_INSTR("SUB W%d, W%d, W%d, %s #%d\n",
                  Rd, Rn, Rm, shift_codes[shift], amount);
        return ((0x4B << 24) | (shift << 22) | (Rm << 16) |
                (amount << 10) | (Rn << 5) | Rd);
    }
    else
    {
        LOG_INSTR("SUBS W%d, W%d, W%d, %s #%d\n",
                  Rd, Rn, Rm, shift_codes[shift], amount);
        return ((0x6B << 24) | (shift << 22) | (Rm << 16) |
                (amount << 10) | (Rn << 5) | Rd);
    }
}

uint32_t ArmToArm64Assembler::A64_AND_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t shift, uint32_t amount)
{
    LOG_INSTR("AND W%d, W%d, W%d, %s #%d\n",
              Rd, Rn, Rm, shift_codes[shift], amount);
    return ((0x0A << 24) | (shift << 22) | (Rm << 16) |
            (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_ORR_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t shift, uint32_t amount)
{
    LOG_INSTR("ORR W%d, W%d, W%d, %s #%d\n",
              Rd, Rn, Rm, shift_codes[shift], amount);
    return ((0x2A << 24) | (shift << 22) | (Rm << 16) |
            (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_ORN_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t shift, uint32_t amount)
{
    LOG_INSTR("ORN W%d, W%d, W%d, %s #%d\n",
              Rd, Rn, Rm, shift_codes[shift], amount);
    return ((0x2A << 24) | (shift << 22) | (0x1 << 21) | (Rm << 16) |
            (amount << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_CSEL_X(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t cond)
{
    LOG_INSTR("CSEL X%d, X%d, X%d, %s\n", Rd, Rn, Rm, cc_codes[cond]);
    return ((0x9A << 24) | (0x1 << 23) | (Rm << 16) | (cond << 12) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_CSEL_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t cond)
{
    LOG_INSTR("CSEL W%d, W%d, W%d, %s\n", Rd, Rn, Rm, cc_codes[cond]);
    return ((0x1A << 24) | (0x1 << 23) | (Rm << 16) | (cond << 12) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_RET(uint32_t Rn)
{
    LOG_INSTR("RET X%d\n", Rn);
    return ((0xD6 << 24) | (0x1 << 22) | (0x1F << 16) | (Rn << 5));
}

uint32_t ArmToArm64Assembler::A64_MOVZ_X(uint32_t Rd, uint32_t imm,
        uint32_t shift)
{
    LOG_INSTR("MOVZ X%d, #0x%x, LSL #%d\n", Rd, imm, shift);
    return (0xD2 << 24) | (0x1 << 23) | ((shift/16) << 21) | (imm << 5) | Rd;
}

uint32_t ArmToArm64Assembler::A64_MOVK_W(uint32_t Rd, uint32_t imm,
        uint32_t shift)
{
    LOG_INSTR("MOVK W%d, #0x%x, LSL #%d\n", Rd, imm, shift);
    return (0x72 << 24) | (0x1 << 23) | ((shift/16) << 21) | (imm << 5) | Rd;
}

uint32_t ArmToArm64Assembler::A64_MOVZ_W(uint32_t Rd, uint32_t imm,
        uint32_t shift)
{
    LOG_INSTR("MOVZ W%d, #0x%x, LSL #%d\n", Rd, imm, shift);
    return (0x52 << 24) | (0x1 << 23) | ((shift/16) << 21) | (imm << 5) | Rd;
}

uint32_t ArmToArm64Assembler::A64_SMADDL(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t Ra)
{
    LOG_INSTR("SMADDL X%d, W%d, W%d, X%d\n", Rd, Rn, Rm, Ra);
    return ((0x9B << 24) | (0x1 << 21) | (Rm << 16) | (Ra << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_MADD_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t Ra)
{
    LOG_INSTR("MADD W%d, W%d, W%d, W%d\n", Rd, Rn, Rm, Ra);
    return ((0x1B << 24) | (Rm << 16) | (Ra << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_SBFM_W(uint32_t Rd, uint32_t Rn,
        uint32_t immr, uint32_t imms)
{
    LOG_INSTR("SBFM W%d, W%d, #%d, #%d\n", Rd, Rn, immr, imms);
    return ((0x13 << 24) | (immr << 16) | (imms << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_UBFM_W(uint32_t Rd, uint32_t Rn,
        uint32_t immr, uint32_t imms)
{
    LOG_INSTR("UBFM W%d, W%d, #%d, #%d\n", Rd, Rn, immr, imms);
    return ((0x53 << 24) | (immr << 16) | (imms << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_UBFM_X(uint32_t Rd, uint32_t Rn,
        uint32_t immr, uint32_t imms)
{
    LOG_INSTR("UBFM X%d, X%d, #%d, #%d\n", Rd, Rn, immr, imms);
    return ((0xD3 << 24) | (0x1 << 22) |
            (immr << 16) | (imms << 10) | (Rn << 5) | Rd);
}

uint32_t ArmToArm64Assembler::A64_EXTR_W(uint32_t Rd, uint32_t Rn,
        uint32_t Rm, uint32_t lsb)
{
    LOG_INSTR("EXTR W%d, W%d, W%d, #%d\n", Rd, Rn, Rm, lsb);
    return (0x13 << 24) | (0x1 << 23) | (Rm << 16) | (lsb << 10) | (Rn << 5) | Rd;
}

} // namespace android