/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm32.h"

#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"
#include "utils.h"

namespace art {
namespace arm {
27
28void Arm32Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
29 Condition cond) {
30 EmitType01(cond, so.type(), AND, 0, rn, rd, so);
31}
32
33
34void Arm32Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
35 Condition cond) {
36 EmitType01(cond, so.type(), EOR, 0, rn, rd, so);
37}
38
39
40void Arm32Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
41 Condition cond) {
42 EmitType01(cond, so.type(), SUB, 0, rn, rd, so);
43}
44
45void Arm32Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
46 Condition cond) {
47 EmitType01(cond, so.type(), RSB, 0, rn, rd, so);
48}
49
50void Arm32Assembler::rsbs(Register rd, Register rn, const ShifterOperand& so,
51 Condition cond) {
52 EmitType01(cond, so.type(), RSB, 1, rn, rd, so);
53}
54
55
56void Arm32Assembler::add(Register rd, Register rn, const ShifterOperand& so,
57 Condition cond) {
58 EmitType01(cond, so.type(), ADD, 0, rn, rd, so);
59}
60
61
62void Arm32Assembler::adds(Register rd, Register rn, const ShifterOperand& so,
63 Condition cond) {
64 EmitType01(cond, so.type(), ADD, 1, rn, rd, so);
65}
66
67
68void Arm32Assembler::subs(Register rd, Register rn, const ShifterOperand& so,
69 Condition cond) {
70 EmitType01(cond, so.type(), SUB, 1, rn, rd, so);
71}
72
73
74void Arm32Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
75 Condition cond) {
76 EmitType01(cond, so.type(), ADC, 0, rn, rd, so);
77}
78
79
80void Arm32Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
81 Condition cond) {
82 EmitType01(cond, so.type(), SBC, 0, rn, rd, so);
83}
84
85
86void Arm32Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
87 Condition cond) {
88 EmitType01(cond, so.type(), RSC, 0, rn, rd, so);
89}
90
91
92void Arm32Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
93 CHECK_NE(rn, PC); // Reserve tst pc instruction for exception handler marker.
94 EmitType01(cond, so.type(), TST, 1, rn, R0, so);
95}
96
97
98void Arm32Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
99 CHECK_NE(rn, PC); // Reserve teq pc instruction for exception handler marker.
100 EmitType01(cond, so.type(), TEQ, 1, rn, R0, so);
101}
102
103
104void Arm32Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
105 EmitType01(cond, so.type(), CMP, 1, rn, R0, so);
106}
107
108
109void Arm32Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
110 EmitType01(cond, so.type(), CMN, 1, rn, R0, so);
111}
112
113
114void Arm32Assembler::orr(Register rd, Register rn,
115 const ShifterOperand& so, Condition cond) {
116 EmitType01(cond, so.type(), ORR, 0, rn, rd, so);
117}
118
119
120void Arm32Assembler::orrs(Register rd, Register rn,
121 const ShifterOperand& so, Condition cond) {
122 EmitType01(cond, so.type(), ORR, 1, rn, rd, so);
123}
124
125
126void Arm32Assembler::mov(Register rd, const ShifterOperand& so, Condition cond) {
127 EmitType01(cond, so.type(), MOV, 0, R0, rd, so);
128}
129
130
131void Arm32Assembler::movs(Register rd, const ShifterOperand& so, Condition cond) {
132 EmitType01(cond, so.type(), MOV, 1, R0, rd, so);
133}
134
135
136void Arm32Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
137 Condition cond) {
138 EmitType01(cond, so.type(), BIC, 0, rn, rd, so);
139}
140
141
142void Arm32Assembler::mvn(Register rd, const ShifterOperand& so, Condition cond) {
143 EmitType01(cond, so.type(), MVN, 0, R0, rd, so);
144}
145
146
147void Arm32Assembler::mvns(Register rd, const ShifterOperand& so, Condition cond) {
148 EmitType01(cond, so.type(), MVN, 1, R0, rd, so);
149}
150
151
152void Arm32Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
153 // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
154 EmitMulOp(cond, 0, R0, rd, rn, rm);
155}
156
157
158void Arm32Assembler::mla(Register rd, Register rn, Register rm, Register ra,
159 Condition cond) {
160 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
161 EmitMulOp(cond, B21, ra, rd, rn, rm);
162}
163
164
165void Arm32Assembler::mls(Register rd, Register rn, Register rm, Register ra,
166 Condition cond) {
167 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
168 EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
169}
170
171
172void Arm32Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
173 Register rm, Condition cond) {
174 // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
175 EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
176}
177
178
179void Arm32Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
180 CHECK_NE(rd, kNoRegister);
181 CHECK_NE(rn, kNoRegister);
182 CHECK_NE(rm, kNoRegister);
183 CHECK_NE(cond, kNoCondition);
184 int32_t encoding = B26 | B25 | B24 | B20 |
185 B15 | B14 | B13 | B12 |
186 (static_cast<int32_t>(cond) << kConditionShift) |
187 (static_cast<int32_t>(rn) << 0) |
188 (static_cast<int32_t>(rd) << 16) |
189 (static_cast<int32_t>(rm) << 8) |
190 B4;
191 Emit(encoding);
192}
193
194
// UDIV: rd := rn / rm (unsigned). Same layout as sdiv() plus B21 to select
// the unsigned variant. NOTE(review): requires a core with the integer
// divide extension — callers are assumed to have checked support.
void Arm32Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // Field placement: Rn at [3:0], Rd at [19:16], Rm at [11:8]; bits
  // [15:12] (the Ra field) are forced to 0b1111.
  int32_t encoding = B26 | B25 | B24 | B21 | B20 |
      B15 | B14 | B13 | B12 |
      (static_cast<int32_t>(cond) << kConditionShift) |
      (static_cast<int32_t>(rn) << 0) |
      (static_cast<int32_t>(rd) << 16) |
      (static_cast<int32_t>(rm) << 8) |
      B4;
  Emit(encoding);
}
209
210
Roland Levillain51d3fc42014-11-13 14:11:42 +0000211void Arm32Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
212 CHECK_NE(rd, kNoRegister);
213 CHECK_NE(rn, kNoRegister);
214 CHECK_NE(cond, kNoCondition);
215 CHECK_LE(lsb, 31U);
216 CHECK(1U <= width && width <= 32U) << width;
217 uint32_t widthminus1 = width - 1;
218
219 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
220 B26 | B25 | B24 | B23 | B21 |
221 (widthminus1 << 16) |
222 (static_cast<uint32_t>(rd) << 12) |
223 (lsb << 7) |
224 B6 | B4 |
225 static_cast<uint32_t>(rn);
226 Emit(encoding);
227}
228
229
Roland Levillain981e4542014-11-14 11:47:14 +0000230void Arm32Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
231 CHECK_NE(rd, kNoRegister);
232 CHECK_NE(rn, kNoRegister);
233 CHECK_NE(cond, kNoCondition);
234 CHECK_LE(lsb, 31U);
235 CHECK(1U <= width && width <= 32U) << width;
236 uint32_t widthminus1 = width - 1;
237
238 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
239 B26 | B25 | B24 | B23 | B22 | B21 |
240 (widthminus1 << 16) |
241 (static_cast<uint32_t>(rd) << 12) |
242 (lsb << 7) |
243 B6 | B4 |
244 static_cast<uint32_t>(rn);
245 Emit(encoding);
246}
247
248
Dave Allison65fcc2c2014-04-28 13:45:27 -0700249void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {
250 EmitMemOp(cond, true, false, rd, ad);
251}
252
253
254void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {
255 EmitMemOp(cond, false, false, rd, ad);
256}
257
258
259void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
260 EmitMemOp(cond, true, true, rd, ad);
261}
262
263
264void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {
265 EmitMemOp(cond, false, true, rd, ad);
266}
267
268
269void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
270 EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
271}
272
273
274void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {
275 EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
276}
277
278
279void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
280 EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
281}
282
283
284void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
285 EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
286}
287
288
289void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
290 CHECK_EQ(rd % 2, 0);
291 EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
292}
293
294
295void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {
296 CHECK_EQ(rd % 2, 0);
297 EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
298}
299
300
301void Arm32Assembler::ldm(BlockAddressMode am,
302 Register base,
303 RegList regs,
304 Condition cond) {
305 EmitMultiMemOp(cond, am, true, base, regs);
306}
307
308
309void Arm32Assembler::stm(BlockAddressMode am,
310 Register base,
311 RegList regs,
312 Condition cond) {
313 EmitMultiMemOp(cond, am, false, base, regs);
314}
315
316
317void Arm32Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
318 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
319}
320
321
322void Arm32Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
323 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
324}
325
326
// Tries to materialize 's_imm' into sd with a single VMOV (immediate).
// Only values expressible as a VFP 8-bit modified immediate qualify;
// returns false (emitting nothing) when the value cannot be encoded.
bool Arm32Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  // Encodable iff the low 19 fraction bits are zero and bits [30:25] are
  // either 0b100000 or 0b011111 (i.e. the exponent fits the imm8 pattern).
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // imm8 = sign(bit 31) : bit 29 : bits [24:19].
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    // imm8 is split across the instruction: high nibble at [19:16] (via
    // B16 scaling), low nibble at [3:0].
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
340
341
// Tries to materialize 'd_imm' into dd with a single VMOV (immediate).
// Double-precision analogue of vmovs(SRegister, float, Condition);
// returns false (emitting nothing) when the value cannot be encoded.
bool Arm32Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  // Encodable iff the low 48 fraction bits are zero and bits [62:54] are
  // either 0b100000000 or 0b011111111 (exponent fits the imm8 pattern).
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // imm8 = sign(bit 63) : bit 61 : bits [53:48].
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    // imm8 split as in vmovs(); B8 selects the double-precision form.
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
355
356
357void Arm32Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
358 Condition cond) {
359 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
360}
361
362
363void Arm32Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
364 Condition cond) {
365 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
366}
367
368
369void Arm32Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
370 Condition cond) {
371 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
372}
373
374
375void Arm32Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
376 Condition cond) {
377 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
378}
379
380
381void Arm32Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
382 Condition cond) {
383 EmitVFPsss(cond, B21, sd, sn, sm);
384}
385
386
387void Arm32Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
388 Condition cond) {
389 EmitVFPddd(cond, B21, dd, dn, dm);
390}
391
392
393void Arm32Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
394 Condition cond) {
395 EmitVFPsss(cond, 0, sd, sn, sm);
396}
397
398
399void Arm32Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
400 Condition cond) {
401 EmitVFPddd(cond, 0, dd, dn, dm);
402}
403
404
405void Arm32Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
406 Condition cond) {
407 EmitVFPsss(cond, B6, sd, sn, sm);
408}
409
410
411void Arm32Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
412 Condition cond) {
413 EmitVFPddd(cond, B6, dd, dn, dm);
414}
415
416
417void Arm32Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
418 Condition cond) {
419 EmitVFPsss(cond, B23, sd, sn, sm);
420}
421
422
423void Arm32Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
424 Condition cond) {
425 EmitVFPddd(cond, B23, dd, dn, dm);
426}
427
428
429void Arm32Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
430 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
431}
432
433
434void Arm32Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
435 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
436}
437
438
439void Arm32Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
440 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
441}
442
443
444void Arm32Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
445 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
446}
447
448
449void Arm32Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
450 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
451}
452
453void Arm32Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
454 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
455}
456
457
458void Arm32Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
459 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
460}
461
462
463void Arm32Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
464 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
465}
466
467
468void Arm32Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
469 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
470}
471
472
473void Arm32Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
474 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
475}
476
477
478void Arm32Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
479 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
480}
481
482
483void Arm32Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
484 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
485}
486
487
488void Arm32Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
489 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
490}
491
492
493void Arm32Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
494 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
495}
496
497
498void Arm32Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
499 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
500}
501
502
503void Arm32Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
504 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
505}
506
507
508void Arm32Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
509 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
510}
511
512
513void Arm32Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
514 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
515}
516
517
518void Arm32Assembler::vcmpsz(SRegister sd, Condition cond) {
519 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
520}
521
522
523void Arm32Assembler::vcmpdz(DRegister dd, Condition cond) {
524 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
525}
526
527void Arm32Assembler::b(Label* label, Condition cond) {
528 EmitBranch(cond, label, false);
529}
530
531
532void Arm32Assembler::bl(Label* label, Condition cond) {
533 EmitBranch(cond, label, true);
534}
535
536
// Marks an exception-handler site. Emits the reserved 'tst pc' marker word
// (tst()/teq() forbid rn == PC precisely to keep this pattern unique),
// followed by a branch word that records 'label', and jumps over that word
// so it is never executed as fall-through code.
void Arm32Assembler::MarkExceptionHandler(Label* label) {
  EmitType01(AL, 1, TST, 1, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);                        // Skip the embedded branch word below.
  EmitBranch(AL, label, false); // Carries the handler label's link/offset.
  Bind(&l);
}
544
545
// Appends one 32-bit instruction word to the assembler buffer, growing the
// buffer first if necessary.
void Arm32Assembler::Emit(int32_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int32_t>(value);
}
550
551
// Emits an ARM data-processing ("type 0/1") instruction:
//   cond | type | opcode | S | Rn | Rd | shifter_operand.
// 'set_cc' is the S bit (1 = update condition flags). Instructions without
// an Rn or Rd operand (MOV, TST, ...) pass R0 as the unused field.
void Arm32Assembler::EmitType01(Condition cond,
                                int type,
                                Opcode opcode,
                                int set_cc,
                                Register rn,
                                Register rd,
                                const ShifterOperand& so) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     type << kTypeShift |
                     static_cast<int32_t>(opcode) << kOpcodeShift |
                     set_cc << kSShift |
                     static_cast<int32_t>(rn) << kRnShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm();
  Emit(encoding);
}
570
571
// Emits a type-5 (branch / branch-with-link) instruction. 'offset' is a
// byte offset that EncodeBranchOffset() packs into the instruction's
// immediate field; 'link' selects BL over B.
void Arm32Assembler::EmitType5(Condition cond, int offset, bool link) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     5 << kTypeShift |
                     (link ? 1 : 0) << kLinkShift;
  Emit(Arm32Assembler::EncodeBranchOffset(offset, encoding));
}
579
580
// Emits a single-register load/store (LDR/STR/LDRB/STRB). 'load' selects
// load vs store and 'byte' selects the byte-sized variant. A register
// offset of PC is treated as a PC-relative literal access and gets a
// dedicated encoding path.
void Arm32Assembler::EmitMemOp(Condition cond,
                               bool load,
                               bool byte,
                               Register rd,
                               const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  const Address& addr = static_cast<const Address&>(ad);

  int32_t encoding = 0;
  if (!ad.IsImmediate() && ad.GetRegisterOffset() == PC) {
    // PC relative LDR(literal): the offset is stored as a 12-bit magnitude
    // with the sign carried by the U bit (B23).
    int32_t offset = ad.GetOffset();
    int32_t u = B23;
    if (offset < 0) {
      offset = -offset;
      u = 0;
    }
    CHECK_LT(offset, (1 << 12));  // Must fit the 12-bit immediate field.
    encoding = (static_cast<int32_t>(cond) << kConditionShift) |
        B26 | B24 | u | B20 |
        (load ? L : 0) |
        (byte ? B : 0) |
        (static_cast<int32_t>(rd) << kRdShift) |
        0xf << 16 |             // Base register field forced to PC (0b1111).
        (offset & 0xfff);

  } else {
    // Regular addressing mode: the Address object supplies base/offset bits.
    encoding = (static_cast<int32_t>(cond) << kConditionShift) |
        B26 |
        (load ? L : 0) |
        (byte ? B : 0) |
        (static_cast<int32_t>(rd) << kRdShift) |
        addr.encodingArm();
  }
  Emit(encoding);
}
618
619
620void Arm32Assembler::EmitMemOpAddressMode3(Condition cond,
621 int32_t mode,
622 Register rd,
623 const Address& ad) {
624 CHECK_NE(rd, kNoRegister);
625 CHECK_NE(cond, kNoCondition);
626 const Address& addr = static_cast<const Address&>(ad);
627 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
628 B22 |
629 mode |
630 (static_cast<int32_t>(rd) << kRdShift) |
631 addr.encoding3();
632 Emit(encoding);
633}
634
635
636void Arm32Assembler::EmitMultiMemOp(Condition cond,
637 BlockAddressMode am,
638 bool load,
639 Register base,
640 RegList regs) {
641 CHECK_NE(base, kNoRegister);
642 CHECK_NE(cond, kNoCondition);
643 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
644 B27 |
645 am |
646 (load ? L : 0) |
647 (static_cast<int32_t>(base) << kRnShift) |
648 regs;
649 Emit(encoding);
650}
651
652
653void Arm32Assembler::EmitShiftImmediate(Condition cond,
654 Shift opcode,
655 Register rd,
656 Register rm,
657 const ShifterOperand& so) {
658 CHECK_NE(cond, kNoCondition);
659 CHECK(so.IsImmediate());
660 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
661 static_cast<int32_t>(MOV) << kOpcodeShift |
662 static_cast<int32_t>(rd) << kRdShift |
663 so.encodingArm() << kShiftImmShift |
664 static_cast<int32_t>(opcode) << kShiftShift |
665 static_cast<int32_t>(rm);
666 Emit(encoding);
667}
668
669
670void Arm32Assembler::EmitShiftRegister(Condition cond,
671 Shift opcode,
672 Register rd,
673 Register rm,
674 const ShifterOperand& so) {
675 CHECK_NE(cond, kNoCondition);
676 CHECK(so.IsRegister());
677 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
678 static_cast<int32_t>(MOV) << kOpcodeShift |
679 static_cast<int32_t>(rd) << kRdShift |
680 so.encodingArm() << kShiftRegisterShift |
681 static_cast<int32_t>(opcode) << kShiftShift |
682 B4 |
683 static_cast<int32_t>(rm);
684 Emit(encoding);
685}
686
687
// Emits a (conditional) branch to 'label'; 'link' selects BL over B.
// Unbound labels are handled by threading a linked list of branch sites
// through the instructions' own offset fields, to be patched when the
// label is eventually bound.
void Arm32Assembler::EmitBranch(Condition cond, Label* label, bool link) {
  if (label->IsBound()) {
    // Target known: emit the offset relative to the current buffer end.
    EmitType5(cond, label->Position() - buffer_.Size(), link);
  } else {
    int position = buffer_.Size();
    // Use the offset field of the branch instruction for linking the sites:
    // the emitted word stores the previous head of the chain, and the label
    // is updated to point at this site.
    EmitType5(cond, label->position_, link);
    label->LinkTo(position);
  }
}
698
699
700void Arm32Assembler::clz(Register rd, Register rm, Condition cond) {
701 CHECK_NE(rd, kNoRegister);
702 CHECK_NE(rm, kNoRegister);
703 CHECK_NE(cond, kNoCondition);
704 CHECK_NE(rd, PC);
705 CHECK_NE(rm, PC);
706 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
707 B24 | B22 | B21 | (0xf << 16) |
708 (static_cast<int32_t>(rd) << kRdShift) |
709 (0xf << 8) | B4 | static_cast<int32_t>(rm);
710 Emit(encoding);
711}
712
713
714void Arm32Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
715 CHECK_NE(cond, kNoCondition);
716 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
717 B25 | B24 | ((imm16 >> 12) << 16) |
718 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
719 Emit(encoding);
720}
721
722
723void Arm32Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
724 CHECK_NE(cond, kNoCondition);
725 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
726 B25 | B24 | B22 | ((imm16 >> 12) << 16) |
727 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
728 Emit(encoding);
729}
730
731
// Emits a multiply-family instruction. 'opcode' supplies the variant bits
// (0 for MUL, B21 for MLA, B23 for UMULL, ...). The parameters here are the
// raw hardware fields — callers (mul(), mla(), umull(), ...) pass their
// operands already permuted into the rd/rn/rm/rs slots; see the comments
// at those call sites.
void Arm32Assembler::EmitMulOp(Condition cond, int32_t opcode,
                               Register rd, Register rn,
                               Register rm, Register rs) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(rs, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // B7 | B4 are the fixed bits marking the multiply encoding space.
  int32_t encoding = opcode |
      (static_cast<int32_t>(cond) << kConditionShift) |
      (static_cast<int32_t>(rn) << kRnShift) |
      (static_cast<int32_t>(rd) << kRdShift) |
      (static_cast<int32_t>(rs) << kRsShift) |
      B7 | B4 |
      (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
749
// LDREX: rt := [rn], marking the address for exclusive access (paired with
// a later strex() on the same location).
void Arm32Assembler::ldrex(Register rt, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // The trailing B11..B0 run is the fixed 0xf9f pattern of this encoding.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 |
                     B23 |
                     L |
                     (static_cast<int32_t>(rn) << kLdExRnShift) |
                     (static_cast<int32_t>(rt) << kLdExRtShift) |
                     B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
  Emit(encoding);
}
763
764
// STREX: store rt to [rn] if the exclusive monitor (set by ldrex()) is
// still valid; rd receives the success status of the store.
void Arm32Assembler::strex(Register rd,
                           Register rt,
                           Register rn,
                           Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // B11..B7 | B4 form the fixed 0xf9 pattern of this encoding.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 |
                     B23 |
                     (static_cast<int32_t>(rn) << kStrExRnShift) |
                     (static_cast<int32_t>(rd) << kStrExRdShift) |
                     B11 | B10 | B9 | B8 | B7 | B4 |
                     (static_cast<int32_t>(rt) << kStrExRtShift);
  Emit(encoding);
}
782
783
784void Arm32Assembler::clrex(Condition cond) {
785 CHECK_EQ(cond, AL); // This cannot be conditional on ARM.
786 int32_t encoding = (kSpecialCondition << kConditionShift) |
787 B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
788 Emit(encoding);
789}
790
791
792void Arm32Assembler::nop(Condition cond) {
793 CHECK_NE(cond, kNoCondition);
794 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
795 B25 | B24 | B21 | (0xf << 12);
796 Emit(encoding);
797}
798
799
800void Arm32Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
801 CHECK_NE(sn, kNoSRegister);
802 CHECK_NE(rt, kNoRegister);
803 CHECK_NE(rt, SP);
804 CHECK_NE(rt, PC);
805 CHECK_NE(cond, kNoCondition);
806 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
807 B27 | B26 | B25 |
808 ((static_cast<int32_t>(sn) >> 1)*B16) |
809 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
810 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
811 Emit(encoding);
812}
813
814
815void Arm32Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
816 CHECK_NE(sn, kNoSRegister);
817 CHECK_NE(rt, kNoRegister);
818 CHECK_NE(rt, SP);
819 CHECK_NE(rt, PC);
820 CHECK_NE(cond, kNoCondition);
821 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
822 B27 | B26 | B25 | B20 |
823 ((static_cast<int32_t>(sn) >> 1)*B16) |
824 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
825 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
826 Emit(encoding);
827}
828
829
830void Arm32Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
831 Condition cond) {
832 CHECK_NE(sm, kNoSRegister);
833 CHECK_NE(sm, S31);
834 CHECK_NE(rt, kNoRegister);
835 CHECK_NE(rt, SP);
836 CHECK_NE(rt, PC);
837 CHECK_NE(rt2, kNoRegister);
838 CHECK_NE(rt2, SP);
839 CHECK_NE(rt2, PC);
840 CHECK_NE(cond, kNoCondition);
841 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
842 B27 | B26 | B22 |
843 (static_cast<int32_t>(rt2)*B16) |
844 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
845 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
846 (static_cast<int32_t>(sm) >> 1);
847 Emit(encoding);
848}
849
850
851void Arm32Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
852 Condition cond) {
853 CHECK_NE(sm, kNoSRegister);
854 CHECK_NE(sm, S31);
855 CHECK_NE(rt, kNoRegister);
856 CHECK_NE(rt, SP);
857 CHECK_NE(rt, PC);
858 CHECK_NE(rt2, kNoRegister);
859 CHECK_NE(rt2, SP);
860 CHECK_NE(rt2, PC);
861 CHECK_NE(rt, rt2);
862 CHECK_NE(cond, kNoCondition);
863 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
864 B27 | B26 | B22 | B20 |
865 (static_cast<int32_t>(rt2)*B16) |
866 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
867 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
868 (static_cast<int32_t>(sm) >> 1);
869 Emit(encoding);
870}
871
872
873void Arm32Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
874 Condition cond) {
875 CHECK_NE(dm, kNoDRegister);
876 CHECK_NE(rt, kNoRegister);
877 CHECK_NE(rt, SP);
878 CHECK_NE(rt, PC);
879 CHECK_NE(rt2, kNoRegister);
880 CHECK_NE(rt2, SP);
881 CHECK_NE(rt2, PC);
882 CHECK_NE(cond, kNoCondition);
883 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
884 B27 | B26 | B22 |
885 (static_cast<int32_t>(rt2)*B16) |
886 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
887 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
888 (static_cast<int32_t>(dm) & 0xf);
889 Emit(encoding);
890}
891
892
893void Arm32Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
894 Condition cond) {
895 CHECK_NE(dm, kNoDRegister);
896 CHECK_NE(rt, kNoRegister);
897 CHECK_NE(rt, SP);
898 CHECK_NE(rt, PC);
899 CHECK_NE(rt2, kNoRegister);
900 CHECK_NE(rt2, SP);
901 CHECK_NE(rt2, PC);
902 CHECK_NE(rt, rt2);
903 CHECK_NE(cond, kNoCondition);
904 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
905 B27 | B26 | B22 | B20 |
906 (static_cast<int32_t>(rt2)*B16) |
907 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
908 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
909 (static_cast<int32_t>(dm) & 0xf);
910 Emit(encoding);
911}
912
913
914void Arm32Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
915 const Address& addr = static_cast<const Address&>(ad);
916 CHECK_NE(sd, kNoSRegister);
917 CHECK_NE(cond, kNoCondition);
918 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
919 B27 | B26 | B24 | B20 |
920 ((static_cast<int32_t>(sd) & 1)*B22) |
921 ((static_cast<int32_t>(sd) >> 1)*B12) |
922 B11 | B9 | addr.vencoding();
923 Emit(encoding);
924}
925
926
927void Arm32Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
928 const Address& addr = static_cast<const Address&>(ad);
929 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
930 CHECK_NE(sd, kNoSRegister);
931 CHECK_NE(cond, kNoCondition);
932 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
933 B27 | B26 | B24 |
934 ((static_cast<int32_t>(sd) & 1)*B22) |
935 ((static_cast<int32_t>(sd) >> 1)*B12) |
936 B11 | B9 | addr.vencoding();
937 Emit(encoding);
938}
939
940
941void Arm32Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
942 const Address& addr = static_cast<const Address&>(ad);
943 CHECK_NE(dd, kNoDRegister);
944 CHECK_NE(cond, kNoCondition);
945 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
946 B27 | B26 | B24 | B20 |
947 ((static_cast<int32_t>(dd) >> 4)*B22) |
948 ((static_cast<int32_t>(dd) & 0xf)*B12) |
949 B11 | B9 | B8 | addr.vencoding();
950 Emit(encoding);
951}
952
953
954void Arm32Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
955 const Address& addr = static_cast<const Address&>(ad);
956 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
957 CHECK_NE(dd, kNoDRegister);
958 CHECK_NE(cond, kNoCondition);
959 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
960 B27 | B26 | B24 |
961 ((static_cast<int32_t>(dd) >> 4)*B22) |
962 ((static_cast<int32_t>(dd) & 0xf)*B12) |
963 B11 | B9 | B8 | addr.vencoding();
964 Emit(encoding);
965}
966
967
968void Arm32Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
969 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
970}
971
972
973void Arm32Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
974 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
975}
976
977
978void Arm32Assembler::vpops(SRegister reg, int nregs, Condition cond) {
979 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
980}
981
982
983void Arm32Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
984 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
985}
986
987
// Emits vpush/vpop of 'nregs' consecutive VFP registers starting at 'reg'.
// 'dbl' selects D registers (vs S registers); 'push' selects the
// decrement-before store form over the increment-after load form.
void Arm32Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  CHECK_GT(nregs, 0);
  uint32_t D;   // Single extension bit of the register number.
  uint32_t Vd;  // 4-bit register number field.
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                    B11 | B9 |
        (dbl ? B8 : 0) |
        (push ? B24 : (B23 | B20)) |  // Push: pre-decrement store; pop: post-increment load.
        static_cast<int32_t>(cond) << kConditionShift |
        nregs << (dbl ? 1 : 0) |      // imm8 counts words: doubles take two each.
        D << 22 |
        Vd << 12;
  Emit(encoding);
}
1012
1013
1014void Arm32Assembler::EmitVFPsss(Condition cond, int32_t opcode,
1015 SRegister sd, SRegister sn, SRegister sm) {
1016 CHECK_NE(sd, kNoSRegister);
1017 CHECK_NE(sn, kNoSRegister);
1018 CHECK_NE(sm, kNoSRegister);
1019 CHECK_NE(cond, kNoCondition);
1020 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1021 B27 | B26 | B25 | B11 | B9 | opcode |
1022 ((static_cast<int32_t>(sd) & 1)*B22) |
1023 ((static_cast<int32_t>(sn) >> 1)*B16) |
1024 ((static_cast<int32_t>(sd) >> 1)*B12) |
1025 ((static_cast<int32_t>(sn) & 1)*B7) |
1026 ((static_cast<int32_t>(sm) & 1)*B5) |
1027 (static_cast<int32_t>(sm) >> 1);
1028 Emit(encoding);
1029}
1030
1031
// Emits a three-operand double-precision VFP instruction (dd = dn <op> dm).
// |opcode| supplies the operation-specific bits OR-ed into the base encoding.
void Arm32Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  // Each D register number is split: its low four bits go into a 4-bit
  // field and its high bit (bit 4) into the corresponding D/N/M bit.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | B8 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dn) & 0xf)*B16) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(dn) >> 4)*B7) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
1048
1049
// Emits a two-operand VFP instruction with a single-precision destination
// and a double-precision source (sd = <op> dm), e.g. double-to-single ops.
void Arm32Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                               SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  // S register: low bit in D (bit 22), high bits in the Vd field.
  // D register: high bit in M (bit 5), low bits in the Vm field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
1063
1064
// Emits a two-operand VFP instruction with a double-precision destination
// and a single-precision source (dd = <op> sm), e.g. single-to-double ops.
void Arm32Assembler::EmitVFPds(Condition cond, int32_t opcode,
                               DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  // D register: high bit in D (bit 22), low bits in the Vd field.
  // S register: low bit in M (bit 5), high bits in the Vm field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(sm) & 1)*B5) |
      (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
1078
1079
1080void Arm32Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
Dave Allison45fdb932014-06-25 12:37:10 -07001081 bool setcc, Condition cond) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001082 CHECK_NE(shift_imm, 0u); // Do not use Lsl if no shift is wanted.
Dave Allison45fdb932014-06-25 12:37:10 -07001083 if (setcc) {
1084 movs(rd, ShifterOperand(rm, LSL, shift_imm), cond);
1085 } else {
1086 mov(rd, ShifterOperand(rm, LSL, shift_imm), cond);
1087 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001088}
1089
1090
1091void Arm32Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Dave Allison45fdb932014-06-25 12:37:10 -07001092 bool setcc, Condition cond) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001093 CHECK_NE(shift_imm, 0u); // Do not use Lsr if no shift is wanted.
1094 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07001095 if (setcc) {
1096 movs(rd, ShifterOperand(rm, LSR, shift_imm), cond);
1097 } else {
1098 mov(rd, ShifterOperand(rm, LSR, shift_imm), cond);
1099 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001100}
1101
1102
1103void Arm32Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Dave Allison45fdb932014-06-25 12:37:10 -07001104 bool setcc, Condition cond) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001105 CHECK_NE(shift_imm, 0u); // Do not use Asr if no shift is wanted.
1106 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07001107 if (setcc) {
1108 movs(rd, ShifterOperand(rm, ASR, shift_imm), cond);
1109 } else {
1110 mov(rd, ShifterOperand(rm, ASR, shift_imm), cond);
1111 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001112}
1113
1114
1115void Arm32Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
Dave Allison45fdb932014-06-25 12:37:10 -07001116 bool setcc, Condition cond) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001117 CHECK_NE(shift_imm, 0u); // Use Rrx instruction.
Dave Allison45fdb932014-06-25 12:37:10 -07001118 if (setcc) {
1119 movs(rd, ShifterOperand(rm, ROR, shift_imm), cond);
1120 } else {
1121 mov(rd, ShifterOperand(rm, ROR, shift_imm), cond);
1122 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001123}
1124
Dave Allison45fdb932014-06-25 12:37:10 -07001125void Arm32Assembler::Rrx(Register rd, Register rm, bool setcc, Condition cond) {
1126 if (setcc) {
1127 movs(rd, ShifterOperand(rm, ROR, 0), cond);
1128 } else {
1129 mov(rd, ShifterOperand(rm, ROR, 0), cond);
1130 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001131}
1132
1133
Dave Allison45fdb932014-06-25 12:37:10 -07001134void Arm32Assembler::Lsl(Register rd, Register rm, Register rn,
1135 bool setcc, Condition cond) {
1136 if (setcc) {
1137 movs(rd, ShifterOperand(rm, LSL, rn), cond);
1138 } else {
1139 mov(rd, ShifterOperand(rm, LSL, rn), cond);
1140 }
1141}
1142
1143
1144void Arm32Assembler::Lsr(Register rd, Register rm, Register rn,
1145 bool setcc, Condition cond) {
1146 if (setcc) {
1147 movs(rd, ShifterOperand(rm, LSR, rn), cond);
1148 } else {
1149 mov(rd, ShifterOperand(rm, LSR, rn), cond);
1150 }
1151}
1152
1153
1154void Arm32Assembler::Asr(Register rd, Register rm, Register rn,
1155 bool setcc, Condition cond) {
1156 if (setcc) {
1157 movs(rd, ShifterOperand(rm, ASR, rn), cond);
1158 } else {
1159 mov(rd, ShifterOperand(rm, ASR, rn), cond);
1160 }
1161}
1162
1163
1164void Arm32Assembler::Ror(Register rd, Register rm, Register rn,
1165 bool setcc, Condition cond) {
1166 if (setcc) {
1167 movs(rd, ShifterOperand(rm, ROR, rn), cond);
1168 } else {
1169 mov(rd, ShifterOperand(rm, ROR, rn), cond);
1170 }
1171}
1172
Dave Allison65fcc2c2014-04-28 13:45:27 -07001173void Arm32Assembler::vmstat(Condition cond) { // VMRS APSR_nzcv, FPSCR
1174 CHECK_NE(cond, kNoCondition);
1175 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1176 B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
1177 (static_cast<int32_t>(PC)*B12) |
1178 B11 | B9 | B4;
1179 Emit(encoding);
1180}
1181
1182
// Supervisor call (software interrupt) with a 24-bit immediate payload.
// Always emitted unconditionally (AL).
void Arm32Assembler::svc(uint32_t imm24) {
  CHECK(IsUint(24, imm24)) << imm24;
  int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
  Emit(encoding);
}
1188
1189
// Breakpoint instruction. The 16-bit immediate is split across the
// encoding: bits [15:4] land at bits [19:8] and bits [3:0] at bits [3:0].
void Arm32Assembler::bkpt(uint16_t imm16) {
  int32_t encoding = (AL << kConditionShift) | B24 | B21 |
      ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
  Emit(encoding);
}
1195
1196
// Branch with link and exchange to the address in |rm| (call through a
// register; may switch instruction set depending on the target address).
void Arm32Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // (0xfff << 8) fills the should-be-one bits of the BLX (register) encoding.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B24 | B21 | (0xfff << 8) | B5 | B4 |
      (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
1205
1206
// Branch and exchange to the address in |rm| (no link register update).
void Arm32Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // (0xfff << 8) fills the should-be-one bits of the BX (register) encoding;
  // differs from blx only in bit 5.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B24 | B21 | (0xfff << 8) | B4 |
      (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
1215
1216
1217void Arm32Assembler::Push(Register rd, Condition cond) {
1218 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
1219}
1220
1221
1222void Arm32Assembler::Pop(Register rd, Condition cond) {
1223 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
1224}
1225
1226
// Pushes all registers in |regs|: store-multiple, decrement-before, with
// SP writeback (equivalent to "push {...}").
void Arm32Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
1230
1231
// Pops all registers in |regs|: load-multiple, increment-after, with
// SP writeback (equivalent to "pop {...}").
void Arm32Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
1235
1236
1237void Arm32Assembler::Mov(Register rd, Register rm, Condition cond) {
1238 if (rd != rm) {
1239 mov(rd, ShifterOperand(rm), cond);
1240 }
1241}
1242
1243
// Binds |label| to the current buffer position and back-patches every
// branch previously linked to it. Unresolved branches form a chain: each
// linked instruction's offset field holds the position of the next link,
// unpacked via DecodeBranchOffset and replaced via EncodeBranchOffset.
void Arm32Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());
  int bound_pc = buffer_.Size();
  while (label->IsLinked()) {
    int32_t position = label->Position();
    // The instruction at |position| stores the next link in its offset field.
    int32_t next = buffer_.Load<int32_t>(position);
    int32_t encoded = Arm32Assembler::EncodeBranchOffset(bound_pc - position, next);
    buffer_.Store<int32_t>(position, encoded);  // Patch in the real offset.
    label->position_ = Arm32Assembler::DecodeBranchOffset(next);
  }
  label->BindTo(bound_pc);
}
1256
1257
// Packs byte offset |offset| (relative to the branch instruction) into the
// offset field of branch encoding |inst| and returns the updated encoding.
int32_t Arm32Assembler::EncodeBranchOffset(int offset, int32_t inst) {
  // The offset is off by 8 due to the way the ARM CPUs read PC.
  offset -= 8;
  CHECK_ALIGNED(offset, 4);
  // The word offset must fit in the instruction's offset field.
  CHECK(IsInt(POPCOUNT(kBranchOffsetMask), offset)) << offset;

  // Properly preserve only the bits supported in the instruction.
  offset >>= 2;  // Stored as a word offset.
  offset &= kBranchOffsetMask;
  return (inst & ~kBranchOffsetMask) | offset;
}
1269
1270
// Extracts the byte offset from branch encoding |inst|; inverse of
// EncodeBranchOffset.
int Arm32Assembler::DecodeBranchOffset(int32_t inst) {
  // Sign-extend, left-shift by 2, then add 8.
  // (<< 8 moves the field's top bit to bit 31; the arithmetic >> 6 then
  // sign-extends while leaving the value scaled by 4 — assumes
  // kBranchOffsetMask covers the low 24 bits; verify against its definition.)
  return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
}
1275
1276
// In-place form: rd = rd + value.
void Arm32Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}
1280
1281
1282void Arm32Assembler::AddConstant(Register rd, Register rn, int32_t value,
1283 Condition cond) {
1284 if (value == 0) {
1285 if (rd != rn) {
1286 mov(rd, ShifterOperand(rn), cond);
1287 }
1288 return;
1289 }
1290 // We prefer to select the shorter code sequence rather than selecting add for
1291 // positive values and sub for negatives ones, which would slightly improve
1292 // the readability of generated code for some constants.
1293 ShifterOperand shifter_op;
1294 if (ShifterOperand::CanHoldArm(value, &shifter_op)) {
1295 add(rd, rn, shifter_op, cond);
1296 } else if (ShifterOperand::CanHoldArm(-value, &shifter_op)) {
1297 sub(rd, rn, shifter_op, cond);
1298 } else {
1299 CHECK(rn != IP);
1300 if (ShifterOperand::CanHoldArm(~value, &shifter_op)) {
1301 mvn(IP, shifter_op, cond);
1302 add(rd, rn, ShifterOperand(IP), cond);
1303 } else if (ShifterOperand::CanHoldArm(~(-value), &shifter_op)) {
1304 mvn(IP, shifter_op, cond);
1305 sub(rd, rn, ShifterOperand(IP), cond);
1306 } else {
1307 movw(IP, Low16Bits(value), cond);
1308 uint16_t value_high = High16Bits(value);
1309 if (value_high != 0) {
1310 movt(IP, value_high, cond);
1311 }
1312 add(rd, rn, ShifterOperand(IP), cond);
1313 }
1314 }
1315}
1316
1317
1318void Arm32Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
1319 Condition cond) {
1320 ShifterOperand shifter_op;
1321 if (ShifterOperand::CanHoldArm(value, &shifter_op)) {
1322 adds(rd, rn, shifter_op, cond);
1323 } else if (ShifterOperand::CanHoldArm(-value, &shifter_op)) {
1324 subs(rd, rn, shifter_op, cond);
1325 } else {
1326 CHECK(rn != IP);
1327 if (ShifterOperand::CanHoldArm(~value, &shifter_op)) {
1328 mvn(IP, shifter_op, cond);
1329 adds(rd, rn, ShifterOperand(IP), cond);
1330 } else if (ShifterOperand::CanHoldArm(~(-value), &shifter_op)) {
1331 mvn(IP, shifter_op, cond);
1332 subs(rd, rn, ShifterOperand(IP), cond);
1333 } else {
1334 movw(IP, Low16Bits(value), cond);
1335 uint16_t value_high = High16Bits(value);
1336 if (value_high != 0) {
1337 movt(IP, value_high, cond);
1338 }
1339 adds(rd, rn, ShifterOperand(IP), cond);
1340 }
1341 }
1342}
1343
Dave Allison65fcc2c2014-04-28 13:45:27 -07001344void Arm32Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
1345 ShifterOperand shifter_op;
1346 if (ShifterOperand::CanHoldArm(value, &shifter_op)) {
1347 mov(rd, shifter_op, cond);
1348 } else if (ShifterOperand::CanHoldArm(~value, &shifter_op)) {
1349 mvn(rd, shifter_op, cond);
1350 } else {
1351 movw(rd, Low16Bits(value), cond);
1352 uint16_t value_high = High16Bits(value);
1353 if (value_high != 0) {
1354 movt(rd, value_high, cond);
1355 }
1356 }
1357}
1358
1359
1360// Implementation note: this method must emit at most one instruction when
1361// Address::CanHoldLoadOffsetArm.
1362void Arm32Assembler::LoadFromOffset(LoadOperandType type,
1363 Register reg,
1364 Register base,
1365 int32_t offset,
1366 Condition cond) {
1367 if (!Address::CanHoldLoadOffsetArm(type, offset)) {
1368 CHECK(base != IP);
1369 LoadImmediate(IP, offset, cond);
1370 add(IP, IP, ShifterOperand(base), cond);
1371 base = IP;
1372 offset = 0;
1373 }
1374 CHECK(Address::CanHoldLoadOffsetArm(type, offset));
1375 switch (type) {
1376 case kLoadSignedByte:
1377 ldrsb(reg, Address(base, offset), cond);
1378 break;
1379 case kLoadUnsignedByte:
1380 ldrb(reg, Address(base, offset), cond);
1381 break;
1382 case kLoadSignedHalfword:
1383 ldrsh(reg, Address(base, offset), cond);
1384 break;
1385 case kLoadUnsignedHalfword:
1386 ldrh(reg, Address(base, offset), cond);
1387 break;
1388 case kLoadWord:
1389 ldr(reg, Address(base, offset), cond);
1390 break;
1391 case kLoadWordPair:
1392 ldrd(reg, Address(base, offset), cond);
1393 break;
1394 default:
1395 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001396 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001397 }
1398}
1399
1400
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
//
// Loads a single-precision value from |base|+|offset| into |reg|. When the
// offset does not fit the vldrs addressing mode, it is first materialized
// in IP and folded into the base (clobbering IP).
void Arm32Assembler::LoadSFromOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadSWord, offset)) {
    CHECK_NE(base, IP);  // IP is used as scratch below.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
1417
1418
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
//
// Loads a double-precision value from |base|+|offset| into |reg|. When the
// offset does not fit the vldrd addressing mode, it is first materialized
// in IP and folded into the base (clobbering IP).
void Arm32Assembler::LoadDFromOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadDWord, offset)) {
    CHECK_NE(base, IP);  // IP is used as scratch below.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
1435
1436
1437// Implementation note: this method must emit at most one instruction when
1438// Address::CanHoldStoreOffsetArm.
1439void Arm32Assembler::StoreToOffset(StoreOperandType type,
1440 Register reg,
1441 Register base,
1442 int32_t offset,
1443 Condition cond) {
1444 if (!Address::CanHoldStoreOffsetArm(type, offset)) {
1445 CHECK(reg != IP);
1446 CHECK(base != IP);
1447 LoadImmediate(IP, offset, cond);
1448 add(IP, IP, ShifterOperand(base), cond);
1449 base = IP;
1450 offset = 0;
1451 }
1452 CHECK(Address::CanHoldStoreOffsetArm(type, offset));
1453 switch (type) {
1454 case kStoreByte:
1455 strb(reg, Address(base, offset), cond);
1456 break;
1457 case kStoreHalfword:
1458 strh(reg, Address(base, offset), cond);
1459 break;
1460 case kStoreWord:
1461 str(reg, Address(base, offset), cond);
1462 break;
1463 case kStoreWordPair:
1464 strd(reg, Address(base, offset), cond);
1465 break;
1466 default:
1467 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001468 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001469 }
1470}
1471
1472
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreToOffset.
//
// Stores a single-precision value |reg| to |base|+|offset|. When the offset
// does not fit the vstrs addressing mode, it is first materialized in IP
// and folded into the base (clobbering IP).
void Arm32Assembler::StoreSToOffset(SRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreSWord, offset)) {
    CHECK_NE(base, IP);  // IP is used as scratch below.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
1489
1490
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreSToOffset.
//
// Stores a double-precision value |reg| to |base|+|offset|. When the offset
// does not fit the vstrd addressing mode, it is first materialized in IP
// and folded into the base (clobbering IP).
void Arm32Assembler::StoreDToOffset(DRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreDWord, offset)) {
    CHECK_NE(base, IP);  // IP is used as scratch below.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
1507
1508
// Emits a full-system memory barrier. |mscratch| is only sanity-checked;
// dmb needs no scratch register on ARM32.
void Arm32Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
1513
1514
1515void Arm32Assembler::dmb(DmbOptions flavor) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001516 int32_t encoding = 0xf57ff05f; // dmb
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01001517 Emit(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001518}
1519
1520
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001521void Arm32Assembler::cbz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001522 LOG(FATAL) << "cbz is not supported on ARM32";
1523}
1524
1525
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001526void Arm32Assembler::cbnz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001527 LOG(FATAL) << "cbnz is not supported on ARM32";
1528}
1529
1530
1531void Arm32Assembler::CompareAndBranchIfZero(Register r, Label* label) {
1532 cmp(r, ShifterOperand(0));
1533 b(label, EQ);
1534}
1535
1536
1537void Arm32Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
1538 cmp(r, ShifterOperand(0));
1539 b(label, NE);
1540}
1541
1542
1543} // namespace arm
1544} // namespace art