/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "assembler_arm32.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +000028bool Arm32Assembler::ShifterOperandCanHoldArm32(uint32_t immediate, ShifterOperand* shifter_op) {
29 // Avoid the more expensive test for frequent small immediate values.
30 if (immediate < (1 << kImmed8Bits)) {
31 shifter_op->type_ = ShifterOperand::kImmediate;
32 shifter_op->is_rotate_ = true;
33 shifter_op->rotate_ = 0;
34 shifter_op->immed_ = immediate;
35 return true;
36 }
37 // Note that immediate must be unsigned for the test to work correctly.
38 for (int rot = 0; rot < 16; rot++) {
39 uint32_t imm8 = (immediate << 2*rot) | (immediate >> (32 - 2*rot));
40 if (imm8 < (1 << kImmed8Bits)) {
41 shifter_op->type_ = ShifterOperand::kImmediate;
42 shifter_op->is_rotate_ = true;
43 shifter_op->rotate_ = rot;
44 shifter_op->immed_ = imm8;
45 return true;
46 }
47 }
48 return false;
49}
50
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +010051bool Arm32Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
52 ShifterOperand shifter_op;
53 return ShifterOperandCanHoldArm32(immediate, &shifter_op);
54}
55
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +000056bool Arm32Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
57 Register rn ATTRIBUTE_UNUSED,
58 Opcode opcode ATTRIBUTE_UNUSED,
59 uint32_t immediate,
60 ShifterOperand* shifter_op) {
61 return ShifterOperandCanHoldArm32(immediate, shifter_op);
62}
63
Dave Allison65fcc2c2014-04-28 13:45:27 -070064void Arm32Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010065 Condition cond, SetCc set_cc) {
66 EmitType01(cond, so.type(), AND, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070067}
68
69
70void Arm32Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010071 Condition cond, SetCc set_cc) {
72 EmitType01(cond, so.type(), EOR, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070073}
74
75
76void Arm32Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010077 Condition cond, SetCc set_cc) {
78 EmitType01(cond, so.type(), SUB, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070079}
80
81void Arm32Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010082 Condition cond, SetCc set_cc) {
83 EmitType01(cond, so.type(), RSB, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070084}
85
Dave Allison65fcc2c2014-04-28 13:45:27 -070086void Arm32Assembler::add(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010087 Condition cond, SetCc set_cc) {
88 EmitType01(cond, so.type(), ADD, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070089}
90
91
92void Arm32Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010093 Condition cond, SetCc set_cc) {
94 EmitType01(cond, so.type(), ADC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070095}
96
97
98void Arm32Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010099 Condition cond, SetCc set_cc) {
100 EmitType01(cond, so.type(), SBC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700101}
102
103
104void Arm32Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100105 Condition cond, SetCc set_cc) {
106 EmitType01(cond, so.type(), RSC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700107}
108
109
110void Arm32Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
111 CHECK_NE(rn, PC); // Reserve tst pc instruction for exception handler marker.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100112 EmitType01(cond, so.type(), TST, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700113}
114
115
116void Arm32Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
117 CHECK_NE(rn, PC); // Reserve teq pc instruction for exception handler marker.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100118 EmitType01(cond, so.type(), TEQ, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700119}
120
121
122void Arm32Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100123 EmitType01(cond, so.type(), CMP, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700124}
125
126
127void Arm32Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100128 EmitType01(cond, so.type(), CMN, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700129}
130
131
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100132void Arm32Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
133 Condition cond, SetCc set_cc) {
134 EmitType01(cond, so.type(), ORR, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700135}
136
137
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100138void Arm32Assembler::mov(Register rd, const ShifterOperand& so,
139 Condition cond, SetCc set_cc) {
140 EmitType01(cond, so.type(), MOV, set_cc, R0, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700141}
142
143
144void Arm32Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100145 Condition cond, SetCc set_cc) {
146 EmitType01(cond, so.type(), BIC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700147}
148
149
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100150void Arm32Assembler::mvn(Register rd, const ShifterOperand& so,
151 Condition cond, SetCc set_cc) {
152 EmitType01(cond, so.type(), MVN, set_cc, R0, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700153}
154
155
156void Arm32Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
157 // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
158 EmitMulOp(cond, 0, R0, rd, rn, rm);
159}
160
161
162void Arm32Assembler::mla(Register rd, Register rn, Register rm, Register ra,
163 Condition cond) {
164 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
165 EmitMulOp(cond, B21, ra, rd, rn, rm);
166}
167
168
169void Arm32Assembler::mls(Register rd, Register rn, Register rm, Register ra,
170 Condition cond) {
171 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
172 EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
173}
174
175
Zheng Xuc6667102015-05-15 16:08:45 +0800176void Arm32Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
177 Register rm, Condition cond) {
178 // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
179 EmitMulOp(cond, B23 | B22, rd_lo, rd_hi, rn, rm);
180}
181
182
Dave Allison65fcc2c2014-04-28 13:45:27 -0700183void Arm32Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
184 Register rm, Condition cond) {
185 // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
186 EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
187}
188
189
190void Arm32Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
191 CHECK_NE(rd, kNoRegister);
192 CHECK_NE(rn, kNoRegister);
193 CHECK_NE(rm, kNoRegister);
194 CHECK_NE(cond, kNoCondition);
195 int32_t encoding = B26 | B25 | B24 | B20 |
196 B15 | B14 | B13 | B12 |
197 (static_cast<int32_t>(cond) << kConditionShift) |
198 (static_cast<int32_t>(rn) << 0) |
199 (static_cast<int32_t>(rd) << 16) |
200 (static_cast<int32_t>(rm) << 8) |
201 B4;
202 Emit(encoding);
203}
204
205
206void Arm32Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
207 CHECK_NE(rd, kNoRegister);
208 CHECK_NE(rn, kNoRegister);
209 CHECK_NE(rm, kNoRegister);
210 CHECK_NE(cond, kNoCondition);
211 int32_t encoding = B26 | B25 | B24 | B21 | B20 |
212 B15 | B14 | B13 | B12 |
213 (static_cast<int32_t>(cond) << kConditionShift) |
214 (static_cast<int32_t>(rn) << 0) |
215 (static_cast<int32_t>(rd) << 16) |
216 (static_cast<int32_t>(rm) << 8) |
217 B4;
218 Emit(encoding);
219}
220
221
Roland Levillain51d3fc42014-11-13 14:11:42 +0000222void Arm32Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
223 CHECK_NE(rd, kNoRegister);
224 CHECK_NE(rn, kNoRegister);
225 CHECK_NE(cond, kNoCondition);
226 CHECK_LE(lsb, 31U);
227 CHECK(1U <= width && width <= 32U) << width;
228 uint32_t widthminus1 = width - 1;
229
230 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
231 B26 | B25 | B24 | B23 | B21 |
232 (widthminus1 << 16) |
233 (static_cast<uint32_t>(rd) << 12) |
234 (lsb << 7) |
235 B6 | B4 |
236 static_cast<uint32_t>(rn);
237 Emit(encoding);
238}
239
240
Roland Levillain981e4542014-11-14 11:47:14 +0000241void Arm32Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
242 CHECK_NE(rd, kNoRegister);
243 CHECK_NE(rn, kNoRegister);
244 CHECK_NE(cond, kNoCondition);
245 CHECK_LE(lsb, 31U);
246 CHECK(1U <= width && width <= 32U) << width;
247 uint32_t widthminus1 = width - 1;
248
249 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
250 B26 | B25 | B24 | B23 | B22 | B21 |
251 (widthminus1 << 16) |
252 (static_cast<uint32_t>(rd) << 12) |
253 (lsb << 7) |
254 B6 | B4 |
255 static_cast<uint32_t>(rn);
256 Emit(encoding);
257}
258
259
Dave Allison65fcc2c2014-04-28 13:45:27 -0700260void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {
261 EmitMemOp(cond, true, false, rd, ad);
262}
263
264
265void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {
266 EmitMemOp(cond, false, false, rd, ad);
267}
268
269
270void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
271 EmitMemOp(cond, true, true, rd, ad);
272}
273
274
275void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {
276 EmitMemOp(cond, false, true, rd, ad);
277}
278
279
280void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
281 EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
282}
283
284
285void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {
286 EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
287}
288
289
290void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
291 EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
292}
293
294
295void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
296 EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
297}
298
299
300void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
301 CHECK_EQ(rd % 2, 0);
302 EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
303}
304
305
306void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {
307 CHECK_EQ(rd % 2, 0);
308 EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
309}
310
311
312void Arm32Assembler::ldm(BlockAddressMode am,
313 Register base,
314 RegList regs,
315 Condition cond) {
316 EmitMultiMemOp(cond, am, true, base, regs);
317}
318
319
320void Arm32Assembler::stm(BlockAddressMode am,
321 Register base,
322 RegList regs,
323 Condition cond) {
324 EmitMultiMemOp(cond, am, false, base, regs);
325}
326
327
328void Arm32Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
329 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
330}
331
332
333void Arm32Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
334 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
335}
336
337
338bool Arm32Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
339 uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
340 if (((imm32 & ((1 << 19) - 1)) == 0) &&
341 ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
342 (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
343 uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
344 ((imm32 >> 19) & ((1 << 6) -1));
345 EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
346 sd, S0, S0);
347 return true;
348 }
349 return false;
350}
351
352
353bool Arm32Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
354 uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
355 if (((imm64 & ((1LL << 48) - 1)) == 0) &&
356 ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
357 (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
358 uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
359 ((imm64 >> 48) & ((1 << 6) -1));
360 EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
361 dd, D0, D0);
362 return true;
363 }
364 return false;
365}
366
367
368void Arm32Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
369 Condition cond) {
370 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
371}
372
373
374void Arm32Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
375 Condition cond) {
376 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
377}
378
379
380void Arm32Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
381 Condition cond) {
382 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
383}
384
385
386void Arm32Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
387 Condition cond) {
388 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
389}
390
391
392void Arm32Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
393 Condition cond) {
394 EmitVFPsss(cond, B21, sd, sn, sm);
395}
396
397
398void Arm32Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
399 Condition cond) {
400 EmitVFPddd(cond, B21, dd, dn, dm);
401}
402
403
404void Arm32Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
405 Condition cond) {
406 EmitVFPsss(cond, 0, sd, sn, sm);
407}
408
409
410void Arm32Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
411 Condition cond) {
412 EmitVFPddd(cond, 0, dd, dn, dm);
413}
414
415
416void Arm32Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
417 Condition cond) {
418 EmitVFPsss(cond, B6, sd, sn, sm);
419}
420
421
422void Arm32Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
423 Condition cond) {
424 EmitVFPddd(cond, B6, dd, dn, dm);
425}
426
427
428void Arm32Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
429 Condition cond) {
430 EmitVFPsss(cond, B23, sd, sn, sm);
431}
432
433
434void Arm32Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
435 Condition cond) {
436 EmitVFPddd(cond, B23, dd, dn, dm);
437}
438
439
440void Arm32Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
441 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
442}
443
444
445void Arm32Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
446 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
447}
448
449
450void Arm32Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
451 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
452}
453
454
455void Arm32Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
456 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
457}
458
459
460void Arm32Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
461 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
462}
463
464void Arm32Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
465 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
466}
467
468
469void Arm32Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
470 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
471}
472
473
474void Arm32Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
475 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
476}
477
478
479void Arm32Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
480 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
481}
482
483
484void Arm32Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
485 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
486}
487
488
489void Arm32Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
490 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
491}
492
493
494void Arm32Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
495 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
496}
497
498
499void Arm32Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
500 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
501}
502
503
504void Arm32Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
505 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
506}
507
508
509void Arm32Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
510 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
511}
512
513
514void Arm32Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
515 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
516}
517
518
519void Arm32Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
520 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
521}
522
523
524void Arm32Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
525 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
526}
527
528
529void Arm32Assembler::vcmpsz(SRegister sd, Condition cond) {
530 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
531}
532
533
534void Arm32Assembler::vcmpdz(DRegister dd, Condition cond) {
535 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
536}
537
538void Arm32Assembler::b(Label* label, Condition cond) {
539 EmitBranch(cond, label, false);
540}
541
542
543void Arm32Assembler::bl(Label* label, Condition cond) {
544 EmitBranch(cond, label, true);
545}
546
547
548void Arm32Assembler::MarkExceptionHandler(Label* label) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100549 EmitType01(AL, 1, TST, kCcSet, PC, R0, ShifterOperand(0));
Dave Allison65fcc2c2014-04-28 13:45:27 -0700550 Label l;
551 b(&l);
552 EmitBranch(AL, label, false);
553 Bind(&l);
554}
555
556
557void Arm32Assembler::Emit(int32_t value) {
558 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
559 buffer_.Emit<int32_t>(value);
560}
561
562
563void Arm32Assembler::EmitType01(Condition cond,
564 int type,
565 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100566 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -0700567 Register rn,
568 Register rd,
569 const ShifterOperand& so) {
570 CHECK_NE(rd, kNoRegister);
571 CHECK_NE(cond, kNoCondition);
572 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
573 type << kTypeShift |
574 static_cast<int32_t>(opcode) << kOpcodeShift |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100575 (set_cc == kCcSet ? 1 : 0) << kSShift |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700576 static_cast<int32_t>(rn) << kRnShift |
577 static_cast<int32_t>(rd) << kRdShift |
578 so.encodingArm();
579 Emit(encoding);
580}
581
582
583void Arm32Assembler::EmitType5(Condition cond, int offset, bool link) {
584 CHECK_NE(cond, kNoCondition);
585 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
586 5 << kTypeShift |
587 (link ? 1 : 0) << kLinkShift;
588 Emit(Arm32Assembler::EncodeBranchOffset(offset, encoding));
589}
590
591
592void Arm32Assembler::EmitMemOp(Condition cond,
Dave Allison45fdb932014-06-25 12:37:10 -0700593 bool load,
594 bool byte,
595 Register rd,
596 const Address& ad) {
Dave Allison65fcc2c2014-04-28 13:45:27 -0700597 CHECK_NE(rd, kNoRegister);
598 CHECK_NE(cond, kNoCondition);
599 const Address& addr = static_cast<const Address&>(ad);
600
Dave Allison45fdb932014-06-25 12:37:10 -0700601 int32_t encoding = 0;
602 if (!ad.IsImmediate() && ad.GetRegisterOffset() == PC) {
603 // PC relative LDR(literal)
604 int32_t offset = ad.GetOffset();
605 int32_t u = B23;
606 if (offset < 0) {
607 offset = -offset;
608 u = 0;
609 }
610 CHECK_LT(offset, (1 << 12));
611 encoding = (static_cast<int32_t>(cond) << kConditionShift) |
612 B26 | B24 | u | B20 |
613 (load ? L : 0) |
614 (byte ? B : 0) |
615 (static_cast<int32_t>(rd) << kRdShift) |
616 0xf << 16 |
617 (offset & 0xfff);
618
619 } else {
620 encoding = (static_cast<int32_t>(cond) << kConditionShift) |
621 B26 |
622 (load ? L : 0) |
623 (byte ? B : 0) |
624 (static_cast<int32_t>(rd) << kRdShift) |
625 addr.encodingArm();
626 }
Dave Allison65fcc2c2014-04-28 13:45:27 -0700627 Emit(encoding);
628}
629
630
631void Arm32Assembler::EmitMemOpAddressMode3(Condition cond,
632 int32_t mode,
633 Register rd,
634 const Address& ad) {
635 CHECK_NE(rd, kNoRegister);
636 CHECK_NE(cond, kNoCondition);
637 const Address& addr = static_cast<const Address&>(ad);
638 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
639 B22 |
640 mode |
641 (static_cast<int32_t>(rd) << kRdShift) |
642 addr.encoding3();
643 Emit(encoding);
644}
645
646
647void Arm32Assembler::EmitMultiMemOp(Condition cond,
648 BlockAddressMode am,
649 bool load,
650 Register base,
651 RegList regs) {
652 CHECK_NE(base, kNoRegister);
653 CHECK_NE(cond, kNoCondition);
654 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
655 B27 |
656 am |
657 (load ? L : 0) |
658 (static_cast<int32_t>(base) << kRnShift) |
659 regs;
660 Emit(encoding);
661}
662
663
664void Arm32Assembler::EmitShiftImmediate(Condition cond,
665 Shift opcode,
666 Register rd,
667 Register rm,
668 const ShifterOperand& so) {
669 CHECK_NE(cond, kNoCondition);
670 CHECK(so.IsImmediate());
671 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
672 static_cast<int32_t>(MOV) << kOpcodeShift |
673 static_cast<int32_t>(rd) << kRdShift |
674 so.encodingArm() << kShiftImmShift |
675 static_cast<int32_t>(opcode) << kShiftShift |
676 static_cast<int32_t>(rm);
677 Emit(encoding);
678}
679
680
681void Arm32Assembler::EmitShiftRegister(Condition cond,
682 Shift opcode,
683 Register rd,
684 Register rm,
685 const ShifterOperand& so) {
686 CHECK_NE(cond, kNoCondition);
687 CHECK(so.IsRegister());
688 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
689 static_cast<int32_t>(MOV) << kOpcodeShift |
690 static_cast<int32_t>(rd) << kRdShift |
691 so.encodingArm() << kShiftRegisterShift |
692 static_cast<int32_t>(opcode) << kShiftShift |
693 B4 |
694 static_cast<int32_t>(rm);
695 Emit(encoding);
696}
697
698
699void Arm32Assembler::EmitBranch(Condition cond, Label* label, bool link) {
700 if (label->IsBound()) {
701 EmitType5(cond, label->Position() - buffer_.Size(), link);
702 } else {
703 int position = buffer_.Size();
704 // Use the offset field of the branch instruction for linking the sites.
705 EmitType5(cond, label->position_, link);
706 label->LinkTo(position);
707 }
708}
709
710
711void Arm32Assembler::clz(Register rd, Register rm, Condition cond) {
712 CHECK_NE(rd, kNoRegister);
713 CHECK_NE(rm, kNoRegister);
714 CHECK_NE(cond, kNoCondition);
715 CHECK_NE(rd, PC);
716 CHECK_NE(rm, PC);
717 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
718 B24 | B22 | B21 | (0xf << 16) |
719 (static_cast<int32_t>(rd) << kRdShift) |
720 (0xf << 8) | B4 | static_cast<int32_t>(rm);
721 Emit(encoding);
722}
723
724
725void Arm32Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
726 CHECK_NE(cond, kNoCondition);
727 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
728 B25 | B24 | ((imm16 >> 12) << 16) |
729 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
730 Emit(encoding);
731}
732
733
734void Arm32Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
735 CHECK_NE(cond, kNoCondition);
736 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
737 B25 | B24 | B22 | ((imm16 >> 12) << 16) |
738 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
739 Emit(encoding);
740}
741
742
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100743void Arm32Assembler::rbit(Register rd, Register rm, Condition cond) {
744 CHECK_NE(rd, kNoRegister);
745 CHECK_NE(rm, kNoRegister);
746 CHECK_NE(cond, kNoCondition);
747 CHECK_NE(rd, PC);
748 CHECK_NE(rm, PC);
749 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
750 B26 | B25 | B23 | B22 | B21 | B20 | (0xf << 16) |
751 (static_cast<int32_t>(rd) << kRdShift) |
752 (0xf << 8) | B5 | B4 | static_cast<int32_t>(rm);
753 Emit(encoding);
754}
755
756
Dave Allison65fcc2c2014-04-28 13:45:27 -0700757void Arm32Assembler::EmitMulOp(Condition cond, int32_t opcode,
758 Register rd, Register rn,
759 Register rm, Register rs) {
760 CHECK_NE(rd, kNoRegister);
761 CHECK_NE(rn, kNoRegister);
762 CHECK_NE(rm, kNoRegister);
763 CHECK_NE(rs, kNoRegister);
764 CHECK_NE(cond, kNoCondition);
765 int32_t encoding = opcode |
766 (static_cast<int32_t>(cond) << kConditionShift) |
767 (static_cast<int32_t>(rn) << kRnShift) |
768 (static_cast<int32_t>(rd) << kRdShift) |
769 (static_cast<int32_t>(rs) << kRsShift) |
770 B7 | B4 |
771 (static_cast<int32_t>(rm) << kRmShift);
772 Emit(encoding);
773}
774
Calin Juravle52c48962014-12-16 17:02:57 +0000775
Dave Allison65fcc2c2014-04-28 13:45:27 -0700776void Arm32Assembler::ldrex(Register rt, Register rn, Condition cond) {
777 CHECK_NE(rn, kNoRegister);
778 CHECK_NE(rt, kNoRegister);
779 CHECK_NE(cond, kNoCondition);
780 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
781 B24 |
782 B23 |
783 L |
784 (static_cast<int32_t>(rn) << kLdExRnShift) |
785 (static_cast<int32_t>(rt) << kLdExRtShift) |
786 B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
787 Emit(encoding);
788}
789
790
Calin Juravle52c48962014-12-16 17:02:57 +0000791void Arm32Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
792 CHECK_NE(rn, kNoRegister);
793 CHECK_NE(rt, kNoRegister);
794 CHECK_NE(rt2, kNoRegister);
795 CHECK_NE(rt, R14);
796 CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
797 CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
798 CHECK_NE(cond, kNoCondition);
799
800 int32_t encoding =
801 (static_cast<uint32_t>(cond) << kConditionShift) |
802 B24 | B23 | B21 | B20 |
803 static_cast<uint32_t>(rn) << 16 |
804 static_cast<uint32_t>(rt) << 12 |
805 B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
806 Emit(encoding);
807}
808
809
Dave Allison65fcc2c2014-04-28 13:45:27 -0700810void Arm32Assembler::strex(Register rd,
811 Register rt,
812 Register rn,
813 Condition cond) {
814 CHECK_NE(rn, kNoRegister);
815 CHECK_NE(rd, kNoRegister);
816 CHECK_NE(rt, kNoRegister);
817 CHECK_NE(cond, kNoCondition);
818 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
819 B24 |
820 B23 |
821 (static_cast<int32_t>(rn) << kStrExRnShift) |
822 (static_cast<int32_t>(rd) << kStrExRdShift) |
823 B11 | B10 | B9 | B8 | B7 | B4 |
824 (static_cast<int32_t>(rt) << kStrExRtShift);
825 Emit(encoding);
826}
827
Calin Juravle52c48962014-12-16 17:02:57 +0000828void Arm32Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
829 CHECK_NE(rd, kNoRegister);
830 CHECK_NE(rn, kNoRegister);
831 CHECK_NE(rt, kNoRegister);
832 CHECK_NE(rt2, kNoRegister);
833 CHECK_NE(rt, R14);
834 CHECK_NE(rd, rt);
835 CHECK_NE(rd, rt2);
836 CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
837 CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
838 CHECK_NE(cond, kNoCondition);
839
840 int32_t encoding =
841 (static_cast<uint32_t>(cond) << kConditionShift) |
842 B24 | B23 | B21 |
843 static_cast<uint32_t>(rn) << 16 |
844 static_cast<uint32_t>(rd) << 12 |
845 B11 | B10 | B9 | B8 | B7 | B4 |
846 static_cast<uint32_t>(rt);
847 Emit(encoding);
848}
849
Dave Allison65fcc2c2014-04-28 13:45:27 -0700850
851void Arm32Assembler::clrex(Condition cond) {
852 CHECK_EQ(cond, AL); // This cannot be conditional on ARM.
853 int32_t encoding = (kSpecialCondition << kConditionShift) |
854 B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
855 Emit(encoding);
856}
857
858
859void Arm32Assembler::nop(Condition cond) {
860 CHECK_NE(cond, kNoCondition);
861 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
862 B25 | B24 | B21 | (0xf << 12);
863 Emit(encoding);
864}
865
866
867void Arm32Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
868 CHECK_NE(sn, kNoSRegister);
869 CHECK_NE(rt, kNoRegister);
870 CHECK_NE(rt, SP);
871 CHECK_NE(rt, PC);
872 CHECK_NE(cond, kNoCondition);
873 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
874 B27 | B26 | B25 |
875 ((static_cast<int32_t>(sn) >> 1)*B16) |
876 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
877 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
878 Emit(encoding);
879}
880
881
882void Arm32Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
883 CHECK_NE(sn, kNoSRegister);
884 CHECK_NE(rt, kNoRegister);
885 CHECK_NE(rt, SP);
886 CHECK_NE(rt, PC);
887 CHECK_NE(cond, kNoCondition);
888 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
889 B27 | B26 | B25 | B20 |
890 ((static_cast<int32_t>(sn) >> 1)*B16) |
891 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
892 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
893 Emit(encoding);
894}
895
896
897void Arm32Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
898 Condition cond) {
899 CHECK_NE(sm, kNoSRegister);
900 CHECK_NE(sm, S31);
901 CHECK_NE(rt, kNoRegister);
902 CHECK_NE(rt, SP);
903 CHECK_NE(rt, PC);
904 CHECK_NE(rt2, kNoRegister);
905 CHECK_NE(rt2, SP);
906 CHECK_NE(rt2, PC);
907 CHECK_NE(cond, kNoCondition);
908 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
909 B27 | B26 | B22 |
910 (static_cast<int32_t>(rt2)*B16) |
911 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
912 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
913 (static_cast<int32_t>(sm) >> 1);
914 Emit(encoding);
915}
916
917
918void Arm32Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
919 Condition cond) {
920 CHECK_NE(sm, kNoSRegister);
921 CHECK_NE(sm, S31);
922 CHECK_NE(rt, kNoRegister);
923 CHECK_NE(rt, SP);
924 CHECK_NE(rt, PC);
925 CHECK_NE(rt2, kNoRegister);
926 CHECK_NE(rt2, SP);
927 CHECK_NE(rt2, PC);
928 CHECK_NE(rt, rt2);
929 CHECK_NE(cond, kNoCondition);
930 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
931 B27 | B26 | B22 | B20 |
932 (static_cast<int32_t>(rt2)*B16) |
933 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
934 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
935 (static_cast<int32_t>(sm) >> 1);
936 Emit(encoding);
937}
938
939
// VMOV dm, rt, rt2: copy two core registers into one double-precision
// register (B8 marks the 64-bit form).
void Arm32Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(cond, kNoCondition);
  // High bit of the D-register number goes to bit 5 (M); the low four bits
  // form the Vm field in bits 3..0.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
958
959
// VMOV rt, rt2, dm: copy one double-precision register into two core
// registers (B20 selects the FP-to-core direction).
void Arm32Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Writing the same core register twice is unpredictable.
  CHECK_NE(cond, kNoCondition);
  // High bit of the D-register number goes to bit 5 (M); the low four bits
  // form the Vm field in bits 3..0.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
979
980
981void Arm32Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
982 const Address& addr = static_cast<const Address&>(ad);
983 CHECK_NE(sd, kNoSRegister);
984 CHECK_NE(cond, kNoCondition);
985 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
986 B27 | B26 | B24 | B20 |
987 ((static_cast<int32_t>(sd) & 1)*B22) |
988 ((static_cast<int32_t>(sd) >> 1)*B12) |
989 B11 | B9 | addr.vencoding();
990 Emit(encoding);
991}
992
993
994void Arm32Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
995 const Address& addr = static_cast<const Address&>(ad);
996 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
997 CHECK_NE(sd, kNoSRegister);
998 CHECK_NE(cond, kNoCondition);
999 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1000 B27 | B26 | B24 |
1001 ((static_cast<int32_t>(sd) & 1)*B22) |
1002 ((static_cast<int32_t>(sd) >> 1)*B12) |
1003 B11 | B9 | addr.vencoding();
1004 Emit(encoding);
1005}
1006
1007
1008void Arm32Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
1009 const Address& addr = static_cast<const Address&>(ad);
1010 CHECK_NE(dd, kNoDRegister);
1011 CHECK_NE(cond, kNoCondition);
1012 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1013 B27 | B26 | B24 | B20 |
1014 ((static_cast<int32_t>(dd) >> 4)*B22) |
1015 ((static_cast<int32_t>(dd) & 0xf)*B12) |
1016 B11 | B9 | B8 | addr.vencoding();
1017 Emit(encoding);
1018}
1019
1020
1021void Arm32Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
1022 const Address& addr = static_cast<const Address&>(ad);
1023 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
1024 CHECK_NE(dd, kNoDRegister);
1025 CHECK_NE(cond, kNoCondition);
1026 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1027 B27 | B26 | B24 |
1028 ((static_cast<int32_t>(dd) >> 4)*B22) |
1029 ((static_cast<int32_t>(dd) & 0xf)*B12) |
1030 B11 | B9 | B8 | addr.vencoding();
1031 Emit(encoding);
1032}
1033
1034
1035void Arm32Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
1036 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
1037}
1038
1039
1040void Arm32Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
1041 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
1042}
1043
1044
1045void Arm32Assembler::vpops(SRegister reg, int nregs, Condition cond) {
1046 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
1047}
1048
1049
1050void Arm32Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
1051 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
1052}
1053
1054
// Shared encoder for vpush/vpop of |nregs| consecutive S- or D-registers
// starting at |reg| (store-multiple-decrement-before with writeback for push,
// load-multiple-increment-after with writeback for pop).
void Arm32Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  CHECK_GT(nregs, 0);
  uint32_t D;   // Single extra register-number bit, placed at bit 22.
  uint32_t Vd;  // Four-bit register field, placed at bits 15..12.
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // push selects the B24 (decrement-before) form; pop selects B23|B20
  // (increment-after load).  The count field is in 32-bit words, so it is
  // doubled for double-precision registers.
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     static_cast<int32_t>(cond) << kConditionShift |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit(encoding);
}
1079
1080
// Encoder for three-operand single-precision VFP data-processing
// instructions; |opcode| supplies the operation-specific bits.
void Arm32Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  // Each S-register number is split: the low bit supplies the D/N/M extra
  // bit and the upper four bits the corresponding 4-bit register field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
1097
1098
// Encoder for three-operand double-precision VFP data-processing
// instructions; |opcode| supplies the operation-specific bits, B8 the
// double-precision size.
void Arm32Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  // Each D-register number is split: the high bit supplies the D/N/M extra
  // bit and the low four bits the corresponding 4-bit register field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
1115
1116
// Encoder for VFP instructions with a single-precision destination and a
// double-precision source (e.g. conversions).
void Arm32Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                               SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  // sd: low bit -> D (bit 22), upper bits -> Vd field.
  // dm: high bit -> M (bit 5), low four bits -> Vm field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
1130
1131
// Encoder for VFP instructions with a double-precision destination and a
// single-precision source (e.g. conversions).
void Arm32Assembler::EmitVFPds(Condition cond, int32_t opcode,
                               DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  // dd: high bit -> D (bit 22), low four bits -> Vd field.
  // sm: low bit -> M (bit 5), upper bits -> Vm field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
1145
1146
1147void Arm32Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001148 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001149 CHECK_LE(shift_imm, 31u);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001150 mov(rd, ShifterOperand(rm, LSL, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001151}
1152
1153
1154void Arm32Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001155 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001156 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001157 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001158 mov(rd, ShifterOperand(rm, LSR, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001159}
1160
1161
1162void Arm32Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001163 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001164 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001165 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001166 mov(rd, ShifterOperand(rm, ASR, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001167}
1168
1169
1170void Arm32Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001171 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001172 CHECK(1u <= shift_imm && shift_imm <= 31u);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001173 mov(rd, ShifterOperand(rm, ROR, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001174}
1175
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001176void Arm32Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
1177 mov(rd, ShifterOperand(rm, ROR, 0), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001178}
1179
1180
Dave Allison45fdb932014-06-25 12:37:10 -07001181void Arm32Assembler::Lsl(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001182 Condition cond, SetCc set_cc) {
1183 mov(rd, ShifterOperand(rm, LSL, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001184}
1185
1186
1187void Arm32Assembler::Lsr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001188 Condition cond, SetCc set_cc) {
1189 mov(rd, ShifterOperand(rm, LSR, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001190}
1191
1192
1193void Arm32Assembler::Asr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001194 Condition cond, SetCc set_cc) {
1195 mov(rd, ShifterOperand(rm, ASR, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001196}
1197
1198
1199void Arm32Assembler::Ror(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001200 Condition cond, SetCc set_cc) {
1201 mov(rd, ShifterOperand(rm, ROR, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001202}
1203
void Arm32Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR
  // Copies the FPSCR condition flags into the APSR; Rt = PC (15) in the VMRS
  // encoding selects the APSR_nzcv destination.
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit(encoding);
}
1212
1213
// Supervisor call with a 24-bit immediate; always emitted unconditionally.
void Arm32Assembler::svc(uint32_t imm24) {
  CHECK(IsUint<24>(imm24)) << imm24;
  int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
  Emit(encoding);
}
1219
1220
// Software breakpoint.  The 16-bit immediate is split into a 12-bit field at
// bits 19..8 and a 4-bit field at bits 3..0.
void Arm32Assembler::bkpt(uint16_t imm16) {
  int32_t encoding = (AL << kConditionShift) | B24 | B21 |
                     ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
  Emit(encoding);
}
1226
1227
// Branch with link and exchange to the address in |rm|.
void Arm32Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // Bits 19..8 are all-ones in this encoding; B5 distinguishes BLX from BX.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B5 | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
1236
1237
// Branch and exchange to the address in |rm|.
void Arm32Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // Same encoding as blx above but without B5.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
1246
1247
1248void Arm32Assembler::Push(Register rd, Condition cond) {
1249 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
1250}
1251
1252
1253void Arm32Assembler::Pop(Register rd, Condition cond) {
1254 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
1255}
1256
1257
// Push a register list: store-multiple decrement-before with SP writeback.
void Arm32Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
1261
1262
// Pop a register list: load-multiple increment-after with SP writeback.
void Arm32Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
1266
1267
1268void Arm32Assembler::Mov(Register rd, Register rm, Condition cond) {
1269 if (rd != rm) {
1270 mov(rd, ShifterOperand(rm), cond);
1271 }
1272}
1273
1274
// Binds |label| to the current buffer position and back-patches every branch
// linked to it.  Unresolved branches form a chain threaded through the code
// buffer: each linked branch's offset field holds the position of the
// previous reference instead of a real offset.
void Arm32Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());
  int bound_pc = buffer_.Size();
  while (label->IsLinked()) {
    int32_t position = label->Position();
    int32_t next = buffer_.Load<int32_t>(position);
    // Patch the branch at |position| with its real offset to the bind point.
    int32_t encoded = Arm32Assembler::EncodeBranchOffset(bound_pc - position, next);
    buffer_.Store<int32_t>(position, encoded);
    // Follow the chain to the previous branch referencing this label.
    label->position_ = Arm32Assembler::DecodeBranchOffset(next);
  }
  label->BindTo(bound_pc);
}
1287
1288
// Writes |offset| (byte offset relative to the branch instruction itself)
// into the signed word-offset field of |inst|, leaving all other bits of the
// instruction untouched.
int32_t Arm32Assembler::EncodeBranchOffset(int offset, int32_t inst) {
  // The offset is off by 8 due to the way the ARM CPUs read PC.
  offset -= 8;
  CHECK_ALIGNED(offset, 4);
  // The field holds a word count, so its width (popcount of the mask) bounds
  // the representable byte offset.
  CHECK(IsInt(POPCOUNT(kBranchOffsetMask), offset)) << offset;

  // Properly preserve only the bits supported in the instruction.
  offset >>= 2;
  offset &= kBranchOffsetMask;
  return (inst & ~kBranchOffsetMask) | offset;
}
1300
1301
// Inverse of EncodeBranchOffset: recovers the byte offset relative to the
// branch instruction.  The << 8 then arithmetic >> 6 sign-extends the 24-bit
// field and multiplies by 4 in one step.
int Arm32Assembler::DecodeBranchOffset(int32_t inst) {
  // Sign-extend, left-shift by 2, then add 8.
  return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
}
1306
1307
// Position adjustment is not implemented for the ARM32 back end; aborts.
uint32_t Arm32Assembler::GetAdjustedPosition(uint32_t old_position ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1312
// Literal pools are not implemented for the ARM32 back end; aborts.
Literal* Arm32Assembler::NewLiteral(size_t size ATTRIBUTE_UNUSED,
                                    const uint8_t* data ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1318
// Literal loads are not implemented for the ARM32 back end; aborts.
void Arm32Assembler::LoadLiteral(Register rt ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1324
// Literal loads (register pair) are not implemented for ARM32; aborts.
void Arm32Assembler::LoadLiteral(Register rt ATTRIBUTE_UNUSED, Register rt2 ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1330
// Literal loads (single-precision) are not implemented for ARM32; aborts.
void Arm32Assembler::LoadLiteral(SRegister sd ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1336
// Literal loads (double-precision) are not implemented for ARM32; aborts.
void Arm32Assembler::LoadLiteral(DRegister dd ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1342
Dave Allison65fcc2c2014-04-28 13:45:27 -07001343
// Computes rd = rn + value, picking the shortest instruction sequence:
// a single add/sub with an immediate shifter operand when possible, otherwise
// materializing the constant (or its bitwise complement) in IP first.
// IP is clobbered whenever the constant does not fit a shifter operand.
void Arm32Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                 Condition cond, SetCc set_cc) {
  if (value == 0 && set_cc != kCcSet) {
    // Nothing to add and no flags requested: a plain move (or nothing at
    // all when rd == rn) suffices.
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond, set_cc);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    add(rd, rn, shifter_op, cond, set_cc);
  } else if (ShifterOperandCanHoldArm32(-value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond, set_cc);
  } else {
    // The constant must be built in IP, so the source must not be IP.
    CHECK(rn != IP);
    if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
      // MVN materializes the complement; flags are kept so only the final
      // add/sub reflects set_cc.
      mvn(IP, shifter_op, cond, kCcKeep);
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else if (ShifterOperandCanHoldArm32(~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond, kCcKeep);
      sub(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else {
      // General case: build the full 32-bit constant with movw/movt.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    }
  }
}
1378
1379
Dave Allison65fcc2c2014-04-28 13:45:27 -07001380void Arm32Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
1381 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00001382 if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001383 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00001384 } else if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001385 mvn(rd, shifter_op, cond);
1386 } else {
1387 movw(rd, Low16Bits(value), cond);
1388 uint16_t value_high = High16Bits(value);
1389 if (value_high != 0) {
1390 movt(rd, value_high, cond);
1391 }
1392 }
1393}
1394
1395
1396// Implementation note: this method must emit at most one instruction when
1397// Address::CanHoldLoadOffsetArm.
// Loads |reg| from [base, #offset] with the access width given by |type|.
// If the offset does not fit the addressing mode, the effective address is
// first computed into IP (clobbering it) and a zero-offset access is used.
void Arm32Assembler::LoadFromOffset(LoadOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(type, offset)) {
    // IP becomes the new base, so the original base must not be IP.
    CHECK(base != IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
1435
1436
1437// Implementation note: this method must emit at most one instruction when
1438// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
// Loads a single-precision register from [base, #offset], spilling the
// effective address into IP (clobbering it) when the offset does not fit
// the VLDR addressing mode.
void Arm32Assembler::LoadSFromOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadSWord, offset)) {
    CHECK_NE(base, IP);  // IP becomes the new base.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
1453
1454
1455// Implementation note: this method must emit at most one instruction when
1456// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
// Loads a double-precision register from [base, #offset], spilling the
// effective address into IP (clobbering it) when the offset does not fit
// the VLDR addressing mode.
void Arm32Assembler::LoadDFromOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadDWord, offset)) {
    CHECK_NE(base, IP);  // IP becomes the new base.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
1471
1472
1473// Implementation note: this method must emit at most one instruction when
1474// Address::CanHoldStoreOffsetArm.
// Stores |reg| to [base, #offset] with the access width given by |type|.
// If the offset does not fit the addressing mode, the effective address is
// first computed into IP (clobbering it) and a zero-offset access is used.
void Arm32Assembler::StoreToOffset(StoreOperandType type,
                                   Register reg,
                                   Register base,
                                   int32_t offset,
                                   Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(type, offset)) {
    // IP is used to build the address, so it can be neither the stored
    // value nor the original base.
    CHECK(reg != IP);
    CHECK(base != IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
1507
1508
1509// Implementation note: this method must emit at most one instruction when
1510// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreToOffset.
// Stores a single-precision register to [base, #offset], spilling the
// effective address into IP (clobbering it) when the offset does not fit
// the VSTR addressing mode.
void Arm32Assembler::StoreSToOffset(SRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreSWord, offset)) {
    CHECK_NE(base, IP);  // IP becomes the new base.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
1525
1526
1527// Implementation note: this method must emit at most one instruction when
1528// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreSToOffset.
// Stores a double-precision register to [base, #offset], spilling the
// effective address into IP (clobbering it) when the offset does not fit
// the VSTR addressing mode.
void Arm32Assembler::StoreDToOffset(DRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreDWord, offset)) {
    CHECK_NE(base, IP);  // IP becomes the new base.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
1543
1544
// Emits a full-system data memory barrier.  The scratch register is unused
// on ARM32, but the shared assembler interface pins it to R12.
void Arm32Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
1549
1550
// Data memory barrier; |flavor| supplies the option field in bits 3..0 of
// the fixed DMB base opcode.
void Arm32Assembler::dmb(DmbOptions flavor) {
  int32_t encoding = 0xf57ff05f;  // dmb
  Emit(encoding | flavor);
}
1555
1556
// CBZ exists only in Thumb; callers must use CompareAndBranchIfZero instead.
void Arm32Assembler::cbz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "cbz is not supported on ARM32";
}
1560
1561
// CBNZ exists only in Thumb; callers must use CompareAndBranchIfNonZero.
void Arm32Assembler::cbnz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "cbnz is not supported on ARM32";
}
1565
1566
// ARM32 has no CBZ, so synthesize it as CMP r, #0 followed by BEQ.
void Arm32Assembler::CompareAndBranchIfZero(Register r, Label* label) {
  cmp(r, ShifterOperand(0));
  b(label, EQ);
}
1571
1572
// ARM32 has no CBNZ, so synthesize it as CMP r, #0 followed by BNE.
void Arm32Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
  cmp(r, ShifterOperand(0));
  b(label, NE);
}
1577
1578
1579} // namespace arm
1580} // namespace art