/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_MAP_H_
#define ART_RUNTIME_STACK_MAP_H_

#include <algorithm>
#include <array>
#include <limits>
#include <map>
#include <ostream>
#include <vector>

#include "arch/instruction_set.h"
#include "base/bit_memory_region.h"
#include "base/bit_table.h"
#include "base/bit_utils.h"
#include "base/bit_vector.h"
#include "base/dchecked_vector.h"
#include "base/leb128.h"
#include "base/memory_region.h"
#include "dex/dex_file_types.h"
#include "dex_register_location.h"
#include "quick/quick_method_frame_info.h"

namespace art {

class OatQuickMethodHeader;
class VariableIndentationOutputStream;

// Size of a frame slot, in bytes.  This constant is a signed value,
// to please the compiler in arithmetic operations involving int32_t
// (signed) values.
static constexpr ssize_t kFrameSlotSize = 4;

// The delta compression of dex register maps means we need to scan the stackmaps backwards.
// We compress the data in such a way that there is an upper bound on the search distance.
// Max distance 0 means each stack map must be fully defined and no scanning back is allowed.
// If this value is changed, the oat file version should be incremented (for DCHECK to pass).
static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;
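
// A minimal sketch of the bounded backward scan (the real decoder is
// CodeInfo::DecodeDexRegisterMap; the helper below is hypothetical):
//
//   size_t start = (i >= kMaxDexRegisterMapSearchDistance)
//       ? i - kMaxDexRegisterMapSearchDistance  // Encoder guarantees this bound.
//       : 0;
//   for (size_t j = i + 1; j > start; --j) {
//     ApplyRegisterDeltasFrom(j - 1, /*out*/ map);  // Hypothetical helper.
//   }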

class ArtMethod;
class CodeInfo;
class Stats;

std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);

// Information on Dex register locations for a specific PC.
// Effectively just a convenience wrapper around a vector of DexRegisterLocation.
// If the size is small enough, it keeps the data on the stack.
// TODO: Replace this with a general-purpose "small-vector" implementation.
class DexRegisterMap {
 public:
  using iterator = DexRegisterLocation*;
  using const_iterator = const DexRegisterLocation*;

  // Create a map for the given number of registers and initialize them to the given value.
  DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
    if (count_ <= kSmallCount) {
      std::fill_n(regs_small_.begin(), count, value);
    } else {
      regs_large_.resize(count, value);
    }
  }

  DexRegisterLocation* data() {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }
  const DexRegisterLocation* data() const {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }

  iterator begin() { return data(); }
  iterator end() { return data() + count_; }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + count_; }
  size_t size() const { return count_; }
  bool empty() const { return count_ == 0; }

  DexRegisterLocation& operator[](size_t index) {
    DCHECK_LT(index, count_);
    return data()[index];
  }
  const DexRegisterLocation& operator[](size_t index) const {
    DCHECK_LT(index, count_);
    return data()[index];
  }

  size_t GetNumberOfLiveDexRegisters() const {
    return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  bool HasAnyLiveDexRegisters() const {
    return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  void Dump(VariableIndentationOutputStream* vios) const;

 private:
  // Store the data inline if the number of registers is small to avoid memory allocations.
  // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
  static constexpr size_t kSmallCount = 16;
  size_t count_;
  std::array<DexRegisterLocation, kSmallCount> regs_small_;
  dchecked_vector<DexRegisterLocation> regs_large_;
};
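
// Example usage (illustrative): a map whose size is within kSmallCount keeps its data in
// the inline array; a larger one falls back to the heap-allocated vector.
//
//   DexRegisterMap small_map(4, DexRegisterLocation::Invalid());    // Inline storage.
//   DexRegisterMap large_map(64, DexRegisterLocation::Invalid());   // Heap storage.
//   small_map[0] = DexRegisterLocation(DexRegisterLocation::Kind::kConstant, 42);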

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 */
class StackMap : public BitTableAccessor<8> {
 public:
  enum Kind {
    Default = -1,
    Catch = 0,
    OSR = 1,
    Debug = 2,
  };
  BIT_TABLE_HEADER(StackMap)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedNativePc)
  BIT_TABLE_COLUMN(2, DexPc)
  BIT_TABLE_COLUMN(3, RegisterMaskIndex)
  BIT_TABLE_COLUMN(4, StackMaskIndex)
  BIT_TABLE_COLUMN(5, InlineInfoIndex)
  BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
  BIT_TABLE_COLUMN(7, DexRegisterMapIndex)

  ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
    return UnpackNativePc(GetPackedNativePc(), instruction_set);
  }

  ALWAYS_INLINE bool HasInlineInfo() const {
    return HasInlineInfoIndex();
  }

  ALWAYS_INLINE bool HasDexRegisterMap() const {
    return HasDexRegisterMapIndex();
  }

  static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
    DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
    return native_pc / GetInstructionSetInstructionAlignment(isa);
  }

  static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
    uint32_t native_pc = packed_native_pc * GetInstructionSetInstructionAlignment(isa);
    DCHECK_EQ(native_pc / GetInstructionSetInstructionAlignment(isa), packed_native_pc);
    return native_pc;
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& code_info,
            uint32_t code_offset,
            InstructionSet instruction_set) const;
};
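
// Example (illustrative): with a 4-byte instruction alignment, as on arm64, a native pc
// of 0x40 packs to 0x10, and unpacking restores the original value.
//
//   uint32_t packed = StackMap::PackNativePc(0x40, InstructionSet::kArm64);      // == 0x10
//   uint32_t native = StackMap::UnpackNativePc(packed, InstructionSet::kArm64);  // == 0x40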

/**
 * Inline information for a specific PC.
 * The row referenced from the StackMap holds information at depth 0.
 * Following rows hold information for further depths.
 */
class InlineInfo : public BitTableAccessor<6> {
 public:
  BIT_TABLE_HEADER(InlineInfo)
  BIT_TABLE_COLUMN(0, IsLast)  // Determines if there are further rows for further depths.
  BIT_TABLE_COLUMN(1, DexPc)
  BIT_TABLE_COLUMN(2, MethodInfoIndex)
  BIT_TABLE_COLUMN(3, ArtMethodHi)  // High bits of ArtMethod*.
  BIT_TABLE_COLUMN(4, ArtMethodLo)  // Low bits of ArtMethod*.
  BIT_TABLE_COLUMN(5, NumberOfDexRegisters)  // Includes outer levels and the main method.

  static constexpr uint32_t kLast = -1;
  static constexpr uint32_t kMore = 0;

  bool EncodesArtMethod() const {
    return HasArtMethodLo();
  }

  ArtMethod* GetArtMethod() const {
    uint64_t lo = GetArtMethodLo();
    uint64_t hi = GetArtMethodHi();
    return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& info,
            const StackMap& stack_map) const;
};
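
// Illustrative sketch of the inverse of GetArtMethod() above: an encoder splits the
// 64-bit pointer across the two 32-bit columns.
//
//   uint64_t ptr = reinterpret_cast<uintptr_t>(method);   // `method` is hypothetical.
//   uint32_t lo = static_cast<uint32_t>(ptr);             // Stored as ArtMethodLo.
//   uint32_t hi = static_cast<uint32_t>(ptr >> 32);       // Stored as ArtMethodHi.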

class StackMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(StackMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMapInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMapInfo)
  BIT_TABLE_COLUMN(0, CatalogueIndex)
};

class DexRegisterInfo : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(DexRegisterInfo)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedValue)

  ALWAYS_INLINE DexRegisterLocation GetLocation() const {
    DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(GetKind());
    return DexRegisterLocation(kind, UnpackValue(kind, GetPackedValue()));
  }

  static uint32_t PackValue(DexRegisterLocation::Kind kind, uint32_t value) {
    uint32_t packed_value = value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      DCHECK(IsAligned<kFrameSlotSize>(packed_value));
      packed_value /= kFrameSlotSize;
    }
    return packed_value;
  }

  static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
    uint32_t value = packed_value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      value *= kFrameSlotSize;
    }
    return value;
  }
};
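
// Example (illustrative): only kInStack values are scaled; a stack location at byte
// offset 8 is stored as 8 / kFrameSlotSize == 2 and multiplied back on the way out.
//
//   uint32_t p = DexRegisterInfo::PackValue(DexRegisterLocation::Kind::kInStack, 8);    // == 2
//   uint32_t v = DexRegisterInfo::UnpackValue(DexRegisterLocation::Kind::kInStack, p);  // == 8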

// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
// therefore it is worth encoding the mask as value+shift.
class RegisterMask : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(RegisterMask)
  BIT_TABLE_COLUMN(0, Value)
  BIT_TABLE_COLUMN(1, Shift)

  ALWAYS_INLINE uint32_t GetMask() const {
    return GetValue() << GetShift();
  }
};
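
// Example (illustrative): a register mask of 0xF0 is stored as Value == 0xF and
// Shift == 4, so GetMask() reconstructs 0xF << 4 == 0xF0.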

// Method indices are not very dedup friendly.
// Separating them greatly improves dedup efficiency of the other tables.
class MethodInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(MethodInfo)
  BIT_TABLE_COLUMN(0, MethodIndex)
};

/**
 * Wrapper around all compiler information collected for a method.
 * See the Decode method at the end for the precise binary format.
 */
class CodeInfo {
 public:
  class Deduper {
   public:
    explicit Deduper(std::vector<uint8_t>* output) : writer_(output) {
      DCHECK_EQ(output->size(), 0u);
    }

    // Copy CodeInfo into output while de-duplicating the internal bit tables.
    // Returns the byte offset of the copied CodeInfo within the output.
    size_t Dedupe(const uint8_t* code_info);

   private:
    BitMemoryWriter<std::vector<uint8_t>> writer_;

    // Deduplicate at the BitTable level. The value is the bit offset within the output.
    std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> dedupe_map_;
  };
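
  // Example usage (illustrative; the input buffers are hypothetical):
  //
  //   std::vector<uint8_t> out;
  //   CodeInfo::Deduper deduper(&out);
  //   size_t offset_a = deduper.Dedupe(code_info_a);  // Writes tables into `out`.
  //   size_t offset_b = deduper.Dedupe(code_info_b);  // Identical tables are written once.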

  enum DecodeFlags {
    AllTables = 0,
    // Limits the decoding only to the data needed by GC.
    GcMasksOnly = 1,
    // Limits the decoding only to the main stack map table and inline info table.
    // This is sufficient for many use cases and makes the header decoding faster.
    InlineInfoOnly = 2,
  };

  CodeInfo() {}

  explicit CodeInfo(const uint8_t* data, DecodeFlags flags = AllTables) {
    Decode(data, flags);
  }

  explicit CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags = AllTables);

  size_t Size() const {
    return BitsToBytesRoundUp(size_in_bits_);
  }

  ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
    return stack_maps_;
  }

  ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
    return stack_maps_.GetRow(index);
  }

  BitMemoryRegion GetStackMask(size_t index) const {
    return stack_masks_.GetBitMemoryRegion(index);
  }

  BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetStackMaskIndex();
    return (index == StackMap::kNoValue) ? BitMemoryRegion() : GetStackMask(index);
  }

  uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetRegisterMaskIndex();
    return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
  }

  uint32_t GetNumberOfLocationCatalogEntries() const {
    return dex_register_catalog_.NumRows();
  }

  ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
    return (index == StackMap::kNoValue)
      ? DexRegisterLocation::None()
      : dex_register_catalog_.GetRow(index).GetLocation();
  }

  bool HasInlineInfo() const {
    return inline_infos_.NumRows() > 0;
  }

  uint32_t GetNumberOfStackMaps() const {
    return stack_maps_.NumRows();
  }

  uint32_t GetMethodIndexOf(InlineInfo inline_info) const {
    return method_infos_.GetRow(inline_info.GetMethodInfoIndex()).GetMethodIndex();
  }

  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
    if (stack_map.HasDexRegisterMap()) {
      DexRegisterMap map(number_of_dex_registers_, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), /* first_dex_register= */ 0, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

  ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map,
                                                         InlineInfo inline_info) const {
    if (stack_map.HasDexRegisterMap()) {
      DCHECK(stack_map.HasInlineInfoIndex());
      uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex();
      // The register counts are cumulative and include all outer levels.
      // This allows us to determine the range [first, last) in just two lookups.
      // If we are at depth 0 (the first inlinee), the count from the main method is used.
      uint32_t first = (depth == 0)
          ? number_of_dex_registers_
          : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters();
      uint32_t last = inline_info.GetNumberOfDexRegisters();
      DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), first, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }
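
  // Illustrative example of the cumulative counts used above: if the outer method has
  // 3 dex registers and the depth-0 inlinee adds 2 of its own, then
  // number_of_dex_registers_ == 3 and the depth-0 row stores NumberOfDexRegisters == 5,
  // yielding the inlinee's range [3, 5) in just two lookups.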

  BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
    uint32_t index = stack_map.GetInlineInfoIndex();
    if (index != StackMap::kNoValue) {
      auto begin = inline_infos_.begin() + index;
      auto end = begin;
      while ((*end++).GetIsLast() == InlineInfo::kMore) { }
      return BitTableRange<InlineInfo>(begin, end);
    } else {
      return BitTableRange<InlineInfo>();
    }
  }
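
  // Example usage (illustrative): the returned range spans the inline frames from depth 0
  // up to and including the row whose IsLast column is kLast.
  //
  //   for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
  //     uint32_t method_index = code_info.GetMethodIndexOf(inline_info);
  //   }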

  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  // Searches the stack map list backwards because catch stack maps are stored at the end.
  StackMap GetCatchStackMapForDexPc(uint32_t dex_pc) const {
    for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
      StackMap stack_map = GetStackMapAt(i - 1);
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::Catch) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa = kRuntimeISA) const;
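
  // Typical lookup flow (illustrative; `method_header` and `native_pc_offset` are
  // hypothetical inputs):
  //
  //   CodeInfo code_info(method_header);
  //   StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  //   if (stack_map.IsValid()) {
  //     DexRegisterMap regs = code_info.GetDexRegisterMapOf(stack_map);
  //   }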

  // Dump this CodeInfo object on `vios`.
  // `code_offset` is the (absolute) native PC of the compiled method.
  void Dump(VariableIndentationOutputStream* vios,
            uint32_t code_offset,
            bool verbose,
            InstructionSet instruction_set) const;

  // Accumulate code info size statistics into the given Stats tree.
  static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats* parent);

  ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* data) {
    BitMemoryReader reader(data);
    return QuickMethodFrameInfo(
        reader.ReadVarint() * kStackAlignment,  // Decode packed_frame_size_ and unpack.
        reader.ReadVarint(),  // core_spill_mask_.
        reader.ReadVarint());  // fp_spill_mask_.
  }
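
  // Example (illustrative, assuming kStackAlignment == 16): a 64-byte frame is stored as
  // the varint 64 / 16 == 4, and DecodeFrameInfo() scales it back up to 64.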

 private:
  // Returns the lower bound (the first stack map whose pc is greater than or equal to the
  // desired one). Catch stack maps at the end are ignored (as if they had the maximum pc value).
  BitTable<StackMap>::const_iterator BinarySearchNativePc(uint32_t packed_pc) const;

  // Scan backward to determine the dex register locations at the given stack map.
  void DecodeDexRegisterMap(uint32_t stack_map_index,
                            uint32_t first_dex_register,
                            /*out*/ DexRegisterMap* map) const;

  void Decode(const uint8_t* data, DecodeFlags flags);

  // Invokes the callback with a member pointer to each header field.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
    callback(&CodeInfo::packed_frame_size_);
    callback(&CodeInfo::core_spill_mask_);
    callback(&CodeInfo::fp_spill_mask_);
    callback(&CodeInfo::number_of_dex_registers_);
  }

  // Invokes the callback with a member pointer to each BitTable field.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachBitTableField(Callback callback, DecodeFlags flags = AllTables) {
    callback(&CodeInfo::stack_maps_);
    callback(&CodeInfo::register_masks_);
    callback(&CodeInfo::stack_masks_);
    if (flags & DecodeFlags::GcMasksOnly) {
      return;
    }
    callback(&CodeInfo::inline_infos_);
    callback(&CodeInfo::method_infos_);
    if (flags & DecodeFlags::InlineInfoOnly) {
      return;
    }
    callback(&CodeInfo::dex_register_masks_);
    callback(&CodeInfo::dex_register_maps_);
    callback(&CodeInfo::dex_register_catalog_);
  }
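
  // Illustrative use of the callbacks above (from within CodeInfo, since the helpers are
  // private): a generic lambda receives each member pointer and can address the
  // corresponding field of a concrete instance.
  //
  //   CodeInfo info;
  //   ForEachHeaderField([&](auto member_pointer) { info.*member_pointer = 0u; });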

  uint32_t packed_frame_size_ = 0;  // Frame size in kStackAlignment units.
  uint32_t core_spill_mask_ = 0;
  uint32_t fp_spill_mask_ = 0;
  uint32_t number_of_dex_registers_ = 0;
  BitTable<StackMap> stack_maps_;
  BitTable<RegisterMask> register_masks_;
  BitTable<StackMask> stack_masks_;
  BitTable<InlineInfo> inline_infos_;
  BitTable<MethodInfo> method_infos_;
  BitTable<DexRegisterMask> dex_register_masks_;
  BitTable<DexRegisterMapInfo> dex_register_maps_;
  BitTable<DexRegisterInfo> dex_register_catalog_;
  uint32_t size_in_bits_ = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_