/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ART_RUNTIME_LAMBDA_BOX_CLASS_TABLE_H_
#define ART_RUNTIME_LAMBDA_BOX_CLASS_TABLE_H_

#include <stdint.h>

#include "base/allocator.h"
#include "base/hash_map.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "object_callbacks.h"

namespace art {

class ArtMethod;  // forward declaration
template<class T> class Handle;  // forward declaration

namespace mirror {
class Class;  // forward declaration
class ClassLoader;  // forward declaration
class LambdaProxy;  // forward declaration
class Object;  // forward declaration
}  // namespace mirror

namespace lambda {
struct Closure;  // forward declaration

/*
 * Stores a table of boxed lambda classes. This is required to maintain object referential
 * equality when a lambda is re-boxed: boxing the same lambda interface name twice must
 * yield the same class object.
 *
 * Conceptually, we store a mapping of Class Name -> Strong Reference<Class> (the values
 * are strong GC roots; see ValueType below). When enough entries have been erased, we
 * shrink the underlying table to use less space.
 */
class BoxClassTable FINAL {
 public:
  // TODO: This should take a LambdaArtMethod instead, and read the class name from that.
  // Note: a null class_loader means the boot classpath.
  mirror::Class* GetOrCreateBoxClass(const char* class_name,
                                     const Handle<mirror::ClassLoader>& class_loader)
      REQUIRES(!Locks::lambda_class_table_lock_, !Roles::uninterruptible_)
      SHARED_REQUIRES(Locks::mutator_lock_);
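  //
  // Illustrative usage (a sketch, not part of this header; "table" and "loader" are
  // hypothetical locals, with "loader" a valid Handle<mirror::ClassLoader>):
  //
  //   BoxClassTable* table = ...;
  //   mirror::Class* first = table->GetOrCreateBoxClass("Ljava/lang/Runnable;", loader);
  //   mirror::Class* again = table->GetOrCreateBoxClass("Ljava/lang/Runnable;", loader);
  //   DCHECK_EQ(first, again);  // Same name yields the same class: referential equality.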

  // Sweep the strong references to lambda box classes. Update the addresses if the objects
  // have been moved, and delete them from the table if the objects have been cleaned up.
  template <typename Visitor>
  void VisitRoots(const Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS  // object marking requires the heap bitmap lock
      REQUIRES(!Locks::lambda_class_table_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);
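  //
  // Illustrative visitor (a sketch; the exact callback shape GcRoot visiting expects is
  // an assumption here, modeled on the runtime's CompressedReference root visitors):
  //
  //   struct NullCheckVisitor {
  //     void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
  //         SHARED_REQUIRES(Locks::mutator_lock_) {
  //       DCHECK(!root->IsNull());
  //     }
  //   };
  //   table->VisitRoots(NullCheckVisitor());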

  BoxClassTable();
  ~BoxClassTable();

 private:
  // We only store strong GC roots in our table.
  using ValueType = GcRoot<mirror::Class>;

  // Attempt to look up the class in the map, or return null if it's not there yet.
  ValueType FindBoxedClass(const std::string& class_name) const
      SHARED_REQUIRES(Locks::lambda_class_table_lock_);

  // Store the key as a string so that we can have our own copy of the class name.
  using UnorderedMapKeyType = std::string;

  // EmptyFn implementation for art::HashMap.
  struct EmptyFn {
    void MakeEmpty(std::pair<UnorderedMapKeyType, ValueType>& item) const
        NO_THREAD_SAFETY_ANALYSIS;
        // SHARED_REQUIRES(Locks::mutator_lock_);

    bool IsEmpty(const std::pair<UnorderedMapKeyType, ValueType>& item) const;
  };
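  //
  // The definitions live in the .cc file; a minimal sketch, assuming an empty
  // class-name key encodes an unoccupied slot:
  //
  //   void BoxClassTable::EmptyFn::MakeEmpty(
  //       std::pair<UnorderedMapKeyType, ValueType>& item) const {
  //     item.first.clear();  // An empty string key marks the slot as empty.
  //   }
  //   bool BoxClassTable::EmptyFn::IsEmpty(
  //       const std::pair<UnorderedMapKeyType, ValueType>& item) const {
  //     return item.first.empty();
  //   }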

  // HashFn implementation for art::HashMap.
  struct HashFn {
    size_t operator()(const UnorderedMapKeyType& key) const
        NO_THREAD_SAFETY_ANALYSIS;
        // SHARED_REQUIRES(Locks::mutator_lock_);
  };
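  //
  // A minimal sketch of a plausible definition, deferring to the standard string hash:
  //
  //   size_t BoxClassTable::HashFn::operator()(const UnorderedMapKeyType& key) const {
  //     return std::hash<UnorderedMapKeyType>()(key);
  //   }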

  // EqualsFn implementation for art::HashMap.
  struct EqualsFn {
    bool operator()(const UnorderedMapKeyType& lhs, const UnorderedMapKeyType& rhs) const
        NO_THREAD_SAFETY_ANALYSIS;
        // SHARED_REQUIRES(Locks::mutator_lock_);
  };
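  //
  // A minimal sketch of a plausible definition, plain string equality:
  //
  //   bool BoxClassTable::EqualsFn::operator()(const UnorderedMapKeyType& lhs,
  //                                            const UnorderedMapKeyType& rhs) const {
  //     return lhs == rhs;
  //   }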

  using UnorderedMap = art::HashMap<UnorderedMapKeyType,
                                    ValueType,
                                    EmptyFn,
                                    HashFn,
                                    EqualsFn,
                                    TrackingAllocator<std::pair<UnorderedMapKeyType, ValueType>,
                                                      kAllocatorTagLambdaProxyClassBoxTable>>;

  // Map of strong GC roots (lambda interface name -> lambda proxy class).
  UnorderedMap map_ GUARDED_BY(Locks::lambda_class_table_lock_);

  // Shrink the map when we get below this load factor.
  // (This is an arbitrary value that should be small enough that aggressive map erases
  // do not shrink the table too often.)
  static constexpr double kMinimumLoadFactor = UnorderedMap::kDefaultMinLoadFactor / 2;

  DISALLOW_COPY_AND_ASSIGN(BoxClassTable);
};

}  // namespace lambda
}  // namespace art

#endif  // ART_RUNTIME_LAMBDA_BOX_CLASS_TABLE_H_