YugabyteDB (2.13.0.0-b42, bfc6a6643e7399ac8a0e81d06a3ee6d6571b33ab)

Coverage Report

Created: 2022-03-09 17:30

/Users/deen/code/yugabyte-db/src/yb/util/memory/tracked_shared_ptr_impl.h
Line |  Count | Source
   1 |        | // Copyright (c) YugaByte, Inc.
   2 |        | //
   3 |        | // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
   4 |        | // in compliance with the License.  You may obtain a copy of the License at
   5 |        | //
   6 |        | // http://www.apache.org/licenses/LICENSE-2.0
   7 |        | //
   8 |        | // Unless required by applicable law or agreed to in writing, software distributed under the License
   9 |        | // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
  10 |        | // or implied.  See the License for the specific language governing permissions and limitations
  11 |        | // under the License.
  12 |        | //
  13 |        |
  14 |        | #ifndef YB_UTIL_MEMORY_TRACKED_SHARED_PTR_IMPL_H_
  15 |        | #define YB_UTIL_MEMORY_TRACKED_SHARED_PTR_IMPL_H_
  16 |        |
  17 |        | #include "yb/util/memory/tracked_shared_ptr.h"
  18 |        |
  19 |        | #include <map>
  20 |        |
  21 |        | #include <boost/functional/hash.hpp>
  22 |        |
  23 |        | namespace { // NOLINT
  24 |        |
  25 |        | struct StackWithIsNotNull {
  26 |        |   yb::StackTrace* stack_trace;
  27 |        |   bool is_not_nullptr;
  28 |        |
  29 |      0 |   size_t HashCode() const {
  30 |      0 |     size_t hash = 0;
  31 |      0 |     boost::hash_combine(hash, stack_trace);
  32 |      0 |     boost::hash_combine(hash, is_not_nullptr);
  33 |      0 |     return hash;
  34 |      0 |   }
  35 |        |
  36 |     12 |   int compare(const StackWithIsNotNull& other) const {
  37 |     12 |     int cmp = is_not_nullptr - other.is_not_nullptr;
  38 |     12 |     return cmp ? cmp : stack_trace->compare(*other.stack_trace);
  39 |     12 |   }
  40 |        | };
  41 |        |
  42 |        | __attribute__((unused)) bool operator==(
  43 |      0 |     const StackWithIsNotNull& lhs, const StackWithIsNotNull& rhs) {
  44 |      0 |   return lhs.is_not_nullptr == rhs.is_not_nullptr && lhs.stack_trace == rhs.stack_trace;
  45 |      0 | }
  46 |        |
  47 |        | __attribute__((unused)) bool operator<(
  48 |     12 |     const StackWithIsNotNull& lhs, const StackWithIsNotNull& rhs) {
  49 |     12 |   return lhs.compare(rhs) < 0;
  50 |     12 | }
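In Dump() further down, StackWithIsNotNull is only used as a std::map key, which is why the compare()/operator< path shows 12 hits while HashCode() and operator== sit at 0; those two would only run if the struct keyed an unordered container. A minimal, self-contained sketch of the same ordered-key pattern, with a plain int standing in for the yb::StackTrace pointer (KeyWithFlag and its fields are illustrative names, not YugabyteDB code):

// Sketch only: mirrors the compare()/operator< keying above with an int
// stand-in for the StackTrace pointer.
#include <cstddef>
#include <iostream>
#include <map>

struct KeyWithFlag {
  int trace_id;         // stand-in for yb::StackTrace*
  bool is_not_nullptr;

  int compare(const KeyWithFlag& other) const {
    int cmp = is_not_nullptr - other.is_not_nullptr;
    if (cmp != 0) return cmp;
    return (trace_id > other.trace_id) - (trace_id < other.trace_id);
  }
};

bool operator<(const KeyWithFlag& lhs, const KeyWithFlag& rhs) {
  return lhs.compare(rhs) < 0;  // std::map only needs a strict weak ordering
}

int main() {
  std::map<KeyWithFlag, std::size_t> counts;
  ++counts[KeyWithFlag{1, true}];
  ++counts[KeyWithFlag{1, true}];
  ++counts[KeyWithFlag{2, false}];
  for (const auto& entry : counts) {
    std::cout << entry.first.trace_id << " " << entry.first.is_not_nullptr
              << " -> " << entry.second << "\n";
  }
  return 0;
}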
  51 |        |
  52 |        | template <class Key, class Value>
  53 |      5 | void IncrementMapEntry(std::map<Key, Value>* map, const Key& key) {
  54 |      5 |   if (map->count(key) > 0) {
  55 |      0 |     (*map)[key]++;
  56 |      5 |   } else {
  57 |      5 |     (*map)[key] = 1;
  58 |      5 |   }
  59 |      5 | }
  60 |        |
  61 |        | } // namespace
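IncrementMapEntry is a count-or-insert helper. In this run every key handed to it was new, so the (*map)[key]++ branch at line 55 stays at 0 while the else branch carries all 5 hits. A standalone sketch that exercises the helper exactly as written:

// Sketch only: the helper above, exercised in isolation.
#include <cstddef>
#include <iostream>
#include <map>
#include <string>

template <class Key, class Value>
void IncrementMapEntry(std::map<Key, Value>* map, const Key& key) {
  if (map->count(key) > 0) {
    (*map)[key]++;
  } else {
    (*map)[key] = 1;
  }
}

int main() {
  std::map<std::string, std::size_t> counts;
  IncrementMapEntry(&counts, std::string("a"));
  IncrementMapEntry(&counts, std::string("a"));  // second call takes the ++ branch
  IncrementMapEntry(&counts, std::string("b"));
  for (const auto& entry : counts) {
    std::cout << entry.first << ": " << entry.second << "\n";  // a: 2, b: 1
  }
  return 0;
}

Since std::map::operator[] value-initializes a missing arithmetic value to zero, ++(*map)[key] on its own would behave the same; the explicit branch just spells out the insert case.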
  62 |        |
  63 |        | namespace std {
  64 |        |
  65 |        | template <>
  66 |        | struct hash<StackWithIsNotNull> {
  67 |      0 |   std::size_t operator()(StackWithIsNotNull const& obj) const noexcept { return obj.HashCode(); }
  68 |        | };
  69 |        |
  70 |        | } // namespace std
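The std::hash specialization above (line 67) is what would allow StackWithIsNotNull to key an unordered container; its 0 count matches the unexecuted HashCode(). A generic sketch of the same std::hash specialization pattern, using a hypothetical Point struct:

// Sketch only: specializing std::hash so a user-defined struct can key an
// unordered container; Point and its fields are illustrative.
#include <cstddef>
#include <functional>
#include <iostream>
#include <unordered_map>

struct Point {
  int x;
  int y;
  bool operator==(const Point& other) const { return x == other.x && y == other.y; }
};

namespace std {

template <>
struct hash<Point> {
  std::size_t operator()(const Point& p) const noexcept {
    // Simple combine; boost::hash_combine plays this role in the code above.
    return std::hash<int>{}(p.x) ^ (std::hash<int>{}(p.y) << 1);
  }
};

}  // namespace std

int main() {
  std::unordered_map<Point, int> hits;
  ++hits[Point{1, 2}];
  ++hits[Point{1, 2}];
  std::cout << hits[Point{1, 2}] << "\n";  // prints 2
  return 0;
}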
  71 |        |
  72 |        | namespace yb {
  73 |        |
  74 |        | template <class T>
  75 |        | std::atomic<int64_t> TrackedSharedPtr<T>::num_references_{0};
  76 |        |
  77 |        | template <class T>
  78 |        | std::atomic<int64_t> TrackedSharedPtr<T>::num_instances_{0};
  79 |        |
  80 |        | template <class T>
  81 |        | simple_spinlock TrackedSharedPtr<T>::lock_;
  82 |        |
  83 |        | template <class T>
  84 |        | std::set<TrackedSharedPtr<T>*> TrackedSharedPtr<T>::instances_;
  85 |        |
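The out-of-line definitions above give every instantiation of TrackedSharedPtr<T> its own reference counter, instance counter, lock, and instance set, so TrackedSharedPtr<A> and TrackedSharedPtr<B> are tracked independently. A minimal sketch of that per-instantiation behavior, using a hypothetical Counter template:

// Sketch only: static data members of a class template are per-instantiation,
// so each T gets independent counters; Counter, A, and B are illustrative.
#include <atomic>
#include <cstdint>
#include <iostream>

template <class T>
struct Counter {
  static std::atomic<std::int64_t> count_;
};

template <class T>
std::atomic<std::int64_t> Counter<T>::count_{0};

struct A {};
struct B {};

int main() {
  ++Counter<A>::count_;
  ++Counter<A>::count_;
  // Counter<B>::count_ is untouched: A and B have separate statics.
  std::cout << Counter<A>::count_.load() << " " << Counter<B>::count_.load() << "\n";  // 2 0
  return 0;
}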
  86 |        | template <class T>
  87 |      2 | void TrackedSharedPtr<T>::Dump() {
  88 |      2 |   std::lock_guard<simple_spinlock> l(lock_);
  89 |      2 |   LOG(INFO) << "num_references: " << num_references_;
  90 |      2 |   LOG(INFO) << "num_instances: " << num_instances_;
  91 |        |
  92 |      2 |   std::map<StackWithIsNotNull, size_t> count_by_stack_trace_created;
  93 |        |
  94 |      2 |   size_t count_non_nullptr = 0;
  95 |      2 |   LOG(INFO) << "instances: ";
  96 |      5 |   for (auto& instance : instances_) {
  97 |      5 |     LOG(INFO) << instance << " ptr:" << instance->get()
  98 |      5 |               << " last_not_null_value: " << AsString(instance->last_not_null_value_)
  99 |      5 |               << " use_count: " << instance->use_count();
 100 |        |
 101 |      5 |     const bool is_not_nullptr = instance->get();
 102 |      5 |     if (is_not_nullptr) {
 103 |      5 |       ++count_non_nullptr;
 104 |      5 |     }
 105 |      5 |     IncrementMapEntry(&count_by_stack_trace_created,
 106 |      5 |         StackWithIsNotNull{&instance->stack_trace_created_, is_not_nullptr});
 107 |      5 |   }
 108 |        |
 109 |      2 |   LOG(INFO) << "count_non_nullptr: " << count_non_nullptr;
 110 |      2 |   LOG(INFO) << "Tracked shared pointers alive by stack trace created:";
 111 |      5 |   for (auto& entry : count_by_stack_trace_created) {
 112 |      5 |     LOG(INFO) << "not_null: " << entry.first.is_not_nullptr << " count: " << entry.second
 113 |      5 |               << " stack_trace: " << entry.first.stack_trace->Symbolize();
 114 |      5 |   }
 115 |      2 |   LOG(INFO) << "<<<";
 116 |      2 |   CHECK_EQ(num_instances_, instances_.size());
 117 |      2 | }
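Dump() takes the spinlock, logs the global counters, walks the instance registry, and buckets live instances by the stack trace captured when each one was created. The sketch below reduces that registry-and-dump pattern to standard-library pieces (std::mutex in place of simple_spinlock, std::cout in place of LOG(INFO), a string in place of the stack trace); all names are illustrative, not the yb API:

// Sketch only: a reduced registry-and-dump pattern; TrackedThing, g_instances,
// and DumpInstances are illustrative names, not the yb implementation.
#include <cstddef>
#include <iostream>
#include <map>
#include <mutex>
#include <set>
#include <string>

struct TrackedThing {
  std::string created_at;  // stand-in for the creation stack trace
  bool alive;              // stand-in for instance->get() != nullptr
};

std::mutex g_lock;
std::set<TrackedThing*> g_instances;

void DumpInstances() {
  std::lock_guard<std::mutex> l(g_lock);
  std::map<std::string, std::size_t> count_by_site;
  for (TrackedThing* t : g_instances) {
    std::cout << t << " alive: " << t->alive << "\n";
    if (t->alive) {
      ++count_by_site[t->created_at];
    }
  }
  for (const auto& entry : count_by_site) {
    std::cout << "site: " << entry.first << " count: " << entry.second << "\n";
  }
}

int main() {
  TrackedThing a{"TestA", true}, b{"TestA", true}, c{"TestB", false};
  {
    std::lock_guard<std::mutex> l(g_lock);
    g_instances = {&a, &b, &c};
  }
  DumpInstances();  // TestA is counted twice; TestB is logged but not counted
  return 0;
}

In the real Dump() the bucketing key also records whether the pointer is currently non-null, which is what StackWithIsNotNull adds on top of the creation stack trace.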
 118 |        |
 119 |        | template <class T>
 120 |  37.4k | void TrackedSharedPtr<T>::RefIfInitialized() {
 121 |  37.4k |   if (IsInitialized()) {
 122 |  22.7k |     last_not_null_value_ = std::shared_ptr<T>::get();
 123 |  22.7k |     ++num_references_;
 124 |  22.7k |   }
 125 |  37.4k | }
 126 |        |
 127 |        | template <class T>
 128 |  43.7k | void TrackedSharedPtr<T>::UnrefIfInitialized() {
 129 |  43.7k |   if (IsInitialized()) {
 130 |  22.7k |     --num_references_;
 131 |  22.7k |   }
 132 |  43.7k | }
 133 |        |
 134 |        | template <class T>
 135 |  7.33k | void TrackedSharedPtr<T>::RegisterInstance() {
 136 |  7.33k |   std::lock_guard<simple_spinlock> l(lock_);
 137 |  7.33k |   ++num_instances_;
 138 |  7.33k |   instances_.insert(this);
 139 |        |   // We skip 3 frames: StackTrace::Collect, TrackedSharedPtr::RegisterInstance and
 140 |        |   // TrackedSharedPtr constructor to only have stack trace for code we want to debug.
 141 |  7.33k |   stack_trace_created_.Collect(/* skip_frames =*/ 3);
 142 |  7.33k | }
 143 |        |
 144 |        | template <class T>
 145 |  7.33k | void TrackedSharedPtr<T>::UnregisterInstance() {
 146 |  7.33k |   std::lock_guard<simple_spinlock> l(lock_);
 147 |  7.33k |   --num_instances_;
 148 |  7.33k |   instances_.erase(this);
 149 |  7.33k | }
 150 |        |
 151 |        | }  // namespace yb
 152 |        |
 153 |        | #endif // YB_UTIL_MEMORY_TRACKED_SHARED_PTR_IMPL_H_
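This header only supplies the member definitions; the class declaration lives in yb/util/memory/tracked_shared_ptr.h. The qualified std::shared_ptr<T>::get() call at line 122 suggests TrackedSharedPtr<T> wraps or derives from std::shared_ptr<T>, so leak debugging with it might look roughly like the sketch below. The constructor from std::shared_ptr and a public static Dump() are assumptions about the surrounding API, not something confirmed by this file:

// Hypothetical usage sketch, not verified against tracked_shared_ptr.h:
// assumes TrackedSharedPtr<T> can be constructed from a std::shared_ptr<T>
// and exposes the static Dump() defined above.
#include <memory>

#include "yb/util/memory/tracked_shared_ptr.h"

struct Connection { int id = 0; };

void DebugConnectionLeaks() {
  yb::TrackedSharedPtr<Connection> live(std::make_shared<Connection>());    // assumed ctor
  {
    yb::TrackedSharedPtr<Connection> scoped(std::make_shared<Connection>());
  }  // scoped unregisters itself here; only live should remain
  // Logs num_references, num_instances, and the live instances bucketed by
  // the stack trace at which they were created.
  yb::TrackedSharedPtr<Connection>::Dump();
}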