/Users/deen/code/yugabyte-db/src/yb/docdb/doc_key.cc
Line | Count | Source (jump to first uncovered line) |
1 | | // Copyright (c) YugaByte, Inc. |
2 | | // |
3 | | // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except |
4 | | // in compliance with the License. You may obtain a copy of the License at |
5 | | // |
6 | | // http://www.apache.org/licenses/LICENSE-2.0 |
7 | | // |
8 | | // Unless required by applicable law or agreed to in writing, software distributed under the License |
9 | | // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express |
10 | | // or implied. See the License for the specific language governing permissions and limitations |
11 | | // under the License. |
12 | | // |
13 | | |
14 | | #include "yb/docdb/doc_key.h" |
15 | | |
16 | | #include <memory> |
17 | | #include <sstream> |
18 | | |
19 | | #include "yb/common/schema.h" |
20 | | |
21 | | #include "yb/docdb/doc_kv_util.h" |
22 | | #include "yb/docdb/doc_path.h" |
23 | | #include "yb/docdb/primitive_value.h" |
24 | | #include "yb/docdb/value_type.h" |
25 | | |
26 | | #include "yb/gutil/strings/substitute.h" |
27 | | |
28 | | #include "yb/util/compare_util.h" |
29 | | #include "yb/util/enums.h" |
30 | | #include "yb/util/result.h" |
31 | | #include "yb/util/status_format.h" |
32 | | #include "yb/util/string_util.h" |
33 | | #include "yb/util/tostring.h" |
34 | | #include "yb/util/uuid.h" |
35 | | |
36 | | using std::ostringstream; |
37 | | |
38 | | using strings::Substitute; |
39 | | |
40 | | using yb::util::CompareVectors; |
41 | | using yb::util::CompareUsingLessThan; |
42 | | |
43 | | namespace yb { |
44 | | namespace docdb { |
45 | | |
46 | | namespace { |
47 | | |
48 | | // Checks whether slice starts with primitive value. |
49 | | // Valid cases are end of group or primitive value starting with value type. |
50 | 1.02G | Result<bool> HasPrimitiveValue(Slice* slice, AllowSpecial allow_special) { |
51 | 1.02G | if (PREDICT_FALSE(slice->empty())) { |
52 | 0 | return STATUS(Corruption, "Unexpected end of key when decoding document key"); |
53 | 0 | } |
54 | 1.02G | ValueType current_value_type = static_cast<ValueType>(*slice->data()); |
55 | 1.02G | if (current_value_type == ValueType::kGroupEnd) { |
56 | 488M | slice->consume_byte(); |
57 | 488M | return false; |
58 | 488M | } |
59 | | |
60 | 532M | if (IsPrimitiveValueType(current_value_type)) { |
61 | 532M | return true; |
62 | 532M | } |
63 | | |
64 | 18.4E | if (allow_special && IsSpecialValueType(current_value_type)) { |
65 | 338 | return true; |
66 | 338 | } |
67 | | |
68 | 18.4E | return STATUS_FORMAT(Corruption, "Expected a primitive value type, got $0", current_value_type); |
69 | 18.4E | } |
70 | | |
71 | | constexpr auto kNumValuesNoLimit = std::numeric_limits<int>::max(); |
72 | | |
73 | | // Consumes up to n_values_limit primitive values from key until group end is found. |
74 | | // Callback is called for each value and responsible for consuming this single value from slice. |
75 | | template<class Callback> |
76 | | Status ConsumePrimitiveValuesFromKey( |
77 | | Slice* slice, AllowSpecial allow_special, Callback callback, |
78 | 357M | int n_values_limit = kNumValuesNoLimit) { |
79 | 357M | const auto initial_slice(*slice); // For error reporting. |
80 | 776M | for (; n_values_limit > 0; --n_values_limit) { |
81 | 774M | if (!VERIFY_RESULT(HasPrimitiveValue(slice, allow_special))) { |
82 | 355M | return Status::OK(); |
83 | 355M | } |
84 | | |
85 | 419M | RETURN_NOT_OK_PREPEND(callback(), |
86 | 419M | Substitute("while consuming primitive values from $0", |
87 | 419M | initial_slice.ToDebugHexString())); |
88 | 419M | } |
89 | 2.25M | return Status::OK(); |
90 | 357M | } doc_key.cc:_ZN2yb5docdb12_GLOBAL__N_129ConsumePrimitiveValuesFromKeyIZNS1_29ConsumePrimitiveValuesFromKeyEPNS_5SliceENS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEEPNSt3__16vectorINS0_14PrimitiveValueENS8_9allocatorISA_EEEEiE3$_1EENS_6StatusES4_S7_T_i Line | Count | Source | 78 | 193M | int n_values_limit = kNumValuesNoLimit) { | 79 | 193M | const auto initial_slice(*slice); // For error reporting. | 80 | 400M | for (; n_values_limit > 0; --n_values_limit) { | 81 | 400M | if (!VERIFY_RESULT(HasPrimitiveValue(slice, allow_special))) { | 82 | 193M | return Status::OK(); | 83 | 193M | } | 84 | | | 85 | 206M | RETURN_NOT_OK_PREPEND(callback(), | 86 | 206M | Substitute("while consuming primitive values from $0", | 87 | 206M | initial_slice.ToDebugHexString())); | 88 | 206M | } | 89 | 18.4E | return Status::OK(); | 90 | 193M | } |
doc_key.cc:_ZN2yb5docdb12_GLOBAL__N_129ConsumePrimitiveValuesFromKeyIZNS1_29ConsumePrimitiveValuesFromKeyEPNS_5SliceENS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEEPN5boost9container17small_vector_baseIS3_vvEEiE3$_0EENS_6StatusES4_S7_T_i Line | Count | Source | 78 | 163M | int n_values_limit = kNumValuesNoLimit) { | 79 | 163M | const auto initial_slice(*slice); // For error reporting. | 80 | 376M | for (; n_values_limit > 0; --n_values_limit) { | 81 | 374M | if (!VERIFY_RESULT(HasPrimitiveValue(slice, allow_special))) { | 82 | 161M | return Status::OK(); | 83 | 161M | } | 84 | | | 85 | 212M | RETURN_NOT_OK_PREPEND(callback(), | 86 | 212M | Substitute("while consuming primitive values from $0", | 87 | 212M | initial_slice.ToDebugHexString())); | 88 | 212M | } | 89 | 2.38M | return Status::OK(); | 90 | 163M | } |
|
91 | | |
92 | | Status ConsumePrimitiveValuesFromKey(Slice* slice, AllowSpecial allow_special, |
93 | | boost::container::small_vector_base<Slice>* result, |
94 | 163M | int n_values_limit = kNumValuesNoLimit) { |
95 | 212M | return ConsumePrimitiveValuesFromKey(slice, allow_special, [slice, result]() -> Status { |
96 | 212M | auto begin = slice->data(); |
97 | 212M | RETURN_NOT_OK(PrimitiveValue::DecodeKey(slice, /* out */ nullptr)); |
98 | 212M | if (result) { |
99 | 4.61M | result->emplace_back(begin, slice->data()); |
100 | 4.61M | } |
101 | 212M | return Status::OK(); |
102 | 212M | }, n_values_limit); |
103 | 163M | } |
104 | | |
105 | | Status ConsumePrimitiveValuesFromKey( |
106 | | Slice* slice, AllowSpecial allow_special, std::vector<PrimitiveValue>* result, |
107 | 193M | int n_values_limit = kNumValuesNoLimit) { |
108 | 206M | return ConsumePrimitiveValuesFromKey(slice, allow_special, [slice, result] { |
109 | 206M | result->emplace_back(); |
110 | 206M | return result->back().DecodeFromKey(slice); |
111 | 206M | }, n_values_limit); |
112 | 193M | } |
113 | | |
114 | | } // namespace |
115 | | |
116 | 48.7M | Result<bool> ConsumePrimitiveValueFromKey(Slice* slice) { |
117 | 48.7M | if (!VERIFY_RESULT(HasPrimitiveValue(slice, AllowSpecial::kFalse))) { |
118 | 28.6M | return false; |
119 | 28.6M | } |
120 | 20.1M | RETURN_NOT_OK(PrimitiveValue::DecodeKey(slice, nullptr /* out */)); |
121 | 20.1M | return true; |
122 | 20.1M | } |
123 | | |
124 | 0 | Status ConsumePrimitiveValuesFromKey(Slice* slice, std::vector<PrimitiveValue>* result) { |
125 | 0 | return ConsumePrimitiveValuesFromKey(slice, AllowSpecial::kFalse, result); |
126 | 0 | } |
127 | | |
128 | | // ------------------------------------------------------------------------------------------------ |
129 | | // DocKey |
130 | | // ------------------------------------------------------------------------------------------------ |
131 | | |
132 | | DocKey::DocKey() |
133 | | : cotable_id_(Uuid::Nil()), |
134 | | pgtable_id_(0), |
135 | | hash_present_(false), |
136 | 139M | hash_(0) { |
137 | 139M | } |
138 | | |
139 | | DocKey::DocKey(std::vector<PrimitiveValue> range_components) |
140 | | : cotable_id_(Uuid::Nil()), |
141 | | pgtable_id_(0), |
142 | | hash_present_(false), |
143 | | hash_(0), |
144 | 1.53M | range_group_(std::move(range_components)) { |
145 | 1.53M | } |
146 | | |
147 | | DocKey::DocKey(DocKeyHash hash, |
148 | | std::vector<PrimitiveValue> hashed_components, |
149 | | std::vector<PrimitiveValue> range_components) |
150 | | : cotable_id_(Uuid::Nil()), |
151 | | pgtable_id_(0), |
152 | | hash_present_(true), |
153 | | hash_(hash), |
154 | | hashed_group_(std::move(hashed_components)), |
155 | 5.90M | range_group_(std::move(range_components)) { |
156 | 5.90M | } |
157 | | |
158 | | DocKey::DocKey(const Uuid& cotable_id, |
159 | | DocKeyHash hash, |
160 | | std::vector<PrimitiveValue> hashed_components, |
161 | | std::vector<PrimitiveValue> range_components) |
162 | | : cotable_id_(cotable_id), |
163 | | pgtable_id_(0), |
164 | | hash_present_(true), |
165 | | hash_(hash), |
166 | | hashed_group_(std::move(hashed_components)), |
167 | 0 | range_group_(std::move(range_components)) { |
168 | 0 | } |
169 | | |
170 | | DocKey::DocKey(const PgTableOid pgtable_id, |
171 | | DocKeyHash hash, |
172 | | std::vector<PrimitiveValue> hashed_components, |
173 | | std::vector<PrimitiveValue> range_components) |
174 | | : cotable_id_(Uuid::Nil()), |
175 | | pgtable_id_(pgtable_id), |
176 | | hash_present_(true), |
177 | | hash_(hash), |
178 | | hashed_group_(std::move(hashed_components)), |
179 | 0 | range_group_(std::move(range_components)) { |
180 | 0 | } |
181 | | |
182 | | DocKey::DocKey(const Uuid& cotable_id) |
183 | | : cotable_id_(cotable_id), |
184 | | pgtable_id_(0), |
185 | | hash_present_(false), |
186 | 0 | hash_(0) { |
187 | 0 | } |
188 | | |
189 | | DocKey::DocKey(const PgTableOid pgtable_id) |
190 | | : cotable_id_(Uuid::Nil()), |
191 | | pgtable_id_(pgtable_id), |
192 | | hash_present_(false), |
193 | 0 | hash_(0) { |
194 | 0 | } |
195 | | |
196 | | DocKey::DocKey(const Schema& schema) |
197 | | : cotable_id_(schema.cotable_id()), |
198 | | pgtable_id_(schema.pgtable_id()), |
199 | | hash_present_(false), |
200 | 2.57M | hash_(0) { |
201 | 2.57M | } |
202 | | |
203 | | DocKey::DocKey(const Schema& schema, DocKeyHash hash) |
204 | | : cotable_id_(schema.cotable_id()), |
205 | | pgtable_id_(schema.pgtable_id()), |
206 | | hash_present_(true), |
207 | 0 | hash_(hash) { |
208 | 0 | } |
209 | | |
210 | | DocKey::DocKey(const Schema& schema, std::vector<PrimitiveValue> range_components) |
211 | | : cotable_id_(schema.cotable_id()), |
212 | | pgtable_id_(schema.pgtable_id()), |
213 | | hash_present_(false), |
214 | | hash_(0), |
215 | 1.38M | range_group_(std::move(range_components)) { |
216 | 1.38M | } |
217 | | |
218 | | DocKey::DocKey(const Schema& schema, DocKeyHash hash, |
219 | | std::vector<PrimitiveValue> hashed_components, |
220 | | std::vector<PrimitiveValue> range_components) |
221 | | : cotable_id_(schema.cotable_id()), |
222 | | pgtable_id_(schema.pgtable_id()), |
223 | | hash_present_(true), |
224 | | hash_(hash), |
225 | | hashed_group_(std::move(hashed_components)), |
226 | 1.59M | range_group_(std::move(range_components)) { |
227 | 1.59M | } |
228 | | |
229 | 9.91M | KeyBytes DocKey::Encode() const { |
230 | 9.91M | KeyBytes result; |
231 | 9.91M | AppendTo(&result); |
232 | 9.91M | return result; |
233 | 9.91M | } |
234 | | |
235 | | namespace { |
236 | | |
237 | | // Used as cache of allocated memory by EncodeAsRefCntPrefix. |
238 | | thread_local boost::optional<KeyBytes> thread_local_encode_buffer; |
239 | | |
240 | | } |
241 | | |
242 | 6.56M | RefCntPrefix DocKey::EncodeAsRefCntPrefix() const { |
243 | 6.56M | KeyBytes* encode_buffer = thread_local_encode_buffer.get_ptr(); |
244 | 6.56M | if (!encode_buffer) { |
245 | 33.0k | thread_local_encode_buffer.emplace(); |
246 | 33.0k | encode_buffer = thread_local_encode_buffer.get_ptr(); |
247 | 33.0k | } |
248 | 6.56M | encode_buffer->Clear(); |
249 | 6.56M | AppendTo(encode_buffer); |
250 | 6.56M | return RefCntPrefix(encode_buffer->AsSlice()); |
251 | 6.56M | } |
252 | | |
253 | 16.4M | void DocKey::AppendTo(KeyBytes* out) const { |
254 | 16.4M | auto encoder = DocKeyEncoder(out); |
255 | 16.4M | if (!cotable_id_.IsNil()) { |
256 | 1.57M | encoder.CotableId(cotable_id_).Hash(hash_present_, hash_, hashed_group_).Range(range_group_); |
257 | 14.8M | } else { |
258 | 14.8M | encoder.PgtableId(pgtable_id_).Hash(hash_present_, hash_, hashed_group_).Range(range_group_); |
259 | 14.8M | } |
260 | 16.4M | } |
261 | | |
262 | 254M | void DocKey::Clear() { |
263 | 254M | hash_present_ = false; |
264 | 254M | hash_ = 0xdead; |
265 | 254M | hashed_group_.clear(); |
266 | 254M | range_group_.clear(); |
267 | 254M | } |
268 | | |
269 | 0 | void DocKey::ClearRangeComponents() { |
270 | 0 | range_group_.clear(); |
271 | 0 | } |
272 | | |
273 | 0 | void DocKey::ResizeRangeComponents(int new_size) { |
274 | 0 | range_group_.resize(new_size); |
275 | 0 | } |
276 | | |
277 | | namespace { |
278 | | |
279 | | class DecodeDocKeyCallback { |
280 | | public: |
281 | 7.02M | explicit DecodeDocKeyCallback(boost::container::small_vector_base<Slice>* out) : out_(out) {} |
282 | | |
283 | 25.2M | boost::container::small_vector_base<Slice>* hashed_group() const { |
284 | 25.2M | return nullptr; |
285 | 25.2M | } |
286 | | |
287 | 27.1M | boost::container::small_vector_base<Slice>* range_group() const { |
288 | 27.1M | return out_; |
289 | 27.1M | } |
290 | | |
291 | 27.0M | void SetHash(...) const {} |
292 | | |
293 | 35.7k | void SetCoTableId(const Uuid cotable_id) const {} |
294 | | |
295 | 0 | void SetPgTableId(const PgTableOid pgtable_id) const {} |
296 | | |
297 | | private: |
298 | | boost::container::small_vector_base<Slice>* out_; |
299 | | }; |
300 | | |
301 | | class DummyCallback { |
302 | | public: |
303 | 79.7M | boost::container::small_vector_base<Slice>* hashed_group() const { |
304 | 79.7M | return nullptr; |
305 | 79.7M | } |
306 | | |
307 | 42.3M | boost::container::small_vector_base<Slice>* range_group() const { |
308 | 42.3M | return nullptr; |
309 | 42.3M | } |
310 | | |
311 | 106M | void SetHash(...) const {} |
312 | | |
313 | 18.8M | void SetCoTableId(const Uuid cotable_id) const {} |
314 | | |
315 | 649 | void SetPgTableId(const PgTableOid pgtable_id) const {} |
316 | | |
317 | 35.3M | PrimitiveValue* AddSubkey() const { |
318 | 35.3M | return nullptr; |
319 | 35.3M | } |
320 | | }; |
321 | | |
322 | | class EncodedSizesCallback { |
323 | | public: |
324 | 44.2M | explicit EncodedSizesCallback(DocKeyDecoder* decoder) : decoder_(decoder) {} |
325 | | |
326 | 28.8M | boost::container::small_vector_base<Slice>* hashed_group() const { |
327 | 28.8M | return nullptr; |
328 | 28.8M | } |
329 | | |
330 | 44.2M | boost::container::small_vector_base<Slice>* range_group() const { |
331 | 44.2M | range_group_start_ = decoder_->left_input().data(); |
332 | 44.2M | return nullptr; |
333 | 44.2M | } |
334 | | |
335 | 44.2M | void SetHash(...) const {} |
336 | | |
337 | 13.8M | void SetCoTableId(const Uuid cotable_id) const {} |
338 | | |
339 | 167 | void SetPgTableId(const PgTableOid pgtable_id) const {} |
340 | | |
341 | 0 | PrimitiveValue* AddSubkey() const { |
342 | 0 | return nullptr; |
343 | 0 | } |
344 | | |
345 | 44.2M | const uint8_t* range_group_start() { |
346 | 44.2M | return range_group_start_; |
347 | 44.2M | } |
348 | | |
349 | | private: |
350 | | DocKeyDecoder* decoder_; |
351 | | mutable const uint8_t* range_group_start_ = nullptr; |
352 | | }; |
353 | | |
354 | | } // namespace |
355 | | |
356 | | yb::Status DocKey::PartiallyDecode(Slice *slice, |
357 | 7.02M | boost::container::small_vector_base<Slice>* out) { |
358 | 7.02M | CHECK_NOTNULL(out); |
359 | 7.02M | DocKeyDecoder decoder(*slice); |
360 | 7.02M | RETURN_NOT_OK(DoDecode( |
361 | 7.02M | &decoder, DocKeyPart::kWholeDocKey, AllowSpecial::kFalse, DecodeDocKeyCallback(out))); |
362 | 7.02M | *slice = decoder.left_input(); |
363 | 7.02M | return Status::OK(); |
364 | 7.02M | } |
365 | | |
366 | 2.03M | Result<DocKeyHash> DocKey::DecodeHash(const Slice& slice) { |
367 | 2.03M | DocKeyDecoder decoder(slice); |
368 | 2.03M | RETURN_NOT_OK(decoder.DecodeCotableId()); |
369 | 2.03M | RETURN_NOT_OK(decoder.DecodePgtableId()); |
370 | 2.03M | uint16_t hash; |
371 | 2.03M | RETURN_NOT_OK(decoder.DecodeHashCode(&hash)); |
372 | 2.03M | return hash; |
373 | 2.03M | } |
374 | | |
375 | 106M | Result<size_t> DocKey::EncodedSize(Slice slice, DocKeyPart part, AllowSpecial allow_special) { |
376 | 106M | auto initial_begin = slice.cdata(); |
377 | 106M | DocKeyDecoder decoder(slice); |
378 | 106M | RETURN_NOT_OK(DoDecode(&decoder, part, allow_special, DummyCallback())); |
379 | 106M | return decoder.left_input().cdata() - initial_begin; |
380 | 106M | } |
381 | | |
382 | 0 | Result<std::pair<size_t, bool>> DocKey::EncodedSizeAndHashPresent(Slice slice, DocKeyPart part) { |
383 | 0 | class HashPresenceAwareDummyCallback : public DummyCallback { |
384 | 0 | public: |
385 | 0 | explicit HashPresenceAwareDummyCallback(bool* hash_present) : hash_present_(hash_present) {} |
386 | |
|
387 | 0 | void SetHash(const bool hash_present, const DocKeyHash hash = 0) const { |
388 | 0 | *hash_present_ = hash_present; |
389 | 0 | } |
390 | |
|
391 | 0 | private: |
392 | 0 | bool* hash_present_; |
393 | 0 | }; |
394 | |
|
395 | 0 | auto initial_begin = slice.cdata(); |
396 | 0 | DocKeyDecoder decoder(slice); |
397 | 0 | bool hash_present = false; |
398 | 0 | HashPresenceAwareDummyCallback callback(&hash_present); |
399 | 0 | RETURN_NOT_OK(DoDecode(&decoder, part, AllowSpecial::kFalse, callback)); |
400 | | // TODO: left_input() should be called remaining_input(). |
401 | 0 | return std::make_pair(decoder.left_input().cdata() - initial_begin, hash_present); |
402 | 0 | } |
403 | | |
404 | | Result<std::pair<size_t, size_t>> DocKey::EncodedHashPartAndDocKeySizes( |
405 | | Slice slice, |
406 | 44.2M | AllowSpecial allow_special) { |
407 | 44.2M | auto initial_begin = slice.data(); |
408 | 44.2M | DocKeyDecoder decoder(slice); |
409 | 44.2M | EncodedSizesCallback callback(&decoder); |
410 | 44.2M | RETURN_NOT_OK(DoDecode( |
411 | 44.2M | &decoder, DocKeyPart::kWholeDocKey, allow_special, callback)); |
412 | 44.2M | return std::make_pair(callback.range_group_start() - initial_begin, |
413 | 44.2M | decoder.left_input().data() - initial_begin); |
414 | 44.2M | } |
415 | | |
416 | | class DocKey::DecodeFromCallback { |
417 | | public: |
418 | 133M | explicit DecodeFromCallback(DocKey* key) : key_(key) { |
419 | 133M | } |
420 | | |
421 | 67.6M | std::vector<PrimitiveValue>* hashed_group() const { |
422 | 67.6M | return &key_->hashed_group_; |
423 | 67.6M | } |
424 | | |
425 | 126M | std::vector<PrimitiveValue>* range_group() const { |
426 | 126M | return &key_->range_group_; |
427 | 126M | } |
428 | | |
429 | 133M | void SetHash(bool present, DocKeyHash hash = 0) const { |
430 | 133M | key_->hash_present_ = present; |
431 | 133M | if (present) { |
432 | 72.2M | key_->hash_ = hash; |
433 | 72.2M | } |
434 | 133M | } |
435 | 54.3M | void SetCoTableId(const Uuid cotable_id) const { |
436 | 54.3M | key_->cotable_id_ = cotable_id; |
437 | 54.3M | } |
438 | | |
439 | 452 | void SetPgTableId(const PgTableOid pgtable_id) const { |
440 | 452 | key_->pgtable_id_ = pgtable_id; |
441 | 452 | } |
442 | | |
443 | | private: |
444 | | DocKey* key_; |
445 | | }; |
446 | | |
447 | 133M | Status DocKey::DecodeFrom(Slice *slice, DocKeyPart part_to_decode, AllowSpecial allow_special) { |
448 | 133M | Clear(); |
449 | 133M | DocKeyDecoder decoder(*slice); |
450 | 133M | RETURN_NOT_OK(DoDecode(&decoder, part_to_decode, allow_special, DecodeFromCallback(this))); |
451 | 133M | *slice = decoder.left_input(); |
452 | 133M | return Status::OK(); |
453 | 133M | } |
454 | | |
455 | | Result<size_t> DocKey::DecodeFrom( |
456 | 9.34M | const Slice& slice, DocKeyPart part_to_decode, AllowSpecial allow_special) { |
457 | 9.34M | Slice copy = slice; |
458 | 9.34M | RETURN_NOT_OK(DecodeFrom(©, part_to_decode, allow_special)); |
459 | 9.34M | return slice.size() - copy.size(); |
460 | 9.34M | } |
461 | | |
462 | | namespace { |
463 | | |
464 | | // Return limit on number of range components to decode based on part_to_decode and whether hash |
465 | | // component are present in key (hash_present). |
466 | 306M | int MaxRangeComponentsToDecode(const DocKeyPart part_to_decode, const bool hash_present) { |
467 | 306M | switch (part_to_decode) { |
468 | 0 | case DocKeyPart::kUpToId: |
469 | 0 | LOG(FATAL) << "Internal error: unexpected to have DocKeyPart::kUpToId here"; |
470 | 237M | case DocKeyPart::kWholeDocKey: |
471 | 237M | return kNumValuesNoLimit; |
472 | 2.15M | case DocKeyPart::kUpToHashCode: FALLTHROUGH_INTENDED; |
473 | 37.4M | case DocKeyPart::kUpToHash: |
474 | 37.4M | return 0; |
475 | 32.2M | case DocKeyPart::kUpToHashOrFirstRange: |
476 | 29.5M | return hash_present ? 0 : 1; |
477 | 0 | } |
478 | 0 | FATAL_INVALID_ENUM_VALUE(DocKeyPart, part_to_decode); |
479 | 0 | } |
480 | | |
481 | | } // namespace |
482 | | |
483 | | template<class Callback> |
484 | | yb::Status DocKey::DoDecode(DocKeyDecoder* decoder, |
485 | | DocKeyPart part_to_decode, |
486 | | AllowSpecial allow_special, |
487 | 311M | const Callback& callback) { |
488 | 311M | Uuid cotable_id; |
489 | 311M | PgTableOid pgtable_id; |
490 | 311M | if (VERIFY_RESULT(decoder->DecodeCotableId(&cotable_id))) { |
491 | 87.0M | callback.SetCoTableId(cotable_id); |
492 | 224M | } else if (VERIFY_RESULT(decoder->DecodePgtableId(&pgtable_id))) { |
493 | 1.26k | callback.SetPgTableId(pgtable_id); |
494 | 1.26k | } |
495 | | |
496 | 311M | switch (part_to_decode) { |
497 | 494 | case DocKeyPart::kUpToId: |
498 | 494 | return Status::OK(); |
499 | 6.76M | case DocKeyPart::kUpToHashCode: FALLTHROUGH_INTENDED; |
500 | 42.0M | case DocKeyPart::kUpToHash: FALLTHROUGH_INTENDED; |
501 | 74.2M | case DocKeyPart::kUpToHashOrFirstRange: FALLTHROUGH_INTENDED; |
502 | 311M | case DocKeyPart::kWholeDocKey: |
503 | 311M | uint16_t hash_code; |
504 | 311M | const auto hash_present = VERIFY_RESULT(decoder->DecodeHashCode(&hash_code, allow_special)); |
505 | 311M | if (hash_present) { |
506 | 206M | callback.SetHash(/* present */ true, hash_code); |
507 | 206M | if (part_to_decode == DocKeyPart::kUpToHashCode) { |
508 | 4.60M | return Status::OK(); |
509 | 4.60M | } |
510 | 201M | RETURN_NOT_OK_PREPEND( |
511 | 201M | ConsumePrimitiveValuesFromKey( |
512 | 201M | decoder->mutable_input(), allow_special, callback.hashed_group()), |
513 | 201M | "Error when decoding hashed components of a document key"); |
514 | 105M | } else { |
515 | 105M | callback.SetHash(/* present */ false); |
516 | 105M | } |
517 | 306M | if (decoder->left_input().empty()) { |
518 | 29 | return Status::OK(); |
519 | 29 | } |
520 | | // The rest are range components. |
521 | 306M | const auto max_components_to_decode = |
522 | 306M | MaxRangeComponentsToDecode(part_to_decode, hash_present); |
523 | 306M | if (max_components_to_decode > 0) { |
524 | 240M | RETURN_NOT_OK_PREPEND( |
525 | 240M | ConsumePrimitiveValuesFromKey( |
526 | 240M | decoder->mutable_input(), allow_special, callback.range_group(), |
527 | 240M | max_components_to_decode), |
528 | 239M | "Error when decoding range components of a document key"); |
529 | 239M | } |
530 | 306M | return Status::OK(); |
531 | 0 | } |
532 | 0 | FATAL_INVALID_ENUM_VALUE(DocKeyPart, part_to_decode); |
533 | 0 | } doc_key.cc:_ZN2yb5docdb6DocKey8DoDecodeINS0_12_GLOBAL__N_120DecodeDocKeyCallbackEEENS_6StatusEPNS0_13DocKeyDecoderENS0_10DocKeyPartENS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEERKT_ Line | Count | Source | 487 | 27.1M | const Callback& callback) { | 488 | 27.1M | Uuid cotable_id; | 489 | 27.1M | PgTableOid pgtable_id; | 490 | 27.1M | if (VERIFY_RESULT(decoder->DecodeCotableId(&cotable_id))) { | 491 | 35.7k | callback.SetCoTableId(cotable_id); | 492 | 27.1M | } else if (VERIFY_RESULT(decoder->DecodePgtableId(&pgtable_id))) { | 493 | 0 | callback.SetPgTableId(pgtable_id); | 494 | 0 | } | 495 | | | 496 | 27.1M | switch (part_to_decode) { | 497 | 0 | case DocKeyPart::kUpToId: | 498 | 0 | return Status::OK(); | 499 | 0 | case DocKeyPart::kUpToHashCode: FALLTHROUGH_INTENDED; | 500 | 0 | case DocKeyPart::kUpToHash: FALLTHROUGH_INTENDED; | 501 | 0 | case DocKeyPart::kUpToHashOrFirstRange: FALLTHROUGH_INTENDED; | 502 | 27.0M | case DocKeyPart::kWholeDocKey: | 503 | 27.0M | uint16_t hash_code; | 504 | 27.0M | const auto hash_present = VERIFY_RESULT(decoder->DecodeHashCode(&hash_code, allow_special)); | 505 | 27.0M | if (hash_present) { | 506 | 25.2M | callback.SetHash(/* present */ true, hash_code); | 507 | 25.2M | if (part_to_decode == DocKeyPart::kUpToHashCode) { | 508 | 0 | return Status::OK(); | 509 | 0 | } | 510 | 25.2M | RETURN_NOT_OK_PREPEND( | 511 | 25.2M | ConsumePrimitiveValuesFromKey( | 512 | 25.2M | decoder->mutable_input(), allow_special, callback.hashed_group()), | 513 | 25.2M | "Error when decoding hashed components of a document key"); | 514 | 1.82M | } else { | 515 | 1.82M | callback.SetHash(/* present */ false); | 516 | 1.82M | } | 517 | 27.0M | if (decoder->left_input().empty()) { | 518 | 0 | return Status::OK(); | 519 | 0 | } | 520 | | // The rest are range components. 
| 521 | 27.0M | const auto max_components_to_decode = | 522 | 27.0M | MaxRangeComponentsToDecode(part_to_decode, hash_present); | 523 | 27.1M | if (max_components_to_decode > 0) { | 524 | 27.1M | RETURN_NOT_OK_PREPEND( | 525 | 27.1M | ConsumePrimitiveValuesFromKey( | 526 | 27.1M | decoder->mutable_input(), allow_special, callback.range_group(), | 527 | 27.1M | max_components_to_decode), | 528 | 27.1M | "Error when decoding range components of a document key"); | 529 | 27.1M | } | 530 | 27.0M | return Status::OK(); | 531 | 0 | } | 532 | 0 | FATAL_INVALID_ENUM_VALUE(DocKeyPart, part_to_decode); | 533 | 0 | } |
doc_key.cc:_ZN2yb5docdb6DocKey8DoDecodeINS0_12_GLOBAL__N_113DummyCallbackEEENS_6StatusEPNS0_13DocKeyDecoderENS0_10DocKeyPartENS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEERKT_ Line | Count | Source | 487 | 106M | const Callback& callback) { | 488 | 106M | Uuid cotable_id; | 489 | 106M | PgTableOid pgtable_id; | 490 | 106M | if (VERIFY_RESULT(decoder->DecodeCotableId(&cotable_id))) { | 491 | 18.8M | callback.SetCoTableId(cotable_id); | 492 | 88.1M | } else if (VERIFY_RESULT(decoder->DecodePgtableId(&pgtable_id))) { | 493 | 649 | callback.SetPgTableId(pgtable_id); | 494 | 649 | } | 495 | | | 496 | 106M | switch (part_to_decode) { | 497 | 436 | case DocKeyPart::kUpToId: | 498 | 436 | return Status::OK(); | 499 | 46 | case DocKeyPart::kUpToHashCode: FALLTHROUGH_INTENDED; | 500 | 35.2M | case DocKeyPart::kUpToHash: FALLTHROUGH_INTENDED; | 501 | 67.4M | case DocKeyPart::kUpToHashOrFirstRange: FALLTHROUGH_INTENDED; | 502 | 107M | case DocKeyPart::kWholeDocKey: | 503 | 107M | uint16_t hash_code; | 504 | 107M | const auto hash_present = VERIFY_RESULT(decoder->DecodeHashCode(&hash_code, allow_special)); | 505 | 107M | if (hash_present) { | 506 | 79.7M | callback.SetHash(/* present */ true, hash_code); | 507 | 79.7M | if (part_to_decode == DocKeyPart::kUpToHashCode) { | 508 | 46 | return Status::OK(); | 509 | 46 | } | 510 | 79.7M | RETURN_NOT_OK_PREPEND( | 511 | 79.7M | ConsumePrimitiveValuesFromKey( | 512 | 79.7M | decoder->mutable_input(), allow_special, callback.hashed_group()), | 513 | 79.7M | "Error when decoding hashed components of a document key"); | 514 | 27.3M | } else { | 515 | 27.3M | callback.SetHash(/* present */ false); | 516 | 27.3M | } | 517 | 107M | if (decoder->left_input().empty()) { | 518 | 29 | return Status::OK(); | 519 | 29 | } | 520 | | // The rest are range components. 
| 521 | 107M | const auto max_components_to_decode = | 522 | 107M | MaxRangeComponentsToDecode(part_to_decode, hash_present); | 523 | 107M | if (max_components_to_decode > 0) { | 524 | 42.3M | RETURN_NOT_OK_PREPEND( | 525 | 42.3M | ConsumePrimitiveValuesFromKey( | 526 | 42.3M | decoder->mutable_input(), allow_special, callback.range_group(), | 527 | 42.3M | max_components_to_decode), | 528 | 42.3M | "Error when decoding range components of a document key"); | 529 | 42.3M | } | 530 | 107M | return Status::OK(); | 531 | 0 | } | 532 | 0 | FATAL_INVALID_ENUM_VALUE(DocKeyPart, part_to_decode); | 533 | 0 | } |
Unexecuted instantiation: doc_key.cc:_ZN2yb5docdb6DocKey8DoDecodeIZNS1_25EncodedSizeAndHashPresentENS_5SliceENS0_10DocKeyPartEE30HashPresenceAwareDummyCallbackEENS_6StatusEPNS0_13DocKeyDecoderES4_NS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEERKT_ doc_key.cc:_ZN2yb5docdb6DocKey8DoDecodeINS0_12_GLOBAL__N_120EncodedSizesCallbackEEENS_6StatusEPNS0_13DocKeyDecoderENS0_10DocKeyPartENS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEERKT_ Line | Count | Source | 487 | 44.1M | const Callback& callback) { | 488 | 44.1M | Uuid cotable_id; | 489 | 44.1M | PgTableOid pgtable_id; | 490 | 44.1M | if (VERIFY_RESULT(decoder->DecodeCotableId(&cotable_id))) { | 491 | 13.8M | callback.SetCoTableId(cotable_id); | 492 | 30.3M | } else if (VERIFY_RESULT(decoder->DecodePgtableId(&pgtable_id))) { | 493 | 167 | callback.SetPgTableId(pgtable_id); | 494 | 167 | } | 495 | | | 496 | 44.1M | switch (part_to_decode) { | 497 | 0 | case DocKeyPart::kUpToId: | 498 | 0 | return Status::OK(); | 499 | 0 | case DocKeyPart::kUpToHashCode: FALLTHROUGH_INTENDED; | 500 | 0 | case DocKeyPart::kUpToHash: FALLTHROUGH_INTENDED; | 501 | 0 | case DocKeyPart::kUpToHashOrFirstRange: FALLTHROUGH_INTENDED; | 502 | 44.2M | case DocKeyPart::kWholeDocKey: | 503 | 44.2M | uint16_t hash_code; | 504 | 44.2M | const auto hash_present = VERIFY_RESULT(decoder->DecodeHashCode(&hash_code, allow_special)); | 505 | 44.2M | if (hash_present) { | 506 | 28.8M | callback.SetHash(/* present */ true, hash_code); | 507 | 28.8M | if (part_to_decode == DocKeyPart::kUpToHashCode) { | 508 | 0 | return Status::OK(); | 509 | 0 | } | 510 | 28.8M | RETURN_NOT_OK_PREPEND( | 511 | 28.8M | ConsumePrimitiveValuesFromKey( | 512 | 28.8M | decoder->mutable_input(), allow_special, callback.hashed_group()), | 513 | 28.8M | "Error when decoding hashed components of a document key"); | 514 | 15.3M | } else { | 515 | 15.3M | callback.SetHash(/* present */ false); | 516 | 15.3M | } | 517 | 44.2M | if (decoder->left_input().empty()) { | 518 | 0 | return 
Status::OK(); | 519 | 0 | } | 520 | | // The rest are range components. | 521 | 44.2M | const auto max_components_to_decode = | 522 | 44.2M | MaxRangeComponentsToDecode(part_to_decode, hash_present); | 523 | 44.2M | if (max_components_to_decode > 0) { | 524 | 44.2M | RETURN_NOT_OK_PREPEND( | 525 | 44.2M | ConsumePrimitiveValuesFromKey( | 526 | 44.2M | decoder->mutable_input(), allow_special, callback.range_group(), | 527 | 44.2M | max_components_to_decode), | 528 | 44.2M | "Error when decoding range components of a document key"); | 529 | 44.2M | } | 530 | 44.2M | return Status::OK(); | 531 | 0 | } | 532 | 0 | FATAL_INVALID_ENUM_VALUE(DocKeyPart, part_to_decode); | 533 | 0 | } |
_ZN2yb5docdb6DocKey8DoDecodeINS1_18DecodeFromCallbackEEENS_6StatusEPNS0_13DocKeyDecoderENS0_10DocKeyPartENS_17StronglyTypedBoolINS0_16AllowSpecial_TagEEERKT_ Line | Count | Source | 487 | 133M | const Callback& callback) { | 488 | 133M | Uuid cotable_id; | 489 | 133M | PgTableOid pgtable_id; | 490 | 133M | if (VERIFY_RESULT(decoder->DecodeCotableId(&cotable_id))) { | 491 | 54.3M | callback.SetCoTableId(cotable_id); | 492 | 78.8M | } else if (VERIFY_RESULT(decoder->DecodePgtableId(&pgtable_id))) { | 493 | 452 | callback.SetPgTableId(pgtable_id); | 494 | 452 | } | 495 | | | 496 | 133M | switch (part_to_decode) { | 497 | 58 | case DocKeyPart::kUpToId: | 498 | 58 | return Status::OK(); | 499 | 6.76M | case DocKeyPart::kUpToHashCode: FALLTHROUGH_INTENDED; | 500 | 6.76M | case DocKeyPart::kUpToHash: FALLTHROUGH_INTENDED; | 501 | 6.76M | case DocKeyPart::kUpToHashOrFirstRange: FALLTHROUGH_INTENDED; | 502 | 133M | case DocKeyPart::kWholeDocKey: | 503 | 133M | uint16_t hash_code; | 504 | 133M | const auto hash_present = VERIFY_RESULT(decoder->DecodeHashCode(&hash_code, allow_special)); | 505 | 133M | if (hash_present) { | 506 | 72.2M | callback.SetHash(/* present */ true, hash_code); | 507 | 72.2M | if (part_to_decode == DocKeyPart::kUpToHashCode) { | 508 | 4.60M | return Status::OK(); | 509 | 4.60M | } | 510 | 67.6M | RETURN_NOT_OK_PREPEND( | 511 | 67.6M | ConsumePrimitiveValuesFromKey( | 512 | 67.6M | decoder->mutable_input(), allow_special, callback.hashed_group()), | 513 | 67.6M | "Error when decoding hashed components of a document key"); | 514 | 60.8M | } else { | 515 | 60.8M | callback.SetHash(/* present */ false); | 516 | 60.8M | } | 517 | 128M | if (decoder->left_input().empty()) { | 518 | 0 | return Status::OK(); | 519 | 0 | } | 520 | | // The rest are range components. 
| 521 | 128M | const auto max_components_to_decode = | 522 | 128M | MaxRangeComponentsToDecode(part_to_decode, hash_present); | 523 | 128M | if (max_components_to_decode > 0) { | 524 | 126M | RETURN_NOT_OK_PREPEND( | 525 | 126M | ConsumePrimitiveValuesFromKey( | 526 | 126M | decoder->mutable_input(), allow_special, callback.range_group(), | 527 | 126M | max_components_to_decode), | 528 | 126M | "Error when decoding range components of a document key"); | 529 | 126M | } | 530 | 128M | return Status::OK(); | 531 | 0 | } | 532 | 0 | FATAL_INVALID_ENUM_VALUE(DocKeyPart, part_to_decode); | 533 | 0 | } |
|
534 | | |
535 | 2.12M | yb::Status DocKey::FullyDecodeFrom(const rocksdb::Slice& slice) { |
536 | 2.12M | rocksdb::Slice mutable_slice = slice; |
537 | 2.12M | Status status = DecodeFrom(&mutable_slice); |
538 | 2.12M | if (!mutable_slice.empty()) { |
539 | 0 | return STATUS_SUBSTITUTE(InvalidArgument, |
540 | 0 | "Expected all bytes of the slice to be decoded into DocKey, found $0 extra bytes", |
541 | 0 | mutable_slice.size()); |
542 | 0 | } |
543 | 2.12M | return status; |
544 | 2.12M | } |
545 | | |
546 | | namespace { |
547 | | |
548 | | // We need a special implementation of converting a vector to string because we need to pass the |
549 | | // auto_decode_keys flag to PrimitiveValue::ToString. |
550 | | void AppendVectorToString( |
551 | | std::string* dest, |
552 | | const std::vector<PrimitiveValue>& vec, |
553 | 540k | AutoDecodeKeys auto_decode_keys) { |
554 | 540k | bool need_comma = false; |
555 | 360k | for (const auto& pv : vec) { |
556 | 360k | if (need_comma) { |
557 | 72 | dest->append(", "); |
558 | 72 | } |
559 | 360k | need_comma = true; |
560 | 360k | dest->append(pv.ToString(auto_decode_keys)); |
561 | 360k | } |
562 | 540k | } |
563 | | |
564 | | void AppendVectorToStringWithBrackets( |
565 | | std::string* dest, |
566 | | const std::vector<PrimitiveValue>& vec, |
567 | 360k | AutoDecodeKeys auto_decode_keys) { |
568 | 360k | dest->push_back('['); |
569 | 360k | AppendVectorToString(dest, vec, auto_decode_keys); |
570 | 360k | dest->push_back(']'); |
571 | 360k | } |
572 | | |
573 | | } // namespace |
574 | | |
575 | 180k | string DocKey::ToString(AutoDecodeKeys auto_decode_keys) const { |
576 | 180k | string result = "DocKey("; |
577 | 180k | if (!cotable_id_.IsNil()) { |
578 | 0 | result += "CoTableId="; |
579 | 0 | result += cotable_id_.ToString(); |
580 | 0 | result += ", "; |
581 | 180k | } else if (pgtable_id_ > 0) { |
582 | 0 | result += "PgTableId="; |
583 | 0 | result += std::to_string(pgtable_id_); |
584 | 0 | result += ", "; |
585 | 0 | } |
586 | | |
587 | 180k | if (hash_present_) { |
588 | 180k | result += StringPrintf("0x%04x", hash_); |
589 | 180k | result += ", "; |
590 | 180k | } |
591 | | |
592 | 180k | AppendVectorToStringWithBrackets(&result, hashed_group_, auto_decode_keys); |
593 | 180k | result += ", "; |
594 | 180k | AppendVectorToStringWithBrackets(&result, range_group_, auto_decode_keys); |
595 | 180k | result.push_back(')'); |
596 | 180k | return result; |
597 | 180k | } |
598 | | |
599 | 0 | bool DocKey::operator ==(const DocKey& other) const { |
600 | 0 | return cotable_id_ == other.cotable_id_ && |
601 | 0 | pgtable_id_ == other.pgtable_id_ && |
602 | 0 | HashedComponentsEqual(other) && |
603 | 0 | range_group_ == other.range_group_; |
604 | 0 | } |
605 | | |
606 | 0 | bool DocKey::HashedComponentsEqual(const DocKey& other) const { |
607 | 0 | return hash_present_ == other.hash_present_ && |
608 | | // Only compare hashes and hashed groups if the hash presence flag is set. |
609 | 0 | (!hash_present_ || (hash_ == other.hash_ && hashed_group_ == other.hashed_group_)); |
610 | 0 | } |
611 | | |
// Appends one more range (non-hashed) component to the end of the range group.
void DocKey::AddRangeComponent(const PrimitiveValue& val) {
  range_group_.push_back(val);
}
615 | | |
// Replaces the range component at position idx.
// idx must be a valid index into the existing range group (debug-checked only).
void DocKey::SetRangeComponent(const PrimitiveValue& val, int idx) {
  DCHECK_LT(idx, range_group_.size());
  range_group_[idx] = val;
}
620 | | |
621 | 30 | int DocKey::CompareTo(const DocKey& other) const { |
622 | 30 | int result = CompareUsingLessThan(cotable_id_, other.cotable_id_); |
623 | 30 | if (result != 0) return result; |
624 | | |
625 | 30 | result = CompareUsingLessThan(pgtable_id_, other.pgtable_id_); |
626 | 30 | if (result != 0) return result; |
627 | | |
628 | 30 | result = CompareUsingLessThan(hash_present_, other.hash_present_); |
629 | 30 | if (result != 0) return result; |
630 | | |
631 | 30 | if (hash_present_) { |
632 | 0 | result = CompareUsingLessThan(hash_, other.hash_); |
633 | 0 | if (result != 0) return result; |
634 | 30 | } |
635 | | |
636 | 30 | result = CompareVectors(hashed_group_, other.hashed_group_); |
637 | 30 | if (result != 0) return result; |
638 | | |
639 | 30 | return CompareVectors(range_group_, other.range_group_); |
640 | 30 | } |
641 | | |
642 | 307k | DocKey DocKey::FromRedisKey(uint16_t hash, const string &key) { |
643 | 307k | DocKey new_doc_key; |
644 | 307k | new_doc_key.hash_present_ = true; |
645 | 307k | new_doc_key.hash_ = hash; |
646 | 307k | new_doc_key.hashed_group_.emplace_back(key); |
647 | 307k | return new_doc_key; |
648 | 307k | } |
649 | | |
// Directly produces the encoded form of a Redis DocKey (hash code + one string hashed
// component, no range components), bypassing DocKey construction. In debug builds the result
// is verified against the equivalent FromRedisKey(hash, key).Encode() path.
KeyBytes DocKey::EncodedFromRedisKey(uint16_t hash, const std::string &key) {
  KeyBytes result;
  result.AppendValueType(ValueType::kUInt16Hash);
  result.AppendUInt16(hash);
  result.AppendValueType(ValueType::kString);
  result.AppendString(key);
  result.AppendValueType(ValueType::kGroupEnd);  // End of the hashed group.
  result.AppendValueType(ValueType::kGroupEnd);  // End of the (empty) range group.
  DCHECK_EQ(result, FromRedisKey(hash, key).Encode());
  return result;
}
661 | | |
662 | 103 | std::string DocKey::DebugSliceToString(Slice slice) { |
663 | 103 | DocKey key; |
664 | 103 | auto decoded_size = key.DecodeFrom(slice, DocKeyPart::kWholeDocKey, AllowSpecial::kTrue); |
665 | 103 | if (!decoded_size.ok()) { |
666 | 16 | return decoded_size.status().ToString() + ": " + slice.ToDebugHexString(); |
667 | 16 | } |
668 | 87 | slice.remove_prefix(*decoded_size); |
669 | 87 | auto result = key.ToString(); |
670 | 87 | if (!slice.empty()) { |
671 | 0 | result += " + "; |
672 | 0 | result += slice.ToDebugHexString(); |
673 | 0 | } |
674 | 87 | return result; |
675 | 87 | } |
676 | | |
677 | 0 | bool DocKey::BelongsTo(const Schema& schema) const { |
678 | 0 | if (!cotable_id_.IsNil()) { |
679 | 0 | return cotable_id_ == schema.cotable_id(); |
680 | 0 | } else if (pgtable_id_ > 0) { |
681 | 0 | return pgtable_id_ == schema.pgtable_id(); |
682 | 0 | } |
683 | 0 | return schema.cotable_id().IsNil() && schema.pgtable_id() == 0; |
684 | 0 | } |
685 | | |
686 | | // ------------------------------------------------------------------------------------------------ |
687 | | // SubDocKey |
688 | | // ------------------------------------------------------------------------------------------------ |
689 | | |
// Encodes the doc key, then each subkey in order; a valid hybrid time is appended last,
// and only when include_hybrid_time is set.
KeyBytes SubDocKey::DoEncode(bool include_hybrid_time) const {
  KeyBytes key_bytes = doc_key_.Encode();
  for (const auto& subkey : subkeys_) {
    subkey.AppendToKey(&key_bytes);
  }
  if (has_hybrid_time() && include_hybrid_time) {
    AppendDocHybridTime(doc_ht_, &key_bytes);
  }
  return key_bytes;
}
700 | | |
701 | | namespace { |
702 | | |
// Callback for SubDocKey::DoDecode used by SubDocKey::PartiallyDecode: instead of
// materializing decoded components, it records component-boundary slices into `out_`.
class DecodeSubDocKeyCallback {
 public:
  explicit DecodeSubDocKeyCallback(boost::container::small_vector_base<Slice>* out) : out_(out) {}

  CHECKED_STATUS DecodeDocKey(Slice* slice) const {
    return DocKey::PartiallyDecode(slice, out_);
  }

  // We don't need subkeys in partial decoding.
  PrimitiveValue* AddSubkey() const {
    return nullptr;
  }

  DocHybridTime& doc_hybrid_time() const {
    return doc_hybrid_time_;
  }

  // Records the raw encoded hybrid time slice as the final component.
  void DocHybridTimeSlice(Slice slice) const {
    out_->push_back(slice);
  }
 private:
  boost::container::small_vector_base<Slice>* out_;
  // Scratch storage for the decoded hybrid time; only its encoded slice is kept (see above).
  mutable DocHybridTime doc_hybrid_time_;
};
727 | | |
728 | | } // namespace |
729 | | |
// Decodes the key into component-boundary slices appended to `out`, without building
// PrimitiveValues. A hybrid time suffix is required (HybridTimeRequired::kTrue).
Status SubDocKey::PartiallyDecode(Slice* slice, boost::container::small_vector_base<Slice>* out) {
  CHECK_NOTNULL(out);
  return DoDecode(slice, HybridTimeRequired::kTrue, AllowSpecial::kFalse,
                  DecodeSubDocKeyCallback(out));
}
735 | | |
// Callback for SubDocKey::DoDecode that materializes decoded components directly into the
// target SubDocKey instance.
class SubDocKey::DecodeCallback {
 public:
  explicit DecodeCallback(SubDocKey* key) : key_(key) {}

  CHECKED_STATUS DecodeDocKey(Slice* slice) const {
    return key_->doc_key_.DecodeFrom(slice);
  }

  // Appends a new empty subkey and returns a pointer for the decoder to fill in.
  PrimitiveValue* AddSubkey() const {
    key_->subkeys_.emplace_back();
    return &key_->subkeys_.back();
  }

  DocHybridTime& doc_hybrid_time() const {
    return key_->doc_ht_;
  }

  // The raw hybrid time slice is not needed; doc_hybrid_time() already received the value.
  void DocHybridTimeSlice(Slice slice) const {
  }
 private:
  SubDocKey* key_;
};
758 | | |
// Decodes this SubDocKey from `slice`, advancing the slice past the consumed bytes.
// Any previously held state is cleared first.
Status SubDocKey::DecodeFrom(
    Slice* slice, HybridTimeRequired require_hybrid_time, AllowSpecial allow_special) {
  Clear();
  return DoDecode(slice, require_hybrid_time, allow_special, DecodeCallback(this));
}
764 | | |
// Convenience wrapper: full decode where a trailing hybrid time may be absent.
Status SubDocKey::FullyDecodeFromKeyWithOptionalHybridTime(const rocksdb::Slice& slice) {
  return FullyDecodeFrom(slice, HybridTimeRequired::kFalse);
}
768 | | |
// Consumes one subkey from the front of `slice` if present, discarding the decoded value
// (DummyCallback — defined earlier in this file — presumably provides a null sink; verify).
// Returns whether a subkey was consumed.
Result<bool> SubDocKey::DecodeSubkey(Slice* slice) {
  return DecodeSubkey(slice, DummyCallback());
}
772 | | |
// Attempts to decode one subkey from the front of `slice`. Returns true (consuming the
// subkey) when the slice is non-empty and its first byte does not start a hybrid time
// section; otherwise returns false without consuming anything.
template<class Callback>
Result<bool> SubDocKey::DecodeSubkey(Slice* slice, const Callback& callback) {
  if (!slice->empty() && *slice->data() != ValueTypeAsChar::kHybridTime) {
    // callback.AddSubkey() may return nullptr (partial decoding), in which case the decoded
    // value is discarded by PrimitiveValue::DecodeKey.
    RETURN_NOT_OK(PrimitiveValue::DecodeKey(slice, callback.AddSubkey()));
    return true;
  }
  return false;
}
781 | | |
// Shared decoding engine behind all SubDocKey decode entry points. The Callback decides what
// gets materialized: DecodeCallback fills a real SubDocKey, DecodeSubDocKeyCallback only
// records component boundaries. Layout decoded: doc key, then zero or more subkeys, then an
// optional type-prefixed hybrid time.
template<class Callback>
Status SubDocKey::DoDecode(rocksdb::Slice* slice,
                           const HybridTimeRequired require_hybrid_time,
                           AllowSpecial allow_special,
                           const Callback& callback) {
  // The two flags are mutually exclusive: a special value type can appear where the hybrid
  // time would be required.
  if (allow_special && require_hybrid_time) {
    return STATUS(NotSupported,
                  "Not supported to have both require_hybrid_time and allow_special");
  }
  // Kept only so error messages can show the full original key.
  const rocksdb::Slice original_bytes(*slice);

  RETURN_NOT_OK(callback.DecodeDocKey(slice));
  for (;;) {
    // When special value types are allowed, stop as soon as one is seen and report an
    // invalid (absent) hybrid time.
    if (allow_special && !slice->empty() &&
        IsSpecialValueType(static_cast<ValueType>(slice->cdata()[0]))) {
      callback.doc_hybrid_time() = DocHybridTime::kInvalid;
      return Status::OK();
    }
    auto decode_result = DecodeSubkey(slice, callback);
    RETURN_NOT_OK_PREPEND(
        decode_result,
        Substitute("While decoding SubDocKey $0", ToShortDebugStr(original_bytes)));
    if (!decode_result.get()) {
      break;
    }
  }
  if (slice->empty()) {
    if (!require_hybrid_time) {
      callback.doc_hybrid_time() = DocHybridTime::kInvalid;
      return Status::OK();
    }
    return STATUS_SUBSTITUTE(
        Corruption,
        "Found too few bytes in the end of a SubDocKey for a type-prefixed hybrid_time: $0",
        ToShortDebugStr(*slice));
  }

  // The reason the following is not handled as a Status is that the logic above (loop + emptiness
  // check) should guarantee this is the only possible case left.
  DCHECK_EQ(ValueType::kHybridTime, DecodeValueType(*slice));
  slice->consume_byte();

  // Decode the hybrid time and hand its raw encoded bytes to the callback as well.
  auto begin = slice->data();
  RETURN_NOT_OK(ConsumeHybridTimeFromKey(slice, &callback.doc_hybrid_time()));
  callback.DocHybridTimeSlice(Slice(begin, slice->data()));

  return Status::OK();
}
830 | | |
831 | | Status SubDocKey::FullyDecodeFrom(const rocksdb::Slice& slice, |
832 | 121M | HybridTimeRequired require_hybrid_time) { |
833 | 121M | rocksdb::Slice mutable_slice = slice; |
834 | 121M | RETURN_NOT_OK(DecodeFrom(&mutable_slice, require_hybrid_time)); |
835 | 121M | if (!mutable_slice.empty()) { |
836 | 0 | return STATUS_SUBSTITUTE(InvalidArgument, |
837 | 0 | "Expected all bytes of the slice to be decoded into SubDocKey, found $0 extra bytes: $1", |
838 | 0 | mutable_slice.size(), mutable_slice.ToDebugHexString()); |
839 | 0 | } |
840 | 121M | return Status::OK(); |
841 | 121M | } |
842 | | |
// Computes byte offsets (from the start of `slice`) of successive key prefixes: the hashed
// part (when present), each range component (the last one extended to include the range
// group-end byte), then each subkey. Offsets are appended to `out`.
Status SubDocKey::DecodePrefixLengths(
    Slice slice, boost::container::small_vector_base<size_t>* out) {
  auto begin = slice.data();
  auto hashed_part_size = VERIFY_RESULT(DocKey::EncodedSize(slice, DocKeyPart::kUpToHash));
  if (hashed_part_size != 0) {
    slice.remove_prefix(hashed_part_size);
    out->push_back(hashed_part_size);
  }
  // One offset per range component.
  while (VERIFY_RESULT(ConsumePrimitiveValueFromKey(&slice))) {
    out->push_back(slice.data() - begin);
  }
  if (!out->empty()) {
    // The byte right after the last recorded prefix must close the range group.
    if (begin[out->back()] != ValueTypeAsChar::kGroupEnd) {
      return STATUS_FORMAT(Corruption, "Range keys group end expected at $0 in $1",
                           out->back(), Slice(begin, slice.end()).ToDebugHexString());
    }
    ++out->back();  // Add range key group end to last prefix
  }
  // One offset per subkey, until the hybrid time section (or end of slice).
  while (VERIFY_RESULT(SubDocKey::DecodeSubkey(&slice))) {
    out->push_back(slice.data() - begin);
  }

  return Status::OK();
}
867 | | |
// Computes byte offsets of the ID prefix, the whole doc key, and each subkey, appending them
// to `out`. Supports incremental use: a non-empty `out` is treated as offsets already
// computed for this key, and decoding resumes after the last one.
Status SubDocKey::DecodeDocKeyAndSubKeyEnds(
    Slice slice, boost::container::small_vector_base<size_t>* out) {
  auto begin = slice.data();
  if (out->empty()) {
    auto id_size = VERIFY_RESULT(DocKey::EncodedSize(slice, DocKeyPart::kUpToId));
    out->push_back(id_size);
  }
  if (out->size() == 1) {
    auto id_size = out->front();
    SCHECK_GE(slice.size(), id_size + 1, Corruption,
              Format("Cannot have exclusively ID in key $0", slice.ToDebugHexString()));
    // Identify table tombstone.
    if (slice[0] == ValueTypeAsChar::kPgTableOid && slice[id_size] == ValueTypeAsChar::kGroupEnd) {
      SCHECK_GE(slice.size(), id_size + 2, Corruption,
                Format("Space for kHybridTime expected in key $0", slice.ToDebugHexString()));
      SCHECK_EQ(slice[id_size + 1], ValueTypeAsChar::kHybridTime, Corruption,
                Format("Hybrid time expected in key $0", slice.ToDebugHexString()));
      // Consume kGroupEnd without pushing to out because the empty key of a table tombstone
      // shouldn't count as an end.
      slice.remove_prefix(id_size + 1);
    } else {
      auto doc_key_size = VERIFY_RESULT(DocKey::EncodedSize(slice, DocKeyPart::kWholeDocKey));
      slice.remove_prefix(doc_key_size);
      out->push_back(doc_key_size);
    }
  } else {
    // Offsets beyond the ID were computed previously; skip past the last known end.
    slice.remove_prefix(out->back());
  }
  // One offset per remaining subkey.
  while (VERIFY_RESULT(SubDocKey::DecodeSubkey(&slice))) {
    out->push_back(slice.data() - begin);
  }

  return Status::OK();
}
902 | | |
903 | 5 | std::string SubDocKey::DebugSliceToString(Slice slice) { |
904 | 5 | auto r = DebugSliceToStringAsResult(slice); |
905 | 5 | if (r.ok()) { |
906 | 5 | return r.get(); |
907 | 5 | } |
908 | 0 | return r.status().ToString(); |
909 | 0 | } |
910 | | |
911 | 5 | Result<std::string> SubDocKey::DebugSliceToStringAsResult(Slice slice) { |
912 | 5 | SubDocKey key; |
913 | 5 | auto status = key.DecodeFrom(&slice, HybridTimeRequired::kFalse, AllowSpecial::kTrue); |
914 | 5 | if (status.ok()) { |
915 | 5 | if (slice.empty()) { |
916 | 5 | return key.ToString(); |
917 | 5 | } |
918 | 0 | return key.ToString() + "+" + slice.ToDebugHexString(); |
919 | 0 | } |
920 | 0 | return status; |
921 | 0 | } |
922 | | |
923 | 180k | string SubDocKey::ToString(AutoDecodeKeys auto_decode_keys) const { |
924 | 180k | std::string result("SubDocKey("); |
925 | 180k | result.append(doc_key_.ToString(auto_decode_keys)); |
926 | 180k | result.append(", ["); |
927 | | |
928 | 180k | AppendVectorToString(&result, subkeys_, auto_decode_keys); |
929 | | |
930 | 180k | if (has_hybrid_time()) { |
931 | 180k | if (!subkeys_.empty()) { |
932 | 180k | result.append("; "); |
933 | 180k | } |
934 | 180k | result.append(doc_ht_.ToString()); |
935 | 180k | } |
936 | 180k | result.append("])"); |
937 | 180k | return result; |
938 | 180k | } |
939 | | |
// Initializes this SubDocKey from a DocPath: decodes the path's encoded doc key and copies
// its subkeys. The hybrid time is left untouched.
Status SubDocKey::FromDocPath(const DocPath& doc_path) {
  RETURN_NOT_OK(doc_key_.FullyDecodeFrom(doc_path.encoded_doc_key().AsSlice()));
  subkeys_ = doc_path.subkeys();
  return Status::OK();
}
945 | | |
// Resets this SubDocKey to the default state: empty doc key, no subkeys, invalid hybrid time.
void SubDocKey::Clear() {
  doc_key_.Clear();
  subkeys_.clear();
  doc_ht_ = DocHybridTime::kInvalid;
}
951 | | |
// Returns true if `prefix` is a prefix of this key: same doc key, and the prefix's subkeys
// form a leading subsequence of ours. A prefix carrying a hybrid time must match this key
// entirely (see the comment below).
bool SubDocKey::StartsWith(const SubDocKey& prefix) const {
  return doc_key_ == prefix.doc_key_ &&
         // Subkeys precede the hybrid_time field in the encoded representation, so the hybrid_time
         // either has to be undefined in the prefix, or the entire key must match, including
         // subkeys and the hybrid_time (in this case the prefix is the same as this key).
         (!prefix.has_hybrid_time() ||
          (doc_ht_ == prefix.doc_ht_ && prefix.num_subkeys() == num_subkeys())) &&
         prefix.num_subkeys() <= num_subkeys() &&
         // std::mismatch finds the first difference between two sequences. Prior to C++14, the
         // behavior is undefined if the second range is shorter than the first range, so we make
         // sure the potentially shorter range is first.
         std::mismatch(
             prefix.subkeys_.begin(), prefix.subkeys_.end(), subkeys_.begin()
         ).first == prefix.subkeys_.end();
}
967 | | |
968 | 0 | bool SubDocKey::operator==(const SubDocKey& other) const { |
969 | 0 | if (doc_key_ != other.doc_key_ || |
970 | 0 | subkeys_ != other.subkeys_) |
971 | 0 | return false; |
972 | | |
973 | 0 | const bool ht_is_valid = doc_ht_.is_valid(); |
974 | 0 | const bool other_ht_is_valid = other.doc_ht_.is_valid(); |
975 | 0 | if (ht_is_valid != other_ht_is_valid) |
976 | 0 | return false; |
977 | 0 | if (ht_is_valid) { |
978 | 0 | return doc_ht_ == other.doc_ht_; |
979 | 0 | } else { |
980 | | // Both keys don't have a hybrid time. |
981 | 0 | return true; |
982 | 0 | } |
983 | 0 | } |
984 | | |
985 | 0 | int SubDocKey::CompareTo(const SubDocKey& other) const { |
986 | 0 | int result = CompareToIgnoreHt(other); |
987 | 0 | if (result != 0) return result; |
988 | | |
989 | 0 | const bool ht_is_valid = doc_ht_.is_valid(); |
990 | 0 | const bool other_ht_is_valid = other.doc_ht_.is_valid(); |
991 | 0 | if (ht_is_valid) { |
992 | 0 | if (other_ht_is_valid) { |
993 | | // HybridTimes are sorted in reverse order. |
994 | 0 | return -doc_ht_.CompareTo(other.doc_ht_); |
995 | 0 | } else { |
996 | | // This key has a hybrid time and the other one is identical but lacks the hybrid time, so |
997 | | // this one is greater. |
998 | 0 | return 1; |
999 | 0 | } |
1000 | 0 | } else { |
1001 | 0 | if (other_ht_is_valid) { |
1002 | | // This key is a "prefix" of the other key, which has a hybrid time, so this one is less. |
1003 | 0 | return -1; |
1004 | 0 | } else { |
1005 | | // Neither key has a hybrid time. |
1006 | 0 | return 0; |
1007 | 0 | } |
1008 | 0 | } |
1009 | |
|
1010 | 0 | } |
1011 | | |
1012 | 0 | int SubDocKey::CompareToIgnoreHt(const SubDocKey& other) const { |
1013 | 0 | int result = doc_key_.CompareTo(other.doc_key_); |
1014 | 0 | if (result != 0) return result; |
1015 | | |
1016 | 0 | result = CompareVectors(subkeys_, other.subkeys_); |
1017 | 0 | return result; |
1018 | 0 | } |
1019 | | |
1020 | 1 | string BestEffortDocDBKeyToStr(const KeyBytes &key_bytes) { |
1021 | 1 | rocksdb::Slice mutable_slice(key_bytes.AsSlice()); |
1022 | 1 | SubDocKey subdoc_key; |
1023 | 1 | Status decode_status = subdoc_key.DecodeFrom( |
1024 | 1 | &mutable_slice, HybridTimeRequired::kFalse, AllowSpecial::kTrue); |
1025 | 1 | if (decode_status.ok()) { |
1026 | 1 | ostringstream ss; |
1027 | 1 | if (!subdoc_key.has_hybrid_time() && subdoc_key.num_subkeys() == 0) { |
1028 | | // This is really just a DocKey. |
1029 | 0 | ss << subdoc_key.doc_key().ToString(); |
1030 | 1 | } else { |
1031 | 1 | ss << subdoc_key.ToString(); |
1032 | 1 | } |
1033 | 1 | if (mutable_slice.size() > 0) { |
1034 | 0 | ss << "+" << mutable_slice.ToDebugString(); |
1035 | | // Can append the above status of why we could not decode a SubDocKey, if needed. |
1036 | 0 | } |
1037 | 1 | return ss.str(); |
1038 | 1 | } |
1039 | | |
1040 | | // We could not decode a SubDocKey at all, even without a hybrid_time. |
1041 | 0 | return key_bytes.ToString(); |
1042 | 0 | } |
1043 | | |
// Slice overload: copies the bytes into a KeyBytes and delegates.
std::string BestEffortDocDBKeyToStr(const rocksdb::Slice& slice) {
  return BestEffortDocDBKeyToStr(KeyBytes(slice));
}
1047 | | |
// Returns a key that sorts after this SubDocKey and every key nested under it, by appending
// kMaxByte to the hybrid-time-free encoding.
KeyBytes SubDocKey::AdvanceOutOfSubDoc() const {
  KeyBytes subdoc_key_no_ts = EncodeWithoutHt();
  subdoc_key_no_ts.AppendValueType(ValueType::kMaxByte);
  return subdoc_key_no_ts;
}
1053 | | |
KeyBytes SubDocKey::AdvanceOutOfDocKeyPrefix() const {
  // To construct key bytes that will seek past this DocKey and DocKeys that have the same hash
  // components but add more range components to it, we will strip the group-end of the range
  // components and append 0xff, which will be lexicographically higher than any key bytes
  // with the same hash and range component prefix. For example,
  //
  // DocKey(0x1234, ["aa", "bb"], ["cc", "dd"])
  // Encoded: H\0x12\0x34$aa\x00\x00$bb\x00\x00!$cc\x00\x00$dd\x00\x00!
  // Result: H\0x12\0x34$aa\x00\x00$bb\x00\x00!$cc\x00\x00$dd\x00\x00\xff
  // This key will also skip all DocKeys that have additional range components, e.g.
  // DocKey(0x1234, ["aa", "bb"], ["cc", "dd", "ee"])
  // (encoded as H\0x12\0x34$aa\x00\x00$bb\x00\x00!$cc\x00\x00$dd\x00\x00$ee\x00\00!). That should
  // make no difference to DocRowwiseIterator in a valid database, because all keys actually stored
  // in DocDB will have exactly the same number of range components.
  //
  // Now, suppose there are no range components in the key passed to us (note: that does not
  // necessarily mean there are no range components in the schema, just the doc key being passed to
  // us is a custom-constructed DocKey with no range components because the caller wants a key
  // that will skip pass all doc keys with the same hash components prefix). Example:
  //
  // DocKey(0x1234, ["aa", "bb"], [])
  // Encoded: H\0x12\0x34$aa\x00\x00$bb\x00\x00!!
  // Result: H\0x12\0x34$aa\x00\x00$bb\x00\x00!\xff
  KeyBytes doc_key_encoded = doc_key_.Encode();
  doc_key_encoded.RemoveValueTypeSuffix(ValueType::kGroupEnd);
  doc_key_encoded.AppendValueType(ValueType::kMaxByte);
  return doc_key_encoded;
}
1082 | | |
// Appends one more subkey at the deepest level of this SubDocKey.
void SubDocKey::AppendSubKey(PrimitiveValue subkey) {
  subkeys_.emplace_back(std::move(subkey));
}
1086 | | |
// Removes the deepest subkey. There must be at least one (debug-checked only).
void SubDocKey::RemoveLastSubKey() {
  DCHECK(!subkeys_.empty());
  subkeys_.pop_back();
}
1091 | | |
1092 | 3 | void SubDocKey::KeepPrefix(size_t num_sub_keys_to_keep) { |
1093 | 3 | if (subkeys_.size() > num_sub_keys_to_keep) { |
1094 | 0 | subkeys_.resize(num_sub_keys_to_keep); |
1095 | 0 | } |
1096 | 3 | } |
1097 | | |
1098 | | // ------------------------------------------------------------------------------------------------ |
1099 | | // DocDbAwareFilterPolicy |
1100 | | // ------------------------------------------------------------------------------------------------ |
1101 | | |
1102 | | namespace { |
1103 | | |
// Bloom-filter key transformer that extracts the doc-key part selected by the template
// parameter from an encoded key. Exposed as a process-wide singleton per part.
template<DocKeyPart doc_key_part>
class DocKeyComponentsExtractor : public rocksdb::FilterPolicy::KeyTransformer {
 public:
  DocKeyComponentsExtractor(const DocKeyComponentsExtractor&) = delete;
  DocKeyComponentsExtractor& operator=(const DocKeyComponentsExtractor&) = delete;

  static DocKeyComponentsExtractor& GetInstance() {
    static DocKeyComponentsExtractor<doc_key_part> instance;
    return instance;
  }

  // For encoded DocKey extracts specified part, for non-DocKey returns empty key, so they will
  // always match the filter (this is correct, but might be optimized for performance if/when
  // needed).
  // As of 2020-05-12 intents DB could contain keys in non-DocKey format.
  Slice Transform(Slice key) const override {
    auto size_result = DocKey::EncodedSize(key, doc_key_part);
    return size_result.ok() ? Slice(key.data(), *size_result) : Slice();
  }

 private:
  DocKeyComponentsExtractor() = default;
};
1127 | | |
1128 | | class HashedDocKeyUpToHashComponentsExtractor : public rocksdb::FilterPolicy::KeyTransformer { |
1129 | | public: |
1130 | | HashedDocKeyUpToHashComponentsExtractor(const HashedDocKeyUpToHashComponentsExtractor&) = delete; |
1131 | | HashedDocKeyUpToHashComponentsExtractor& operator=( |
1132 | | const HashedDocKeyUpToHashComponentsExtractor&) = delete; |
1133 | | |
1134 | 0 | static HashedDocKeyUpToHashComponentsExtractor& GetInstance() { |
1135 | 0 | static HashedDocKeyUpToHashComponentsExtractor instance; |
1136 | 0 | return instance; |
1137 | 0 | } |
1138 | | |
1139 | | // For encoded DocKey with hash code present extracts prefix up to hashed components, |
1140 | | // for non-DocKey or DocKey without hash code (for range-partitioned tables) returns empty key, |
1141 | | // so they will always match the filter. |
1142 | 0 | Slice Transform(Slice key) const override { |
1143 | 0 | auto size_result = DocKey::EncodedSizeAndHashPresent(key, DocKeyPart::kUpToHash); |
1144 | 0 | return (size_result.ok() && size_result->second) ? Slice(key.data(), size_result->first) |
1145 | 0 | : Slice(); |
1146 | 0 | } |
1147 | | |
1148 | | private: |
1149 | | HashedDocKeyUpToHashComponentsExtractor() = default; |
1150 | | }; |
1151 | | |
1152 | | } // namespace |
1153 | | |
1154 | | void DocDbAwareFilterPolicyBase::CreateFilter( |
1155 | 0 | const rocksdb::Slice* keys, int n, std::string* dst) const { |
1156 | 0 | CHECK_GT(n, 0); |
1157 | 0 | return builtin_policy_->CreateFilter(keys, n, dst); |
1158 | 0 | } |
1159 | | |
1160 | | bool DocDbAwareFilterPolicyBase::KeyMayMatch( |
1161 | 0 | const rocksdb::Slice& key, const rocksdb::Slice& filter) const { |
1162 | 0 | return builtin_policy_->KeyMayMatch(key, filter); |
1163 | 0 | } |
1164 | | |
1165 | 3.14k | rocksdb::FilterBitsBuilder* DocDbAwareFilterPolicyBase::GetFilterBitsBuilder() const { |
1166 | 3.14k | return builtin_policy_->GetFilterBitsBuilder(); |
1167 | 3.14k | } |
1168 | | |
1169 | | rocksdb::FilterBitsReader* DocDbAwareFilterPolicyBase::GetFilterBitsReader( |
1170 | 1.34k | const rocksdb::Slice& contents) const { |
1171 | 1.34k | return builtin_policy_->GetFilterBitsReader(contents); |
1172 | 1.34k | } |
1173 | | |
1174 | 3.09k | rocksdb::FilterPolicy::FilterType DocDbAwareFilterPolicyBase::GetFilterType() const { |
1175 | 3.09k | return builtin_policy_->GetFilterType(); |
1176 | 3.09k | } |
1177 | | |
// Filter keys are built from the DocKey prefix up to and including the hashed components.
const rocksdb::FilterPolicy::KeyTransformer*
DocDbAwareHashedComponentsFilterPolicy::GetKeyTransformer() const {
  return &DocKeyComponentsExtractor<DocKeyPart::kUpToHash>::GetInstance();
}
1182 | | |
const rocksdb::FilterPolicy::KeyTransformer*
DocDbAwareV2FilterPolicy::GetKeyTransformer() const {
  // We want for DocDbAwareV2FilterPolicy to disable bloom filtering during read path for
  // range-partitioned tablets (see https://github.com/yugabyte/yugabyte-db/issues/6435,
  // https://github.com/yugabyte/yugabyte-db/issues/8731). The extractor below returns an empty
  // key (always-match) unless a hash code is present.
  return &HashedDocKeyUpToHashComponentsExtractor::GetInstance();
}
1190 | | |
// V3 filters on the hashed components when present, otherwise on the first range component,
// so bloom filters also work for range-partitioned tables.
const rocksdb::FilterPolicy::KeyTransformer*
DocDbAwareV3FilterPolicy::GetKeyTransformer() const {
  return &DocKeyComponentsExtractor<DocKeyPart::kUpToHashOrFirstRange>::GetInstance();
}
1195 | | |
1196 | 11.7M | DocKeyEncoderAfterTableIdStep DocKeyEncoder::CotableId(const Uuid& cotable_id) { |
1197 | 11.7M | if (!cotable_id.IsNil()) { |
1198 | 2.44M | std::string bytes; |
1199 | 2.44M | cotable_id.EncodeToComparable(&bytes); |
1200 | 2.44M | out_->AppendValueType(ValueType::kTableId); |
1201 | 2.44M | out_->AppendRawBytes(bytes); |
1202 | 2.44M | } |
1203 | 11.7M | return DocKeyEncoderAfterTableIdStep(out_); |
1204 | 11.7M | } |
1205 | | |
1206 | 14.8M | DocKeyEncoderAfterTableIdStep DocKeyEncoder::PgtableId(const PgTableOid pgtable_id) { |
1207 | 14.8M | if (pgtable_id > 0) { |
1208 | 373 | out_->AppendValueType(ValueType::kPgTableOid); |
1209 | 373 | out_->AppendUInt32(pgtable_id); |
1210 | 373 | } |
1211 | 14.8M | return DocKeyEncoderAfterTableIdStep(out_); |
1212 | 14.8M | } |
1213 | | |
1214 | 2.77M | DocKeyEncoderAfterTableIdStep DocKeyEncoder::Schema(const class Schema& schema) { |
1215 | 2.77M | if (schema.pgtable_id() > 0) { |
1216 | 82 | return PgtableId(schema.pgtable_id()); |
1217 | 2.77M | } else { |
1218 | 2.77M | return CotableId(schema.cotable_id()); |
1219 | 2.77M | } |
1220 | 2.77M | } |
1221 | | |
1222 | 383M | Result<bool> DocKeyDecoder::DecodeCotableId(Uuid* uuid) { |
1223 | 383M | if (!input_.TryConsumeByte(ValueTypeAsChar::kTableId)) { |
1224 | 281M | return false; |
1225 | 281M | } |
1226 | | |
1227 | 102M | if (input_.size() < kUuidSize) { |
1228 | 0 | return STATUS_FORMAT( |
1229 | 0 | Corruption, "Not enough bytes for cotable id: $0", input_.ToDebugHexString()); |
1230 | 0 | } |
1231 | | |
1232 | 102M | if (uuid) { |
1233 | 87.0M | RETURN_NOT_OK(uuid->DecodeFromComparableSlice(Slice(input_.data(), kUuidSize))); |
1234 | 87.0M | } |
1235 | 102M | input_.remove_prefix(kUuidSize); |
1236 | | |
1237 | 102M | return true; |
1238 | 102M | } |
1239 | | |
1240 | 296M | Result<bool> DocKeyDecoder::DecodePgtableId(PgTableOid* pgtable_id) { |
1241 | 296M | if (input_.empty() || input_[0] != ValueTypeAsChar::kPgTableOid) { |
1242 | 296M | return false; |
1243 | 296M | } |
1244 | | |
1245 | 262k | input_.consume_byte(); |
1246 | | |
1247 | 262k | if (input_.size() < sizeof(PgTableOid)) { |
1248 | 0 | return STATUS_FORMAT( |
1249 | 0 | Corruption, "Not enough bytes for pgtable id: $0", input_.ToDebugHexString()); |
1250 | 0 | } |
1251 | | |
1252 | 262k | static_assert( |
1253 | 262k | sizeof(PgTableOid) == sizeof(uint32_t), |
1254 | 262k | "It looks like the pgtable ID's size has changed -- need to update encoder/decoder."); |
1255 | 262k | if (pgtable_id) { |
1256 | 1.26k | *pgtable_id = BigEndian::Load32(input_.data()); |
1257 | 1.26k | } |
1258 | 262k | input_.remove_prefix(sizeof(PgTableOid)); |
1259 | | |
1260 | 262k | return true; |
1261 | 262k | } |
1262 | | |
// Decodes an optional 16-bit hash code at the current position. Returns true and advances past
// marker byte + 2 hash bytes when a hash code is present; returns false (without consuming
// anything) when the key has no hash component; returns a Corruption status on malformed input.
Result<bool> DocKeyDecoder::DecodeHashCode(uint16_t* out, AllowSpecial allow_special) {
  if (input_.empty()) {
    return false;
  }

  auto first_value_type = static_cast<ValueType>(input_[0]);

  // Special value types (e.g. kLowest/kHighest) are only acceptable when allow_special is set.
  auto good_value_type = allow_special ? IsPrimitiveOrSpecialValueType(first_value_type)
                                       : IsPrimitiveValueType(first_value_type);
  // A group end right away means "no hash code", not corruption.
  if (first_value_type == ValueType::kGroupEnd) {
    return false;
  }

  if (!good_value_type) {
    return STATUS_FORMAT(Corruption,
        "Expected first value type to be primitive or GroupEnd, got $0 in $1",
        first_value_type, input_.ToDebugHexString());
  }

  // NOTE(review): the emptiness re-check below appears redundant (nothing has been consumed
  // since the check at the top), but it is kept as-is — confirm before simplifying.
  if (input_.empty() || input_[0] != ValueTypeAsChar::kUInt16Hash) {
    return false;
  }

  if (input_.size() < sizeof(DocKeyHash) + 1) {
    return STATUS_FORMAT(
        Corruption,
        "Could not decode a 16-bit hash component of a document key: only $0 bytes left",
        input_.size());
  }

  // We'll need to update this code if we ever change the size of the hash field.
  static_assert(sizeof(DocKeyHash) == sizeof(uint16_t),
      "It looks like the DocKeyHash's size has changed -- need to update encoder/decoder.");
  if (out) {
    *out = BigEndian::Load16(input_.data() + 1);  // +1 skips the kUInt16Hash marker byte.
  }
  input_.remove_prefix(sizeof(DocKeyHash) + 1);
  return true;
}
1302 | | |
// Convenience overload that decodes and discards the next primitive value.
Status DocKeyDecoder::DecodePrimitiveValue(AllowSpecial allow_special) {
  return DecodePrimitiveValue(nullptr /* out */, allow_special);
}
1306 | | |
1307 | 154M | Status DocKeyDecoder::DecodePrimitiveValue(PrimitiveValue* out, AllowSpecial allow_special) { |
1308 | 154M | if (allow_special && |
1309 | 15.8M | !input_.empty() && |
1310 | 15.8M | (input_[0] == ValueTypeAsChar::kLowest || input_[0] == ValueTypeAsChar::kHighest)) { |
1311 | 0 | input_.consume_byte(); |
1312 | 0 | return Status::OK(); |
1313 | 0 | } |
1314 | 154M | return PrimitiveValue::DecodeKey(&input_, out); |
1315 | 154M | } |
1316 | | |
1317 | 42.5M | Status DocKeyDecoder::ConsumeGroupEnd() { |
1318 | 42.5M | if (input_.empty() || input_[0] != ValueTypeAsChar::kGroupEnd) { |
1319 | 0 | return STATUS_FORMAT(Corruption, "Group end expected but $0 found", input_.ToDebugHexString()); |
1320 | 0 | } |
1321 | 42.5M | input_.consume_byte(); |
1322 | 42.5M | return Status::OK(); |
1323 | 42.5M | } |
1324 | | |
1325 | 62.5M | bool DocKeyDecoder::GroupEnded() const { |
1326 | 62.5M | return input_.empty() || input_[0] == ValueTypeAsChar::kGroupEnd; |
1327 | 62.5M | } |
1328 | | |
// Returns true if the input is positioned at a primitive value (as opposed to a group end).
// Special value types are not allowed here; delegates to the file-local helper.
Result<bool> DocKeyDecoder::HasPrimitiveValue() {
  return docdb::HasPrimitiveValue(&input_, AllowSpecial::kFalse);
}
1332 | | |
// Advances the decoder past the cotable/pgtable id and, when a hash code is present, past all
// hashed components, leaving the input positioned at the range component group.
Status DocKeyDecoder::DecodeToRangeGroup() {
  RETURN_NOT_OK(DecodeCotableId());
  RETURN_NOT_OK(DecodePgtableId());
  if (VERIFY_RESULT(DecodeHashCode())) {
    // Hash code present: skip every hashed component up to the group-end marker.
    while (VERIFY_RESULT(HasPrimitiveValue())) {
      RETURN_NOT_OK(DecodePrimitiveValue());
    }
  }

  return Status::OK();
}
1344 | | |
// Convenience overload that detects and skips the hash code without returning its value.
Result<bool> DocKeyDecoder::DecodeHashCode(AllowSpecial allow_special) {
  return DecodeHashCode(nullptr /* out */, allow_special);
}
1348 | | |
1349 | 288 | Result<bool> ClearRangeComponents(KeyBytes* out, AllowSpecial allow_special) { |
1350 | 288 | auto prefix_size = VERIFY_RESULT( |
1351 | 288 | DocKey::EncodedSize(out->AsSlice(), DocKeyPart::kUpToHash, allow_special)); |
1352 | 288 | auto& str = *out->mutable_data(); |
1353 | 288 | if (str.size() == prefix_size + 1 && str[prefix_size] == ValueTypeAsChar::kGroupEnd) { |
1354 | 242 | return false; |
1355 | 242 | } |
1356 | 46 | if (str.size() > prefix_size) { |
1357 | 17 | str[prefix_size] = ValueTypeAsChar::kGroupEnd; |
1358 | 17 | str.Truncate(prefix_size + 1); |
1359 | 29 | } else { |
1360 | 29 | str.PushBack(ValueTypeAsChar::kGroupEnd); |
1361 | 29 | } |
1362 | 46 | return true; |
1363 | 46 | } |
1364 | | |
// Compares two encoded doc keys for equality of their table id, hash code, and either all hashed
// components (when a hash code is present) or just the first range component (range-partitioned
// keys). Special value types are allowed, matching the bloom-filter key transformers above.
Result<bool> HashedOrFirstRangeComponentsEqual(const Slice& lhs, const Slice& rhs) {
  DocKeyDecoder lhs_decoder(lhs);
  DocKeyDecoder rhs_decoder(rhs);
  RETURN_NOT_OK(lhs_decoder.DecodeCotableId());
  RETURN_NOT_OK(rhs_decoder.DecodeCotableId());
  RETURN_NOT_OK(lhs_decoder.DecodePgtableId());
  RETURN_NOT_OK(rhs_decoder.DecodePgtableId());

  // Both keys must agree on whether a hash code is present at all.
  const bool hash_present = VERIFY_RESULT(lhs_decoder.DecodeHashCode(AllowSpecial::kTrue));
  if (hash_present != VERIFY_RESULT(rhs_decoder.DecodeHashCode(AllowSpecial::kTrue))) {
    return false;
  }

  // Byte-compare everything consumed so far (table id + optional hash code); the consumed sizes
  // must match for memeq over 'consumed' bytes to be meaningful.
  size_t consumed = lhs_decoder.ConsumedSizeFrom(lhs.data());
  if (consumed != rhs_decoder.ConsumedSizeFrom(rhs.data()) ||
      !strings::memeq(lhs.data(), rhs.data(), consumed)) {
    return false;
  }

  // Check all hashed components if present or first range component otherwise.
  int num_components_to_check = hash_present ? kNumValuesNoLimit : 1;

  while (!lhs_decoder.GroupEnded() && num_components_to_check > 0) {
    auto lhs_start = lhs_decoder.left_input().data();
    auto rhs_start = rhs_decoder.left_input().data();
    auto value_type = lhs_start[0];
    if (rhs_decoder.GroupEnded() || rhs_start[0] != value_type) {
      return false;
    }

    if (PREDICT_FALSE(!IsPrimitiveOrSpecialValueType(static_cast<ValueType>(value_type)))) {
      return false;
    }

    // Decode one component on each side, then compare its encoded bytes directly.
    RETURN_NOT_OK(lhs_decoder.DecodePrimitiveValue(AllowSpecial::kTrue));
    RETURN_NOT_OK(rhs_decoder.DecodePrimitiveValue(AllowSpecial::kTrue));
    consumed = lhs_decoder.ConsumedSizeFrom(lhs_start);
    if (consumed != rhs_decoder.ConsumedSizeFrom(rhs_start) ||
        !strings::memeq(lhs_start, rhs_start, consumed)) {
      return false;
    }
    --num_components_to_check;
  }
  if (num_components_to_check == 0) {
    // We don't care about difference in rest of range components.
    return true;
  }

  // lhs's group ended first; the keys match only if rhs's group ended at the same point.
  return rhs_decoder.GroupEnded();
}
1415 | | |
1416 | 44.2M | bool DocKeyBelongsTo(Slice doc_key, const Schema& schema) { |
1417 | 44.2M | bool has_table_id = !doc_key.empty() && |
1418 | 44.2M | (doc_key[0] == ValueTypeAsChar::kTableId || doc_key[0] == ValueTypeAsChar::kPgTableOid); |
1419 | | |
1420 | 44.2M | if (schema.cotable_id().IsNil() && schema.pgtable_id() == 0) { |
1421 | 30.3M | return !has_table_id; |
1422 | 30.3M | } |
1423 | | |
1424 | 13.8M | if (!has_table_id) { |
1425 | 0 | return false; |
1426 | 0 | } |
1427 | | |
1428 | 13.8M | if (doc_key[0] == ValueTypeAsChar::kTableId) { |
1429 | 13.8M | doc_key.consume_byte(); |
1430 | | |
1431 | 13.8M | uint8_t bytes[kUuidSize]; |
1432 | 13.8M | schema.cotable_id().EncodeToComparable(bytes); |
1433 | 13.8M | return doc_key.starts_with(Slice(bytes, kUuidSize)); |
1434 | 29.4k | } else { |
1435 | 29.4k | DCHECK(doc_key[0] == ValueTypeAsChar::kPgTableOid); |
1436 | 29.4k | doc_key.consume_byte(); |
1437 | 29.4k | char buf[sizeof(PgTableOid)]; |
1438 | 29.4k | BigEndian::Store32(buf, schema.pgtable_id()); |
1439 | 29.4k | return doc_key.starts_with(Slice(buf, sizeof(PgTableOid))); |
1440 | 29.4k | } |
1441 | 13.8M | } |
1442 | | |
1443 | 6.76M | Result<boost::optional<DocKeyHash>> DecodeDocKeyHash(const Slice& encoded_key) { |
1444 | 6.76M | DocKey key; |
1445 | 6.76M | RETURN_NOT_OK(key.DecodeFrom(encoded_key, DocKeyPart::kUpToHashCode)); |
1446 | 6.76M | return key.has_hash() ? key.hash() : boost::optional<DocKeyHash>(); |
1447 | 6.76M | } |
1448 | | |
1449 | | } // namespace docdb |
1450 | | } // namespace yb |