Diffstat (limited to 'absl/strings/internal')
-rw-r--r--  absl/strings/internal/cord_internal.cc   76
-rw-r--r--  absl/strings/internal/cord_internal.h    107
2 files changed, 183 insertions(+), 0 deletions(-)
diff --git a/absl/strings/internal/cord_internal.cc b/absl/strings/internal/cord_internal.cc
new file mode 100644
index 00000000..35f4d235
--- /dev/null
+++ b/absl/strings/internal/cord_internal.cc
@@ -0,0 +1,76 @@
+// Copyright 2020 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "absl/strings/internal/cord_internal.h"
+
+#include <atomic>
+#include <cassert>
+#include <memory>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+ABSL_CONST_INIT std::atomic<bool> cord_ring_buffer_enabled(false);
+
+void CordRep::Destroy(CordRep* rep) {
+  assert(rep != nullptr);
+
+  // Destroys the tree iteratively: children that still need to be freed are
+  // parked in `pending` so that deep trees cannot overflow the call stack.
+  absl::InlinedVector<CordRep*, Constants::kInlinedVectorSize> pending;
+  while (true) {
+    assert(!rep->refcount.IsImmortal());
+    if (rep->tag == CONCAT) {
+      CordRepConcat* rep_concat = rep->concat();
+      CordRep* right = rep_concat->right;
+      if (!right->refcount.Decrement()) {
+        pending.push_back(right);
+      }
+      CordRep* left = rep_concat->left;
+      delete rep_concat;
+      rep = nullptr;
+      if (!left->refcount.Decrement()) {
+        rep = left;
+        continue;
+      }
+    } else if (rep->tag == EXTERNAL) {
+      CordRepExternal::Delete(rep);
+      rep = nullptr;
+    } else if (rep->tag == SUBSTRING) {
+      CordRepSubstring* rep_substring = rep->substring();
+      CordRep* child = rep_substring->child;
+      delete rep_substring;
+      rep = nullptr;
+      if (!child->refcount.Decrement()) {
+        rep = child;
+        continue;
+      }
+    } else {
+      CordRepFlat::Delete(rep);
+      rep = nullptr;
+    }
+
+    if (!pending.empty()) {
+      // Resume with a parked child whose refcount already hit zero.
+      rep = pending.back();
+      pending.pop_back();
+    } else {
+      break;
+    }
+  }
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
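
Destroy is deliberately iterative: a naive recursive teardown of a deep CONCAT chain could overflow the call stack, so the loop frees one node at a time and parks not-yet-freed children in the pending stack. A minimal standalone sketch of the same pattern, using a hypothetical Node type and a plain std::vector in place of absl::InlinedVector and the refcount bookkeeping:

    #include <vector>

    // Hypothetical binary-tree node standing in for CordRepConcat.
    struct Node {
      Node* left = nullptr;
      Node* right = nullptr;
    };

    // Deletes `root` and every descendant with O(1) call-stack depth: the
    // right child is parked on an explicit stack while the loop descends
    // into the left child, mirroring CordRep::Destroy.
    void DestroyIteratively(Node* root) {
      std::vector<Node*> pending;
      Node* node = root;
      while (node != nullptr) {
        if (node->right != nullptr) pending.push_back(node->right);
        Node* next = node->left;
        delete node;
        node = next;
        if (node == nullptr && !pending.empty()) {
          node = pending.back();
          pending.pop_back();
        }
      }
    }

The real routine layers reference counting on top of this: a child is only pushed, or descended into, when its refcount actually drops to zero.
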
diff --git a/absl/strings/internal/cord_internal.h b/absl/strings/internal/cord_internal.h
index 6fb75c4f..f1af8e6e 100644
--- a/absl/strings/internal/cord_internal.h
+++ b/absl/strings/internal/cord_internal.h
@@ -22,6 +22,7 @@
#include <type_traits>
#include "absl/base/internal/invoke.h"
+#include "absl/base/optimization.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/string_view.h"
@@ -30,6 +31,28 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {
+extern std::atomic<bool> cord_ring_buffer_enabled;
+
+inline void enable_cord_ring_buffer(bool enable) {
+  cord_ring_buffer_enabled.store(enable, std::memory_order_relaxed);
+}
+
+enum Constants {
+  // The inlined size to use with absl::InlinedVector.
+  //
+  // Note: The InlinedVectors in this file (and in cord.h) do not need to use
+  // the same value for their inlined size. The fact that they do is historical.
+  // It may be desirable for each to use a different inlined size optimized for
+  // that InlinedVector's usage.
+  //
+  // TODO(jgm): Benchmark to see if there's a more optimal value than 47 for
+  // the inlined vector size (47 exists for backward compatibility).
+  kInlinedVectorSize = 47,
+
+  // Prefer copying blocks of at most this size, otherwise reference count.
+  kMaxBytesToCopy = 511
+};
+
// Wraps std::atomic for reference counting.
class Refcount {
public:
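
The cord_ring_buffer_enabled flag added in this hunk follows a common pattern for a process-wide experiment toggle: written rarely (for example at startup), read on hot paths, with std::memory_order_relaxed on both sides because the flag only selects an implementation strategy and publishes no other data. A sketch of both sides of the pattern, with hypothetical names (the real readers live in the Cord implementation):

    #include <atomic>

    // Process-wide toggle; stands in for cord_ring_buffer_enabled.
    std::atomic<bool> ring_buffer_enabled(false);

    void EnableRingBuffer(bool enable) {
      // Relaxed suffices: no other memory is published via this flag.
      ring_buffer_enabled.store(enable, std::memory_order_relaxed);
    }

    bool RingBufferEnabled() {
      // Hot-path read; a relaxed load compiles to a plain load on
      // mainstream architectures.
      return ring_buffer_enabled.load(std::memory_order_relaxed);
    }
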
@@ -151,6 +174,34 @@ struct CordRep {
  inline const CordRepExternal* external() const;
  inline CordRepFlat* flat();
  inline const CordRepFlat* flat() const;
+
+  // --------------------------------------------------------------------
+  // Memory management
+
+  // This internal routine is called from the cold path of Unref below. Keeping
+  // it in a separate routine allows good inlining of Unref into many profitable
+  // call sites. However, the call to this function can be highly disruptive to
+  // the register pressure in those callers. To minimize the cost to callers, we
+  // use a special LLVM calling convention that preserves most registers. This
+  // allows the call to this routine in cold paths to not disrupt the caller's
+  // register pressure. This calling convention is not available on all
+  // platforms; we intentionally allow LLVM to ignore the attribute rather than
+  // attempting to hardcode the list of supported platforms.
+#if defined(__clang__) && !defined(__i386__)
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wattributes"
+  __attribute__((preserve_most))
+#pragma clang diagnostic pop
+#endif
+  static void Destroy(CordRep* rep);
+
+  // Increments the reference count of `rep`.
+  // Requires `rep` to be a non-null pointer value.
+  static inline CordRep* Ref(CordRep* rep);
+
+  // Decrements the reference count of `rep`. Destroys rep if count reaches
+  // zero. Requires `rep` to be a non-null pointer value.
+  static inline void Unref(CordRep* rep);
};
struct CordRepConcat : public CordRep {
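
The preserve_most arrangement above is a hot/cold split: Unref stays tiny so it inlines at every call site, while the rare, heavyweight Destroy lives out of line and uses a calling convention that barely disturbs the caller's registers. A compilable sketch of the shape, with hypothetical names:

    // Where Clang supports it, calls to a preserve_most function clobber
    // almost no caller-saved registers, making the cold call nearly free
    // for the surrounding fast path.
    #if defined(__clang__) && !defined(__i386__)
    #define COLD_ATTR __attribute__((preserve_most))
    #else
    #define COLD_ATTR
    #endif

    struct Handle {
      int refcount = 1;
    };

    // Out-of-line cold path: rarely executed, deliberately not inlined.
    COLD_ATTR void ReleaseSlow(Handle* h) { delete h; }

    // Hot path, small enough to inline everywhere. The real code wraps the
    // condition in ABSL_PREDICT_FALSE to mark the branch as unlikely.
    inline void Release(Handle* h) {
      if (--h->refcount == 0) ReleaseSlow(h);
    }
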
@@ -284,7 +335,63 @@ static_assert(sizeof(InlineData) == kMaxInline + 1, "");
static_assert(sizeof(AsTree) == sizeof(InlineData), "");
static_assert(offsetof(AsTree, tagged_size) == kMaxInline, "");
+inline CordRepConcat* CordRep::concat() {
+  assert(tag == CONCAT);
+  return static_cast<CordRepConcat*>(this);
+}
+
+inline const CordRepConcat* CordRep::concat() const {
+  assert(tag == CONCAT);
+  return static_cast<const CordRepConcat*>(this);
+}
+
+inline CordRepSubstring* CordRep::substring() {
+  assert(tag == SUBSTRING);
+  return static_cast<CordRepSubstring*>(this);
+}
+
+inline const CordRepSubstring* CordRep::substring() const {
+  assert(tag == SUBSTRING);
+  return static_cast<const CordRepSubstring*>(this);
+}
+
+inline CordRepExternal* CordRep::external() {
+  assert(tag == EXTERNAL);
+  return static_cast<CordRepExternal*>(this);
+}
+
+inline const CordRepExternal* CordRep::external() const {
+  assert(tag == EXTERNAL);
+  return static_cast<const CordRepExternal*>(this);
+}
+
+inline CordRepFlat* CordRep::flat() {
+  assert(tag >= FLAT && tag <= MAX_FLAT_TAG);
+  return reinterpret_cast<CordRepFlat*>(this);
+}
+
+inline const CordRepFlat* CordRep::flat() const {
+  assert(tag >= FLAT && tag <= MAX_FLAT_TAG);
+  return reinterpret_cast<const CordRepFlat*>(this);
+}
+
+inline CordRep* CordRep::Ref(CordRep* rep) {
+  assert(rep != nullptr);
+  rep->refcount.Increment();
+  return rep;
+}
+
+inline void CordRep::Unref(CordRep* rep) {
+  assert(rep != nullptr);
+  // Expect the refcount to be greater than 1 (a shared rep), so the
+  // decrement rarely reaches zero; ABSL_PREDICT_FALSE keeps the call to
+  // Destroy on the cold path.
+  if (ABSL_PREDICT_FALSE(!rep->refcount.DecrementExpectHighRefcount())) {
+    Destroy(rep);
+  }
+}
+
} // namespace cord_internal
+
ABSL_NAMESPACE_END
} // namespace absl
#endif // ABSL_STRINGS_INTERNAL_CORD_INTERNAL_H_
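
Taken together, Ref/Unref form a standard intrusive reference count with an inlined fast path. A simplified, self-contained sketch (hypothetical RefCounted type; the real Refcount class additionally supports immortal reps and the DecrementExpectHighRefcount fast path):

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    class RefCounted {
     public:
      // Takes a new reference. Relaxed ordering is enough for increments:
      // acquiring a reference publishes nothing to other threads by itself.
      static RefCounted* Ref(RefCounted* p) {
        assert(p != nullptr);
        p->count_.fetch_add(1, std::memory_order_relaxed);
        return p;
      }

      // Releases a reference, deleting on the last one. acq_rel ordering
      // guarantees the deleting thread observes all writes made under
      // references that other threads have already dropped.
      static void Unref(RefCounted* p) {
        assert(p != nullptr);
        if (p->count_.fetch_sub(1, std::memory_order_acq_rel) == 1) {
          delete p;
        }
      }

      // Virtual so derived objects destroy correctly through a base pointer.
      virtual ~RefCounted() = default;

     private:
      std::atomic<int32_t> count_{1};
    };
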