Flutter Engine
 
Loading...
Searching...
No Matches
host_buffer.cc
Go to the documentation of this file.
1// Copyright 2013 The Flutter Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <cstring>
8#include <tuple>
9
16
17namespace impeller {
18
// Size in bytes of each arena block (1,024,000 B — roughly 1 MB; each frame's
// arena grows in blocks of this size, and larger requests get a one-off
// device buffer instead).
constexpr size_t kAllocatorBlockSize = 1024000;
20
21std::shared_ptr<HostBuffer> HostBuffer::Create(
22 const std::shared_ptr<Allocator>& allocator,
23 const std::shared_ptr<const IdleWaiter>& idle_waiter,
24 size_t minimum_uniform_alignment) {
25 return std::shared_ptr<HostBuffer>(
26 new HostBuffer(allocator, idle_waiter, minimum_uniform_alignment));
27}
28
29HostBuffer::HostBuffer(const std::shared_ptr<Allocator>& allocator,
30 const std::shared_ptr<const IdleWaiter>& idle_waiter,
31 size_t minimum_uniform_alignment)
32 : allocator_(allocator),
33 idle_waiter_(idle_waiter),
34 minimum_uniform_alignment_(minimum_uniform_alignment) {
38 for (auto i = 0u; i < kHostBufferArenaSize; i++) {
39 std::shared_ptr<DeviceBuffer> device_buffer = allocator->CreateBuffer(desc);
40 FML_CHECK(device_buffer) << "Failed to allocate device buffer.";
41 device_buffers_[i].push_back(device_buffer);
42 }
43}
44
46 if (idle_waiter_) {
47 // Since we hold on to DeviceBuffers we should make sure they aren't being
48 // used while we are deleting the HostBuffer.
49 idle_waiter_->WaitIdle();
50 }
51};
52
54 size_t length,
55 size_t align) {
56 auto [range, device_buffer, raw_device_buffer] =
57 EmplaceInternal(buffer, length, align);
58 if (device_buffer) {
59 return BufferView(std::move(device_buffer), range);
60 } else if (raw_device_buffer) {
61 return BufferView(raw_device_buffer, range);
62 } else {
63 return {};
64 }
65}
66
67BufferView HostBuffer::Emplace(const void* buffer, size_t length) {
68 auto [range, device_buffer, raw_device_buffer] =
69 EmplaceInternal(buffer, length);
70 if (device_buffer) {
71 return BufferView(std::move(device_buffer), range);
72 } else if (raw_device_buffer) {
73 return BufferView(raw_device_buffer, range);
74 } else {
75 return {};
76 }
77}
78
80 size_t align,
81 const EmplaceProc& cb) {
82 auto [range, device_buffer, raw_device_buffer] =
83 EmplaceInternal(length, align, cb);
84 if (device_buffer) {
85 return BufferView(std::move(device_buffer), range);
86 } else if (raw_device_buffer) {
87 return BufferView(raw_device_buffer, range);
88 } else {
89 return {};
90 }
91}
92
95 .current_frame = frame_index_,
96 .current_buffer = current_buffer_,
97 .total_buffer_count = device_buffers_[frame_index_].size(),
98 };
99}
100
101bool HostBuffer::MaybeCreateNewBuffer() {
102 current_buffer_++;
103 if (current_buffer_ >= device_buffers_[frame_index_].size()) {
107 std::shared_ptr<DeviceBuffer> buffer = allocator_->CreateBuffer(desc);
108 if (!buffer) {
109 VALIDATION_LOG << "Failed to allocate host buffer of size " << desc.size;
110 return false;
111 }
112 device_buffers_[frame_index_].push_back(std::move(buffer));
113 }
114 offset_ = 0;
115 return true;
116}
117
118std::tuple<Range, std::shared_ptr<DeviceBuffer>, DeviceBuffer*>
119HostBuffer::EmplaceInternal(size_t length,
120 size_t align,
121 const EmplaceProc& cb) {
122 if (!cb) {
123 return {};
124 }
125
126 // If the requested allocation is bigger than the block size, create a one-off
127 // device buffer and write to that.
129 DeviceBufferDescriptor desc;
130 desc.size = length;
131 desc.storage_mode = StorageMode::kHostVisible;
132 std::shared_ptr<DeviceBuffer> device_buffer =
133 allocator_->CreateBuffer(desc);
134 if (!device_buffer) {
135 return {};
136 }
137 if (cb) {
138 cb(device_buffer->OnGetContents());
139 device_buffer->Flush(Range{0, length});
140 }
141 return std::make_tuple(Range{0, length}, std::move(device_buffer), nullptr);
142 }
143
144 size_t padding = 0;
145 if (align > 0 && offset_ % align) {
146 padding = align - (offset_ % align);
147 }
148 if (offset_ + padding + length > kAllocatorBlockSize) {
149 if (!MaybeCreateNewBuffer()) {
150 return {};
151 }
152 } else {
153 offset_ += padding;
154 }
155
156 const std::shared_ptr<DeviceBuffer>& current_buffer = GetCurrentBuffer();
157 auto contents = current_buffer->OnGetContents();
158 cb(contents + offset_);
159 Range output_range(offset_, length);
160 current_buffer->Flush(output_range);
161
162 offset_ += length;
163 return std::make_tuple(output_range, nullptr, current_buffer.get());
164}
165
166std::tuple<Range, std::shared_ptr<DeviceBuffer>, DeviceBuffer*>
167HostBuffer::EmplaceInternal(const void* buffer, size_t length) {
168 // If the requested allocation is bigger than the block size, create a one-off
169 // device buffer and write to that.
171 DeviceBufferDescriptor desc;
172 desc.size = length;
173 desc.storage_mode = StorageMode::kHostVisible;
174 std::shared_ptr<DeviceBuffer> device_buffer =
175 allocator_->CreateBuffer(desc);
176 if (!device_buffer) {
177 return {};
178 }
179 if (buffer) {
180 if (!device_buffer->CopyHostBuffer(static_cast<const uint8_t*>(buffer),
181 Range{0, length})) {
182 return {};
183 }
184 }
185 return std::make_tuple(Range{0, length}, std::move(device_buffer), nullptr);
186 }
187
188 auto old_length = GetLength();
189 if (old_length + length > kAllocatorBlockSize) {
190 if (!MaybeCreateNewBuffer()) {
191 return {};
192 }
193 }
194 old_length = GetLength();
195
196 const std::shared_ptr<DeviceBuffer>& current_buffer = GetCurrentBuffer();
197 auto contents = current_buffer->OnGetContents();
198 if (buffer) {
199 ::memmove(contents + old_length, buffer, length);
200 current_buffer->Flush(Range{old_length, length});
201 }
202 offset_ += length;
203 return std::make_tuple(Range{old_length, length}, nullptr,
204 current_buffer.get());
205}
206
207std::tuple<Range, std::shared_ptr<DeviceBuffer>, DeviceBuffer*>
208HostBuffer::EmplaceInternal(const void* buffer, size_t length, size_t align) {
209 if (align == 0 || (GetLength() % align) == 0) {
210 return EmplaceInternal(buffer, length);
211 }
212
213 {
214 auto padding = align - (GetLength() % align);
215 if (offset_ + padding < kAllocatorBlockSize) {
216 offset_ += padding;
217 } else if (!MaybeCreateNewBuffer()) {
218 return {};
219 }
220 }
221
222 return EmplaceInternal(buffer, length);
223}
224
225const std::shared_ptr<DeviceBuffer>& HostBuffer::GetCurrentBuffer() const {
226 return device_buffers_[frame_index_][current_buffer_];
227}
228
230 // When resetting the host buffer state at the end of the frame, check if
231 // there are any unused buffers and remove them.
232 while (device_buffers_[frame_index_].size() > current_buffer_ + 1) {
233 device_buffers_[frame_index_].pop_back();
234 }
235
236 offset_ = 0u;
237 current_buffer_ = 0u;
238 frame_index_ = (frame_index_ + 1) % kHostBufferArenaSize;
239}
240
242 return minimum_uniform_alignment_;
243}
244
245} // namespace impeller
BufferView Emplace(const BufferType &buffer, size_t alignment=0)
Emplace non-uniform data (like contiguous vertices) onto the host buffer.
Definition host_buffer.h:92
size_t GetMinimumUniformAlignment() const
Retrieve the minimum uniform buffer alignment in bytes.
TestStateQuery GetStateForTest()
Retrieve internal buffer state for test expectations.
static std::shared_ptr< HostBuffer > Create(const std::shared_ptr< Allocator > &allocator, const std::shared_ptr< const IdleWaiter > &idle_waiter, size_t minimum_uniform_alignment)
std::function< void(uint8_t *buffer)> EmplaceProc
void Reset()
Resets the contents of the HostBuffer to nothing so it can be reused.
#define FML_CHECK(condition)
Definition logging.h:104
Vector2 padding
The halo padding in source space.
size_t length
constexpr size_t kAllocatorBlockSize
static const constexpr size_t kHostBufferArenaSize
Approximately the same size as the max frames in flight.
Definition host_buffer.h:20
Test only internal state.
#define VALIDATION_LOG
Definition validation.h:91