// Tail of the HostBuffer::Create factory; the opening of the signature is
// above this chunk.
21 const std::shared_ptr<Allocator>& allocator) {
22 return std::shared_ptr<HostBuffer>(
new HostBuffer(allocator));  // raw `new`: presumably the ctor is private so
                             // make_shared is unavailable — confirm in header.
// Constructor: retains the allocator and pre-populates the per-frame buffer
// arenas.
25HostBuffer::HostBuffer(
const std::shared_ptr<Allocator>& allocator)
26 : allocator_(allocator) {
// NOTE(review): original lines 27-30 are missing from this chunk — presumably
// the DeviceBufferDescriptor setup and the loop header that declares `i` and
// `desc` used below. Verify against the full source.
31 device_buffers_[i].push_back(allocator->CreateBuffer(desc));
// Body fragment of SetLabel (signature on original line 37 is missing here).
38 label_ = std::move(label);  // sink parameter: taken by value, moved into place
// Fragment of Emplace(buffer, length, align): delegates to EmplaceInternal and
// wraps the (range, buffer) pair in a BufferView.
44 auto [range, device_buffer] = EmplaceInternal(
buffer,
length, align);
// NOTE(review): original lines 45-47 are missing — presumably a null check on
// device_buffer returning an empty BufferView. Confirm against full source.
48 return BufferView{std::move(device_buffer), range};
// Fragment of the unaligned Emplace(buffer, length) overload; same shape as
// the aligned overload above but without an alignment argument.
52 auto [range, device_buffer] = EmplaceInternal(
buffer,
length);
// NOTE(review): original lines 53-55 are missing — presumably the
// !device_buffer early-out. Confirm against full source.
56 return BufferView{std::move(device_buffer), range};
// Fragment of the callback-based Emplace(length, align, cb) overload: the
// EmplaceProc writes directly into the reserved region instead of copying
// from a caller-owned buffer.
62 auto [range, device_buffer] = EmplaceInternal(
length, align, cb);
// NOTE(review): original lines 63-65 are missing — presumably the
// !device_buffer early-out. Confirm against full source.
66 return BufferView{std::move(device_buffer), range};
// Fragment of GetStateForTest(): designated initializers populating the
// test-only state snapshot. The surrounding signature/braces (original lines
// ~70-71, 74-75) are missing from this chunk.
72 .current_buffer = current_buffer_,
73 .total_buffer_count = device_buffers_[frame_index_].size(),
// Advances to the next buffer in the current frame's arena, growing the arena
// with a freshly allocated device buffer when it has been exhausted.
77void HostBuffer::MaybeCreateNewBuffer() {
// NOTE(review): original line 78 is missing — presumably the current_buffer_
// increment that makes the bounds check below meaningful. Confirm.
79 if (current_buffer_ >= device_buffers_[frame_index_].size()) {
// NOTE(review): original lines 80-82 are missing — presumably the
// DeviceBufferDescriptor `desc` setup (size/storage mode) used below.
83 device_buffers_[frame_index_].push_back(allocator_->CreateBuffer(desc));
// Callback-based EmplaceInternal: reserves `length` bytes (aligned to `align`)
// and invokes `cb` to write the contents in place. Returns the occupied Range
// and the device buffer backing it.
88std::tuple<Range, std::shared_ptr<DeviceBuffer>> HostBuffer::EmplaceInternal(
// NOTE(review): original lines 89-90 (the length/align parameters) are missing
// from this chunk.
91 const EmplaceProc& cb) {
// NOTE(review): original lines 92-98 are missing — presumably a null-cb guard
// and the "larger than the arena block size" branch condition. The one-off
// buffer path below only makes sense under such an oversized-allocation check.
99 DeviceBufferDescriptor
desc;
// One-off dedicated buffer for this allocation (bypasses the shared arena).
102 std::shared_ptr<DeviceBuffer> device_buffer =
103 allocator_->CreateBuffer(desc);
104 if (!device_buffer) {
// NOTE(review): original lines 105-107 (the failure return) are missing.
108 cb(device_buffer->OnGetContents());
109 device_buffer->Flush(Range{0,
length});
111 return std::make_tuple(Range{0,
length}, std::move(device_buffer));
// Arena path: compute padding needed to bring offset_ up to `align`.
115 if (align > 0 && offset_ % align) {
116 padding = align - (offset_ % align);
// NOTE(review): original lines 117-118 and 120-123 are missing — presumably
// the "does it fit in the current buffer" test around this call and the
// offset_ += padding bookkeeping. Confirm against full source.
119 MaybeCreateNewBuffer();
124 const std::shared_ptr<DeviceBuffer>& current_buffer = GetCurrentBuffer();
125 auto contents = current_buffer->OnGetContents();
126 cb(contents + offset_);  // caller writes directly at the reserved offset
127 Range output_range(offset_,
length);
128 current_buffer->Flush(output_range);  // make the written range visible
// NOTE(review): original lines 129-130 (presumably offset_ += length) missing.
131 return std::make_tuple(output_range, current_buffer);
// Copy-based EmplaceInternal: copies `length` bytes from `buffer` into the
// arena (or into a dedicated one-off device buffer for oversized requests).
134std::tuple<Range, std::shared_ptr<DeviceBuffer>> HostBuffer::EmplaceInternal(
// NOTE(review): original lines 135-139 (the buffer/length parameters and,
// presumably, the oversized-allocation branch condition) are missing.
140 DeviceBufferDescriptor
desc;
143 std::shared_ptr<DeviceBuffer> device_buffer =
144 allocator_->CreateBuffer(desc);
145 if (!device_buffer) {
// NOTE(review): original lines 146-148 (the failure return) are missing.
149 if (!device_buffer->CopyHostBuffer(
static_cast<const uint8_t*
>(
buffer),
// NOTE(review): original lines 150-153 (remaining CopyHostBuffer arguments
// and the failure return) are missing from this chunk.
154 return std::make_tuple(Range{0,
length}, std::move(device_buffer));
// Arena path: remember the write offset before/after a possible rollover.
157 auto old_length = GetLength();
// NOTE(review): original line 158 (presumably the does-it-fit check) missing.
159 MaybeCreateNewBuffer();
161 old_length = GetLength();  // re-read: MaybeCreateNewBuffer resets the offset
163 const std::shared_ptr<DeviceBuffer>& current_buffer = GetCurrentBuffer();
164 auto contents = current_buffer->OnGetContents();
// NOTE(review): original lines 165-166 (presumably the memcpy into
// contents + old_length and offset bookkeeping) are missing.
167 current_buffer->Flush(Range{old_length,
length});
170 return std::make_tuple(Range{old_length,
length}, current_buffer);
// Aligned copy-based EmplaceInternal: pads the write cursor up to `align`
// before delegating the actual copy.
173std::tuple<Range, std::shared_ptr<DeviceBuffer>>
174HostBuffer::EmplaceInternal(
const void*
buffer,
size_t length,
size_t align) {
// Fast path: no alignment requested, or cursor already aligned.
175 if (align == 0 || (GetLength() % align) == 0) {
// NOTE(review): original lines 176-179 are missing — presumably the direct
// delegation to EmplaceInternal(buffer, length). Confirm against full source.
180 auto padding = align - (GetLength() % align);
// NOTE(review): original lines 181-183 and 185-190 are missing — presumably
// the fits/doesn't-fit branch around this call, the padding advance, and the
// final delegation. Confirm against full source.
184 MaybeCreateNewBuffer();
// Returns a reference to the active device buffer in the current frame's
// arena. (The closing brace, original line ~193, falls in a gap.)
191const std::shared_ptr<DeviceBuffer>& HostBuffer::GetCurrentBuffer()
const {
192 return device_buffers_[frame_index_][current_buffer_];
// Fragment of Reset() (signature, original lines ~195-197, is missing): trims
// the current frame's arena down to the buffers actually used this frame,
// then rewinds the buffer cursor.
198 while (device_buffers_[frame_index_].size() > current_buffer_ + 1) {
199 device_buffers_[frame_index_].pop_back();
// NOTE(review): original lines 200-202 are missing — presumably the loop's
// closing brace and offset_ reset. Confirm against full source.
203 current_buffer_ = 0u;
void SetLabel(std::string label)
TestStateQuery GetStateForTest()
Retrieve internal buffer state for test expectations.
static std::shared_ptr< HostBuffer > Create(const std::shared_ptr< Allocator > &allocator)
BufferView Emplace(const BufferType &buffer)
Emplace non-uniform data (like contiguous vertices) onto the host buffer.
std::function< void(uint8_t *buffer)> EmplaceProc
void Reset()
Resets the contents of the HostBuffer to nothing so it can be reused.
static const uint8_t buffer[]
constexpr size_t kAllocatorBlockSize
static const constexpr size_t kHostBufferArenaSize
Approximately the same as the maximum number of frames in flight.
Test only internal state.