chore: Update vendored sources to duckdb/duckdb@e6929cf
Merge pull request duckdb/duckdb#11414 from Mytherin/constcast
krlmlr committed Mar 30, 2024
1 parent ec4d890 commit d8b9a84
Showing 19 changed files with 91 additions and 68 deletions.
2 changes: 1 addition & 1 deletion src/duckdb/src/common/local_file_system.cpp
@@ -213,7 +213,7 @@ static string AdditionalProcessInfo(FileSystem &fs, pid_t pid) {
try {
auto cmdline_file = fs.OpenFile(StringUtil::Format("/proc/%d/cmdline", pid), FileFlags::FILE_FLAGS_READ);
auto cmdline = cmdline_file->ReadLine();
-process_name = basename(const_cast<char *>(cmdline.c_str()));
+process_name = basename(const_cast<char *>(cmdline.c_str())); // NOLINT: old C API does not take const
} catch (std::exception &) {
// ignore
}
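The NOLINT above exists because POSIX basename() from <libgen.h> is declared as char *basename(char *path) and may modify the buffer it is given, so a const string cannot be passed directly. Below is a minimal sketch of a const-safe alternative; SafeBasename is a hypothetical helper for illustration, not part of this commit:

#include <libgen.h> // POSIX: char *basename(char *path); may modify its argument
#include <string>
#include <vector>

static std::string SafeBasename(const std::string &path) {
	// Copy into a mutable buffer so basename() may edit it in place,
	// instead of const_cast-ing away the string's constness.
	std::vector<char> buf(path.begin(), path.end());
	buf.push_back('\0');
	return std::string(basename(buf.data()));
}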
2 changes: 1 addition & 1 deletion src/duckdb/src/common/serializer/buffered_file_writer.cpp
@@ -37,7 +37,7 @@ void BufferedFileWriter::WriteData(const_data_ptr_t buffer, idx_t write_size) {
Flush(); // Flush buffer before writing everything else
}
idx_t remaining_to_write = write_size - to_copy;
-fs.Write(*handle, const_cast<data_ptr_t>(buffer + to_copy), remaining_to_write);
+fs.Write(*handle, const_cast<data_ptr_t>(buffer + to_copy), remaining_to_write); // NOLINT: wrong API in Write
total_written += remaining_to_write;
} else {
// first copy anything we can from the buffer
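Here the NOLINT survives because FileSystem::Write still takes a mutable pointer even though it only reads from the buffer. Until that signature gains const, one option is to confine the cast to a single audited helper; this is a sketch only, WriteConst is hypothetical and the Write signature is assumed from the call above:

static void WriteConst(FileSystem &fs, FileHandle &handle, const_data_ptr_t buffer, idx_t nr_bytes) {
	// Write never mutates the buffer; keeping the cast here confines it to one audited call site.
	fs.Write(handle, const_cast<data_ptr_t>(buffer), nr_bytes); // NOLINT
}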
4 changes: 2 additions & 2 deletions src/duckdb/src/common/types/data_chunk.cpp
@@ -218,7 +218,7 @@ void DataChunk::Flatten() {
}
}

-vector<LogicalType> DataChunk::GetTypes() {
+vector<LogicalType> DataChunk::GetTypes() const {
vector<LogicalType> types;
for (idx_t i = 0; i < ColumnCount(); i++) {
types.push_back(data[i].GetType());
@@ -290,7 +290,7 @@ void DataChunk::Slice(const SelectionVector &sel, idx_t count_p) {
}
}

-void DataChunk::Slice(DataChunk &other, const SelectionVector &sel, idx_t count_p, idx_t col_offset) {
+void DataChunk::Slice(const DataChunk &other, const SelectionVector &sel, idx_t count_p, idx_t col_offset) {
D_ASSERT(other.ColumnCount() <= col_offset + ColumnCount());
this->count = count_p;
SelCache merge_cache;
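Taken together, the const qualifiers on GetTypes() and Slice() let a read-only chunk be inspected and sliced without casts. An illustrative usage sketch (not part of the commit; assumes the usual duckdb headers and types):

void SliceIntoResult(const DataChunk &input, const SelectionVector &sel, idx_t count, DataChunk &result) {
	// GetTypes() is now callable on a const chunk...
	result.Initialize(Allocator::DefaultAllocator(), input.GetTypes());
	// ...and Slice() now accepts a const source chunk.
	result.Slice(input, sel, count);
}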
36 changes: 22 additions & 14 deletions src/duckdb/src/common/types/vector.cpp
@@ -78,11 +78,11 @@ Vector::Vector(Vector &other) : type(other.type) {
Reference(other);
}

-Vector::Vector(Vector &other, const SelectionVector &sel, idx_t count) : type(other.type) {
+Vector::Vector(const Vector &other, const SelectionVector &sel, idx_t count) : type(other.type) {
Slice(other, sel, count);
}

-Vector::Vector(Vector &other, idx_t offset, idx_t end) : type(other.type) {
+Vector::Vector(const Vector &other, idx_t offset, idx_t end) : type(other.type) {
Slice(other, offset, end);
}

@@ -574,8 +574,8 @@ Value Vector::GetValueInternal(const Vector &v_p, idx_t index_p) {
throw InternalException("FSST Vector with non-string datatype found!");
}
auto str_compressed = reinterpret_cast<string_t *>(data)[index];
-Value result = FSSTPrimitives::DecompressValue(FSSTVector::GetDecoder(const_cast<Vector &>(*vector)),
-str_compressed.GetData(), str_compressed.GetSize());
+Value result = FSSTPrimitives::DecompressValue(FSSTVector::GetDecoder(*vector), str_compressed.GetData(),
+str_compressed.GetSize());
return result;
}

@@ -776,8 +776,8 @@ string Vector::ToString(idx_t count) const {
case VectorType::FSST_VECTOR: {
for (idx_t i = 0; i < count; i++) {
string_t compressed_string = reinterpret_cast<string_t *>(data)[i];
-Value val = FSSTPrimitives::DecompressValue(FSSTVector::GetDecoder(const_cast<Vector &>(*this)),
-compressed_string.GetData(), compressed_string.GetSize());
+Value val = FSSTPrimitives::DecompressValue(FSSTVector::GetDecoder(*this), compressed_string.GetData(),
+compressed_string.GetSize());
retval += GetValue(i).ToString() + (i == count - 1 ? "" : ", ");
}
} break;
@@ -2127,7 +2127,8 @@ const vector<unique_ptr<Vector>> &StructVector::GetEntries(const Vector &vector)
//===--------------------------------------------------------------------===//
// ListVector
//===--------------------------------------------------------------------===//
-const Vector &ListVector::GetEntry(const Vector &vector) {
+template <class T>
+T &ListVector::GetEntryInternal(T &vector) {
D_ASSERT(vector.GetType().id() == LogicalTypeId::LIST || vector.GetType().id() == LogicalTypeId::MAP);
if (vector.GetVectorType() == VectorType::DICTIONARY_VECTOR) {
auto &child = DictionaryVector::Child(vector);
@@ -2137,12 +2138,15 @@ const Vector &ListVector::GetEntry(const Vector &vector) {
vector.GetVectorType() == VectorType::CONSTANT_VECTOR);
D_ASSERT(vector.auxiliary);
D_ASSERT(vector.auxiliary->GetBufferType() == VectorBufferType::LIST_BUFFER);
-return vector.auxiliary->Cast<VectorListBuffer>().GetChild();
+return vector.auxiliary->template Cast<VectorListBuffer>().GetChild();
}

+const Vector &ListVector::GetEntry(const Vector &vector) {
+return GetEntryInternal<const Vector>(vector);
+}

Vector &ListVector::GetEntry(Vector &vector) {
-const Vector &cvector = vector;
-return const_cast<Vector &>(ListVector::GetEntry(cvector));
+return GetEntryInternal<Vector>(vector);
}

void ListVector::Reserve(Vector &vector, idx_t required_capacity) {
Expand Down Expand Up @@ -2496,7 +2500,8 @@ UnionInvalidReason UnionVector::CheckUnionValidity(Vector &vector_p, idx_t count
//===--------------------------------------------------------------------===//
// ArrayVector
//===--------------------------------------------------------------------===//
-const Vector &ArrayVector::GetEntry(const Vector &vector) {
+template <class T>
+T &ArrayVector::GetEntryInternal(T &vector) {
D_ASSERT(vector.GetType().id() == LogicalTypeId::ARRAY);
if (vector.GetVectorType() == VectorType::DICTIONARY_VECTOR) {
auto &child = DictionaryVector::Child(vector);
Expand All @@ -2506,12 +2511,15 @@ const Vector &ArrayVector::GetEntry(const Vector &vector) {
vector.GetVectorType() == VectorType::CONSTANT_VECTOR);
D_ASSERT(vector.auxiliary);
D_ASSERT(vector.auxiliary->GetBufferType() == VectorBufferType::ARRAY_BUFFER);
-return vector.auxiliary->Cast<VectorArrayBuffer>().GetChild();
+return vector.auxiliary->template Cast<VectorArrayBuffer>().GetChild();
}

+const Vector &ArrayVector::GetEntry(const Vector &vector) {
+return GetEntryInternal<const Vector>(vector);
+}

Vector &ArrayVector::GetEntry(Vector &vector) {
-const Vector &cvector = vector;
-return const_cast<Vector &>(ArrayVector::GetEntry(cvector));
+return GetEntryInternal<Vector>(vector);
}

idx_t ArrayVector::GetTotalSize(const Vector &vector) {
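The ListVector and ArrayVector changes are the same refactor: one GetEntryInternal template deduces constness from its argument, so the mutable GetEntry overload no longer has to const_cast the result of the const one. A standalone toy model of the pattern (names simplified; not the duckdb API):

struct Vec {
	Vec *child = nullptr;

	template <class T> // T is Vec or const Vec
	static T &GetEntryInternal(T &v) {
		return *v.child; // the result's constness follows the argument's
	}
	static const Vec &GetEntry(const Vec &v) {
		return GetEntryInternal<const Vec>(v);
	}
	static Vec &GetEntry(Vec &v) {
		return GetEntryInternal<Vec>(v); // no const_cast needed
	}
};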
4 changes: 2 additions & 2 deletions src/duckdb/src/execution/index/art/art.cpp
@@ -133,13 +133,13 @@ unique_ptr<IndexScanState> ART::TryInitializeScan(const Transaction &transaction
// match on a comparison type
matcher.expr_type = make_uniq<ComparisonExpressionTypeMatcher>();
// match on a constant comparison with the indexed expression
-matcher.matchers.push_back(make_uniq<ExpressionEqualityMatcher>(const_cast<Expression &>(index_expr)));
+matcher.matchers.push_back(make_uniq<ExpressionEqualityMatcher>(index_expr));
matcher.matchers.push_back(make_uniq<ConstantExpressionMatcher>());

matcher.policy = SetMatcher::Policy::UNORDERED;

vector<reference<Expression>> bindings;
-if (matcher.Match(const_cast<Expression &>(filter_expr), bindings)) {
+if (matcher.Match(const_cast<Expression &>(filter_expr), bindings)) { // NOLINT: Match does not alter the expr
// range or equality comparison with constant value
// we can use our index here
// bindings[0] = the expression
39 changes: 18 additions & 21 deletions src/duckdb/src/execution/window_segment_tree.cpp
@@ -470,7 +470,7 @@ WindowNaiveState::WindowNaiveState(const WindowNaiveAggregator &gstate)
row_set(STANDARD_VECTOR_SIZE, hash_row, equal_row) {
InitSubFrames(frames, gstate.exclude_mode);

-auto &inputs = const_cast<DataChunk &>(gstate.GetInputs());
+auto &inputs = gstate.GetInputs();
if (inputs.ColumnCount() > 0) {
leaves.Initialize(Allocator::DefaultAllocator(), inputs.GetTypes());
}
@@ -494,9 +494,8 @@ void WindowNaiveState::FlushStates() {
return;
}

-auto &inputs = const_cast<DataChunk &>(gstate.GetInputs());
-leaves.Reference(inputs);
-leaves.Slice(update_sel, flush_count);
+auto &inputs = gstate.GetInputs();
+leaves.Slice(inputs, update_sel, flush_count);

auto &aggr = gstate.aggr;
AggregateInputData aggr_input_data(aggr.GetFunctionData(), allocator);
@@ -506,19 +505,18 @@
}

size_t WindowNaiveState::Hash(idx_t rid) {
-auto &inputs = const_cast<DataChunk &>(gstate.GetInputs());
-leaves.Reference(inputs);
+auto &inputs = gstate.GetInputs();

auto s = UnsafeNumericCast<sel_t>(rid);
SelectionVector sel(&s);
-leaves.Slice(sel, 1);
+leaves.Slice(inputs, sel, 1);
leaves.Hash(hashes);

return *FlatVector::GetData<hash_t>(hashes);
}

bool WindowNaiveState::KeyEqual(const idx_t &lhs, const idx_t &rhs) {
-auto &inputs = const_cast<DataChunk &>(gstate.GetInputs());
+auto &inputs = gstate.GetInputs();

auto l = UnsafeNumericCast<sel_t>(lhs);
SelectionVector lsel(&l);
@@ -644,7 +642,7 @@ class WindowSegmentTreePart {

enum FramePart : uint8_t { FULL = 0, LEFT = 1, RIGHT = 2 };

-WindowSegmentTreePart(ArenaAllocator &allocator, const AggregateObject &aggr, DataChunk &inputs,
+WindowSegmentTreePart(ArenaAllocator &allocator, const AggregateObject &aggr, const DataChunk &inputs,
const ValidityMask &filter_mask);
~WindowSegmentTreePart();

@@ -681,7 +679,7 @@ class WindowSegmentTreePart {
//! Order insensitive aggregate (we can optimise internal combines)
const bool order_insensitive;
//! The partition arguments
-DataChunk &inputs;
+const DataChunk &inputs;
//! The filtered rows in inputs
const ValidityMask &filter_mask;
//! The size of a single aggregate state
@@ -706,14 +704,14 @@

class WindowSegmentTreeState : public WindowAggregatorState {
public:
-WindowSegmentTreeState(const AggregateObject &aggr, DataChunk &inputs, const ValidityMask &filter_mask)
+WindowSegmentTreeState(const AggregateObject &aggr, const DataChunk &inputs, const ValidityMask &filter_mask)
: aggr(aggr), inputs(inputs), filter_mask(filter_mask), part(allocator, aggr, inputs, filter_mask) {
}

//! The aggregate function
const AggregateObject &aggr;
//! The aggregate function
-DataChunk &inputs;
+const DataChunk &inputs;
//! The filtered rows in inputs
const ValidityMask &filter_mask;
//! The left (default) segment tree part
@@ -722,8 +720,8 @@ class WindowSegmentTreeState : public WindowAggregatorState {
unique_ptr<WindowSegmentTreePart> right_part;
};

-WindowSegmentTreePart::WindowSegmentTreePart(ArenaAllocator &allocator, const AggregateObject &aggr, DataChunk &inputs,
-const ValidityMask &filter_mask)
+WindowSegmentTreePart::WindowSegmentTreePart(ArenaAllocator &allocator, const AggregateObject &aggr,
+const DataChunk &inputs, const ValidityMask &filter_mask)
: allocator(allocator), aggr(aggr),
order_insensitive(aggr.function.order_dependent == AggregateOrderDependent::NOT_ORDER_DEPENDENT), inputs(inputs),
filter_mask(filter_mask), state_size(aggr.function.state_size()), state(state_size * STANDARD_VECTOR_SIZE),
@@ -749,7 +747,7 @@ WindowSegmentTreePart::~WindowSegmentTreePart() {
}

unique_ptr<WindowAggregatorState> WindowSegmentTree::GetLocalState() const {
-return make_uniq<WindowSegmentTreeState>(aggr, const_cast<DataChunk &>(inputs), filter_mask);
+return make_uniq<WindowSegmentTreeState>(aggr, inputs, filter_mask);
}

void WindowSegmentTreePart::FlushStates(bool combining) {
@@ -762,8 +760,7 @@ void WindowSegmentTreePart::FlushStates(bool combining) {
statel.Verify(flush_count);
aggr.function.combine(statel, statep, aggr_input_data, flush_count);
} else {
-leaves.Reference(inputs);
-leaves.Slice(filter_sel, flush_count);
+leaves.Slice(inputs, filter_sel, flush_count);
aggr.function.update(&leaves.data[0], aggr_input_data, leaves.ColumnCount(), statep, flush_count);
}

@@ -1382,7 +1379,7 @@ WindowDistinctAggregator::DistinctSortTree::DistinctSortTree(ZippedElements &&pr

class WindowDistinctState : public WindowAggregatorState {
public:
-WindowDistinctState(const AggregateObject &aggr, DataChunk &inputs, const WindowDistinctAggregator &tree);
+WindowDistinctState(const AggregateObject &aggr, const DataChunk &inputs, const WindowDistinctAggregator &tree);

void Evaluate(const DataChunk &bounds, Vector &result, idx_t count, idx_t row_idx);

@@ -1393,7 +1390,7 @@ class WindowDistinctState : public WindowAggregatorState {
//! The aggregate function
const AggregateObject &aggr;
//! The aggregate function
-DataChunk &inputs;
+const DataChunk &inputs;
//! The merge sort tree data
const WindowDistinctAggregator &tree;
//! The size of a single aggregate state
@@ -1412,7 +1409,7 @@ class WindowDistinctState : public WindowAggregatorState {
SubFrames frames;
};

-WindowDistinctState::WindowDistinctState(const AggregateObject &aggr, DataChunk &inputs,
+WindowDistinctState::WindowDistinctState(const AggregateObject &aggr, const DataChunk &inputs,
const WindowDistinctAggregator &tree)
: aggr(aggr), inputs(inputs), tree(tree), state_size(aggr.function.state_size()),
state((state_size * STANDARD_VECTOR_SIZE)), statef(LogicalType::POINTER), statep(LogicalType::POINTER),
Expand Down Expand Up @@ -1488,7 +1485,7 @@ void WindowDistinctState::Evaluate(const DataChunk &bounds, Vector &result, idx_
}

unique_ptr<WindowAggregatorState> WindowDistinctAggregator::GetLocalState() const {
-return make_uniq<WindowDistinctState>(aggr, const_cast<DataChunk &>(inputs), *this);
+return make_uniq<WindowDistinctState>(aggr, inputs, *this);
}

void WindowDistinctAggregator::Evaluate(WindowAggregatorState &lstate, const DataChunk &bounds, Vector &result,
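A second pattern recurs throughout this file: the two-step Reference-then-Slice is collapsed into the fused Slice overload, which is what makes the const DataChunk plumbing possible, since Reference() requires a mutable chunk. The before/after shape, as taken from the hunks above:

// Before: two calls, and Reference() demands a mutable DataChunk.
leaves.Reference(inputs);
leaves.Slice(update_sel, flush_count);

// After: one call that accepts a const DataChunk.
leaves.Slice(inputs, update_sel, flush_count);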
@@ -538,7 +538,7 @@ struct SortedAggregateFunction {
template <class STATE, class OP>
static void Combine(const STATE &source, STATE &target, AggregateInputData &aggr_input_data) {
auto &order_bind = aggr_input_data.bind_data->Cast<SortedAggregateBindData>();
-auto &other = const_cast<STATE &>(source);
+auto &other = const_cast<STATE &>(source); // NOLINT: absorb explicitly allows destruction
target.Absorb(order_bind, other);
}

6 changes: 3 additions & 3 deletions src/duckdb/src/function/table/version/pragma_version.cpp
@@ -1,5 +1,5 @@
#ifndef DUCKDB_PATCH_VERSION
-#define DUCKDB_PATCH_VERSION "2-dev323"
+#define DUCKDB_PATCH_VERSION "2-dev328"
#endif
#ifndef DUCKDB_MINOR_VERSION
#define DUCKDB_MINOR_VERSION 10
@@ -8,10 +8,10 @@
#define DUCKDB_MAJOR_VERSION 0
#endif
#ifndef DUCKDB_VERSION
-#define DUCKDB_VERSION "v0.10.2-dev323"
+#define DUCKDB_VERSION "v0.10.2-dev328"
#endif
#ifndef DUCKDB_SOURCE_ID
-#define DUCKDB_SOURCE_ID "5d1baa5e0e"
+#define DUCKDB_SOURCE_ID "e6929cf46e"
#endif
#include "duckdb/function/table/system_functions.hpp"
#include "duckdb/main/database.hpp"
@@ -23,7 +23,7 @@ struct DeserializationData {
stack<reference<DatabaseInstance>> databases;
stack<idx_t> enums;
stack<reference<bound_parameter_map_t>> parameter_data;
-stack<reference<LogicalType>> types;
+stack<const_reference<LogicalType>> types;

template <class T>
void Set(T entry) = delete;
@@ -167,13 +167,24 @@ inline void DeserializationData::Set(LogicalType &type) {
}

template <>
-inline LogicalType &DeserializationData::Get() {
+inline void DeserializationData::Unset<LogicalType>() {
AssertNotEmpty(types);
-return types.top();
+types.pop();
}

+template <>
+inline void DeserializationData::Set(const LogicalType &type) {
+types.emplace(type);
+}
+
+template <>
+inline const LogicalType &DeserializationData::Get() {
+AssertNotEmpty(types);
+return types.top();
+}

template <>
-inline void DeserializationData::Unset<LogicalType>() {
+inline void DeserializationData::Unset<const LogicalType>() {
AssertNotEmpty(types);
types.pop();
}
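Storing const_reference<LogicalType> (presumably an alias for std::reference_wrapper<const LogicalType>; the alias itself is defined outside this diff) lets callers push both const and mutable types while Get() only ever hands back a const view. A simplified, self-contained model of the idea:

#include <functional>
#include <stack>

struct LogicalType { int id = 0; };

struct DeserializationData {
	std::stack<std::reference_wrapper<const LogicalType>> types;

	void Set(const LogicalType &type) { // binds non-const arguments too
		types.emplace(type);
	}
	const LogicalType &Get() const {
		return types.top();
	}
	void Unset() {
		types.pop();
	}
};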
4 changes: 2 additions & 2 deletions src/duckdb/src/include/duckdb/common/types/data_chunk.hpp
@@ -131,7 +131,7 @@ class DataChunk {

//! Slice all Vectors from other.data[i] to data[i + 'col_offset']
//! Turning all Vectors into Dictionary Vectors, using 'sel'
-DUCKDB_API void Slice(DataChunk &other, const SelectionVector &sel, idx_t count, idx_t col_offset = 0);
+DUCKDB_API void Slice(const DataChunk &other, const SelectionVector &sel, idx_t count, idx_t col_offset = 0);

//! Slice a DataChunk from "offset" to "offset + count"
DUCKDB_API void Slice(idx_t offset, idx_t count);
@@ -150,7 +150,7 @@ DUCKDB_API void Hash(vector<idx_t> &column_ids, Vector &result);
DUCKDB_API void Hash(vector<idx_t> &column_ids, Vector &result);

//! Returns a list of types of the vectors of this data chunk
-DUCKDB_API vector<LogicalType> GetTypes();
+DUCKDB_API vector<LogicalType> GetTypes() const;

//! Converts this DataChunk to a printable string representation
DUCKDB_API string ToString() const;