Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[clang-tidy] enable cppcoreguidelines-pro-type-cstyle-cast check #55459

Closed
wants to merge 36 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
dd791a4
add cppcoreguidelines-pro-type-cstyle-cast
RedContritio Jul 16, 2023
d217d6c
used reinterpret_cast in paddle/phi/kernels/cpu/multiclass_nms3_kerne…
RedContritio Jul 20, 2023
74a522f
used reinterpret_cast in paddle/fluid/distributed/fleet_executor/test…
RedContritio Jul 20, 2023
dd5b990
used reinterpret_cast in paddle/fluid/pybind/eager_method.cc to remov…
RedContritio Jul 22, 2023
7f0496b
used reinterpret_cast in paddle/fluid/pybind/eager_properties.cc to r…
RedContritio Jul 22, 2023
3e743c3
used reinterpret_cast in paddle/fluid/eager/auto_code_generator/gener…
RedContritio Jul 22, 2023
2111916
used reinterpret_cast in paddle/fluid/pybind/eager.cc to remove 2 cpp…
RedContritio Jul 21, 2023
7293fb9
used reinterpret_cast in paddle/fluid/pybind/eager_functions.cc to re…
RedContritio Jul 23, 2023
8f08077
use reinterpret_cast in test/cpp/inference/api/analyzer_capi_exp_pd_t…
RedContritio Jul 23, 2023
9bb6cdb
used reinterpret_cast in paddle/fluid/pybind/eager_legacy_op_function…
RedContritio Jul 23, 2023
c9fa7f5
used reinterpret_cast in paddle/fluid/distributed/ps/service/brpc_ps_…
RedContritio Jul 23, 2023
688d29f
used reinterpret_cast in paddle/fluid/pybind/eager_py_layer.cc to rem…
RedContritio Jul 23, 2023
d6b1344
used reinterpret_cast in paddle/fluid/inference/tensorrt/trt_int8_cal…
RedContritio Jul 20, 2023
0b221f5
used reinterpret_cast in paddle/fluid/distributed/ps/service/brpc_ps_…
RedContritio Jul 20, 2023
30c814a
used reinterpret_cast in paddle/fluid/distributed/ps/service/brpc_uti…
RedContritio Jul 21, 2023
6830fb0
used reinterpret_cast in paddle/fluid/distributed/ps/service/heter_cl…
RedContritio Jul 21, 2023
f15650c
used reinterpret_cast in paddle/fluid/pybind/op_function_common.cc to…
RedContritio Jul 21, 2023
ced21ec
used reinterpret_cast in paddle/phi/kernels/funcs/sequence_padding.cc…
RedContritio Jul 21, 2023
01a04ed
used reinterpret_cast in paddle/phi/core/distributed/store/socket.cpp…
RedContritio Jul 21, 2023
955df67
used reinterpret_cast in paddle/fluid/eager/auto_code_generator/gener…
RedContritio Jul 21, 2023
dc2b742
used reinterpret_cast in paddle/ir/core/value.cc to remove 1 cppcoreg…
RedContritio Jul 21, 2023
fa46b6d
used reinterpret_cast in paddle/phi/api/profiler/device_tracer.cc to …
RedContritio Jul 23, 2023
95cc336
used reinterpret_cast in paddle/fluid/pybind/eager_math_op_patch.cc t…
RedContritio Jul 23, 2023
888a2e8
used reinterpret_cast in paddle/fluid/framework/fleet/gloo_wrapper.cc…
RedContritio Jul 23, 2023
2494c54
used reinterpret_cast in paddle/fluid/platform/gen_comm_id_helper.cc …
RedContritio Jul 23, 2023
2770b3f
used reinterpret_cast in paddle/fluid/pybind/eager_py_layer.cc to rem…
RedContritio Jul 23, 2023
3277c23
used dynamic_cast in paddle/fluid/operators/reader/buffered_reader.cc…
RedContritio Jul 21, 2023
c36ec17
add NOLINT in paddle/fluid/pybind/eager_py_layer.cc to remove 4 cppco…
RedContritio Jul 23, 2023
f86f322
add NOLINT in paddle/fluid/pybind/eager.cc to remove 7 cppcoreguideli…
RedContritio Jul 23, 2023
d01e08c
define PyXXXX_s macro in paddle/utils/python_patch.h to remove 1340 c…
RedContritio Jul 20, 2023
c55dc95
add guard_ifndef in python_patch.h
RedContritio Jul 22, 2023
c3761fa
add PyList_GET_SIZE_s in python_patch.h to remove some cppcoreguideli…
RedContritio Jul 21, 2023
38c447e
include paddle/utils/python_patch.h in paddle/fluid/pybind/jit.cc to …
RedContritio Jul 21, 2023
8d81133
add Py_TYPE_s in python_patch.h to remove 1 cppcoreguidelines-pro-typ…
RedContritio Jul 21, 2023
22ce7a1
redefine MAP_FAILED macro in paddle/utils/mman_patch.h to remove 3 cp…
RedContritio Jul 22, 2023
cb369ce
include mman_patch.h in paddle/fluid/framework/data_feed.cc to remove…
RedContritio Jul 23, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .clang-tidy
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,7 @@ modernize-use-override,
-performance-trivially-destructible,
-performance-type-promotion-in-math-fn,
-performance-unnecessary-copy-initialization,
cppcoreguidelines-pro-type-cstyle-cast,
readability-container-size-empty,
'
HeaderFilterRegex: '^(paddle/(?!cinn)).*$'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,9 @@ TEST(InterceptorTest, PingPong) {
address.sin_family = AF_INET;
address.sin_addr.s_addr = INADDR_ANY;
address.sin_port = htons(port0);
while (bind(server_fd, (struct sockaddr*)&address, sizeof(address)) == -1) {
while (bind(server_fd,
reinterpret_cast<struct sockaddr*>(&address),
sizeof(address)) == -1) {
port0++;
address.sin_port = htons(port0);
}
Expand All @@ -93,7 +95,9 @@ TEST(InterceptorTest, PingPong) {
setsockopt(server_fd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));
port1 = port0 + 1;
address.sin_port = htons(port1);
while (bind(server_fd, (struct sockaddr*)&address, sizeof(address)) == -1) {
while (bind(server_fd,
reinterpret_cast<struct sockaddr*>(&address),
sizeof(address)) == -1) {
port1++;
address.sin_port = htons(port1);
}
Expand Down
16 changes: 9 additions & 7 deletions paddle/fluid/distributed/ps/service/brpc_ps_client.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1580,7 +1580,8 @@ std::future<int32_t> BrpcPsClient::PushSparse(size_t table_id,
for (size_t kv_idx = 0; kv_idx < sorted_kv_size; ++kv_idx) {
shard_kv_data.key_list[kv_idx] = sorted_kv_list[kv_idx].first;
shard_kv_data.value_list[kv_idx].assign(
(const char *)sorted_kv_list[kv_idx].second, value_size);
reinterpret_cast<const char *>(sorted_kv_list[kv_idx].second),
value_size);
}
shard_kv_data.kv_num = sorted_kv_size;
}
Expand Down Expand Up @@ -1790,8 +1791,8 @@ int BrpcPsClient::PushSparseAsyncShardMerge(
} else if (sorted_kv_size == 1) {
shard_kv_data.kv_num = 1;
shard_kv_data.key_list[0] = sorted_kv_list[0].first;
shard_kv_data.value_list[0].assign((const char *)(sorted_kv_list[0].second),
value_size);
shard_kv_data.value_list[0].assign(
reinterpret_cast<const char *>(sorted_kv_list[0].second), value_size);
return 0;
}

Expand All @@ -1814,11 +1815,12 @@ int BrpcPsClient::PushSparseAsyncShardMerge(
}
if (last_merge_data != NULL) {
shard_kv_data.value_list[merged_kv_count].assign(
(const char *)last_merge_data, value_size);
reinterpret_cast<const char *>(last_merge_data), value_size);
last_merge_data = NULL;
} else {
shard_kv_data.value_list[merged_kv_count].assign(
(const char *)sorted_kv_list[kv_idx - 1].second, value_size);
reinterpret_cast<const char *>(sorted_kv_list[kv_idx - 1].second),
value_size);
}
shard_kv_data.key_list[merged_kv_count++] = last_key;
if (kv_idx < sorted_kv_size) {
Expand All @@ -1827,7 +1829,7 @@ int BrpcPsClient::PushSparseAsyncShardMerge(
}
if (kv_idx == sorted_kv_size - 1) {
shard_kv_data.value_list[merged_kv_count].assign(
(const char *)last_value_data, value_size);
reinterpret_cast<const char *>(last_value_data), value_size);
shard_kv_data.key_list[merged_kv_count++] = last_key;
}
}
Expand Down Expand Up @@ -1918,7 +1920,7 @@ std::future<int32_t> BrpcPsClient::PushDense(const Region *regions,
CHECK(pos + data_num <= data_size)
<< "invalid dense size, cur pos[" << pos << "]"
<< " data_num[" << data_num << "] size[" << data_size << "]";
const float *region_data = (const float *)(regions[i].data);
const float *region_data = reinterpret_cast<const float *>(regions[i].data);
memcpy(data + pos, region_data, regions[i].size);
pos += data_num;
}
Expand Down
33 changes: 18 additions & 15 deletions paddle/fluid/distributed/ps/service/brpc_ps_server.cc
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ int32_t BrpcPsService::PullDense(Table *table,
return 0;
}
CostTimer timer("pserver_server_pull_dense");
uint32_t num = *(const uint32_t *)request.params(0).c_str();
uint32_t num = *reinterpret_cast<const uint32_t *>(request.params(0).c_str());

auto res_data = butil::get_object<std::vector<float>>();
res_data->resize(num * table->ValueAccesor()->GetAccessorInfo().select_size /
Expand Down Expand Up @@ -346,12 +346,14 @@ int32_t BrpcPsService::PushDenseParam(Table *table,
}
push_buffer.resize(0);
push_buffer.reserve(req_buffer_size);
const char *data = (const char *)cntl->request_attachment().fetch(
const_cast<char *>(push_buffer.data()), req_buffer_size);
const char *data =
reinterpret_cast<const char *>(cntl->request_attachment().fetch(
const_cast<char *>(push_buffer.data()), req_buffer_size));

uint32_t num = *(const uint32_t *)data;
uint32_t num = *reinterpret_cast<const uint32_t *>(data);

const float *values = (const float *)(data + sizeof(uint32_t));
const float *values =
reinterpret_cast<const float *>(data + sizeof(uint32_t));
TableContext table_context;
table_context.value_type = Dense;
table_context.push_context.values = values;
Expand Down Expand Up @@ -383,11 +385,11 @@ int32_t BrpcPsService::PushDense(Table *table,
|--num--|---valuesData---|
|--4B---|----------------|
*/
uint32_t num = *(const uint32_t *)(request.data().data());
uint32_t num = *reinterpret_cast<const uint32_t *>(request.data().data());
TableContext table_context;
table_context.value_type = Dense;
table_context.push_context.values =
(const float *)(request.data().data() + sizeof(uint32_t));
reinterpret_cast<const float *>(request.data().data() + sizeof(uint32_t));
table_context.num = num;
// const float *values = (const float *)(request.data().data() +
// sizeof(uint32_t));
Expand Down Expand Up @@ -446,9 +448,9 @@ int32_t BrpcPsService::PushSparseParam(Table *table,
|---keysData---|---valuesData---|
|---8*{num}B---|----------------|
*/
const uint64_t *keys = (const uint64_t *)push_data.data();
const float *values =
(const float *)(push_data.data() + sizeof(uint64_t) * num);
const uint64_t *keys = reinterpret_cast<const uint64_t *>(push_data.data());
const float *values = reinterpret_cast<const float *>(push_data.data() +
sizeof(uint64_t) * num);

TableContext table_context;
table_context.value_type = Sparse;
Expand Down Expand Up @@ -578,9 +580,10 @@ int32_t BrpcPsService::PushSparse(Table *table,
*/
TableContext table_context;
table_context.value_type = Sparse;
table_context.push_context.keys = (const uint64_t *)push_data.data();
table_context.push_context.values =
(const float *)(push_data.data() + sizeof(uint64_t) * num);
table_context.push_context.keys =
reinterpret_cast<const uint64_t *>(push_data.data());
table_context.push_context.values = reinterpret_cast<const float *>(
push_data.data() + sizeof(uint64_t) * num);
table_context.num = num;
// const uint64_t *keys = (const uint64_t *)push_data.data();
// const float *values = (const float *)(push_data.data() + sizeof(uint64_t) *
Expand Down Expand Up @@ -879,8 +882,8 @@ int32_t BrpcPsService::PushGlobalStep(Table *table,
set_response_code(response, 0, "run_program data is empty");
return 0;
}
const int64_t *values =
(const int64_t *)(request.data().data() + sizeof(uint32_t));
const int64_t *values = reinterpret_cast<const int64_t *>(
request.data().data() + sizeof(uint32_t));
auto trainer_id = request.client_id();

TableContext context;
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/distributed/ps/service/brpc_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -333,7 +333,7 @@ std::string GetIntTypeEndpoint(const std::string& ip, const uint32_t& port) {
char* int_ip = NULL;

while (hp->h_addr_list[i] != NULL) {
int_ip = inet_ntoa(*(struct in_addr*)hp->h_addr_list[i]);
int_ip = inet_ntoa(*reinterpret_cast<struct in_addr*>(hp->h_addr_list[i]));
VLOG(3) << "Brpc Get host by name, host:" << ip << " -> ip: " << int_ip;
break;
}
Expand Down
4 changes: 3 additions & 1 deletion paddle/fluid/distributed/ps/service/heter_client.cc
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,9 @@ std::future<int32_t> HeterClient::SendCmd(
new paddle::distributed::DownpourBrpcClosure(
request_call_num, [request_call_num, cmd_id](void* done) {
int ret = 0;
auto* closure = (paddle::distributed::DownpourBrpcClosure*)done;
auto* closure =
reinterpret_cast<paddle::distributed::DownpourBrpcClosure*>(
done);
for (size_t i = 0; i < request_call_num; ++i) {
if (closure->check_response(i, cmd_id) != 0) {
ret = -1;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ def FindParsingFunctionFromAttributeType(atype):
FUNCTION_NAME_TEMPLATE = "{}{}{}"


PYTHON_C_FUNCTION_REG_TEMPLATE = " {{\"{}{}\", (PyCFunction)(void(*)(void)) {}eager_api_{}, METH_VARARGS | METH_KEYWORDS, \"C++ interface function for {} in dygraph.\"}},\n"
PYTHON_C_FUNCTION_REG_TEMPLATE = " {{\"{}{}\", reinterpret_cast<PyCFunction>(reinterpret_cast<void (*)(void)>({}eager_api_{})), METH_VARARGS | METH_KEYWORDS, \"C++ interface function for {} in dygraph.\"}},\n"


PYTHON_C_WRAPPER_TEMPLATE = """
Expand Down Expand Up @@ -243,9 +243,9 @@ def FindParsingFunctionFromAttributeType(atype):


CORE_OPS_INFO_REGISTRY = """
{\"get_core_ops_args_info\", (PyCFunction)(void(*)(void))eager_get_core_ops_args_info, METH_NOARGS, \"C++ interface function for eager_get_core_ops_args_info.\"},
{\"get_core_ops_args_type_info\", (PyCFunction)(void(*)(void))eager_get_core_ops_args_type_info, METH_NOARGS, \"C++ interface function for eager_get_core_ops_args_type_info.\"},
{\"get_core_ops_returns_info\", (PyCFunction)(void(*)(void))eager_get_core_ops_returns_info, METH_NOARGS, \"C++ interface function for eager_get_core_ops_returns_info.\"},
{\"get_core_ops_args_info\", reinterpret_cast<PyCFunction>(reinterpret_cast<void (*)(void)>(eager_get_core_ops_args_info)), METH_NOARGS, \"C++ interface function for eager_get_core_ops_args_info.\"},
{\"get_core_ops_args_type_info\", reinterpret_cast<PyCFunction>(reinterpret_cast<void (*)(void)>(eager_get_core_ops_args_type_info)), METH_NOARGS, \"C++ interface function for eager_get_core_ops_args_type_info.\"},
{\"get_core_ops_returns_info\", reinterpret_cast<PyCFunction>(reinterpret_cast<void (*)(void)>(eager_get_core_ops_returns_info)), METH_NOARGS, \"C++ interface function for eager_get_core_ops_returns_info.\"},
"""

NAMESPACE_WRAPPER_TEMPLATE = """namespace {} {{
Expand Down
1 change: 1 addition & 0 deletions paddle/fluid/framework/data_feed.cc
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ limitations under the License. */
#include "io/fs.h"
#include "paddle/fluid/platform/monitor.h"
#include "paddle/fluid/platform/timer.h"
#include "paddle/utils/mman_patch.h"

USE_INT_STAT(STAT_total_feasign_num_in_mem);
PHI_DECLARE_bool(enable_ins_parser_file);
Expand Down
3 changes: 2 additions & 1 deletion paddle/fluid/framework/fleet/gloo_wrapper.cc
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,8 @@ void ParallelConnectContext::connectFullMesh(
}
Impl impl_;
memcpy(&impl_, addr.data(), sizeof(impl_));
struct sockaddr_in* sa = (struct sockaddr_in*)&(impl_.ss);
struct sockaddr_in* sa =
reinterpret_cast<struct sockaddr_in*>(&(impl_.ss));
std::string ip = getCharIpAddr(sa->sin_addr.s_addr);
VLOG(0) << "peer " << i << " ip addr: " << ip
<< ", port: " << sa->sin_port;
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/inference/api/mkldnn_quantizer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -584,7 +584,7 @@ AnalysisPredictor::MkldnnQuantizer::Histogram(
void AnalysisPredictor::MkldnnQuantizer::ClearDeviceContext() const {
platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();
phi::OneDNNContext* dev_ctx =
(phi::OneDNNContext*)pool.Get(predictor_.place_);
dynamic_cast<phi::OneDNNContext*>(pool.Get(predictor_.place_));
dev_ctx->ResetBlobMap(phi::OneDNNContext::tls().get_curr_exec());
}

Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/inference/tensorrt/trt_int8_calibrator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ const void* TRTInt8Calibrator::readCalibrationCache(size_t& length)

void TRTInt8Calibrator::writeCalibrationCache(const void* ptr,
std::size_t length) TRT_NOEXCEPT {
calibration_table_ = std::string((const char*)ptr, length);
calibration_table_ = std::string(reinterpret_cast<const char*>(ptr), length);
VLOG(4) << "Got calibration data for " << engine_name_ << " " << ptr
<< " length=" << length;
}
Expand Down
1 change: 1 addition & 0 deletions paddle/fluid/memory/allocation/mmap_allocator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
#include "glog/logging.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/phi/core/flags.h"
#include "paddle/utils/mman_patch.h"

PHI_DECLARE_bool(use_shm_cache);

Expand Down
11 changes: 5 additions & 6 deletions paddle/fluid/operators/reader/buffered_reader.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,9 @@ BufferedReader::BufferedReader(
#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
if (platform::is_gpu_place(place_) && !pin_memory) {
int dev_idx = place_.device;
compute_stream_ =
((phi::GPUContext *)(platform::DeviceContextPool::Instance().Get(
place_)))
->stream();
compute_stream_ = (dynamic_cast<phi::GPUContext *>(
platform::DeviceContextPool::Instance().Get(place_)))
->stream();
events_.resize(buffer_size);
for (auto &event : events_) {
event = platform::CudaEventResourcePool::Instance().New(dev_idx);
Expand All @@ -80,8 +79,8 @@ BufferedReader::BufferedReader(

#ifdef PADDLE_WITH_CUSTOM_DEVICE
if (platform::is_custom_place(place_)) {
auto stream = ((platform::CustomDeviceContext
*)(platform::DeviceContextPool::Instance().Get(place_)))
auto stream = (dynamic_cast<platform::CustomDeviceContext *>(
platform::DeviceContextPool::Instance().Get(place_)))
->stream();
custom_device_compute_stream_ =
std::make_shared<phi::stream::Stream>(place_, stream);
Expand Down
20 changes: 11 additions & 9 deletions paddle/fluid/platform/gen_comm_id_helper.cc
Original file line number Diff line number Diff line change
Expand Up @@ -195,10 +195,11 @@ int CreateListenSocket(const std::string& ep) {
int total_time = 0;
while (true) {
int ret_val = -1;
RETRY_SYS_CALL_VAL(
bind(server_fd, (struct sockaddr*)&address, sizeof(address)),
"bind",
ret_val);
RETRY_SYS_CALL_VAL(bind(server_fd,
reinterpret_cast<struct sockaddr*>(&address),
sizeof(address)),
"bind",
ret_val);

if (ret_val == -1) {
BindOrConnectFailed(timeout, &try_times, &total_time, "bind", ep);
Expand Down Expand Up @@ -277,7 +278,7 @@ static int ConnectAddr(const std::string& ep, const CommHead head) {

int i = 0;
while (hp->h_addr_list[i] != NULL) {
ip = inet_ntoa(*(struct in_addr*)hp->h_addr_list[i]);
ip = inet_ntoa(*reinterpret_cast<struct in_addr*>(hp->h_addr_list[i]));
VLOG(3) << "gethostbyname host:" << host << " ->ip: " << ip;
break;
}
Expand All @@ -301,10 +302,11 @@ static int ConnectAddr(const std::string& ep, const CommHead head) {
CHECK_SYS_CALL_VAL(socket(AF_INET, SOCK_STREAM, 0), "socket", sock);
while (true) {
int ret_val = -1;
RETRY_SYS_CALL_VAL(
connect(sock, (struct sockaddr*)&server_addr, sizeof(server_addr)),
"connect",
ret_val);
RETRY_SYS_CALL_VAL(connect(sock,
reinterpret_cast<struct sockaddr*>(&server_addr),
sizeof(server_addr)),
"connect",
ret_val);

if (ret_val == -1) {
BindOrConnectFailed(timeout, &try_times, &total_time, "connect", ep);
Expand Down
16 changes: 8 additions & 8 deletions paddle/fluid/pybind/eager.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1328,7 +1328,7 @@ void BindEager(pybind11::module* module) {
auto type = &heap_type->ht_type;
type->tp_name = "Tensor";
type->tp_basicsize = sizeof(TensorObject);
type->tp_dealloc = (destructor)TensorDealloc;
type->tp_dealloc = reinterpret_cast<destructor>(TensorDealloc);
type->tp_as_number = &number_methods;
type->tp_as_sequence = &sequence_methods;
type->tp_as_mapping = &mapping_methods;
Expand All @@ -1337,7 +1337,7 @@ void BindEager(pybind11::module* module) {
type->tp_init = TensorInit;
type->tp_new = TensorNew;
type->tp_weaklistoffset = offsetof(TensorObject, weakrefs);
Py_INCREF(&PyBaseObject_Type);
Py_INCREF(&PyBaseObject_Type); // NOLINT
type->tp_base = reinterpret_cast<PyTypeObject*>(&PyBaseObject_Type);
type->tp_flags |=
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HEAPTYPE;
Expand All @@ -1352,10 +1352,10 @@ void BindEager(pybind11::module* module) {
return;
}

Py_INCREF(type);
Py_INCREF(type); // NOLINT
if (PyModule_AddObject(m.ptr(), "Tensor", reinterpret_cast<PyObject*>(type)) <
0) {
Py_DECREF(type);
Py_DECREF(type); // NOLINT
Py_DECREF(m.ptr());
PADDLE_THROW(platform::errors::Fatal(
"Init Paddle error in BindEager(PyModule_AddObject)."));
Expand All @@ -1377,15 +1377,15 @@ void BindEagerStringTensor(pybind11::module* module) {
auto type = &heap_type->ht_type;
type->tp_name = "StringTensor";
type->tp_basicsize = sizeof(TensorObject);
type->tp_dealloc = (destructor)TensorDealloc;
type->tp_dealloc = reinterpret_cast<destructor>(TensorDealloc);
type->tp_as_number = &number_methods;
type->tp_as_sequence = &sequence_methods;
type->tp_as_mapping = &mapping_methods;
type->tp_methods = string_tensor_variable_methods;
type->tp_getset = string_tensor_variable_properties;
type->tp_init = StringTensorInit;
type->tp_new = TensorNew;
Py_INCREF(&PyBaseObject_Type);
Py_INCREF(&PyBaseObject_Type); // NOLINT
type->tp_base = reinterpret_cast<PyTypeObject*>(&PyBaseObject_Type);
type->tp_flags |=
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HEAPTYPE;
Expand All @@ -1400,10 +1400,10 @@ void BindEagerStringTensor(pybind11::module* module) {
return;
}

Py_INCREF(type);
Py_INCREF(type); // NOLINT
if (PyModule_AddObject(
m.ptr(), "StringTensor", reinterpret_cast<PyObject*>(type)) < 0) {
Py_DECREF(type);
Py_DECREF(type); // NOLINT
Py_DECREF(m.ptr());
PADDLE_THROW(platform::errors::Fatal(
"Init Paddle error in BindEagerStringTensor(PyModule_AddObject)."));
Expand Down
Loading