Commit
add a candidate dense tensor class, test=develop (#28)
Shixiaowei02 authored Oct 20, 2021
1 parent ce210b4 commit f1c9661
Showing 20 changed files with 838 additions and 15 deletions.
7 changes: 5 additions & 2 deletions paddle/pten/common/data_type.h
@@ -75,17 +75,20 @@ inline size_t SizeOf(DataType data_type) {
PADDLE_THROW(platform::errors::Unimplemented(
"Data type %d is not supported by tensor.",
static_cast<int>(data_type)));
return 0;
}
return 0;
}

#define PT_FOR_EACH_DATA_TYPE(_) \
_(bool, DataType::BOOL) \
_(int8_t, DataType::INT8) \
_(uint8_t, DataType::UINT8) \
_(int16_t, DataType::INT16) \
_(int, DataType::INT32) \
_(uint16_t, DataType::UINT16) \
_(int32_t, DataType::INT32) \
_(uint32_t, DataType::UINT32) \
_(int64_t, DataType::INT64) \
_(uint64_t, DataType::UINT64) \
_(bfloat16, DataType::BFLOAT16) \
_(float16, DataType::FLOAT16) \
_(float, DataType::FLOAT32) \
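PT_FOR_EACH_DATA_TYPE is an X-macro: a caller passes in a two-argument macro that gets expanded once per (C++ type, DataType) pair, which is how functions such as SizeOf can cover every supported type without hand-written repetition. The self-contained sketch below illustrates the pattern outside of Paddle; the shortened type list and the names FOR_EACH_DATA_TYPE_SKETCH, SIZE_CASE, and SizeOfSketch are illustrative only and are not part of this commit.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Minimal stand-in for the enum in paddle/pten/common/data_type.h.
enum class DataType { BOOL, INT32, FLOAT32 };

// X-macro list: one entry per (C++ type, enum value) pair.
#define FOR_EACH_DATA_TYPE_SKETCH(_) \
  _(bool, DataType::BOOL)            \
  _(int32_t, DataType::INT32)        \
  _(float, DataType::FLOAT32)

// Expands the list into switch cases, mirroring how a SizeOf() can be built.
inline size_t SizeOfSketch(DataType t) {
  switch (t) {
#define SIZE_CASE(cpp_type, enum_value) \
  case enum_value:                      \
    return sizeof(cpp_type);
    FOR_EACH_DATA_TYPE_SKETCH(SIZE_CASE)
#undef SIZE_CASE
  }
  return 0;
}

int main() {
  std::printf("%zu\n", SizeOfSketch(DataType::FLOAT32));  // prints 4 on common platforms
  return 0;
}
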
4 changes: 4 additions & 0 deletions paddle/pten/core/CMakeLists.txt
@@ -1,3 +1,5 @@
add_subdirectory(candidate)

IF(WITH_MKLDNN)
set(MKLDNN_CTX_DEPS mkldnn)
ELSE()
@@ -15,3 +17,5 @@ cc_library(dense_tensor SRCS dense_tensor.cc DEPS enforce data_type ddim allocat

cc_library(kernel_factory SRCS kernel_factory.cc DEPS enforce)
cc_library(kernel_context SRCS kernel_context.cc DEPS enforce device_context)

cc_library(tensor_base SRCS tensor_base.cc allocator.cc storage.cc DEPS enforce)
14 changes: 8 additions & 6 deletions paddle/pten/core/allocator.h
@@ -23,6 +23,8 @@ namespace pten {
/// deallocation and construction/destruction of objects.
class RawAllocator {
public:
using Place = paddle::platform::Place;

/// \brief Default destructor.
virtual ~RawAllocator() = default;

@@ -43,7 +45,7 @@ class RawAllocator {

/// \brief Get the place value of the allocator and the allocation.
/// \return The place value of the allocator and the allocation.
virtual const paddle::platform::Place& place() const = 0;
virtual const Place& place() const = 0;
};

/// \brief Fancy pointer with context. The use of this data type
@@ -52,24 +54,24 @@ class RawAllocator {
/// support being inherited.
class Allocation final {
public:
using Place = paddle::platform::Place;
using DeleterFnPtr = void (*)(void*);

Allocation() = default;
Allocation(Allocation&&) = default;
Allocation& operator=(Allocation&&) = default;

Allocation(void* data, const paddle::platform::Place& place)
: data_(data), place_(place) {}
Allocation(void* data, const Place& place) : data_(data), place_(place) {}

Allocation(void* data,
void* ctx,
DeleterFnPtr ctx_deleter,
const paddle::platform::Place& place)
const Place& place)
: data_(data), ctx_(ctx, ctx_deleter), place_(place) {}

void* operator->() const noexcept { return data_; }
operator bool() const noexcept { return data_ || ctx_.Get(); }
const paddle::platform::Place& place() const noexcept { return place_; }
const Place& place() const noexcept { return place_; }

void Clear() noexcept {
data_ = nullptr;
@@ -132,7 +134,7 @@ class Allocation final {
Context ctx_;
// TODO(Shixiaowei02): Enum needs to be used instead to reduce
// the construction overhead by more than 50%.
paddle::platform::Place place_;
Place place_;
};

inline void swap(Allocation::Context& a, Allocation::Context& b) noexcept {
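As the diff shows, Allocation bundles a raw data pointer, an optional context with a DeleterFnPtr for releasing it, and the Place the memory lives on. A minimal sketch of wrapping an externally owned host buffer follows; it assumes the pten::Allocation constructor declared above and paddle::platform::CPUPlace, while FreeBuffer and WrapHostBuffer are hypothetical helper names, not part of this commit.

#include <cstddef>
#include <cstdlib>

#include "paddle/pten/core/allocator.h"

// Hypothetical deleter matching Allocation::DeleterFnPtr, i.e. void (*)(void*).
void FreeBuffer(void* ctx) { std::free(ctx); }

// Wraps a freshly malloc'ed host buffer; the buffer doubles as the context so
// the registered deleter can release it once the Allocation lets go of it.
pten::Allocation WrapHostBuffer(size_t bytes) {
  void* buffer = std::malloc(bytes);
  return pten::Allocation(buffer, buffer, &FreeBuffer,
                          paddle::platform::CPUPlace());
}
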
1 change: 1 addition & 0 deletions paddle/pten/core/candidate/CMakeLists.txt
@@ -0,0 +1 @@
cc_library(pten_dense_tensor SRCS dense_tensor.cc DEPS tensor_base)
145 changes: 145 additions & 0 deletions paddle/pten/core/candidate/dense_tensor.cc
@@ -0,0 +1,145 @@
/* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/pten/core/candidate/dense_tensor.h"

namespace pten {
namespace candidate {

DenseTensorMeta::DenseTensorMeta(DataType type, const DDim& dims)
    : dims(dims), type(type) {}
DenseTensorMeta::DenseTensorMeta(DataType type,
                                 const DDim& dims,
                                 DataLayout layout)
    : dims(dims), type(type), layout(layout) {}
DenseTensorMeta::DenseTensorMeta(DataType type,
                                 const DDim& dims,
                                 DataLayout layout,
                                 const std::vector<std::vector<size_t>>& lod)
    : dims(dims), type(type), layout(layout), lod(lod) {}

bool DenseTensorMeta::valid() const noexcept {
  bool valid{true};
  valid = valid && (type != DataType::UNDEFINED);
  valid = valid && (layout != DataLayout::UNDEFINED);
  valid = valid && (is_scalar || product(dims));
  return valid;
}

DenseTensor::DenseTensor(const std::shared_ptr<Allocator>& a,
                         const DenseTensorMeta& meta)
    : meta_(meta),
      storage_(
          make_intrusive<TensorStorage>(a, SizeOf(data_type()) * numel())) {}

DenseTensor::DenseTensor(const std::shared_ptr<Allocator>& a,
                         DenseTensorMeta&& meta)
    : meta_(std::move(meta)),
      storage_(
          make_intrusive<TensorStorage>(a, SizeOf(data_type()) * numel())) {}

DenseTensor::DenseTensor(intrusive_ptr<Storage> storage,
                         const DenseTensorMeta& meta)
    : meta_(meta), storage_(std::move(storage)) {}

DenseTensor::DenseTensor(intrusive_ptr<Storage> storage, DenseTensorMeta&& meta)
    : meta_(std::move(meta)), storage_(std::move(storage)) {}

int64_t DenseTensor::numel() const {
  if (meta_.is_scalar) {
    return 1;
  }
  return product(meta_.dims);
}

bool DenseTensor::SharesStorageWith(const DenseTensor& b) const {
  return storage_.get() == b.storage_.get() && storage_.get() != nullptr;
}

template <typename T>
T* DenseTensor::mutable_data(size_t request_bytes) {
  PADDLE_ENFORCE(
      valid(),
      paddle::platform::errors::PreconditionNotMet(
          "The meta data must be valid when call the mutable data function."));
  PADDLE_ENFORCE_NOT_NULL(
      storage_,
      paddle::platform::errors::PreconditionNotMet(
          "The storage must be valid when call the mutable data function."));
  PADDLE_ENFORCE(
      (data_type() == paddle::experimental::CppTypeToDataType<T>::Type()),
      paddle::platform::errors::PreconditionNotMet(
          "The type of data we are trying to retrieve does not match the "
          "type of data currently contained in the container."));
  size_t bytes = numel() * SizeOf(data_type());
  if (request_bytes) {
    PADDLE_ENFORCE_GE(request_bytes,
                      bytes,
                      paddle::platform::errors::InvalidArgument(
                          "The reserved size %d should be enough to meet the "
                          "volume required by metadata %d.",
                          request_bytes,
                          bytes));
    bytes = request_bytes;
  }
  if (storage_->size() < bytes) {
    storage_->Realloc(bytes);
  }
  return static_cast<T*>(storage_->data());
}

template <typename T>
const T* DenseTensor::data() const {
  PADDLE_ENFORCE_NOT_NULL(
      storage_,
      paddle::platform::errors::PreconditionNotMet(
          "The storage must be valid when call the mutable data function."));
  PADDLE_ENFORCE(
      (data_type() == paddle::experimental::CppTypeToDataType<T>::Type()),
      paddle::platform::errors::PreconditionNotMet(
          "The type of data we are trying to retrieve does not match the "
          "type of data currently contained in the container."));
  return static_cast<const T*>(storage_->data());
}

void DenseTensor::check_memory_size() const {
  size_t bytes = numel() * SizeOf(data_type());
  PADDLE_ENFORCE_GE(memory_size(),
                    bytes,
                    paddle::platform::errors::InvalidArgument(
                        "The memory size %d should be enough to meet the "
                        "volume required by metadata %d.",
                        memory_size(),
                        bytes));
}

#define DATA_MEMBER_FUNC_INSTANTIATION(dtype)                      \
  template dtype* DenseTensor::mutable_data(size_t request_bytes); \
  template const dtype* DenseTensor::data() const;

DATA_MEMBER_FUNC_INSTANTIATION(int8_t);
DATA_MEMBER_FUNC_INSTANTIATION(uint8_t);
DATA_MEMBER_FUNC_INSTANTIATION(int16_t);
DATA_MEMBER_FUNC_INSTANTIATION(uint16_t);
DATA_MEMBER_FUNC_INSTANTIATION(int32_t);
DATA_MEMBER_FUNC_INSTANTIATION(uint32_t);
DATA_MEMBER_FUNC_INSTANTIATION(int64_t);
DATA_MEMBER_FUNC_INSTANTIATION(uint64_t);
DATA_MEMBER_FUNC_INSTANTIATION(float);
DATA_MEMBER_FUNC_INSTANTIATION(double);

#undef DATA_MEMBER_FUNC_INSTANTIATION

} // namespace candidate
} // namespace pten
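Taken together, the new class keeps metadata (DenseTensorMeta) separate from storage: the allocator-based constructors size a TensorStorage as SizeOf(type) * numel(), numel() returns the product of dims (or 1 for a scalar), and mutable_data<T>() / data<T>() refuse to hand out a pointer whose C++ type does not match the recorded DataType. A rough usage sketch follows; it assumes DDim is paddle::framework::DDim built via make_ddim, and that host_allocator is some std::shared_ptr<pten::Allocator> implementation supplied elsewhere, since neither is part of this file.

#include <cstdint>
#include <memory>

#include "paddle/fluid/framework/ddim.h"
#include "paddle/pten/core/candidate/dense_tensor.h"

using pten::candidate::DenseTensor;
using pten::candidate::DenseTensorMeta;

// `host_allocator` is assumed to be provided by the surrounding code; this
// commit only consumes the Allocator interface through a std::shared_ptr.
void FillWithZeros(const std::shared_ptr<pten::Allocator>& host_allocator) {
  DenseTensorMeta meta(pten::DataType::FLOAT32,
                       paddle::framework::make_ddim({2, 3}));
  DenseTensor t(host_allocator, meta);    // storage sized as 2 * 3 * sizeof(float)
  float* buf = t.mutable_data<float>(0);  // checks that float matches meta.type
  for (int64_t i = 0; i < t.numel(); ++i) {
    buf[i] = 0.0f;
  }
}
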

1 comment on commit f1c9661

@paddle-bot-old (bot) commented on f1c9661, Oct 20, 2021

🕵️ CI failures summary

🔍 PR: #28 Commit ID: f1c9661 contains failed CI.

🔹 Failed: PR-CI-APPROVAL

approve_failed
2021-10-20 22:51:35 Saving to: "bk.txt"
2021-10-20 22:51:35 0K 100% 3.33M=0s
2021-10-20 22:51:35 2021-10-20 22:51:35 (3.33 MB/s) - "bk.txt" saved [5/5])
2021-10-20 22:51:42 ****************
2021-10-20 22:51:42 0. You must have one RD (lanxianghit (Recommend), phlrain or luotao1) approval for changing the FLAGS, which manages the environment variables.
2021-10-20 22:51:42 1. You must have Dianhai approval for changing 20+ files or adding more than 1000 lines of content.
2021-10-20 22:51:42 2. You must have one RD (XiaoguangHu01,chenwhql,zhiqiu,Xreki,luotao1) approval for paddle/fluid/framework/operator.h, which manages the underlying code for fluid.
2021-10-20 22:51:42 3. You must have one RD (zhiqiu (Recommend) , phlrain) approval for the changes of paddle/fluid/pybind/op_function_generator.cc, which manages the logic of automatic generating op functions for dygraph.
2021-10-20 22:51:42 4. You must have one RD (XiaoguangHu01,chenwhql,zhiqiu,Xreki,luotao1) approval for the usage of const_cast.
2021-10-20 22:51:42 5. You must have one RD (Avin0323(Recommend) or zhouwei25 or wanghuancoder or luotao1) approval for modifying unity_build_rule.cmake, which manages the rules of Unity Build.
2021-10-20 22:51:42 There are 6 approval errors.
2021-10-20 22:51:42 ****************
2021-10-20 22:51:42 + EXCODE=6
2021-10-20 22:51:42 + echo 'EXCODE: 6'
2021-10-20 22:51:42 EXCODE: 6
2021-10-20 22:51:42 + echo 'ipipe_log_param_EXCODE: 6'
2021-10-20 22:51:42 ipipe_log_param_EXCODE: 6
2021-10-20 22:51:42 + exit 6
