Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
… frame_migration
  • Loading branch information
Charles-hit committed Jul 27, 2022
2 parents 2799cd7 + 84d595f commit 5eaaf2d
Show file tree
Hide file tree
Showing 92 changed files with 2,584 additions and 869 deletions.
8 changes: 4 additions & 4 deletions cmake/xpu_kp.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -168,11 +168,13 @@ macro(compile_kernel COMPILE_ARGS)
else()
set(ABI_VERSION "-D_GLIBCXX_USE_CXX11_ABI=1")
endif()
add_custom_target(
${kernel_name}.xpu ALL
COMMAND ${CMAKE_COMMAND} -E copy ${kernel_path}/${kernel_name}.kps
kernel_build/${kernel_name}.xpu)
add_custom_command(
OUTPUT kernel_build/${kernel_name}.bin.o
COMMAND ${CMAKE_COMMAND} -E make_directory kernel_build
COMMAND ${CMAKE_COMMAND} -E copy ${kernel_path}/${kernel_name}.kps
kernel_build/${kernel_name}.xpu
COMMAND
${XPU_CLANG} --sysroot=${CXX_DIR} -std=c++11 ${ABI_VERSION} ${OPT_LEVEL}
-fno-builtin -mcpu=xpu2 -fPIC ${XPU_CXX_DEFINES} ${XPU_CXX_FLAGS}
Expand All @@ -189,8 +191,6 @@ macro(compile_kernel COMPILE_ARGS)
add_custom_command(
OUTPUT kernel_build/${kernel_name}.host.o
COMMAND ${CMAKE_COMMAND} -E make_directory kernel_build
COMMAND ${CMAKE_COMMAND} -E copy ${kernel_path}/${kernel_name}.kps
kernel_build/${kernel_name}.xpu
COMMAND
${XPU_CLANG} --sysroot=${CXX_DIR} -std=c++11 ${ABI_VERSION} ${OPT_LEVEL}
-fno-builtin -mcpu=xpu2 -fPIC ${XPU_CXX_DEFINES} ${XPU_CXX_FLAGS}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,8 @@ def SkipAPIGeneration(forward_api_name):
'adam',
'adamw_',
'adamw',
'average_accumulates',
'average_accumulates_',
'decayed_adagrad_',
'decayed_adagrad',
'dgc_momentum_',
Expand Down
13 changes: 12 additions & 1 deletion paddle/fluid/framework/ir/fuse_elewise_add_act_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -297,7 +297,18 @@ void FuseElewiseAddActPass::RemoveIntermediateOut(Graph *graph) const {
}
}
}
GraphSafeRemoveNodes(graph, need_removed_nodes);
details::RemovedVars *saved_removed_nodes = new details::RemovedVars;
GraphSafeRemoveNodes(graph, need_removed_nodes, saved_removed_nodes);
if (!saved_removed_nodes->empty()) {
// TODO(pangyoki): If kRemovedVars exists, merge saved_removed_nodes into
// RemovedVars.
PADDLE_ENFORCE_EQ(graph->Has(details::kRemovedVars),
false,
platform::errors::PreconditionNotMet(
"Removed nodes are only saved for "
"fuse_elewise_add_act_pass in temporary."));
graph->Set(details::kRemovedVars, saved_removed_nodes);
}
}

void FuseElewiseAddActPass::ReLinkNodes(Graph *graph,
Expand Down
2 changes: 2 additions & 0 deletions paddle/fluid/framework/ir/graph.h
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ namespace details {
// This attr is not recommended, because the graph should not depend on
// the program once it is built.
constexpr char kStaleProgramOpDescs[] = "stale_program_op_descs";
constexpr char kRemovedVars[] = "removed_vars";
typedef std::unordered_set<std::shared_ptr<ir::Node>> RemovedVars;
} // namespace details

namespace ir {
Expand Down
22 changes: 17 additions & 5 deletions paddle/fluid/framework/ir/graph_helper.cc
Original file line number Diff line number Diff line change
Expand Up @@ -549,6 +549,18 @@ static void GetGraphOpDesc(const std::vector<Node *> &nodes,
}
}

// Appends to `vars` a copy of the VarDesc proto of every variable node in
// `nodes` that belongs to the same block as `graph`. Works for both raw
// Node* sets and shared_ptr<Node> sets (removed-but-retained nodes).
template <class T = Node *>
static void GetGraphVarDesc(const Graph &graph,
                            const std::unordered_set<T> &nodes,
                            std::vector<proto::VarDesc> *vars) {
  const auto block_id = graph.GetBlockId();
  for (const T &node : nodes) {
    // Skip op nodes, var nodes without a desc, and vars from other blocks.
    if (!node->IsVar() || !node->Var() ||
        node->GetVarNodeBlockId() != block_id) {
      continue;
    }
    vars->emplace_back(*node->Var()->Proto());
  }
}

static void GraphToBlock(const Graph &graph,
proto::BlockDesc *block,
const SortKind *sort_kind) {
Expand All @@ -562,11 +574,11 @@ static void GraphToBlock(const Graph &graph,
}

std::vector<proto::VarDesc> vars_in_graph;
for (Node *node : graph.Nodes()) {
if (node->IsVar() && node->Var() &&
node->GetVarNodeBlockId() == graph.GetBlockId()) {
vars_in_graph.emplace_back(*node->Var()->Proto());
}
GetGraphVarDesc<Node *>(graph, graph.Nodes(), &vars_in_graph);
if (graph.Has(details::kRemovedVars)) {
auto &removed_vars = graph.Get<details::RemovedVars>(details::kRemovedVars);
GetGraphVarDesc<std::shared_ptr<ir::Node>>(
graph, removed_vars, &vars_in_graph);
}

// add vars_in_graph to block
Expand Down
14 changes: 11 additions & 3 deletions paddle/fluid/framework/ir/graph_pattern_detector.cc
Original file line number Diff line number Diff line change
Expand Up @@ -771,10 +771,18 @@ bool IsNthOutput(Node *var, Node *op, const std::string &argument, size_t nth) {
return var->Name() == op->Op()->Output(argument)[nth];
}

void GraphSafeRemoveNodes(Graph *graph,
const std::unordered_set<const Node *> &nodes) {
void GraphSafeRemoveNodes(
Graph *graph,
const std::unordered_set<const Node *> &nodes,
std::unordered_set<std::shared_ptr<Node>> *saved_nodes) {
for (auto *node : nodes) {
graph->RemoveNode(const_cast<Node *>(node));
if (saved_nodes != nullptr) {
// prevent unique_ptr node from being released
saved_nodes->insert(
std::move(graph->RemoveNode(const_cast<Node *>(node))));
} else {
graph->RemoveNode(const_cast<Node *>(node));
}
}

for (auto *node : graph->Nodes()) {
Expand Down
6 changes: 4 additions & 2 deletions paddle/fluid/framework/ir/graph_pattern_detector.h
Original file line number Diff line number Diff line change
Expand Up @@ -392,8 +392,10 @@ bool HasOutput(Node* op, const std::string& argument);
bool IsNthOutput(Node* var, Node* op, const std::string& argument, size_t nth);

// Graph safely remove some nodes, will automatically clean up the edges.
void GraphSafeRemoveNodes(Graph* graph,
const std::unordered_set<const Node*>& nodes);
void GraphSafeRemoveNodes(
Graph* graph,
const std::unordered_set<const Node*>& nodes,
std::unordered_set<std::shared_ptr<Node>>* saved_nodes = nullptr);

// Some pre-defined patterns those can be reused in multiple passes.
// The related Fluid Layer or Op should be one pattern here for better re-usage
Expand Down
20 changes: 20 additions & 0 deletions paddle/fluid/jit/all.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "base_function.h"
#include "layer.h"
#include "serializer.h"
#include "serializer_utils.h"
1 change: 0 additions & 1 deletion paddle/fluid/jit/base_function.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
#pragma once

#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/core/dense_tensor.h"

namespace paddle {
namespace jit {
Expand Down
2 changes: 2 additions & 0 deletions paddle/fluid/jit/compilation_unit.cc
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

#include "paddle/phi/core/enforce.h"

#include "paddle/fluid/jit/base_function.h"

namespace paddle {
namespace jit {

Expand Down
5 changes: 3 additions & 2 deletions paddle/fluid/jit/compilation_unit.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,14 @@

#pragma once

#include <memory>
#include <string>
#include <unordered_map>

#include "paddle/fluid/jit/base_function.h"
#include <vector>

namespace paddle {
namespace jit {
class BaseFunction;
using Name2FunctionMap =
std::unordered_map<std::string, std::shared_ptr<BaseFunction>>;

Expand Down
14 changes: 7 additions & 7 deletions paddle/fluid/jit/function_schema.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

#include "paddle/fluid/jit/function_schema.h"

#include "paddle/fluid/framework/program_desc.h"
#include "paddle/phi/core/enforce.h"

#include "paddle/fluid/jit/function_utils.h"
Expand Down Expand Up @@ -52,22 +53,21 @@ void FunctionSchema::AddOutputArg(const std::string& name) {
FunctionInfo::FunctionInfo(const std::string& func_name,
const std::vector<std::string>& param_names,
const framework::ProgramDesc& program_desc)
: func_name_(func_name),
param_names_(param_names),
program_desc_(program_desc) {
: func_name_(func_name), param_names_(param_names) {
program_desc_.reset(new framework::ProgramDesc(program_desc));
// Parse FunctionSchema
for (auto& in_name : program_desc_.GetFeedTargetNames()) {
for (auto& in_name : program_desc_->GetFeedTargetNames()) {
schema_.AddInputArg(in_name);
}
for (auto& out_name : program_desc_.GetFetchTargetNames()) {
for (auto& out_name : program_desc_->GetFetchTargetNames()) {
schema_.AddOutputArg(out_name);
}
}

const std::string& FunctionInfo::FunctionName() const { return func_name_; }

const framework::ProgramDesc& FunctionInfo::ProgramDesc() const {
return program_desc_;
return *program_desc_.get();
}

const std::vector<std::string>& FunctionInfo::ParamNames() const {
Expand All @@ -83,7 +83,7 @@ const std::vector<std::string> FunctionInfo::OutputArgNames() const {
}

void FunctionInfo::RemoveDescFeedFetch() {
utils::RemoveFeedFetch(&program_desc_);
utils::RemoveFeedFetch(program_desc_.get());
}

} // namespace jit
Expand Down
12 changes: 7 additions & 5 deletions paddle/fluid/jit/function_schema.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,17 @@

#pragma once

#include <memory>
#include <string>
#include <vector>

#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/variable.h"

namespace paddle {

namespace framework {
class ProgramDesc;
} // namespace framework

namespace jit {
using Variable = paddle::framework::Variable;

class Argument {
public:
Expand Down Expand Up @@ -75,7 +77,7 @@ class FunctionInfo {
private:
std::string func_name_;
std::vector<std::string> param_names_;
framework::ProgramDesc program_desc_;
std::shared_ptr<framework::ProgramDesc> program_desc_;
FunctionSchema schema_;
};

Expand Down
4 changes: 3 additions & 1 deletion paddle/fluid/jit/function_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@
#include "paddle/fluid/jit/function_utils.h"

#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/phi/core/enforce.h"

namespace paddle {
Expand Down Expand Up @@ -75,7 +77,7 @@ void ShareParamsIntoScope(const std::vector<std::string> &param_names,
for (size_t i = 0; i < param_names.size(); ++i) {
std::string name = param_names[i];
auto &param = params_dict.find(name)->second;
auto &dense_tensor = param.Get<DenseTensor>();
auto &dense_tensor = param->Get<DenseTensor>();
VLOG(3) << "share into scope: " << name;
auto *var = scope->Var(name);
auto *dst_tensor = var->GetMutable<DenseTensor>();
Expand Down
13 changes: 9 additions & 4 deletions paddle/fluid/jit/function_utils.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,18 +18,23 @@
#include <unordered_map>
#include <vector>

#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/common/place.h"
#include "paddle/phi/core/dense_tensor.h"

#include "paddle/fluid/jit/function_schema.h"

namespace paddle {

namespace framework {
class Variable;
class ProgramDesc;
class Scope;
} // namespace framework

namespace jit {
using Variable = paddle::framework::Variable;
using Name2VariableMap = std::unordered_map<std::string, Variable>;
using Name2VariableMap =
std::unordered_map<std::string, std::shared_ptr<Variable>>;
using DenseTensor = phi::DenseTensor;
using Tensor = paddle::experimental::Tensor;

Expand Down
20 changes: 12 additions & 8 deletions paddle/fluid/jit/layer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,21 @@

#include "paddle/fluid/jit/layer.h"

#include "paddle/fluid/framework/variable.h"

#include "paddle/fluid/jit/base_function.h"
#include "paddle/fluid/jit/compilation_unit.h"
#include "paddle/fluid/jit/function_schema.h"

namespace paddle {
namespace jit {
Layer::Layer(const std::vector<std::shared_ptr<FunctionInfo>>& infos,
const Name2VariableMap& params_dict,
const phi::Place& place)
Layer::Layer(const Name2VariableMap& params_dict, const phi::Place& place)
: params_dict_(params_dict) {
VLOG(3) << "infos size: " << infos.size();
unit_.reset(new CompilationUnit());
}

std::shared_ptr<BaseFunction> Layer::Function(const std::string& name) const {
return unit_.Function(name);
return unit_->Function(name);
}

std::vector<Tensor> Layer::forward(const std::vector<Tensor>& inputs) {
Expand All @@ -42,15 +46,15 @@ void Layer::to(const phi::Place& place) {}

void Layer::SetFunction(const std::string& name,
const std::shared_ptr<BaseFunction>& function) {
unit_.SetFunction(name, function);
unit_->SetFunction(name, function);
}

std::vector<std::string> Layer::FunctionNames() const {
return unit_.FunctionNames();
return unit_->FunctionNames();
}

const Name2FunctionMap& Layer::FunctionMap() const {
return unit_.FunctionMap();
return unit_->FunctionMap();
}

} // namespace jit
Expand Down
Loading

0 comments on commit 5eaaf2d

Please sign in to comment.