forked from pytorch/pytorch
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path: create_scope_op.cc
45 lines (33 loc) · 1.21 KB
/
create_scope_op.cc
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
#include "caffe2/operators/create_scope_op.h"
// Command-line flag (registered via c10's flag machinery). When set to true,
// the workspace stack used by CreateScope/Do performs extra debug/consistency
// checks; defaults to false for production speed.
C10_DEFINE_bool(
caffe2_workspace_stack_debug,
false,
"Enable debug checks for CreateScope's workspace stack");
namespace caffe2 {
// Register detail::WorkspaceStack with the Caffe2 type system so it can be
// stored in / retrieved from blobs (the output of CreateScope below).
CAFFE_KNOWN_TYPE(detail::WorkspaceStack);
template <>
bool CreateScopeOp<CPUContext>::RunOnDevice() {
  // Produce an empty workspace stack in output blob 0; any previously stored
  // scopes are discarded so the consumer (the Do operator) starts fresh.
  auto* scope_stack = OperatorBase::Output<detail::WorkspaceStack>(0);
  scope_stack->clear();
  return true;
}
// Register the CPU implementation of CreateScope.
REGISTER_CPU_OPERATOR(CreateScope, CreateScopeOp<CPUContext>);
// CreateScope only initializes bookkeeping state; it has no gradient.
SHOULD_NOT_DO_GRADIENT(CreateScope);
// Schema: no inputs, one output (the workspace-stack blob).
OPERATOR_SCHEMA(CreateScope).NumInputs(0).NumOutputs(1).SetDoc(R"DOC(
'CreateScope' operator initializes and outputs empty scope that is used
by Do operator to store local blobs
)DOC");
template <>
bool HasScopeOp<CPUContext>::RunOnDevice() {
  // Read the workspace stack from input blob 0 and write a single boolean
  // to output blob 0: true iff the stack still holds at least one scope.
  const auto& scope_stack = OperatorBase::Input<detail::WorkspaceStack>(0);
  const bool has_scopes = !scope_stack.empty();
  auto* out = Output(0, {1}, at::dtype<bool>());
  *out->template mutable_data<bool>() = has_scopes;
  return true;
}
// Register the CPU implementation of HasScope.
REGISTER_CPU_OPERATOR(HasScope, HasScopeOp<CPUContext>);
// HasScope is a pure query over bookkeeping state; no gradient.
SHOULD_NOT_DO_GRADIENT(HasScope);
// Schema: one input (the workspace-stack blob), one output (scalar bool).
OPERATOR_SCHEMA(HasScope).NumInputs(1).NumOutputs(1).SetDoc(R"DOC(
Checks whether scope blob has any saved scopes left
)DOC");
} // namespace caffe2