5 files changed: +8 -10 lines changed
@@ -49,9 +49,8 @@ class Cloneable : public Module {
         " and not the constructor?");
     for (const auto& parameter : named_parameters(/*recurse=*/false)) {
       auto& tensor = *parameter;
-      auto data = device && tensor.device() != *device
-          ? tensor.to(*device)
-          : autograd::Variable(tensor).clone();
+      auto data = device && tensor.device() != *device ? tensor.to(*device)
+                                                       : tensor.clone();
       copy->parameters_[parameter.key()].set_data(data);
     }
     TORCH_CHECK(
@@ -62,9 +61,8 @@ class Cloneable : public Module {
         " and not the constructor?");
     for (const auto& buffer : named_buffers(/*recurse=*/false)) {
       auto& tensor = *buffer;
-      auto data = device && tensor.device() != *device
-          ? tensor.to(*device)
-          : autograd::Variable(tensor).clone();
+      auto data = device && tensor.device() != *device ? tensor.to(*device)
+                                                       : tensor.clone();
       copy->buffers_[buffer.key()].set_data(data);
     }
     TORCH_CHECK(
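
The rewritten clone() path works because, in current libtorch, torch::autograd::Variable is only a type alias for at::Tensor, so wrapping the parameter in autograd::Variable(...) before calling clone() was an identity conversion. A minimal sketch of that equivalence (torch::nn::Linear is just an arbitrary example module, not something taken from this diff):

// Sketch only: assumes a libtorch where torch::autograd::Variable is
// `using Variable = at::Tensor;`, which is what makes this change a no-op
// behaviorally.
#include <torch/torch.h>
#include <type_traits>

int main() {
  static_assert(
      std::is_same<torch::autograd::Variable, at::Tensor>::value,
      "Variable is just an alias for Tensor");

  torch::nn::Linear layer(4, 2);
  for (const auto& parameter : layer->named_parameters(/*recurse=*/false)) {
    auto& tensor = *parameter;
    auto before = torch::autograd::Variable(tensor).clone(); // old expression
    auto after = tensor.clone();                             // new expression
    // Both are deep copies of the same parameter.
    TORCH_CHECK(before.equal(after));
  }
}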
@@ -347,7 +347,7 @@ static IValue addInput(
   value->setType(type);
   if (type->isSubtypeOf(*TensorType::get())) {
     auto input_tensor = input.toTensor();
-    auto name = Variable(input_tensor).name();
+    auto const& name = input_tensor.name();
     if (state->hasValue(input)) {
       input_tensor = input_tensor.view(input_tensor.sizes());
     }
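
Besides dropping the Variable wrap, this hunk binds the result of name() to auto const& instead of auto. A generic sketch of why that helps (the Named struct below is hypothetical, not the tracer's real types): the reference avoids copying a returned std::string, and still binds safely, via lifetime extension, if the method returns by value.

// Generic sketch, not the tracer itself; `Named` is a hypothetical type.
#include <string>

struct Named {
  std::string name_ = "input0";
  const std::string& name() const { return name_; }
};

int main() {
  Named tensor_like;
  auto copied = tensor_like.name();             // copies the string
  auto const& referenced = tensor_like.name();  // no copy, just a reference
  (void)copied;
  (void)referenced;
}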
@@ -612,7 +612,7 @@ py::object toPyObject(IValue ivalue) {
       }
     } else {
       guardAgainstNamedTensor<at::Tensor>(tensor);
-      return py::cast(autograd::Variable(std::move(tensor)));
+      return py::cast(std::move(tensor));
     }
   } else if (ivalue.isStorage()) {
     return py::cast(std::move(ivalue).toStorage());
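
Since Variable and Tensor are the same type, autograd::Variable(std::move(tensor)) only move-constructed another Tensor handle around the same TensorImpl before handing it to pybind11; casting the moved tensor directly is equivalent. A small sketch of the handle behavior (use_count() reports the intrusive refcount of the underlying TensorImpl):

// Sketch: copying a Tensor handle bumps the TensorImpl refcount, moving does
// not, and the Variable "wrap" is just another Tensor handle either way.
#include <torch/torch.h>
#include <iostream>

int main() {
  at::Tensor t = torch::ones({2, 2});
  std::cout << t.use_count() << '\n';                // 1

  at::Tensor copied = torch::autograd::Variable(t);  // copy: refcount -> 2
  std::cout << t.use_count() << '\n';                // 2

  at::Tensor moved =
      torch::autograd::Variable(std::move(copied));  // move: no extra bump
  std::cout << t.use_count() << '\n';                // still 2
}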
@@ -121,7 +121,7 @@ struct CaptureList {
   }

   void captureTensor(const at::Tensor& tensor, bool is_output) {
-    var_captures_.emplace_back(Variable(tensor), is_output);
+    var_captures_.emplace_back(tensor, is_output);
   }

   void capture(const IValue& val, bool is_output) {
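
In the old call, Variable(tensor) materialized a temporary Tensor handle just to forward it to emplace_back; passing the existing const reference forwards the original handle instead. A sketch of the pattern (CaptureValue is a hypothetical stand-in for the real element type of var_captures_, which the diff does not show):

// Hypothetical CaptureValue type for illustration only.
#include <torch/torch.h>
#include <vector>

struct CaptureValue {
  at::Tensor tensor;
  bool is_output;
  CaptureValue(const at::Tensor& t, bool out) : tensor(t), is_output(out) {}
};

int main() {
  std::vector<CaptureValue> var_captures;
  at::Tensor t = torch::zeros({3});

  // New form: the element is constructed from the existing handle.
  var_captures.emplace_back(t, /*is_output=*/false);

  // Old form: a temporary Tensor handle is created first, then forwarded.
  var_captures.emplace_back(torch::autograd::Variable(t), /*is_output=*/true);
}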
@@ -1309,7 +1309,7 @@ static const std::vector<OperatorGeneratorArgs> opGenArgs{
         [](Stack& stack) {
           at::Tensor a;
           pop(stack, a);
-          push(stack, autograd::Variable(a).variable_data());
+          push(stack, a.variable_data());
         },
         aliasAnalysisFromSchema()),
     // these ops are not defined for Tensor
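
The op body can call variable_data() on the popped at::Tensor directly, since that method is defined on the tensor type itself; it returns a shallow copy that shares storage but carries fresh autograd metadata, roughly the C++ counterpart of Python's .data. A short sketch (shapes and options are arbitrary):

// Sketch: variable_data() is available on at::Tensor, so no Variable wrap is
// needed. The result shares storage with `a` but is detached from its
// autograd metadata (requires_grad comes back false).
#include <torch/torch.h>

int main() {
  at::Tensor a = torch::ones({2, 2}, torch::requires_grad());
  at::Tensor d = a.variable_data();

  TORCH_CHECK(d.data_ptr() == a.data_ptr());  // same underlying storage
  TORCH_CHECK(!d.requires_grad());            // fresh autograd metadata
}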