Enable -Wunused on torch targets (#150077) · pytorch/pytorch@e9e1aac · GitHub
[go: up one dir, main page]

Skip to content

Commit e9e1aac

Browse files
cyyever
authored and pytorchmergebot committed
Enable -Wunused on torch targets (#150077)
For GCC, ``-Wunused`` contains: ``` -Wunused-function Warn whenever a static function is declared but not defined or a non-inline static function is unused. -Wunused-label Warn whenever a label is declared but not used. To suppress this warning use the unused attribute. -Wunused-parameter Warn whenever a function parameter is unused aside from its declaration. To suppress this warning use the unused attribute. -Wunused-variable Warn whenever a local variable or non-constant static variable is unused aside from its declaration. To suppress this warning use the unused attribute. ``` For Clang, some of the diagnostics controlled by ``-Wunused`` are enabled by default: ``` Controls [-Wunused-argument](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-argument), [-Wunused-but-set-variable](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-but-set-variable), [-Wunused-function](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-function), [-Wunused-label](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-label), [-Wunused-lambda-capture](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-lambda-capture), [-Wunused-local-typedef](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-local-typedef), [-Wunused-private-field](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-private-field), [-Wunused-property-ivar](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-property-ivar), [-Wunused-value](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-value), [-Wunused-variable](https://clang.llvm.org/docs/DiagnosticsReference.html#wunused-variable). ``` These checks are all useful. This PR aims to enable ``-Wunused`` without breaking code. Pull Request resolved: #150077 Approved by: https://github.com/zou3519, https://github.com/wdvr
1 parent 38a9a8b commit e9e1aac

21 files changed

+71
-74
lines changed

aten/src/ATen/detail/CUDAHooksInterface.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ struct TORCH_API CUDAHooksInterface : AcceleratorHooksInterface {
8686
TORCH_CHECK(false, "Cannot get device of pointer on CUDA without ATen_cuda library. ", CUDA_HELP);
8787
}
8888

89-
bool isPinnedPtr(const void* data) const override {
89+
bool isPinnedPtr(const void* /*data*/) const override {
9090
return false;
9191
}
9292

aten/src/ATen/detail/HIPHooksInterface.h

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,6 @@
66

77
#include <ATen/detail/AcceleratorHooksInterface.h>
88

9-
#include <memory>
10-
119
// NB: Class must live in `at` due to limitations of Registry.h.
1210
namespace at {
1311

@@ -37,7 +35,7 @@ struct TORCH_API HIPHooksInterface : AcceleratorHooksInterface {
3735
return -1;
3836
}
3937

40-
bool isPinnedPtr(const void* data) const override {
38+
bool isPinnedPtr(const void* /*data*/ ) const override {
4139
return false;
4240
}
4341

@@ -49,7 +47,7 @@ struct TORCH_API HIPHooksInterface : AcceleratorHooksInterface {
4947
return 0;
5048
}
5149

52-
bool hasPrimaryContext(DeviceIndex device_index) const override {
50+
bool hasPrimaryContext(DeviceIndex /*device_index*/ ) const override {
5351
TORCH_CHECK(false, "Cannot check primary context without ATen_hip library.");
5452
}
5553
};

aten/src/ATen/detail/IPUHooksInterface.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ struct TORCH_API IPUHooksInterface : AcceleratorHooksInterface {
1515
TORCH_CHECK(false, "Cannot initialize IPU without ATen_ipu library.");
1616
}
1717

18-
bool hasPrimaryContext(DeviceIndex device_index) const override {
18+
bool hasPrimaryContext(DeviceIndex /*device_index*/) const override {
1919
TORCH_CHECK(false, "Cannot initialize IPU without ATen_ipu library.");
2020
return false;
2121
}
@@ -26,7 +26,7 @@ struct TORCH_API IPUHooksInterface : AcceleratorHooksInterface {
2626
}
2727

2828
Generator getNewGenerator(
29-
DeviceIndex device_index [[maybe_unused]] = -1) const override {
29+
DeviceIndex /*device_index*/ = -1) const override {
3030
TORCH_CHECK(false, "Cannot initialize IPU without ATen_ipu library.");
3131
}
3232
};

aten/src/ATen/detail/MAIAHooksInterface.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ struct TORCH_API MAIAHooksInterface : AcceleratorHooksInterface {
1717
TORCH_CHECK(false, "Cannot initialize MAIA without ATen_maia library.");
1818
}
1919

20-
bool hasPrimaryContext(DeviceIndex device_index) const override {
20+
bool hasPrimaryContext(DeviceIndex /*device_index*/) const override {
2121
TORCH_CHECK(false, "Cannot initialize MAIA without ATen_maia library.");
2222
return false;
2323
}

aten/src/ATen/detail/MTIAHooksInterface.h

Lines changed: 16 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
#include <ATen/detail/AcceleratorHooksInterface.h>
1313

1414
#include <string>
15-
C10_DIAGNOSTIC_PUSH_AND_IGNORED_IF_DEFINED("-Wunused-parameter")
1615
namespace at {
1716
class Context;
1817
}
@@ -46,19 +45,19 @@ struct TORCH_API MTIAHooksInterface : AcceleratorHooksInterface {
4645
return 0;
4746
}
4847

49-
virtual void deviceSynchronize(c10::DeviceIndex device_index) const {
48+
virtual void deviceSynchronize(c10::DeviceIndex /*device_index*/) const {
5049
FAIL_MTIAHOOKS_FUNC(__func__);
5150
}
5251

5352
virtual std::string showConfig() const {
5453
FAIL_MTIAHOOKS_FUNC(__func__);
5554
}
5655

57-
bool hasPrimaryContext(DeviceIndex device_index) const override {
56+
bool hasPrimaryContext(DeviceIndex /*device_index*/) const override {
5857
return false;
5958
}
6059

61-
void setCurrentDevice(DeviceIndex device) const override {
60+
void setCurrentDevice(DeviceIndex /*device*/) const override {
6261
FAIL_MTIAHOOKS_FUNC(__func__);
6362
}
6463

@@ -67,36 +66,36 @@ struct TORCH_API MTIAHooksInterface : AcceleratorHooksInterface {
6766
return -1;
6867
}
6968

70-
DeviceIndex exchangeDevice(DeviceIndex device) const override {
69+
DeviceIndex exchangeDevice(DeviceIndex /*device*/) const override {
7170
FAIL_MTIAHOOKS_FUNC(__func__);
7271
return -1;
7372
}
7473

75-
DeviceIndex maybeExchangeDevice(DeviceIndex device) const override {
74+
DeviceIndex maybeExchangeDevice(DeviceIndex /*device*/) const override {
7675
FAIL_MTIAHOOKS_FUNC(__func__);
7776
return -1;
7877
}
7978

80-
virtual c10::Stream getCurrentStream(DeviceIndex device) const {
79+
virtual c10::Stream getCurrentStream(DeviceIndex /*device*/) const {
8180
FAIL_MTIAHOOKS_FUNC(__func__);
8281
return c10::Stream::unpack3(-1, 0, c10::DeviceType::MTIA);
8382
}
8483

85-
virtual int64_t getCurrentRawStream(DeviceIndex device) const {
84+
virtual int64_t getCurrentRawStream(DeviceIndex /*device*/) const {
8685
FAIL_MTIAHOOKS_FUNC(__func__);
8786
return -1;
8887
}
8988

90-
virtual c10::Stream getDefaultStream(DeviceIndex device) const {
89+
virtual c10::Stream getDefaultStream(DeviceIndex /*device*/) const {
9190
FAIL_MTIAHOOKS_FUNC(__func__);
9291
return c10::Stream::unpack3(-1, 0, c10::DeviceType::MTIA);
9392
}
9493

95-
virtual void setCurrentStream(const c10::Stream& stream) const {
94+
virtual void setCurrentStream(const c10::Stream& /*stream*/ ) const {
9695
FAIL_MTIAHOOKS_FUNC(__func__);
9796
}
9897

99-
bool isPinnedPtr(const void* data) const override {
98+
bool isPinnedPtr(const void* /*data*/) const override {
10099
return false;
101100
}
102101

@@ -105,12 +104,12 @@ struct TORCH_API MTIAHooksInterface : AcceleratorHooksInterface {
105104
return nullptr;
106105
}
107106

108-
virtual PyObject* memoryStats(DeviceIndex device) const {
107+
virtual PyObject* memoryStats(DeviceIndex /*device*/) const {
109108
FAIL_MTIAHOOKS_FUNC(__func__);
110109
return nullptr;
111110
}
112111

113-
virtual PyObject* getDeviceCapability(DeviceIndex device) const {
112+
virtual PyObject* getDeviceCapability(DeviceIndex /*device*/) const {
114113
FAIL_MTIAHOOKS_FUNC(__func__);
115114
return nullptr;
116115
}
@@ -121,9 +120,9 @@ struct TORCH_API MTIAHooksInterface : AcceleratorHooksInterface {
121120

122121

123122
virtual void recordMemoryHistory(
124-
const std::optional<std::string>& enabled,
125-
const std::string& stacks,
126-
size_t max_entries) const {
123+
const std::optional<std::string>& /*enabled*/,
124+
const std::string& /*stacks*/,
125+
size_t /*max_entries*/) const {
127126
FAIL_MTIAHOOKS_FUNC(__func__);
128127
}
129128

@@ -137,7 +136,7 @@ struct TORCH_API MTIAHooksInterface : AcceleratorHooksInterface {
137136
return 0;
138137
}
139138

140-
virtual void resetPeakMemoryStats(DeviceIndex device) const {
139+
virtual void resetPeakMemoryStats(DeviceIndex /*device*/) const {
141140
FAIL_MTIAHOOKS_FUNC(__func__);
142141
}
143142

@@ -158,4 +157,3 @@ TORCH_API const MTIAHooksInterface& getMTIAHooks();
158157
TORCH_API bool isMTIAHooksBuilt();
159158
} // namespace detail
160159
} // namespace at
161-
C10_DIAGNOSTIC_POP()

aten/src/ATen/native/TensorShape.cpp

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4201,8 +4201,7 @@ static inline void handle_unflatten_exception(
42014201
const std::runtime_error& e,
42024202
const Tensor& self,
42034203
int64_t dim,
4204-
SymIntArrayRef sizes,
4205-
std::optional<DimnameList> names) {
4204+
SymIntArrayRef sizes) {
42064205
if (!strstr(e.what(), "is invalid for input of size")) {
42074206
TORCH_CHECK(false, "unflatten got an unexpected error:\n", e.what());
42084207
}
@@ -4256,7 +4255,7 @@ static Tensor unflatten_impl(
42564255
// at::infer_size would throw std::runtime_error for invalid size,
42574256
// catch the runtime_error and display the error message in a more
42584257
// user-friendly way for both tensors and named tensors
4259-
handle_unflatten_exception(e, self, dim, sizes, names);
4258+
handle_unflatten_exception(e, self, dim, sizes);
42604259
}
42614260

42624261
SymDimVector shape(self.sym_sizes().begin(), self.sym_sizes().end());

aten/src/ATen/native/cuda/MemoryAccess.cuh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ struct static_unroll {
4848
template<template<int i> typename func, int end>
4949
struct static_unroll<func, end, end> {
5050
template<typename... Args>
51-
static inline C10_HOST_DEVICE void with_args(Args... args) {}
51+
static inline C10_HOST_DEVICE void with_args(Args... /*args*/) {}
5252
};
5353

5454
// helper structs to be used with static_unroll to load arguments
@@ -516,7 +516,7 @@ inline C10_HOST_DEVICE int can_vectorize_up_to(char *pointer) {
516516
template<int i>
517517
struct can_vectorize_up_to_helper {
518518
template <typename array_t, typename traits>
519-
static C10_HOST_DEVICE void apply(int &result, array_t pointers, traits _) {
519+
static C10_HOST_DEVICE void apply(int &result, array_t pointers, traits /*_*/) {
520520
using arg_t = typename traits::template arg<i>::type;
521521
// `pointers` hold the data_ptr for tensors [output, input0, input1, ...], so we
522522
// need a +1 offset to get the input

aten/src/ATen/native/quantized/cpu/qnnpack/CMakeLists.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -331,6 +331,9 @@ if(NOT TARGET clog)
331331
"${CONFU_DEPENDENCIES_BINARY_DIR}/clog")
332332
# We build static version of clog but a dynamic library may indirectly depend on it
333333
set_property(TARGET clog PROPERTY POSITION_INDEPENDENT_CODE ON)
334+
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
335+
target_compile_options(clog PRIVATE "-Wno-unused-result")
336+
endif()
334337
endif()
335338
target_link_libraries(pytorch_qnnpack PUBLIC clog)
336339

c10/core/GeneratorImpl.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ c10::intrusive_ptr<GeneratorImpl> GeneratorImpl::clone() const {
3232
}
3333

3434
void GeneratorImpl::graphsafe_set_state(
35-
const c10::intrusive_ptr<c10::GeneratorImpl>& state) {
35+
const c10::intrusive_ptr<c10::GeneratorImpl>& /*state*/) {
3636
TORCH_CHECK_NOT_IMPLEMENTED(
3737
false, "graphsafe_set_state is not supported in this Generator");
3838
}

c10/core/TensorImpl.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ TensorImpl::TensorImpl(
102102

103103
// NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
104104
TensorImpl::TensorImpl(
105-
ImplType type,
105+
ImplType /*type*/,
106106
Storage&& storage,
107107
DispatchKeySet key_set,
108108
const caffe2::TypeMeta data_type)

0 commit comments

Comments
 (0)
0