Rewrites len(..) == 0 into not .. by xadupre · Pull Request #51 · sdpython/onnx-array-api · GitHub

Rewrites len(..) == 0 into not .. #51

Merged · 2 commits · Nov 13, 2023
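The whole change applies one pattern: explicit emptiness tests such as `len(x) == 0` (or `len(x) > 0`) become truthiness tests (`not x` / `x`), as PEP 8 recommends for sequences. Below is a minimal sketch of the equivalence and of the one caveat that shapes the diff; all values are illustrative, nothing here comes from the library:

```python
import numpy as np

# For every sized Python container, `not x` is equivalent to `len(x) == 0`.
for x in ([], "", {}, set(), (), [1], "a", {"k": 1}, {1, 2}, (3,)):
    assert (len(x) == 0) == (not x)

# Caveat: the rewrite cannot be applied to numpy arrays themselves,
# because bool(arr) raises for arrays with more than one element.
# The diff therefore tests the shape *tuple* (`not v.shape`), never the array.
scalar = np.array(3.5)        # 0-d array, shape == ()
matrix = np.zeros((2, 3))     # shape == (2, 3)
assert not scalar.shape       # empty tuple -> falsy, i.e. a scalar tensor
assert matrix.shape           # non-empty tuple -> truthy
try:
    bool(matrix)              # `not matrix` would raise here
except ValueError:
    pass                      # "The truth value of an array ... is ambiguous"
```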
2 changes: 1 addition & 1 deletion _unittests/ut_light_api/test_light_api.py
@@ -138,7 +138,7 @@ def list_ops_missing(self, n_inputs):
methods.append("")
new_missing.append(m)
text = "\n".join(methods)
if len(new_missing) > 0:
if new_missing:
raise AssertionError(
f"n_inputs={n_inputs}: missing method for operators "
f"{new_missing}\n{text}"
2 changes: 1 addition & 1 deletion _unittests/ut_validation/test_f8.py
@@ -344,7 +344,7 @@ def test_search_float32_into_fe5m2(self):
add = value
else:
add = v - value
if len(w) > 0:
if w:
raise AssertionError(
f"A warning was thrown for v={v}, "
f"value={value}, w={w[0]}."
4 changes: 2 additions & 2 deletions _unittests/ut_xrun_doc/test_documentation_examples.py
@@ -26,7 +26,7 @@ def import_source(module_file_path, module_name):
class TestDocumentationExamples(ExtTestCase):
def run_test(self, fold: str, name: str, verbose=0) -> int:
ppath = os.environ.get("PYTHONPATH", "")
if len(ppath) == 0:
if not ppath:
os.environ["PYTHONPATH"] = ROOT
elif ROOT not in ppath:
sep = ";" if is_windows() else ":"
@@ -42,7 +42,7 @@ def run_test(self, fold: str, name: str, verbose=0) -> int:
res = p.communicate()
out, err = res
st = err.decode("ascii", errors="ignore")
if len(st) > 0 and "Traceback" in st:
if st and "Traceback" in st:
if '"dot" not found in path.' in st:
# dot not installed, this part
# is tested in onnx framework
6 changes: 2 additions & 4 deletions onnx_array_api/ext_test_case.py
@@ -226,9 +226,7 @@ def assertRaise(self, fct: Callable, exc_type: Exception):
raise AssertionError("No exception was raised.")

def assertEmpty(self, value: Any):
if value is None:
return
if len(value) == 0:
if not value:
return
raise AssertionError(f"value is not empty: {value!r}.")

@@ -240,7 +238,7 @@ def assertNotEmpty(self, value: Any):
if value is None:
raise AssertionError(f"value is empty: {value!r}.")
if isinstance(value, (list, dict, tuple, set)):
if len(value) == 0:
if not value:
raise AssertionError(f"value is empty: {value!r}.")

def assertStartsWith(self, prefix: str, full: str):
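The `assertEmpty` change folds the `None` case and the empty-container case into one truthiness test; the counterpart in `assertNotEmpty` keeps the check inverted (`if not value:`) so it still raises on empty containers. A hedged sketch of the simplified helper (the name `assert_empty` is illustrative, it only mirrors the method above):

```python
from typing import Any

def assert_empty(value: Any) -> None:
    # `not value` is True for None, for empty lists/tuples/dicts/sets
    # and for empty strings, so one branch replaces the former two.
    if not value:
        return
    raise AssertionError(f"value is not empty: {value!r}.")

assert_empty(None)
assert_empty([])
assert_empty("")
try:
    assert_empty([0])        # a non-empty list must still fail
except AssertionError:
    pass
```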
2 changes: 1 addition & 1 deletion onnx_array_api/light_api/emitter.py
@@ -85,7 +85,7 @@ def render_attribute_value(self, value: Any) -> Tuple[List[str], str]:
if isinstance(v, str):
return [], f"{v!r}"
if isinstance(v, np.ndarray):
if len(v.shape) == 0:
if not v.shape:
return [], str(v)
if len(v.shape) == 1:
if value[0].type in (
2 changes: 1 addition & 1 deletion onnx_array_api/light_api/translate.py
@@ -51,7 +51,7 @@ def export(self, as_str, single_line: bool = False) -> Union[str, List[str]]:
else:
raise ValueError(f"Unexpected type {type(self.proto_)} for proto.")

if len(sparse_initializers) != 0:
if sparse_initializers:
raise NotImplementedError("Sparse initializer not supported yet.")

rows.extend(
2 changes: 1 addition & 1 deletion onnx_array_api/npx/npx_graph_builder.py
@@ -919,7 +919,7 @@ def to_onnx(
[(var, i, None) for i in range(var.n_var_outputs)]
)

if len(possible_types) > 0:
if possible_types:
# converts possibles types into a dictionary
map_types = {}
for var, i, dt in possible_types:
4 changes: 2 additions & 2 deletions onnx_array_api/npx/npx_helper.py
@@ -47,7 +47,7 @@ def _process_attributes(attributes):
nodes = []
modified = False
for node in graph.node:
if len(set(node.input) & set_rep) == 0:
if not (set(node.input) & set_rep):
modified = True
new_inputs = [replacements.get(i, i) for i in node.input]
atts = _process_attributes(node.attribute) or node.attribute
@@ -66,7 +66,7 @@ def _process_attributes(attributes):
if not modified:
return None

if len(set(i.name for i in graph.input) & set_rep) == 0:
if not (set(i.name for i in graph.input) & set_rep):
return make_graph(nodes, graph.name, graph.input, graph.output)

new_inputs = []
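Both hunks in `npx_helper.py` check whether the set of names to rename intersects a node's or graph's inputs. A small sketch of the equivalence, with made-up names; `set.isdisjoint` is shown only as an alternative spelling, it is not what the PR uses:

```python
node_inputs = ["X", "W"]
set_rep = {"Y", "Z"}     # names scheduled for replacement (illustrative)

old_style = len(set(node_inputs) & set_rep) == 0
new_style = not (set(node_inputs) & set_rep)
assert old_style == new_style        # equivalent for any pair of inputs
assert new_style                     # these particular names do not intersect

# Same test without materialising the intersection set:
assert set(node_inputs).isdisjoint(set_rep)
```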
12 changes: 6 additions & 6 deletions onnx_array_api/npx/npx_jit_eager.py
@@ -253,7 +253,7 @@ def to_jit(self, *values, **kwargs):
"""
self.info("+", "to_jit", args=values, kwargs=kwargs)
annotations = self.f.__annotations__
if len(annotations) > 0:
if annotations:
input_to_kwargs = {}
kwargs_to_input = {}
names = list(annotations.keys())
@@ -352,10 +352,10 @@ def to_jit(self, *values, **kwargs):
if iname in constraints
]
names = [i.name for i in inputs]
if len(new_kwargs) > 0:
if new_kwargs:
# An attribute is not named in the numpy API
# but is the ONNX definition.
if len(kwargs) == 0:
if not kwargs:
kwargs = new_kwargs
else:
kwargs = kwargs.copy()
@@ -375,13 +375,13 @@
target_opsets=self.target_opsets,
ir_version=self.ir_version,
)
if len(values) > 0 and len(values[0].shape) == 0:
if values and not values[0].shape:
inps = onx.graph.input[0]
shape = []
for d in inps.type.tensor_type.shape.dim:
v = d.dim_value if d.dim_value > 0 else d.dim_param
shape.append(v)
if len(shape) != 0:
if shape:
raise RuntimeError(
f"Shape mismatch, values[0]={values[0]} "
f"and inputs={onx.graph.input}."
@@ -441,7 +441,7 @@ def move_input_to_kwargs(
f"self.input_to_kwargs_ is not initialized for function {self.f} "
f"from module {self.f.__module__!r}."
)
if len(self.input_to_kwargs_) == 0:
if not self.input_to_kwargs_:
return values, kwargs
new_values = []
new_kwargs = kwargs.copy()
6 changes: 3 additions & 3 deletions onnx_array_api/npx/npx_numpy_tensors.py
@@ -220,7 +220,7 @@ def __bool__(self):
)
if self.shape == (0,):
return False
if len(self.shape) != 0:
if self.shape:
warnings.warn(
f"Conversion to bool only works for scalar, not for {self!r}, "
f"bool(...)={bool(self._tensor)}."
@@ -233,7 +233,7 @@ def __bool__(self):

def __int__(self):
"Implicit conversion to int."
if len(self.shape) != 0:
if self.shape:
raise ValueError(
f"Conversion to bool only works for scalar, not for {self!r}."
)
@@ -255,7 +255,7 @@ def __int__(self):

def __float__(self):
"Implicit conversion to float."
if len(self.shape) != 0:
if self.shape:
raise ValueError(
f"Conversion to bool only works for scalar, not for {self!r}."
)
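The `__bool__`/`__int__`/`__float__` hunks rely on an empty `shape` tuple identifying a 0-d (scalar) tensor; a shape of `(0,)`, handled separately just above, is an empty 1-D tensor whose shape tuple stays truthy. A sketch of the distinction on plain numpy arrays (assumed to behave like the wrapped tensors here):

```python
import numpy as np

scalar = np.array(42)       # 0-d tensor
empty_1d = np.array([])     # 1-D tensor holding zero elements

assert scalar.shape == () and not scalar.shape      # falsy shape -> scalar
assert empty_1d.shape == (0,) and empty_1d.shape    # truthy shape -> not a scalar

# `if self.shape:` therefore rejects exactly what the former
# `if len(self.shape) != 0:` rejected.
for a in (scalar, empty_1d):
    assert bool(a.shape) == (len(a.shape) != 0)
```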
8 changes: 4 additions & 4 deletions onnx_array_api/npx/npx_var.py
@@ -174,7 +174,7 @@ def to_onnx(
f"Mismatch number of outputs, expecting {len(outputs)}, "
f"got ({len(onx.output)})."
)
if len(g.functions_) > 0:
if g.functions_:
return [g.functions_, onx]
return onx

@@ -1020,7 +1020,7 @@ def __getitem__(self, index: Any) -> "Var":

if not isinstance(index, tuple):
index = (index,)
elif len(index) == 0:
elif not index:
# The array contains a scalar and it needs to be returned.
return var(self, op="Identity")

@@ -1091,7 +1091,7 @@ def __getitem__(self, index: Any) -> "Var":
starts = np.array(starts, dtype=np.int64)
axes = np.array(axes, dtype=np.int64)

if len(needs_shape) > 0:
if needs_shape:
shape = self.shape
conc = []
for e in ends:
@@ -1116,7 +1116,7 @@ def __getitem__(self, index: Any) -> "Var":
sliced_args.append(steps)
sliced_args_cst = [v if isinstance(v, Var) else cst(v) for v in sliced_args]
sliced = var(self.self_var, *sliced_args_cst, op="Slice")
if len(axis_squeeze) > 0:
if axis_squeeze:
return var(
sliced,
cst(np.array(axis_squeeze, dtype=np.int64)),
5 changes: 1 addition & 4 deletions onnx_array_api/plotting/_helper.py
@@ -120,10 +120,7 @@ def get_tensor_shape(obj):
for d in obj.tensor_type.shape.dim:
v = d.dim_value if d.dim_value > 0 else d.dim_param
shape.append(v)
if len(shape) == 0:
shape = None
else:
shape = list(None if s == 0 else s for s in shape)
shape = None if not shape else list(None if s == 0 else s for s in shape)
return shape
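`get_tensor_shape` now builds its result in one expression: an empty dimension list becomes `None`, and zero-valued dimensions become `None` entries. A hedged sketch of that mapping on plain lists (`normalize_shape` is an illustrative stand-in, not the helper itself, which walks ONNX `TensorProto` dimensions):

```python
def normalize_shape(shape):
    # Mirrors the rewritten expression: no dims -> None, a 0 dim -> unknown dim.
    return None if not shape else [None if s == 0 else s for s in shape]

assert normalize_shape([]) is None
assert normalize_shape([0, 3, 0]) == [None, 3, None]
assert normalize_shape([2, "N"]) == [2, "N"]     # symbolic dims pass through
```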


6 changes: 3 additions & 3 deletions onnx_array_api/plotting/dot_plot.py
@@ -242,7 +242,7 @@ def dot_label(text):
for node in nodes:
exp.append("")
for out in node.output:
if len(out) > 0 and out not in inter_vars:
if out and out not in inter_vars:
inter_vars[out] = out
sh = shapes.get(out, "")
if sh:
@@ -318,7 +318,7 @@ def dot_label(text):
f"{dot_name(subprefix)}{dot_name(inp2.name)};"
)
for out1, out2 in zip(body.output, node.output):
if len(out2) == 0:
if not out2:
# Empty output, it cannot be used.
continue
exp.append(
@@ -346,7 +346,7 @@ def dot_label(text):
f"{dot_name(prefix)}{dot_name(node.name)};"
)
for out in node.output:
if len(out) == 0:
if not out:
# Empty output, it cannot be used.
continue
exp.append(
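In both plotting backends, a node's unused optional outputs appear as empty strings in `node.output`, so `if out:` / `if not out:` filters them exactly like the former length checks. A small sketch using `onnx.helper` (the node is made up for illustration):

```python
from onnx import helper

# Dropout's optional mask output is left unnamed -> stored as "".
node = helper.make_node("Dropout", inputs=["X"], outputs=["Y", ""])

used = [out for out in node.output if out]     # `if out` == `if len(out) > 0`
assert list(node.output) == ["Y", ""]
assert used == ["Y"]
```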
14 changes: 7 additions & 7 deletions onnx_array_api/plotting/text_plot.py
@@ -75,7 +75,7 @@ def append_target(self, tid, weight):
def process_node(self):
"node to string"
if self.nodes_modes == "LEAF":
if len(self.targets) == 0:
if not self.targets:
text = f"{self.true_false}f"
elif len(self.targets) == 1:
t = self.targets[0]
@@ -264,7 +264,7 @@ def _append_succ_pred_s(
unknown.add(i)
for i in n.output:
known[i] = n
if len(unknown) > 0:
if unknown:
# These inputs are coming from the graph below.
for name in unknown:
successors[name].append(parent_node_name)
@@ -402,7 +402,7 @@ def _find_sequence(node_name, known, done):
% (k, ",".join(sequences[k]), list(sequences))
)

if len(sequences) == 0:
if not sequences:
raise RuntimeError( # pragma: no cover
"Unexpected empty sequence (len(possibles)=%d, "
"len(done)=%d, len(nodes)=%d). This is usually due to "
@@ -417,7 +417,7 @@ def _find_sequence(node_name, known, done):
# if the sequence of successors is longer
best = k
elif len(v) == len(sequences[best]):
if len(new_nodes) > 0:
if new_nodes:
# then choose the next successor sharing input with
# previous output
so = set(new_nodes[-1].output)
@@ -808,7 +808,7 @@ def str_node(indent, node):
val = ".%d" % att.type
atts.append(f"{att.name}={val}")
inputs = list(node.input)
if len(atts) > 0:
if atts:
inputs.extend(atts)
if node.domain in ("", "ai.onnx.ml"):
domain = ""
@@ -917,7 +917,7 @@ def str_node(indent, node):
indent = previous_indent
else:
inds = [indents.get(i, 0) for i in node.input if i not in init_names]
if len(inds) == 0:
if not inds:
indent = 0
else:
mi = min(inds)
@@ -929,7 +929,7 @@
)
add_break = True
if not add_break and previous_out is not None:
if len(set(node.input) & previous_out) == 0:
if not (set(node.input) & previous_out):
if verbose:
print(f"[onnx_simple_text_plot] break3 {node.op_type}")
add_break = True
4 changes: 2 additions & 2 deletions onnx_array_api/profiling.py
@@ -71,7 +71,7 @@ def get_root(self):
def _get_root(node, stor=None):
if stor is not None:
stor.append(node)
if len(node.called_by) == 0:
if not node.called_by:
return node
if len(node.called_by) == 1:
return _get_root(node.called_by[0], stor=stor)
@@ -383,7 +383,7 @@ def walk(node, roots_keys, indent=0):
continue
child[key] = walk(n, roots_key, indent + 1)

if len(child) > 0:
if child:
mx = max(_[0] for _ in child)
dg = int(math.log(mx) / math.log(10) + 1.5)
form = f"%-{dg}d-%s"
2 changes: 1 addition & 1 deletion onnx_array_api/reference/ops/op_constant_of_shape.py
@@ -7,7 +7,7 @@ class ConstantOfShape(OpRun):
def _process(value):
cst = value[0] if isinstance(value, np.ndarray) and value.size > 0 else value
if isinstance(value, np.ndarray):
if len(value.shape) == 0:
if not value.shape:
cst = value
elif value.size > 0:
cst = value.ravel()[0]
2 changes: 1 addition & 1 deletion onnx_array_api/validation/tools.py
@@ -49,7 +49,7 @@ def randomize_proto(
doc_string=onx.doc_string,
opset_imports=list(onx.opset_import),
)
if len(onx.metadata_props) > 0:
if onx.metadata_props:
values = {p.key: p.value for p in onx.metadata_props}
set_model_props(onnx_model, values)
return onnx_model
4 changes: 2 additions & 2 deletions setup.py
@@ -18,7 +18,7 @@
requirements = f.read().strip(" \n\r\t").split("\n")
except FileNotFoundError:
requirements = []
if len(requirements) == 0 or requirements == [""]:
if not requirements or requirements == [""]:
requirements = ["numpy", "scipy", "onnx"]

try:
@@ -34,7 +34,7 @@
for _ in [_.strip("\r\n ") for _ in f.readlines()]
if _.startswith("__version__")
]
if len(line) > 0:
if line:
version_str = line[0].split("=")[1].strip('" ')
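One detail in `setup.py`: the explicit `requirements == [""]` comparison has to survive the rewrite, because splitting the contents of an empty requirements file yields `[""]`, a list that is truthy yet effectively empty. A quick sketch:

```python
content = ""                                   # an empty requirements file
requirements = content.strip(" \n\r\t").split("\n")

assert requirements == [""]                    # truthy, yet semantically empty
assert requirements                            # `not requirements` alone would miss it
if not requirements or requirements == [""]:
    requirements = ["numpy", "scipy", "onnx"]
assert requirements == ["numpy", "scipy", "onnx"]
```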

