Merge pull request #28743 from mrc0mmand/lint-python-scripts

tools: lint Python scripts
This commit is contained in:
Luca Boccassi
2023-08-10 19:13:45 +01:00
committed by GitHub
24 changed files with 205 additions and 166 deletions

23
.pylintrc Normal file
View File

@@ -0,0 +1,23 @@
[MAIN]
extension-pkg-allow-list=lxml
[MESSAGES CONTROL]
disable=fixme,
invalid-name,
line-too-long,
missing-class-docstring,
missing-docstring,
missing-function-docstring,
missing-module-docstring,
too-few-public-methods,
too-many-arguments,
too-many-boolean-expressions,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
unspecified-encoding,

View File

@@ -17,10 +17,7 @@
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=missing-docstring,invalid-name,import-outside-toplevel
# pylint: disable=consider-using-with,unspecified-encoding,line-too-long
# pylint: disable=too-many-locals,too-many-statements,too-many-return-statements
# pylint: disable=too-many-branches,redefined-builtin,fixme
# pylint: disable=import-outside-toplevel,consider-using-with,redefined-builtin
import argparse
import os

View File

@@ -1,10 +1,9 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=missing-docstring,redefined-outer-name,invalid-name
# pylint: disable=unused-import,import-outside-toplevel,useless-else-on-loop
# pylint: disable=consider-using-with,wrong-import-position,unspecified-encoding
# pylint: disable=protected-access
# pylint: disable=protected-access,redefined-outer-name
import base64
import json

View File

@@ -16,12 +16,8 @@
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=missing-docstring,invalid-name,import-outside-toplevel
# pylint: disable=consider-using-with,unspecified-encoding,line-too-long
# pylint: disable=too-many-locals,too-many-statements,too-many-return-statements
# pylint: disable=too-many-branches,too-many-lines,too-many-instance-attributes
# pylint: disable=too-many-arguments,unnecessary-lambda-assignment,fixme
# pylint: disable=unused-argument
# pylint: disable=import-outside-toplevel,consider-using-with,unused-argument
# pylint: disable=unnecessary-lambda-assignment
import argparse
import configparser

View File

@@ -1,8 +1,5 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=line-too-long,too-many-lines,too-many-branches,too-many-statements,too-many-arguments
# pylint: disable=too-many-public-methods,too-many-boolean-expressions,invalid-name,no-self-use
# pylint: disable=missing-function-docstring,missing-class-docstring,missing-module-docstring
#
# Copyright © 2017 Michal Sekletar <msekleta@redhat.com>

View File

@@ -1,8 +1,5 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=line-too-long,too-many-lines,too-many-branches,too-many-statements,too-many-arguments
# pylint: disable=too-many-public-methods,too-many-boolean-expressions,invalid-name
# pylint: disable=missing-function-docstring,missing-class-docstring,missing-module-docstring
# systemd-networkd tests
# These tests can be executed in the systemd mkosi image when booted in QEMU. After booting the QEMU VM,

View File

@@ -1,6 +1,6 @@
#!/usr/bin/python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=line-too-long,invalid-name,missing-module-docstring,missing-function-docstring,too-many-statements,broad-except
# pylint: disable=broad-except
import argparse
import logging

View File

@@ -1,10 +1,8 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=missing-docstring,redefined-outer-name,invalid-name
# pylint: disable=unspecified-encoding,no-else-return,line-too-long,too-many-lines
# pylint: disable=multiple-imports,too-many-instance-attributes,consider-using-with
# pylint: disable=global-statement
# pylint: disable=redefined-outer-name,no-else-return,multiple-imports
# pylint: disable=consider-using-with,global-statement
# udev test
#

View File

@@ -1,5 +1,6 @@
#!/usr/bin/python
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=consider-using-with
"""
A helper to compare 'systemd-analyze dump' outputs.
@@ -13,8 +14,9 @@ tools/analyze-dump-sort.py /var/tmp/{dump1,dump2} → this does a diff from dump
"""
import argparse
import tempfile
import subprocess
import tempfile
def sort_dump(sourcefile, destfile=None):
if destfile is None:

View File

@@ -28,57 +28,57 @@ Prints out journal entries with no or bad catalog explanations.
"""
import re
from systemd import journal, id128
j = journal.Reader()
from systemd import id128, journal
logged = set()
pattern = re.compile('@[A-Z0-9_]+@')
mids = {v:k for k,v in id128.__dict__.items()
if k.startswith('SD_MESSAGE')}
freq = 1000
def log_entry(x):
if 'CODE_FILE' in x:
# pylint: disable=consider-using-f-string
def log_entry(entry):
if 'CODE_FILE' in entry:
# some of our code was using 'CODE_FUNCTION' instead of 'CODE_FUNC'
print('{}:{} {}'.format(x.get('CODE_FILE', '???'),
x.get('CODE_LINE', '???'),
x.get('CODE_FUNC', None) or x.get('CODE_FUNCTION', '???')))
print(' {}'.format(x.get('MESSAGE', 'no message!')))
for k, v in x.items():
print('{}:{} {}'.format(entry.get('CODE_FILE', '???'),
entry.get('CODE_LINE', '???'),
entry.get('CODE_FUNC', None) or entry.get('CODE_FUNCTION', '???')))
print(' {}'.format(entry.get('MESSAGE', 'no message!')))
for k, v in entry.items():
if k.startswith('CODE_') or k in {'MESSAGE_ID', 'MESSAGE'}:
continue
print(' {}={}'.format(k, v))
print(f' {k}={v}')
print()
for i, x in enumerate(j):
if i % freq == 0:
print(i, end='\r')
if __name__ == '__main__':
j = journal.Reader()
logged = set()
pattern = re.compile('@[A-Z0-9_]+@')
try:
mid = x['MESSAGE_ID']
except KeyError:
continue
name = mids.get(mid, 'unknown')
mids = { v:k for k,v in id128.__dict__.items() if k.startswith('SD_MESSAGE') }
try:
desc = journal.get_catalog(mid)
except FileNotFoundError:
if mid in logged:
for i, x in enumerate(j):
if i % 1000 == 0:
print(i, end='\r')
try:
mid = x['MESSAGE_ID']
except KeyError:
continue
name = mids.get(mid, 'unknown')
try:
desc = journal.get_catalog(mid)
except FileNotFoundError:
if mid in logged:
continue
print(f'{name} {mid.hex}: no catalog entry')
log_entry(x)
logged.add(mid)
continue
print('{} {.hex}: no catalog entry'.format(name, mid))
log_entry(x)
logged.add(mid)
continue
fields = [field[1:-1] for field in pattern.findall(desc)]
for field in fields:
index = (mid, field)
if field in x or index in logged:
continue
print('{} {.hex}: no field {}'.format(name, mid, field))
log_entry(x)
logged.add(index)
fields = [field[1:-1] for field in pattern.findall(desc)]
for field in fields:
index = (mid, field)
if field in x or index in logged:
continue
print(f'{name} {mid.hex}: no field {field}')
log_entry(x)
logged.add(index)

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=missing-docstring,invalid-name,unspecified-encoding,consider-using-with
# pylint: disable=consider-using-with
import os
import pathlib
@@ -28,5 +28,5 @@ def check_file(filename):
return good
if __name__ == '__main__':
good = all(check_file(name) for name in sys.argv[1:])
sys.exit(0 if good else 1)
all_good = all(check_file(name) for name in sys.argv[1:])
sys.exit(0 if all_good else 1)

View File

@@ -2,7 +2,8 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
from argparse import ArgumentParser
from pathlib import Path
from subprocess import run, PIPE
from subprocess import PIPE, run
def extract_interfaces_xml(output_dir, executable):
proc = run(

View File

@@ -1,5 +1,9 @@
#!/usr/bin/python
# SPDX-License-Identifier: LGPL-2.1-or-later
#
# Note: the no-value-for-parameter here is expected, as the click module
# decorators modify function arguments which pylint doesn't know
# pylint: disable=no-value-for-parameter
"""
A program to parse auxv (e.g. /proc/self/auxv).
@@ -109,12 +113,12 @@ def dump(endian, field_width, file):
width = {32:'II', 64:'QQ'}[field_width]
format = f'{endian}{width}'
print(f'# {format=}')
format_str = f'{endian}{width}'
print(f'# {format_str=}')
seen_null = False
for item in struct.iter_unpack(format, data):
for item in struct.iter_unpack(format_str, data):
key, val = item
name = AT_AUXV_NAMES.get(key, f'unknown ({key})')
if name.endswith(('UID', 'GID')):
@@ -123,7 +127,7 @@ def dump(endian, field_width, file):
pref, fmt = '0x', 'x'
if seen_null:
print('# trailing garbarbage after AT_NULL')
print('# trailing garbage after AT_NULL')
print(f'{name:18} = {pref}{val:{fmt}}')

View File

@@ -19,7 +19,7 @@
# the resulting binary useless. gnu-efi relies on this method and contains a stub that performs the
# ELF dynamic relocations at runtime.
# pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init
# pylint: disable=attribute-defined-outside-init
import argparse
import hashlib

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=consider-using-f-string
import gdb
@@ -9,7 +10,7 @@ class sd_dump_hashmaps(gdb.Command):
def __init__(self):
super().__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
def invoke(self, arg, from_tty):
def invoke(self, arg, _from_tty):
d = gdb.parse_and_eval("hashmap_debug_list")
hashmap_type_info = gdb.parse_and_eval("hashmap_type_info")
uchar_t = gdb.lookup_type("unsigned char")

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
# pylint: disable=unbalanced-tuple-unpacking,consider-using-f-string,consider-using-with
"""
Generate %-from-name.gperf from %-list.txt
@@ -7,18 +8,22 @@ Generate %-from-name.gperf from %-list.txt
import sys
name, prefix, input = sys.argv[1:]
if __name__ == '__main__':
if len(sys.argv) != 4:
sys.exit(f'Usage: {sys.argv[0]} name prefix file')
print("""\
name, prefix, file = sys.argv[1:]
print("""\
%{
#if __GNUC__ >= 7
_Pragma("GCC diagnostic ignored \\"-Wimplicit-fallthrough\\"")
#endif
%}""")
print("""\
struct {}_name {{ const char* name; int id; }};
print(f"""\
struct {name}_name {{ const char* name; int id; }};
%null-strings
%%""".format(name))
%%""")
for line in open(input):
print("{0}, {1}{0}".format(line.rstrip(), prefix))
for line in open(file):
print("{0}, {1}{0}".format(line.rstrip(), prefix))

View File

@@ -5,7 +5,7 @@ import re
import sys
import uuid
HEADER = f'''\
HEADER = '''\
| Name | Partition Type UUID | Allowed File Systems | Explanation |
|------|---------------------|----------------------|-------------|
'''
@@ -149,21 +149,21 @@ def extract(file):
name = line.split()[1]
if m2 := re.match(r'^(ROOT|USR)_([A-Z0-9]+|X86_64|PPC64_LE|MIPS_LE|MIPS64_LE)(|_VERITY|_VERITY_SIG)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
type, arch, suffix, u = m2.groups()
ptype, arch, suffix, u = m2.groups()
u = uuid.UUID(u.replace(',', ''))
assert arch in ARCHITECTURES, f'{arch} not in f{ARCHITECTURES}'
type = f'{type}{suffix}'
assert type in TYPES
ptype = f'{type}{suffix}'
assert ptype in TYPES
yield name, type, arch, u
yield name, ptype, arch, u
elif m2 := re.match(r'(\w+)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
type, u = m2.groups()
ptype, u = m2.groups()
u = uuid.UUID(u.replace(',', ''))
yield name, type, None, u
yield name, ptype, None, u
else:
raise Exception(f'Failed to match: {m.group(1)}')
raise ValueError(f'Failed to match: {m.group(1)}')
def generate(defines):
prevtype = None
@@ -172,21 +172,21 @@ def generate(defines):
uuids = set()
for name, type, arch, uuid in defines:
tdesc = TYPES[type]
for name, ptype, arch, puuid in defines:
tdesc = TYPES[ptype]
adesc = '' if arch is None else f' ({ARCHITECTURES[arch]})'
# Let's make sure that we didn't select&paste the same value twice
assert uuid not in uuids
uuids.add(uuid)
assert puuid not in uuids
uuids.add(puuid)
if type != prevtype:
prevtype = type
morea, moreb = DESCRIPTIONS[type]
if ptype != prevtype:
prevtype = ptype
morea, moreb = DESCRIPTIONS[ptype]
else:
morea = moreb = 'ditto'
print(f'| _{tdesc}{adesc}_ | `{uuid}` `{name}` | {morea} | {moreb} |')
print(f'| _{tdesc}{adesc}_ | `{puuid}` `{name}` | {morea} | {moreb} |')
if __name__ == '__main__':
known = extract(sys.stdin)

View File

@@ -12,13 +12,13 @@ for entry in chromiumos.gen_autosuspend_rules.PCI_IDS:
vendor, device = entry.split(':')
vendor = int(vendor, 16)
device = int(device, 16)
print('pci:v{:08X}d{:08X}*'.format(vendor, device))
print(f'pci:v{vendor:08X}d{device:08X}*')
print('# usb:v<VEND>p<PROD> (4 uppercase hexadecimal digits twice)')
for entry in chromiumos.gen_autosuspend_rules.USB_IDS:
vendor, product = entry.split(':')
vendor = int(vendor, 16)
product = int(product, 16)
print('usb:v{:04X}p{:04X}*'.format(vendor, product))
print(f'usb:v{vendor:04X}p{product:04X}*')
print(' ID_AUTOSUSPEND=1')

View File

@@ -1,12 +1,13 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
import sys
import collections
import re
from xml_helper import xml_parse, xml_print, tree
import sys
from copy import deepcopy
from xml_helper import tree, xml_parse, xml_print
COLOPHON = '''\
This index contains {count} entries in {sections} sections,
referring to {pages} individual manual pages.
@@ -101,7 +102,7 @@ def _extract_directives(directive_groups, formatting, page):
formatting[name.text] = name
def _make_section(template, name, directives, formatting):
varlist = template.find(".//*[@id='{}']".format(name))
varlist = template.find(f".//*[@id='{name}']")
for varname, manpages in sorted(directives.items()):
entry = tree.SubElement(varlist, 'varlistentry')
term = tree.SubElement(entry, 'term')
@@ -161,14 +162,14 @@ def make_page(template_path, xml_files):
for page in xml_files:
try:
_extract_directives(directive_groups, formatting, page)
except Exception:
raise ValueError("failed to process " + page)
except Exception as e:
raise ValueError("failed to process " + page) from e
return _make_page(template, directive_groups, formatting)
if __name__ == '__main__':
with open(sys.argv[1], 'wb') as f:
template_path = sys.argv[2]
xml_files = sys.argv[3:]
xml = make_page(template_path, xml_files)
f.write(xml_print(xml))
_template_path = sys.argv[2]
_xml_files = sys.argv[3:]
_xml = make_page(_template_path, _xml_files)
f.write(xml_print(_xml))

View File

@@ -2,9 +2,10 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
import collections
import sys
import re
from xml_helper import xml_parse, xml_print, tree
import sys
from xml_helper import tree, xml_parse, xml_print
MDASH = '' if sys.version_info.major >= 3 else ' -- '
@@ -44,9 +45,9 @@ This index contains {count} entries, referring to {pages} individual manual page
def check_id(page, t):
id = t.getroot().get('id')
if not re.search('/' + id + '[.]', page):
raise ValueError("id='{}' is not the same as page name '{}'".format(id, page))
page_id = t.getroot().get('id')
if not re.search('/' + page_id + '[.]', page):
raise ValueError(f"id='{page_id}' is not the same as page name '{page}'")
def make_index(pages):
index = collections.defaultdict(list)
@@ -68,7 +69,7 @@ def add_letter(template, letter, pages):
title.text = letter
para = tree.SubElement(refsect1, 'para')
for info in sorted(pages, key=lambda info: str.lower(info[0])):
refname, section, purpose, realname = info
refname, section, purpose, _realname = info
b = tree.SubElement(para, 'citerefentry')
c = tree.SubElement(b, 'refentrytitle')
@@ -86,7 +87,7 @@ def add_summary(template, indexpages):
for group in indexpages:
count += len(group)
for info in group:
refname, section, purpose, realname = info
_refname, section, _purpose, realname = info
pages.add((realname, section))
refsect1 = tree.fromstring(SUMMARY)
@@ -107,5 +108,5 @@ def make_page(*xml_files):
return template
if __name__ == '__main__':
with open(sys.argv[1], 'wb') as f:
f.write(xml_print(make_page(*sys.argv[2:])))
with open(sys.argv[1], 'wb') as file:
file.write(xml_print(make_page(*sys.argv[2:])))

Some files were not shown because too many files have changed in this diff Show More