diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000000..afd811a154
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,22 @@
+[MAIN]
+extension-pkg-allow-list=lxml
+
+[MESSAGES CONTROL]
+disable=fixme,
+ invalid-name,
+ line-too-long,
+ missing-class-docstring,
+ missing-docstring,
+ missing-function-docstring,
+ missing-module-docstring,
+ too-few-public-methods,
+ too-many-arguments,
+ too-many-boolean-expressions,
+ too-many-branches,
+ too-many-instance-attributes,
+ too-many-lines,
+ too-many-locals,
+ too-many-public-methods,
+ too-many-return-statements,
+ too-many-statements,
+ unspecified-encoding,
diff --git a/src/kernel-install/60-ukify.install.in b/src/kernel-install/60-ukify.install.in
index 1104770a4a..71e3094efb 100755
--- a/src/kernel-install/60-ukify.install.in
+++ b/src/kernel-install/60-ukify.install.in
@@ -17,10 +17,7 @@
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <https://www.gnu.org/licenses/>.
-# pylint: disable=missing-docstring,invalid-name,import-outside-toplevel
-# pylint: disable=consider-using-with,unspecified-encoding,line-too-long
-# pylint: disable=too-many-locals,too-many-statements,too-many-return-statements
-# pylint: disable=too-many-branches,redefined-builtin,fixme
+# pylint: disable=import-outside-toplevel,consider-using-with,redefined-builtin
import argparse
import os
diff --git a/src/ukify/test/test_ukify.py b/src/ukify/test/test_ukify.py
index d56d958d4e..7c25aace81 100755
--- a/src/ukify/test/test_ukify.py
+++ b/src/ukify/test/test_ukify.py
@@ -1,10 +1,9 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-# pylint: disable=missing-docstring,redefined-outer-name,invalid-name
# pylint: disable=unused-import,import-outside-toplevel,useless-else-on-loop
# pylint: disable=consider-using-with,wrong-import-position,unspecified-encoding
-# pylint: disable=protected-access
+# pylint: disable=protected-access,redefined-outer-name
import base64
import json
diff --git a/src/ukify/ukify.py b/src/ukify/ukify.py
index cf7db49bdd..0ddf56ae5e 100755
--- a/src/ukify/ukify.py
+++ b/src/ukify/ukify.py
@@ -16,12 +16,8 @@
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <https://www.gnu.org/licenses/>.
-# pylint: disable=missing-docstring,invalid-name,import-outside-toplevel
-# pylint: disable=consider-using-with,unspecified-encoding,line-too-long
-# pylint: disable=too-many-locals,too-many-statements,too-many-return-statements
-# pylint: disable=too-many-branches,too-many-lines,too-many-instance-attributes
-# pylint: disable=too-many-arguments,unnecessary-lambda-assignment,fixme
-# pylint: disable=unused-argument
+# pylint: disable=import-outside-toplevel,consider-using-with,unused-argument
+# pylint: disable=unnecessary-lambda-assignment
import argparse
import configparser
diff --git a/test/test-exec-deserialization.py b/test/test-exec-deserialization.py
index 7fbf9a4d17..f8f3a6d272 100755
--- a/test/test-exec-deserialization.py
+++ b/test/test-exec-deserialization.py
@@ -1,8 +1,5 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-# pylint: disable=line-too-long,too-many-lines,too-many-branches,too-many-statements,too-many-arguments
-# pylint: disable=too-many-public-methods,too-many-boolean-expressions,invalid-name,no-self-use
-# pylint: disable=missing-function-docstring,missing-class-docstring,missing-module-docstring
#
# Copyright © 2017 Michal Sekletar
diff --git a/test/test-network/systemd-networkd-tests.py b/test/test-network/systemd-networkd-tests.py
index 7f922b46f6..07afe354f7 100755
--- a/test/test-network/systemd-networkd-tests.py
+++ b/test/test-network/systemd-networkd-tests.py
@@ -1,8 +1,5 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-# pylint: disable=line-too-long,too-many-lines,too-many-branches,too-many-statements,too-many-arguments
-# pylint: disable=too-many-public-methods,too-many-boolean-expressions,invalid-name
-# pylint: disable=missing-function-docstring,missing-class-docstring,missing-module-docstring
# systemd-networkd tests
# These tests can be executed in the systemd mkosi image when booted in QEMU. After booting the QEMU VM,
diff --git a/test/test-shutdown.py b/test/test-shutdown.py
index 700e77a15d..5339afcdd0 100755
--- a/test/test-shutdown.py
+++ b/test/test-shutdown.py
@@ -1,6 +1,6 @@
#!/usr/bin/python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-# pylint: disable=line-too-long,invalid-name,missing-module-docstring,missing-function-docstring,too-many-statements,broad-except
+# pylint: disable=broad-except
import argparse
import logging
diff --git a/test/test-udev.py b/test/test-udev.py
index 02383ce4e9..5a95b9c521 100755
--- a/test/test-udev.py
+++ b/test/test-udev.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-# pylint: disable=missing-docstring,redefined-outer-name,invalid-name
-# pylint: disable=unspecified-encoding,no-else-return,line-too-long,too-many-lines
-# pylint: disable=multiple-imports,too-many-instance-attributes,consider-using-with
-# pylint: disable=global-statement
+# pylint: disable=redefined-outer-name,no-else-return,multiple-imports
+# pylint: disable=consider-using-with,global-statement
# udev test
#
diff --git a/tools/analyze-dump-sort.py b/tools/analyze-dump-sort.py
index 015027ad4b..cc9a9fcd37 100755
--- a/tools/analyze-dump-sort.py
+++ b/tools/analyze-dump-sort.py
@@ -1,5 +1,6 @@
#!/usr/bin/python
# SPDX-License-Identifier: LGPL-2.1-or-later
+# pylint: disable=consider-using-with
"""
A helper to compare 'systemd-analyze dump' outputs.
@@ -13,8 +14,9 @@ tools/analyze-dump-sort.py /var/tmp/{dump1,dump2} → this does a diff from dump
"""
import argparse
-import tempfile
import subprocess
+import tempfile
+
def sort_dump(sourcefile, destfile=None):
if destfile is None:
diff --git a/tools/catalog-report.py b/tools/catalog-report.py
index ca1e13df9a..060b1aae86 100755
--- a/tools/catalog-report.py
+++ b/tools/catalog-report.py
@@ -28,57 +28,57 @@ Prints out journal entries with no or bad catalog explanations.
"""
import re
-from systemd import journal, id128
-j = journal.Reader()
+from systemd import id128, journal
-logged = set()
-pattern = re.compile('@[A-Z0-9_]+@')
-mids = {v:k for k,v in id128.__dict__.items()
- if k.startswith('SD_MESSAGE')}
-
-freq = 1000
-
-def log_entry(x):
- if 'CODE_FILE' in x:
+# pylint: disable=consider-using-f-string
+def log_entry(entry):
+ if 'CODE_FILE' in entry:
# some of our code was using 'CODE_FUNCTION' instead of 'CODE_FUNC'
- print('{}:{} {}'.format(x.get('CODE_FILE', '???'),
- x.get('CODE_LINE', '???'),
- x.get('CODE_FUNC', None) or x.get('CODE_FUNCTION', '???')))
- print(' {}'.format(x.get('MESSAGE', 'no message!')))
- for k, v in x.items():
+ print('{}:{} {}'.format(entry.get('CODE_FILE', '???'),
+ entry.get('CODE_LINE', '???'),
+ entry.get('CODE_FUNC', None) or entry.get('CODE_FUNCTION', '???')))
+ print(' {}'.format(entry.get('MESSAGE', 'no message!')))
+ for k, v in entry.items():
if k.startswith('CODE_') or k in {'MESSAGE_ID', 'MESSAGE'}:
continue
- print(' {}={}'.format(k, v))
+ print(f' {k}={v}')
print()
-for i, x in enumerate(j):
- if i % freq == 0:
- print(i, end='\r')
+if __name__ == '__main__':
+ j = journal.Reader()
+ logged = set()
+ pattern = re.compile('@[A-Z0-9_]+@')
- try:
- mid = x['MESSAGE_ID']
- except KeyError:
- continue
- name = mids.get(mid, 'unknown')
+ mids = { v:k for k,v in id128.__dict__.items() if k.startswith('SD_MESSAGE') }
- try:
- desc = journal.get_catalog(mid)
- except FileNotFoundError:
- if mid in logged:
+ for i, x in enumerate(j):
+ if i % 1000 == 0:
+ print(i, end='\r')
+
+ try:
+ mid = x['MESSAGE_ID']
+ except KeyError:
+ continue
+ name = mids.get(mid, 'unknown')
+
+ try:
+ desc = journal.get_catalog(mid)
+ except FileNotFoundError:
+ if mid in logged:
+ continue
+
+ print(f'{name} {mid.hex}: no catalog entry')
+ log_entry(x)
+ logged.add(mid)
continue
- print('{} {.hex}: no catalog entry'.format(name, mid))
- log_entry(x)
- logged.add(mid)
- continue
-
- fields = [field[1:-1] for field in pattern.findall(desc)]
- for field in fields:
- index = (mid, field)
- if field in x or index in logged:
- continue
- print('{} {.hex}: no field {}'.format(name, mid, field))
- log_entry(x)
- logged.add(index)
+ fields = [field[1:-1] for field in pattern.findall(desc)]
+ for field in fields:
+ index = (mid, field)
+ if field in x or index in logged:
+ continue
+ print(f'{name} {mid.hex}: no field {field}')
+ log_entry(x)
+ logged.add(index)
diff --git a/tools/check-includes.py b/tools/check-includes.py
index afb957a2e1..5d5b39f778 100755
--- a/tools/check-includes.py
+++ b/tools/check-includes.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-# pylint: disable=missing-docstring,invalid-name,unspecified-encoding,consider-using-with
+# pylint: disable=consider-using-with
import os
import pathlib
@@ -28,5 +28,5 @@ def check_file(filename):
return good
if __name__ == '__main__':
- good = all(check_file(name) for name in sys.argv[1:])
- sys.exit(0 if good else 1)
+ all_good = all(check_file(name) for name in sys.argv[1:])
+ sys.exit(0 if all_good else 1)
diff --git a/tools/dbus_exporter.py b/tools/dbus_exporter.py
index f94f261e07..819584dcba 100755
--- a/tools/dbus_exporter.py
+++ b/tools/dbus_exporter.py
@@ -2,7 +2,8 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
from argparse import ArgumentParser
from pathlib import Path
-from subprocess import run, PIPE
+from subprocess import PIPE, run
+
def extract_interfaces_xml(output_dir, executable):
proc = run(
diff --git a/tools/dump-auxv.py b/tools/dump-auxv.py
index 36f3b37bf4..d6dbcb1f07 100644
--- a/tools/dump-auxv.py
+++ b/tools/dump-auxv.py
@@ -1,5 +1,9 @@
#!/usr/bin/python
# SPDX-License-Identifier: LGPL-2.1-or-later
+#
+# Note: the no-value-for-parameter here is expected, as the click module
+# decorators modify function arguments which pylint doesn't know
+# pylint: disable=no-value-for-parameter
"""
A program to parse auxv (e.g. /proc/self/auxv).
@@ -109,12 +113,12 @@ def dump(endian, field_width, file):
width = {32:'II', 64:'QQ'}[field_width]
- format = f'{endian}{width}'
- print(f'# {format=}')
+ format_str = f'{endian}{width}'
+ print(f'# {format_str=}')
seen_null = False
- for item in struct.iter_unpack(format, data):
+ for item in struct.iter_unpack(format_str, data):
key, val = item
name = AT_AUXV_NAMES.get(key, f'unknown ({key})')
if name.endswith(('UID', 'GID')):
@@ -123,7 +127,7 @@ def dump(endian, field_width, file):
pref, fmt = '0x', 'x'
if seen_null:
- print('# trailing garbarbage after AT_NULL')
+ print('# trailing garbage after AT_NULL')
print(f'{name:18} = {pref}{val:{fmt}}')
diff --git a/tools/elf2efi.py b/tools/elf2efi.py
index 2e478940f5..c683c12076 100755
--- a/tools/elf2efi.py
+++ b/tools/elf2efi.py
@@ -19,7 +19,7 @@
# the resulting binary useless. gnu-efi relies on this method and contains a stub that performs the
# ELF dynamic relocations at runtime.
-# pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init
+# pylint: disable=attribute-defined-outside-init
import argparse
import hashlib
diff --git a/tools/gdb-sd_dump_hashmaps.py b/tools/gdb-sd_dump_hashmaps.py
index d2388b7171..57f825a09c 100644
--- a/tools/gdb-sd_dump_hashmaps.py
+++ b/tools/gdb-sd_dump_hashmaps.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
+# pylint: disable=consider-using-f-string
import gdb
@@ -9,7 +10,7 @@ class sd_dump_hashmaps(gdb.Command):
def __init__(self):
super().__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
- def invoke(self, arg, from_tty):
+ def invoke(self, arg, _from_tty):
d = gdb.parse_and_eval("hashmap_debug_list")
hashmap_type_info = gdb.parse_and_eval("hashmap_type_info")
uchar_t = gdb.lookup_type("unsigned char")
diff --git a/tools/generate-gperfs.py b/tools/generate-gperfs.py
index d240b2c383..3887bb6ee9 100755
--- a/tools/generate-gperfs.py
+++ b/tools/generate-gperfs.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
+# pylint: disable=unbalanced-tuple-unpacking,consider-using-f-string,consider-using-with
"""
Generate %-from-name.gperf from %-list.txt
@@ -7,18 +8,22 @@ Generate %-from-name.gperf from %-list.txt
import sys
-name, prefix, input = sys.argv[1:]
+if __name__ == '__main__':
+ if len(sys.argv) != 4:
+ sys.exit(f'Usage: {sys.argv[0]} name prefix file')
-print("""\
+ name, prefix, file = sys.argv[1:]
+
+ print("""\
%{
#if __GNUC__ >= 7
_Pragma("GCC diagnostic ignored \\"-Wimplicit-fallthrough\\"")
#endif
%}""")
-print("""\
-struct {}_name {{ const char* name; int id; }};
+ print(f"""\
+struct {name}_name {{ const char* name; int id; }};
%null-strings
-%%""".format(name))
+%%""")
-for line in open(input):
- print("{0}, {1}{0}".format(line.rstrip(), prefix))
+ for line in open(file):
+ print("{0}, {1}{0}".format(line.rstrip(), prefix))
diff --git a/tools/list-discoverable-partitions.py b/tools/list-discoverable-partitions.py
index 8376a7cdeb..a19bf1d6e2 100644
--- a/tools/list-discoverable-partitions.py
+++ b/tools/list-discoverable-partitions.py
@@ -5,7 +5,7 @@ import re
import sys
import uuid
-HEADER = f'''\
+HEADER = '''\
| Name | Partition Type UUID | Allowed File Systems | Explanation |
|------|---------------------|----------------------|-------------|
'''
@@ -149,21 +149,21 @@ def extract(file):
name = line.split()[1]
if m2 := re.match(r'^(ROOT|USR)_([A-Z0-9]+|X86_64|PPC64_LE|MIPS_LE|MIPS64_LE)(|_VERITY|_VERITY_SIG)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
- type, arch, suffix, u = m2.groups()
+ ptype, arch, suffix, u = m2.groups()
u = uuid.UUID(u.replace(',', ''))
assert arch in ARCHITECTURES, f'{arch} not in f{ARCHITECTURES}'
- type = f'{type}{suffix}'
- assert type in TYPES
+            ptype = f'{ptype}{suffix}'
+ assert ptype in TYPES
- yield name, type, arch, u
+ yield name, ptype, arch, u
elif m2 := re.match(r'(\w+)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
- type, u = m2.groups()
+ ptype, u = m2.groups()
u = uuid.UUID(u.replace(',', ''))
- yield name, type, None, u
+ yield name, ptype, None, u
else:
- raise Exception(f'Failed to match: {m.group(1)}')
+ raise ValueError(f'Failed to match: {m.group(1)}')
def generate(defines):
prevtype = None
@@ -172,21 +172,21 @@ def generate(defines):
uuids = set()
- for name, type, arch, uuid in defines:
- tdesc = TYPES[type]
+ for name, ptype, arch, puuid in defines:
+ tdesc = TYPES[ptype]
adesc = '' if arch is None else f' ({ARCHITECTURES[arch]})'
# Let's make sure that we didn't select&paste the same value twice
- assert uuid not in uuids
- uuids.add(uuid)
+ assert puuid not in uuids
+ uuids.add(puuid)
- if type != prevtype:
- prevtype = type
- morea, moreb = DESCRIPTIONS[type]
+ if ptype != prevtype:
+ prevtype = ptype
+ morea, moreb = DESCRIPTIONS[ptype]
else:
morea = moreb = 'ditto'
- print(f'| _{tdesc}{adesc}_ | `{uuid}` `{name}` | {morea} | {moreb} |')
+ print(f'| _{tdesc}{adesc}_ | `{puuid}` `{name}` | {morea} | {moreb} |')
if __name__ == '__main__':
known = extract(sys.stdin)
diff --git a/tools/make-autosuspend-rules.py b/tools/make-autosuspend-rules.py
index 633b7711d1..b1b4cef1be 100755
--- a/tools/make-autosuspend-rules.py
+++ b/tools/make-autosuspend-rules.py
@@ -12,13 +12,13 @@ for entry in chromiumos.gen_autosuspend_rules.PCI_IDS:
vendor, device = entry.split(':')
vendor = int(vendor, 16)
device = int(device, 16)
- print('pci:v{:08X}d{:08X}*'.format(vendor, device))
+ print(f'pci:v{vendor:08X}d{device:08X}*')
print('# usb:vp (4 uppercase hexadecimal digits twice)')
for entry in chromiumos.gen_autosuspend_rules.USB_IDS:
vendor, product = entry.split(':')
vendor = int(vendor, 16)
product = int(product, 16)
- print('usb:v{:04X}p{:04X}*'.format(vendor, product))
+ print(f'usb:v{vendor:04X}p{product:04X}*')
print(' ID_AUTOSUSPEND=1')
diff --git a/tools/make-directive-index.py b/tools/make-directive-index.py
index 8a2939959b..5398b452ef 100755
--- a/tools/make-directive-index.py
+++ b/tools/make-directive-index.py
@@ -1,12 +1,13 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-import sys
import collections
import re
-from xml_helper import xml_parse, xml_print, tree
+import sys
from copy import deepcopy
+from xml_helper import tree, xml_parse, xml_print
+
COLOPHON = '''\
This index contains {count} entries in {sections} sections,
referring to {pages} individual manual pages.
@@ -101,7 +102,7 @@ def _extract_directives(directive_groups, formatting, page):
formatting[name.text] = name
def _make_section(template, name, directives, formatting):
- varlist = template.find(".//*[@id='{}']".format(name))
+ varlist = template.find(f".//*[@id='{name}']")
for varname, manpages in sorted(directives.items()):
entry = tree.SubElement(varlist, 'varlistentry')
term = tree.SubElement(entry, 'term')
@@ -161,14 +162,14 @@ def make_page(template_path, xml_files):
for page in xml_files:
try:
_extract_directives(directive_groups, formatting, page)
- except Exception:
- raise ValueError("failed to process " + page)
+ except Exception as e:
+ raise ValueError("failed to process " + page) from e
return _make_page(template, directive_groups, formatting)
if __name__ == '__main__':
with open(sys.argv[1], 'wb') as f:
- template_path = sys.argv[2]
- xml_files = sys.argv[3:]
- xml = make_page(template_path, xml_files)
- f.write(xml_print(xml))
+ _template_path = sys.argv[2]
+ _xml_files = sys.argv[3:]
+ _xml = make_page(_template_path, _xml_files)
+ f.write(xml_print(_xml))
diff --git a/tools/make-man-index.py b/tools/make-man-index.py
index bae36fbabf..579dd405c2 100755
--- a/tools/make-man-index.py
+++ b/tools/make-man-index.py
@@ -2,9 +2,10 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
import collections
-import sys
import re
-from xml_helper import xml_parse, xml_print, tree
+import sys
+
+from xml_helper import tree, xml_parse, xml_print
MDASH = ' — ' if sys.version_info.major >= 3 else ' -- '
@@ -44,9 +45,9 @@ This index contains {count} entries, referring to {pages} individual manual page
def check_id(page, t):
- id = t.getroot().get('id')
- if not re.search('/' + id + '[.]', page):
- raise ValueError("id='{}' is not the same as page name '{}'".format(id, page))
+ page_id = t.getroot().get('id')
+ if not re.search('/' + page_id + '[.]', page):
+ raise ValueError(f"id='{page_id}' is not the same as page name '{page}'")
def make_index(pages):
index = collections.defaultdict(list)
@@ -68,7 +69,7 @@ def add_letter(template, letter, pages):
title.text = letter
para = tree.SubElement(refsect1, 'para')
for info in sorted(pages, key=lambda info: str.lower(info[0])):
- refname, section, purpose, realname = info
+ refname, section, purpose, _realname = info
b = tree.SubElement(para, 'citerefentry')
c = tree.SubElement(b, 'refentrytitle')
@@ -86,7 +87,7 @@ def add_summary(template, indexpages):
for group in indexpages:
count += len(group)
for info in group:
- refname, section, purpose, realname = info
+ _refname, section, _purpose, realname = info
pages.add((realname, section))
refsect1 = tree.fromstring(SUMMARY)
@@ -107,5 +108,5 @@ def make_page(*xml_files):
return template
if __name__ == '__main__':
- with open(sys.argv[1], 'wb') as f:
- f.write(xml_print(make_page(*sys.argv[2:])))
+ with open(sys.argv[1], 'wb') as file:
+ file.write(xml_print(make_page(*sys.argv[2:])))
diff --git a/tools/meson-render-jinja2.py b/tools/meson-render-jinja2.py
index 6ccb1fbe0c..3a3d912d8f 100755
--- a/tools/meson-render-jinja2.py
+++ b/tools/meson-render-jinja2.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
+# pylint: disable=consider-using-with
import ast
import os
@@ -30,7 +31,7 @@ def render(filename, defines):
undefined=jinja2.StrictUndefined)
return template.render(defines)
-if __name__ == '__main__':
+def main():
defines = parse_config_h(sys.argv[1])
defines.update(parse_config_h(sys.argv[2]))
output = render(sys.argv[3], defines)
@@ -38,3 +39,6 @@ if __name__ == '__main__':
f.write(output)
info = os.stat(sys.argv[3])
os.chmod(sys.argv[4], info.st_mode)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/update-dbus-docs.py b/tools/update-dbus-docs.py
index 473469e457..d6438a5daf 100755
--- a/tools/update-dbus-docs.py
+++ b/tools/update-dbus-docs.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
+# pylint: disable=superfluous-parens,consider-using-with
import argparse
import collections
@@ -36,6 +37,8 @@ GREEN = '\x1b[32m'
YELLOW = '\x1b[33m'
RESET = '\x1b[39m'
+arguments = None
+
def xml_parser():
return etree.XMLParser(no_network=True,
remove_comments=False,
@@ -62,38 +65,38 @@ def print_method(declarations, elem, *, prefix, file, is_signal=False):
argname = arg.get('name')
if argname is None:
- if opts.print_errors:
+ if arguments.print_errors:
print(f'method {name}: argument {num+1} has no name', file=sys.stderr)
argname = 'UNNAMED'
- type = arg.get('type')
+ argtype = arg.get('type')
if not is_signal:
direction = arg.get('direction')
- print(f'''{lead if num > 0 else ''}{direction:3} {type} {argname}''', file=file, end='')
+ print(f'''{lead if num > 0 else ''}{direction:3} {argtype} {argname}''', file=file, end='')
else:
- print(f'''{lead if num > 0 else ''}{type} {argname}''', file=file, end='')
+ print(f'''{lead if num > 0 else ''}{argtype} {argname}''', file=file, end='')
- print(f');', file=file)
+ print(');', file=file)
ACCESS_MAP = {
'read' : 'readonly',
'write' : 'readwrite',
}
-def value_ellipsis(type):
- if type == 's':
- return "'...'";
- if type[0] == 'a':
- inner = value_ellipsis(type[1:])
- return f"[{inner}{', ...' if inner != '...' else ''}]";
+def value_ellipsis(prop_type):
+ if prop_type == 's':
+ return "'...'"
+ if prop_type[0] == 'a':
+ inner = value_ellipsis(prop_type[1:])
+ return f"[{inner}{', ...' if inner != '...' else ''}]"
return '...'
def print_property(declarations, elem, *, prefix, file):
- name = elem.get('name')
- type = elem.get('type')
- access = elem.get('access')
+ prop_name = elem.get('name')
+ prop_type = elem.get('type')
+ prop_access = elem.get('access')
- declarations['property'].append(name)
+ declarations['property'].append(prop_name)
# @org.freedesktop.DBus.Property.EmitsChangedSignal("false")
# @org.freedesktop.systemd1.Privileged("true")
@@ -104,8 +107,8 @@ def print_property(declarations, elem, *, prefix, file):
anno_value = anno.get('value')
print(f'''{prefix}@{anno_name}("{anno_value}")''', file=file)
- access = ACCESS_MAP.get(access, access)
- print(f'''{prefix}{access} {type} {name} = {value_ellipsis(type)};''', file=file)
+ prop_access = ACCESS_MAP.get(prop_access, prop_access)
+ print(f'''{prefix}{prop_access} {prop_type} {prop_name} = {value_ellipsis(prop_type)};''', file=file)
def print_interface(iface, *, prefix, file, print_boring, only_interface, declarations):
name = iface.get('name')
@@ -163,7 +166,7 @@ def check_documented(document, declarations, stats):
assert False, (klass, item)
if not document_has_elem_with_text(document, elem, item_repr):
- if opts.print_errors:
+ if arguments.print_errors:
print(f'{klass} {item} is not documented :(')
missing.append((klass, item))
@@ -189,7 +192,7 @@ def xml_to_text(destination, xml, *, only_interface=None):
if not name in BORING_INTERFACES:
interfaces.append(name)
- print(f'''}};''', file=file)
+ print('''};''', file=file)
return file.getvalue(), declarations, interfaces
@@ -202,7 +205,7 @@ def subst_output(document, programlisting, stats):
node = programlisting.get('node')
interface = programlisting.get('interface')
- argv = [f'{opts.build_dir}/{executable}', f'--bus-introspect={interface}']
+ argv = [f'{arguments.build_dir}/{executable}', f'--bus-introspect={interface}']
if isinstance(shlex_join, Exception):
print(f'COMMAND: {" ".join(shlex_quote(arg) for arg in argv)}')
else:
@@ -225,14 +228,11 @@ def subst_output(document, programlisting, stats):
# delete old comments
for child in parent:
- if (child.tag == etree.Comment
- and 'Autogenerated' in child.text):
+ if child.tag is etree.Comment and 'Autogenerated' in child.text:
parent.remove(child)
- if (child.tag == etree.Comment
- and 'not documented' in child.text):
+ if child.tag is etree.Comment and 'not documented' in child.text:
parent.remove(child)
- if (child.tag == "variablelist"
- and child.attrib.get("generated",False) == "True"):
+ if child.tag == "variablelist" and child.attrib.get("generated", False) == "True":
parent.remove(child)
# insert pointer for systemd-directives generation
@@ -282,7 +282,7 @@ def process(page):
# print('parsing {}'.format(name), file=sys.stderr)
if xml.tag != 'refentry':
- return
+ return None
stats = collections.Counter()
@@ -297,11 +297,11 @@ def process(page):
out_text[out_text.find(' t/2 else (YELLOW if m else GREEN)
print(f'{color}{p:{mlen + 1}} {t - m}/{t} {c}{RESET}')
- if opts.test and modified:
- exit(f'Outdated pages: {", ".join(modified)}\n'
- f'Hint: ninja -C {opts.build_dir} update-dbus-docs')
+ if arguments.test and modified:
+ sys.exit(f'Outdated pages: {", ".join(modified)}\n'
+ f'Hint: ninja -C {arguments.build_dir} update-dbus-docs')
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/update-man-rules.py b/tools/update-man-rules.py
index 3a8c31dc35..1c2c9a8f65 100755
--- a/tools/update-man-rules.py
+++ b/tools/update-man-rules.py
@@ -1,16 +1,17 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1-or-later
-from __future__ import print_function
import collections
import glob
+import pprint
import sys
from pathlib import Path
-import pprint
+
from xml_helper import xml_parse
+
def man(page, number):
- return '{}.{}'.format(page, number)
+ return f'{page}.{number}'
def add_rules(rules, name):
xml = xml_parse(name)
@@ -60,7 +61,7 @@ MESON_FOOTER = '''\
# Really, do not edit.
'''
-def make_mesonfile(rules, dist_files):
+def make_mesonfile(rules, _dist_files):
# reformat rules as
# grouped = [ [name, section, [alias...], condition], ...]
#
@@ -77,7 +78,7 @@ def make_mesonfile(rules, dist_files):
for p, aliases in sorted(grouped.items()) ]
return '\n'.join((MESON_HEADER, pprint.pformat(lines)[1:-1], MESON_FOOTER))
-if __name__ == '__main__':
+def main():
source_glob = sys.argv[1]
target = Path(sys.argv[2])
@@ -95,3 +96,6 @@ if __name__ == '__main__':
tmp = target.with_suffix('.tmp')
tmp.write_text(text)
tmp.rename(target)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/xml_helper.py b/tools/xml_helper.py
index bc14298653..6f71350f5b 100755
--- a/tools/xml_helper.py
+++ b/tools/xml_helper.py
@@ -3,14 +3,18 @@
from lxml import etree as tree
+
class CustomResolver(tree.Resolver):
- def resolve(self, url, id, context):
+ def resolve(self, url, _id, context):
if 'custom-entities.ent' in url:
return self.resolve_filename('man/custom-entities.ent', context)
if 'ethtool-link-mode' in url:
return self.resolve_filename('src/shared/ethtool-link-mode.xml', context)
+ return None
+
_parser = tree.XMLParser()
+# pylint: disable=no-member
_parser.resolvers.add(CustomResolver())
def xml_parse(page):