pylint: 6 more

Change-Id: Ic16db7972fe6f9da86592d56f4983572d7c68989
This commit is contained in:
Sorin Sbarnea 2020-09-10 15:28:52 +01:00
parent c41b9c6fa0
commit 360c57118c
7 changed files with 7 additions and 22 deletions

View File

@ -14,7 +14,6 @@ disable =
inconsistent-return-statements,
invalid-name,
line-too-long,
literal-comparison,
missing-class-docstring,
missing-function-docstring,
missing-module-docstring,
@ -22,7 +21,6 @@ disable =
no-self-use,
protected-access,
redefined-outer-name,
simplifiable-if-expression,
super-init-not-called,
super-with-arguments,
too-few-public-methods,
@ -31,12 +29,8 @@ disable =
too-many-instance-attributes,
too-many-locals,
too-many-statements,
undefined-loop-variable,
ungrouped-imports,
unidiomatic-typecheck,
unused-argument,
unused-variable,
useless-else-on-loop,
useless-object-inheritance,
[REPORTS]

View File

@ -59,16 +59,6 @@ from elastic_recheck import log as logging
LPCACHEDIR = os.path.expanduser('~/.launchpadlib/cache')
try:
import daemon.pidlockfile
pid_file_module = daemon.pidlockfile
except Exception:
# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
# instead it depends on lockfile-0.9.1
import daemon.pidfile
pid_file_module = daemon.pidfile
class ElasticRecheckException(Exception):
    """Base exception type raised for elastic-recheck specific failures."""
@ -328,7 +318,7 @@ def main():
if args.foreground:
_main(args, config)
else:
pid = pid_file_module.TimeoutPIDLockFile(config.pid_fn, 10)
pid = daemon.pid_file.TimeoutPIDLockFile(config.pid_fn, 10)
with daemon.DaemonContext(pidfile=pid):
_main(args, config)

View File

@ -186,7 +186,7 @@ def main():
fails=0,
fails24=0,
data=[],
voting=(False if query.get('allow-nonvoting') else True))
voting=(not query.get('allow-nonvoting')))
buglist.append(bug)
try:
results = classifier.hits_by_query(query['query'],

View File

@ -263,6 +263,7 @@ class Stream(object):
# Wait 40 seconds between queries.
sleep_time = 40
timed_out = False
job = None
# This checks that we've got the console log uploaded, need to retry
# in case ES goes bonkers on cold data, which it does some times.
# We check at least once so that we can return success if data is
@ -363,8 +364,7 @@ def check_failed_test_ids_for_job(build_uuid, test_ids, session):
for test_id in test_ids:
if test_id in failing_test_ids:
return True
else:
return False
return False
class Classifier(object):

View File

@ -50,7 +50,7 @@ def generic(raw_query, facet=None):
data = dict(field=facet, size=200)
# yes, elasticsearch is odd, and the way to do multiple facets
# is to specify the plural key value
if type(facet) == list:
if isinstance(facet, list):
data = dict(fields=facet, size=200)
query['facets'] = {

View File

@ -218,7 +218,7 @@ class Hit(object):
we use logstash, there is only ever one element in these lists.
"""
def first(item):
if type(item) == list:
if isinstance(item, list):
# We've seen cases where the field data, like @timestamp, is
# too large so we don't get anything back from elastic-search,
# so skip over those.

View File

@ -96,6 +96,7 @@ class TestStream(tests.TestCase):
stream.get_failed_tempest)
def test_gerrit_parsing(self):
job = None # avoid undefined-loop-variable with pylint
with open("elastic_recheck/tests/unit/jenkins/events.json") as f:
j = json.load(f)
events = j['events']