pylint: 6 more

Change-Id: Ic16db7972fe6f9da86592d56f4983572d7c68989
Sorin Sbarnea 2020-09-10 15:28:52 +01:00
parent c41b9c6fa0
commit 360c57118c
7 changed files with 7 additions and 22 deletions

View File

@@ -14,7 +14,6 @@ disable =
     inconsistent-return-statements,
     invalid-name,
     line-too-long,
-    literal-comparison,
     missing-class-docstring,
     missing-function-docstring,
     missing-module-docstring,
@@ -22,7 +21,6 @@ disable =
     no-self-use,
     protected-access,
     redefined-outer-name,
-    simplifiable-if-expression,
     super-init-not-called,
     super-with-arguments,
     too-few-public-methods,
@@ -31,12 +29,8 @@ disable =
     too-many-instance-attributes,
     too-many-locals,
     too-many-statements,
-    undefined-loop-variable,
-    ungrouped-imports,
-    unidiomatic-typecheck,
     unused-argument,
     unused-variable,
-    useless-else-on-loop,
     useless-object-inheritance,

 [REPORTS]
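
The six checks removed from the disable list above are now enforced across the repository. As a reference only, here is a minimal sketch of the pattern each check flags; none of these names come from this repository, they are purely illustrative:

    import os
    import sys
    import os.path                            # ungrouped-imports: keep the os imports together


    def literal_comparison(name):
        return name is "gate"                 # literal-comparison: identity check against a literal, use ==


    def simplifiable_if_expression(nonvoting):
        return False if nonvoting else True   # simplifiable-if-expression: just write `not nonvoting`


    def undefined_loop_variable(jobs):
        for job in jobs:
            pass
        return job                            # undefined-loop-variable: `job` is never bound if `jobs` is empty


    def unidiomatic_typecheck(facet):
        return type(facet) == list            # unidiomatic-typecheck: use isinstance(facet, list)


    def useless_else_on_loop(test_ids, failing):
        for test_id in test_ids:
            if test_id in failing:
                return True
        else:                                 # useless-else-on-loop: the loop has no break, so the
            return False                      # else always runs; dedent the return instead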

View File

@@ -59,16 +59,6 @@ from elastic_recheck import log as logging
 LPCACHEDIR = os.path.expanduser('~/.launchpadlib/cache')

-try:
-    import daemon.pidlockfile
-    pid_file_module = daemon.pidlockfile
-except Exception:
-    # as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
-    # instead it depends on lockfile-0.9.1
-    import daemon.pidfile
-    pid_file_module = daemon.pidfile
-
-
 class ElasticRecheckException(Exception):
     pass
@@ -328,7 +318,7 @@ def main():
     if args.foreground:
         _main(args, config)
     else:
-        pid = pid_file_module.TimeoutPIDLockFile(config.pid_fn, 10)
+        pid = daemon.pidfile.TimeoutPIDLockFile(config.pid_fn, 10)
         with daemon.DaemonContext(pidfile=pid):
             _main(args, config)
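
The removed try/except dates from python-daemon releases older than 1.6, which still bundled daemon.pidlockfile; as the deleted comment notes, newer releases ship daemon.pidfile instead, so the pid_file_module indirection is no longer needed. A minimal sketch of the resulting pattern, assuming python-daemon >= 1.6 (the run_daemonized wrapper and its arguments are illustrative, not part of this change):

    import daemon
    import daemon.pidfile


    def run_daemonized(pid_fn, work):
        # Give up after 10 seconds if another instance already holds the pid file.
        pid = daemon.pidfile.TimeoutPIDLockFile(pid_fn, 10)
        with daemon.DaemonContext(pidfile=pid):
            work()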

View File

@@ -186,7 +186,7 @@ def main():
                   fails=0,
                   fails24=0,
                   data=[],
-                  voting=(False if query.get('allow-nonvoting') else True))
+                  voting=(not query.get('allow-nonvoting')))
         buglist.append(bug)
         try:
             results = classifier.hits_by_query(query['query'],
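
Both forms of the voting flag are equivalent for every value query.get('allow-nonvoting') can return, including None when the key is absent; `not` already yields a plain bool, so the conditional expression added nothing. A quick illustrative check:

    for value in (True, False, None, 'yes', '', 0, 1):
        assert (False if value else True) == (not value)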

View File

@@ -263,6 +263,7 @@ class Stream(object):
         # Wait 40 seconds between queries.
         sleep_time = 40
         timed_out = False
+        job = None
         # This checks that we've got the console log uploaded, need to retry
         # in case ES goes bonkers on cold data, which it does some times.
         # We check at least once so that we can return success if data is
@@ -363,8 +364,7 @@ def check_failed_test_ids_for_job(build_uuid, test_ids, session):
     for test_id in test_ids:
         if test_id in failing_test_ids:
             return True
-    else:
-        return False
+    return False


 class Classifier(object):
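
An else: clause on a for loop runs only when the loop finishes without hitting break; this loop never breaks (it returns on a match), so the clause ran unconditionally and the dedented return False is behaviourally identical. The new job = None above serves a related purpose: it gives the polling loop's variable a defined value even when the loop body never runs, which is also why the test at the end of this change pre-initialises job. A small sketch of the simplified function's behaviour (the data values are made up):

    def any_failed(test_ids, failing_test_ids):
        for test_id in test_ids:
            if test_id in failing_test_ids:
                return True
        return False                              # reached only when no test_id matched


    assert any_failed(['t1', 't2'], {'t2'}) is True
    assert any_failed(['t1'], {'t2'}) is False
    assert any_failed([], {'t2'}) is False        # empty input behaves the same as the old for/else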

View File

@@ -50,7 +50,7 @@ def generic(raw_query, facet=None):
         data = dict(field=facet, size=200)
         # yes, elasticsearch is odd, and the way to do multiple facets
         # is to specify the plural key value
-        if type(facet) == list:
+        if isinstance(facet, list):
            data = dict(fields=facet, size=200)
         query['facets'] = {
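
isinstance() is the idiomatic replacement for type(x) == list here and in the next file: besides satisfying unidiomatic-typecheck, it also accepts list subclasses, which an exact type comparison rejects. A small illustration (the Facets subclass is hypothetical):

    class Facets(list):
        pass


    facet = Facets(['build_status', 'build_name'])
    assert isinstance(facet, list)            # a subclass passes the isinstance check
    assert not (type(facet) == list)          # but fails the exact type comparison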

View File

@@ -218,7 +218,7 @@ class Hit(object):
         we use logstash, there is only ever one element in these lists.
         """
         def first(item):
-            if type(item) == list:
+            if isinstance(item, list):
                 # We've seen cases where the field data, like @timestamp, is
                 # too large so we don't get anything back from elastic-search,
                 # so skip over those.

View File

@@ -96,6 +96,7 @@ class TestStream(tests.TestCase):
                           stream.get_failed_tempest)

     def test_gerrit_parsing(self):
+        job = None  # avoid undefined-loop-variable with pylint
         with open("elastic_recheck/tests/unit/jenkins/events.json") as f:
             j = json.load(f)
         events = j['events']