
Commit ca400a1

Ignore more rules and fix more issues
1 parent 29d29c7 commit ca400a1


71 files changed (+325 -354 lines)

kibble/api/handler.py (+5 -5)

@@ -54,6 +54,7 @@
 
 class KibbleHTTPError(Exception):
     def __init__(self, code, message):
+        super().__init__()
         self.code = code
         self.message = message
 
@@ -75,7 +76,7 @@ def __call__(self, environ, start_response, session):
         # Read JSON client data if any
         try:
            request_size = int(environ.get("CONTENT_LENGTH", 0))
-        except (ValueError):
+        except ValueError:
            request_size = 0
         requestBody = environ["wsgi.input"].read(request_size)
         formdata = {}
@@ -123,7 +124,7 @@ def __call__(self, environ, start_response, session):
            ) + "\n"
            return
 
-        except:
+        except: # pylint: disable=bare-except
            err_type, err_value, tb = sys.exc_info()
            traceback_output = ["API traceback:"]
            traceback_output += traceback.format_tb(tb)
@@ -133,7 +134,7 @@ def __call__(self, environ, start_response, session):
                start_response(
                    "500 Internal Server Error", [("Content-Type", "application/json")]
                )
-            except:
+            except: # pylint: disable=bare-except
                pass
            yield json.dumps({"code": "500", "reason": "\n".join(traceback_output)})
 
@@ -142,7 +143,6 @@ def fourohfour(environ, start_response):
     """A very simple 404 handler"""
     start_response("404 Not Found", [("Content-Type", "application/json")])
     yield json.dumps({"code": 404, "reason": "API endpoint not found"}, indent=4) + "\n"
-    return
 
 
 def application(environ, start_response):
@@ -164,7 +164,7 @@ def application(environ, start_response):
            session.headers.append(bucket)
        try:
            start_response("200 Okay", session.headers)
-        except:
+        except: # pylint: disable=bare-except
            pass
        a += 1
    # WSGI prefers byte strings, so convert if regular py3 string
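
For context on the two exception-handling fixes above: parentheses around a single exception class are redundant, and the bare except clauses that remain are kept deliberately and annotated for pylint. A minimal sketch, with hypothetical function names not taken from Kibble:

# Sketch only; parse_content_length/swallow_errors are illustrative names.
def parse_content_length(environ: dict) -> int:
    """Return CONTENT_LENGTH as an int, falling back to 0 on bad input."""
    try:
        return int(environ.get("CONTENT_LENGTH", 0))
    except ValueError:  # "except (ValueError):" also works, but the parentheses add nothing
        return 0

def swallow_errors(start_response, status, headers):
    """Call start_response and deliberately ignore any failure."""
    try:
        start_response(status, headers)
    except:  # pylint: disable=bare-except
        # A bare except is normally flagged by pylint; the pragma records that
        # catching everything here is intentional rather than an oversight.
        pass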

kibble/api/pages/bio/newtimers.py (+3 -6)

@@ -75,10 +75,10 @@ def find_earlier(session, query, when, who, which, where, doctype, dOrg):
     if "aggs" in query:
         del query["aggs"]
 
-    rangeQuery = {"range": {which: {"from": 0, "to": time.time()}}}
+    range_query = {"range": {which: {"from": 0, "to": time.time()}}}
 
     query["query"]["bool"]["must"] = [
-        rangeQuery,
+        range_query,
         {"term": {"organisation": dOrg}},
         {"term": {where: who}},
     ]
@@ -90,10 +90,7 @@ def find_earlier(session, query, when, who, which, where, doctype, dOrg):
         doc = res["hits"]["hits"][0]["_source"]
         if doc[which] >= when:
             return [doc[which], doc]
-        else:
-            return [-1, None]
-    else:
-        return [-1, None]
+    return [-1, None]
 
 
 def run(API, environ, indata, session):
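
The second hunk above is pylint's no-else-return simplification: once a branch returns, the trailing else adds only indentation. A small sketch with made-up data:

# Sketch only; the hit structure below is invented to mirror the shape used above.
def earliest_at_or_after(hits, when):
    """Return [timestamp, doc] for the first hit at/after `when`, else [-1, None]."""
    if hits:
        doc = hits[0]["_source"]
        if doc["ts"] >= when:
            return [doc["ts"], doc]
    # One fall-through return replaces both nested `else: return [-1, None]` branches.
    return [-1, None]

print(earliest_at_or_after([{"_source": {"ts": 10}}], 5))  # [10, {'ts': 10}]
print(earliest_at_or_after([{"_source": {"ts": 3}}], 5))   # [-1, None]
print(earliest_at_or_after([], 5))                         # [-1, None]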

kibble/api/pages/ci/queue.py (-1)

@@ -65,7 +65,6 @@
 This is the CI queue timeseries renderer for Kibble
 """
 
-import hashlib
 import json
 import time
 

kibble/api/pages/ci/status.py (-1)

@@ -65,7 +65,6 @@
 This is the CI queue status (blocked/stuck) timeseries renderer for Kibble
 """
 
-import hashlib
 import json
 import time
 

kibble/api/pages/ci/top-buildcount.py (-1)

@@ -66,7 +66,6 @@
 """
 
 import json
-import re
 import time
 
 

kibble/api/pages/ci/top-buildtime.py (-1)

@@ -66,7 +66,6 @@
 """
 
 import json
-import re
 import time
 
 

kibble/api/pages/code/commits.py (+1 -1)

@@ -65,7 +65,7 @@
 This is the TopN committers list renderer for Kibble
 """
 
-import hashlib
+
 import json
 import time
 

kibble/api/pages/code/evolution.py (+2 -2)

@@ -65,7 +65,7 @@
 This is the TopN committers list renderer for Kibble
 """
 
-import hashlib
+
 import json
 import time
 
@@ -119,7 +119,7 @@ def run(API, environ, indata, session):
     )
     sid = res["_scroll_id"]
     scroll_size = res["hits"]["total"]
-    if type(scroll_size) is dict:
+    if isinstance(scroll_size, dict):
        scroll_size = scroll_size["value"] # ES >= 7.x
 
     timeseries = []
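
The isinstance change above follows pylint's unidiomatic-typecheck rule; isinstance() also matches subclasses, which `type(x) is dict` does not. A brief sketch, with a made-up sample value:

# Sketch only; mimics how hits.total differs between Elasticsearch versions.
scroll_size = {"value": 1234, "relation": "eq"}  # ES >= 7.x returns a dict here
if isinstance(scroll_size, dict):                # preferred over: type(scroll_size) is dict
    scroll_size = scroll_size["value"]
assert scroll_size == 1234                       # older ES would already hand back the int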

kibble/api/pages/code/pony-timeseries.py (-1)

@@ -67,7 +67,6 @@
 
 import datetime
 import json
-import re
 import time
 
 import dateutil.relativedelta

kibble/api/pages/code/pony.py (-1)

@@ -66,7 +66,6 @@
 """
 
 import json
-import re
 import time
 
 

kibble/api/pages/code/punchcard.py (+1 -1)

@@ -65,7 +65,7 @@
 This is the commit punch-card renderer for Kibble
 """
 
-import hashlib
+
 import json
 import time
 

kibble/api/pages/code/relationships.py (+32 -35)

@@ -66,7 +66,6 @@
 """
 
 import copy
-import hashlib
 import json
 import math
 import re
@@ -82,34 +81,34 @@ def run(API, environ, indata, session):
     now = time.time()
 
     # First, fetch the view if we have such a thing enabled
-    viewList = []
+    view_list = []
     if indata.get("view"):
-        viewList = session.getView(indata.get("view"))
+        view_list = session.getView(indata.get("view"))
     if indata.get("subfilter"):
-        viewList = session.subFilter(indata.get("subfilter"), view=viewList)
+        view_list = session.subFilter(indata.get("subfilter"), view=view_list)
 
-    dateTo = indata.get("to", int(time.time()))
-    dateFrom = indata.get(
-        "from", dateTo - (86400 * 30 * 6)
+    date_to = indata.get("to", int(time.time()))
+    date_from = indata.get(
+        "from", date_to - (86400 * 30 * 6)
     ) # Default to a 6 month span
 
-    which = "committer_email"
-    role = "committer"
-    if indata.get("author", False):
-        which = "author_email"
-        role = "author"
+    # which = "committer_email"
+    # role = "committer"
+    # if indata.get("author", False):
+    #     which = "author_email"
+    #     role = "author"
 
     interval = indata.get("interval", "day")
 
     ####################################################################
     ####################################################################
-    dOrg = session.user["defaultOrganisation"] or "apache"
+    d_org = session.user["defaultOrganisation"] or "apache"
     query = {
        "query": {
            "bool": {
                "must": [
-                    {"range": {"tsday": {"from": dateFrom, "to": dateTo}}},
-                    {"term": {"organisation": dOrg}},
+                    {"range": {"tsday": {"from": date_from, "to": date_to}}},
+                    {"term": {"organisation": d_org}},
                ]
            }
        }
@@ -119,8 +118,8 @@ def run(API, environ, indata, session):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
-    elif viewList:
-        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})
+    elif view_list:
+        query["query"]["bool"]["must"].append({"terms": {"sourceID": view_list}})
     if indata.get("email"):
        query["query"]["bool"]["must"].append(
            {
@@ -140,16 +139,15 @@ def run(API, environ, indata, session):
 
     repos = {}
     repo_commits = {}
-    authorlinks = {}
     max_commits = 0
     max_links = 0
     max_shared = 0
     max_authors = 0
-    minLinks = indata.get("links", 1)
+    min_links = indata.get("links", 1)
 
     # For each repo, count commits and gather data on authors
     for doc in res["aggregations"]["per_repo"]["buckets"]:
-        sourceID = doc["key"]
+        source_id = doc["key"]
        commits = doc["doc_count"]
 
        # Gather the unique authors/committers
@@ -164,7 +162,7 @@ def run(API, environ, indata, session):
            }
        }
        xquery = copy.deepcopy(query)
-        xquery["query"]["bool"]["must"].append({"term": {"sourceID": sourceID}})
+        xquery["query"]["bool"]["must"].append({"term": {"sourceID": source_id}})
        xres = session.DB.ES.search(
            index=session.DB.dbname, doc_type="code_commit", size=0, body=xquery
        )
@@ -173,8 +171,8 @@ def run(API, environ, indata, session):
            authors.append(person["key"])
        if commits > max_commits:
            max_commits = commits
-        repos[sourceID] = authors
-        repo_commits[sourceID] = commits
+        repos[source_id] = authors
+        repo_commits[source_id] = commits
 
     # Now, figure out which repos share the same contributors
     repo_links = {}
@@ -184,7 +182,6 @@ def run(API, environ, indata, session):
 
     # Grab data of all sources
     for ID, repo in repos.items():
-        mylinks = {}
        if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id=ID):
            continue
        repodatas[ID] = session.DB.ES.get(
@@ -193,7 +190,7 @@ def run(API, environ, indata, session):
 
     for ID, repo in repos.items():
        mylinks = {}
-        if not ID in repodatas:
+        if ID not in repodatas:
            continue
        repodata = repodatas[ID]
        oID = ID
@@ -221,7 +218,7 @@ def run(API, environ, indata, session):
                xlinks.append(author)
            lname = "%s@%s" % (ID, xID) # Link name
            rname = "%s@%s" % (xID, ID) # Reverse link name
-            if len(xlinks) >= minLinks and not rname in repo_links:
+            if len(xlinks) >= min_links and not rname in repo_links:
                mylinks[xID] = len(xlinks)
                repo_links[lname] = repo_links.get(lname, 0) + len(
                    xlinks
@@ -255,25 +252,25 @@ def run(API, environ, indata, session):
     nodes = []
     links = []
     existing_repos = []
-    for sourceID in repo_notoriety.keys():
+    for source_id in repo_notoriety:
        lsize = 0
-        for k in repo_links.keys():
+        for k in repo_links:
            fr, to = k.split("@")
-            if fr == sourceID or to == sourceID:
+            if source_id in (fr, to):
                lsize += 1
-        asize = len(repo_authors[sourceID])
+        asize = len(repo_authors[source_id])
        doc = {
-            "id": sourceID,
-            "name": sourceID,
-            "commits": repo_commits[sourceID],
+            "id": source_id,
+            "name": source_id,
+            "commits": repo_commits[source_id],
            "authors": asize,
            "links": lsize,
            "size": max(5, (1 - abs(math.log10(asize / max_authors))) * 45),
            "tooltip": "%u connections, %u contributors, %u commits"
-            % (lsize, asize, repo_commits[sourceID]),
+            % (lsize, asize, repo_commits[source_id]),
        }
        nodes.append(doc)
-        existing_repos.append(sourceID)
+        existing_repos.append(source_id)
 
     for k, s in repo_links.items():
        size = s
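
Several of the changes above are small readability idioms: iterating a dict already yields its keys without calling .keys(), `x not in d` reads better than `not x in d`, and a membership test against a tuple replaces a chained or-comparison. A self-contained sketch with made-up data:

# Sketch only; repo names and link counts are invented for illustration.
repo_links = {"alpha@beta": 3, "beta@gamma": 1}
source_id = "beta"

lsize = 0
for k in repo_links:            # no .keys() needed when iterating a dict
    fr, to = k.split("@")
    if source_id in (fr, to):   # replaces: fr == source_id or to == source_id
        lsize += 1

assert lsize == 2
assert "gamma@delta" not in repo_links  # preferred over: not "gamma@delta" in repo_links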

kibble/api/pages/code/retention.py (-1)

@@ -67,7 +67,6 @@
 
 import datetime
 import json
-import re
 import time
 
 

kibble/api/pages/code/trends.py (+2 -21)

@@ -253,24 +253,5 @@ def run(API, environ, indata, session):
        },
     }
 
-    JSON_OUT = {"trends": trends, "okay": True, "responseTime": time.time() - now}
-    yield json.dumps(JSON_OUT)
-
-
-"""
-commits = {
-    before = pcommits,
-    after = commits,
-    title = "Commits"
-},
-[role.."s"] = {
-    before = pcommitters,
-    after = committers,
-    title = role:gsub("^(%S)", string.upper).."s",
-},
-lines = {
-    before = pdeletions + pinsertions,
-    after = deletions + insertions,
-    title = "Lines changed"
-}
-"""
+    json_out = {"trends": trends, "okay": True, "responseTime": time.time() - now}
+    yield json.dumps(json_out)
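
The rename above addresses pylint's invalid-name check: UPPER_CASE names are conventionally reserved for module-level constants, so a local variable becomes snake_case (the dead commented-out block is simply dropped). A minimal sketch, where render_trends is a hypothetical stand-in for the run() generator:

import json
import time

# Sketch only; render_trends mimics the shape of the generator touched above.
def render_trends(trends: dict, started: float):
    json_out = {"trends": trends, "okay": True, "responseTime": time.time() - started}
    yield json.dumps(json_out)

print(next(render_trends({"commits": {"after": 42}}, time.time())))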

kibble/api/pages/forum/actors.py (+1 -1)

@@ -65,7 +65,7 @@
 This is the forum actors stats page for Kibble
 """
 
-import hashlib
+
 import json
 import time
 

kibble/api/pages/forum/issues.py (-1)

@@ -65,7 +65,6 @@
 This is the forum timeseries renderer for Kibble
 """
 
-import hashlib
 import json
 import time
 

kibble/api/pages/forum/top.py (+1 -1)

@@ -65,7 +65,7 @@
 This is the issue actors stats page for Kibble
 """
 
-import hashlib
+
 import json
 import time
 

kibble/api/pages/issue/actors.py (-1)

@@ -65,7 +65,6 @@
 This is the issue actors stats page for Kibble
 """
 
-import hashlib
 import json
 import time
 
