-
Notifications
You must be signed in to change notification settings - Fork 16
Expand file tree
/
Copy pathgithub.py
More file actions
335 lines (285 loc) · 11 KB
/
github.py
File metadata and controls
335 lines (285 loc) · 11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
import json
from itertools import chain
from typing import Dict, List, Optional, Tuple, Union
import traitlets
from jupyter_server.utils import url_path_join
from tornado.httputil import url_concat
from tornado.web import HTTPError
from ..base import CommentReply, NewComment, PRConfig
from .manager import PullRequestsManager
class GitHubManager(PullRequestsManager):
    """Pull request manager for GitHub."""

    def __init__(self, config: traitlets.config.Config) -> None:
        super().__init__(PRConfig(config=config))
        # Cache of pull request descriptions keyed by their API URL.
        # Reset on every listing (see list_prs) so head/base SHAs never go stale.
        self._pull_requests_cache: Dict[str, dict] = {}

    @property
    def base_api_url(self) -> str:
        """str: Base REST API URL; configurable to point at GitHub Enterprise."""
        return self._config.api_base_url or "https://api.github.com"

    @property
    def per_page_argument(self) -> Optional[Tuple[str, int]]:
        """Returns query argument to set number of items per page.

        Returns
            [str, int]: (query argument name, value)
            None: the provider does not support pagination
        """
        return ("per_page", 100)

    async def get_current_user(self) -> Dict[str, str]:
        """Get the current user information.

        Returns:
            JSON description of the user matching the access token
        """
        git_url = url_path_join(self.base_api_url, "user")
        data = await self._call_github(git_url, has_pagination=False)
        return {"username": data["login"]}

    async def get_file_diff(self, pr_id: str, filename: str) -> Dict[str, str]:
        """Get the file diff for the pull request.

        Args:
            pr_id: pull request ID endpoint
            filename: The file name

        Returns:
            The file diff description with ``base`` and ``head`` entries,
            each carrying ``label``, ``sha`` and raw file ``content``.
        """
        pull_request = await self._get_pull_requests(pr_id)
        base_content = await self.__get_content(
            pull_request["base"]["repo"]["url"], filename, pull_request["base"]["sha"]
        )
        head_content = await self.__get_content(
            pull_request["head"]["repo"]["url"], filename, pull_request["head"]["sha"]
        )

        return {
            "base": {
                "label": pull_request["base"]["label"],
                "sha": pull_request["base"]["sha"],
                "content": base_content,
            },
            "head": {
                "label": pull_request["head"]["label"],
                "sha": pull_request["head"]["sha"],
                "content": head_content,
            },
        }

    def get_search_filter(self, username: str, pr_filter: str) -> str:
        """Get the query arguments for a given filter.

        Args:
            username: Current username
            pr_filter: Generic pull request filter; one of "created" or "assigned"

        Returns:
            The query arguments for the service

        Raises:
            ValueError: If ``pr_filter`` is not a supported filter name.
        """
        if pr_filter == "created":
            search_filter = "+author:"
        elif pr_filter == "assigned":
            search_filter = "+assignee:"
        else:
            # Fail fast with a clear message instead of the implicit
            # UnboundLocalError an unknown filter would otherwise trigger.
            raise ValueError(f"Invalid pull request filter: {pr_filter!r}")

        return search_filter + username

    async def get_threads(
        self, pr_id: str, filename: Optional[str] = None
    ) -> List[dict]:
        """Get the discussions on a file or the pull request.

        Args:
            pr_id: pull request ID endpoint
            filename: The file name; None to get the discussion on the pull requests

        Returns:
            The discussions
        """
        git_url = url_path_join(pr_id, "/comments")
        if filename is None:
            # Pull-request-level comments live on the issues endpoint,
            # so rewrite the "pulls" URL segment accordingly.
            results = await self._call_github(git_url.replace("pulls", "issues"))
            return [
                {
                    "id": result["id"],
                    "comments": [GitHubManager._response_to_comment(result)],
                    "pullRequestId": pr_id,
                }
                for result in results
            ]
        else:
            results = await self._call_github(git_url)
            threads = []
            replies = []
            # First pass: top-level comments start threads, replies are set aside.
            for result in results:
                if result["path"] == filename:
                    if "in_reply_to_id" in result:
                        replies.append(result)
                    else:
                        threads.append([result])
            # Second pass: attach each reply to the thread whose last comment
            # it answers. Loop until no reply can be placed anymore, since a
            # reply may answer another reply that has not been attached yet.
            has_changed = True
            while len(replies) > 0 and has_changed:
                has_changed = False
                for reply in replies.copy():
                    for comments in threads:
                        if comments[-1]["id"] == reply["in_reply_to_id"]:
                            comments.append(reply)
                            replies.remove(reply)
                            has_changed = True

            return [
                {
                    "id": thread[-1]["id"],  # Set discussion id as the last comment id
                    "comments": [GitHubManager._response_to_comment(c) for c in thread],
                    "filename": filename,
                    "line": thread[0]["line"],
                    "originalLine": thread[0]["original_line"]
                    if thread[0]["line"] is None
                    else None,
                    "pullRequestId": pr_id,
                }
                for thread in threads
            ]

    async def list_files(self, pr_id: str) -> List[Dict[str, str]]:
        """Get the list of modified files for a pull request.

        Args:
            pr_id: pull request ID endpoint

        Returns:
            The list of modified files, each with ``name`` and ``status``.
        """
        git_url = url_path_join(pr_id, "/files")
        results = await self._call_github(git_url)
        return [
            {
                "name": result["filename"],
                "status": result["status"],
            }
            for result in results
        ]

    async def list_prs(self, username: str, pr_filter: str) -> List[Dict[str, str]]:
        """Returns the list of pull requests for the given user.

        Args:
            username: User ID for the versioning service
            pr_filter: Filter to add to the pull requests requests

        Returns:
            The list of pull requests
        """
        search_filter = self.get_search_filter(username, pr_filter)

        # Use search API to find matching pull requests and return
        git_url = url_path_join(
            self.base_api_url, "/search/issues?q=+state:open+type:pr" + search_filter
        )

        results = await self._call_github(git_url)

        # The search endpoint pages results; flatten the "items" of every page.
        data = [
            {
                "id": result["pull_request"]["url"],
                "title": result["title"],
                "body": result["body"],
                "internalId": result["id"],
                "link": result["html_url"],
            }
            for result in chain.from_iterable(r["items"] for r in results)
        ]

        # Reset cache
        self._pull_requests_cache = {}

        return data

    async def post_comment(
        self, pr_id: str, body: Union[CommentReply, NewComment]
    ) -> Dict[str, str]:
        """Create a new comment on a file or the pull request.

        Args:
            pr_id: pull request ID endpoint
            body: Comment body

        Returns:
            The created comment
        """
        git_url = url_path_join(pr_id, "comments")
        filename = body.filename
        if filename is None:
            # Concept of reply does not exist at pull request level in GitHub
            data = {"body": body.text}
            git_url = git_url.replace("pulls", "issues")
        else:
            if isinstance(body, CommentReply):
                data = {"body": body.text, "in_reply_to": body.inReplyTo}
            else:
                data = {
                    "body": body.text,
                    "commit_id": (await self._get_pull_requests(pr_id))["head"]["sha"],
                    "path": filename,
                    # Prefer the current line; fall back to the original line
                    # when the comment targets a deleted/left-side line.
                    "line": body.line or body.originalLine,
                    "side": "RIGHT" if body.line is not None else "LEFT",
                }

        response = await self._call_github(git_url, method="POST", body=data)
        return GitHubManager._response_to_comment(response)

    async def _call_github(
        self,
        url: str,
        load_json: bool = True,
        method: str = "GET",
        body: Optional[dict] = None,
        params: Optional[Dict[str, str]] = None,
        media_type: str = "application/vnd.github.v3+json",
        has_pagination: bool = True,
    ) -> Union[dict, str]:
        """Call GitHub

        The request is presumed to support pagination by default if
        - The method is GET
        - load_json is True
        - The provider returns not None per_page_argument property

        Args:
            url: Endpoint to request
            load_json: Is the response of JSON type
            method: HTTP method
            body: Request body; None if no body
            params: Query arguments as dictionary; None if no arguments
            media_type: Type of accepted content
            has_pagination: Whether the pagination query arguments should be appended

        Returns:
            List or Dict: Create from JSON response body if load_json is True
            str: Raw response body if load_json is False
        """
        headers = {
            "Accept": media_type,
            "Authorization": f"token {self._config.access_token}",
        }

        return await super()._call_provider(
            url,
            load_json=load_json,
            method=method,
            body=body,
            params=params,
            headers=headers,
            has_pagination=has_pagination,
        )

    async def _get_pull_requests(self, pr_id: str) -> dict:
        """Get a single pull request information.

        It uses the cached value if available.

        Args:
            pr_id: The API url of the pull request to request

        Returns:
            The JSON description of the pull request
        """
        pull_request = self._pull_requests_cache.get(pr_id)
        if pull_request is None:
            pull_request = await self._call_github(pr_id, has_pagination=False)
            self._pull_requests_cache[pr_id] = pull_request

        return pull_request

    @staticmethod
    def _response_to_comment(result: Dict[str, str]) -> Dict[str, str]:
        """Format raw comment to generic data structure.

        Args:
            result: Raw comment object from GitHub

        Returns:
            Standardized comment object
        """
        return {
            "id": result["id"],
            "text": result["body"],
            "updatedAt": result["updated_at"],
            "userName": result["user"]["login"],
            "userPicture": result["user"]["avatar_url"],
            "inReplyToId": result.get("in_reply_to_id"),
        }

    async def __get_content(self, url: str, filename: str, sha: str) -> str:
        """Fetch the raw content of a file at a given commit.

        Args:
            url: Repository API URL
            filename: Path of the file within the repository
            sha: Commit SHA to read the file at

        Returns:
            The raw file content; an empty string if the file does not
            exist at that commit (HTTP 404).
        """
        link = url_concat(
            url_path_join(url, "contents", filename),
            {"ref": sha},
        )
        try:
            return await self._call_github(
                link, media_type="application/vnd.github.v3.raw", load_json=False
            )
        except HTTPError as e:
            if e.status_code == 404:
                # Missing on this side of the diff (added or deleted file).
                return ""
            else:
                raise e