|
1 | 1 | from __future__ import annotations |
2 | 2 |
|
3 | 3 | import argparse |
| 4 | +import io |
4 | 5 | import json |
5 | 6 | import os |
6 | 7 | import queue |
|
10 | 11 | import sys |
11 | 12 | import tempfile |
12 | 13 | import threading |
| 14 | +import time |
| 15 | +import urllib.request |
| 16 | +import uuid |
| 17 | +import zipfile |
13 | 18 | from typing import NamedTuple |
14 | 19 |
|
15 | 20 | SRC = os.path.abspath('../sentry') |
@@ -177,10 +182,77 @@ def _ssh_worker(q: queue.Queue[str], ssh: SSH) -> None: |
177 | 182 | ) |
178 | 183 |
|
179 | 184 |
|
def _gha_worker(q: queue.Queue[str]) -> None:
    """Drain the queue in batches via a GitHub Actions workflow.

    Repeatedly takes up to 16 shas from ``q``, dispatches the
    ``run.yml`` workflow with a unique artifact name, polls until that
    artifact exists, downloads and extracts it into ``DATA``, then
    deletes the artifact.  Returns once the queue is empty.
    """
    # API token; the auth file is expected to exist for GHA workers.
    with open(os.path.expanduser('~/.github-auth.json')) as f:
        token = json.load(f)['token']

    # loop-invariant: build the auth header once, not per batch
    headers = {'Authorization': f'Bearer {token}'}

    while True:
        # grab up to 16 items per workflow dispatch
        items = []
        for _ in range(16):
            try:
                items.append(q.get(block=False))
            except queue.Empty:
                break
        if not items:
            return

        # unique artifact name lets us locate this run's output below
        aid = str(uuid.uuid4())
        data = {
            'ref': 'main',
            'inputs': {'artifact': aid, 'shas': ' '.join(items)},
        }

        req = urllib.request.Request(
            'https://api.github.com/repos/asottile/sentry-mypy-stats/actions/workflows/run.yml/dispatches',  # noqa: E501
            method='POST',
            data=json.dumps(data).encode(),
            headers=headers,
        )
        # context manager: response is closed even if urlopen raises later
        with urllib.request.urlopen(req):
            pass

        # the workflow takes a while to start and run; don't poll right away
        time.sleep(120)

        # poll until the workflow has uploaded its artifact
        while True:
            req = urllib.request.Request(
                f'https://api.github.com/repos/asottile/sentry-mypy-stats/actions/artifacts?name={aid}',  # noqa: E501
                headers=headers,
            )
            with urllib.request.urlopen(req) as resp:
                artifacts_resp = json.load(resp)
            if artifacts_resp['artifacts']:
                break
            else:
                time.sleep(2)

        artifact, = artifacts_resp['artifacts']
        req = urllib.request.Request(artifact['archive_download_url'])
        # unredirected: don't forward the token if the download redirects
        for k, v in headers.items():
            req.add_unredirected_header(k, v)
        with urllib.request.urlopen(req) as resp:
            contents = resp.read()

        # extract into a tempdir inside DATA (same filesystem) so the
        # per-file os.rename() below cannot fail with a cross-device error
        with tempfile.TemporaryDirectory(dir=DATA) as tmpdir:
            with zipfile.ZipFile(io.BytesIO(contents)) as zipf:
                zipf.extractall(tmpdir)

            for name in os.listdir(tmpdir):
                os.rename(
                    os.path.join(tmpdir, name),
                    os.path.join(DATA, name),
                )

        # delete the artifact now that its contents are persisted locally
        req = urllib.request.Request(
            artifact['url'],
            method='DELETE',
            headers=headers,
        )
        with urllib.request.urlopen(req):
            pass
180 | 251 | def main() -> int: |
181 | 252 | parser = argparse.ArgumentParser() |
182 | 253 | parser.add_argument('--jobs', type=int, default=os.cpu_count() or 8) |
183 | 254 | parser.add_argument('--ssh', type=SSH.parse, action='append', default=[]) |
| 255 | + parser.add_argument('--gha-jobs', type=int, default=0) |
184 | 256 | parser.add_argument('cid', nargs='*', default=[]) |
185 | 257 | args = parser.parse_args() |
186 | 258 |
|
@@ -224,6 +296,11 @@ def _clear_queue(*a: object) -> None: |
224 | 296 | threads.append(t) |
225 | 297 | t.start() |
226 | 298 |
|
| 299 | + for _ in range(args.gha_jobs): |
| 300 | + t = threading.Thread(target=_gha_worker, args=(q,)) |
| 301 | + threads.append(t) |
| 302 | + t.start() |
| 303 | + |
227 | 304 | for t in threads: |
228 | 305 | t.join() |
229 | 306 |
|
|
0 commit comments