|
| 1 | +#!/usr/bin/env python3 |
| 2 | +"""Create a weekly standup Discussion with auto-gathered activity per contributor.""" |
| 3 | + |
| 4 | +import os |
| 5 | +import time |
| 6 | +from datetime import datetime, timezone, timedelta |
| 7 | +from pathlib import Path |
| 8 | + |
| 9 | +import requests |
| 10 | +import yaml |
| 11 | + |
# Required environment configuration. os.environ[...] (not .get) is
# deliberate: a missing variable fails fast with KeyError at import time
# instead of producing confusing API errors later.
REPO = os.environ["GITHUB_REPOSITORY"]
TOKEN = os.environ["STANDUP_TOKEN"]
CATEGORY_NODE_ID = os.environ["DISCUSSION_CATEGORY_NODE_ID"]
API = "https://api.github.com"
GRAPHQL = "https://api.github.com/graphql"
HEADERS = {
    # "token" scheme works for both classic and fine-grained PATs.
    "Authorization": f"token {TOKEN}",
    "Accept": "application/vnd.github+json",
}

# Contributor list lives two directories above this file
# (presumably the repo root, with this script in a subdirectory — confirm).
_CONFIG_PATH = Path(__file__).resolve().parent.parent / "standup-contributors.yml"
with open(_CONFIG_PATH) as _f:
    # Expected YAML shape: {"contributors": [{"username": "..."}, ...]}
    CONTRIBUTORS = [c["username"] for c in yaml.safe_load(_f)["contributors"]]
| 25 | + |
# Public repos to search — explicit list avoids leaking private repo
# data when the bot token has org membership.
REPOS = [
    "payjoin/rust-payjoin",
    "payjoin/payjoin.org",
    "payjoin/payjoindevkit.org",
    "payjoin/cja",
    "payjoin/cja-2",
    "payjoin/bitcoin-hpke",
    "payjoin/ohttp",
    "payjoin/bitcoin_uri",
    "payjoin/bitcoin-uri-ffi",
    "payjoin/research-docs",
    "payjoin/multiparty-protocol-docs",
    "payjoin/btsim",
    "payjoin/tx-indexer",
]

# Space-joined "repo:owner/name" qualifiers appended to every search query
# so results are scoped to the allow-listed repos above.
REPO_FILTER = " ".join(f"repo:{r}" for r in REPOS)
| 45 | + |
| 46 | + |
def graphql(query, variables=None):
    """Run a GraphQL query/mutation with retry on rate limits.

    GitHub signals primary rate limits with HTTP 403 and secondary
    (abuse) rate limits with HTTP 429, so both are retried.  When the
    server supplies a Retry-After header it is honored; otherwise an
    exponential backoff (1, 2, 4, 8, 16s) is used, for up to 5 attempts.

    Args:
        query: GraphQL document string.
        variables: Optional dict of GraphQL variables.

    Returns:
        The ``data`` payload of the GraphQL response.

    Raises:
        RuntimeError: if the response carries GraphQL-level errors.
        requests.HTTPError: on any other HTTP error, or when all retry
            attempts were rate-limited.
    """
    for attempt in range(5):
        resp = requests.post(
            GRAPHQL,
            headers=HEADERS,
            json={"query": query, "variables": variables or {}},
        )
        if resp.status_code in (403, 429):
            # Prefer the server-suggested delay when present and numeric.
            retry_after = resp.headers.get("Retry-After", "")
            wait = int(retry_after) if retry_after.isdigit() else 2**attempt
            print(f"Rate limited ({resp.status_code}), retrying in {wait}s...")
            time.sleep(wait)
            continue
        resp.raise_for_status()
        data = resp.json()
        # GraphQL can return 200 OK yet still fail; surface those errors.
        if "errors" in data:
            raise RuntimeError(f"GraphQL errors: {data['errors']}")
        return data["data"]
    # All attempts were rate-limited; raise from the final response.
    resp.raise_for_status()
| 66 | + |
| 67 | + |
def get_repo_node_id():
    """Look up the target repository's GraphQL node ID via the REST API."""
    response = requests.get(f"{API}/repos/{REPO}", headers=HEADERS)
    response.raise_for_status()
    payload = response.json()
    return payload["node_id"]
| 73 | + |
| 74 | + |
def create_discussion(title, body, repo_node_id):
    """Open a new Discussion in the configured category.

    Returns a ``(node_id, url)`` tuple for the created Discussion.
    """
    variables = {
        "repoId": repo_node_id,
        "categoryId": CATEGORY_NODE_ID,
        "title": title,
        "body": body,
    }
    result = graphql(
        """
        mutation($repoId: ID!, $categoryId: ID!, $title: String!, $body: String!) {
          createDiscussion(input: {
            repositoryId: $repoId,
            categoryId: $categoryId,
            title: $title,
            body: $body
          }) {
            discussion {
              id
              url
            }
          }
        }
        """,
        variables,
    )
    created = result["createDiscussion"]["discussion"]
    return created["id"], created["url"]
| 102 | + |
| 103 | + |
def add_discussion_comment(discussion_id, body):
    """Attach one top-level (threaded) comment to the given Discussion."""
    variables = {"discussionId": discussion_id, "body": body}
    graphql(
        """
        mutation($discussionId: ID!, $body: String!) {
          addDiscussionComment(input: {
            discussionId: $discussionId,
            body: $body
          }) {
            comment {
              id
            }
          }
        }
        """,
        variables,
    )
| 121 | + |
| 122 | + |
# Shared GraphQL search document used by search_issues(). ISSUE-type
# search returns a mixed stream of Issues and PullRequests, so both
# inline fragments are needed; results are capped at the first 30.
SEARCH_QUERY = """
query($q: String!) {
  search(query: $q, type: ISSUE, first: 30) {
    nodes {
      ... on PullRequest {
        id
        title
        url
        author { login }
      }
      ... on Issue {
        id
        title
        url
        author { login }
      }
    }
  }
}
"""
| 143 | + |
| 144 | + |
def search_issues(query):
    """Search issues/PRs across REPOS via GraphQL.

    Appends REPO_FILTER to *query* and returns a list of REST-like dicts
    with ``id``, ``title``, ``html_url`` and ``user.login`` keys.
    """
    scoped_query = f"{query} {REPO_FILTER}"
    result = graphql(SEARCH_QUERY, {"q": scoped_query})
    simplified = []
    for node in result["search"]["nodes"]:
        # Nodes of other search-result types come back empty; skip them.
        if not node:
            continue
        author = node.get("author")
        simplified.append(
            {
                "id": node["id"],
                "title": node["title"],
                "html_url": node["url"],
                # Deleted accounts have a null author; normalize to "".
                "user": {"login": author["login"] if author else ""},
            }
        )
    return simplified
| 164 | + |
| 165 | + |
def gather_activity(user, since_date):
    """Collect a contributor's past-week activity across the org.

    Returns a ``(merged_prs, reviewed_prs, issues_opened)`` tuple of
    search-result lists.
    """
    cutoff = since_date.strftime("%Y-%m-%d")

    merged = search_issues(f"author:{user} type:pr merged:>{cutoff}")
    reviewed = [
        pr
        # Self-authored PRs are dropped: already counted under "merged".
        for pr in search_issues(f"reviewed-by:{user} type:pr updated:>{cutoff}")
        if pr["user"]["login"] != user
    ]
    opened = search_issues(f"author:{user} type:issue created:>{cutoff}")

    return merged, reviewed, opened
| 182 | + |
| 183 | + |
def gather_potential_bottlenecks(user, since_date):
    """Return markdown bullets flagging likely bottlenecks for *user*.

    Two signals: recent open PRs with zero reviews, and open PRs where a
    reviewer requested changes (any age — an old stalled PR still counts).
    """
    cutoff = since_date.strftime("%Y-%m-%d")

    unreviewed = search_issues(
        f"author:{user} type:pr state:open review:none created:>{cutoff}"
    )
    stalled = search_issues(
        f"author:{user} type:pr state:open review:changes_requested"
    )

    signals = [
        f"- PR awaiting review: [{pr['title']}]({pr['html_url']})"
        for pr in unreviewed
    ]
    signals += [
        f"- PR has requested changes: [{pr['title']}]({pr['html_url']})"
        for pr in stalled
    ]
    return signals
| 206 | + |
| 207 | + |
def format_contributor_comment(
    user, merged_prs, reviewed_prs, issues_opened, bottlenecks
):
    """Build the markdown body for a contributor's standup thread."""

    def _bullet(item):
        # Markdown link bullet shared by all three activity sections.
        return f"- [{item['title']}]({item['html_url']})"

    parts = [f"## @{user}", "", "### Shipped"]

    sections = [
        ("**PRs merged:**", merged_prs),
        ("**PRs reviewed:**", reviewed_prs),
        ("**Issues opened:**", issues_opened),
    ]
    if any(items for _, items in sections):
        for heading, items in sections:
            if not items:
                continue
            parts += ["", heading]
            parts += [_bullet(item) for item in items]
    else:
        parts.append("_No activity found._")

    if bottlenecks:
        parts += ["", "_Auto-detected signals:_", *bottlenecks]

    return "\n".join(parts)
| 243 | + |
| 244 | + |
def main():
    """Gather per-contributor activity and post the weekly Discussion.

    Honors DRY_RUN: when set, prints every thread body and exits without
    creating anything.
    """
    now = datetime.now(timezone.utc)
    week_label = now.strftime("Week of %Y-%m-%d")
    cutoff = now - timedelta(days=7)

    dry_run = os.environ.get("DRY_RUN")

    # Build every thread body up front so a dry run can preview the full
    # output without performing any mutations.
    threads = []
    for user in CONTRIBUTORS:
        merged, reviewed, opened = gather_activity(user, cutoff)
        signals = gather_potential_bottlenecks(user, cutoff)
        thread_body = format_contributor_comment(
            user, merged, reviewed, opened, signals
        )
        threads.append((user, thread_body))

    if dry_run:
        for user, thread_body in threads:
            print(f"--- {user} ---\n{thread_body}\n")
        print("Dry run complete — nothing was created.")
        return

    repo_node_id = get_repo_node_id()
    title = f"Weekly Check-in: {week_label}"
    body = (
        "Weekly standup — each contributor has a thread below "
        "with auto-gathered activity.\n\n"
        "**Reply to your thread by end-of-day Monday (your timezone).** "
        "Copy the template below and fill it in:\n\n"
        "```markdown\n"
        "### Shipped\n"
        "<!-- Add anything the bot missed: design work, specs, "
        "conversations, off-GitHub contributions. Correct any mistakes. "
        "Skip if the bot covered everything. -->\n"
        "\n"
        "### Focus\n"
        "What are you working on this week?\n"
        "\n"
        "### Bottleneck\n"
        "What is the single biggest bottleneck in progress toward your "
        "greater goal?\n"
        "Name your goal. Name the constraint. Name who or what can "
        "unblock it.\n"
        "```"
    )
    discussion_id, discussion_url = create_discussion(title, body, repo_node_id)
    print(f"Created discussion: {discussion_url}")

    # One threaded reply per contributor so responses stay grouped.
    for user, thread_body in threads:
        add_discussion_comment(discussion_id, thread_body)
        print(f"  Added thread for @{user}")

    print("Done.")
| 299 | + |
| 300 | + |
| 301 | +if __name__ == "__main__": |
| 302 | + main() |
0 commit comments