|
#!/usr/bin/env bash
set -euo pipefail

# monday-update — Generate weekly Monday engineering update for Slack
# Dependencies: curl, jq, gh

ENV_FILE="$HOME/.monday-update.env"

# ---------- flags ----------
# No flags are supported yet; anything on the command line is an error.
while [[ $# -gt 0 ]]; do
  case "$1" in
    *)
      echo "Unknown flag: $1"
      echo "Usage: monday-update"
      exit 1
      ;;
  esac
done
|
|
|
# ---------- configuration ---------- |
|
|
|
# Load LINEAR_* settings from ENV_FILE into the current shell.
# If the file is missing, writes a commented template and exits 1 so the
# user can fill in LINEAR_API_KEY; also exits 1 when the key is left blank.
load_config() {
  if [[ -f "$ENV_FILE" ]]; then
    # shellcheck disable=SC1090
    source "$ENV_FILE"
    if [[ -z "${LINEAR_API_KEY:-}" ]]; then
      echo "Error: LINEAR_API_KEY is not set in $ENV_FILE"
      exit 1
    fi
    return
  fi

  echo "Missing $ENV_FILE — creating template..."
  cat > "$ENV_FILE" <<'TMPL'
# Monday Update configuration
# Get your API key at https://linear.app/settings/account/security
LINEAR_API_KEY=
# Auto-populated on first run:
LINEAR_USER_ID=
TMPL
  echo "Please add your LINEAR_API_KEY to $ENV_FILE and re-run."
  exit 1
}
|
|
|
# ---------- Linear API helpers ---------- |
|
|
|
# POST a GraphQL query to the Linear API and print the raw JSON response.
# $1 - query text. NOTE: the JSON payload is built by string interpolation,
#      so any double quotes inside the query must already be escaped as \"
#      by the caller (see fetch_issue / gather_data).
linear_query() {
  local gql="$1"
  local -a curl_args=(
    -s -X POST
    -H "Content-Type: application/json"
    -H "Authorization: $LINEAR_API_KEY"
    --data "{\"query\": \"$gql\"}"
  )
  curl "${curl_args[@]}" https://api.linear.app/graphql
}
|
|
|
# Resolve and cache the Linear user ID for the configured API key.
# No-op when LINEAR_USER_ID is already set (e.g. sourced from ENV_FILE).
# Otherwise queries the Linear `viewer`, persists the ID into ENV_FILE so
# later runs skip the round-trip, and reports the detected user.
# Exits 1 when the API key cannot resolve a viewer.
detect_linear_user() {
  if [[ -n "${LINEAR_USER_ID:-}" ]]; then
    return
  fi
  echo " Detecting Linear user ID..."
  local result
  result=$(linear_query "{ viewer { id name } }")
  LINEAR_USER_ID=$(echo "$result" | jq -r '.data.viewer.id')
  local name
  name=$(echo "$result" | jq -r '.data.viewer.name')
  if [[ -z "$LINEAR_USER_ID" || "$LINEAR_USER_ID" == "null" ]]; then
    echo "Error: Could not detect Linear user ID. Check your API key."
    exit 1
  fi
  if grep -q '^LINEAR_USER_ID=' "$ENV_FILE"; then
    # Rewrite via a temp file: `sed -i ''` is BSD/macOS-only (GNU sed would
    # treat '' as the script), so in-place editing is not portable. The rest
    # of the script already handles both BSD and GNU tools (see `date` use).
    local tmp
    tmp=$(mktemp)
    sed "s/^LINEAR_USER_ID=.*/LINEAR_USER_ID=$LINEAR_USER_ID/" "$ENV_FILE" > "$tmp"
    mv "$tmp" "$ENV_FILE"
  else
    echo "LINEAR_USER_ID=$LINEAR_USER_ID" >> "$ENV_FILE"
  fi
  echo " Cached user: $name ($LINEAR_USER_ID)"
}
|
|
|
# Fetch one Linear issue by identifier (e.g. VDC-320).
# $1 - issue identifier. Prints the raw GraphQL JSON response.
fetch_issue() {
  local wanted="$1"
  local query="{ issue(id: \\\"${wanted}\\\") { identifier title url state { name type } } }"
  linear_query "$query"
}
|
|
|
# ---------- status helpers ---------- |
|
|
|
# Map a Linear workflow state name to its Slack emoji shortcode.
# Unrecognized states fall back to the todo emoji.
status_emoji() {
  case "$1" in
    Done | Closed) echo ":done_linear:" ;;
    "In Progress") echo ":in_progress_linear:" ;;
    "In Review")   echo ":in_review_linear:" ;;
    Triage)        echo ":triage_linear:" ;;
    Backlog)       echo ":backlog_linear:" ;;
    Todo | *)      echo ":todo_linear:" ;;
  esac
}
|
|
|
# Numeric sort key for a state name: completed work first, then review,
# progress, todo, triage, backlog; unknown states sort last.
status_sort_order() {
  local rank=7
  case "$1" in
    Done | Closed) rank=1 ;;
    "In Review")   rank=2 ;;
    "In Progress") rank=3 ;;
    Todo)          rank=4 ;;
    Triage)        rank=5 ;;
    Backlog)       rank=6 ;;
  esac
  echo "$rank"
}
|
|
|
# ---------- data structures ----------
# Items file format: SECTION|SORT|EMOJI|ISSUE_ID|TITLE|PR_REF|PR_URL|MERGED|ISSUE_URL
# PR_REF: e.g. "PR #23056" or empty
# PR_URL: e.g. "https://github.com/..." or empty
# MERGED: "merged" or empty

# Scratch files for collected items and the issue→PR map; removed on exit.
ITEMS_FILE="$(mktemp)"
PR_MAP_FILE="$(mktemp)"
trap 'rm -f "$ITEMS_FILE" "$PR_MAP_FILE"' EXIT
|
|
|
# Append one pipe-delimited record to ITEMS_FILE.
# $1 section   $2 sort key   $3 emoji   $4 issue id   $5 title
# $6 PR ref (optional)   $7 PR url (optional)   $8 "merged" flag (optional)
# $9 issue url
# NOTE: fields must not themselves contain '|' or the record breaks.
add_item() {
  local record
  record=$(printf '%s|%s|%s|%s|%s|%s|%s|%s|%s' \
    "$1" "$2" "$3" "$4" "$5" "${6:-}" "${7:-}" "${8:-}" "$9")
  printf '%s\n' "$record" >> "$ITEMS_FILE"
}
|
|
|
# Delete line $1 (1-based) from ITEMS_FILE in place.
remove_item_by_num() {
  local target="$1" scratch
  scratch=$(mktemp)
  sed "${target}d" "$ITEMS_FILE" > "$scratch"
  mv "$scratch" "$ITEMS_FILE"
}
|
|
|
# Print the number of records in ITEMS_FILE (stripped of the padding that
# BSD wc adds to its output).
count_items() {
  local n
  n=$(wc -l < "$ITEMS_FILE")
  echo "${n//[[:space:]]/}"
}
|
|
|
# PR map: ISSUE_ID|PR_REF|PR_URL|MERGED
# Record the PR associated with an issue. First write wins — gather_data
# records merged PRs before open ones, so merged entries take precedence.
pr_map_set() {
  local key="$1" ref="$2" url="$3" merged_flag="${4:-}"
  grep -q "^${key}|" "$PR_MAP_FILE" 2>/dev/null && return
  printf '%s|%s|%s|%s\n' "$key" "$ref" "$url" "$merged_flag" >> "$PR_MAP_FILE"
}
|
|
|
# Print the PR reference (e.g. "PR #23056") for an issue, or nothing when
# the issue has no mapped PR.
pr_map_get_ref() {
  awk -F'|' -v id="$1" '$1 == id { print $2; exit }' "$PR_MAP_FILE" 2>/dev/null || true
}
|
|
|
# Print the PR URL for an issue, or nothing when the issue has no mapped PR.
pr_map_get_url() {
  awk -F'|' -v id="$1" '$1 == id { print $3; exit }' "$PR_MAP_FILE" 2>/dev/null || true
}
|
|
|
# Print "merged" for an issue whose mapped PR is merged, else nothing.
pr_map_get_merged() {
  awk -F'|' -v id="$1" '$1 == id { print $4; exit }' "$PR_MAP_FILE" 2>/dev/null || true
}
|
|
|
# Succeed (exit 0) iff the PR map has an entry for the given issue id.
pr_map_has() {
  local key="$1"
  [[ -f "$PR_MAP_FILE" ]] && grep -q "^${key}|" "$PR_MAP_FILE"
}
|
|
|
# ---------- data gathering ---------- |
|
|
|
# Populate ITEMS_FILE and PR_MAP_FILE from Linear, local git history, and
# GitHub PRs:
#   1. Linear issues assigned to us, completed in the last 7 days → last_week.
#   2. Issue IDs mined from our recent commits / PR branch names that are
#      still in a "started" state → last_week.
#   3. Merged + open PRs (via gh, title-tagged "[ABC-123]") → PR map, used
#      to annotate items with PR links.
#   4. Issues currently In Progress / In Review → this_week.
# Finally sorts the file by section, then status rank, then issue id.
# Reads/sets LINEAR_USER_ID (via detect_linear_user). Requires jq, git, gh.
gather_data() {
  echo "Gathering data..."

  detect_linear_user

  # 1. Completed issues from Linear (last 7 days)
  local completed_json
  completed_json=$(linear_query "{ issues(filter: { assignee: { id: { eq: \\\"$LINEAR_USER_ID\\\" } }, state: { type: { eq: \\\"completed\\\" } }, completedAt: { gt: \\\"-P1W\\\" } }) { nodes { identifier title url state { name type } } } }")
  local completed_count
  completed_count=$(echo "$completed_json" | jq '.data.issues.nodes | length')
  echo " ✓ Linear: $completed_count completed issues"

  local completed_ids=""
  local i
  # C-style loops instead of `seq 0 $((count - 1))`: when count is 0,
  # BSD/macOS seq counts DOWN (emitting "0" and "-1"), which produced bogus
  # iterations over nonexistent jq indices.
  for ((i = 0; i < completed_count; i++)); do
    local id title url status
    id=$(echo "$completed_json" | jq -r ".data.issues.nodes[$i].identifier")
    title=$(echo "$completed_json" | jq -r ".data.issues.nodes[$i].title")
    url=$(echo "$completed_json" | jq -r ".data.issues.nodes[$i].url")
    status=$(echo "$completed_json" | jq -r ".data.issues.nodes[$i].state.name")
    completed_ids="$completed_ids $id"
    add_item "last_week" "$(status_sort_order "$status")" "$(status_emoji "$status")" "$id" "$title" "" "" "" "$url"
  done

  # 2. Git: extract issue IDs from commit messages AND PR branch names
  local git_author
  git_author=$(git config user.name 2>/dev/null || echo "")
  local git_issue_ids=""
  if [[ -n "$git_author" ]]; then
    local commit_ids
    commit_ids=$(git log --since="7 days ago" --author="$git_author" --all --format="%s" 2>/dev/null \
      | grep -oiE '[A-Z]+-[0-9]+' \
      | sort -u || true)
    local branch_ids
    branch_ids=$(gh pr list --state all --author @me --limit 20 \
      --json headRefName,updatedAt \
      --jq '.[].headRefName' 2>/dev/null \
      | grep -oiE '[A-Z]+-[0-9]+' \
      | sort -u || true)
    git_issue_ids=$(printf '%s\n%s' "$commit_ids" "$branch_ids" | sort -u | tr '\n' ' ')
  fi

  local extra_count=0
  local gid
  for gid in $git_issue_ids; do
    if echo "$completed_ids" | grep -qw "$gid"; then
      continue  # already captured in step 1
    fi
    local issue_json
    # Reuse fetch_issue (this was previously an inline duplicate of it).
    issue_json=$(fetch_issue "$gid")
    local issue_id
    issue_id=$(echo "$issue_json" | jq -r '.data.issue.identifier // empty')
    if [[ -z "$issue_id" ]]; then
      continue  # not a real Linear issue id
    fi
    local issue_title issue_url issue_status state_type
    issue_title=$(echo "$issue_json" | jq -r '.data.issue.title')
    issue_url=$(echo "$issue_json" | jq -r '.data.issue.url')
    issue_status=$(echo "$issue_json" | jq -r '.data.issue.state.name')
    state_type=$(echo "$issue_json" | jq -r '.data.issue.state.type')
    if [[ "$state_type" == "canceled" ]]; then
      continue
    fi
    # Only include started issues (in progress / in review) for last week
    if [[ "$state_type" != "started" ]]; then
      continue
    fi
    add_item "last_week" "$(status_sort_order "$issue_status")" "$(status_emoji "$issue_status")" "$issue_id" "$issue_title" "" "" "" "$issue_url"
    completed_ids="$completed_ids $issue_id"
    extra_count=$((extra_count + 1))
  done
  echo " ✓ Git: $extra_count additional in-progress issues"

  # 3. Merged PRs — build PR map (recorded before open PRs so merged wins)
  local pr_json since_date
  # BSD date first, GNU date as fallback.
  since_date=$(date -v-7d +%Y-%m-%dT00:00:00Z 2>/dev/null || date -d '7 days ago' +%Y-%m-%dT00:00:00Z)
  pr_json=$(gh pr list --state merged --author @me --limit 50 \
    --json number,title,mergedAt,url \
    --jq "[.[] | select(.mergedAt >= \"$since_date\")]" 2>/dev/null || echo "[]")
  local pr_count
  pr_count=$(echo "$pr_json" | jq 'length')
  echo " ✓ GitHub: $pr_count merged PRs"

  for ((i = 0; i < pr_count; i++)); do
    local pr_num pr_title pr_url pr_issue_id
    pr_num=$(echo "$pr_json" | jq -r ".[$i].number")
    pr_title=$(echo "$pr_json" | jq -r ".[$i].title")
    pr_url=$(echo "$pr_json" | jq -r ".[$i].url")
    # Issue id comes from a "[ABC-123]" tag in the PR title.
    pr_issue_id=$(echo "$pr_title" | grep -oE '\[([A-Z]+-[0-9]+)\]' | tr -d '[]' | head -1 || true)
    if [[ -n "$pr_issue_id" ]]; then
      pr_map_set "$pr_issue_id" "PR #${pr_num}" "$pr_url" "merged"
    fi
  done

  # Open PRs
  local open_pr_json
  open_pr_json=$(gh pr list --state open --author @me --limit 50 \
    --json number,title,url 2>/dev/null || echo "[]")
  local open_pr_count
  open_pr_count=$(echo "$open_pr_json" | jq 'length')
  for ((i = 0; i < open_pr_count; i++)); do
    local pr_num pr_title pr_url pr_issue_id
    pr_num=$(echo "$open_pr_json" | jq -r ".[$i].number")
    pr_title=$(echo "$open_pr_json" | jq -r ".[$i].title")
    pr_url=$(echo "$open_pr_json" | jq -r ".[$i].url")
    pr_issue_id=$(echo "$pr_title" | grep -oE '\[([A-Z]+-[0-9]+)\]' | tr -d '[]' | head -1 || true)
    if [[ -n "$pr_issue_id" ]]; then
      pr_map_set "$pr_issue_id" "PR #${pr_num}" "$pr_url" ""
    fi
  done

  # Annotate last_week items with PR info
  local tmp_file
  tmp_file=$(mktemp)
  local section sort emoji issue_id title pr_ref merged
  while IFS='|' read -r section sort emoji issue_id title pr_ref pr_url merged issue_url; do
    if pr_map_has "$issue_id"; then
      pr_ref=$(pr_map_get_ref "$issue_id")
      pr_url=$(pr_map_get_url "$issue_id")
      merged=$(pr_map_get_merged "$issue_id")
    fi
    echo "${section}|${sort}|${emoji}|${issue_id}|${title}|${pr_ref}|${pr_url}|${merged}|${issue_url}" >> "$tmp_file"
  done < "$ITEMS_FILE"
  mv "$tmp_file" "$ITEMS_FILE"

  # 4. This week — all assigned issues in active states
  local active_json
  local state_filter="[\\\"In Review\\\", \\\"In Progress\\\"]"
  active_json=$(linear_query "{ issues(filter: { assignee: { id: { eq: \\\"$LINEAR_USER_ID\\\" } }, state: { name: { in: $state_filter } } }) { nodes { identifier title url state { name } } } }")
  local active_count
  active_count=$(echo "$active_json" | jq '.data.issues.nodes | length')
  echo " ✓ Linear: $active_count planned issues"

  for ((i = 0; i < active_count; i++)); do
    local id title url status pr_ref pr_url
    id=$(echo "$active_json" | jq -r ".data.issues.nodes[$i].identifier")
    title=$(echo "$active_json" | jq -r ".data.issues.nodes[$i].title")
    url=$(echo "$active_json" | jq -r ".data.issues.nodes[$i].url")
    status=$(echo "$active_json" | jq -r ".data.issues.nodes[$i].state.name")
    pr_ref=""
    pr_url=""
    if pr_map_has "$id"; then
      pr_ref=$(pr_map_get_ref "$id")
      pr_url=$(pr_map_get_url "$id")
    fi
    add_item "this_week" "$(status_sort_order "$status")" "$(status_emoji "$status")" "$id" "$title" "$pr_ref" "$pr_url" "" "$url"
  done

  # Sort by section, then status rank, then issue id (version sort, desc)
  local sorted
  sorted=$(mktemp)
  sort -t'|' -k1,1 -k2,2n -k4,4Vr "$ITEMS_FILE" > "$sorted"
  mv "$sorted" "$ITEMS_FILE"
}
|
|
|
# ---------- display ---------- |
|
|
|
# Print the numbered item list to stdout, grouped under LAST WEEK / THIS
# WEEK headers, with PR and merged annotations where present.
display_items() {
  local n=1 current=""
  local section sort emoji issue_id title pr_ref pr_url merged issue_url
  while IFS='|' read -r section sort emoji issue_id title pr_ref pr_url merged issue_url; do
    if [[ "$section" != "$current" ]]; then
      echo ""
      if [[ "$section" == "last_week" ]]; then
        echo "=== LAST WEEK ==="
      else
        echo "=== THIS WEEK ==="
      fi
      current="$section"
    fi

    local suffix=""
    if [[ -n "$pr_ref" ]]; then
      suffix=" ($pr_ref)"
      [[ "$merged" == "merged" ]] && suffix+=" (merged)"
    fi
    printf "  %2d. %s\n" "$n" "$emoji $issue_id: $title$suffix"
    n=$((n + 1))
  done < "$ITEMS_FILE"
  echo ""
}
|
|
|
# ---------- format for Slack ---------- |
|
|
|
# Render the item list as minimal HTML on stdout. Numbering restarts per
# section; issue ids and PR refs become links. Used for the rich-text
# clipboard paste into Slack.
format_html() {
  local current="" idx=0
  local section sort emoji issue_id title pr_ref pr_url merged issue_url

  echo '<html><body>'

  while IFS='|' read -r section sort emoji issue_id title pr_ref pr_url merged issue_url; do
    if [[ "$section" != "$current" ]]; then
      [[ -n "$current" ]] && echo "<br>"
      case "$section" in
        last_week) echo '<b>Last week:</b><br>' ;;
        *)         echo '<b>This week:</b><br>' ;;
      esac
      current="$section"
      idx=0
    fi
    idx=$((idx + 1))

    local anchor="<a href=\"${issue_url}\">${issue_id}</a>"
    local tail=""
    if [[ -n "$pr_ref" ]]; then
      tail=" (<a href=\"${pr_url}\">${pr_ref}</a>)"
      [[ "$merged" == "merged" ]] && tail+=" (merged)"
    fi
    echo "${idx}. ${emoji} ${anchor}: ${title}${tail}<br>"
  done < "$ITEMS_FILE"

  echo '</body></html>'
}
|
|
|
# ---------- interactive loop ---------- |
|
|
|
# Prompt for a section and one or more issue IDs, fetch each from Linear,
# and append them to ITEMS_FILE (re-sorting afterwards). Duplicates and
# unknown issues are skipped with a message.
interactive_add() {
  local choice target_section
  read -rp "Section - [l]ast week / [t]his week: " choice
  case "$choice" in
    l|L) target_section="last_week" ;;
    t|T) target_section="this_week" ;;
    *) echo " Invalid choice."; return ;;
  esac

  local raw wanted
  read -rp "Issue IDs (e.g. VDC-320 VDC-198): " raw
  wanted=$(echo "$raw" | tr ',' ' ' | tr '[:lower:]' '[:upper:]')

  local want
  for want in $wanted; do
    # Skip ids already recorded in the chosen section.
    if grep -q "^${target_section}.*|${want}|" "$ITEMS_FILE"; then
      echo " ✗ $want is already in the list."
      continue
    fi

    echo " Fetching $want from Linear..."
    local resp canonical
    resp=$(fetch_issue "$want")
    canonical=$(echo "$resp" | jq -r '.data.issue.identifier // empty')
    if [[ -z "$canonical" ]]; then
      echo " ✗ Issue $want not found in Linear."
      continue
    fi

    local title url status
    title=$(echo "$resp" | jq -r '.data.issue.title')
    url=$(echo "$resp" | jq -r '.data.issue.url')
    status=$(echo "$resp" | jq -r '.data.issue.state.name')
    local emoji
    emoji=$(status_emoji "$status")

    local ref="" prurl=""
    if pr_map_has "$canonical"; then
      ref=$(pr_map_get_ref "$canonical")
      prurl=$(pr_map_get_url "$canonical")
    fi

    add_item "$target_section" "$(status_sort_order "$status")" "$emoji" "$canonical" "$title" "$ref" "$prurl" "" "$url"
    echo " ✓ Added: $canonical ($status) — $title"
  done

  # Keep sections grouped and status-sorted after manual additions.
  local resorted
  resorted=$(mktemp)
  sort -t'|' -k1,1 -k2,2n -k4,4Vr "$ITEMS_FILE" > "$resorted"
  mv "$resorted" "$ITEMS_FILE"
}
|
|
|
# Prompt for item numbers and delete those records from ITEMS_FILE.
# Numbers are processed in descending order so earlier deletions do not
# shift the line numbers of later ones.
interactive_remove() {
  local total chosen
  total=$(count_items)
  read -rp "Item numbers to remove (e.g. 3 5 7): " chosen
  chosen=${chosen//,/ }

  local n
  for n in $(echo "$chosen" | tr ' ' '\n' | sort -rn); do
    if [[ ! "$n" =~ ^[0-9]+$ ]] || [[ "$n" -lt 1 ]] || [[ "$n" -gt "$total" ]]; then
      echo " ✗ Invalid item number: $n"
      continue
    fi
    local victim
    victim=$(sed -n "${n}p" "$ITEMS_FILE" | cut -d'|' -f4)
    remove_item_by_num "$n"
    echo " ✓ Removed: $victim"
  done
}
|
|
|
# Show the current list and process add/remove commands until "done".
interactive_loop() {
  local action
  while :; do
    display_items
    read -rp "[a]dd / [r]emove / [d]one > " action
    case "$action" in
      a|A|add)    interactive_add ;;
      r|R|remove) interactive_remove ;;
      d|D|done)   return ;;
      *)          echo " Invalid option. Use a, r, or d." ;;
    esac
  done
}
|
|
|
# ---------- main ---------- |
|
|
|
# Copy an HTML file to the clipboard as rich text. macOS-only: shells out
# to osascript running JXA (JavaScript for Automation) with AppKit's
# NSPasteboard.
# $1 - path to an HTML file.
# Two flavors are placed on the pasteboard: the raw HTML ("public.html")
# for rich-text targets like Slack, and a plain-text rendering
# ("public.utf8-plain-text") as a fallback for plain-text targets.
copy_richtext() {
  local html_file="$1"
  osascript -l JavaScript -e '
  function run(argv) {
    ObjC.import("AppKit");
    ObjC.import("Foundation");
    var path = $.NSString.alloc.initWithUTF8String(argv[0]);
    var htmlData = $.NSData.dataWithContentsOfFile(path);
    // Convert HTML to attributed string to extract plain text
    var attrStr = $.NSAttributedString.alloc.initWithHTMLDocumentAttributes(htmlData, null);
    var plainText = attrStr.string;
    var plainData = plainText.dataUsingEncoding($.NSUTF8StringEncoding);
    var pb = $.NSPasteboard.generalPasteboard;
    pb.clearContents;
    pb.setDataForType(htmlData, "public.html");
    pb.setDataForType(plainData, "public.utf8-plain-text");
  }
  ' -- "$html_file"
}
|
|
|
# Entry point: load config, gather data, let the user edit the list
# interactively, then render to HTML and copy it to the clipboard so it
# can be pasted into Slack.
main() {
  load_config
  gather_data
  interactive_loop

  local out_html
  out_html=$(mktemp /tmp/monday-update-XXXXXX.html)
  format_html > "$out_html"
  copy_richtext "$out_html"
  rm -f "$out_html"

  echo ""
  echo "✓ Copied to clipboard — Paste (⌘V) into Slack"
}

main