summaryrefslogtreecommitdiff
path: root/cmd/gen-imworkingon
diff options
context:
space:
mode:
Diffstat (limited to 'cmd/gen-imworkingon')
-rw-r--r--cmd/gen-imworkingon/calendar.go122
-rw-r--r--cmd/gen-imworkingon/forge_forgejo.go185
-rw-r--r--cmd/gen-imworkingon/forge_gerrit.go160
-rw-r--r--cmd/gen-imworkingon/forge_github.go208
-rw-r--r--cmd/gen-imworkingon/forge_gitlab.go173
-rw-r--r--cmd/gen-imworkingon/forge_part_git.go82
-rw-r--r--cmd/gen-imworkingon/forge_part_pipermail.go193
-rw-r--r--cmd/gen-imworkingon/gitcache.go68
-rw-r--r--cmd/gen-imworkingon/imworkingon.html.tmpl165
-rw-r--r--cmd/gen-imworkingon/main.go189
-rw-r--r--cmd/gen-imworkingon/src_contribs.go223
-rw-r--r--cmd/gen-imworkingon/src_contribs_test.go39
-rw-r--r--cmd/gen-imworkingon/src_mastodon.go41
-rw-r--r--cmd/gen-imworkingon/src_tags.go25
-rw-r--r--cmd/gen-imworkingon/src_upstreams.go48
15 files changed, 1921 insertions, 0 deletions
diff --git a/cmd/gen-imworkingon/calendar.go b/cmd/gen-imworkingon/calendar.go
new file mode 100644
index 0000000..29c3318
--- /dev/null
+++ b/cmd/gen-imworkingon/calendar.go
@@ -0,0 +1,122 @@
+package main
+
+import (
+ "time"
+)
+
+//////////////////////////////////////////////////////////////////////
+
+// Date is a timezone-less calendar date (year/month/day).
+type Date struct {
+ Year int
+ Month time.Month
+ Day int
+}
+
+// DateOf returns the Date on which the instant t falls, in t's location.
+func DateOf(t time.Time) Date {
+ y, m, d := t.Date()
+ return Date{Year: y, Month: m, Day: d}
+}
+
+// Time returns midnight at the start of d, in the local timezone.
+func (d Date) Time() time.Time {
+ return time.Date(d.Year, d.Month, d.Day, 0, 0, 0, 0, time.Local)
+}
+
+// AddDays returns the date delta days after d (delta may be negative).
+func (d Date) AddDays(delta int) Date {
+ return DateOf(d.Time().AddDate(0, 0, delta))
+}
+
+// Weekday returns the day of the week on which d falls.
+func (d Date) Weekday() time.Weekday {
+ return d.Time().Weekday()
+}
+
+// Cmp compares two Dates chronologically, returning -1 if a is before
+// b, +1 if a is after b, and 0 if they are the same date.
+func (a Date) Cmp(b Date) int {
+ if a.Year != b.Year {
+ if a.Year < b.Year {
+ return -1
+ }
+ return 1
+ }
+ if a.Month != b.Month {
+ if a.Month < b.Month {
+ return -1
+ }
+ return 1
+ }
+ if a.Day != b.Day {
+ if a.Day < b.Day {
+ return -1
+ }
+ return 1
+ }
+ return 0
+}
+
+//////////////////////////////////////////////////////////////////////
+
+// CalendarDay is one cell of a Calendar: a Date plus the caller's
+// per-day payload.
+type CalendarDay[T any] struct {
+ Date
+ Data T
+}
+
+//////////////////////////////////////////////////////////////////////
+
+// keyed by time.Weekday
+//
+// Cells outside the calendar's date range are left as the zero value
+// (a zero Date and a zero T).
+type CalendarWeek[T any] [7]CalendarDay[T]
+
+//////////////////////////////////////////////////////////////////////
+
+// must be sorted, must be non-sparse
+type Calendar[T any] []CalendarWeek[T]
+
+// NumWeekdaysInMonth counts how many occurrences of the given weekday
+// fall within target's year+month, as recorded in the calendar.
+// Because a Calendar is sorted and non-sparse, the scan can return
+// early as soon as it moves past the target month.
+func (c Calendar[T]) NumWeekdaysInMonth(weekday time.Weekday, target Date) int {
+ num := 0
+ for _, w := range c {
+ // A zero Date marks padding cells before the first or after
+ // the last recorded day; skip them.
+ if w[weekday].Date == (Date{}) {
+ continue
+ }
+ switch {
+ case w[weekday].Year == target.Year:
+ switch {
+ case w[weekday].Month == target.Month:
+ num++
+ case w[weekday].Month > target.Month:
+ // Past the target month; nothing later can match.
+ return num
+ }
+ case w[weekday].Year > target.Year:
+ // Past the target year; nothing later can match.
+ return num
+ }
+ }
+ return num
+}
+
+//////////////////////////////////////////////////////////////////////
+
+// BuildCalendar arranges things into week rows spanning from the
+// oldest thing's date through today. At most one thing is kept per
+// date (the last one in the input wins); dates with no thing get the
+// zero T.
+func BuildCalendar[T any](things []T, dateOfThing func(T) Date) Calendar[T] {
+ if len(things) == 0 {
+ return nil
+ }
+
+ newestDate := DateOf(time.Now().Local())
+
+ oldestDate := dateOfThing(things[0])
+ byDate := make(map[Date]T, len(things))
+ for _, thing := range things {
+ date := dateOfThing(thing)
+ if oldestDate.Cmp(date) > 0 {
+ oldestDate = date
+ }
+ byDate[date] = thing
+ }
+
+ var ret Calendar[T]
+ for date := oldestDate; date.Cmp(newestDate) <= 0; date = date.AddDays(1) {
+ // Start a new week row on the first iteration and on every
+ // Sunday (time.Weekday 0).
+ if len(ret) == 0 || date.Weekday() == 0 {
+ ret = append(ret, CalendarWeek[T]{})
+ }
+ ret[len(ret)-1][date.Weekday()] = CalendarDay[T]{
+ Date: date,
+ Data: byDate[date],
+ }
+ }
+ return ret
+}
diff --git a/cmd/gen-imworkingon/forge_forgejo.go b/cmd/gen-imworkingon/forge_forgejo.go
new file mode 100644
index 0000000..34ec767
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_forgejo.go
@@ -0,0 +1,185 @@
+package main
+
+import (
+ "fmt"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+// reForgejoPR matches the web URL of a Forgejo pull request, capturing
+// (1) the host, (2) the owner, (3) the repo, and (4) the PR number.
+var reForgejoPR = regexp.MustCompile(`^https://([^/]+)/([^/?#]+)/([^/?#]+)/pulls/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+// Forgejo implements [Forge] for a single Forgejo instance, identified
+// by its host name ("authority").
+type Forgejo struct {
+ Authority string
+}
+
+var _ Forge = Forgejo{}
+
+// FetchStatus implements [Forge], reporting the open/merged/released
+// state of the PR identified by the URLs (URLs for other hosts are
+// ignored).
+func (f Forgejo) FetchStatus(urls []string) (string, error) {
+ return fetchPerURLStatus(urls, func(u string) (string, error) {
+ m := reForgejoPR.FindStringSubmatch(u)
+ if m == nil || m[1] != f.Authority {
+ return "", nil
+ }
+ authority := m[1]
+ user := m[2]
+ repo := m[3]
+ prnum := m[4]
+
+ urlStr := "https://" + authority + "/api/v1/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ // State values are "open" and "closed".
+ State string `json:"state"`
+ Merged bool `json:"merged"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+ ret := obj.State
+ if obj.Merged {
+ ret = statusMerged
+ // If the merge commit has made it into a tag, report the
+ // more specific "released in <tag>" status.
+ tag, err := getGitTagThatContainsAll("https://"+authority+"/"+user+"/"+repo, obj.MergeCommitSha)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ }
+
+ return ret, nil
+ })
+}
+
+// FetchSubmittedAt implements [Forge]; a PR's submission time is its
+// creation time.
+func (f Forgejo) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return fetchPerURLSubmittedAt(urls, func(u string) (time.Time, error) {
+ m := reForgejoPR.FindStringSubmatch(u)
+ if m == nil || m[1] != f.Authority {
+ return time.Time{}, nil
+ }
+ authority := m[1]
+ user := m[2]
+ repo := m[3]
+ prnum := m[4]
+
+ urlStr := "https://" + authority + "/api/v1/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ CreatedAt time.Time `json:"created_at"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.CreatedAt, nil
+ })
+}
+
+// FetchLastUpdated implements [Forge]: the PR's "updated_at" time,
+// plus (when identifiable) the user who performed that last activity,
+// found by matching the timestamp against the PR's creation, merge,
+// comments, and timeline events.
+func (f Forgejo) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ return fetchPerURLLastUpdated(urls, func(u string) (time.Time, User, error) {
+ m := reForgejoPR.FindStringSubmatch(u)
+ if m == nil || m[1] != f.Authority {
+ return time.Time{}, User{}, nil
+ }
+ authority := m[1]
+ user := m[2]
+ repo := m[3]
+ prnum := m[4]
+
+ apiBase := "https://" + authority + "/api/v1/repos/" + user + "/" + repo
+
+ var obj struct {
+ UpdatedAt time.Time `json:"updated_at"`
+
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+
+ MergedAt time.Time `json:"merged_at"`
+ MergedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"merged_by"`
+ }
+ if err := httpcache.GetJSON(apiBase+"/pulls/"+prnum, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+
+ retUpdatedAt := obj.UpdatedAt
+ var retUser User
+
+ if retUser == (User{}) && withinOneSecond(obj.CreatedAt, retUpdatedAt) {
+ retUser.Name = obj.CreatedBy.Login
+ retUser.URL = obj.CreatedBy.HTMLURL
+ }
+ if retUser == (User{}) && withinOneSecond(obj.MergedAt, retUpdatedAt) {
+ retUser.Name = obj.MergedBy.Login
+ retUser.URL = obj.MergedBy.HTMLURL
+ }
+ if retUser == (User{}) {
+ // "normal" comments
+ //
+ // BUG fix: this used to query api.github.com, which cannot
+ // know about a PR hosted on a Forgejo instance; query the
+ // instance's own API instead.
+ var comments []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetPaginatedJSON(apiBase+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, comment := range comments {
+ if withinOneSecond(comment.UpdatedAt, retUpdatedAt) {
+ retUser.Name = comment.User.Login
+ retUser.URL = comment.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ // Timeline events (reviews, labels, merges, ...). Forgejo
+ // does not have GitHub's separate "review comments" and
+ // "issue events" endpoints; its timeline endpoint covers
+ // both.
+ var events []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetPaginatedJSON(apiBase+"/issues/"+prnum+"/timeline", nil, &events, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, event := range events {
+ if withinOneSecond(event.UpdatedAt, retUpdatedAt) {
+ retUser.Name = event.User.Login
+ retUser.URL = event.User.HTMLURL
+ break
+ }
+ }
+ }
+
+ return retUpdatedAt, retUser, nil
+ })
+}
diff --git a/cmd/gen-imworkingon/forge_gerrit.go b/cmd/gen-imworkingon/forge_gerrit.go
new file mode 100644
index 0000000..05f0386
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_gerrit.go
@@ -0,0 +1,160 @@
+package main
+
+import (
+ "encoding"
+ "encoding/json"
+ "fmt"
+ "net/url"
+ "regexp"
+ "strings"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+// httpGetGerritJSON is like [httpcache.GetJSON], but
+// https://gerrit-review.googlesource.com/Documentation/rest-api.html#output
+func httpGetGerritJSON(u string, hdr map[string]string, out any) error {
+ str, err := httpcache.Get(u, hdr)
+ if err != nil {
+ return err
+ }
+ // Gerrit prefixes every JSON response with a ")]}'" guard line to
+ // defeat XSSI attacks; discard that first line before decoding.
+ if _, body, ok := strings.Cut(str, "\n"); ok {
+ str = body
+ }
+ return json.Unmarshal([]byte(str), out)
+}
+
+// GerritTimeFormat is the timestamp layout used by the Gerrit REST
+// API.
+const GerritTimeFormat = "2006-01-02 15:04:05.000000000"
+
+// GerritTime wraps a time.Time so that it (un)marshals using
+// GerritTimeFormat.
+type GerritTime struct {
+ Val time.Time
+}
+
+var (
+ _ fmt.Stringer = GerritTime{}
+ _ encoding.TextMarshaler = GerritTime{}
+ _ encoding.TextUnmarshaler = (*GerritTime)(nil)
+)
+
+// String implements [fmt.Stringer].
+func (t GerritTime) String() string {
+ return t.Val.Format(GerritTimeFormat)
+}
+
+// MarshalText implements [encoding.TextMarshaler].
+func (t GerritTime) MarshalText() ([]byte, error) {
+ return []byte(t.String()), nil
+}
+
+// UnmarshalText implements [encoding.TextUnmarshaler].
+func (t *GerritTime) UnmarshalText(data []byte) error {
+ val, err := time.Parse(GerritTimeFormat, string(data))
+ if err != nil {
+ return err
+ }
+ t.Val = val
+ return nil
+}
+
+// Gerrit implements [Forge] for googlesource.com-hosted Gerrit
+// instances.
+type Gerrit struct{}
+
+var _ Forge = Gerrit{}
+
+// reGoogleGerritCL matches a Gerrit change (CL) web URL, capturing
+// (1) the host, (2) the project, and (3) the change number.
+//
+// Fix: anchored with a leading `^` for consistency with the other
+// forges' URL regexps; previously any string merely *ending* in a
+// Gerrit CL URL would match.
+var reGoogleGerritCL = regexp.MustCompile(`^https://([a-z]+-review\.googlesource\.com)/c/([^?#]+)/\+/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+// FetchStatus implements [Forge], mapping Gerrit change states to the
+// shared status strings.
+func (Gerrit) FetchStatus(urls []string) (string, error) {
+ return fetchPerURLStatus(urls, func(u string) (string, error) {
+ m := reGoogleGerritCL.FindStringSubmatch(u)
+ if m == nil {
+ return "", nil
+ }
+ authority := m[1]
+ projectID := m[2]
+ changeID := m[3]
+
+ urlStr := "https://" + authority + "/changes/" + url.PathEscape(projectID) + "~" + changeID + "?o=MESSAGES&o=DETAILED_ACCOUNTS"
+
+ var obj struct {
+ Status string `json:"status"`
+ }
+ if err := httpGetGerritJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+ // https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-info
+ //
+ // Use the shared status constants rather than bare string
+ // literals, for consistency with the other Forge
+ // implementations.
+ switch obj.Status {
+ case "NEW":
+ return statusOpen, nil
+ case "MERGED":
+ return statusMerged, nil
+ case "ABANDONED":
+ return "closed", nil
+ }
+ return "", nil
+ })
+}
+
+// FetchSubmittedAt implements [Forge]; a change's submission time is
+// its "created" timestamp.
+func (Gerrit) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return fetchPerURLSubmittedAt(urls, func(u string) (time.Time, error) {
+ m := reGoogleGerritCL.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, nil
+ }
+ authority := m[1]
+ projectID := m[2]
+ changeID := m[3]
+
+ urlStr := "https://" + authority + "/changes/" + url.PathEscape(projectID) + "~" + changeID + "?o=MESSAGES&o=DETAILED_ACCOUNTS"
+
+ var obj struct {
+ Created GerritTime `json:"created"`
+ }
+ if err := httpGetGerritJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.Created.Val, nil
+ })
+}
+
+// FetchLastUpdated implements [Forge]: the change's "updated"
+// timestamp, attributed to the author of whichever change message
+// carries (approximately) that same timestamp.
+func (Gerrit) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ return fetchPerURLLastUpdated(urls, func(u string) (time.Time, User, error) {
+ m := reGoogleGerritCL.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, User{}, nil
+ }
+ authority := m[1]
+ projectID := m[2]
+ changeID := m[3]
+
+ urlStr := "https://" + authority + "/changes/" + url.PathEscape(projectID) + "~" + changeID + "?o=MESSAGES&o=DETAILED_ACCOUNTS"
+
+ var obj struct {
+ Updated GerritTime `json:"updated"`
+ Messages []struct {
+ Author struct {
+ AccountID int `json:"_account_id"`
+ Name string `json:"name"`
+ DisplayName string `json:"display_name"`
+ } `json:"author"`
+ Date GerritTime `json:"date"`
+ } `json:"messages"`
+ }
+ if err := httpGetGerritJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+ retUpdatedAt := obj.Updated.Val
+ var retUser User
+ for _, message := range obj.Messages {
+ if withinOneSecond(message.Date.Val, retUpdatedAt) {
+ // Prefer the display name, falling back to the full name.
+ if message.Author.DisplayName != "" {
+ retUser.Name = message.Author.DisplayName
+ } else {
+ retUser.Name = message.Author.Name
+ }
+ retUser.URL = fmt.Sprintf("https://%s/dashboard/%d", authority, message.Author.AccountID)
+ break
+ }
+ }
+ return retUpdatedAt, retUser, nil
+ })
+}
diff --git a/cmd/gen-imworkingon/forge_github.go b/cmd/gen-imworkingon/forge_github.go
new file mode 100644
index 0000000..b657ad7
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_github.go
@@ -0,0 +1,208 @@
+package main
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+// reGitHubPR matches a GitHub pull-request web URL, capturing
+// (1) the owner, (2) the repo, and (3) the PR number.
+var reGitHubPR = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/pull/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+// githubPagination produces the query parameters for the i'th
+// (0-indexed) page of a paginated GitHub API listing.
+func githubPagination(i int) url.Values {
+ params := make(url.Values)
+ params.Set("page", fmt.Sprintf("%v", i+1))
+ return params
+}
+
+// GitHub implements [Forge] for github.com.
+type GitHub struct{}
+
+var _ Forge = GitHub{}
+
+// FetchStatus implements [Forge], reporting the open/closed/merged/
+// released state of the referenced GitHub PR.
+func (GitHub) FetchStatus(urls []string) (string, error) {
+ // If any of the URLs is a Gerrit change, defer to the Gerrit forge
+ // entirely; the GitHub URL is presumably just a mirror.
+ for _, u := range urls {
+ if reGoogleGerritCL.MatchString(u) {
+ return "", nil
+ }
+ }
+ return fetchPerURLStatus(urls, func(u string) (string, error) {
+ m := reGitHubPR.FindStringSubmatch(u)
+ if m == nil {
+ return "", nil
+ }
+ user := m[1]
+ repo := m[2]
+ prnum := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ // State values are "open" and "closed".
+ State string `json:"state"`
+ Merged bool `json:"merged"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+ ret := obj.State
+ if obj.Merged {
+ ret = statusMerged
+ // If the merge commit has made it into a tag, report the
+ // more specific "released in <tag>" status.
+ tag, err := getGitTagThatContainsAll("https://github.com/"+user+"/"+repo, obj.MergeCommitSha)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ } else if obj.State == "closed" {
+ // A "closed" PR may still have been applied out-of-band:
+ // if the URL list includes commit URLs in the same repo,
+ // check whether those commits made it into a tag.
+ var mergeCommits []string
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil && m[1] == user && m[2] == repo {
+ mergeCommits = append(mergeCommits, m[3])
+ }
+ }
+ tag, err := getGitTagThatContainsAll("https://github.com/"+user+"/"+repo, mergeCommits...)
+ if err == nil && tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ }
+
+ return ret, nil
+ })
+}
+
+// FetchSubmittedAt implements [Forge]; a PR's submission time is its
+// creation time.
+func (GitHub) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return fetchPerURLSubmittedAt(urls, func(u string) (time.Time, error) {
+ m := reGitHubPR.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, nil
+ }
+ user := m[1]
+ repo := m[2]
+ prnum := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ CreatedAt time.Time `json:"created_at"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.CreatedAt, nil
+ })
+}
+
+// FetchLastUpdated implements [Forge]: the PR's "updated_at" time,
+// attributed to whichever actor's activity matches that timestamp.
+// The search proceeds from cheapest to most expensive source:
+// PR creation/merge, then issue comments, then review comments, then
+// issue events.
+func (GitHub) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ // If any of the URLs is a Gerrit change, defer to the Gerrit forge
+ // entirely; the GitHub URL is presumably just a mirror.
+ for _, u := range urls {
+ if reGoogleGerritCL.MatchString(u) {
+ return time.Time{}, User{}, nil
+ }
+ }
+ return fetchPerURLLastUpdated(urls, func(u string) (time.Time, User, error) {
+ m := reGitHubPR.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, User{}, nil
+ }
+ user := m[1]
+ repo := m[2]
+ prnum := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ UpdatedAt time.Time `json:"updated_at"`
+
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+
+ MergedAt time.Time `json:"merged_at"`
+ MergedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"merged_by"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+
+ retUpdatedAt := obj.UpdatedAt
+ var retUser User
+
+ if retUser == (User{}) && withinOneSecond(obj.CreatedAt, retUpdatedAt) {
+ retUser.Name = obj.CreatedBy.Login
+ retUser.URL = obj.CreatedBy.HTMLURL
+ }
+ if retUser == (User{}) && withinOneSecond(obj.MergedAt, retUpdatedAt) {
+ retUser.Name = obj.MergedBy.Login
+ retUser.URL = obj.MergedBy.HTMLURL
+ }
+ if retUser == (User{}) {
+ // "normal" comments
+ var comments []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, comment := range comments {
+ if withinOneSecond(comment.UpdatedAt, retUpdatedAt) {
+ retUser.Name = comment.User.Login
+ retUser.URL = comment.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ // comments on a specific part of the diff
+ var reviewComments []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", nil, &reviewComments, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, comment := range reviewComments {
+ if withinOneSecond(comment.UpdatedAt, retUpdatedAt) {
+ retUser.Name = comment.User.Login
+ retUser.URL = comment.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ // Fall back to issue events (labels, closes, reopens, ...).
+ var events []struct {
+ CreatedAt time.Time `json:"created_at"`
+ Actor struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"actor"`
+ }
+ if err := httpcache.GetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", nil, &events); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, event := range events {
+ if withinOneSecond(event.CreatedAt, retUpdatedAt) {
+ retUser.Name = event.Actor.Login
+ retUser.URL = event.Actor.HTMLURL
+ break
+ }
+ }
+ }
+
+ return retUpdatedAt, retUser, nil
+ })
+}
diff --git a/cmd/gen-imworkingon/forge_gitlab.go b/cmd/gen-imworkingon/forge_gitlab.go
new file mode 100644
index 0000000..84a2285
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_gitlab.go
@@ -0,0 +1,173 @@
+package main
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+// reGitLabMR matches a GitLab merge-request web URL, capturing
+// (1) the host, (2) the project path, and (3) the MR number.
+var reGitLabMR = regexp.MustCompile(`^https://([^/]+)/([^?#]+)/-/merge_requests/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+// GitLab implements [Forge] for GitLab instances.
+type GitLab struct{}
+
+var _ Forge = GitLab{}
+
+// FetchStatus implements [Forge], reporting the open/closed/merged/
+// released state of the first URL that looks like a GitLab MR.
+func (GitLab) FetchStatus(urls []string) (string, error) {
+ for _, u := range urls {
+ m := reGitLabMR.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ authority := m[1]
+ projectID := m[2]
+ mrnum := m[3]
+
+ urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
+
+ var obj struct {
+ // State values are "opened", "closed", "locked", and "merged".
+ State string `json:"state"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ SquashCommitSha string `json:"squash_commit_sha"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+
+ // Normalize GitLab's "opened" to the shared status string.
+ ret := obj.State
+ if ret == "opened" {
+ ret = statusOpen
+ }
+
+ if ret == "merged" {
+ ret = statusMerged
+ // Prefer the squash commit when both are set.
+ var mergeCommit string
+ if obj.MergeCommitSha != "" {
+ mergeCommit = obj.MergeCommitSha
+ }
+ if obj.SquashCommitSha != "" {
+ mergeCommit = obj.SquashCommitSha
+ }
+ if mergeCommit != "" {
+ tag, err := getGitTagThatContainsAll("https://"+authority+"/"+projectID+".git", mergeCommit)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ }
+ }
+
+ return ret, nil
+ }
+ return "", nil
+}
+
+// FetchSubmittedAt implements [Forge]; an MR's submission time is its
+// creation time.
+func (GitLab) FetchSubmittedAt(urls []string) (time.Time, error) {
+ for _, u := range urls {
+ m := reGitLabMR.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ authority := m[1]
+ projectID := m[2]
+ mrnum := m[3]
+
+ urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
+
+ var obj struct {
+ CreatedAt time.Time `json:"created_at"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.CreatedAt, nil
+ }
+ return time.Time{}, nil
+}
+
+// FetchLastUpdated implements [Forge]: the MR's "updated_at" time,
+// attributed to the author/merger/note-writer whose activity matches
+// that timestamp.
+func (GitLab) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ for _, u := range urls {
+ m := reGitLabMR.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ authority := m[1]
+ projectID := m[2]
+ mrnum := m[3]
+
+ urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
+
+ var obj struct {
+ ID int `json:"id"`
+
+ UpdatedAt time.Time `json:"updated_at"`
+
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy struct {
+ Username string `json:"username"`
+ WebURL string `json:"web_url"`
+ } `json:"author"`
+
+ MergedAt time.Time `json:"merged_at"`
+ MergedBy struct {
+ Username string `json:"username"`
+ WebURL string `json:"web_url"`
+ } `json:"merged_by"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+
+ retUpdatedAt := obj.UpdatedAt
+ var retUser User
+
+ if retUser == (User{}) && withinOneSecond(obj.CreatedAt, retUpdatedAt) {
+ retUser.Name = obj.CreatedBy.Username
+ retUser.URL = obj.CreatedBy.WebURL
+ }
+ if retUser == (User{}) && withinOneSecond(obj.MergedAt, retUpdatedAt) {
+ retUser.Name = obj.MergedBy.Username
+ retUser.URL = obj.MergedBy.WebURL
+ }
+ if retUser == (User{}) {
+ // NOTE(review): this "noteable" notes endpoint appears to be
+ // GitLab's internal frontend API rather than the documented
+ // v4 REST API — verify it remains available.
+ var notes struct {
+ Notes []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ Author struct {
+ Username string `json:"username"`
+ Path string `json:"path"`
+ } `json:"author"`
+ ResolvedAt time.Time `json:"resolved_at"`
+ ResolvedBy struct {
+ ResolvedAt time.Time `json:"resolved_at"`
+ Username string `json:"username"`
+ Path string `json:"path"`
+ } `json:"resolved_by"`
+ } `json:"notes"`
+ }
+ if err := httpcache.GetJSON(fmt.Sprintf("https://%s/%s/noteable/merge_request/%d/notes", authority, projectID, obj.ID), map[string]string{"X-Last-Fetched-At": "0"}, &notes); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, note := range notes.Notes {
+ if withinOneSecond(note.UpdatedAt, retUpdatedAt) {
+ // If the note was updated by being resolved, credit
+ // the resolver rather than the original author.
+ if withinOneSecond(note.UpdatedAt, note.ResolvedAt) {
+ retUser.Name = note.ResolvedBy.Username
+ retUser.URL = "https://" + authority + note.ResolvedBy.Path
+ } else {
+ retUser.Name = note.Author.Username
+ retUser.URL = "https://" + authority + note.Author.Path
+ }
+ break
+ }
+ }
+ }
+
+ return retUpdatedAt, retUser, nil
+ }
+ return time.Time{}, User{}, nil
+}
diff --git a/cmd/gen-imworkingon/forge_part_git.go b/cmd/gen-imworkingon/forge_part_git.go
new file mode 100644
index 0000000..5175750
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_part_git.go
@@ -0,0 +1,82 @@
+package main
+
+import (
+ "fmt"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+// reGitHubCommit matches a GitHub commit web URL, capturing
+// (1) the owner, (2) the repo, and (3) the commit hash.
+var reGitHubCommit = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/commit/([0-9a-f]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+// PartGit implements [Forge] for raw commit URLs (currently only
+// GitHub-hosted ones).
+type PartGit struct{}
+
+var _ Forge = PartGit{}
+
+// FetchStatus implements [Forge]: commits referenced by URL are by
+// definition merged; if all of them are also contained in a tag, the
+// status is upgraded to "released in <tag>".
+func (PartGit) FetchStatus(urls []string) (string, error) {
+ var gitURL string
+ var gitCommits []string
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil {
+ user := m[1]
+ repo := m[2]
+ hash := m[3]
+
+ gitURL = "https://github.com/" + user + "/" + repo
+ gitCommits = append(gitCommits, hash)
+ }
+ }
+ if len(gitCommits) == 0 {
+ return "", nil
+ }
+ ret := statusMerged
+ tag, err := getGitTagThatContainsAll(gitURL, gitCommits...)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ return ret, nil
+}
+
+// FetchSubmittedAt implements [Forge]; a bare commit URL carries no
+// submission time.
+func (PartGit) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return time.Time{}, nil
+}
+
+// FetchLastUpdated implements [Forge]: the newest author/committer
+// timestamp among all referenced commits. No user is reported, since
+// the fields read from the commit API response do not identify a web
+// user.
+func (PartGit) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ var ret time.Time
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil {
+ user := m[1]
+ repo := m[2]
+ hash := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/commits/" + hash
+ var obj struct {
+ Commit struct {
+ Author struct {
+ Date time.Time `json:"date"`
+ } `json:"author"`
+ Committer struct {
+ Date time.Time `json:"date"`
+ } `json:"committer"`
+ } `json:"commit"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+ if obj.Commit.Author.Date.After(ret) {
+ ret = obj.Commit.Author.Date
+ }
+ if obj.Commit.Committer.Date.After(ret) {
+ ret = obj.Commit.Committer.Date
+ }
+ }
+ }
+ // BUG fix: this previously returned the zero time unconditionally,
+ // discarding the timestamp computed above.
+ return ret, User{}, nil
+}
diff --git a/cmd/gen-imworkingon/forge_part_pipermail.go b/cmd/gen-imworkingon/forge_part_pipermail.go
new file mode 100644
index 0000000..9db498b
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_part_pipermail.go
@@ -0,0 +1,193 @@
+package main
+
+import (
+ "compress/gzip"
+ "errors"
+ "fmt"
+ "net/mail"
+ "net/url"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+ "git.lukeshu.com/www/lib/mailstuff"
+)
+
+var (
+ // rePiperMailMessage matches a Pipermail archive message URL,
+ // capturing (1) the archive base URL, (2) the "YYYY-MonthName"
+ // archive period, and (3) the message number.
+ //
+ // BUG fix: the year used to be matched with `[0-4]{4}`, which
+ // rejects most real years (e.g. "2015" contains a '5').
+ rePiperMailMessage = regexp.MustCompile(`^(https?://.*/pipermail/.*/)([0-9]{4}-(?:January|February|March|April|May|June|July|August|September|October|November|December))/([0-9]+)\.html$`)
+ // rePiperMailDate matches the <I>…</I> date line of a Pipermail
+ // message page.
+ rePiperMailDate = regexp.MustCompile(`^\s*<I>([^<]+)</I>\s*$`)
+ // rePiperMailReply matches the LINK REL="made" header, whose HREF
+ // carries the In-Reply-To message ID.
+ rePiperMailReply = regexp.MustCompile(`^\s*<LINK REL="made" HREF="(.*)">\s*$`)
+)
+
+// PartPiperMail implements [Forge] for threads in Pipermail
+// mailing-list archives.
+type PartPiperMail struct{}
+
+var _ Forge = PartPiperMail{}
+
+// FetchStatus implements [Forge]; a mailing-list thread has no
+// machine-readable status, so this always reports "don't know".
+func (PartPiperMail) FetchStatus(urls []string) (string, error) {
+ return "", nil
+}
+
+// FetchSubmittedAt implements [Forge]: the date scraped from the
+// <I>…</I> line of the first recognized Pipermail message page.
+func (PartPiperMail) FetchSubmittedAt(urls []string) (time.Time, error) {
+ for _, u := range urls {
+ if !rePiperMailMessage.MatchString(u) {
+ continue
+ }
+ htmlStr, err := httpcache.Get(u, nil)
+ if err != nil {
+ return time.Time{}, err
+ }
+ for _, line := range strings.Split(htmlStr, "\n") {
+ if m := rePiperMailDate.FindStringSubmatch(line); m != nil {
+ // Pipermail renders the Date: header in Unix `date` format.
+ return time.Parse(time.UnixDate, m[1])
+ }
+ }
+ }
+ return time.Time{}, nil
+}
+
+// nextMonth returns the Pipermail "YYYY-MonthName" period that follows
+// ym, e.g. "2023-December" -> "2024-January". It panics on input that
+// is not of that form.
+func (PartPiperMail) nextMonth(ym string) string {
+ t, err := time.Parse("2006-January", ym)
+ if err != nil {
+ panic(fmt.Errorf("invalid year-month: %q", ym))
+ }
+ return t.AddDate(0, 1, 0).Format("2006-January")
+}
+
+// threadLen recursively counts the messages in a thread; a nil thread
+// counts as zero.
+func (p PartPiperMail) threadLen(thread *mailstuff.ThreadedMessage) int {
+ if thread == nil {
+ return 0
+ }
+
+ ret := 0
+ if thread.Message != nil {
+ ret++
+ }
+ for child := range thread.Children {
+ ret += p.threadLen(child)
+ }
+ return ret
+}
+
+// FetchLastUpdated implements [Forge]: it locates the thread that the
+// given Pipermail message belongs to (downloading monthly mbox
+// archives until the thread stops growing), then reports the newest
+// message's date and From: address.
+func (p PartPiperMail) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ for _, u := range urls {
+ m := rePiperMailMessage.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ uBase := m[1]
+ uYM := m[2]
+ //uInt := m[3]
+
+ htmlStr, err := httpcache.Get(u, nil)
+ if err != nil {
+ return time.Time{}, User{}, fmt.Errorf("could not fetch message: %w", err)
+ }
+ // Scrape the message's In-Reply-To ID out of the page's
+ // LINK REL="made" header.
+ var msgid mailstuff.MessageID
+ for _, line := range strings.Split(htmlStr, "\n") {
+ if m := rePiperMailReply.FindStringSubmatch(line); m != nil {
+ ru, err := url.Parse(m[1])
+ if err != nil {
+ continue
+ }
+ if msgid = mailstuff.MessageID(ru.Query().Get("In-Reply-To")); msgid != "" {
+ break
+ }
+ }
+ }
+ if msgid == "" {
+ continue
+ }
+
+ // Accumulate monthly mboxes (starting with the message's own
+ // month) until the thread stops gaining messages.
+ var thread *mailstuff.ThreadedMessage
+ for ym, mbox := uYM, []*mail.Message(nil); true; ym = p.nextMonth(ym) {
+ lenBefore := p.threadLen(thread)
+
+ mboxGzStr, err := httpcache.Get(uBase+ym+".txt.gz", nil)
+ if err != nil {
+ // A missing archive for a later month just means we've
+ // caught up to the present; a missing first month is a
+ // real error.
+ if ym == uYM || !errors.Is(err, os.ErrNotExist) {
+ return time.Time{}, User{}, fmt.Errorf("could not fetch mbox for %s: %w", ym, err)
+ }
+ break
+ }
+ gzReader, err := gzip.NewReader(strings.NewReader(mboxGzStr))
+ if err != nil {
+ return time.Time{}, User{}, fmt.Errorf("could not read mbox gz: %w", err)
+ }
+ _mbox, err := mailstuff.ReadMBox(gzReader)
+ if err != nil {
+ gzReader.Close()
+ return time.Time{}, User{}, fmt.Errorf("could not parse mbox: %w", err)
+ }
+ if err := gzReader.Close(); err != nil {
+ return time.Time{}, User{}, fmt.Errorf("close gz: %w", err)
+ }
+ mbox = append(mbox, _mbox...)
+ _, messages := mailstuff.ThreadMessages(mbox)
+ thread = messages[msgid]
+
+ if p.threadLen(thread) == lenBefore {
+ break
+ }
+ }
+ if thread == nil {
+ continue
+ }
+
+ var retTime time.Time
+ var retUser User
+
+ // Walk the whole thread, keeping the newest well-formed
+ // (parseable Date: and From:) message.
+ var walk func(*mailstuff.ThreadedMessage)
+ walk = func(msg *mailstuff.ThreadedMessage) {
+ date, dateErr := msg.Header.Date()
+ froms, fromErr := msg.Header.AddressList("From")
+ if dateErr == nil && fromErr == nil && len(froms) > 0 && (retTime.IsZero() || date.After(retTime)) {
+ retTime = date
+ retUser.Name = froms[0].Name
+ if retUser.Name == "" {
+ retUser.Name = froms[0].Address
+ }
+ retUser.URL = "mailto:" + froms[0].Address
+ }
+ for child := range msg.Children {
+ walk(child)
+ }
+ }
+ walk(thread)
+
+ if !retTime.IsZero() {
+ return retTime, retUser, nil
+ }
+ }
+ return time.Time{}, User{}, nil
+}
diff --git a/cmd/gen-imworkingon/gitcache.go b/cmd/gen-imworkingon/gitcache.go
new file mode 100644
index 0000000..844408d
--- /dev/null
+++ b/cmd/gen-imworkingon/gitcache.go
@@ -0,0 +1,68 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+ "time"
+
+ "git.mothstuff.lol/lukeshu/eclipse/lib/gitcache"
+)
+
+// gitFetched memoizes which repo URLs have been fetched during this
+// run (see withGit).
+var gitFetched = map[string]struct{}{}
+
+// gitCache is an on-disk cache of upstream git repos; a repo is
+// re-fetched at most once per MinPeriod.
+var gitCache = &gitcache.Cache{
+ Dir: ".git-cache",
+ MinPeriod: 1 * time.Hour,
+}
+
+// withGit ensures the repo at u has been fetched (at most once per
+// run), then runs fn inside a fast throwaway clone of it.
+func withGit(u string, fn func(dir string) error) error {
+ if _, ok := gitFetched[u]; !ok {
+ if err := gitCache.Fetch(os.Stderr, u); err != nil {
+ return err
+ }
+ // BUG fix: record the fetch; previously nothing ever wrote to
+ // gitFetched, so every call re-ran gitCache.Fetch.
+ gitFetched[u] = struct{}{}
+ }
+ return gitCache.WithFastClone(os.Stderr, u, fn)
+}
+
+// getGitTagThatContainsAll returns the name of a tag in the repo at
+// gitURL that contains all of the given commit hashes, or "" if no
+// such tag exists.
+func getGitTagThatContainsAll(gitURL string, gitHashes ...string) (string, error) {
+ if len(gitHashes) == 0 {
+ return "", nil
+ }
+ var tag string
+ err := withGit(gitURL, func(dir string) error {
+ // With multiple hashes, reduce them to one representative
+ // commit: `git merge-base --independent` discards hashes that
+ // are ancestors of another.
+ //
+ // NOTE(review): if the hashes are on truly independent lines
+ // of history, `--independent` prints several lines and the
+ // trimmed result still contains newlines, which would break
+ // the --contains= flag below — presumably callers only pass
+ // related commits; verify.
+ gitHash := gitHashes[0]
+ if len(gitHashes) > 1 {
+ cmdline := append([]string{"git", "merge-base", "--independent", "--"}, gitHashes...)
+ cmd := exec.Command(cmdline[0], cmdline[1:]...)
+ cmd.Dir = dir
+ var stdout strings.Builder
+ cmd.Stdout = &stdout
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return err
+ }
+ gitHash = strings.TrimSpace(stdout.String())
+ }
+ // Grab the first tag (in for-each-ref's default refname order)
+ // that contains the representative commit.
+ cmd := exec.Command("git", "for-each-ref",
+ "--count=1",
+ "--format=%(refname:lstrip=2)",
+ "--contains="+gitHash,
+ "refs/tags/",
+ )
+ cmd.Dir = dir
+ var stdout strings.Builder
+ cmd.Stdout = &stdout
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return err
+ }
+ tag = strings.TrimSpace(stdout.String())
+ return nil
+ })
+ if err != nil {
+ return "", fmt.Errorf("%q: %w", gitURL, err)
+ }
+ return tag, nil
+}
diff --git a/cmd/gen-imworkingon/imworkingon.html.tmpl b/cmd/gen-imworkingon/imworkingon.html.tmpl
new file mode 100644
index 0000000..415a252
--- /dev/null
+++ b/cmd/gen-imworkingon/imworkingon.html.tmpl
@@ -0,0 +1,165 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta charset="utf-8">
+ <title>Luke is working on…</title>
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <link rel="stylesheet" type="text/css" href="../blog/assets/style.css">
+ <link rel="stylesheet" type="text/css" href="imworkingon.css">
+</head>
+<body>
+ <header><a href="/">Luke T. Shumaker</a> Β» imworkingon</header>
+
+ <section id="intro">
+ <h1>Luke is working on<br/><small>improving the GNU/Linux ecosystem</small></h1>
+ <nav>
+ <p>This page provides several views into what I'm doing to improve the ecosystem:</p>
+ <ol>
+ <!--<li><a href="#tags">Top themes</a></li>-->
+ <li><a href="#contribs-pending">In-progress work</a></li>
+ <li><a href="#contribs-completed">Completed work</a></li>
+ <li><a href="#standups">Daily statuses</a></li>
+ </ol>
+ </nav>
+
+ <p>The "In-progress work" and "Completed work" sections do <em>not</em> include routine maintenance on <a href="https://parabola.nu">Parabola GNU/Linux-libre</a>, which is also a solid chunk of what I do.</p>
+
+ <p>If you find this work valuable, please consider <a class="em" href="../sponsor/">sponsoring me</a>.</p>
+ </section>
+
+ <!--
+ <section id="tags">
+ <h2>Top themes <a href="#tags">πŸ”—</a></h2>
+ {{- range $tagName, $tagInfo := .Tags }}
+ <article class="tag" id="tag-{{ $tagName }}">
+ <h2><a href="#tag-{{ $tagName }}">#{{ $tagName }}</a> : {{ $tagInfo.PrettyName }}</h2>
      <div class="tag-desc">{{ $tagInfo.Desc | md2html }}</div>
+ </article>
+ {{- end }}
+ </section>
+ -->
+
+{{- define "contrib" }}
+ {{ $contrib := . }}
+ {{ $upstream := $contrib | getUpstream }}
+ <article class="contrib {{ $contrib.StatusClass }}-contrib" {{- if $contrib.ID }}id="contrib-{{ $contrib.ID }}"{{ end }}>
+ <div class="contrib-upstream-name"><a class="em" href="{{ index $upstream.URLs 0 }}">{{ $upstream.Name }}</a></div>
+ <div class="contrib-upstream-desc">{{ $upstream.Desc | md2html }}</div>
+ <div class="contrib-urls">
+ {{- range $url := $contrib.URLs }}
+ <a href="{{ $url }}"><code>{{ $url }}</code></a><br />
+ {{- end }}
+ </div>
+ <div class="contrib-tags">
+ {{- range $tag := $contrib.Tags }}
+ <a href="#tag-{{ $tag }}">#{{ $tag }}</a> {{/* */}}
+ {{- end }}
+ </div>
+ <div class="contrib-submitted">Submitted: {{ timeTag $contrib.SubmittedAt "2006-01-02" }}</div>
+ <div class="contrib-updated">
+ {{- if not $contrib.LastUpdatedAt.IsZero -}}
+ Last updated: {{ timeTag $contrib.LastUpdatedAt "2006-01-02" }}
+ {{- if $contrib.LastUpdatedBy.Name }} by <a href="{{ $contrib.LastUpdatedBy.URL }}">{{ $contrib.LastUpdatedBy.Name }}</a>{{ end }}
+ {{- end -}}
+ </div>
+ <div class="contrib-status">Status: {{ $contrib.Status }}</div>
+ <div class="contrib-desc">
+ {{- $contrib.Desc | md2html }}
+ {{- if $contrib.SponsoredBy }}<p>Sponsored-by: {{ $contrib.SponsoredBy }}</p>{{ end -}}
+ </div>
+ </article>
+{{- end }}
+
+ <section id="contribs-pending">
+ <h2>In-progress work <a href="#contribs-pending">πŸ”—</a></h2>
+ {{- range $contrib := .Contribs }}
+ {{- if or (eq $contrib.StatusClass "merged") (eq $contrib.StatusClass "released") }}{{ continue }}{{ end }}
+ {{ template "contrib" $contrib }}
+ {{- end }}
+ </section>
+ <section id="contribs-completed">
+ <h2>Completed work <a href="#contribs-completed">πŸ”—</a></h2>
+ {{- range $contrib := .Contribs }}
+ {{- if or (eq $contrib.StatusClass "merged") (eq $contrib.StatusClass "released") | not }}{{ continue }}{{ end }}
+ {{ template "contrib" $contrib }}
+ {{- end }}
+ </section>
+ <section id="standups">
+ <h2>Daily statuses <a href="#standups">πŸ”—</a></h2>
    <p>Posted daily on <a href="https://fosstodon.org/@lukeshu">Mastodon</a> with the #DailyStandUp tag.</p>
+
+ <details><summary>Calendar view</summary>
+ <table>
+ <thead>
+ <tr>
+ <th></th>
+ <th><abbr title="Sunday">Su</abbr></th>
+ <th><abbr title="Monday">M</abbr></th>
+ <th><abbr title="Tuesday">Tu</abbr></th>
+ <th><abbr title="Wednesday">W</abbr></th>
+ <th><abbr title="Thursday">Th</abbr></th>
+ <th><abbr title="Friday">F</abbr></th>
+ <th><abbr title="Saturday">S</abbr></th>
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
+ {{- $cal := .StandupCalendar }}
+ {{- $curSunMonth := 0 }}
+ {{- $curSatMonth := 0 }}
+ {{- range $i, $week := reverse .StandupCalendar }}
+ <tr>
+ {{- $sun := (index $week time.Sunday) }}
+ {{- if not $sun.Day }}
+ <th></th>
+ {{- else if ne $sun.Month $curSunMonth }}
+ <th class="{{ monthClass $sun.Month }}" rowspan="{{ $cal.NumWeekdaysInMonth time.Sunday $sun.Date }}">
+ <span>{{ $sun.Month }} {{ $sun.Year }}</span>
+ </th>
+ {{- $curSunMonth = $sun.Month }}
+ {{- end }}
+ {{- range $day := $week }}
+ {{- if not $day.Day }}
+ <td></td>
+ {{- else if not $day.Data }}
+ <td class="{{ monthClass $day.Month }}">
+ {{ $day.Day }}
+ </td>
+ {{- else }}
+ <td class="{{ monthClass $day.Month }}">
+ <a href="#standup-id-{{ $day.Data.ID }}">
+ {{ $day.Day }}
+ </a>
+ </td>
+ {{- end }}
+ {{- end }}
+ {{- $sat := (index $week time.Saturday) }}
+ {{- if not $sat.Day }}
+ <th></th>
+ {{- else if ne $sat.Month $curSatMonth }}
+ <th class="{{ monthClass $sat.Month }}" rowspan="{{ $cal.NumWeekdaysInMonth time.Saturday $sat.Date }}">
+ <span>{{ $sat.Month }} {{ $sat.Year }}</span>
+ </th>
+ {{- $curSatMonth = $sat.Month }}
+ {{- end }}
+ {{- end }}
+ </tr>
+ </tbody>
+ </table>
+ </details>
+
+ {{- range $status := .Standups }}
+ <article class="standup" id="standup-id-{{ $status.ID }}">
+ <div class="standup-title"><a href="{{ $status.URL }}">{{ timeTag $status.CreatedAt "Mon 2006-01-02" }}</a></div>
+ <div class="standup-content">{{ $status.Content }}</div>
+ </article>
+ {{- end }}
+ </section>
+
+ <footer>
+ <p>The content of this page is Copyright Β© Luke T. Shumaker.</p>
+ <p>This page is licensed under the <a href="https://creativecommons.org/licenses/by-sa/4.0/">CC BY-SA 4.0</a> license.</p>
+ </footer>
+</body>
+</html>
diff --git a/cmd/gen-imworkingon/main.go b/cmd/gen-imworkingon/main.go
new file mode 100644
index 0000000..c0c9723
--- /dev/null
+++ b/cmd/gen-imworkingon/main.go
@@ -0,0 +1,189 @@
+package main
+
+import (
+ "bytes"
+ _ "embed"
+ "fmt"
+ "html/template"
+ "os"
+ "reflect"
+ "slices"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/yuin/goldmark"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+func MarkdownToHTML(md string) (template.HTML, error) {
+ var html strings.Builder
+ if err := goldmark.Convert([]byte(md), &html); err != nil {
+ return template.HTML(""), err
+ }
+ return template.HTML(html.String()), nil
+}
+
+func main() {
+ if err := mainWithError(); err != nil {
+ fmt.Fprintf(os.Stderr, "%s: error: %v\n", os.Args[0], err)
+ os.Exit(1)
+ }
+}
+
// htmlTmplStr is the source text of the page template, embedded at
// build time.
//
//go:embed imworkingon.html.tmpl
var htmlTmplStr string

// timeTagTmpl renders a timestamp as an HTML <time> element: a
// machine-readable datetime attribute, a verbose hover title, and a
// pretty display string.
var timeTagTmpl = template.Must(template.New("time.tag.tmpl").
	Parse(`<time datetime="{{ .Machine }}" title="{{ .HumanVerbose }}">{{ .HumanPretty }}</time>`))
+
// mainWithError generates public/imworkingon/index.html from the YAML
// data files in imworkingon/, daily-standup statuses fetched from
// Mastodon, and contribution metadata fetched from the various forges.
func mainWithError() error {
	httpcache.UserAgent = "https://git.lukeshu.com/www/tree/cmd/gen-imworkingon"

	// Gather standups from both the current and the old Mastodon
	// accounts.
	standups, err := ReadStandups("https://social.coop", "lukeshu")
	if err != nil {
		return err
	}
	_standups, err := ReadStandups("https://fosstodon.org", "lukeshu")
	if err != nil {
		return err
	}
	standups = append(standups, _standups...)
	// Hand-curated list of statuses to exclude from the page.
	standupIgnoreList := []string{
		"https://fosstodon.org/@lukeshu/112198267818432116",
		"https://fosstodon.org/@lukeshu/112198241414760456",
	}
	standups = slices.DeleteFunc(standups, func(status *MastodonStatus) bool {
		return slices.Contains(standupIgnoreList, status.URL)
	})

	contribs, err := ReadContribs("imworkingon/contribs.yml")
	if err != nil {
		return err
	}
	tags, err := ReadTags("imworkingon/tags.yml")
	if err != nil {
		return err
	}
	upstreams, err := ReadUpstreams("imworkingon/upstreams.yml")
	if err != nil {
		return err
	}

	// Sort contributions newest-first by last-update time, falling
	// back to submission time for contributions never updated.
	sort.Slice(contribs, func(i, j int) bool {
		iDate := contribs[i].LastUpdatedAt
		if iDate.IsZero() {
			iDate = contribs[i].SubmittedAt
		}
		jDate := contribs[j].LastUpdatedAt
		if jDate.IsZero() {
			jDate = contribs[j].SubmittedAt
		}
		return iDate.After(jDate)
	})

	tmpl := template.Must(template.New("imworkingon.html.tmpl").
		Funcs(template.FuncMap{
			// "time" exposes the time.Weekday constants to the
			// template (used as e.g. `time.Sunday`).
			"time": func() map[string]time.Weekday {
				return map[string]time.Weekday{
					"Sunday":    time.Sunday,
					"Monday":    time.Monday,
					"Tuesday":   time.Tuesday,
					"Wednesday": time.Wednesday,
					"Thursday":  time.Thursday,
					"Friday":    time.Friday,
					"Saturday":  time.Saturday,
				}
			},
			// "reverse" returns a reversed copy of any slice.
			"reverse": func(x any) any {
				in := reflect.ValueOf(x)
				l := in.Len()
				out := reflect.MakeSlice(in.Type(), l, l)
				for i := 0; i < l; i++ {
					out.Index(l - (i + 1)).Set(in.Index(i))
				}
				return out.Interface()
			},
			// "timeTag" renders ts (converted to local time) as
			// an HTML <time> element, using prettyFmt for the
			// visible text.
			"timeTag": func(ts time.Time, prettyFmt string) (template.HTML, error) {
				ts = ts.Local()
				var out strings.Builder
				err := timeTagTmpl.Execute(&out, map[string]string{
					"Machine":      ts.Format(time.RFC3339),
					"HumanVerbose": ts.Format("2006-01-02 15:04:05Z07:00"),
					"HumanPretty":  ts.Format(prettyFmt),
				})
				return template.HTML(out.String()), err
			},
			// "monthClass" alternates a CSS class by month so
			// adjacent months are visually distinct in the
			// calendar.
			"monthClass": func(m time.Month) string {
				if m%2 == 0 {
					return "even-month"
				} else {
					return "odd-month"
				}
			},
			"md2html": MarkdownToHTML,
			// "getUpstream" resolves the upstream project that a
			// contribution belongs to, preferring entries from
			// upstreams.yml and falling back to synthesizing one
			// from the contribution's first URL.
			"getUpstream": func(c Contribution) Upstream {
				// First try any of the documented upstreams.
				for _, cURL := range c.URLs {
					for _, upstream := range upstreams {
						for _, uURL := range upstream.URLs {
							prefix := uURL
							if !strings.HasSuffix(prefix, "/") {
								prefix += "/"
							}
							if cURL == uURL || strings.HasPrefix(cURL, prefix) {
								return upstream
							}
						}
					}
				}
				// Now try to synthesize an upstream.
				if m := reGitHubPR.FindStringSubmatch(c.URLs[0]); m != nil {
					user := m[1]
					repo := m[2]
					return Upstream{
						URLs: []string{"https://github.com/" + user + "/" + repo},
						Name: user + "/" + repo,
					}
				}
				if m := reGitLabMR.FindStringSubmatch(c.URLs[0]); m != nil {
					authority := m[1]
					projectID := m[2]
					if authority == "gitlab.archlinux.org" && strings.HasPrefix(projectID, "archlinux/packaging/packages/") {
						return Upstream{
							URLs: []string{"https://" + authority + "/" + projectID},
							Name: strings.Replace(projectID, "/packages/", "/", 1),
						}
					}
					return Upstream{
						URLs: []string{"https://" + authority + "/" + projectID},
						Name: projectID,
					}
				}
				// :(
				return Upstream{
					URLs: []string{c.URLs[0]},
					Name: "???",
				}
			},
		}).
		Parse(htmlTmplStr))
	var out bytes.Buffer
	if err := tmpl.Execute(&out, map[string]any{
		"Contribs":        contribs,
		"Tags":            tags,
		"Upstreams":       upstreams,
		"Standups":        standups,
		"StandupCalendar": BuildCalendar(standups, func(status *MastodonStatus) Date { return DateOf(status.CreatedAt.Local()) }),
	}); err != nil {
		return err
	}
	// Write to a temporary file and rename it into place, so that a
	// failed run does not leave behind a truncated index.html.
	if err := os.WriteFile("public/imworkingon/index.new.html", out.Bytes(), 0666); err != nil {
		return err
	}
	if err := os.Rename("public/imworkingon/index.new.html", "public/imworkingon/index.html"); err != nil {
		return err
	}
	return nil
}
diff --git a/cmd/gen-imworkingon/src_contribs.go b/cmd/gen-imworkingon/src_contribs.go
new file mode 100644
index 0000000..5694156
--- /dev/null
+++ b/cmd/gen-imworkingon/src_contribs.go
@@ -0,0 +1,223 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "strings"
+ "time"
+
+ "sigs.k8s.io/yaml"
+)
+
// User identifies a person (e.g. whoever last updated a contribution)
// as a display name plus a profile URL.
type User struct {
	Name string `json:"name"`
	URL  string `json:"url"`
}
+
// Contribution describes a single upstream contribution to be
// rendered on the page.  Fields left empty in the YAML are filled in
// by (*Contribution).Fill.
type Contribution struct {
	// ID, when set, is used as the "#contrib-<ID>" anchor in the
	// generated HTML.
	ID          string
	URLs        []string `json:"urls"` // the first URL is treated as primary
	Tags        []string `json:"tags"`
	SponsoredBy string   `json:"sponsored-by"`
	Desc        string   `json:"desc"` // Markdown

	SubmittedAt   time.Time `json:"submitted-at"`    // fetched from the forge if zero
	LastUpdatedAt time.Time `json:"last-updated-at"` // fetched from the forge if zero
	LastUpdatedBy User      `json:"last-updated-by"`
	Status        string    `json:"status"` // free-form; fetched from the forge if empty

	// StatusClass is derived from Status by classifyStatus; it is
	// never read from the YAML.
	StatusClass string `json:"-"`
}
+
+func ReadContribs(filename string) ([]Contribution, error) {
+ bs, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, fmt.Errorf("contribs: %q: %w", filename, err)
+ }
+ var ret []Contribution
+ if err := yaml.UnmarshalStrict(bs, &ret); err != nil {
+ return nil, fmt.Errorf("contribs: %q: %w", filename, err)
+ }
+ for i := range ret {
+ contrib := ret[i]
+ if err := contrib.Fill(); err != nil {
+ return nil, fmt.Errorf("contribs: %q: %w", filename, err)
+ }
+ ret[i] = contrib
+ }
+ return ret, nil
+}
+
// Fill populates the fields that were left unset in the YAML:
// SubmittedAt, LastUpdatedAt/LastUpdatedBy, and Status are fetched
// from the forges, StatusClass is derived from Status, and alias URLs
// are appended for Go Gerrit changes.
func (c *Contribution) Fill() error {
	var err error
	if c.SubmittedAt.IsZero() {
		c.SubmittedAt, err = c.fetchSubmittedAt()
		if err != nil {
			return err
		}
	}
	if c.LastUpdatedAt.IsZero() {
		c.LastUpdatedAt, c.LastUpdatedBy, err = c.fetchLastUpdated()
		if err != nil {
			return err
		}
	}
	if c.Status == "" {
		c.Status, err = c.fetchStatus()
		if err != nil {
			return err
		}
	}
	// StatusClass is always derived, even when Status was given
	// explicitly in the YAML.
	c.StatusClass, err = classifyStatus(c.Status)
	if err != nil {
		return err
	}
	// Add the canonical https://golang.org/cl/ short link for any
	// change on Go's Gerrit instance.
	for _, u := range c.URLs {
		if m := reGoogleGerritCL.FindStringSubmatch(u); m != nil && m[1] == "go-review.googlesource.com" {
			c.URLs = append(c.URLs, "https://golang.org/cl/"+m[3])
		}
	}
	return nil
}
+
// classifyStatus maps a free-form status string to one of the CSS
// class names used by the HTML template ("released", "merged",
// "open", or "closed"), returning an error for strings it does not
// recognize.  Earlier table entries take precedence.
func classifyStatus(status string) (string, error) {
	classes := []struct {
		name     string
		keywords []string
	}{
		{"released", []string{"released", "deployed"}},
		{"merged", []string{"merged"}},
		{"open", []string{"open"}},
		{"closed", []string{"closed", "locked"}},
	}
	for _, class := range classes {
		for _, keyword := range class.keywords {
			if strings.Contains(status, keyword) {
				return class.name, nil
			}
		}
	}
	return "", fmt.Errorf("unrecognized status string: %q", status)
}
+
// Canonical status strings produced by the forge implementations.
const (
	statusOpen   = "open"
	statusMerged = "merged, not yet in a release"
	// statusReleasedFmt is a fmt pattern; %s is the release/tag name.
	statusReleasedFmt = "merged, released in %s"
)
+
// Forge abstracts a service (code host or mailing-list archive) that
// contribution metadata can be fetched from.  Each method inspects
// the given URLs; judging from the fetchPerURL* helpers, a forge
// appears to return zero values for URLs it does not recognize.
type Forge interface {
	FetchStatus(urls []string) (string, error)
	FetchSubmittedAt(urls []string) (time.Time, error)
	FetchLastUpdated(urls []string) (time.Time, User, error)
}
+
// forges is the list of known forges, ordered by precedence; the
// order only matters for fetchStatus, which takes the first non-empty
// answer (the timestamp fetchers aggregate across all forges).
var forges = []Forge{
	// precedence only matters for .FetchStatus.

	// highest precedence
	Gerrit{},
	GitHub{},
	GitLab{},
	Forgejo{"codeberg.org"},
	PartPiperMail{},
	PartGit{},
	// lowest precedence
}
+
// fetchPerURLStatus applies perURL to each URL in turn and returns
// the first non-empty status, or "" if every URL yields an empty
// status.
func fetchPerURLStatus(urls []string, perURL func(string) (string, error)) (string, error) {
	for _, url := range urls {
		switch status, err := perURL(url); {
		case err != nil:
			return "", err
		case status != "":
			return status, nil
		}
	}
	return "", nil
}
+
+func (c Contribution) fetchStatus() (string, error) {
+ for _, forge := range forges {
+ status, err := forge.FetchStatus(c.URLs)
+ if err != nil {
+ return "", err
+ }
+ if status != "" {
+ return status, nil
+ }
+ }
+ return "", fmt.Errorf("idk how to get status for %q", c.URLs[0])
+}
+
+func fetchPerURLSubmittedAt(urls []string, perURL func(string) (time.Time, error)) (time.Time, error) {
+ var ret time.Time
+ for _, u := range urls {
+ submittedAt, err := perURL(u)
+ if err != nil {
+ return time.Time{}, err
+ }
+ if !submittedAt.IsZero() && (ret.IsZero() || submittedAt.Before(ret)) {
+ ret = submittedAt
+ }
+ }
+ return ret, nil
+}
+
+func (c Contribution) fetchSubmittedAt() (time.Time, error) {
+ var ret time.Time
+ for _, forge := range forges {
+ submittedAt, err := forge.FetchSubmittedAt(c.URLs)
+ if err != nil {
+ return time.Time{}, err
+ }
+ if !submittedAt.IsZero() && (ret.IsZero() || submittedAt.Before(ret)) {
+ ret = submittedAt
+ }
+ }
+ if !ret.IsZero() {
+ return ret, nil
+ }
+ return time.Time{}, fmt.Errorf("idk how to get created timestamp for %q", c.URLs[0])
+}
+
+func withinOneSecond(a, b time.Time) bool {
+ d := a.Sub(b)
+ if d < 0 {
+ d = -d
+ }
+ return d <= time.Second
+}
+
+func fetchPerURLLastUpdated(urls []string, perURL func(string) (time.Time, User, error)) (time.Time, User, error) {
+ var ret struct {
+ time.Time
+ User
+ }
+ for _, u := range urls {
+ updatedAt, updatedBy, err := perURL(u)
+ if err != nil {
+ return time.Time{}, User{}, err
+ }
+ if !updatedAt.IsZero() && (ret.Time.IsZero() || updatedAt.After(ret.Time)) {
+ ret.Time, ret.User = updatedAt, updatedBy
+ }
+ }
+ return ret.Time, ret.User, nil
+}
+
+func (c Contribution) fetchLastUpdated() (time.Time, User, error) {
+ var ret struct {
+ time.Time
+ User
+ }
+ for _, forge := range forges {
+ updatedAt, updatedBy, err := forge.FetchLastUpdated(c.URLs)
+ if err != nil {
+ return time.Time{}, User{}, err
+ }
+ if !updatedAt.IsZero() && (ret.Time.IsZero() || updatedAt.After(ret.Time)) {
+ ret.Time, ret.User = updatedAt, updatedBy
+ }
+ }
+ if !ret.Time.IsZero() {
+ return ret.Time, ret.User, nil
+ }
+ return time.Time{}, User{}, nil //fmt.Errorf("idk how to get updated timestamp for %q", c.URLs[0])
+}
diff --git a/cmd/gen-imworkingon/src_contribs_test.go b/cmd/gen-imworkingon/src_contribs_test.go
new file mode 100644
index 0000000..57ffc0f
--- /dev/null
+++ b/cmd/gen-imworkingon/src_contribs_test.go
@@ -0,0 +1,39 @@
+package main
+
+import (
+ "testing"
+
+ "github.com/alecthomas/assert/v2"
+)
+
+func TestClassifyStatus(t *testing.T) {
+ testcases := map[string]struct {
+ Str string
+ Err string
+ }{
+ "merged+deployed": {"released", ""},
+ "merged, deployed": {"released", ""},
+ "released in v1.2": {"released", ""},
+ "merged, released in v1.2": {"released", ""},
+ statusReleasedFmt: {"released", ""},
+
+ "merged": {"merged", ""},
+ statusMerged: {"merged", ""},
+
+ "open": {"open", ""},
+
+ "closed": {"closed", ""},
+ "locked": {"closed", ""},
+ }
+ for in, exp := range testcases {
+ t.Run(in, func(t *testing.T) {
+ actStr, actErr := classifyStatus(in)
+ assert.Equal(t, exp.Str, actStr)
+ if exp.Err == "" {
+ assert.NoError(t, actErr)
+ } else {
+ assert.EqualError(t, actErr, exp.Err)
+ }
+ })
+ }
+}
diff --git a/cmd/gen-imworkingon/src_mastodon.go b/cmd/gen-imworkingon/src_mastodon.go
new file mode 100644
index 0000000..a3b9617
--- /dev/null
+++ b/cmd/gen-imworkingon/src_mastodon.go
@@ -0,0 +1,41 @@
+package main
+
+import (
+ "html/template"
+ "net/url"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
// MastodonStatus is the subset of a status object from the Mastodon
// /api/v1/accounts/{id}/statuses endpoint that this program uses.
type MastodonStatus struct {
	ID        string    `json:"id"`
	CreatedAt time.Time `json:"created_at"`
	URL       string    `json:"url"`
	// Content is embedded in the page un-escaped; presumably it is
	// server-sanitized HTML — TODO(review): confirm.
	Content template.HTML `json:"content"`
}
+
+// Returns statuses sorted from newest to oldest.
+func ReadStandups(server, username string) ([]*MastodonStatus, error) {
+ var account struct {
+ ID string `json:"id"`
+ }
+ if err := httpcache.GetJSON(server+"/api/v1/accounts/lookup?acct="+username, nil, &account); err != nil {
+ return nil, err
+ }
+
+ var statuses []*MastodonStatus
+ if err := httpcache.GetPaginatedJSON(server+"/api/v1/accounts/"+account.ID+"/statuses", nil, &statuses, func(_ int) url.Values {
+ params := make(url.Values)
+ params.Set("tagged", "DailyStandUp")
+ params.Set("exclude_reblogs", "true")
+ if len(statuses) > 0 {
+ params.Set("max_id", statuses[len(statuses)-1].ID)
+ }
+ return params
+ }); err != nil {
+ return nil, err
+ }
+
+ return statuses, nil
+}
diff --git a/cmd/gen-imworkingon/src_tags.go b/cmd/gen-imworkingon/src_tags.go
new file mode 100644
index 0000000..8dcf554
--- /dev/null
+++ b/cmd/gen-imworkingon/src_tags.go
@@ -0,0 +1,25 @@
+package main
+
+import (
+ "fmt"
+ "os"
+
+ "sigs.k8s.io/yaml"
+)
+
// TagInfo is the page-display metadata for a single #tag.
type TagInfo struct {
	PrettyName string `json:"prettyName"`
	Desc       string `json:"desc"` // Markdown
}
+
+func ReadTags(filename string) (map[string]TagInfo, error) {
+ bs, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, fmt.Errorf("tags: %q: %w", filename, err)
+ }
+ var ret map[string]TagInfo
+ if err := yaml.UnmarshalStrict(bs, &ret); err != nil {
+ return nil, fmt.Errorf("tags: %q: %w", filename, err)
+ }
+ return ret, nil
+}
diff --git a/cmd/gen-imworkingon/src_upstreams.go b/cmd/gen-imworkingon/src_upstreams.go
new file mode 100644
index 0000000..03f72ec
--- /dev/null
+++ b/cmd/gen-imworkingon/src_upstreams.go
@@ -0,0 +1,48 @@
+package main
+
+import (
+ _ "embed"
+ "fmt"
+ "net/url"
+ "os"
+ "path"
+ "strings"
+
+ "sigs.k8s.io/yaml"
+)
+
// Upstream describes an upstream project that contributions are made
// to; contributions are matched to an Upstream by URL prefix.
type Upstream struct {
	URLs []string `json:"urls"`
	Name string   `json:"name"` // inferred from URLs[0] by Fill if empty
	Desc string   `json:"desc"` // Markdown
}
+
+func ReadUpstreams(filename string) ([]Upstream, error) {
+ bs, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, fmt.Errorf("upstreams: %q: %w", filename, err)
+ }
+ var ret []Upstream
+ if err := yaml.UnmarshalStrict(bs, &ret); err != nil {
+ return nil, fmt.Errorf("upstreams: %q: %w", filename, err)
+ }
+ for i := range ret {
+ upstream := ret[i]
+ if err := upstream.Fill(); err != nil {
+ return nil, fmt.Errorf("upstreams: %q: %w", filename, err)
+ }
+ ret[i] = upstream
+ }
+ return ret, nil
+}
+
+func (upstream *Upstream) Fill() error {
+ if upstream.Name == "" {
+ u, err := url.Parse(upstream.URLs[0])
+ if err != nil {
+ return err
+ }
+ _, upstream.Name = path.Split(strings.TrimSuffix(path.Clean(u.Path), ".git"))
+ }
+ return nil
+}