Diffstat (limited to 'cmd')
-rw-r--r--  cmd/gen-imworkingon/calendar.go (renamed from cmd/generate/calendar.go)    0
-rw-r--r--  cmd/gen-imworkingon/forge_forgejo.go                                     185
-rw-r--r--  cmd/gen-imworkingon/forge_gerrit.go                                      160
-rw-r--r--  cmd/gen-imworkingon/forge_github.go                                      208
-rw-r--r--  cmd/gen-imworkingon/forge_gitlab.go                                      173
-rw-r--r--  cmd/gen-imworkingon/forge_part_git.go                                     82
-rw-r--r--  cmd/gen-imworkingon/forge_part_pipermail.go                              193
-rw-r--r--  cmd/gen-imworkingon/gitcache.go (renamed from cmd/generate/gitcache.go)    4
-rw-r--r--  cmd/gen-imworkingon/imworkingon.html.tmpl (renamed from cmd/generate/imworkingon.html.tmpl)  9
-rw-r--r--  cmd/gen-imworkingon/main.go (renamed from cmd/generate/main.go)           53
-rw-r--r--  cmd/gen-imworkingon/src_contribs.go                                      223
-rw-r--r--  cmd/gen-imworkingon/src_contribs_test.go (renamed from cmd/generate/src_contribs_test.go)  0
-rw-r--r--  cmd/gen-imworkingon/src_mastodon.go (renamed from cmd/generate/src_mastodon.go)  15
-rw-r--r--  cmd/gen-imworkingon/src_tags.go (renamed from cmd/generate/src_tags.go)    0
-rw-r--r--  cmd/gen-imworkingon/src_upstreams.go (renamed from cmd/generate/src_upstreams.go)  0
-rw-r--r--  cmd/gen-posix/data.go                                                    211
-rw-r--r--  cmd/gen-posix/http_hacks.go                                              156
-rw-r--r--  cmd/gen-posix/main.go                                                    214
-rw-r--r--  cmd/gen-posix/types.go                                                    26
-rw-r--r--  cmd/generate/httpcache.go                                                 95
-rw-r--r--  cmd/generate/src_contribs.go                                             400
21 files changed, 1889 insertions, 518 deletions
diff --git a/cmd/generate/calendar.go b/cmd/gen-imworkingon/calendar.go
index 29c3318..29c3318 100644
--- a/cmd/generate/calendar.go
+++ b/cmd/gen-imworkingon/calendar.go
diff --git a/cmd/gen-imworkingon/forge_forgejo.go b/cmd/gen-imworkingon/forge_forgejo.go
new file mode 100644
index 0000000..34ec767
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_forgejo.go
@@ -0,0 +1,185 @@
+package main
+
+import (
+ "fmt"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var reForgejoPR = regexp.MustCompile(`^https://([^/]+)/([^/?#]+)/([^/?#]+)/pulls/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+type Forgejo struct {
+ Authority string
+}
+
+var _ Forge = Forgejo{}
+
+func (f Forgejo) FetchStatus(urls []string) (string, error) {
+ return fetchPerURLStatus(urls, func(u string) (string, error) {
+ m := reForgejoPR.FindStringSubmatch(u)
+ if m == nil || m[1] != f.Authority {
+ return "", nil
+ }
+ authority := m[1]
+ user := m[2]
+ repo := m[3]
+ prnum := m[4]
+
+ urlStr := "https://" + authority + "/api/v1/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ // State values are "open" and "closed".
+ State string `json:"state"`
+ Merged bool `json:"merged"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+ ret := obj.State
+ if obj.Merged {
+ ret = statusMerged
+ tag, err := getGitTagThatContainsAll("https://"+authority+"/"+user+"/"+repo, obj.MergeCommitSha)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ }
+
+ return ret, nil
+ })
+}
+
+func (f Forgejo) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return fetchPerURLSubmittedAt(urls, func(u string) (time.Time, error) {
+ m := reForgejoPR.FindStringSubmatch(u)
+ if m == nil || m[1] != f.Authority {
+ return time.Time{}, nil
+ }
+ authority := m[1]
+ user := m[2]
+ repo := m[3]
+ prnum := m[4]
+
+ urlStr := "https://" + authority + "/api/v1/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ CreatedAt time.Time `json:"created_at"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.CreatedAt, nil
+ })
+}
+
+func (f Forgejo) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ return fetchPerURLLastUpdated(urls, func(u string) (time.Time, User, error) {
+ m := reForgejoPR.FindStringSubmatch(u)
+ if m == nil || m[1] != f.Authority {
+ return time.Time{}, User{}, nil
+ }
+ authority := m[1]
+ user := m[2]
+ repo := m[3]
+ prnum := m[4]
+
+ urlStr := "https://" + authority + "/api/v1/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ UpdatedAt time.Time `json:"updated_at"`
+
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+
+ MergedAt time.Time `json:"merged_at"`
+ MergedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"merged_by"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+
+ retUpdatedAt := obj.UpdatedAt
+ var retUser User
+
+ if retUser == (User{}) && withinOneSecond(obj.CreatedAt, retUpdatedAt) {
+ retUser.Name = obj.CreatedBy.Login
+ retUser.URL = obj.CreatedBy.HTMLURL
+ }
+ if retUser == (User{}) && withinOneSecond(obj.MergedAt, retUpdatedAt) {
+ retUser.Name = obj.MergedBy.Login
+ retUser.URL = obj.MergedBy.HTMLURL
+ }
+ if retUser == (User{}) {
+ // "normal" comments
+ var comments []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ // (Forgejo paginates with the same 1-based "page" parameter as GitHub)
+ if err := httpcache.GetPaginatedJSON("https://"+authority+"/api/v1/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, comment := range comments {
+ if withinOneSecond(comment.UpdatedAt, retUpdatedAt) {
+ retUser.Name = comment.User.Login
+ retUser.URL = comment.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ // comments on a specific part of the diff: unlike GitHub,
+ // Forgejo has no flat pulls/{index}/comments endpoint, so
+ // check review submissions instead (this assumes Forgejo's
+ // Gitea-derived /reviews endpoint and its submitted_at/user
+ // fields).
+ var reviews []struct {
+ SubmittedAt time.Time `json:"submitted_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetJSON(urlStr+"/reviews", nil, &reviews); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, review := range reviews {
+ if withinOneSecond(review.SubmittedAt, retUpdatedAt) {
+ retUser.Name = review.User.Login
+ retUser.URL = review.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ // other events (this assumes Forgejo's Gitea-derived
+ // /issues/{index}/timeline endpoint and its fields)
+ var events []struct {
+ CreatedAt time.Time `json:"created_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetJSON("https://"+authority+"/api/v1/repos/"+user+"/"+repo+"/issues/"+prnum+"/timeline", nil, &events); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, event := range events {
+ if withinOneSecond(event.CreatedAt, retUpdatedAt) {
+ retUser.Name = event.User.Login
+ retUser.URL = event.User.HTMLURL
+ break
+ }
+ }
+ }
+
+ return retUpdatedAt, retUser, nil
+ })
+}
diff --git a/cmd/gen-imworkingon/forge_gerrit.go b/cmd/gen-imworkingon/forge_gerrit.go
new file mode 100644
index 0000000..05f0386
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_gerrit.go
@@ -0,0 +1,160 @@
+package main
+
+import (
+ "encoding"
+ "encoding/json"
+ "fmt"
+ "net/url"
+ "regexp"
+ "strings"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+// httpGetGerritJSON is like [httpcache.GetJSON], but strips the
+// `)]}'` XSSI-protection line that Gerrit prepends to every JSON
+// response; see
+// https://gerrit-review.googlesource.com/Documentation/rest-api.html#output
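+//
+// For example, a raw Gerrit response body looks like (illustrative):
+//
+//	)]}'
+//	{"status":"MERGED"}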
+func httpGetGerritJSON(u string, hdr map[string]string, out any) error {
+ str, err := httpcache.Get(u, hdr)
+ if err != nil {
+ return err
+ }
+ if _, body, ok := strings.Cut(str, "\n"); ok {
+ str = body
+ }
+ return json.Unmarshal([]byte(str), out)
+}
+
+const GerritTimeFormat = "2006-01-02 15:04:05.000000000"
+
+type GerritTime struct {
+ Val time.Time
+}
+
+var (
+ _ fmt.Stringer = GerritTime{}
+ _ encoding.TextMarshaler = GerritTime{}
+ _ encoding.TextUnmarshaler = (*GerritTime)(nil)
+)
+
+// String implements [fmt.Stringer].
+func (t GerritTime) String() string {
+ return t.Val.Format(GerritTimeFormat)
+}
+
+// MarshalText implements [encoding.TextMarshaler].
+func (t GerritTime) MarshalText() ([]byte, error) {
+ return []byte(t.String()), nil
+}
+
+// UnmarshalText implements [encoding.TextUnmarshaler].
+func (t *GerritTime) UnmarshalText(data []byte) error {
+ val, err := time.Parse(GerritTimeFormat, string(data))
+ if err != nil {
+ return err
+ }
+ t.Val = val
+ return nil
+}
+
+type Gerrit struct{}
+
+var _ Forge = Gerrit{}
+
+var reGoogleGerritCL = regexp.MustCompile(`^https://([a-z]+-review\.googlesource\.com)/c/([^?#]+)/\+/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+func (Gerrit) FetchStatus(urls []string) (string, error) {
+ return fetchPerURLStatus(urls, func(u string) (string, error) {
+ m := reGoogleGerritCL.FindStringSubmatch(u)
+ if m == nil {
+ return "", nil
+ }
+ authority := m[1]
+ projectID := m[2]
+ changeID := m[3]
+
+ urlStr := "https://" + authority + "/changes/" + url.PathEscape(projectID) + "~" + changeID + "?o=MESSAGES&o=DETAILED_ACCOUNTS"
+
+ var obj struct {
+ Status string `json:"status"`
+ }
+ if err := httpGetGerritJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+ // https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-info
+ switch obj.Status {
+ case "NEW":
+ return "open", nil
+ case "MERGED":
+ return "merged", nil
+ case "ABANDONED":
+ return "closed", nil
+ }
+ return "", nil
+ })
+}
+
+func (Gerrit) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return fetchPerURLSubmittedAt(urls, func(u string) (time.Time, error) {
+ m := reGoogleGerritCL.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, nil
+ }
+ authority := m[1]
+ projectID := m[2]
+ changeID := m[3]
+
+ urlStr := "https://" + authority + "/changes/" + url.PathEscape(projectID) + "~" + changeID + "?o=MESSAGES&o=DETAILED_ACCOUNTS"
+
+ var obj struct {
+ Created GerritTime `json:"created"`
+ }
+ if err := httpGetGerritJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.Created.Val, nil
+ })
+}
+
+func (Gerrit) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ return fetchPerURLLastUpdated(urls, func(u string) (time.Time, User, error) {
+ m := reGoogleGerritCL.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, User{}, nil
+ }
+ authority := m[1]
+ projectID := m[2]
+ changeID := m[3]
+
+ urlStr := "https://" + authority + "/changes/" + url.PathEscape(projectID) + "~" + changeID + "?o=MESSAGES&o=DETAILED_ACCOUNTS"
+
+ var obj struct {
+ Updated GerritTime `json:"updated"`
+ Messages []struct {
+ Author struct {
+ AccountID int `json:"_account_id"`
+ Name string `json:"name"`
+ DisplayName string `json:"display_name"`
+ } `json:"author"`
+ Date GerritTime `json:"date"`
+ } `json:"messages"`
+ }
+ if err := httpGetGerritJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+ retUpdatedAt := obj.Updated.Val
+ var retUser User
+ for _, message := range obj.Messages {
+ if withinOneSecond(message.Date.Val, retUpdatedAt) {
+ if message.Author.DisplayName != "" {
+ retUser.Name = message.Author.DisplayName
+ } else {
+ retUser.Name = message.Author.Name
+ }
+ retUser.URL = fmt.Sprintf("https://%s/dashboard/%d", authority, message.Author.AccountID)
+ break
+ }
+ }
+ return retUpdatedAt, retUser, nil
+ })
+}
diff --git a/cmd/gen-imworkingon/forge_github.go b/cmd/gen-imworkingon/forge_github.go
new file mode 100644
index 0000000..b657ad7
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_github.go
@@ -0,0 +1,208 @@
+package main
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var reGitHubPR = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/pull/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
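+// githubPagination returns the query parameters for the i'th page of
+// a GitHub-style paginated listing; the "page" parameter is 1-based,
+// so page i+1 is requested.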
+func githubPagination(i int) url.Values {
+ params := make(url.Values)
+ params.Set("page", fmt.Sprintf("%v", i+1))
+ return params
+}
+
+type GitHub struct{}
+
+var _ Forge = GitHub{}
+
+func (GitHub) FetchStatus(urls []string) (string, error) {
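+ // If any of the URLs is a Gerrit CL, then the GitHub PR is
+ // presumably just a mirror of the Gerrit change; defer to the
+ // Gerrit forge for the status.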
+ for _, u := range urls {
+ if reGoogleGerritCL.MatchString(u) {
+ return "", nil
+ }
+ }
+ return fetchPerURLStatus(urls, func(u string) (string, error) {
+ m := reGitHubPR.FindStringSubmatch(u)
+ if m == nil {
+ return "", nil
+ }
+ user := m[1]
+ repo := m[2]
+ prnum := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ // State values are "open" and "closed".
+ State string `json:"state"`
+ Merged bool `json:"merged"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+ ret := obj.State
+ if obj.Merged {
+ ret = statusMerged
+ tag, err := getGitTagThatContainsAll("https://github.com/"+user+"/"+repo, obj.MergeCommitSha)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ } else if obj.State == "closed" {
+ var mergeCommits []string
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil && m[1] == user && m[2] == repo {
+ mergeCommits = append(mergeCommits, m[3])
+ }
+ }
+ tag, err := getGitTagThatContainsAll("https://github.com/"+user+"/"+repo, mergeCommits...)
+ if err == nil && tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ }
+
+ return ret, nil
+ })
+}
+
+func (GitHub) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return fetchPerURLSubmittedAt(urls, func(u string) (time.Time, error) {
+ m := reGitHubPR.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, nil
+ }
+ user := m[1]
+ repo := m[2]
+ prnum := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ CreatedAt time.Time `json:"created_at"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.CreatedAt, nil
+ })
+}
+
+func (GitHub) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ for _, u := range urls {
+ if reGoogleGerritCL.MatchString(u) {
+ return time.Time{}, User{}, nil
+ }
+ }
+ return fetchPerURLLastUpdated(urls, func(u string) (time.Time, User, error) {
+ m := reGitHubPR.FindStringSubmatch(u)
+ if m == nil {
+ return time.Time{}, User{}, nil
+ }
+ user := m[1]
+ repo := m[2]
+ prnum := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
+
+ var obj struct {
+ UpdatedAt time.Time `json:"updated_at"`
+
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+
+ MergedAt time.Time `json:"merged_at"`
+ MergedBy struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"merged_by"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+
+ retUpdatedAt := obj.UpdatedAt
+ var retUser User
+
+ if retUser == (User{}) && withinOneSecond(obj.CreatedAt, retUpdatedAt) {
+ retUser.Name = obj.CreatedBy.Login
+ retUser.URL = obj.CreatedBy.HTMLURL
+ }
+ if retUser == (User{}) && withinOneSecond(obj.MergedAt, retUpdatedAt) {
+ retUser.Name = obj.MergedBy.Login
+ retUser.URL = obj.MergedBy.HTMLURL
+ }
+ if retUser == (User{}) {
+ // "normal" comments
+ var comments []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, comment := range comments {
+ if withinOneSecond(comment.UpdatedAt, retUpdatedAt) {
+ retUser.Name = comment.User.Login
+ retUser.URL = comment.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ // comments on a specific part of the diff
+ var reviewComments []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ User struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"user"`
+ }
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", nil, &reviewComments, githubPagination); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, comment := range reviewComments {
+ if withinOneSecond(comment.UpdatedAt, retUpdatedAt) {
+ retUser.Name = comment.User.Login
+ retUser.URL = comment.User.HTMLURL
+ break
+ }
+ }
+ }
+ if retUser == (User{}) {
+ var events []struct {
+ CreatedAt time.Time `json:"created_at"`
+ Actor struct {
+ Login string `json:"login"`
+ HTMLURL string `json:"html_url"`
+ } `json:"actor"`
+ }
+ if err := httpcache.GetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", nil, &events); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, event := range events {
+ if withinOneSecond(event.CreatedAt, retUpdatedAt) {
+ retUser.Name = event.Actor.Login
+ retUser.URL = event.Actor.HTMLURL
+ break
+ }
+ }
+ }
+
+ return retUpdatedAt, retUser, nil
+ })
+}
diff --git a/cmd/gen-imworkingon/forge_gitlab.go b/cmd/gen-imworkingon/forge_gitlab.go
new file mode 100644
index 0000000..84a2285
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_gitlab.go
@@ -0,0 +1,173 @@
+package main
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var reGitLabMR = regexp.MustCompile(`^https://([^/]+)/([^?#]+)/-/merge_requests/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+type GitLab struct{}
+
+var _ Forge = GitLab{}
+
+func (GitLab) FetchStatus(urls []string) (string, error) {
+ for _, u := range urls {
+ m := reGitLabMR.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ authority := m[1]
+ projectID := m[2]
+ mrnum := m[3]
+
+ urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
+
+ var obj struct {
+ // State values are "opened", "closed", "locked", and "merged".
+ State string `json:"state"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ SquashCommitSha string `json:"squash_commit_sha"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return "", err
+ }
+
+ ret := obj.State
+ if ret == "opened" {
+ ret = statusOpen
+ }
+
+ if ret == "merged" {
+ ret = statusMerged
+ var mergeCommit string
+ if obj.MergeCommitSha != "" {
+ mergeCommit = obj.MergeCommitSha
+ }
+ if obj.SquashCommitSha != "" {
+ mergeCommit = obj.SquashCommitSha
+ }
+ if mergeCommit != "" {
+ tag, err := getGitTagThatContainsAll("https://"+authority+"/"+projectID+".git", mergeCommit)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ }
+ }
+
+ return ret, nil
+ }
+ return "", nil
+}
+
+func (GitLab) FetchSubmittedAt(urls []string) (time.Time, error) {
+ for _, u := range urls {
+ m := reGitLabMR.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ authority := m[1]
+ projectID := m[2]
+ mrnum := m[3]
+
+ urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
+
+ var obj struct {
+ CreatedAt time.Time `json:"created_at"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, err
+ }
+ return obj.CreatedAt, nil
+ }
+ return time.Time{}, nil
+}
+
+func (GitLab) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ for _, u := range urls {
+ m := reGitLabMR.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ authority := m[1]
+ projectID := m[2]
+ mrnum := m[3]
+
+ urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
+
+ var obj struct {
+ ID int `json:"id"`
+
+ UpdatedAt time.Time `json:"updated_at"`
+
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy struct {
+ Username string `json:"username"`
+ WebURL string `json:"web_url"`
+ } `json:"author"`
+
+ MergedAt time.Time `json:"merged_at"`
+ MergedBy struct {
+ Username string `json:"username"`
+ WebURL string `json:"web_url"`
+ } `json:"merged_by"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+
+ retUpdatedAt := obj.UpdatedAt
+ var retUser User
+
+ if retUser == (User{}) && withinOneSecond(obj.CreatedAt, retUpdatedAt) {
+ retUser.Name = obj.CreatedBy.Username
+ retUser.URL = obj.CreatedBy.WebURL
+ }
+ if retUser == (User{}) && withinOneSecond(obj.MergedAt, retUpdatedAt) {
+ retUser.Name = obj.MergedBy.Username
+ retUser.URL = obj.MergedBy.WebURL
+ }
+ if retUser == (User{}) {
+ var notes struct {
+ Notes []struct {
+ UpdatedAt time.Time `json:"updated_at"`
+ Author struct {
+ Username string `json:"username"`
+ Path string `json:"path"`
+ } `json:"author"`
+ ResolvedAt time.Time `json:"resolved_at"`
+ ResolvedBy struct {
+ ResolvedAt time.Time `json:"resolved_at"`
+ Username string `json:"username"`
+ Path string `json:"path"`
+ } `json:"resolved_by"`
+ } `json:"notes"`
+ }
+ if err := httpcache.GetJSON(fmt.Sprintf("https://%s/%s/noteable/merge_request/%d/notes", authority, projectID, obj.ID), map[string]string{"X-Last-Fetched-At": "0"}, &notes); err != nil {
+ return time.Time{}, User{}, err
+ }
+ for _, note := range notes.Notes {
+ if withinOneSecond(note.UpdatedAt, retUpdatedAt) {
+ if withinOneSecond(note.UpdatedAt, note.ResolvedAt) {
+ retUser.Name = note.ResolvedBy.Username
+ retUser.URL = "https://" + authority + note.ResolvedBy.Path
+ } else {
+ retUser.Name = note.Author.Username
+ retUser.URL = "https://" + authority + note.Author.Path
+ }
+ break
+ }
+ }
+ }
+
+ return retUpdatedAt, retUser, nil
+ }
+ return time.Time{}, User{}, nil
+}
diff --git a/cmd/gen-imworkingon/forge_part_git.go b/cmd/gen-imworkingon/forge_part_git.go
new file mode 100644
index 0000000..5175750
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_part_git.go
@@ -0,0 +1,82 @@
+package main
+
+import (
+ "fmt"
+ "regexp"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var reGitHubCommit = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/commit/([0-9a-f]+)(?:\?[^#]*)?(?:#.*)?$`)
+
+type PartGit struct{}
+
+var _ Forge = PartGit{}
+
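+// PartGit handles bare commit URLs: if a contribution's URLs include
+// commits, the work is already merged, and the only question is
+// whether a release tag contains those commits yet.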
+func (PartGit) FetchStatus(urls []string) (string, error) {
+ var gitURL string
+ var gitCommits []string
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil {
+ user := m[1]
+ repo := m[2]
+ hash := m[3]
+
+ gitURL = "https://github.com/" + user + "/" + repo
+ gitCommits = append(gitCommits, hash)
+ }
+ }
+ if len(gitCommits) == 0 {
+ return "", nil
+ }
+ ret := statusMerged
+ tag, err := getGitTagThatContainsAll(gitURL, gitCommits...)
+ if err != nil {
+ return "", err
+ }
+ if tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
+ return ret, nil
+}
+
+func (PartGit) FetchSubmittedAt(urls []string) (time.Time, error) {
+ return time.Time{}, nil
+}
+
+func (PartGit) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ var ret time.Time
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil {
+ user := m[1]
+ repo := m[2]
+ hash := m[3]
+
+ urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/commits/" + hash
+ var obj struct {
+ Commit struct {
+ Author struct {
+ Date time.Time `json:"date"`
+ } `json:"author"`
+ Committer struct {
+ Date time.Time `json:"date"`
+ } `json:"committer"`
+ } `json:"commit"`
+ }
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
+ return time.Time{}, User{}, err
+ }
+ if obj.Commit.Author.Date.After(ret) {
+ ret = obj.Commit.Author.Date
+ }
+ if obj.Commit.Committer.Date.After(ret) {
+ ret = obj.Commit.Committer.Date
+ }
+ }
+ }
+ if ret.IsZero() {
+ return time.Time{}, User{}, nil
+ }
+ return ret, User{}, nil
+}
diff --git a/cmd/gen-imworkingon/forge_part_pipermail.go b/cmd/gen-imworkingon/forge_part_pipermail.go
new file mode 100644
index 0000000..9db498b
--- /dev/null
+++ b/cmd/gen-imworkingon/forge_part_pipermail.go
@@ -0,0 +1,193 @@
+package main
+
+import (
+ "compress/gzip"
+ "errors"
+ "fmt"
+ "net/mail"
+ "net/url"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ "git.lukeshu.com/www/lib/httpcache"
+ "git.lukeshu.com/www/lib/mailstuff"
+)
+
+var (
+ rePiperMailMessage = regexp.MustCompile(`^(https?://.*/pipermail/.*/)([0-9]{4}-(?:January|February|March|April|May|June|July|August|September|October|November|December))/([0-9]+)\.html$`)
+ rePiperMailDate = regexp.MustCompile(`^\s*<I>([^<]+)</I>\s*$`)
+ rePiperMailReply = regexp.MustCompile(`^\s*<LINK REL="made" HREF="(.*)">\s*$`)
+)
+
+type PartPiperMail struct{}
+
+var _ Forge = PartPiperMail{}
+
+func (PartPiperMail) FetchStatus(urls []string) (string, error) {
+ return "", nil
+}
+
+func (PartPiperMail) FetchSubmittedAt(urls []string) (time.Time, error) {
+ for _, u := range urls {
+ if !rePiperMailMessage.MatchString(u) {
+ continue
+ }
+ htmlStr, err := httpcache.Get(u, nil)
+ if err != nil {
+ return time.Time{}, err
+ }
+ for _, line := range strings.Split(htmlStr, "\n") {
+ if m := rePiperMailDate.FindStringSubmatch(line); m != nil {
+ return time.Parse(time.UnixDate, m[1])
+ }
+ }
+ }
+ return time.Time{}, nil
+}
+
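+// nextMonth takes a "2006-January"-style year-month string and
+// returns the following month in the same format.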
+func (PartPiperMail) nextMonth(ym string) string {
+ yStr, mStr, ok := strings.Cut(ym, "-")
+ if !ok {
+ panic(fmt.Errorf("invalid year-month: %q", ym))
+ }
+ switch mStr {
+ case "January":
+ return yStr + "-February"
+ case "February":
+ return yStr + "-March"
+ case "March":
+ return yStr + "-April"
+ case "April":
+ return yStr + "-May"
+ case "May":
+ return yStr + "-June"
+ case "June":
+ return yStr + "-July"
+ case "July":
+ return yStr + "-August"
+ case "August":
+ return yStr + "-September"
+ case "September":
+ return yStr + "-October"
+ case "October":
+ return yStr + "-November"
+ case "November":
+ return yStr + "-December"
+ case "December":
+ y, _ := strconv.Atoi(yStr)
+ return fmt.Sprintf("%d-January", y+1)
+ default:
+ panic(fmt.Errorf("invalid year-month: %q", ym))
+ }
+}
+
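+// threadLen returns the number of messages in the thread, counting
+// recursively.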
+func (p PartPiperMail) threadLen(thread *mailstuff.ThreadedMessage) int {
+ if thread == nil {
+ return 0
+ }
+
+ ret := 0
+ if thread.Message != nil {
+ ret++
+ }
+ for child := range thread.Children {
+ ret += p.threadLen(child)
+ }
+ return ret
+}
+
+func (p PartPiperMail) FetchLastUpdated(urls []string) (time.Time, User, error) {
+ for _, u := range urls {
+ m := rePiperMailMessage.FindStringSubmatch(u)
+ if m == nil {
+ continue
+ }
+ uBase := m[1]
+ uYM := m[2]
+ //uInt := m[3]
+
+ htmlStr, err := httpcache.Get(u, nil)
+ if err != nil {
+ return time.Time{}, User{}, fmt.Errorf("could not fetch message: %w", err)
+ }
+ var msgid mailstuff.MessageID
+ for _, line := range strings.Split(htmlStr, "\n") {
+ if m := rePiperMailReply.FindStringSubmatch(line); m != nil {
+ ru, err := url.Parse(m[1])
+ if err != nil {
+ continue
+ }
+ if msgid = mailstuff.MessageID(ru.Query().Get("In-Reply-To")); msgid != "" {
+ break
+ }
+ }
+ }
+ if msgid == "" {
+ continue
+ }
+
+ var thread *mailstuff.ThreadedMessage
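+ // Walk forward month-by-month, merging each month's mbox
+ // into the thread, until a month no longer grows the thread
+ // (a discussion can span multiple monthly archives).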
+ for ym, mbox := uYM, []*mail.Message(nil); true; ym = p.nextMonth(ym) {
+ lenBefore := p.threadLen(thread)
+
+ mboxGzStr, err := httpcache.Get(uBase+ym+".txt.gz", nil)
+ if err != nil {
+ if ym == uYM || !errors.Is(err, os.ErrNotExist) {
+ return time.Time{}, User{}, fmt.Errorf("could not fetch mbox for %s: %w", ym, err)
+ }
+ break
+ }
+ gzReader, err := gzip.NewReader(strings.NewReader(mboxGzStr))
+ if err != nil {
+ return time.Time{}, User{}, fmt.Errorf("could not read mbox gz: %w", err)
+ }
+ _mbox, err := mailstuff.ReadMBox(gzReader)
+ if err != nil {
+ gzReader.Close()
+ return time.Time{}, User{}, fmt.Errorf("could not parse mbox: %w", err)
+ }
+ if err := gzReader.Close(); err != nil {
+ return time.Time{}, User{}, fmt.Errorf("close gz: %w", err)
+ }
+ mbox = append(mbox, _mbox...)
+ _, messages := mailstuff.ThreadMessages(mbox)
+ thread = messages[msgid]
+
+ if p.threadLen(thread) == lenBefore {
+ break
+ }
+ }
+ if thread == nil {
+ continue
+ }
+
+ var retTime time.Time
+ var retUser User
+
+ var walk func(*mailstuff.ThreadedMessage)
+ walk = func(msg *mailstuff.ThreadedMessage) {
+ date, dateErr := msg.Header.Date()
+ froms, fromErr := msg.Header.AddressList("From")
+ if dateErr == nil && fromErr == nil && len(froms) > 0 && (retTime.IsZero() || date.After(retTime)) {
+ retTime = date
+ retUser.Name = froms[0].Name
+ if retUser.Name == "" {
+ retUser.Name = froms[0].Address
+ }
+ retUser.URL = "mailto:" + froms[0].Address
+ }
+ for child := range msg.Children {
+ walk(child)
+ }
+ }
+ walk(thread)
+
+ if !retTime.IsZero() {
+ return retTime, retUser, nil
+ }
+ }
+ return time.Time{}, User{}, nil
+}
diff --git a/cmd/generate/gitcache.go b/cmd/gen-imworkingon/gitcache.go
index 7caf024..844408d 100644
--- a/cmd/generate/gitcache.go
+++ b/cmd/gen-imworkingon/gitcache.go
@@ -5,6 +5,7 @@ import (
"os"
"os/exec"
"strings"
+ "time"
"git.mothstuff.lol/lukeshu/eclipse/lib/gitcache"
)
@@ -12,7 +13,8 @@ import (
var gitFetched = map[string]struct{}{}
var gitCache = &gitcache.Cache{
- Dir: ".git-cache",
+ Dir: ".git-cache",
+ MinPeriod: 1 * time.Hour,
}
func withGit(u string, fn func(dir string) error) error {
diff --git a/cmd/generate/imworkingon.html.tmpl b/cmd/gen-imworkingon/imworkingon.html.tmpl
index 85a56e1..415a252 100644
--- a/cmd/generate/imworkingon.html.tmpl
+++ b/cmd/gen-imworkingon/imworkingon.html.tmpl
@@ -15,15 +15,19 @@
<nav>
<p>This page provides several views into what I'm doing to improve the ecosystem:</p>
<ol>
- <li><a href="#tags">Top themes</a></li>
+ <!--<li><a href="#tags">Top themes</a></li>-->
<li><a href="#contribs-pending">In-progress work</a></li>
<li><a href="#contribs-completed">Completed work</a></li>
<li><a href="#standups">Daily statuses</a></li>
</ol>
</nav>
+
+ <p>The "In-progress work" and "Completed work" sections do <em>not</em> include routine maintenance on <a href="https://parabola.nu">Parabola GNU/Linux-libre</a>, which is also a solid chunk of what I do.</p>
+
<p>If you find this work valuable, please consider <a class="em" href="../sponsor/">sponsoring me</a>.</p>
</section>
+ <!--
<section id="tags">
<h2>Top themes <a href="#tags">🔗</a></h2>
{{- range $tagName, $tagInfo := .Tags }}
@@ -33,11 +37,12 @@
</article>
{{- end }}
</section>
+ -->
{{- define "contrib" }}
{{ $contrib := . }}
{{ $upstream := $contrib | getUpstream }}
- <article class="contrib {{ $contrib.StatusClass }}-contrib">
+ <article class="contrib {{ $contrib.StatusClass }}-contrib" {{- if $contrib.ID }}id="contrib-{{ $contrib.ID }}"{{ end }}>
<div class="contrib-upstream-name"><a class="em" href="{{ index $upstream.URLs 0 }}">{{ $upstream.Name }}</a></div>
<div class="contrib-upstream-desc">{{ $upstream.Desc | md2html }}</div>
<div class="contrib-urls">
diff --git a/cmd/generate/main.go b/cmd/gen-imworkingon/main.go
index e322e5c..c0c9723 100644
--- a/cmd/generate/main.go
+++ b/cmd/gen-imworkingon/main.go
@@ -4,15 +4,17 @@ import (
"bytes"
_ "embed"
"fmt"
+ "html/template"
"os"
"reflect"
+ "slices"
"sort"
"strings"
"time"
- "html/template"
-
"github.com/yuin/goldmark"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
func MarkdownToHTML(md string) (template.HTML, error) {
@@ -23,10 +25,6 @@ func MarkdownToHTML(md string) (template.HTML, error) {
return template.HTML(html.String()), nil
}
-var githubProjects = map[string]string{
- "flori/json": "ruby-json",
-}
-
func main() {
if err := mainWithError(); err != nil {
fmt.Fprintf(os.Stderr, "%s: error: %v\n", os.Args[0], err)
@@ -41,10 +39,25 @@ var timeTagTmpl = template.Must(template.New("time.tag.tmpl").
Parse(`<time datetime="{{ .Machine }}" title="{{ .HumanVerbose }}">{{ .HumanPretty }}</time>`))
func mainWithError() error {
- standups, err := ReadStandups("https://fosstodon.org", "lukeshu")
+ httpcache.UserAgent = "https://git.lukeshu.com/www/tree/cmd/gen-imworkingon"
+
+ standups, err := ReadStandups("https://social.coop", "lukeshu")
+ if err != nil {
+ return err
+ }
+ _standups, err := ReadStandups("https://fosstodon.org", "lukeshu")
if err != nil {
return err
}
+ standups = append(standups, _standups...)
+ standupIgnoreList := []string{
+ "https://fosstodon.org/@lukeshu/112198267818432116",
+ "https://fosstodon.org/@lukeshu/112198241414760456",
+ }
+ standups = slices.DeleteFunc(standups, func(status *MastodonStatus) bool {
+ return slices.Contains(standupIgnoreList, status.URL)
+ })
+
contribs, err := ReadContribs("imworkingon/contribs.yml")
if err != nil {
return err
@@ -125,12 +138,34 @@ func mainWithError() error {
}
}
}
+ // Now try to synthesize an upstream.
if m := reGitHubPR.FindStringSubmatch(c.URLs[0]); m != nil {
user := m[1]
repo := m[2]
- return Upstream{URLs: []string{c.URLs[0]}, Name: user + "/" + repo}
+ return Upstream{
+ URLs: []string{"https://github.com/" + user + "/" + repo},
+ Name: user + "/" + repo,
+ }
+ }
+ if m := reGitLabMR.FindStringSubmatch(c.URLs[0]); m != nil {
+ authority := m[1]
+ projectID := m[2]
+ if authority == "gitlab.archlinux.org" && strings.HasPrefix(projectID, "archlinux/packaging/packages/") {
+ return Upstream{
+ URLs: []string{"https://" + authority + "/" + projectID},
+ Name: strings.Replace(projectID, "/packages/", "/", 1),
+ }
+ }
+ return Upstream{
+ URLs: []string{"https://" + authority + "/" + projectID},
+ Name: projectID,
+ }
+ }
+ // :(
+ return Upstream{
+ URLs: []string{c.URLs[0]},
+ Name: "???",
}
- return Upstream{URLs: []string{c.URLs[0]}, Name: "???"}
},
}).
Parse(htmlTmplStr))
diff --git a/cmd/gen-imworkingon/src_contribs.go b/cmd/gen-imworkingon/src_contribs.go
new file mode 100644
index 0000000..5694156
--- /dev/null
+++ b/cmd/gen-imworkingon/src_contribs.go
@@ -0,0 +1,223 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "strings"
+ "time"
+
+ "sigs.k8s.io/yaml"
+)
+
+type User struct {
+ Name string `json:"name"`
+ URL string `json:"url"`
+}
+
+type Contribution struct {
+ ID string
+ URLs []string `json:"urls"`
+ Tags []string `json:"tags"`
+ SponsoredBy string `json:"sponsored-by"`
+ Desc string `json:"desc"`
+
+ SubmittedAt time.Time `json:"submitted-at"`
+ LastUpdatedAt time.Time `json:"last-updated-at"`
+ LastUpdatedBy User `json:"last-updated-by"`
+ Status string `json:"status"`
+
+ StatusClass string `json:"-"`
+}
+
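+// A contribs.yml entry looks like (illustrative values):
+//
+//	- urls:
+//	    - https://github.com/example/project/pull/123
+//	  tags: [ExampleTag]
+//	  sponsored-by: Example Sponsor
+//	  desc: |
+//	    Fix an example bug.
+//
+// submitted-at, last-updated-at, last-updated-by, and status may also
+// be given explicitly; anything missing is fetched from the relevant
+// forge (see Contribution.Fill).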
+func ReadContribs(filename string) ([]Contribution, error) {
+ bs, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, fmt.Errorf("contribs: %q: %w", filename, err)
+ }
+ var ret []Contribution
+ if err := yaml.UnmarshalStrict(bs, &ret); err != nil {
+ return nil, fmt.Errorf("contribs: %q: %w", filename, err)
+ }
+ for i := range ret {
+ contrib := ret[i]
+ if err := contrib.Fill(); err != nil {
+ return nil, fmt.Errorf("contribs: %q: %w", filename, err)
+ }
+ ret[i] = contrib
+ }
+ return ret, nil
+}
+
+func (c *Contribution) Fill() error {
+ var err error
+ if c.SubmittedAt.IsZero() {
+ c.SubmittedAt, err = c.fetchSubmittedAt()
+ if err != nil {
+ return err
+ }
+ }
+ if c.LastUpdatedAt.IsZero() {
+ c.LastUpdatedAt, c.LastUpdatedBy, err = c.fetchLastUpdated()
+ if err != nil {
+ return err
+ }
+ }
+ if c.Status == "" {
+ c.Status, err = c.fetchStatus()
+ if err != nil {
+ return err
+ }
+ }
+ c.StatusClass, err = classifyStatus(c.Status)
+ if err != nil {
+ return err
+ }
+ for _, u := range c.URLs {
+ if m := reGoogleGerritCL.FindStringSubmatch(u); m != nil && m[1] == "go-review.googlesource.com" {
+ c.URLs = append(c.URLs, "https://golang.org/cl/"+m[3])
+ }
+ }
+ return nil
+}
+
+func classifyStatus(status string) (string, error) {
+ switch {
+ case strings.Contains(status, "released") || strings.Contains(status, "deployed"):
+ return "released", nil
+ case strings.Contains(status, "merged"):
+ return "merged", nil
+ case strings.Contains(status, "open"):
+ return "open", nil
+ case strings.Contains(status, "closed") || strings.Contains(status, "locked"):
+ return "closed", nil
+ default:
+ return "", fmt.Errorf("unrecognized status string: %q", status)
+ }
+}
+
+const (
+ statusOpen = "open"
+ statusMerged = "merged, not yet in a release"
+ statusReleasedFmt = "merged, released in %s"
+)
+
+type Forge interface {
+ FetchStatus(urls []string) (string, error)
+ FetchSubmittedAt(urls []string) (time.Time, error)
+ FetchLastUpdated(urls []string) (time.Time, User, error)
+}
+
+var forges = []Forge{
+ // precedence only matters for .FetchStatus.
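+ // (e.g. a contribution may list both a Gerrit CL and its
+ // GitHub mirror's PR; Gerrit{} must come before GitHub{} so
+ // that the status is taken from the canonical review.)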
+
+ // highest precedence
+ Gerrit{},
+ GitHub{},
+ GitLab{},
+ Forgejo{"codeberg.org"},
+ PartPiperMail{},
+ PartGit{},
+ // lowest precedence
+}
+
+func fetchPerURLStatus(urls []string, perURL func(string) (string, error)) (string, error) {
+ for _, u := range urls {
+ status, err := perURL(u)
+ if err != nil {
+ return "", err
+ }
+ if status != "" {
+ return status, nil
+ }
+ }
+ return "", nil
+}
+
+func (c Contribution) fetchStatus() (string, error) {
+ for _, forge := range forges {
+ status, err := forge.FetchStatus(c.URLs)
+ if err != nil {
+ return "", err
+ }
+ if status != "" {
+ return status, nil
+ }
+ }
+ return "", fmt.Errorf("idk how to get status for %q", c.URLs[0])
+}
+
+func fetchPerURLSubmittedAt(urls []string, perURL func(string) (time.Time, error)) (time.Time, error) {
+ var ret time.Time
+ for _, u := range urls {
+ submittedAt, err := perURL(u)
+ if err != nil {
+ return time.Time{}, err
+ }
+ if !submittedAt.IsZero() && (ret.IsZero() || submittedAt.Before(ret)) {
+ ret = submittedAt
+ }
+ }
+ return ret, nil
+}
+
+func (c Contribution) fetchSubmittedAt() (time.Time, error) {
+ var ret time.Time
+ for _, forge := range forges {
+ submittedAt, err := forge.FetchSubmittedAt(c.URLs)
+ if err != nil {
+ return time.Time{}, err
+ }
+ if !submittedAt.IsZero() && (ret.IsZero() || submittedAt.Before(ret)) {
+ ret = submittedAt
+ }
+ }
+ if !ret.IsZero() {
+ return ret, nil
+ }
+ return time.Time{}, fmt.Errorf("idk how to get created timestamp for %q", c.URLs[0])
+}
+
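+// withinOneSecond reports whether a and b differ by at most one
+// second. Forge APIs report timestamps with varying precision, so
+// exact equality would be too strict when matching a comment or
+// event to a change's overall "updated" timestamp.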
+func withinOneSecond(a, b time.Time) bool {
+ d := a.Sub(b)
+ if d < 0 {
+ d = -d
+ }
+ return d <= time.Second
+}
+
+func fetchPerURLLastUpdated(urls []string, perURL func(string) (time.Time, User, error)) (time.Time, User, error) {
+ var ret struct {
+ time.Time
+ User
+ }
+ for _, u := range urls {
+ updatedAt, updatedBy, err := perURL(u)
+ if err != nil {
+ return time.Time{}, User{}, err
+ }
+ if !updatedAt.IsZero() && (ret.Time.IsZero() || updatedAt.After(ret.Time)) {
+ ret.Time, ret.User = updatedAt, updatedBy
+ }
+ }
+ return ret.Time, ret.User, nil
+}
+
+func (c Contribution) fetchLastUpdated() (time.Time, User, error) {
+ var ret struct {
+ time.Time
+ User
+ }
+ for _, forge := range forges {
+ updatedAt, updatedBy, err := forge.FetchLastUpdated(c.URLs)
+ if err != nil {
+ return time.Time{}, User{}, err
+ }
+ if !updatedAt.IsZero() && (ret.Time.IsZero() || updatedAt.After(ret.Time)) {
+ ret.Time, ret.User = updatedAt, updatedBy
+ }
+ }
+ if !ret.Time.IsZero() {
+ return ret.Time, ret.User, nil
+ }
+ return time.Time{}, User{}, nil //fmt.Errorf("idk how to get updated timestamp for %q", c.URLs[0])
+}
diff --git a/cmd/generate/src_contribs_test.go b/cmd/gen-imworkingon/src_contribs_test.go
index 57ffc0f..57ffc0f 100644
--- a/cmd/generate/src_contribs_test.go
+++ b/cmd/gen-imworkingon/src_contribs_test.go
diff --git a/cmd/generate/src_mastodon.go b/cmd/gen-imworkingon/src_mastodon.go
index b4b54a8..a3b9617 100644
--- a/cmd/generate/src_mastodon.go
+++ b/cmd/gen-imworkingon/src_mastodon.go
@@ -3,8 +3,9 @@ package main
import (
"html/template"
"net/url"
- "slices"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
type MastodonStatus struct {
@@ -19,12 +20,12 @@ func ReadStandups(server, username string) ([]*MastodonStatus, error) {
var account struct {
ID string `json:"id"`
}
- if err := httpGetJSON(server+"/api/v1/accounts/lookup?acct="+username, &account); err != nil {
+ if err := httpcache.GetJSON(server+"/api/v1/accounts/lookup?acct="+username, nil, &account); err != nil {
return nil, err
}
var statuses []*MastodonStatus
- if err := httpGetPaginatedJSON(server+"/api/v1/accounts/"+account.ID+"/statuses", &statuses, func(_ int) url.Values {
+ if err := httpcache.GetPaginatedJSON(server+"/api/v1/accounts/"+account.ID+"/statuses", nil, &statuses, func(_ int) url.Values {
params := make(url.Values)
params.Set("tagged", "DailyStandUp")
params.Set("exclude_reblogs", "true")
@@ -36,13 +37,5 @@ func ReadStandups(server, username string) ([]*MastodonStatus, error) {
return nil, err
}
- ignoreList := []string{
- "https://fosstodon.org/@lukeshu/112198267818432116",
- "https://fosstodon.org/@lukeshu/112198241414760456",
- }
- statuses = slices.DeleteFunc(statuses, func(status *MastodonStatus) bool {
- return slices.Contains(ignoreList, status.URL)
- })
-
return statuses, nil
}
diff --git a/cmd/generate/src_tags.go b/cmd/gen-imworkingon/src_tags.go
index 8dcf554..8dcf554 100644
--- a/cmd/generate/src_tags.go
+++ b/cmd/gen-imworkingon/src_tags.go
diff --git a/cmd/generate/src_upstreams.go b/cmd/gen-imworkingon/src_upstreams.go
index 03f72ec..03f72ec 100644
--- a/cmd/generate/src_upstreams.go
+++ b/cmd/gen-imworkingon/src_upstreams.go
diff --git a/cmd/gen-posix/data.go b/cmd/gen-posix/data.go
new file mode 100644
index 0000000..165ecbd
--- /dev/null
+++ b/cmd/gen-posix/data.go
@@ -0,0 +1,211 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "regexp"
+ "strings"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var IEEESA = Vendor{
+ Name: "IEEE-SA",
+ GetURL: func(id string) string { return fmt.Sprintf("http://standards.ieee.org/findstds/standard/%s.html", id) },
+ GetName: func(id string, url string) string {
+ html, err := httpcache.Get(url, nil)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ cmd := exec.Command("nokogiri", "-e", `puts $_.css("meta[name=\"des\"], meta[name=\"designation\"]").first["content"]`)
+ cmd.Stderr = os.Stderr
+ cmd.Stdin = strings.NewReader(html)
+ d, err := cmd.Output()
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ return strings.TrimSuffix(string(d), "\n")
+ },
+}
+
+var reIEEE = regexp.MustCompile(`standardNumber":"([^"]*)"`)
+
+var IEEEXplore = Vendor{
+ Name: "IEEE Xplore",
+ GetURL: func(id string) string { return fmt.Sprintf("http://ieeexplore.ieee.org/servlet/opac?punumber=%s", id) },
+ GetName: func(id string, url string) string {
+ if strings.HasSuffix(url, "ERROR") {
+ return "ERROR"
+ }
+ html, err := httpcache.Get(url, nil)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ m := reIEEE.FindStringSubmatch(html)
+ if m == nil {
+ panic(fmt.Errorf("URL=%q did not contain expected JSON", url))
+ }
+ return m[1]
+ },
+}
+
+var TOG = Vendor{
+ Name: "The Open Group",
+ GetURL: func(id string) string { return fmt.Sprintf("https://www2.opengroup.org/ogsys/catalog/%s", id) },
+ GetName: func(id string, url string) string { return id },
+}
+
+var TOGOnline = Vendor{
+ Name: "online",
+ GetURL: func(id string) string { return fmt.Sprintf("http://pubs.opengroup.org/onlinepubs/%s/", id) },
+ GetName: func(id string, url string) string { return url },
+}
+
+var ISO = Vendor{
+ Name: "ISO",
+ GetURL: func(id string) string {
+ return fmt.Sprintf("http://www.iso.org/iso/home/store/catalogue_tc/catalogue_detail.htm?csnumber=%s", id)
+ },
+ GetName: func(id string, url string) string {
+ html, err := httpcache.Get(url, nil)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ cmd := exec.Command("nokogiri", "-e", `puts $_.css("[itemprop=\"name\"]").first.text`)
+ cmd.Stderr = os.Stderr
+ cmd.Stdin = strings.NewReader(html)
+ d, err := cmd.Output()
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ return strings.TrimSuffix(string(d), "\n")
+ },
+}
+
+var Vendors = []Vendor{IEEESA, TOG, ISO}
+
+var Editions = []Edition{
+ {Name: "POSIX-2001 (Issue 6)", Docs: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "1003.1-2001", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Full, ID: "7683"},
+ }},
+ }},
+ {Name: "----->XBD-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C950"},
+ {Vendor: ISO, Type: Full, ID: "37312"},
+ }},
+ {Name: "----->XSH-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C951"},
+ {Vendor: ISO, Type: Full, ID: "37313"},
+ }},
+ {Name: "----->XCU-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C952"},
+ {Vendor: ISO, Type: Full, ID: "37314"},
+ }},
+ {Name: "----->XRAT-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C953"},
+ {Vendor: ISO, Type: Full, ID: "37315"},
+ }},
+
+ {Name: "POSIX-2001, 2002 Edition", Docs: []Document{
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2001-Cor_1-2002", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "9507"},
+ }},
+ {Vendor: TOG, Type: Patch, ID: "U057", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "T031"},
+ }},
+ }},
+ {Name: "----->XBD-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C031"},
+ {Vendor: ISO, Type: Full, ID: "38789", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-1-2003"},
+ }},
+ }},
+ {Name: "----->XSH-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C032"},
+ {Vendor: ISO, Type: Full, ID: "38790", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-2-2003"},
+ }},
+ }},
+ {Name: "----->XCU-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C033"},
+ {Vendor: ISO, Type: Full, ID: "38791", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-3-2003"},
+ }},
+ }},
+ {Name: "----->XRAT-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C034"},
+ {Vendor: ISO, Type: Full, ID: "38792", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-4-2003"},
+ }},
+ }},
+
+ {Name: "POSIX-2001, 2004 Edition", Docs: []Document{
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2001-Cor_2-2004", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "9022"},
+ {Vendor: IEEEXplore, Type: Full, ID: "9156"},
+ }},
+ {Vendor: TOG, Type: Patch, ID: "U059", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "T041"},
+ {Vendor: TOGOnline, Type: Full, ID: "009695399"},
+ }},
+ }},
+ {Name: "----->XBD-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C046"},
+ {Vendor: ISO, Type: Patch, ID: "40687"},
+ }},
+ {Name: "----->XSH-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C047"},
+ {Vendor: ISO, Type: Patch, ID: "40688"},
+ }},
+ {Name: "----->XCU-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C048"},
+ {Vendor: ISO, Type: Patch, ID: "40690"},
+ }},
+ {Name: "----->XRAT-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C049"},
+ {Vendor: ISO, Type: Patch, ID: "40691"},
+ }},
+
+ {Name: "POSIX-2008 (Issue 7)", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C082", Resellers: []Document{
+ {Vendor: TOGOnline, Type: Full, ID: "9699919799.2008edition"},
+ }},
+
+ {Vendor: IEEESA, Type: Full, ID: "1003.1-2008", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Full, ID: "4694974"},
+ }},
+
+ {Vendor: ISO, Type: Full, ID: "50516", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-2009"},
+ {Vendor: IEEEXplore, Type: Full, ID: "5393777"},
+ }},
+ }},
+ {Name: "POSIX-2008, 2013 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Patch, ID: "U130", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "C138"},
+ {Vendor: TOGOnline, Type: Full, ID: "9699919799.2013edition"},
+ }},
+
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2008-Cor_1-2013", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "6482152"},
+ {Vendor: IEEEXplore, Type: Full, ID: "6506089"},
+ }},
+
+ {Vendor: ISO, Type: Patch, ID: "62005"},
+ }},
+ {Name: "POSIX-2008, 2016 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Patch, ID: "U160", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "C165"},
+ {Vendor: TOGOnline, Type: Full, ID: "9699919799.2016edition"},
+ }},
+
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2008-Cor_2-2016", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "7542096"},
+ {Vendor: IEEEXplore, Type: Full, ID: "7582336"},
+ }},
+ }},
+}
+
+// SUSv2 http://pubs.opengroup.org/onlinepubs/007908799/
diff --git a/cmd/gen-posix/http_hacks.go b/cmd/gen-posix/http_hacks.go
new file mode 100644
index 0000000..16b8a8d
--- /dev/null
+++ b/cmd/gen-posix/http_hacks.go
@@ -0,0 +1,156 @@
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "os/exec"
+ "strings"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
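+// _checkURL verifies that url is still fetchable, trying a chain of
+// fallbacks (publications.opengroup.org for dead www2.opengroup.org
+// catalog links, then the Wayback Machine); it returns whichever URL
+// worked, or, for known-dead documents, the URL with "#ERROR"
+// appended instead of an error.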
+func _checkURL(url string) (string, error) {
+ switch {
+ case strings.HasPrefix(url, "https://web.archive.org/"):
+ _, err := httpcache.Get(url, nil)
+ return url, err
+ case strings.HasPrefix(url, "https://www2.opengroup.org/ogsys/catalog/"):
+ _, err := httpcache.Get(url, nil)
+ if err == nil {
+ return url, nil
+ }
+ if !errors.Is(err, os.ErrNotExist) { // don't hide non-404 errors
+ return "", err
+ }
+ suffix := strings.TrimPrefix(url, "https://www2.opengroup.org/ogsys/catalog/")
+ url2 := "https://publications.opengroup.org/" + strings.ToLower(suffix)
+ _, err = httpcache.Get(url2, nil)
+ if err == nil {
+ return url2, nil
+ }
+ if !errors.Is(err, os.ErrNotExist) { // don't hide non-404 errors
+ return "", err
+ }
+ url3, err := _checkURL("https://web.archive.org/web/20170102/" + url)
+ if err == nil {
+ return url3, nil
+ }
+ return url+"#ERROR", nil
+ case url == "http://ieeexplore.ieee.org/servlet/opac?punumber=7394900":
+ return url+"#ERROR", nil
+ default:
+ _, err := httpcache.Get(url, nil)
+ if err != nil && errors.Is(err, os.ErrNotExist) {
+ return _checkURL("https://web.archive.org/web/20170102/" + url)
+ }
+ return url, err
+ }
+}
+
+func checkURL(url string) string {
+ url2, err := _checkURL(url)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ return url2
+}
+
+func nokogiriIgnoreFailure(htmlBytes []byte, expr string) string {
+ cmd := exec.Command("nokogiri", "-e", "puts "+expr)
+ cmd.Stderr = io.Discard
+ cmd.Stdin = bytes.NewReader(htmlBytes)
+ outBytes, _ := cmd.Output()
+ return strings.TrimSpace(string(outBytes))
+}
+
+func mockRedirect(url string) *http.Response {
+ resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(""+
+ "HTTP/1.1 302 Found\r\n"+
+ "Location: "+url+"\r\n"+
+ "\r\n")), nil)
+ if err != nil {
+ panic(err)
+ }
+ return resp
+}
+
+func mockForbidden() *http.Response {
+ resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(""+
+ "HTTP/1.1 403 Forbidden\r\n"+
+ "\r\n")), nil)
+ if err != nil {
+ panic(err)
+ }
+ return resp
+}
+
+func modifyResponse(url string, entry httpcache.CacheEntry, resp *http.Response) *http.Response {
+ switch {
+ case strings.HasPrefix(url, "https://web.archive.org/"):
+ htmlBytes, _ := io.ReadAll(resp.Body)
+ _ = resp.Body.Close()
+
+ // native Wayback Machine redirect
+ redirect := nokogiriIgnoreFailure(htmlBytes, `$_.css("p.impatient a").first["href"]`)
+ if strings.HasPrefix(redirect, "https://web.archive.org/web/") {
+ return mockRedirect(redirect)
+ }
+
+ // silly TOG SSO
+ if strings.Contains(url, "sso.opengroup.org") {
+ if bytes.Contains(htmlBytes, []byte("document.forms.postbinding.submit()")) {
+ redirect := nokogiriIgnoreFailure(htmlBytes, `$_.css("#postbinding").first["action"]`)
+ if redirect != "" {
+ return mockRedirect(redirect)
+ }
+ }
+ if bytes.Contains(htmlBytes, []byte("General Authorization Error")) {
+ return mockForbidden()
+ }
+ }
+
+ // We drained resp.Body, so re-create it.
+ resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(string(entry))), nil)
+ if err != nil {
+ panic(err)
+ }
+ return resp
+ default:
+ return resp
+ }
+}
+
+type mock404 struct {
+ Msg string
+}
+
+// Is implements the interface for [errors.Is].
+func (e *mock404) Is(target error) bool {
+ return target == os.ErrNotExist
+}
+
+// Error implements [error].
+func (e *mock404) Error() string {
+ return e.Msg
+}
+
+func checkRedirect(req *http.Request, via []*http.Request) error {
+ // net/http.defaultCheckRedirect
+ if len(via) >= 10 {
+ return errors.New("stopped after 10 redirects")
+ }
+
+ // detect redirects that should be 404s
+ oldURL := via[len(via)-1].URL
+ newURL := req.URL
+ if (newURL.Path == "/" || newURL.Path == "") && !(oldURL.Path == "/" || oldURL.Path == "") {
+ return &mock404{Msg: fmt.Sprintf("should have been a 404: %q redirected to %q", oldURL.String(), newURL.String())}
+ }
+
+ return nil
+}
diff --git a/cmd/gen-posix/main.go b/cmd/gen-posix/main.go
new file mode 100644
index 0000000..6da598b
--- /dev/null
+++ b/cmd/gen-posix/main.go
@@ -0,0 +1,214 @@
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "os"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
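+// urls and names memoize Document.URL and Document.Name respectively,
+// keyed by vendor name + NUL + document ID, so that each document is
+// resolved at most once per run.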
+var urls = map[string]string{}
+var names = map[string]string{}
+
+func (doc Document) URL() string {
+ if doc.ID == "" {
+ return ""
+ }
+ key := doc.Vendor.Name + "\000" + doc.ID
+ if _, have := urls[key]; !have {
+ urls[key] = checkURL(doc.Vendor.GetURL(doc.ID))
+ }
+ return urls[key]
+}
+
+func (doc Document) Name() string {
+ if doc.ID == "" {
+ var names []string
+ for _, full := range doc.Fulls() {
+ names = append(names, full.Name())
+ }
+ switch len(names) {
+ case 0:
+ return "???"
+ case 1:
+ return names[0]
+ default:
+ // BUG(lukeshu): Handle un-IDed Documents with
+ // multiple IDed resellers.
+ panic("TODO")
+ }
+ }
+ key := doc.Vendor.Name + "\000" + doc.ID
+ if _, have := names[key]; !have {
+ names[key] = doc.Vendor.GetName(doc.ID, doc.URL())
+ }
+ return names[key]
+}
+
+func (doc Document) Fulls() []Document {
+ var ret []Document
+ for _, reseller := range doc.Resellers {
+ if doc.ID != "" && reseller.Vendor.Name == doc.Vendor.Name && reseller.ID == doc.ID {
+ continue
+ }
+ if reseller.Type == Full {
+ ret = append(ret, reseller)
+ }
+ }
+ return ret
+}
+
+func (doc Document) Patches() []Document {
+ var ret []Document
+ for _, reseller := range doc.Resellers {
+ if doc.ID != "" && reseller.Vendor.Name == doc.Vendor.Name && reseller.ID == doc.ID {
+ continue
+ }
+ if reseller.Type == Patch {
+ ret = append(ret, reseller)
+ }
+ }
+ return ret
+}
+
+func (doc Document) AsFull() *Document {
+ if doc.Vendor.Name == "" && doc.ID == "" {
+ return nil
+ }
+ ret := doc
+ ret.Resellers = ret.Fulls()
+ switch doc.Type {
+ case Full:
+ // Nothing to do
+ case Patch:
+ if len(ret.Resellers) == 0 {
+ return nil
+ }
+ ret.Type = Full
+ ret.ID = func() string {
+ var ids []string
+ for _, reseller := range ret.Resellers {
+ if reseller.Vendor.Name == doc.Vendor.Name {
+ ids = append(ids, reseller.ID)
+ }
+ }
+ switch len(ids) {
+ case 0:
+ return ""
+ case 1:
+ return ids[0]
+ default:
+ panic("wut")
+ }
+ }()
+ if ret.ID != "" {
+ ret.Resellers = ret.Fulls()
+ }
+ default:
+ panic("uhh")
+ }
+ return &ret
+}
+
+func (doc Document) AsPatch() *Document {
+ if doc.Vendor.Name == "" && doc.ID == "" {
+ return nil
+ }
+ ret := doc
+ switch doc.Type {
+ case Full:
+ return nil
+ case Patch:
+ ret.Resellers = doc.Patches()
+ default:
+ panic("no")
+ }
+ return &ret
+}
+
+func (ed Edition) DocsOrdered() []Document {
+ // This could be O(n), but this naive implementation is
+ // O(n^2). That's OK; n is small.
+ s := make([]Document, len(Vendors))
+ for i, vnd := range Vendors {
+ for _, doc := range ed.Docs {
+ if doc.Vendor.Name == vnd.Name {
+ s[i] = doc
+ }
+ }
+ }
+ return s
+}
+
+var tmpl = `{{define "document"}}{{if .}}
+ {{if .URL}}<a href="{{.URL}}" title="{{.Name}}">{{.Name}}</a>{{else}}{{.Name}}{{end}}
+ {{range .Resellers}}
+ <a href="{{.URL}}" title="{{.Name}}">({{.Vendor.Name}})</a>
+ {{end}}
+{{end}}{{end}}
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="utf-8">
+ <title>POSIX Editions</title>
+ <style>
+ body { font-size: 8px; }
+ table { border-collapse: collapse; }
+ th, td { border: solid 1px black; }
+ tr:not(:first-child):not(:nth-child(2)) th {
+ font-family: monospace;
+ text-align: left;
+ }
+ </style>
+ </head>
+ <body>
+ <p>There's a typo in the "standardNumber" in IEEE
+ Xplore's records forfor the 2004 edition of 1003.1;
+ it says 2014 instead or 2004. The actual document
+ says 2004 though.</p>
+ <table>
+ <caption><p>POSIX: C & Shell (1997-present)</p></caption>
+ <tr><td rowspan=2></td>{{range .Vendors}}<th colspan=2>{{.Name}}</th>{{end}}</tr>
+ <tr>{{range .Vendors}}<th>Full</th><th>Patch</th>{{end}}</tr>
+ {{range .Editions}}<tr>
+ <th>{{.Name}}</th>
+ {{range .DocsOrdered}}
+ <td>{{template "document" .AsFull}}</td><td>{{template "document" .AsPatch}}</td>
+ {{end}}
+ </tr>{{end}}
+ </table>
+ </body>
+</html>
+`
+
+func mainWithError() error {
+ httpcache.UserAgent = "https://git.lukeshu.com/www/tree/cmd/gen-posix"
+ httpcache.ModifyResponse = modifyResponse
+ httpcache.CheckRedirect = checkRedirect
+
+ tmpl := template.Must(template.New("page").Parse(tmpl))
+
+ var out bytes.Buffer
+ if err := tmpl.Execute(&out, map[string]interface{}{
+ "Vendors": Vendors,
+ "Editions": Editions,
+ }); err != nil {
+ return err
+ }
+ if err := os.WriteFile("public/posix/index.new.html", out.Bytes(), 0666); err != nil {
+ return err
+ }
+ if err := os.Rename("public/posix/index.new.html", "public/posix/index.html"); err != nil {
+ return err
+ }
+ return nil
+}
+
+func main() {
+ if err := mainWithError(); err != nil {
+ fmt.Fprintf(os.Stderr, "%s: error: %v\n", os.Args[0], err)
+ os.Exit(1)
+ }
+}
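
The publish step writes to index.new.html and then renames it over index.html, so a crash mid-write never leaves a truncated page; a rename within one filesystem replaces the target atomically on POSIX systems. The same pattern as a standalone helper (a sketch; writeFileAtomic is a hypothetical name):

    // writeFileAtomic writes data to dst+".new" and renames it into
    // place, mirroring the write-then-rename done in mainWithError.
    func writeFileAtomic(dst string, data []byte) error {
    	tmp := dst + ".new"
    	if err := os.WriteFile(tmp, data, 0666); err != nil {
    		return err
    	}
    	return os.Rename(tmp, dst)
    }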
diff --git a/cmd/gen-posix/types.go b/cmd/gen-posix/types.go
new file mode 100644
index 0000000..9bb4c2d
--- /dev/null
+++ b/cmd/gen-posix/types.go
@@ -0,0 +1,26 @@
+package main
+
+type Vendor struct {
+ Name string
+ GetURL func(id string) string
+ GetName func(id string, url string) string
+}
+
+type Type int
+
+const (
+ Full Type = 0
+ Patch Type = 1
+)
+
+type Edition struct {
+ Name string
+ Docs []Document
+}
+
+type Document struct {
+ Vendor Vendor
+ Type Type
+ ID string
+ Resellers []Document
+}
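
A Document either carries its own ID or is reachable through Resellers, and Vendor.GetURL/GetName turn a bare ID into a link and display name. An entirely hypothetical entry showing how the types compose (the real tables live in cmd/gen-posix/data.go and may differ):

    // Hypothetical vendor and edition; not taken from data.go.
    var exampleVendor = Vendor{
    	Name:    "ExampleOrg",
    	GetURL:  func(id string) string { return "https://example.org/doc/" + id },
    	GetName: func(id string, url string) string { return "ExampleOrg " + id },
    }

    var exampleEdition = Edition{
    	Name: "2001",
    	Docs: []Document{
    		{Vendor: exampleVendor, Type: Full, ID: "1234"},
    	},
    }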
diff --git a/cmd/generate/httpcache.go b/cmd/generate/httpcache.go
deleted file mode 100644
index 04762e3..0000000
--- a/cmd/generate/httpcache.go
+++ /dev/null
@@ -1,95 +0,0 @@
-package main
-
-import (
- "encoding/json"
- "fmt"
- "io"
- "net/http"
- "net/url"
- "os"
- "path/filepath"
-)
-
-var httpCache = map[string]string{}
-
-func httpGet(u string) (string, error) {
- if cache, ok := httpCache[u]; ok {
- fmt.Printf("CACHE-GET %q\n", u)
- return cache, nil
- }
- if err := os.Mkdir(".http-cache", 0777); err != nil && !os.IsExist(err) {
- return "", err
- }
- cacheFile := filepath.Join(".http-cache", url.QueryEscape(u))
- if bs, err := os.ReadFile(cacheFile); err == nil {
- httpCache[u] = string(bs)
- fmt.Printf("CACHE-GET %q\n", u)
- return httpCache[u], nil
- } else if !os.IsNotExist(err) {
- return "", err
- }
-
- fmt.Printf("GET %q...", u)
- resp, err := http.Get(u)
- if err != nil {
- fmt.Printf(" err\n")
- return "", err
- }
- if resp.StatusCode != http.StatusOK {
- fmt.Printf(" err\n")
- return "", fmt.Errorf("unexpected HTTP status: %v", resp.Status)
- }
- bs, err := io.ReadAll(resp.Body)
- if err != nil {
- fmt.Printf(" err\n")
- return "", err
- }
- fmt.Printf(" ok\n")
- if err := os.WriteFile(cacheFile, bs, 0666); err != nil {
- return "", err
- }
- httpCache[u] = string(bs)
- return httpCache[u], nil
-}
-
-func httpGetJSON(u string, out any) error {
- str, err := httpGet(u)
- if err != nil {
- return err
- }
- return json.Unmarshal([]byte(str), out)
-}
-
-func httpGetPaginatedJSON[T any](uStr string, out *[]T, pageFn func(i int) url.Values) error {
- u, err := url.Parse(uStr)
- if err != nil {
- return err
- }
- query := u.Query()
-
- for i := 0; true; i++ {
- pageParams := pageFn(i)
- for k, v := range pageParams {
- query[k] = v
- }
-
- u.RawQuery = query.Encode()
- var resp []T
- if err := httpGetJSON(u.String(), &resp); err != nil {
- return err
- }
- fmt.Printf(" -> %d records\n", len(resp))
- if len(resp) == 0 {
- break
- }
- *out = append(*out, resp...)
- }
-
- return nil
-}
-
-func githubPagination(i int) url.Values {
- params := make(url.Values)
- params.Set("page", fmt.Sprintf("%v", i+1))
- return params
-}
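
For reference, the deleted pagination helper was driven like this: the page function mutates the query string on each iteration, and fetching stops at the first empty page. A usage sketch against the removed API (OWNER/REPO and the endpoint are illustrative only):

    // exampleFetchAllComments pulls every page of issue comments using
    // the deleted httpGetPaginatedJSON/githubPagination helpers.
    func exampleFetchAllComments() error {
    	var comments []struct {
    		Body string `json:"body"`
    	}
    	return httpGetPaginatedJSON(
    		"https://api.github.com/repos/OWNER/REPO/issues/1/comments",
    		&comments,
    		githubPagination, // ?page=1, ?page=2, ... until an empty page
    	)
    }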
diff --git a/cmd/generate/src_contribs.go b/cmd/generate/src_contribs.go
deleted file mode 100644
index 6db6764..0000000
--- a/cmd/generate/src_contribs.go
+++ /dev/null
@@ -1,400 +0,0 @@
-package main
-
-import (
- "fmt"
- "net/url"
- "os"
- "regexp"
- "strings"
- "time"
-
- "sigs.k8s.io/yaml"
-)
-
-type User struct {
- Name string `json:"name"`
- URL string `json:"url"`
-}
-
-type Contribution struct {
- URLs []string `json:"urls"`
- Tags []string `json:"tags"`
- SponsoredBy string `json:"sponsored-by"`
- Desc string `json:"desc"`
-
- SubmittedAt time.Time `json:"submitted-at"`
- LastUpdatedAt time.Time `json:"last-updated-at"`
- LastUpdatedBy User `json:"last-updated-by"`
- Status string `json:"status"`
-
- StatusClass string `json:"-"`
-}
-
-func ReadContribs(filename string) ([]Contribution, error) {
- bs, err := os.ReadFile(filename)
- if err != nil {
- return nil, fmt.Errorf("contribs: %q: %w", filename, err)
- }
- var ret []Contribution
- if err := yaml.UnmarshalStrict(bs, &ret); err != nil {
- return nil, fmt.Errorf("contribs: %q: %w", filename, err)
- }
- for i := range ret {
- contrib := ret[i]
- if err := contrib.Fill(); err != nil {
- return nil, fmt.Errorf("contribs: %q: %w", filename, err)
- }
- ret[i] = contrib
- }
- return ret, nil
-}
-
-func (c *Contribution) Fill() error {
- var err error
- if c.SubmittedAt.IsZero() {
- c.SubmittedAt, err = c.fetchSubmittedAt()
- if err != nil {
- return err
- }
- }
- if c.LastUpdatedAt.IsZero() {
- c.LastUpdatedAt, c.LastUpdatedBy, err = c.fetchLastUpdated()
- if err != nil {
- return err
- }
- }
- if c.Status == "" {
- c.Status, err = c.fetchStatus()
- if err != nil {
- return err
- }
- }
- c.StatusClass, err = classifyStatus(c.Status)
- if err != nil {
- return err
- }
- for _, u := range c.URLs {
- if m := reGoLangGerritCL.FindStringSubmatch(u); m != nil {
- c.URLs = append(c.URLs, "https://golang.org/cl/"+m[1])
- }
- }
- return nil
-}
-
-func classifyStatus(status string) (string, error) {
- switch {
- case strings.Contains(status, "released") || strings.Contains(status, "deployed"):
- return "released", nil
- case strings.Contains(status, "merged"):
- return "merged", nil
- case strings.Contains(status, "open"):
- return "open", nil
- case strings.Contains(status, "closed") || strings.Contains(status, "locked"):
- return "closed", nil
- default:
- return "", fmt.Errorf("unrecognized status string: %q", status)
- }
-}
-
-var (
- reGoLangGerritCL = regexp.MustCompile(`https://go-review\.googlesource\.com/c/[^/?#]+/\+/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
- reGitHubPR = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/pull/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
- reGitHubCommit = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/commit/([0-9a-f]+)(?:\?[^#]*)?(?:#.*)?$`)
- reGitLabMR = regexp.MustCompile(`^https://([^/]+)/([^?#]+)/-/merge_requests/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
- rePiperMailDate = regexp.MustCompile(`^\s*<I>([^<]+)</I>\s*$`)
-)
-
-const (
- statusOpen = "open"
- statusMerged = "merged, not yet in a release"
- statusReleasedFmt = "merged, released in %s"
-)
-
-func (c Contribution) fetchStatus() (string, error) {
- if m := reGitHubPR.FindStringSubmatch(c.URLs[0]); m != nil {
- user := m[1]
- repo := m[2]
- prnum := m[3]
-
- urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
-
- var obj struct {
- // State values are "open" and "closed".
- State string `json:"state"`
- Merged bool `json:"merged"`
- MergeCommitSha string `json:"merge_commit_sha"`
- }
- if err := httpGetJSON(urlStr, &obj); err != nil {
- return "", err
- }
- ret := obj.State
- if obj.Merged {
- ret = statusMerged
- tag, err := getGitTagThatContainsAll("https://github.com/"+user+"/"+repo, obj.MergeCommitSha)
- if err != nil {
- return "", err
- }
- if tag != "" {
- ret = fmt.Sprintf(statusReleasedFmt, tag)
- }
- }
-
- return ret, nil
- }
- if m := reGitLabMR.FindStringSubmatch(c.URLs[0]); m != nil {
- authority := m[1]
- projectID := m[2]
- mrnum := m[3]
-
- urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
-
- var obj struct {
- // State values are "opened", "closed", "locked", and "merged".
- State string `json:"state"`
- MergeCommitSha string `json:"merge_commit_sha"`
- SquashCommitSha string `json:"squash_commit_sha"`
- }
- if err := httpGetJSON(urlStr, &obj); err != nil {
- return "", err
- }
-
- ret := obj.State
- if ret == "opened" {
- ret = statusOpen
- }
-
- if ret == "merged" {
- ret = statusMerged
- var mergeCommit string
- if obj.MergeCommitSha != "" {
- mergeCommit = obj.MergeCommitSha
- }
- if obj.SquashCommitSha != "" {
- mergeCommit = obj.SquashCommitSha
- }
- if mergeCommit != "" {
- tag, err := getGitTagThatContainsAll("https://"+authority+"/"+projectID+".git", mergeCommit)
- if err != nil {
- return "", err
- }
- if tag != "" {
- ret = fmt.Sprintf(statusReleasedFmt, tag)
- }
- }
- }
-
- return ret, nil
- }
- if len(c.URLs) > 1 {
- var gitURL string
- var gitCommits []string
- for _, u := range c.URLs[1:] {
- if m := reGitHubCommit.FindStringSubmatch(u); m != nil {
- user := m[1]
- repo := m[2]
- hash := m[3]
-
- gitURL = "https://github.com/" + user + "/" + repo
- gitCommits = append(gitCommits, hash)
- }
- }
- if len(gitCommits) > 0 {
- ret := statusMerged
- tag, err := getGitTagThatContainsAll(gitURL, gitCommits...)
- if err != nil {
- return "", err
- }
- if tag != "" {
- ret = fmt.Sprintf(statusReleasedFmt, tag)
- }
- return ret, nil
- }
- }
- return "", fmt.Errorf("idk how to get status for %q", c.URLs[0])
-}
-
-func (c Contribution) fetchSubmittedAt() (time.Time, error) {
- if m := reGitHubPR.FindStringSubmatch(c.URLs[0]); m != nil {
- user := m[1]
- repo := m[2]
- prnum := m[3]
-
- urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/pulls/" + prnum
-
- var obj struct {
- CreatedAt time.Time `json:"created_at"`
- }
- if err := httpGetJSON(urlStr, &obj); err != nil {
- return time.Time{}, err
- }
- return obj.CreatedAt, nil
- }
- if m := reGitLabMR.FindStringSubmatch(c.URLs[0]); m != nil {
- authority := m[1]
- projectID := m[2]
- mrnum := m[3]
-
- urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
-
- var obj struct {
- CreatedAt time.Time `json:"created_at"`
- }
- if err := httpGetJSON(urlStr, &obj); err != nil {
- return time.Time{}, err
- }
- return obj.CreatedAt, nil
- }
- if strings.Contains(c.URLs[0], "/pipermail/") {
- htmlStr, err := httpGet(c.URLs[0])
- if err != nil {
- return time.Time{}, err
- }
- for _, line := range strings.Split(htmlStr, "\n") {
- if m := rePiperMailDate.FindStringSubmatch(line); m != nil {
- return time.Parse(time.UnixDate, m[1])
- }
- }
- }
- return time.Time{}, fmt.Errorf("idk how to get created timestamp for %q", c.URLs[0])
-}
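
The pipermail branch scrapes the archive page for the italicized date line and parses it with time.UnixDate. The matched input looks roughly like this (an illustrative line, not taken from a real archive):

    // Sketch: a pipermail archive line that rePiperMailDate matches;
    // the captured group is handed to time.Parse(time.UnixDate, ...).
    const examplePipermailLine = ` <I>Sat Jan  7 12:34:56 UTC 2023</I>`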
-
-func (c Contribution) fetchLastUpdated() (time.Time, User, error) {
- if m := reGitHubPR.FindStringSubmatch(c.URLs[0]); m != nil {
- user := m[1]
- repo := m[2]
- prnum := m[3]
-
- var obj struct {
- UpdatedAt time.Time `json:"updated_at"`
- MergedAt time.Time `json:"merged_at"`
- MergedBy struct {
- Login string `json:"login"`
- HTMLURL string `json:"html_url"`
- } `json:"merged_by"`
- }
- if err := httpGetJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum, &obj); err != nil {
- return time.Time{}, User{}, err
- }
-
- retUpdatedAt := obj.UpdatedAt
- var retUser User
-
- if obj.MergedAt == retUpdatedAt {
- retUser.Name = obj.MergedBy.Login
- retUser.URL = obj.MergedBy.HTMLURL
- }
- if retUser == (User{}) {
- // "normal" comments
- var comments []struct {
- UpdatedAt time.Time `json:"updated_at"`
- User struct {
- Login string `json:"login"`
- HTMLURL string `json:"html_url"`
- } `json:"user"`
- }
- if err := httpGetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", &comments, githubPagination); err != nil {
- return time.Time{}, User{}, err
- }
- for _, comment := range comments {
- if comment.UpdatedAt == retUpdatedAt || comment.UpdatedAt.Add(1*time.Second) == retUpdatedAt {
- retUser.Name = comment.User.Login
- retUser.URL = comment.User.HTMLURL
- break
- }
- }
- }
- if retUser == (User{}) {
- // comments on a specific part of the diff
- var reviewComments []struct {
- UpdatedAt time.Time `json:"updated_at"`
- User struct {
- Login string `json:"login"`
- HTMLURL string `json:"html_url"`
- } `json:"user"`
- }
- if err := httpGetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", &reviewComments, githubPagination); err != nil {
- return time.Time{}, User{}, err
- }
- for _, comment := range reviewComments {
- if comment.UpdatedAt == retUpdatedAt {
- retUser.Name = comment.User.Login
- retUser.URL = comment.User.HTMLURL
- break
- }
- }
- }
- if retUser == (User{}) {
- var events []struct {
- CreatedAt time.Time `json:"created_at"`
- Actor struct {
- Login string `json:"login"`
- HTMLURL string `json:"html_url"`
- } `json:"actor"`
- }
- if err := httpGetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", &events); err != nil {
- return time.Time{}, User{}, err
- }
- for _, event := range events {
- if event.CreatedAt == retUpdatedAt {
- retUser.Name = event.Actor.Login
- retUser.URL = event.Actor.HTMLURL
- break
- }
- }
- }
-
- return retUpdatedAt, retUser, nil
- }
- if m := reGitLabMR.FindStringSubmatch(c.URLs[0]); m != nil {
- authority := m[1]
- projectID := m[2]
- mrnum := m[3]
-
- urlStr := "https://" + authority + "/api/v4/projects/" + url.QueryEscape(projectID) + "/merge_requests/" + mrnum
-
- var obj struct {
- UpdatedAt time.Time `json:"updated_at"`
- }
- if err := httpGetJSON(urlStr, &obj); err != nil {
- return time.Time{}, User{}, err
- }
- return obj.UpdatedAt, User{}, nil
- }
-
- var ret time.Time
- if len(c.URLs) > 1 {
- for _, u := range c.URLs[1:] {
- if m := reGitHubCommit.FindStringSubmatch(u); m != nil {
- user := m[1]
- repo := m[2]
- hash := m[3]
-
- urlStr := "https://api.github.com/repos/" + user + "/" + repo + "/commits/" + hash
- var obj struct {
- Commit struct {
- Author struct {
- Date time.Time `json:"date"`
- } `json:"author"`
- Committer struct {
- Date time.Time `json:"date"`
- } `json:"committer"`
- } `json:"commit"`
- }
- if err := httpGetJSON(urlStr, &obj); err != nil {
- return time.Time{}, User{}, err
- }
- if obj.Commit.Author.Date.After(ret) {
- ret = obj.Commit.Author.Date
- }
- if obj.Commit.Committer.Date.After(ret) {
- ret = obj.Commit.Committer.Date
- }
- }
- }
- }
- if !ret.IsZero() {
- return ret, User{}, nil
- }
-
- return time.Time{}, User{}, nil //fmt.Errorf("idk how to get updated timestamp for %q", c.URLs[0])
-}