-rw-r--r--  .gitignore                                                                                       |   2
-rw-r--r--  Makefile                                                                                         |  15
-rw-r--r--  cmd/gen-imworkingon/calendar.go (renamed from cmd/generate/calendar.go)                          |   0
-rw-r--r--  cmd/gen-imworkingon/forge_forgejo.go (renamed from cmd/generate/forge_forgejo.go)                |  14
-rw-r--r--  cmd/gen-imworkingon/forge_gerrit.go (renamed from cmd/generate/forge_gerrit.go)                  |   6
-rw-r--r--  cmd/gen-imworkingon/forge_github.go (renamed from cmd/generate/forge_github.go)                  |  25
-rw-r--r--  cmd/gen-imworkingon/forge_gitlab.go (renamed from cmd/generate/forge_gitlab.go)                  |  10
-rw-r--r--  cmd/gen-imworkingon/forge_part_git.go (renamed from cmd/generate/forge_part_git.go)              |   4
-rw-r--r--  cmd/gen-imworkingon/forge_part_pipermail.go (renamed from cmd/generate/forge_part_pipermail.go)  |   9
-rw-r--r--  cmd/gen-imworkingon/gitcache.go (renamed from cmd/generate/gitcache.go)                          |   0
-rw-r--r--  cmd/gen-imworkingon/imworkingon.html.tmpl (renamed from cmd/generate/imworkingon.html.tmpl)      |   0
-rw-r--r--  cmd/gen-imworkingon/main.go (renamed from cmd/generate/main.go)                                  |  23
-rw-r--r--  cmd/gen-imworkingon/src_contribs.go (renamed from cmd/generate/src_contribs.go)                  |   0
-rw-r--r--  cmd/gen-imworkingon/src_contribs_test.go (renamed from cmd/generate/src_contribs_test.go)        |   0
-rw-r--r--  cmd/gen-imworkingon/src_mastodon.go (renamed from cmd/generate/src_mastodon.go)                  |  15
-rw-r--r--  cmd/gen-imworkingon/src_tags.go (renamed from cmd/generate/src_tags.go)                          |   0
-rw-r--r--  cmd/gen-imworkingon/src_upstreams.go (renamed from cmd/generate/src_upstreams.go)                |   0
-rw-r--r--  cmd/gen-posix/data.go                                                                            | 211
-rw-r--r--  cmd/gen-posix/http_hacks.go                                                                      | 156
-rw-r--r--  cmd/gen-posix/main.go                                                                            | 214
-rw-r--r--  cmd/gen-posix/types.go                                                                           |  26
-rw-r--r--  cmd/generate/httpcache.go                                                                        | 139
-rw-r--r--  imworkingon/contribs.yml                                                                         |  30
-rw-r--r--  imworkingon/upstreams.yml                                                                        |  12
-rw-r--r--  lib/httpcache/httpcache.go                                                                       | 211
-rw-r--r--  lib/mailstuff/jwz.md (renamed from cmd/generate/mailstuff/jwz.md)                                |   0
-rw-r--r--  lib/mailstuff/mbox.go (renamed from cmd/generate/mailstuff/mbox.go)                              |   0
-rw-r--r--  lib/mailstuff/thread.go (renamed from cmd/generate/mailstuff/thread.go)                          |   0
-rw-r--r--  lib/mailstuff/thread_alg.go (renamed from cmd/generate/mailstuff/thread_alg.go)                  |   0
-rw-r--r--  public/index.html                                                                                |   2
-rw-r--r--  public/sponsor/index.html                                                                        |   2
31 files changed, 943 insertions(+), 183 deletions(-)
diff --git a/.gitignore b/.gitignore
index 92a23fe..e4fb3c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,5 @@
/public/imworkingon/index.html
/public/imworkingon/imworkingon.css
/public/imworkingon/imworkingon.css.map
+
+/public/posix/index.html
diff --git a/Makefile b/Makefile
index bd4534e..30107bb 100644
--- a/Makefile
+++ b/Makefile
@@ -1,12 +1,15 @@
+# build = not checked in to Git
build/files = public/imworkingon/index.html
build/files += public/imworkingon/imworkingon.css
+build/files += public/posix/index.html
+# generate = checked in to Git
generate/files = public/sponsor/liberapay-icon.svg
generate/files += public/sponsor/kofi-icon.png
generate/files += public/sponsor/patreon-icon.svg
generate/files += public/sponsor/github-icon.svg
-################################################################################
+# boilerplate ##################################################################
all: build
.PHONY: all
@@ -25,14 +28,20 @@ generate-clean:
rm -f -- $(generate/files)
.PHONY: generate generate-clean
-################################################################################
+# build = not checked in to Git ################################################
public/imworkingon/index.html: FORCE
- TZ=America/Denver go run ./cmd/generate
+ TZ=America/Denver go run ./cmd/gen-imworkingon
.PRECIOUS: public/imworkingon/index.html
public/imworkingon/imworkingon.css: public/imworkingon/imworkingon.scss
sass $< $@
+public/posix/index.html: FORCE
+ mkdir -p $(@D)
+ go run ./cmd/gen-posix
+
+# generate = checked in to Git #################################################
+
public/sponsor/liberapay-icon.svg:
curl -L https://liberapay.com/assets/liberapay/icon-v2_black.svg >$@
public/sponsor/kofi-icon.png:
diff --git a/cmd/generate/calendar.go b/cmd/gen-imworkingon/calendar.go
index 29c3318..29c3318 100644
--- a/cmd/generate/calendar.go
+++ b/cmd/gen-imworkingon/calendar.go
diff --git a/cmd/generate/forge_forgejo.go b/cmd/gen-imworkingon/forge_forgejo.go
index 84988f2..34ec767 100644
--- a/cmd/generate/forge_forgejo.go
+++ b/cmd/gen-imworkingon/forge_forgejo.go
@@ -4,6 +4,8 @@ import (
"fmt"
"regexp"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
var reForgejoPR = regexp.MustCompile(`^https://([^/]+)/([^/?#]+)/([^/?#]+)/pulls/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
@@ -33,7 +35,7 @@ func (f Forgejo) FetchStatus(urls []string) (string, error) {
Merged bool `json:"merged"`
MergeCommitSha string `json:"merge_commit_sha"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return "", err
}
ret := obj.State
@@ -68,7 +70,7 @@ func (f Forgejo) FetchSubmittedAt(urls []string) (time.Time, error) {
var obj struct {
CreatedAt time.Time `json:"created_at"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, err
}
return obj.CreatedAt, nil
@@ -103,7 +105,7 @@ func (f Forgejo) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"merged_by"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, User{}, err
}
@@ -127,7 +129,7 @@ func (f Forgejo) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"user"`
}
- if err := httpGetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
return time.Time{}, User{}, err
}
for _, comment := range comments {
@@ -147,7 +149,7 @@ func (f Forgejo) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"user"`
}
- if err := httpGetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", nil, &reviewComments, githubPagination); err != nil {
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", nil, &reviewComments, githubPagination); err != nil {
return time.Time{}, User{}, err
}
for _, comment := range reviewComments {
@@ -166,7 +168,7 @@ func (f Forgejo) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"actor"`
}
- if err := httpGetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", nil, &events); err != nil {
+ if err := httpcache.GetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", nil, &events); err != nil {
return time.Time{}, User{}, err
}
for _, event := range events {
diff --git a/cmd/generate/forge_gerrit.go b/cmd/gen-imworkingon/forge_gerrit.go
index 31f2256..05f0386 100644
--- a/cmd/generate/forge_gerrit.go
+++ b/cmd/gen-imworkingon/forge_gerrit.go
@@ -8,12 +8,14 @@ import (
"regexp"
"strings"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
-// httpGetGerritJSON is like [httpGetJSON], but
+// httpGetGerritJSON is like [httpcache.GetJSON], but
// https://gerrit-review.googlesource.com/Documentation/rest-api.html#output
func httpGetGerritJSON(u string, hdr map[string]string, out any) error {
- str, err := httpGet(u, hdr)
+ str, err := httpcache.Get(u, hdr)
if err != nil {
return err
}
diff --git a/cmd/generate/forge_github.go b/cmd/gen-imworkingon/forge_github.go
index 9f475a3..b657ad7 100644
--- a/cmd/generate/forge_github.go
+++ b/cmd/gen-imworkingon/forge_github.go
@@ -5,6 +5,8 @@ import (
"net/url"
"regexp"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
var reGitHubPR = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/pull/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
@@ -42,7 +44,7 @@ func (GitHub) FetchStatus(urls []string) (string, error) {
Merged bool `json:"merged"`
MergeCommitSha string `json:"merge_commit_sha"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return "", err
}
ret := obj.State
@@ -55,6 +57,17 @@ func (GitHub) FetchStatus(urls []string) (string, error) {
if tag != "" {
ret = fmt.Sprintf(statusReleasedFmt, tag)
}
+ } else if obj.State == "closed" {
+ var mergeCommits []string
+ for _, u := range urls {
+ if m := reGitHubCommit.FindStringSubmatch(u); m != nil && m[1] == user && m[2] == repo {
+ mergeCommits = append(mergeCommits, m[3])
+ }
+ }
+ tag, err := getGitTagThatContainsAll("https://github.com/"+user+"/"+repo, mergeCommits...)
+ if err == nil && tag != "" {
+ ret = fmt.Sprintf(statusReleasedFmt, tag)
+ }
}
return ret, nil
@@ -76,7 +89,7 @@ func (GitHub) FetchSubmittedAt(urls []string) (time.Time, error) {
var obj struct {
CreatedAt time.Time `json:"created_at"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, err
}
return obj.CreatedAt, nil
@@ -115,7 +128,7 @@ func (GitHub) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"merged_by"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, User{}, err
}
@@ -139,7 +152,7 @@ func (GitHub) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"user"`
}
- if err := httpGetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/comments", nil, &comments, githubPagination); err != nil {
return time.Time{}, User{}, err
}
for _, comment := range comments {
@@ -159,7 +172,7 @@ func (GitHub) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"user"`
}
- if err := httpGetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", nil, &reviewComments, githubPagination); err != nil {
+ if err := httpcache.GetPaginatedJSON("https://api.github.com/repos/"+user+"/"+repo+"/pulls/"+prnum+"/comments", nil, &reviewComments, githubPagination); err != nil {
return time.Time{}, User{}, err
}
for _, comment := range reviewComments {
@@ -178,7 +191,7 @@ func (GitHub) FetchLastUpdated(urls []string) (time.Time, User, error) {
HTMLURL string `json:"html_url"`
} `json:"actor"`
}
- if err := httpGetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", nil, &events); err != nil {
+ if err := httpcache.GetJSON("https://api.github.com/repos/"+user+"/"+repo+"/issues/"+prnum+"/events", nil, &events); err != nil {
return time.Time{}, User{}, err
}
for _, event := range events {
diff --git a/cmd/generate/forge_gitlab.go b/cmd/gen-imworkingon/forge_gitlab.go
index a1ea7c0..84a2285 100644
--- a/cmd/generate/forge_gitlab.go
+++ b/cmd/gen-imworkingon/forge_gitlab.go
@@ -5,6 +5,8 @@ import (
"net/url"
"regexp"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
var reGitLabMR = regexp.MustCompile(`^https://([^/]+)/([^?#]+)/-/merge_requests/([0-9]+)(?:\?[^#]*)?(?:#.*)?$`)
@@ -31,7 +33,7 @@ func (GitLab) FetchStatus(urls []string) (string, error) {
MergeCommitSha string `json:"merge_commit_sha"`
SquashCommitSha string `json:"squash_commit_sha"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return "", err
}
@@ -80,7 +82,7 @@ func (GitLab) FetchSubmittedAt(urls []string) (time.Time, error) {
var obj struct {
CreatedAt time.Time `json:"created_at"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, err
}
return obj.CreatedAt, nil
@@ -117,7 +119,7 @@ func (GitLab) FetchLastUpdated(urls []string) (time.Time, User, error) {
WebURL string `json:"web_url"`
} `json:"merged_by"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, User{}, err
}
@@ -148,7 +150,7 @@ func (GitLab) FetchLastUpdated(urls []string) (time.Time, User, error) {
} `json:"resolved_by"`
} `json:"notes"`
}
- if err := httpGetJSON(fmt.Sprintf("https://%s/%s/noteable/merge_request/%d/notes", authority, projectID, obj.ID), map[string]string{"X-Last-Fetched-At": "0"}, &notes); err != nil {
+ if err := httpcache.GetJSON(fmt.Sprintf("https://%s/%s/noteable/merge_request/%d/notes", authority, projectID, obj.ID), map[string]string{"X-Last-Fetched-At": "0"}, &notes); err != nil {
return time.Time{}, User{}, err
}
for _, note := range notes.Notes {
diff --git a/cmd/generate/forge_part_git.go b/cmd/gen-imworkingon/forge_part_git.go
index 5288286..5175750 100644
--- a/cmd/generate/forge_part_git.go
+++ b/cmd/gen-imworkingon/forge_part_git.go
@@ -4,6 +4,8 @@ import (
"fmt"
"regexp"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
var reGitHubCommit = regexp.MustCompile(`^https://github\.com/([^/?#]+)/([^/?#]+)/commit/([0-9a-f]+)(?:\?[^#]*)?(?:#.*)?$`)
@@ -62,7 +64,7 @@ func (PartGit) FetchLastUpdated(urls []string) (time.Time, User, error) {
} `json:"committer"`
} `json:"commit"`
}
- if err := httpGetJSON(urlStr, nil, &obj); err != nil {
+ if err := httpcache.GetJSON(urlStr, nil, &obj); err != nil {
return time.Time{}, User{}, err
}
if obj.Commit.Author.Date.After(ret) {
diff --git a/cmd/generate/forge_part_pipermail.go b/cmd/gen-imworkingon/forge_part_pipermail.go
index af6a009..9db498b 100644
--- a/cmd/generate/forge_part_pipermail.go
+++ b/cmd/gen-imworkingon/forge_part_pipermail.go
@@ -12,7 +12,8 @@ import (
"strings"
"time"
- "git.lukeshu.com/www/cmd/generate/mailstuff"
+ "git.lukeshu.com/www/lib/httpcache"
+ "git.lukeshu.com/www/lib/mailstuff"
)
var (
@@ -34,7 +35,7 @@ func (PartPiperMail) FetchSubmittedAt(urls []string) (time.Time, error) {
if !rePiperMailMessage.MatchString(u) {
continue
}
- htmlStr, err := httpGet(u, nil)
+ htmlStr, err := httpcache.Get(u, nil)
if err != nil {
return time.Time{}, err
}
@@ -108,7 +109,7 @@ func (p PartPiperMail) FetchLastUpdated(urls []string) (time.Time, User, error)
uYM := m[2]
//uInt := m[3]
- htmlStr, err := httpGet(u, nil)
+ htmlStr, err := httpcache.Get(u, nil)
if err != nil {
return time.Time{}, User{}, fmt.Errorf("could not fetch message: %w", err)
}
@@ -132,7 +133,7 @@ func (p PartPiperMail) FetchLastUpdated(urls []string) (time.Time, User, error)
for ym, mbox := uYM, []*mail.Message(nil); true; ym = p.nextMonth(ym) {
lenBefore := p.threadLen(thread)
- mboxGzStr, err := httpGet(uBase+ym+".txt.gz", nil)
+ mboxGzStr, err := httpcache.Get(uBase+ym+".txt.gz", nil)
if err != nil {
if ym == uYM || !errors.Is(err, os.ErrNotExist) {
return time.Time{}, User{}, fmt.Errorf("could not fetch mbox for %s: %w", ym, err)
diff --git a/cmd/generate/gitcache.go b/cmd/gen-imworkingon/gitcache.go
index 844408d..844408d 100644
--- a/cmd/generate/gitcache.go
+++ b/cmd/gen-imworkingon/gitcache.go
diff --git a/cmd/generate/imworkingon.html.tmpl b/cmd/gen-imworkingon/imworkingon.html.tmpl
index 415a252..415a252 100644
--- a/cmd/generate/imworkingon.html.tmpl
+++ b/cmd/gen-imworkingon/imworkingon.html.tmpl
diff --git a/cmd/generate/main.go b/cmd/gen-imworkingon/main.go
index f6171b0..c0c9723 100644
--- a/cmd/generate/main.go
+++ b/cmd/gen-imworkingon/main.go
@@ -4,15 +4,17 @@ import (
"bytes"
_ "embed"
"fmt"
+ "html/template"
"os"
"reflect"
+ "slices"
"sort"
"strings"
"time"
- "html/template"
-
"github.com/yuin/goldmark"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
func MarkdownToHTML(md string) (template.HTML, error) {
@@ -37,10 +39,25 @@ var timeTagTmpl = template.Must(template.New("time.tag.tmpl").
Parse(`<time datetime="{{ .Machine }}" title="{{ .HumanVerbose }}">{{ .HumanPretty }}</time>`))
func mainWithError() error {
- standups, err := ReadStandups("https://fosstodon.org", "lukeshu")
+ httpcache.UserAgent = "https://git.lukeshu.com/www/tree/cmd/gen-imworkingon"
+
+ standups, err := ReadStandups("https://social.coop", "lukeshu")
if err != nil {
return err
}
+ _standups, err := ReadStandups("https://fosstodon.org", "lukeshu")
+ if err != nil {
+ return err
+ }
+ standups = append(standups, _standups...)
+ standupIgnoreList := []string{
+ "https://fosstodon.org/@lukeshu/112198267818432116",
+ "https://fosstodon.org/@lukeshu/112198241414760456",
+ }
+ standups = slices.DeleteFunc(standups, func(status *MastodonStatus) bool {
+ return slices.Contains(standupIgnoreList, status.URL)
+ })
+
contribs, err := ReadContribs("imworkingon/contribs.yml")
if err != nil {
return err
diff --git a/cmd/generate/src_contribs.go b/cmd/gen-imworkingon/src_contribs.go
index 5694156..5694156 100644
--- a/cmd/generate/src_contribs.go
+++ b/cmd/gen-imworkingon/src_contribs.go
diff --git a/cmd/generate/src_contribs_test.go b/cmd/gen-imworkingon/src_contribs_test.go
index 57ffc0f..57ffc0f 100644
--- a/cmd/generate/src_contribs_test.go
+++ b/cmd/gen-imworkingon/src_contribs_test.go
diff --git a/cmd/generate/src_mastodon.go b/cmd/gen-imworkingon/src_mastodon.go
index 52dcfa4..a3b9617 100644
--- a/cmd/generate/src_mastodon.go
+++ b/cmd/gen-imworkingon/src_mastodon.go
@@ -3,8 +3,9 @@ package main
import (
"html/template"
"net/url"
- "slices"
"time"
+
+ "git.lukeshu.com/www/lib/httpcache"
)
type MastodonStatus struct {
@@ -19,12 +20,12 @@ func ReadStandups(server, username string) ([]*MastodonStatus, error) {
var account struct {
ID string `json:"id"`
}
- if err := httpGetJSON(server+"/api/v1/accounts/lookup?acct="+username, nil, &account); err != nil {
+ if err := httpcache.GetJSON(server+"/api/v1/accounts/lookup?acct="+username, nil, &account); err != nil {
return nil, err
}
var statuses []*MastodonStatus
- if err := httpGetPaginatedJSON(server+"/api/v1/accounts/"+account.ID+"/statuses", nil, &statuses, func(_ int) url.Values {
+ if err := httpcache.GetPaginatedJSON(server+"/api/v1/accounts/"+account.ID+"/statuses", nil, &statuses, func(_ int) url.Values {
params := make(url.Values)
params.Set("tagged", "DailyStandUp")
params.Set("exclude_reblogs", "true")
@@ -36,13 +37,5 @@ func ReadStandups(server, username string) ([]*MastodonStatus, error) {
return nil, err
}
- ignoreList := []string{
- "https://fosstodon.org/@lukeshu/112198267818432116",
- "https://fosstodon.org/@lukeshu/112198241414760456",
- }
- statuses = slices.DeleteFunc(statuses, func(status *MastodonStatus) bool {
- return slices.Contains(ignoreList, status.URL)
- })
-
return statuses, nil
}
diff --git a/cmd/generate/src_tags.go b/cmd/gen-imworkingon/src_tags.go
index 8dcf554..8dcf554 100644
--- a/cmd/generate/src_tags.go
+++ b/cmd/gen-imworkingon/src_tags.go
diff --git a/cmd/generate/src_upstreams.go b/cmd/gen-imworkingon/src_upstreams.go
index 03f72ec..03f72ec 100644
--- a/cmd/generate/src_upstreams.go
+++ b/cmd/gen-imworkingon/src_upstreams.go
diff --git a/cmd/gen-posix/data.go b/cmd/gen-posix/data.go
new file mode 100644
index 0000000..165ecbd
--- /dev/null
+++ b/cmd/gen-posix/data.go
@@ -0,0 +1,211 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "regexp"
+ "strings"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var IEEESA = Vendor{
+ Name: "IEEE-SA",
+ GetURL: func(id string) string { return fmt.Sprintf("http://standards.ieee.org/findstds/standard/%s.html", id) },
+ GetName: func(id string, url string) string {
+ html, err := httpcache.Get(url, nil)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ cmd := exec.Command("nokogiri", "-e", `puts $_.css("meta[name=\"des\"], meta[name=\"designation\"]").first["content"]`)
+ cmd.Stderr = os.Stderr
+ cmd.Stdin = strings.NewReader(html)
+ d, err := cmd.Output()
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ return strings.TrimSuffix(string(d), "\n")
+ },
+}
+
+var reIEEE = regexp.MustCompile(`standardNumber":"([^"]*)"`)
+
+var IEEEXplore = Vendor{
+ Name: "IEEE Xplore",
+ GetURL: func(id string) string { return fmt.Sprintf("http://ieeexplore.ieee.org/servlet/opac?punumber=%s", id) },
+ GetName: func(id string, url string) string {
+ if strings.HasSuffix(url, "ERROR") {
+ return "ERROR"
+ }
+ html, err := httpcache.Get(url, nil)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ m := reIEEE.FindStringSubmatch(html)
+ if m == nil {
+ panic(fmt.Errorf("URL=%q did not contain expected JSON", url))
+ }
+ return m[1]
+ },
+}
+
+var TOG = Vendor{
+ Name: "The Open Group",
+ GetURL: func(id string) string { return fmt.Sprintf("https://www2.opengroup.org/ogsys/catalog/%s", id) },
+ GetName: func(id string, url string) string { return id },
+}
+
+var TOGOnline = Vendor{
+ Name: "online",
+ GetURL: func(id string) string { return fmt.Sprintf("http://pubs.opengroup.org/onlinepubs/%s/", id) },
+ GetName: func(id string, url string) string { return url },
+}
+
+var ISO = Vendor{
+ Name: "ISO",
+ GetURL: func(id string) string {
+ return fmt.Sprintf("http://www.iso.org/iso/home/store/catalogue_tc/catalogue_detail.htm?csnumber=%s", id)
+ },
+ GetName: func(id string, url string) string {
+ html, err := httpcache.Get(url, nil)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ cmd := exec.Command("nokogiri", "-e", `puts $_.css("[itemprop=\"name\"]").first.text`)
+ cmd.Stderr = os.Stderr
+ cmd.Stdin = strings.NewReader(html)
+ d, err := cmd.Output()
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ return strings.TrimSuffix(string(d), "\n")
+ },
+}
+
+var Vendors = []Vendor{IEEESA, TOG, ISO}
+
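+// Editions lists each POSIX edition followed by rows for its
+// individual volumes; a "----->" prefix on a Name marks a volume row
+// belonging to the edition above it.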
+var Editions = []Edition{
+ {Name: "POSIX-2001 (Issue 6)", Docs: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "1003.1-2001", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Full, ID: "7683"},
+ }},
+ }},
+ {Name: "----->XBD-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C950"},
+ {Vendor: ISO, Type: Full, ID: "37312"},
+ }},
+ {Name: "----->XSH-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C951"},
+ {Vendor: ISO, Type: Full, ID: "37313"},
+ }},
+ {Name: "----->XCU-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C952"},
+ {Vendor: ISO, Type: Full, ID: "37314"},
+ }},
+ {Name: "----->XRAT-2001", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C953"},
+ {Vendor: ISO, Type: Full, ID: "37315"},
+ }},
+
+ {Name: "POSIX-2001, 2002 Edition", Docs: []Document{
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2001-Cor_1-2002", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "9507"},
+ }},
+ {Vendor: TOG, Type: Patch, ID: "U057", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "T031"},
+ }},
+ }},
+ {Name: "----->XBD-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C031"},
+ {Vendor: ISO, Type: Full, ID: "38789", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-1-2003"},
+ }},
+ }},
+ {Name: "----->XSH-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C032"},
+ {Vendor: ISO, Type: Full, ID: "38790", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-2-2003"},
+ }},
+ }},
+ {Name: "----->XCU-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C033"},
+ {Vendor: ISO, Type: Full, ID: "38791", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-3-2003"},
+ }},
+ }},
+ {Name: "----->XRAT-2001, 2002 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C034"},
+ {Vendor: ISO, Type: Full, ID: "38792", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-4-2003"},
+ }},
+ }},
+
+ {Name: "POSIX-2001, 2004 Edition", Docs: []Document{
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2001-Cor_2-2004", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "9022"},
+ {Vendor: IEEEXplore, Type: Full, ID: "9156"},
+ }},
+ {Vendor: TOG, Type: Patch, ID: "U059", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "T041"},
+ {Vendor: TOGOnline, Type: Full, ID: "009695399"},
+ }},
+ }},
+ {Name: "----->XBD-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C046"},
+ {Vendor: ISO, Type: Patch, ID: "40687"},
+ }},
+ {Name: "----->XSH-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C047"},
+ {Vendor: ISO, Type: Patch, ID: "40688"},
+ }},
+ {Name: "----->XCU-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C048"},
+ {Vendor: ISO, Type: Patch, ID: "40690"},
+ }},
+ {Name: "----->XRAT-2001, 2004 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C049"},
+ {Vendor: ISO, Type: Patch, ID: "40691"},
+ }},
+
+ {Name: "POSIX-2008 (Issue 7)", Docs: []Document{
+ {Vendor: TOG, Type: Full, ID: "C082", Resellers: []Document{
+ {Vendor: TOGOnline, Type: Full, ID: "9699919799.2008edition"},
+ }},
+
+ {Vendor: IEEESA, Type: Full, ID: "1003.1-2008", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Full, ID: "4694974"},
+ }},
+
+ {Vendor: ISO, Type: Full, ID: "50516", Resellers: []Document{
+ {Vendor: IEEESA, Type: Full, ID: "9945-2009"},
+ {Vendor: IEEEXplore, Type: Full, ID: "5393777"},
+ }},
+ }},
+ {Name: "POSIX-2008, 2013 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Patch, ID: "U130", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "C138"},
+ {Vendor: TOGOnline, Type: Full, ID: "9699919799.2013edition"},
+ }},
+
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2008-Cor_1-2013", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "6482152"},
+ {Vendor: IEEEXplore, Type: Full, ID: "6506089"},
+ }},
+
+ {Vendor: ISO, Type: Patch, ID: "62005"},
+ }},
+ {Name: "POSIX-2008, 2016 Edition", Docs: []Document{
+ {Vendor: TOG, Type: Patch, ID: "U160", Resellers: []Document{
+ {Vendor: TOG, Type: Full, ID: "C165"},
+ {Vendor: TOGOnline, Type: Full, ID: "9699919799.2016edition"},
+ }},
+
+ {Vendor: IEEESA, Type: Patch, ID: "1003.1-2008-Cor_2-2016", Resellers: []Document{
+ {Vendor: IEEEXplore, Type: Patch, ID: "7542096"},
+ {Vendor: IEEEXplore, Type: Full, ID: "7582336"},
+ }},
+ }},
+}
+
+// SUSv2 http://pubs.opengroup.org/onlinepubs/007908799/
diff --git a/cmd/gen-posix/http_hacks.go b/cmd/gen-posix/http_hacks.go
new file mode 100644
index 0000000..16b8a8d
--- /dev/null
+++ b/cmd/gen-posix/http_hacks.go
@@ -0,0 +1,156 @@
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "os/exec"
+ "strings"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
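+// _checkURL checks that a URL is still reachable, trying a few
+// site-specific fallbacks (publications.opengroup.org for the old TOG
+// catalog, then the Wayback Machine) and returning whichever URL
+// worked; a "#ERROR" suffix marks a known-dead link.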
+func _checkURL(url string) (string, error) {
+ switch {
+ case strings.HasPrefix(url, "https://web.archive.org/"):
+ _, err := httpcache.Get(url, nil)
+ return url, err
+ case strings.HasPrefix(url, "https://www2.opengroup.org/ogsys/catalog/"):
+ _, err := httpcache.Get(url, nil)
+ if err == nil {
+ return url, nil
+ }
+ if !errors.Is(err, os.ErrNotExist) { // don't hide non-404 errors
+ return "", err
+ }
+ suffix := strings.TrimPrefix(url, "https://www2.opengroup.org/ogsys/catalog/")
+ url2 := "https://publications.opengroup.org/" + strings.ToLower(suffix)
+ _, err = httpcache.Get(url2, nil)
+ if err == nil {
+ return url2, nil
+ }
+ if !errors.Is(err, os.ErrNotExist) { // don't hide non-404 errors
+ return "", err
+ }
+ url3, err := _checkURL("https://web.archive.org/web/20170102/" + url)
+ if err == nil {
+ return url3, nil
+ }
+ return url + "#ERROR", nil
+ case url == "http://ieeexplore.ieee.org/servlet/opac?punumber=7394900":
+ return url + "#ERROR", nil
+ default:
+ _, err := httpcache.Get(url, nil)
+ if err != nil && errors.Is(err, os.ErrNotExist) {
+ return _checkURL("https://web.archive.org/web/20170102/" + url)
+ }
+ return url, err
+ }
+}
+
+func checkURL(url string) string {
+ url2, err := _checkURL(url)
+ if err != nil {
+ panic(fmt.Errorf("URL=%q: %v", url, err))
+ }
+ return url2
+}
+
+func nokogiriIgnoreFailure(htmlBytes []byte, expr string) string {
+ cmd := exec.Command("nokogiri", "-e", "puts "+expr)
+ cmd.Stderr = io.Discard
+ cmd.Stdin = bytes.NewReader(htmlBytes)
+ outBytes, _ := cmd.Output()
+ return strings.TrimSpace(string(outBytes))
+}
+
+func mockRedirect(url string) *http.Response {
+ resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(""+
+ "HTTP/1.1 302 Found\r\n"+
+ "Location: "+url+"\r\n"+
+ "\r\n")), nil)
+ if err != nil {
+ panic(err)
+ }
+ return resp
+}
+
+func mockForbidden() *http.Response {
+ resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(""+
+ "HTTP/1.1 403 Forbidden\r\n"+
+ "\r\n")), nil)
+ if err != nil {
+ panic(err)
+ }
+ return resp
+}
+
+func modifyResponse(url string, entry httpcache.CacheEntry, resp *http.Response) *http.Response {
+ switch {
+ case strings.HasPrefix(url, "https://web.archive.org/"):
+ htmlBytes, _ := io.ReadAll(resp.Body)
+ _ = resp.Body.Close()
+
+ // native Wayback Machine redirect
+ redirect := nokogiriIgnoreFailure(htmlBytes, `$_.css("p.impatient a").first["href"]`)
+ if strings.HasPrefix(redirect, "https://web.archive.org/web/") {
+ return mockRedirect(redirect)
+ }
+
+ // silly TOG SSO
+ if strings.Contains(url, "sso.opengroup.org") {
+ if bytes.Contains(htmlBytes, []byte("document.forms.postbinding.submit()")) {
+ redirect := nokogiriIgnoreFailure(htmlBytes, `$_.css("#postbinding").first["action"]`)
+ if redirect != "" {
+ return mockRedirect(redirect)
+ }
+ }
+ if bytes.Contains(htmlBytes, []byte("General Authorization Error")) {
+ return mockForbidden()
+ }
+ }
+
+ // We drained resp.Body, so re-create it.
+ resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(string(entry))), nil)
+ if err != nil {
+ panic(err)
+ }
+ return resp
+ default:
+ return resp
+ }
+}
+
+type mock404 struct {
+ Msg string
+}
+
+// Is implements the interface for [errors.Is].
+func (e *mock404) Is(target error) bool {
+ return target == os.ErrNotExist
+}
+
+// Error implements [error].
+func (e *mock404) Error() string {
+ return e.Msg
+}
+
+func checkRedirect(req *http.Request, via []*http.Request) error {
+ // net/http.defaultCheckRedirect
+ if len(via) >= 10 {
+ return errors.New("stopped after 10 redirects")
+ }
+
+ // detect redirects that should be 404s
+ oldURL := via[len(via)-1].URL
+ newURL := req.URL
+ if (newURL.Path == "/" || newURL.Path == "") && !(oldURL.Path == "/" || oldURL.Path == "") {
+ return &mock404{Msg: fmt.Sprintf("should have been a 404: %q redirected to %q", oldURL.String(), newURL.String())}
+ }
+
+ return nil
+}
diff --git a/cmd/gen-posix/main.go b/cmd/gen-posix/main.go
new file mode 100644
index 0000000..6da598b
--- /dev/null
+++ b/cmd/gen-posix/main.go
@@ -0,0 +1,214 @@
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "os"
+
+ "git.lukeshu.com/www/lib/httpcache"
+)
+
+var urls = map[string]string{}
+var names = map[string]string{}
+
+func (doc Document) URL() string {
+ if doc.ID == "" {
+ return ""
+ }
+ key := doc.Vendor.Name + "\000" + doc.ID
+ if _, have := urls[key]; !have {
+ urls[key] = checkURL(doc.Vendor.GetURL(doc.ID))
+ }
+ return urls[key]
+}
+
+func (doc Document) Name() string {
+ if doc.ID == "" {
+ var names []string
+ for _, full := range doc.Fulls() {
+ names = append(names, full.Name())
+ }
+ switch len(names) {
+ case 0:
+ return "???"
+ case 1:
+ return names[0]
+ default:
+ // BUG(lukeshu): Handle un-IDed Documents with
+ // multiple IDed resellers.
+ panic("TODO")
+ }
+ }
+ key := doc.Vendor.Name + "\000" + doc.ID
+ if _, have := names[key]; !have {
+ names[key] = doc.Vendor.GetName(doc.ID, doc.URL())
+ }
+ return names[key]
+}
+
+func (doc Document) Fulls() []Document {
+ var ret []Document
+ for _, reseller := range doc.Resellers {
+ if doc.ID != "" && reseller.Vendor.Name == doc.Vendor.Name && reseller.ID == doc.ID {
+ continue
+ }
+ if reseller.Type == Full {
+ ret = append(ret, reseller)
+ }
+ }
+ return ret
+}
+
+func (doc Document) Patches() []Document {
+ var ret []Document
+ for _, reseller := range doc.Resellers {
+ if doc.ID != "" && reseller.Vendor.Name == doc.Vendor.Name && reseller.ID == doc.ID {
+ continue
+ }
+ if reseller.Type == Patch {
+ ret = append(ret, reseller)
+ }
+ }
+ return ret
+}
+
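+// AsFull projects doc down to its "full document" view: a Full keeps
+// only its Full resellers, while a Patch is replaced by the Full
+// edition it is republished as (or nil if there is none).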
+func (doc Document) AsFull() *Document {
+ if doc.Vendor.Name == "" && doc.ID == "" {
+ return nil
+ }
+ ret := doc
+ ret.Resellers = ret.Fulls()
+ switch doc.Type {
+ case Full:
+ // Nothing to do
+ case Patch:
+ if len(ret.Resellers) == 0 {
+ return nil
+ }
+ ret.Type = Full
+ ret.ID = func() string {
+ var ids []string
+ for _, reseller := range ret.Resellers {
+ if reseller.Vendor.Name == doc.Vendor.Name {
+ ids = append(ids, reseller.ID)
+ }
+ }
+ switch len(ids) {
+ case 0:
+ return ""
+ case 1:
+ return ids[0]
+ default:
+ panic("wut")
+ }
+ }()
+ if ret.ID != "" {
+ ret.Resellers = ret.Fulls()
+ }
+ default:
+ panic("uhh")
+ }
+ return &ret
+}
+
+func (doc Document) AsPatch() *Document {
+ if doc.Vendor.Name == "" && doc.ID == "" {
+ return nil
+ }
+ ret := doc
+ switch doc.Type {
+ case Full:
+ return nil
+ case Patch:
+ ret.Resellers = doc.Patches()
+ default:
+ panic("no")
+ }
+ return &ret
+}
+
+func (ed Edition) DocsOrdered() []Document {
+ // This could be O(n), but this naive implementation is
+ // O(n^2). It's OK, n is small.
+ s := make([]Document, len(Vendors))
+ for i, vnd := range Vendors {
+ for _, doc := range ed.Docs {
+ if doc.Vendor.Name == vnd.Name {
+ s[i] = doc
+ }
+ }
+ }
+ return s
+}
+
+var tmpl = `{{define "document"}}{{if .}}
+ {{if .URL}}<a href="{{.URL}}" title="{{.Name}}">{{.Name}}</a>{{else}}{{.Name}}{{end}}
+ {{range .Resellers}}
+ <a href="{{.URL}}" title="{{.Name}}">({{.Vendor.Name}})</a>
+ {{end}}
+{{end}}{{end}}
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="utf-8">
+ <title>POSIX Editions</title>
+ <style>
+ body { font-size: 8px; }
+ table { border-collapse: collapse; }
+ th, td { border: solid 1px black; }
+ tr:not(:first-child):not(:nth-child(2)) th {
+ font-family: monospace;
+ text-align: left;
+ }
+ </style>
+ </head>
+ <body>
+ <p>There's a typo in the "standardNumber" in IEEE
+ Xplore's records for the 2004 edition of 1003.1;
+ it says 2014 instead of 2004. The actual document
+ says 2004 though.</p>
+ <table>
+ <caption><p>POSIX: C & Shell (1997-present)</p></caption>
+ <tr><td rowspan=2></td>{{range .Vendors}}<th colspan=2>{{.Name}}</th>{{end}}</tr>
+ <tr>{{range .Vendors}}<th>Full</th><th>Patch</th>{{end}}</tr>
+ {{range .Editions}}<tr>
+ <th>{{.Name}}</th>
+ {{range .DocsOrdered}}
+ <td>{{template "document" .AsFull}}</td><td>{{template "document" .AsPatch}}</td>
+ {{end}}
+ </tr>{{end}}
+ </table>
+ </body>
+</html>
+`
+
+func mainWithError() error {
+ httpcache.UserAgent = "https://git.lukeshu.com/www/tree/cmd/gen-posix"
+ httpcache.ModifyResponse = modifyResponse
+ httpcache.CheckRedirect = checkRedirect
+
+ tmpl := template.Must(template.New("page").Parse(tmpl))
+
+ var out bytes.Buffer
+ if err := tmpl.Execute(&out, map[string]interface{}{
+ "Vendors": Vendors,
+ "Editions": Editions,
+ }); err != nil {
+ return err
+ }
+ if err := os.WriteFile("public/posix/index.new.html", out.Bytes(), 0666); err != nil {
+ return err
+ }
+ if err := os.Rename("public/posix/index.new.html", "public/posix/index.html"); err != nil {
+ return err
+ }
+ return nil
+}
+
+func main() {
+ if err := mainWithError(); err != nil {
+ fmt.Fprintf(os.Stderr, "%s: error: %v\n", os.Args[0], err)
+ os.Exit(1)
+ }
+}
diff --git a/cmd/gen-posix/types.go b/cmd/gen-posix/types.go
new file mode 100644
index 0000000..9bb4c2d
--- /dev/null
+++ b/cmd/gen-posix/types.go
@@ -0,0 +1,26 @@
+package main
+
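+// Vendor is a standards publisher: GetURL builds the catalog URL for
+// one of its document IDs, and GetName derives a human-readable name
+// for that document (possibly by scraping the URL).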
+type Vendor struct {
+ Name string
+ GetURL func(id string) string
+ GetName func(id string, url string) string
+}
+
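+// Type says whether a document is a complete standard or a
+// corrigendum ("patch") to one.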
+type Type int
+
+const (
+ Full Type = 0
+ Patch Type = 1
+)
+
+type Edition struct {
+ Name string
+ Docs []Document
+}
+
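+// Document is one vendor's offering of a (version of a) standard;
+// Resellers lists other offerings of the same document.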
+type Document struct {
+ Vendor Vendor
+ Type Type
+ ID string
+ Resellers []Document
+}
diff --git a/cmd/generate/httpcache.go b/cmd/generate/httpcache.go
deleted file mode 100644
index 6d663b7..0000000
--- a/cmd/generate/httpcache.go
+++ /dev/null
@@ -1,139 +0,0 @@
-package main
-
-import (
- "encoding/json"
- "fmt"
- "io"
- "net/http"
- "net/url"
- "os"
- "path/filepath"
- "sort"
-)
-
-type httpCacheEntry struct {
- Body string
- Err error
-}
-
-var httpCache = map[string]httpCacheEntry{}
-
-type httpStatusError struct {
- StatusCode int
- Status string
-}
-
-// Is implements the interface for [errors.Is].
-func (e *httpStatusError) Is(target error) bool {
- switch target {
- case os.ErrNotExist:
- return e.StatusCode == http.StatusNotFound
- default:
- return false
- }
-}
-
-// Error implements [error].
-func (e *httpStatusError) Error() string {
- return fmt.Sprintf("unexpected HTTP status: %v", e.Status)
-}
-
-func httpGet(u string, hdr map[string]string) (string, error) {
- cacheKey := url.QueryEscape(u)
- hdrKeys := make([]string, 0, len(hdr))
- for k := range hdr {
- hdrKeys = append(hdrKeys, http.CanonicalHeaderKey(k))
- }
- sort.Strings(hdrKeys)
- for _, k := range hdrKeys {
- cacheKey += "|" + url.QueryEscape(k) + ":" + url.QueryEscape(hdr[k])
- }
-
- if cache, ok := httpCache[cacheKey]; ok {
- fmt.Printf("CACHE-GET %q\n", u)
- return cache.Body, cache.Err
- }
- if err := os.Mkdir(".http-cache", 0777); err != nil && !os.IsExist(err) {
- return "", err
- }
- cacheFile := filepath.Join(".http-cache", cacheKey)
- if bs, err := os.ReadFile(cacheFile); err == nil {
- fmt.Printf("CACHE-GET %q\n", u)
- httpCache[cacheKey] = httpCacheEntry{Body: string(bs)}
- return httpCache[cacheKey].Body, nil
- } else if !os.IsNotExist(err) {
- httpCache[cacheKey] = httpCacheEntry{Err: err}
- return "", err
- }
-
- fmt.Printf("GET %q...", u)
- req, err := http.NewRequest(http.MethodGet, u, nil)
- if err != nil {
- httpCache[cacheKey] = httpCacheEntry{Err: err}
- return "", err
- }
- req.Header.Set("User-Agent", "https://git.lukeshu.com/www/tree/cmd/generate")
- for k, v := range hdr {
- req.Header.Add(k, v)
- }
- resp, err := http.DefaultClient.Do(req)
- if err != nil {
- fmt.Printf(" err\n")
- httpCache[cacheKey] = httpCacheEntry{Err: err}
- return "", err
- }
- if resp.StatusCode != http.StatusOK {
- fmt.Printf(" err\n")
- httpCache[cacheKey] = httpCacheEntry{Err: err}
- return "", &httpStatusError{StatusCode: resp.StatusCode, Status: resp.Status}
- }
- bs, err := io.ReadAll(resp.Body)
- if err != nil {
- fmt.Printf(" err\n")
- httpCache[cacheKey] = httpCacheEntry{Err: err}
- httpCache[cacheKey] = httpCacheEntry{Err: err}
- return "", err
- }
- fmt.Printf(" ok\n")
- if err := os.WriteFile(cacheFile, bs, 0666); err != nil {
- return "", err
- }
- httpCache[cacheKey] = httpCacheEntry{Body: string(bs)}
- return httpCache[cacheKey].Body, nil
-}
-
-func httpGetJSON(u string, hdr map[string]string, out any) error {
- str, err := httpGet(u, hdr)
- if err != nil {
- return err
- }
- return json.Unmarshal([]byte(str), out)
-}
-
-func httpGetPaginatedJSON[T any](uStr string, hdr map[string]string, out *[]T, pageFn func(i int) url.Values) error {
- u, err := url.Parse(uStr)
- if err != nil {
- return err
- }
- query := u.Query()
-
- for i := 0; true; i++ {
- pageParams := pageFn(i)
- for k, v := range pageParams {
- query[k] = v
- }
-
- u.RawQuery = query.Encode()
- var resp []T
- if err := httpGetJSON(u.String(), hdr, &resp); err != nil {
- return err
- }
- fmt.Printf(" -> %d records\n", len(resp))
- if len(resp) == 0 {
- break
- }
- *out = append(*out, resp...)
- }
-
- return nil
-}
diff --git a/imworkingon/contribs.yml b/imworkingon/contribs.yml
index 601cdf8..05c7ef4 100644
--- a/imworkingon/contribs.yml
+++ b/imworkingon/contribs.yml
@@ -1,4 +1,6 @@
-- urls: [https://github.com/flori/json/pull/567]
+- urls:
+ - https://github.com/flori/json/pull/567
+ - https://github.com/flori/json/commit/c57d33ec39344f7a6ae2786b8ac36892a51b03fe
tags: [Ruby, JSON, SoftwareFreedom]
id: ruby-json
desc: |
@@ -125,6 +127,7 @@
`html.UnescapeString` that were found when working on the
documentation parser in gotk4.
- urls: [https://github.com/luigifab/awf-extended/pull/9]
+ status: "merged, released in v2.9.0"
tags: [Parabola, GTK]
desc: |
Just a minor touch-up to `configure.ac` that I noticed could be
@@ -212,6 +215,7 @@
SIGTERM that it was sent... but it never would. This PR fixes
that.
- urls: [https://gitlab.archlinux.org/archlinux/packaging/packages/ruby/-/merge_requests/6]
+ tags: [Parabola, Ruby]
desc: |
Ruby's standard library has been going through a process of
"gemification" where it is converted into a set of Gems that are
@@ -226,12 +230,14 @@
future, it should also make it easier for downstream distros that
patch Ruby, such as applying [flori/json#567](#contrib-ruby-json).
- urls: [https://gitlab.archlinux.org/archlinux/packaging/packages/ruby/-/merge_requests/7]
+ tags: [Parabola, Ruby, docs]
desc: |
Arch Linux's `ruby-docs` package (version 3.2.5-1) is incomplete;
the `/usr/share/doc/ruby/capi/html/` directory is empty except for
`doxygen_crawl.html`. This fixes that, so that it includes the
full Doxygen output.
- urls: [https://github.com/flori/json/pull/599]
+ tags: [Ruby, JSON]
desc: |
The benchmark numbers given for the Ruby stdlib JSON
encoder/decoder are quite outdated, and the benchmark code has
@@ -241,3 +247,25 @@
[flori/json#567](#contrib-ruby-json) actually improves performance
rather than hurting it. While I believe Software Freedom would be
worth hurting performance, it doesn't have to!
+- urls: [https://github.com/python/typeshed/pull/13169]
+ tags: [GNU, GDB]
+ status: "merged, released in types-gdb 15.0.0.20241204"
+ desc: |
+ GDB has an internal Python module to support using Python to write
+ GDB extensions or using Python interactively in a GDB session.
+ Unfortunately, GDB does not provide type information for the
+ Python module; instead, folks writing GDB extensions in Python must
+ rely on the Typeshed if they want to type-check their code.
+
+ This PR fixes several mistakes in the type information.
+# - urls: ["https://sourceware.org/bugzilla/show_bug.cgi?id=32428"]
+# tags: [GNU, GDB]
+# - urls: ["https://gcc.gnu.org/bugzilla/show_bug.cgi?id=118212"]
+# tags: [GNU, GCC]
+- urls: [https://github.com/9fans/plan9port/pull/692]
+ tags: [Plan9, 9P, docs]
+ desc: |
+ Plan 9 from Userspace's `9pserve`/lib9pclient implement a
+ non-standard `openfd` extension to the 9P2000 protocol. However,
+ the documentation for the extension is inaccurate. Fix that, as
+ nowhere but the source code documents how it actually works.
diff --git a/imworkingon/upstreams.yml b/imworkingon/upstreams.yml
index 53733d9..7257c6d 100644
--- a/imworkingon/upstreams.yml
+++ b/imworkingon/upstreams.yml
@@ -1,4 +1,6 @@
-- urls: [https://github.com/flori/json]
+- urls:
+ - https://github.com/ruby/json
+ - https://github.com/flori/json
name: ruby-json
desc: |
Ruby's standard JSON gem (which comes bundled with the core Ruby
@@ -80,3 +82,11 @@
desc: |
vboot is Google's Verified Boot reference implementation, and is
used by the coreboot userspace tools.
+- urls: [https://github.com/python/typeshed]
+ desc: |
+ The Python Typeshed is a collection of type-annotations for
+ popular Python libraries whose upstreams don't provide
+ type-annotations.
+
+ This allows using `mypy` or other type-checkers to validate code
+ that uses such libraries.
diff --git a/lib/httpcache/httpcache.go b/lib/httpcache/httpcache.go
new file mode 100644
index 0000000..b2cc7fe
--- /dev/null
+++ b/lib/httpcache/httpcache.go
@@ -0,0 +1,211 @@
+package httpcache
+
+import (
+ "bufio"
+ hash "crypto/md5"
+ "encoding/hex"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+)
+
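+// Hooks that main() may set before the first Get: UserAgent is
+// required; ModifyResponse may rewrite a cached response before it is
+// returned; CheckRedirect is passed through to the http.Client.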
+var (
+ UserAgent string
+ ModifyResponse func(url string, entry CacheEntry, resp *http.Response) *http.Response
+ CheckRedirect func(req *http.Request, via []*http.Request) error
+)
+
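+// CacheEntry is the raw cached result: either a full HTTP response as
+// serialized by (*http.Response).Write (beginning "HTTP/"), or a
+// transport error recorded as "CLIENT/" + the error message.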
+type CacheEntry string
+
+var memCache = map[string]CacheEntry{}
+
+type httpStatusError struct {
+ StatusCode int
+ Status string
+}
+
+// Is implements the interface for [errors.Is].
+func (e *httpStatusError) Is(target error) bool {
+ switch target {
+ case os.ErrNotExist:
+ return e.StatusCode == http.StatusNotFound
+ default:
+ return false
+ }
+}
+
+// Error implements [error].
+func (e *httpStatusError) Error() string {
+ return fmt.Sprintf("unexpected HTTP status: %v", e.Status)
+}
+
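+// transport implements [http.RoundTripper], answering from an
+// in-memory map and then an on-disk ".http-cache" directory before
+// falling back to the real network via http.DefaultTransport.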
+type transport struct{}
+
+func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) {
+ // Return an error for failures that are the fault of
+ // something not on this box. Panic for failures that are
+ // the fault of this box.
+
+ // Initialize.
+ if err := os.Mkdir(".http-cache", 0777); err != nil && !os.IsExist(err) {
+ panic(err)
+ }
+
+ // Calculate cache-key.
+ u := req.URL.String()
+ cacheKey := url.QueryEscape(u)
+ hdrKeys := make([]string, 0, len(req.Header))
+ for k := range req.Header {
+ switch k {
+ case "User-Agent":
+ case "Referer":
+ default:
+ hdrKeys = append(hdrKeys, http.CanonicalHeaderKey(k))
+ }
+ }
+ sort.Strings(hdrKeys)
+ for _, k := range hdrKeys {
+ cacheKey += "|" + url.QueryEscape(k) + ":" + url.QueryEscape(req.Header[k][0])
+ }
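+ // Most filesystems cap filenames at 255 bytes; keep over-long
+ // keys unique by replacing the tail with a hex-encoded MD5 sum
+ // of the whole key.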
+ if len(cacheKey) >= 255 {
+ prefix := cacheKey[:255-(hash.Size*2)]
+ csum := hash.Sum([]byte(cacheKey))
+ suffix := hex.EncodeToString(csum[:])
+ cacheKey = prefix + suffix
+ }
+ cacheFile := filepath.Join(".http-cache", cacheKey)
+
+ // Check the mem cache.
+ if _, ok := memCache[cacheKey]; ok {
+ fmt.Printf("GET|CACHE|MEM %q...", u)
+ goto end
+ }
+ // Check the file cache.
+ if bs, err := os.ReadFile(cacheFile); err == nil {
+ str := string(bs)
+ if strings.HasPrefix(str, "HTTP/") || strings.HasPrefix(str, "CLIENT/") {
+ fmt.Printf("GET|CACHE|FILE %q...", u)
+ memCache[cacheKey] = CacheEntry(str)
+ goto end
+ }
+ }
+
+ // Do the request for real.
+ fmt.Printf("GET|NET %q...", u)
+ if resp, err := http.DefaultTransport.RoundTrip(req); err == nil {
+ var buf strings.Builder
+ if err := resp.Write(&buf); err != nil {
+ panic(err)
+ }
+ memCache[cacheKey] = CacheEntry(buf.String())
+ } else {
+ memCache[cacheKey] = CacheEntry("CLIENT/" + err.Error())
+ }
+
+ // Record the response to the file cache.
+ if err := os.WriteFile(cacheFile, []byte(memCache[cacheKey]), 0666); err != nil {
+ panic(err)
+ }
+
+end:
+ // Turn the cache entry into an http.Response (or error)
+ var ret_resp *http.Response
+ var ret_err error
+ entry := memCache[cacheKey]
+ switch {
+ case strings.HasPrefix(string(entry), "HTTP/"):
+ var err error
+ ret_resp, err = http.ReadResponse(bufio.NewReader(strings.NewReader(string(entry))), nil)
+ if err != nil {
+ panic(fmt.Errorf("invalid cache entry: %v", err))
+ }
+ if ModifyResponse != nil {
+ ret_resp = ModifyResponse(u, entry, ret_resp)
+ }
+ case strings.HasPrefix(string(entry), "CLIENT/"):
+ ret_err = errors.New(string(entry)[len("CLIENT/"):])
+ default:
+ panic("invalid cache entry: invalid prefix")
+ }
+
+ // Return.
+ if ret_err != nil {
+ fmt.Printf(" err\n")
+ } else {
+ fmt.Printf(" http %v\n", ret_resp.StatusCode)
+ }
+ return ret_resp, ret_err
+}
+
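+// Get performs an HTTP GET of u with the extra headers in hdr and
+// returns the response body; results (and failures) are cached, and a
+// 404 is returned as an error matching [os.ErrNotExist].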
+func Get(u string, hdr map[string]string) (string, error) {
+ if UserAgent == "" {
+ panic("main() must set the user agent string")
+ }
+ req, err := http.NewRequest(http.MethodGet, u, nil)
+ if err != nil {
+ panic(fmt.Errorf("should not happen: http.NewRequest: %v", err))
+ }
+ req.Header.Set("User-Agent", UserAgent)
+ for k, v := range hdr {
+ req.Header.Add(k, v)
+ }
+ client := &http.Client{
+ Transport: &transport{},
+ CheckRedirect: CheckRedirect,
+ }
+ resp, err := client.Do(req)
+ if err != nil {
+ return "", err
+ }
+ if resp.StatusCode != http.StatusOK {
+ return "", &httpStatusError{StatusCode: resp.StatusCode, Status: resp.Status}
+ }
+ bs, err := io.ReadAll(resp.Body)
+ if err != nil {
+ panic(fmt.Errorf("should not happen: strings.Reader.Read: %v", err))
+ }
+ return string(bs), nil
+}
+
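+// GetJSON is like [Get], but parses the response body as JSON into out.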
+func GetJSON(u string, hdr map[string]string, out any) error {
+ str, err := Get(u, hdr)
+ if err != nil {
+ return err
+ }
+ return json.Unmarshal([]byte(str), out)
+}
+
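+// GetPaginatedJSON fetches each page of a paginated JSON list API,
+// merging pageFn(i)'s parameters into uStr's query string for page i
+// and appending to *out, until a page comes back empty.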
+func GetPaginatedJSON[T any](uStr string, hdr map[string]string, out *[]T, pageFn func(i int) url.Values) error {
+ u, err := url.Parse(uStr)
+ if err != nil {
+ return err
+ }
+ query := u.Query()
+
+ for i := 0; true; i++ {
+ pageParams := pageFn(i)
+ for k, v := range pageParams {
+ query[k] = v
+ }
+
+ u.RawQuery = query.Encode()
+ var resp []T
+ if err := GetJSON(u.String(), hdr, &resp); err != nil {
+ return err
+ }
+ fmt.Printf(" -> %d records\n", len(resp))
+ if len(resp) == 0 {
+ break
+ }
+ *out = append(*out, resp...)
+ }
+
+ return nil
+}
diff --git a/cmd/generate/mailstuff/jwz.md b/lib/mailstuff/jwz.md
index 91e03f5..91e03f5 100644
--- a/cmd/generate/mailstuff/jwz.md
+++ b/lib/mailstuff/jwz.md
diff --git a/cmd/generate/mailstuff/mbox.go b/lib/mailstuff/mbox.go
index 8700c24..8700c24 100644
--- a/cmd/generate/mailstuff/mbox.go
+++ b/lib/mailstuff/mbox.go
diff --git a/cmd/generate/mailstuff/thread.go b/lib/mailstuff/thread.go
index 2cdf9a4..2cdf9a4 100644
--- a/cmd/generate/mailstuff/thread.go
+++ b/lib/mailstuff/thread.go
diff --git a/cmd/generate/mailstuff/thread_alg.go b/lib/mailstuff/thread_alg.go
index 1b351e9..1b351e9 100644
--- a/cmd/generate/mailstuff/thread_alg.go
+++ b/lib/mailstuff/thread_alg.go
diff --git a/public/index.html b/public/index.html
index 28d5477..35e75bc 100644
--- a/public/index.html
+++ b/public/index.html
@@ -117,7 +117,7 @@
<li>Social web:<ul>
- <li>I am <a rel="me" href="https://fosstodon.org/@lukeshu">@lukeshu@fosstodon.org</a> on Mastodon.</li>
+ <li>I am <s><a href="https://fosstodon.org/@lukeshu">@lukeshu@fosstodon.org</a></s><a rel="me" href="https://social.coop/@lukeshu">@lukeshu@social.coop</a> on Mastodon.</li>
<li>I am <a href="https://news.ycombinator.com/user?id=LukeShu">LukeShu</a> on Hacker News.</li>
diff --git a/public/sponsor/index.html b/public/sponsor/index.html
index 339794c..04f565b 100644
--- a/public/sponsor/index.html
+++ b/public/sponsor/index.html
@@ -59,7 +59,7 @@
<li><a class="donate-btn kofi" href="https://ko-fi.com/lukeshu">
<img src="kofi-icon.png"
alt="Ko-fi icon" />
- Ko-fi<!-- (0% fee, requires non-free JS) -->
+ Ko-fi<!-- (0% fee for one-offs, 5% fee for recurring; requires non-free JS) -->
</a></li>
<li><a class="donate-btn patreon" href="https://patreon.com/lukeshu">
<img src="patreon-icon.svg"