author		Patrick Georgi <pgeorgi@google.com>	2021-12-07 17:47:45 +0100
committer	Patrick Georgi <patrick@coreboot.org>	2022-11-03 13:50:30 +0000
commit		3d0303a57c08bf8c56bb55885f6705680097bd27 (patch)
tree		df4b5cdafb42223e11b0f532b0388f4e3a5c1758 /util/docker/coreboot.org-status/board-status.html/logs.go
parent		5318d9c9d13b39908b03b8184184fd913221b71e (diff)
util/docker/coreboot.org-status: Rewrite parser
The current tool is a shell script that mixes data collection and HTML
generation and is generally a pain to work with. It takes 15 minutes to
run.

The new tool is written in Go, collects all data first, then generates
the output HTML from the data and a single template, and finishes in 10
seconds.

The goal in this version is to produce output as similar as possible to
the output of the shell script. Some differences will remain because the
shell script emits some garbage data whose reproduction would require
more effort than it is worth.

Change-Id: I4fab86d24088e4f9eff434c21ce9caa077f3f9e2
Signed-off-by: Patrick Georgi <pgeorgi@google.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/59958
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Maxim Polyakov <max.senia.poliak@gmail.com>
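As a rough illustration of the "collect all data first, then render from a single template" flow described above (a minimal sketch; ReportData, the board names, and the template text are made up for this note and are not the tool's actual types):

// Phase 1: gather everything into one data structure.
// Phase 2: render the output HTML from that data and one template.
package main

import (
	"html/template"
	"os"
)

type ReportData struct {
	Boards []string
}

func main() {
	// Collection happens up front (hard-coded here for brevity).
	data := ReportData{Boards: []string{"vendor/board-a", "vendor/board-b"}}

	// Rendering is a single pass over one template.
	tmpl := template.Must(template.New("status").Parse(
		"<ul>{{range .Boards}}<li>{{.}}</li>{{end}}</ul>\n"))
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}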
Diffstat (limited to 'util/docker/coreboot.org-status/board-status.html/logs.go')
-rw-r--r--	util/docker/coreboot.org-status/board-status.html/logs.go	165
1 file changed, 165 insertions(+), 0 deletions(-)
diff --git a/util/docker/coreboot.org-status/board-status.html/logs.go b/util/docker/coreboot.org-status/board-status.html/logs.go
new file mode 100644
index 0000000000..21e91f2d9a
--- /dev/null
+++ b/util/docker/coreboot.org-status/board-status.html/logs.go
@@ -0,0 +1,165 @@
+package main
+
+import (
+ "fmt"
+ "io/fs"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "sort"
+ "strings"
+ "time"
+)
+
+// Color returns an HTML color code between green and yellow based on the
+// number of days that have passed since ds.
+func (ds DateString) Color() string {
+	date, _ := time.Parse("2006-01-02T15:04:05Z", string(ds))
+	days := int(time.Since(date).Hours() / 24)
+	if days > 255 {
+		days = 255
+	}
+	return fmt.Sprintf("#%02xff00", days)
+}
+
+func fetchLogs(dirs chan<- NamedFS) {
+	err := fs.WalkDir(bsdirFS, ".", func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		if path[0] == '.' {
+			return nil
+		}
+		if d.IsDir() && len(strings.Split(path, string(filepath.Separator))) == 4 {
+			dirs <- NamedFS{
+				FS:   bsdirFS,
+				Name: path,
+			}
+		}
+		return nil
+	})
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Reading logs failed: %v\n", err)
+	}
+	close(dirs)
+}
+
+func collectLogs(dirs <-chan NamedFS) {
+	data.Logs = make(map[Timeframe][]Log)
+	data.VendorBoardDate = make(map[string]DateString)
+	data.VendorBoardReferenced = make(map[string]bool)
+	timeframes := make(map[Timeframe]bool)
+	gitcache := make(map[string]string)
+	for dir := range dirs {
+		upstream := ""
+		revB, err := fs.ReadFile(dir.FS, filepath.Join(dir.Name, "revision.txt"))
+		if err != nil {
+			continue
+		}
+		rev := string(revB)
+		skipDir := false
+		for _, line := range strings.Split(rev, "\n") {
+			item := strings.SplitN(line, ":", 2)
+			if len(item) != 2 {
+				// This is an error, but let's try to extract
+				// as much value out of revision.txt files as
+				// possible, even if some lines are erroneous.
+				continue
+			}
+			if item[0] == "Upstream revision" {
+				upstream = strings.TrimSpace(item[1])
+				// Tried using go-git, but its resolver
+				// couldn't expand short hashes despite the
+				// docs claiming that it can.
+				if val, ok := gitcache[upstream]; ok {
+					upstream = val
+				} else {
+					res, err := exec.Command("/usr/bin/git", "-C", cbdir, "log", "-n1", "--format=%H", upstream).Output()
+					if err != nil {
+						fmt.Fprintf(os.Stderr, "revision %s not found\n", upstream)
+						skipDir = true
+						break
+					}
+					gitcache[upstream] = strings.TrimSpace(string(res))
+					upstream = gitcache[upstream]
+				}
+			}
+		}
+		if skipDir {
+			continue
+		}
+
+		rawFiles, err := fs.Glob(dir.FS, filepath.Join(dir.Name, "*"))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Could not fetch log data, skipping: %v\n", err)
+			continue
+		}
+
+		pieces := strings.Split(dir.Name, string(filepath.Separator))
+		if len(pieces) < 4 {
+			fmt.Fprintf(os.Stderr, "log directory %s is malformed, skipping\n", dir.Name)
+			continue
+		}
+		vendorBoard := strings.Join(pieces[:2], "/")
+		// TODO: these need to become the "second to last" and "last" items,
+		// but only after compatibility with the current system has been ensured.
+		dateTimePath := pieces[3]
+		dateTime, err := time.Parse(time.RFC3339, strings.ReplaceAll(dateTimePath, "_", ":"))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Could not parse timestamp from %s: %v\n", dir.Name, err)
+			continue
+		}
+		dateTimeNormal := dateTime.UTC().Format("2006-01-02T15:04:05Z")
+		dateTimeHuman := dateTime.UTC().Format(time.UnixDate)
+		tfYear, tfWeek := dateTime.ISOWeek()
+		timeframe := Timeframe(fmt.Sprintf("%dW%02d", tfYear, tfWeek))
+
+		if !timeframes[timeframe] {
+			timeframes[timeframe] = true
+			data.Timeframes = append(data.Timeframes, timeframe)
+			data.Logs[timeframe] = []Log{}
+		}
+
+		files := []Path{}
+		l := len(dir.Name) + 1
+		for _, file := range rawFiles {
+			if file[l:] == "revision.txt" {
+				continue
+			}
+			files = append(files, Path{
+				Path:     dir.Name + "/",
+				Basename: file[l:],
+			})
+		}
+
+		data.Logs[timeframe] = append(data.Logs[timeframe], Log{
+			VendorBoard:  vendorBoard,
+			Time:         dateTimeNormal,
+			TimeReadable: dateTimeHuman,
+			Upstream:     upstream,
+			Files:        files,
+		})
+	}
+	sort.Slice(data.Timeframes, func(i, j int) bool {
+		// reverse sort
+		return data.Timeframes[i] > data.Timeframes[j]
+	})
+	for bi := range data.Logs {
+		bucket := data.Logs[bi]
+		sort.Slice(data.Logs[bi], func(i, j int) bool {
+			if bucket[i].Time == bucket[j].Time {
+				return (bucket[i].VendorBoard > bucket[j].VendorBoard)
+			}
+			return (bucket[i].Time > bucket[j].Time)
+		})
+	}
+	for _, ts := range data.Timeframes {
+		for li, l := range data.Logs[ts] {
+			if _, match := data.VendorBoardDate[l.VendorBoard]; match {
+				continue
+			}
+			data.VendorBoardDate[l.VendorBoard] = DateString(l.Time)
+			data.Logs[ts][li].Reference = true
+		}
+	}
+}
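For reference, a hedged sketch (not part of this commit) of how DateString.Color() behaves; it assumes it is built in the same package as logs.go, and the file name is illustrative:

// logs_color_example_test.go: illustrative test of the green-to-yellow gradient.
package main

import (
	"testing"
	"time"
)

func TestColorGradient(t *testing.T) {
	// A report from today has an age of 0 days; one from a year ago is capped at 255.
	fresh := DateString(time.Now().UTC().Format("2006-01-02T15:04:05Z"))
	stale := DateString(time.Now().AddDate(-1, 0, 0).UTC().Format("2006-01-02T15:04:05Z"))

	if got := fresh.Color(); got != "#00ff00" {
		t.Errorf("fresh report: got %s, want #00ff00 (green)", got)
	}
	if got := stale.Color(); got != "#ffff00" {
		t.Errorf("year-old report: got %s, want #ffff00 (capped at 255 days)", got)
	}
}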