From 8f30bed874d01f821225d548d89f52fb0fa3f959 Mon Sep 17 00:00:00 2001
From: Lewis Dale
Date: Mon, 13 May 2024 07:36:53 +0100
Subject: [PATCH] Learning Go Day Twelve post

---
 src/blog/posts/2024/5/learning-go-day-12.md | 99 +++++++++++++++++++++
 1 file changed, 99 insertions(+)
 create mode 100644 src/blog/posts/2024/5/learning-go-day-12.md

diff --git a/src/blog/posts/2024/5/learning-go-day-12.md b/src/blog/posts/2024/5/learning-go-day-12.md
new file mode 100644
index 0000000..1b38261
--- /dev/null
+++ b/src/blog/posts/2024/5/learning-go-day-12.md
@@ -0,0 +1,99 @@
+---
+title: "Learning Go: Day Twelve"
+date: 2024-05-14T08:00:00.0Z
+tags:
+  - learning
+  - go
+excerpt: "A quick one today on improving the data that's output"
+---
+
+Yesterday I [actually started storing and printing responses](/post/learning-go-day-eleven) from the database. Today, as a short exercise, I'm going to improve that data and group it by site.
+
+## The data model
+
+I've introduced a new data model, `ping.SiteResponse`. Originally this was going to live inside the `sites` package, but that introduced a circular dependency. I'll need to resolve that soon regardless, because this data model sucks, but for now the code will live in the `ping` package.
+
+```go
+// ping/ping.go
+type SiteResponse struct {
+	created_at string // SQLite stores timestamps as strings
+	Name       string
+	Url        string
+	Pings      []Ping
+}
+```
+
+## Retrieving the data
+
+My first instinct was to use an aggregate function to retrieve a site along with all of its pings. However, SQLite only has [a small set of built-in aggregate functions](https://www.sqlite.org/lang_aggfunc.html). There are some workarounds, but for this part I'm just going to do it as two separate queries, even if it is a bit inefficient[^1].
+
+So, I add a `List` function to the `sites` package, which gets all of the site records:
+
+```go
+// sites/sites.go
+
+// List returns every site, newest first.
+func List(db *sql.DB) []Site {
+	rows, err := db.Query(`SELECT url, name, created_at FROM sites ORDER BY created_at DESC`)
+
+	if err != nil {
+		panic(err)
+	}
+	defer rows.Close()
+
+	sites := make([]Site, 0)
+
+	for rows.Next() {
+		s := Site{}
+		rows.Scan(&s.Url, &s.Name, &s.Created_at) // ignoring Scan's error for now
+		sites = append(sites, s)
+	}
+
+	return sites
+}
+```
+
+And then in `ping.ListGroupedBySite`, I iterate over that list and retrieve all of the pings for each site:
+
+```go
+// ping/ping.go
+
+// ListGroupedBySite returns one SiteResponse per site, each with its pings attached.
+func ListGroupedBySite(db *sql.DB) []SiteResponse {
+	responses := make([]SiteResponse, 0)
+
+	for _, site := range sites.List(db) {
+		s := SiteResponse{Url: site.Url, Name: site.Name, created_at: site.Created_at}
+		s.Pings = listForSite(db, site)
+		responses = append(responses, s)
+	}
+
+	return responses
+}
+
+// listForSite fetches all of the pings recorded for a single site, most recent first.
+func listForSite(db *sql.DB, site sites.Site) []Ping {
+	rows, err := db.Query(`SELECT ping.timestamp, ping.status FROM ping WHERE site = ? ORDER BY timestamp DESC`, site.Url)
+
+	if err != nil {
+		panic(err)
+	}
+	defer rows.Close()
+
+	pings := make([]Ping, 0)
+
+	for rows.Next() {
+		p := Ping{}
+		rows.Scan(&p.Timestamp, &p.Status)
+		p.Site = site
+		pings = append(pings, p)
+	}
+
+	return pings
+}
+```
+
+I realised I had to make a couple of changes to the `Site` struct to make this work. Namely, `created_at` needed to be a string rather than an int, because SQLite stores the timestamps as strings[^2]. I also had to make it public so that I could copy the value over to the `SiteResponse` struct[^3].
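+
+For reference, `Site` now ends up looking roughly like this. This is just a sketch pieced together from how the struct is used in the snippets above; the real thing may well have other fields:
+
+```go
+// sites/sites.go (rough sketch of the updated struct)
+type Site struct {
+	Url        string
+	Name       string
+	Created_at string // now exported, and a string to match how SQLite stores the timestamp
+}
+```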
+
+But that works: now, when accessing https://oopsie.lewisdale.dev, there's an array of sites, each with a timestamped list of `pings` attached.
+
+[^1]: I've got like 12 database records. I'd spend more time writing the query and the code around it than it would save me right now.
+[^2]: I could also have used SQLite's [datetime functions](https://www.sqlite.org/lang_datefunc.html) to convert it to a Unix timestamp, but what's the point?
+[^3]: Really this should be the other way round, with `Site` having a mapping function, since `SiteResponse` is the publicly-accessible type.
\ No newline at end of file