slicer: Record packages and slices in DB
This is the first commit that adds DB support to the slicer and the
cut command.

The database is created and saved in the cut command and populated in
the slicer. Currently, only packages and slices are recorded in the DB.
Recording of paths is more intricate, so it is added in the forthcoming
commit.

The slicer test is extended to allow testing of the created DB objects.
A list of expected DB objects is added to each existing test case. These
lists are currently quite boring, as most test cases use only the
embedded base-files package, but they will be extended with path entries
in the forthcoming commit.
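
As an illustration, an expected-objects list for a test case that
installs a single slice of the embedded base-files package could look
roughly like the following. This is a hypothetical sketch, not the
actual test data: the field name db, the slice name, and the version
value are placeholders.

    db: []any{
        db.Package{
            Name:    "base-files",
            Version: "1.0",
        },
        db.Slice{Name: "base-files_myslice"},
    },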
woky committed Oct 9, 2023
1 parent df78fe9 commit 180aa16
Showing 4 changed files with 401 additions and 1 deletion.
11 changes: 10 additions & 1 deletion cmd/chisel/cmd_cut.go
@@ -10,6 +10,7 @@ import (

"github.com/canonical/chisel/internal/archive"
"github.com/canonical/chisel/internal/cache"
"github.com/canonical/chisel/internal/db"
"github.com/canonical/chisel/internal/setup"
"github.com/canonical/chisel/internal/slicer"
)
@@ -98,11 +99,19 @@ func (cmd *cmdCut) Execute(args []string) error {
archives[archiveName] = openArchive
}

return slicer.Run(&slicer.RunOptions{
dbw := db.New()

err = slicer.Run(&slicer.RunOptions{
Selection: selection,
Archives: archives,
TargetDir: cmd.RootDir,
AddToDB: dbw.Add,
})
if err != nil {
return err
}

return db.Save(dbw, cmd.RootDir)
}

// TODO These need testing, and maybe moving into a common file.
162 changes: 162 additions & 0 deletions internal/slicer/fakedb_test.go
@@ -0,0 +1,162 @@
package slicer_test

import (
"fmt"
"io"
"sort"
"strings"

"github.com/canonical/chisel/internal/db"
)

// fakeDB is used to compare a list of DB objects created by the slicer against
// a list of expected DB objects. We don't care about the order in which slicer
// creates DB objects. In real usage, they will be reordered by the jsonwall
// database anyway. We only care about the set of objects created. So we record
// the created objects and put them into fakeDB and put the expected objects
// into another fakeDB. Then, we compare both sets as sorted lists obtained
// from fakeDB.values().
//
// Since DB object types are not ordered nor comparable (Path has pointers), we
// keep different types of objects in different slices and sort these slices
// with a comparison function appropriate for each type.

type fakeDB struct {
packages []db.Package
slices []db.Slice
paths []db.Path
contents []db.Content
}

func (p *fakeDB) add(value any) error {
switch v := value.(type) {
case db.Package:
p.packages = append(p.packages, v)
case db.Slice:
p.slices = append(p.slices, v)
case db.Path:
p.paths = append(p.paths, v)
case db.Content:
p.contents = append(p.contents, v)
default:
return fmt.Errorf("invalid DB type %T", v)
}
return nil
}

func (p *fakeDB) values() []any {
sort.Slice(p.packages, func(i, j int) bool {
x1 := p.packages[i].Name
x2 := p.packages[j].Name
return x1 < x2
})
sort.Slice(p.slices, func(i, j int) bool {
x1 := p.slices[i].Name
x2 := p.slices[j].Name
return x1 < x2
})
sort.Slice(p.paths, func(i, j int) bool {
x1 := p.paths[i].Path
x2 := p.paths[j].Path
return x1 < x2
})
sort.Slice(p.contents, func(i, j int) bool {
x1 := p.contents[i].Slice
x2 := p.contents[j].Slice
y1 := p.contents[i].Path
y2 := p.contents[j].Path
return x1 < x2 || (x1 == x2 && y1 < y2)
})
i := 0
vals := make([]any, len(p.packages)+len(p.slices)+len(p.paths)+len(p.contents))
for _, v := range p.packages {
vals[i] = v
i++
}
for _, v := range p.slices {
vals[i] = v
i++
}
for _, v := range p.paths {
vals[i] = v
i++
}
for _, v := range p.contents {
vals[i] = v
i++
}
return vals
}

func (p *fakeDB) dumpValues(w io.Writer) {
for _, v := range p.values() {
switch t := v.(type) {
case db.Package:
fmt.Fprintln(w, "db.Package{")
fmt.Fprintf(w, "\tName: %#v,\n", t.Name)
fmt.Fprintf(w, "\tVersion: %#v,\n", t.Version)
if t.SHA256 != "" {
fmt.Fprintf(w, "\tSHA256: %#v,\n", t.SHA256)
}
if t.Arch != "" {
fmt.Fprintf(w, "\tArch: %#v,\n", t.Arch)
}
fmt.Fprintln(w, "},")
case db.Slice:
fmt.Fprintln(w, "db.Slice{")
fmt.Fprintf(w, "\tName: %#v,\n", t.Name)
fmt.Fprintln(w, "},")
case db.Path:
fmt.Fprintln(w, "db.Path{")
fmt.Fprintf(w, "\tPath: %#v,\n", t.Path)
fmt.Fprintf(w, "\tMode: %#o,\n", t.Mode)
fmt.Fprintf(w, "\tSlices: %#v,\n", t.Slices)
if t.SHA256 != nil {
fmt.Fprint(w, "\tSHA256: &[...]byte{")
for i, b := range t.SHA256 {
if i%8 == 0 {
fmt.Fprint(w, "\n\t\t")
} else {
fmt.Fprint(w, " ")
}
fmt.Fprintf(w, "%#02x,", b)
}
fmt.Fprintln(w, "\n\t},")
}
if t.FinalSHA256 != nil {
fmt.Fprint(w, "\tFinalSHA256: &[...]byte{")
for i, b := range t.FinalSHA256 {
if i%8 == 0 {
fmt.Fprint(w, "\n\t\t")
} else {
fmt.Fprint(w, " ")
}
fmt.Fprintf(w, "%#02x,", b)
}
fmt.Fprintln(w, "\n\t},")
}
if t.Size != 0 {
fmt.Fprintf(w, "\tSize: %d,\n", t.Size)
}
if t.Link != "" {
fmt.Fprintf(w, "\tLink: %#v,\n", t.Link)
}
fmt.Fprintln(w, "},")
case db.Content:
fmt.Fprintln(w, "db.Content{")
fmt.Fprintf(w, "\tSlice: %#v,\n", t.Slice)
fmt.Fprintf(w, "\tPath: %#v,\n", t.Path)
fmt.Fprintln(w, "},")
default:
panic(fmt.Sprintf("invalid DB value %#v", v))
}
}
}

func (p *fakeDB) dump() string {
var buf strings.Builder
fmt.Fprintln(&buf, "-----BEGIN DB DUMP-----")
p.dumpValues(&buf)
fmt.Fprintln(&buf, "-----END DB DUMP-----")
return buf.String()
}
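
The helper above is meant to be driven from the slicer test roughly as
follows: objects created by the slicer are collected into one fakeDB via
the AddToDB callback, the expected objects from the test case go into a
second fakeDB, and the two sorted value lists are compared. A
hypothetical sketch, assuming the gopkg.in/check.v1 style used in the
existing tests (the names test.db and c are assumptions, not the actual
test code):

    var actualDB, expectedDB fakeDB
    err := slicer.Run(&slicer.RunOptions{
        Selection: selection,
        Archives:  archives,
        TargetDir: targetDir,
        AddToDB:   actualDB.add,
    })
    c.Assert(err, IsNil)
    for _, obj := range test.db {
        c.Assert(expectedDB.add(obj), IsNil)
    }
    // Compare the sorted sets; dump the actual objects on mismatch.
    c.Assert(actualDB.values(), DeepEquals, expectedDB.values(),
        Commentf("%s", actualDB.dump()))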
25 changes: 25 additions & 0 deletions internal/slicer/slicer.go
@@ -11,16 +11,20 @@ import (
"syscall"

"github.com/canonical/chisel/internal/archive"
"github.com/canonical/chisel/internal/db"
"github.com/canonical/chisel/internal/deb"
"github.com/canonical/chisel/internal/fsutil"
"github.com/canonical/chisel/internal/scripts"
"github.com/canonical/chisel/internal/setup"
)

type AddToDB func(value any) error

type RunOptions struct {
Selection *setup.Selection
Archives map[string]archive.Archive
TargetDir string
AddToDB AddToDB
}

func Run(options *RunOptions) error {
@@ -30,6 +34,11 @@ func Run(options *RunOptions) error {
pathInfos := make(map[string]setup.PathInfo)
knownPaths := make(map[string]bool)

addToDB := options.AddToDB
if addToDB == nil {
addToDB = func(value any) error { return nil }
}

knownPaths["/"] = true

// addKnownPath path adds path and all its directory parent paths into
@@ -68,6 +77,11 @@ func Run(options *RunOptions) error {

// Build information to process the selection.
for _, slice := range options.Selection.Slices {
pkgSlice := slice.String()
if err := addToDB(db.Slice{pkgSlice}); err != nil {
return fmt.Errorf("cannot write slice to db: %w", err)
}

extractPackage := extract[slice.Package]
if extractPackage == nil {
archiveName := release.Packages[slice.Package].Archive
@@ -81,6 +95,17 @@ func Run(options *RunOptions) error {
archives[slice.Package] = archive
extractPackage = make(map[string][]deb.ExtractInfo)
extract[slice.Package] = extractPackage

pkgInfo := archive.Info(slice.Package)
dbPackage := db.Package{
slice.Package,
pkgInfo.Version(),
pkgInfo.SHA256(),
pkgInfo.Arch(),
}
if err := addToDB(dbPackage); err != nil {
return fmt.Errorf("cannot write package to db: %w", err)
}
}
arch := archives[slice.Package].Options().Arch
copyrightPath := "/usr/share/doc/" + slice.Package + "/copyright"
(Diff for the fourth changed file, the extended slicer test, not shown.)
