This repository has been archived by the owner on Feb 3, 2018. It is now read-only.

Merge pull request #139 from sdboyer/refactor-hashing
Refactor hashing
sdboyer authored Jan 14, 2017
2 parents 65939b4 + 53a999a commit e66ef60
Showing 7 changed files with 410 additions and 314 deletions.
18 changes: 11 additions & 7 deletions bridge.go
@@ -40,6 +40,9 @@ type bridge struct {
// held by the solver that it ends up being easier and saner to do this.
s *solver

// Whether to sort version lists for downgrade.
down bool

// Simple, local cache of the root's PackageTree
crp *struct {
ptree PackageTree
@@ -58,17 +61,18 @@ type bridge struct {

// Global factory func to create a bridge. This exists solely to allow tests to
// override it with a custom bridge and sm.
var mkBridge = func(s *solver, sm SourceManager) sourceBridge {
var mkBridge = func(s *solver, sm SourceManager, down bool) sourceBridge {
return &bridge{
sm: sm,
s: s,
down: down,
vlists: make(map[ProjectIdentifier][]Version),
}
}
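Because mkBridge is a plain package-level variable, the new down parameter is easy to observe from a test: swap the factory out, record what the solver passes in, and restore it afterwards. A minimal sketch of that pattern, assuming it lives in this package's own test code (withRecordingBridge and downSeen are illustrative names, not part of this change):

// Hypothetical test helper: temporarily replace the bridge factory so a test
// can see the downgrade flag the solver hands to its bridge.
func withRecordingBridge(fn func(downSeen *bool)) {
	orig := mkBridge
	defer func() { mkBridge = orig }() // restore the real factory afterwards

	var downSeen bool
	mkBridge = func(s *solver, sm SourceManager, down bool) sourceBridge {
		downSeen = down
		return orig(s, sm, down) // delegate to the real bridge
	}
	fn(&downSeen)
}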

func (b *bridge) GetManifestAndLock(id ProjectIdentifier, v Version) (Manifest, Lock, error) {
if id.ProjectRoot == ProjectRoot(b.s.rpt.ImportRoot) {
return b.s.rm, b.s.rl, nil
if b.s.rd.isRoot(id.ProjectRoot) {
return b.s.rd.rm, b.s.rd.rl, nil
}

b.s.mtr.push("b-gmal")
@@ -94,7 +98,7 @@ func (b *bridge) ListVersions(id ProjectIdentifier) ([]Version, error) {
return nil, err
}

if b.s.params.Downgrade {
if b.down {
SortForDowngrade(vl)
} else {
SortForUpgrade(vl)
@@ -120,7 +124,7 @@ func (b *bridge) SourceExists(id ProjectIdentifier) (bool, error) {
}

func (b *bridge) vendorCodeExists(id ProjectIdentifier) (bool, error) {
fi, err := os.Stat(filepath.Join(b.s.params.RootDir, "vendor", string(id.ProjectRoot)))
fi, err := os.Stat(filepath.Join(b.s.rd.dir, "vendor", string(id.ProjectRoot)))
if err != nil {
return false, err
} else if fi.IsDir() {
@@ -279,7 +283,7 @@ func (b *bridge) vtu(id ProjectIdentifier, v Version) versionTypeUnion {
// The root project is handled separately, as the source manager isn't
// responsible for that code.
func (b *bridge) ListPackages(id ProjectIdentifier, v Version) (PackageTree, error) {
if id.ProjectRoot == ProjectRoot(b.s.rpt.ImportRoot) {
if b.s.rd.isRoot(id.ProjectRoot) {
panic("should never call ListPackages on root project")
}

@@ -327,7 +331,7 @@ func (b *bridge) breakLock() {
return
}

for _, lp := range b.s.rl.Projects() {
for _, lp := range b.s.rd.rl.Projects() {
if _, is := b.s.sel.selected(lp.pi); !is {
// TODO(sdboyer) use this as an opportunity to detect
// inconsistencies between upstream and the lock (e.g., moved tags)?
111 changes: 64 additions & 47 deletions hash.go
@@ -3,6 +3,7 @@ package gps
import (
"bytes"
"crypto/sha256"
"io"
"sort"
)

@@ -17,99 +18,115 @@ import (
//
// (Basically, this is for memoization.)
func (s *solver) HashInputs() (digest []byte) {
buf := new(bytes.Buffer)
s.writeHashingInputs(buf)
h := sha256.New()
s.writeHashingInputs(h)

hd := sha256.Sum256(buf.Bytes())
hd := h.Sum(nil)
digest = hd[:]
return
}
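The substantive change in HashInputs is that the inputs are now streamed straight into the SHA-256 state instead of being accumulated in a bytes.Buffer and hashed in one shot. The two approaches yield the same digest; a self-contained sketch using only the standard library (the inputs slice is made up for illustration):

package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"
)

func main() {
	inputs := []string{"github.com/foo/bar", "^1.0.0", "github.com/baz/qux"}

	// Old approach: buffer everything, then hash the accumulated bytes.
	buf := new(bytes.Buffer)
	for _, s := range inputs {
		buf.WriteString(s)
	}
	old := sha256.Sum256(buf.Bytes())

	// New approach: write each piece directly into the running hash state.
	h := sha256.New()
	for _, s := range inputs {
		h.Write([]byte(s))
	}

	fmt.Println(bytes.Equal(old[:], h.Sum(nil))) // true - same digest, no intermediate buffer
}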

func (s *solver) writeHashingInputs(buf *bytes.Buffer) {
func (s *solver) writeHashingInputs(w io.Writer) {
writeString := func(s string) {
// Skip zero-length string writes; it doesn't affect the real hash
// calculation, and keeps misleading newlines from showing up in the
// debug output.
if s != "" {
// All users of writeHashingInputs cannot error on Write(), so just
// ignore it
w.Write([]byte(s))
}
}
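Ignoring the error from w.Write is sound for the writers this function actually receives: the hash.Hash returned by sha256.New documents that its Write never returns an error, and the debug path writes into a bytes.Buffer-backed wrapper, which also always succeeds. A tiny standalone check of that property:

package main

import (
	"crypto/sha256"
	"fmt"
)

func main() {
	h := sha256.New()
	n, err := h.Write([]byte("anything"))
	fmt.Println(n, err) // 8 <nil> - hash.Hash.Write is documented to never return an error
}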

// Apply overrides to the constraints from the root. Otherwise, the hash
// would be computed on the basis of a constraint from root that doesn't
// actually affect solving.
p := s.ovr.overrideAll(s.rm.DependencyConstraints().merge(s.rm.TestDependencyConstraints()))
wc := s.rd.combineConstraints()

for _, pd := range p {
buf.WriteString(string(pd.Ident.ProjectRoot))
buf.WriteString(pd.Ident.Source)
for _, pd := range wc {
writeString(string(pd.Ident.ProjectRoot))
writeString(pd.Ident.Source)
// FIXME Constraint.String() is a surjective-only transformation - tags
// and branches with the same name are written out as the same string.
// This could, albeit rarely, result in input collisions when a real
// change has occurred.
buf.WriteString(pd.Constraint.String())
// This could, albeit rarely, result in erroneously identical inputs
// when a real change has occurred.
writeString(pd.Constraint.String())
}
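The FIXME above concerns Constraint.String() collapsing distinct version kinds into the same text. As a rough illustration, assuming this package's NewBranch and NewVersion constructors behave as their names suggest (the "v1.0.0" value is made up), a branch and a semver tag with the same name contribute identical bytes to the hash:

b := NewBranch("v1.0.0")  // a branch that happens to be named like a tag
v := NewVersion("v1.0.0") // a semver tag
fmt.Println(b.String() == v.String()) // true - the hash input cannot tell them apart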

// Get the external reach list

// Write each of the packages, or the errors that were found for a
// particular subpath, into the hash. We need to do this in a
// deterministic order, so expand and sort the map.
var pkgs []PackageOrErr
for _, perr := range s.rpt.Packages {
for _, perr := range s.rd.rpt.Packages {
pkgs = append(pkgs, perr)
}
sort.Sort(sortPackageOrErr(pkgs))
for _, perr := range pkgs {
if perr.Err != nil {
buf.WriteString(perr.Err.Error())
writeString(perr.Err.Error())
} else {
buf.WriteString(perr.P.Name)
buf.WriteString(perr.P.CommentPath)
buf.WriteString(perr.P.ImportPath)
writeString(perr.P.Name)
writeString(perr.P.CommentPath)
writeString(perr.P.ImportPath)
for _, imp := range perr.P.Imports {
if !isStdLib(imp) {
buf.WriteString(imp)
writeString(imp)
}
}
for _, imp := range perr.P.TestImports {
if !isStdLib(imp) {
buf.WriteString(imp)
writeString(imp)
}
}
}
}
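The comment above explains why the Packages map is expanded into a slice and sorted first: Go deliberately randomizes map iteration order, so hashing straight off the map would produce a different digest on every run. A minimal standalone illustration of the expand-and-sort idiom (the sample map is made up):

package main

import (
	"fmt"
	"sort"
)

func main() {
	pkgs := map[string]string{"b": "beta", "a": "alpha", "c": "gamma"}

	// Ranging over pkgs directly yields a different order on different runs,
	// so collect the keys, sort them, and only then emit anything that feeds a hash.
	keys := make([]string, 0, len(pkgs))
	for k := range pkgs {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	for _, k := range keys {
		fmt.Println(k, pkgs[k]) // deterministic: a, b, c
	}
}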

// Write any require packages given in the root manifest.
if len(s.req) > 0 {
// Dump and sort the reqnores
req := make([]string, 0, len(s.req))
for pkg := range s.req {
req = append(req, pkg)
}
sort.Strings(req)
// Write any required packages given in the root manifest.
req := make([]string, 0, len(s.rd.req))
for pkg := range s.rd.req {
req = append(req, pkg)
}
sort.Strings(req)

for _, reqp := range req {
buf.WriteString(reqp)
}
for _, reqp := range req {
writeString(reqp)
}

// Add the ignored packages, if any.
if len(s.ig) > 0 {
// Dump and sort the ignores
ig := make([]string, 0, len(s.ig))
for pkg := range s.ig {
ig = append(ig, pkg)
}
sort.Strings(ig)
ig := make([]string, 0, len(s.rd.ig))
for pkg := range s.rd.ig {
ig = append(ig, pkg)
}
sort.Strings(ig)

for _, igp := range ig {
buf.WriteString(igp)
}
for _, igp := range ig {
writeString(igp)
}

for _, pc := range s.ovr.asSortedSlice() {
buf.WriteString(string(pc.Ident.ProjectRoot))
for _, pc := range s.rd.ovr.asSortedSlice() {
writeString(string(pc.Ident.ProjectRoot))
if pc.Ident.Source != "" {
buf.WriteString(pc.Ident.Source)
writeString(pc.Ident.Source)
}
if pc.Constraint != nil {
buf.WriteString(pc.Constraint.String())
writeString(pc.Constraint.String())
}
}

an, av := s.b.AnalyzerInfo()
buf.WriteString(an)
buf.WriteString(av.String())
writeString(an)
writeString(av.String())
}

// bytes.Buffer wrapper that injects newlines after each call to Write().
type nlbuf bytes.Buffer

func (buf *nlbuf) Write(p []byte) (n int, err error) {
n, _ = (*bytes.Buffer)(buf).Write(p)
(*bytes.Buffer)(buf).WriteByte('\n')
return n + 1, nil
}

// HashingInputsAsString returns the raw input data used by Solver.HashInputs()
@@ -118,10 +135,10 @@ func (s *solver) writeHashingInputs(buf *bytes.Buffer) {
// This is primarily intended for debugging purposes.
func HashingInputsAsString(s Solver) string {
ts := s.(*solver)
buf := new(bytes.Buffer)
buf := new(nlbuf)
ts.writeHashingInputs(buf)

return buf.String()
return (*bytes.Buffer)(buf).String()
}
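With nlbuf in place, every Write lands on its own line, so the debug dump stays readable even though the real hash input has no separators at all. A rough usage sketch, assuming s is a Solver already prepared elsewhere (for example via gps.Prepare):

// Dump the exact byte stream HashInputs() feeds to SHA-256, one Write per
// line - useful when two runs unexpectedly disagree on the input digest.
fmt.Println(gps.HashingInputsAsString(s))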

type sortPackageOrErr []PackageOrErr
