From 22a4b31d2da9d2dfb79266c710f6ee90c1036f91 Mon Sep 17 00:00:00 2001 From: Nathan Hammond Date: Wed, 26 Apr 2023 17:19:55 +0800 Subject: [PATCH] Make sure that we only hash the env pairs. --- cli/internal/run/global_hash.go | 78 +++++++++++++++++++++++---------- cli/internal/run/run.go | 18 ++++---- 2 files changed, 63 insertions(+), 33 deletions(-) diff --git a/cli/internal/run/global_hash.go b/cli/internal/run/global_hash.go index 2ebf6427eba15..da49f8fe8a85b 100644 --- a/cli/internal/run/global_hash.go +++ b/cli/internal/run/global_hash.go @@ -24,8 +24,8 @@ var _defaultEnvVars = []string{ "VERCEL_ANALYTICS_ID", } -// GlobalHashable represents all the things that we use to create the global hash -type GlobalHashable struct { +// GlobalHashableInputs represents all the things that we use to create the global hash +type GlobalHashableInputs struct { globalFileHashMap map[turbopath.AnchoredUnixPath]string rootExternalDepsHash string envVars env.DetailedMap @@ -35,9 +35,31 @@ type GlobalHashable struct { envMode util.EnvMode } -// This exists because the global hash used to have different fields. Changing -// to a new struct layout changes the global hash. We can remove this converter -// when we are going to have to update the global hash for something else. +type newGlobalHashable struct { +	globalFileHashMap map[turbopath.AnchoredUnixPath]string +	rootExternalDepsHash string +	envVars env.EnvironmentVariablePairs +	globalCacheKey string +	pipeline fs.PristinePipeline +	envVarPassthroughs []string +	envMode util.EnvMode +} + +// newGlobalHash is a transformation of GlobalHashableInputs. +// It's used for situations where we have an EnvMode specified +// inside of GlobalHashableInputs that changes the environment variable behavior.
+func newGlobalHash(full GlobalHashableInputs) (string, error) { + return fs.HashObject(newGlobalHashable{ + globalFileHashMap: full.globalFileHashMap, + rootExternalDepsHash: full.rootExternalDepsHash, + envVars: full.envVars.All.ToHashable(), + globalCacheKey: full.globalCacheKey, + pipeline: full.pipeline, + envVarPassthroughs: full.envVarPassthroughs, + envMode: full.envMode, + }) +} + type oldGlobalHashable struct { globalFileHashMap map[turbopath.AnchoredUnixPath]string rootExternalDepsHash string @@ -46,8 +68,22 @@ type oldGlobalHashable struct { pipeline fs.PristinePipeline } -// calculateGlobalHashFromHashable returns a hash string from the globalHashable -func calculateGlobalHashFromHashable(full GlobalHashable) (string, error) { +// oldGlobalHash is a transformation of GlobalHashableInputs. +// This exists because the global hash used to have different fields. Changing +// to a new struct layout changes the global hash. We can remove this converter +// when we are going to have to update the global hash for something else. +func oldGlobalHash(full GlobalHashableInputs) (string, error) { + return fs.HashObject(oldGlobalHashable{ + globalFileHashMap: full.globalFileHashMap, + rootExternalDepsHash: full.rootExternalDepsHash, + envVars: full.envVars.All.ToHashable(), + globalCacheKey: full.globalCacheKey, + pipeline: full.pipeline, + }) +} + +// calculateGlobalHashFromHashableInputs returns a hash string from the GlobalHashableInputs +func calculateGlobalHashFromHashableInputs(full GlobalHashableInputs) (string, error) { switch full.envMode { case util.Infer: if full.envVarPassthroughs != nil { // If we're in infer mode, and there is a global pass through config, // we'll hash the whole object, so we can detect changes to that config // Further, resolve the envMode to the concrete value. 
full.envMode = util.Strict - return fs.HashObject(full) + return newGlobalHash(full) } // If we're in infer mode, and there is no global pass through config, // we use the old struct layout. this will be true for everyone not using the strict env // feature, and we don't want to break their cache. - return fs.HashObject(oldGlobalHashable{ - globalFileHashMap: full.globalFileHashMap, - rootExternalDepsHash: full.rootExternalDepsHash, - envVars: full.envVars.All.ToHashable(), - globalCacheKey: full.globalCacheKey, - pipeline: full.pipeline, - }) + return oldGlobalHash(full) case util.Loose: // Remove the passthroughs from hash consideration if we're explicitly loose. full.envVarPassthroughs = nil - return fs.HashObject(full) + return newGlobalHash(full) case util.Strict: // Collapse `nil` and `[]` in strict mode. if full.envVarPassthroughs == nil { full.envVarPassthroughs = make([]string, 0) } - return fs.HashObject(full) + return newGlobalHash(full) default: panic("unimplemented environment mode") } } -func calculateGlobalHash( +func getGlobalHashInputs( rootpath turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON, pipeline fs.Pipeline, @@ -96,14 +126,14 @@ func calculateGlobalHash( logger hclog.Logger, ui cli.Ui, isStructuredOutput bool, -) (GlobalHashable, error) { +) (GlobalHashableInputs, error) { // Calculate env var dependencies envVars := []string{} envVars = append(envVars, envVarDependencies...) envVars = append(envVars, _defaultEnvVars...) 
globalHashableEnvVars, err := env.GetHashableEnvVars(envVars, []string{".*THASH.*"}, "") if err != nil { - return GlobalHashable{}, err + return GlobalHashableInputs{}, err } // The only way we can add env vars into the hash via matching is via THASH, @@ -121,12 +151,12 @@ func calculateGlobalHash( if len(globalFileDependencies) > 0 { ignores, err := packageManager.GetWorkspaceIgnores(rootpath) if err != nil { - return GlobalHashable{}, err + return GlobalHashableInputs{}, err } f, err := globby.GlobFiles(rootpath.ToStringDuringMigration(), globalFileDependencies, ignores) if err != nil { - return GlobalHashable{}, err + return GlobalHashableInputs{}, err } for _, val := range f { @@ -149,10 +179,10 @@ func calculateGlobalHash( globalFileHashMap, err := hashing.GetHashableDeps(rootpath, globalDepsPaths) if err != nil { - return GlobalHashable{}, fmt.Errorf("error hashing files: %w", err) + return GlobalHashableInputs{}, fmt.Errorf("error hashing files: %w", err) } - return GlobalHashable{ + return GlobalHashableInputs{ globalFileHashMap: globalFileHashMap, rootExternalDepsHash: rootPackageJSON.ExternalDepsHash, envVars: globalHashableEnvVars, diff --git a/cli/internal/run/run.go b/cli/internal/run/run.go index 8575e85f6f047..78bbff613c50e 100644 --- a/cli/internal/run/run.go +++ b/cli/internal/run/run.go @@ -245,7 +245,7 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { } } - globalHashable, err := calculateGlobalHash( + globalHashInputs, err := getGlobalHashInputs( r.base.RepoRoot, rootPackageJSON, pipeline, @@ -264,7 +264,7 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { return fmt.Errorf("failed to collect global hash inputs: %v", err) } - if globalHash, err := calculateGlobalHashFromHashable(globalHashable); err == nil { + if globalHash, err := calculateGlobalHashFromHashableInputs(globalHashInputs); err == nil { r.base.Logger.Debug("global hash", "value", globalHash) g.GlobalHash = globalHash } else { @@ -352,8 +352,8 
@@ func (r *run) run(ctx gocontext.Context, targets []string) error { } var envVarPassthroughMap env.EnvironmentVariableMap - if globalHashable.envVarPassthroughs != nil { - if envVarPassthroughDetailedMap, err := env.GetHashableEnvVars(globalHashable.envVarPassthroughs, nil, ""); err == nil { + if globalHashInputs.envVarPassthroughs != nil { + if envVarPassthroughDetailedMap, err := env.GetHashableEnvVars(globalHashInputs.envVarPassthroughs, nil, ""); err == nil { envVarPassthroughMap = envVarPassthroughDetailedMap.BySource.Explicit } } @@ -376,12 +376,12 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { packagesInScope, globalEnvMode, runsummary.NewGlobalHashSummary( - globalHashable.globalFileHashMap, - globalHashable.rootExternalDepsHash, - globalHashable.envVars, + globalHashInputs.globalFileHashMap, + globalHashInputs.rootExternalDepsHash, + globalHashInputs.envVars, envVarPassthroughMap, - globalHashable.globalCacheKey, - globalHashable.pipeline, + globalHashInputs.globalCacheKey, + globalHashInputs.pipeline, ), rs.Opts.SynthesizeCommand(rs.Targets), )