feat: use preview to compute workspace tags from terraform (#18720)

If a template uses dynamic parameters, workspace tags are now extracted using
`coder/preview`.
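
At a high level, the new path renders the template with `coder/preview` and reads the resolved tags from its output. A minimal sketch of that flow, assuming a template tarball as input — the wrapper function, package name, and error handling are illustrative, while the preview and archivefs calls mirror the tests later in this diff:

package tagspreview // illustrative package name

import (
	"bytes"
	"context"

	"golang.org/x/xerrors"

	archivefs "github.com/coder/coder/v2/archive/fs"
	"github.com/coder/preview"
)

// workspaceTagsFromTar is a hypothetical helper: render the template archive
// with preview, surface any parse errors, and return the resolved tag map.
// Tags whose key or value cannot be resolved are reported instead of being
// silently dropped.
func workspaceTagsFromTar(ctx context.Context, tarData []byte) (map[string]string, error) {
	templateFS := archivefs.FromTarReader(bytes.NewBuffer(tarData))
	output, diags := preview.Preview(ctx, preview.Input{}, templateFS)
	if diags.HasErrors() {
		return nil, xerrors.Errorf("render template: %s", diags.Error())
	}
	if failed := output.WorkspaceTags.UnusableTags(); len(failed) > 0 {
		return nil, xerrors.Errorf("unusable workspace tag: %q", failed[0].KeyString())
	}
	return output.WorkspaceTags.Tags(), nil
}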
Steven Masley
2025-07-03 14:35:44 -05:00
committed by GitHub
parent 4607e5113b
commit a099a8a25c
14 changed files with 1185 additions and 78 deletions

View File

@@ -10,7 +10,7 @@ import (
"github.com/coder/coder/v2/codersdk"
)
func ParameterValidationError(diags hcl.Diagnostics) *DiagnosticError {
func parameterValidationError(diags hcl.Diagnostics) *DiagnosticError {
return &DiagnosticError{
Message: "Unable to validate parameters",
Diagnostics: diags,
@@ -18,9 +18,9 @@ func ParameterValidationError(diags hcl.Diagnostics) *DiagnosticError {
}
}
func TagValidationError(diags hcl.Diagnostics) *DiagnosticError {
func tagValidationError(diags hcl.Diagnostics) *DiagnosticError {
return &DiagnosticError{
Message: "Failed to parse workspace tags",
Message: "Unable to parse workspace tags",
Diagnostics: diags,
KeyedDiagnostics: make(map[string]hcl.Diagnostics),
}

View File

@@ -243,7 +243,28 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
return nil // already fetched
}
user, err := r.db.GetUserByID(ctx, ownerID)
owner, err := WorkspaceOwner(ctx, r.db, r.data.templateVersion.OrganizationID, ownerID)
if err != nil {
return err
}
r.currentOwner = owner
return nil
}
func (r *dynamicRenderer) Close() {
r.once.Do(r.close)
}
func ProvisionerVersionSupportsDynamicParameters(version string) bool {
major, minor, err := apiversion.Parse(version)
// If the api version is not valid or less than 1.6, we need to use the static parameters
useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6)
return !useStaticParams
}
func WorkspaceOwner(ctx context.Context, db database.Store, org uuid.UUID, ownerID uuid.UUID) (*previewtypes.WorkspaceOwner, error) {
user, err := db.GetUserByID(ctx, ownerID)
if err != nil {
// If the user failed to read, we also try to read the user from their
// organization member. You only need to be able to read the organization member
@@ -252,37 +273,37 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
// Only the terraform files can therefore leak more information than the
// caller should have access to. All this info should be public assuming you can
// read the user though.
mem, err := database.ExpectOne(r.db.OrganizationMembers(ctx, database.OrganizationMembersParams{
OrganizationID: r.data.templateVersion.OrganizationID,
mem, err := database.ExpectOne(db.OrganizationMembers(ctx, database.OrganizationMembersParams{
OrganizationID: org,
UserID: ownerID,
IncludeSystem: true,
}))
if err != nil {
return xerrors.Errorf("fetch user: %w", err)
return nil, xerrors.Errorf("fetch user: %w", err)
}
// Org member fetched, so use the provisioner context to fetch the user.
//nolint:gocritic // Has the correct permissions, and matches the provisioning flow.
user, err = r.db.GetUserByID(dbauthz.AsProvisionerd(ctx), mem.OrganizationMember.UserID)
user, err = db.GetUserByID(dbauthz.AsProvisionerd(ctx), mem.OrganizationMember.UserID)
if err != nil {
return xerrors.Errorf("fetch user: %w", err)
return nil, xerrors.Errorf("fetch user: %w", err)
}
}
// nolint:gocritic // This is kind of the wrong query to use here, but it
// matches how the provisioner currently works. We should figure out
// something that needs less escalation but has the correct behavior.
row, err := r.db.GetAuthorizationUserRoles(dbauthz.AsProvisionerd(ctx), ownerID)
row, err := db.GetAuthorizationUserRoles(dbauthz.AsProvisionerd(ctx), ownerID)
if err != nil {
return xerrors.Errorf("user roles: %w", err)
return nil, xerrors.Errorf("user roles: %w", err)
}
roles, err := row.RoleNames()
if err != nil {
return xerrors.Errorf("expand roles: %w", err)
return nil, xerrors.Errorf("expand roles: %w", err)
}
ownerRoles := make([]previewtypes.WorkspaceOwnerRBACRole, 0, len(roles))
for _, it := range roles {
if it.OrganizationID != uuid.Nil && it.OrganizationID != r.data.templateVersion.OrganizationID {
if it.OrganizationID != uuid.Nil && it.OrganizationID != org {
continue
}
var orgID string
@@ -298,28 +319,28 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
// The correct public key has to be sent. This will not be leaked
// unless the template leaks it.
// nolint:gocritic
key, err := r.db.GetGitSSHKey(dbauthz.AsProvisionerd(ctx), ownerID)
key, err := db.GetGitSSHKey(dbauthz.AsProvisionerd(ctx), ownerID)
if err != nil && !xerrors.Is(err, sql.ErrNoRows) {
return xerrors.Errorf("ssh key: %w", err)
return nil, xerrors.Errorf("ssh key: %w", err)
}
// The groups need to be sent to preview. These groups are not exposed to the
// user, unless the template does it through the parameters. Regardless, we need
// the correct groups, and a user might not have read access.
// nolint:gocritic
groups, err := r.db.GetGroups(dbauthz.AsProvisionerd(ctx), database.GetGroupsParams{
OrganizationID: r.data.templateVersion.OrganizationID,
groups, err := db.GetGroups(dbauthz.AsProvisionerd(ctx), database.GetGroupsParams{
OrganizationID: org,
HasMemberID: ownerID,
})
if err != nil {
return xerrors.Errorf("groups: %w", err)
return nil, xerrors.Errorf("groups: %w", err)
}
groupNames := make([]string, 0, len(groups))
for _, it := range groups {
groupNames = append(groupNames, it.Group.Name)
}
r.currentOwner = &previewtypes.WorkspaceOwner{
return &previewtypes.WorkspaceOwner{
ID: user.ID.String(),
Name: user.Username,
FullName: user.Name,
@@ -328,17 +349,5 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
RBACRoles: ownerRoles,
SSHPublicKey: key.PublicKey,
Groups: groupNames,
}
return nil
}
func (r *dynamicRenderer) Close() {
r.once.Do(r.close)
}
func ProvisionerVersionSupportsDynamicParameters(version string) bool {
major, minor, err := apiversion.Parse(version)
// If the api version is not valid or less than 1.6, we need to use the static parameters
useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6)
return !useStaticParams
}, nil
}
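
For context, a hedged sketch of how the version gate added above might be consumed by a caller — the helper, its return values, and the import path are illustrative assumptions; only ProvisionerVersionSupportsDynamicParameters comes from this diff:

package example // illustrative only

import (
	"github.com/coder/coder/v2/coderd/dynamicparameters" // assumed import path
)

// chooseParameterPath is a hypothetical helper: given the provisioner daemon's
// reported API version string (e.g. "1.6"), decide whether the preview-based
// dynamic path or the legacy static path should handle parameters and tags.
func chooseParameterPath(provisionerAPIVersion string) string {
	if dynamicparameters.ProvisionerVersionSupportsDynamicParameters(provisionerAPIVersion) {
		return "dynamic (coder/preview)"
	}
	return "static (legacy)"
}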

View File

@@ -73,7 +73,7 @@ func ResolveParameters(
// always be valid. If there is a case where this is not true, then this has to
// be changed to allow the build to continue with a different set of values.
return nil, ParameterValidationError(diags)
return nil, parameterValidationError(diags)
}
// The user's input now needs to be validated against the parameters.
@@ -113,13 +113,13 @@ func ResolveParameters(
// are fatal. Additional validation for immutability has to be done manually.
output, diags = renderer.Render(ctx, ownerID, values.ValuesMap())
if diags.HasErrors() {
return nil, ParameterValidationError(diags)
return nil, parameterValidationError(diags)
}
// parameterNames is going to be used to remove any excess values that were left
// around without a parameter.
parameterNames := make(map[string]struct{}, len(output.Parameters))
parameterError := ParameterValidationError(nil)
parameterError := parameterValidationError(nil)
for _, parameter := range output.Parameters {
parameterNames[parameter.Name] = struct{}{}

View File

@@ -0,0 +1,100 @@
package dynamicparameters
import (
"fmt"
"github.com/hashicorp/hcl/v2"
"github.com/coder/preview"
previewtypes "github.com/coder/preview/types"
)
func CheckTags(output *preview.Output, diags hcl.Diagnostics) *DiagnosticError {
de := tagValidationError(diags)
failedTags := output.WorkspaceTags.UnusableTags()
if len(failedTags) == 0 && !de.HasError() {
return nil // No errors, all is good!
}
for _, tag := range failedTags {
name := tag.KeyString()
if name == previewtypes.UnknownStringValue {
name = "unknown" // Best effort to get a name for the tag
}
de.Extend(name, failedTagDiagnostic(tag))
}
return de
}
// failedTagDiagnostic is a helper function that takes an invalid tag and
// returns an appropriate hcl diagnostic for it.
func failedTagDiagnostic(tag previewtypes.Tag) hcl.Diagnostics {
const (
key = "key"
value = "value"
)
diags := hcl.Diagnostics{}
// TODO: It would be really nice to pull out the variable references to help identify the source of
// the unknown or invalid tag.
unknownErr := "Tag %s is not known, it likely refers to a variable that is not set or has no default."
invalidErr := "Tag %s is not valid, it must be a non-null string value."
if !tag.Key.Value.IsWhollyKnown() {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: fmt.Sprintf(unknownErr, key),
})
} else if !tag.Key.Valid() {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: fmt.Sprintf(invalidErr, key),
})
}
if !tag.Value.Value.IsWhollyKnown() {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: fmt.Sprintf(unknownErr, value),
})
} else if !tag.Value.Valid() {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: fmt.Sprintf(invalidErr, value),
})
}
if diags.HasErrors() {
// Stop here if there are diags, as the diags manually created above are more
// informative than the original tag's diagnostics.
return diags
}
// If we reach here, decorate the original tag's diagnostics
diagErr := "Tag %s: %s"
if tag.Key.ValueDiags.HasErrors() {
// add 'Tag key' prefix to each diagnostic
for _, d := range tag.Key.ValueDiags {
d.Summary = fmt.Sprintf(diagErr, key, d.Summary)
}
}
diags = diags.Extend(tag.Key.ValueDiags)
if tag.Value.ValueDiags.HasErrors() {
// add 'Tag value' prefix to each diagnostic
for _, d := range tag.Value.ValueDiags {
d.Summary = fmt.Sprintf(diagErr, value, d.Summary)
}
}
diags = diags.Extend(tag.Value.ValueDiags)
if !diags.HasErrors() {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Tag is invalid for some unknown reason. Please check the tag's value and key.",
})
}
return diags
}
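
A hedged usage sketch for CheckTags, assuming a *preview.Output and hcl.Diagnostics already obtained from preview.Preview; the caller, its printing, and the coderd import path are illustrative:

package example // illustrative only

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"

	"github.com/coder/coder/v2/coderd/dynamicparameters" // assumed import path
	"github.com/coder/preview"
)

// reportTagProblems is a hypothetical caller: it funnels the preview output
// through CheckTags and prints each per-tag diagnostic keyed by tag name.
func reportTagProblems(output *preview.Output, diags hcl.Diagnostics) bool {
	tagErr := dynamicparameters.CheckTags(output, diags)
	if tagErr == nil {
		return true // all workspace tags resolved to valid string values
	}
	for name, d := range tagErr.KeyedDiagnostics {
		fmt.Printf("workspace tag %q: %s\n", name, d.Error())
	}
	// tagErr.Diagnostics also carries any template-wide parse diagnostics.
	return false
}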

View File

@@ -0,0 +1,667 @@
package dynamicparameters
import (
"archive/zip"
"bytes"
"testing"
"github.com/spf13/afero"
"github.com/spf13/afero/zipfs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
archivefs "github.com/coder/coder/v2/archive/fs"
"github.com/coder/preview"
"github.com/coder/coder/v2/testutil"
)
func Test_DynamicWorkspaceTagDefaultsFromFile(t *testing.T) {
t.Parallel()
const (
unknownTag = "Tag value is not known"
invalidValueType = "Tag value is not valid"
)
for _, tc := range []struct {
name string
files map[string]string
expectTags map[string]string
expectedFailedTags map[string]string
expectedError string
}{
{
name: "single text file",
files: map[string]string{
"file.txt": `
hello world`,
},
expectTags: map[string]string{},
},
{
name: "main.tf with no workspace_tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}`,
},
expectTags: map[string]string{},
},
{
name: "main.tf with empty workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_workspace_tags" "tags" {}`,
},
expectTags: map[string]string{},
},
{
name: "main.tf with valid workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
variable "unrelated" {
type = bool
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
}
}`,
},
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
},
{
name: "main.tf with parameter that has default value from dynamic value",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
variable "az" {
type = string
default = "${""}${"a"}"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = var.az
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
}
}`,
},
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
},
{
name: "main.tf with parameter that has default value from another parameter",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = string
default = "${""}${"a"}"
}
data "coder_parameter" "az2" {
name = "az2"
type = "string"
default = data.coder_parameter.az.value
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az2.value
}
}`,
},
expectTags: map[string]string{
"platform": "kubernetes",
"cluster": "developers",
"region": "us",
"az": "a",
},
},
{
name: "main.tf with multiple valid workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
variable "region2" {
type = string
default = "eu"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_parameter" "az2" {
name = "az2"
type = "string"
default = "b"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
}
}
data "coder_workspace_tags" "more_tags" {
tags = {
"foo" = "bar"
}
}`,
},
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a", "foo": "bar"},
},
{
name: "main.tf with missing parameter default value for workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
}
}`,
},
expectTags: map[string]string{"cluster": "developers", "platform": "kubernetes", "region": "us"},
expectedFailedTags: map[string]string{
"az": "Tag value is not known, it likely refers to a variable that is not set or has no default.",
},
},
{
name: "main.tf with missing parameter default value outside workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_parameter" "notaz" {
name = "notaz"
type = "string"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
}
}`,
},
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
},
{
name: "main.tf with missing variable default value outside workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {}
variable "region" {
type = string
default = "us"
}
variable "notregion" {
type = string
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
}
}`,
},
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
},
{
name: "main.tf with disallowed data source for workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {
name = "foobar"
}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "local_file" "hostname" {
filename = "/etc/hostname"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
"hostname" = data.local_file.hostname.content
}
}`,
},
expectTags: map[string]string{
"platform": "kubernetes",
"cluster": "developers",
"region": "us",
"az": "a",
},
expectedFailedTags: map[string]string{
"hostname": unknownTag,
},
},
{
name: "main.tf with disallowed resource for workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {
name = "foobar"
}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = var.region
"az" = data.coder_parameter.az.value
"foobarbaz" = foo_bar.baz.name
}
}`,
},
expectTags: map[string]string{
"platform": "kubernetes",
"cluster": "developers",
"region": "us",
"az": "a",
"foobarbaz": "foobar",
},
},
{
name: "main.tf with allowed functions in workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {
name = "foobar"
}
locals {
some_path = pathexpand("file.txt")
}
variable "region" {
type = string
default = "us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "a"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = try(split(".", var.region)[1], "placeholder")
"az" = try(split(".", data.coder_parameter.az.value)[1], "placeholder")
}
}`,
},
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "placeholder", "az": "placeholder"},
},
{
// Trying to use '~' in a path expand is not allowed, as there is
// no concept of home directory in preview.
name: "main.tf with disallowed functions in workspace tags",
files: map[string]string{
"main.tf": `
provider "foo" {}
resource "foo_bar" "baz" {
name = "foobar"
}
locals {
some_path = pathexpand("file.txt")
}
variable "region" {
type = string
default = "region.us"
}
data "coder_parameter" "unrelated" {
name = "unrelated"
type = "list(string)"
default = jsonencode(["a", "b"])
}
data "coder_parameter" "az" {
name = "az"
type = "string"
default = "az.a"
}
data "coder_workspace_tags" "tags" {
tags = {
"platform" = "kubernetes",
"cluster" = "${"devel"}${"opers"}"
"region" = try(split(".", var.region)[1], "placeholder")
"az" = try(split(".", data.coder_parameter.az.value)[1], "placeholder")
"some_path" = pathexpand("~/file.txt")
}
}`,
},
expectTags: map[string]string{
"platform": "kubernetes",
"cluster": "developers",
"region": "us",
"az": "a",
},
expectedFailedTags: map[string]string{
"some_path": unknownTag,
},
},
{
name: "supported types",
files: map[string]string{
"main.tf": `
variable "stringvar" {
type = string
default = "a"
}
variable "numvar" {
type = number
default = 1
}
variable "boolvar" {
type = bool
default = true
}
variable "listvar" {
type = list(string)
default = ["a"]
}
variable "mapvar" {
type = map(string)
default = {"a": "b"}
}
data "coder_parameter" "stringparam" {
name = "stringparam"
type = "string"
default = "a"
}
data "coder_parameter" "numparam" {
name = "numparam"
type = "number"
default = 1
}
data "coder_parameter" "boolparam" {
name = "boolparam"
type = "bool"
default = true
}
data "coder_parameter" "listparam" {
name = "listparam"
type = "list(string)"
default = "[\"a\", \"b\"]"
}
data "coder_workspace_tags" "tags" {
tags = {
"stringvar" = var.stringvar
"numvar" = var.numvar
"boolvar" = var.boolvar
"listvar" = var.listvar
"mapvar" = var.mapvar
"stringparam" = data.coder_parameter.stringparam.value
"numparam" = data.coder_parameter.numparam.value
"boolparam" = data.coder_parameter.boolparam.value
"listparam" = data.coder_parameter.listparam.value
}
}`,
},
expectTags: map[string]string{
"stringvar": "a",
"numvar": "1",
"boolvar": "true",
"stringparam": "a",
"numparam": "1",
"boolparam": "true",
"listparam": `["a", "b"]`, // OK because params are cast to strings
},
expectedFailedTags: map[string]string{
"listvar": invalidValueType,
"mapvar": invalidValueType,
},
},
{
name: "overlapping var name",
files: map[string]string{
`main.tf`: `
variable "a" {
type = string
default = "1"
}
variable "unused" {
type = map(string)
default = {"a" : "b"}
}
variable "ab" {
description = "This is a variable of type string"
type = string
default = "ab"
}
data "coder_workspace_tags" "tags" {
tags = {
"foo": "bar",
"a": var.a,
}
}`,
},
expectTags: map[string]string{"foo": "bar", "a": "1"},
},
} {
t.Run(tc.name+"/tar", func(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitShort)
tarData := testutil.CreateTar(t, tc.files)
output, diags := preview.Preview(ctx, preview.Input{}, archivefs.FromTarReader(bytes.NewBuffer(tarData)))
if tc.expectedError != "" {
require.True(t, diags.HasErrors())
require.Contains(t, diags.Error(), tc.expectedError)
return
}
require.False(t, diags.HasErrors(), diags.Error())
tags := output.WorkspaceTags
tagMap := tags.Tags()
failedTags := tags.UnusableTags()
assert.Equal(t, tc.expectTags, tagMap, "expected tags to match, must always provide something")
for _, tag := range failedTags {
verr := failedTagDiagnostic(tag)
expectedErr, ok := tc.expectedFailedTags[tag.KeyString()]
require.Truef(t, ok, "assertion for failed tag required: %s, %s", tag.KeyString(), verr.Error())
assert.Contains(t, verr.Error(), expectedErr, tag.KeyString())
}
})
t.Run(tc.name+"/zip", func(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitShort)
zipData := testutil.CreateZip(t, tc.files)
// get the zip fs
r, err := zip.NewReader(bytes.NewReader(zipData), int64(len(zipData)))
require.NoError(t, err)
output, diags := preview.Preview(ctx, preview.Input{}, afero.NewIOFS(zipfs.New(r)))
if tc.expectedError != "" {
require.True(t, diags.HasErrors())
require.Contains(t, diags.Error(), tc.expectedError)
return
}
require.False(t, diags.HasErrors(), diags.Error())
tags := output.WorkspaceTags
tagMap := tags.Tags()
failedTags := tags.UnusableTags()
assert.Equal(t, tc.expectTags, tagMap, "expected tags to match, must always provide something")
for _, tag := range failedTags {
verr := failedTagDiagnostic(tag)
expectedErr, ok := tc.expectedFailedTags[tag.KeyString()]
assert.Truef(t, ok, "assertion for failed tag required: %s, %s", tag.KeyString(), verr.Error())
assert.Contains(t, verr.Error(), expectedErr)
}
})
}
}