feat: use preview to compute workspace tags from terraform (#18720)
When a template uses dynamic parameters, its workspace tags are computed with `coder/preview` instead of the classic `tfparse` path.
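For orientation, here is a minimal, hypothetical sketch (not part of the diff below, error handling abbreviated) of the extraction flow this change introduces. It assumes a tar-packaged template and uses only helpers that appear later in the diff (`archivefs.FromTarReader`, `preview.Preview`, `output.WorkspaceTags`):

package example

import (
	"bytes"
	"context"
	"fmt"

	archivefs "github.com/coder/coder/v2/archive/fs"
	"github.com/coder/preview"
)

// previewWorkspaceTags is a hypothetical helper: load the template archive
// into an fs.FS, evaluate it with coder/preview, and read the computed tags.
func previewWorkspaceTags(ctx context.Context, tarData []byte) (map[string]string, error) {
	files := archivefs.FromTarReader(bytes.NewBuffer(tarData))

	// No plan JSON and no user parameter values: tags are computed before
	// `terraform plan` ever runs.
	output, diags := preview.Preview(ctx, preview.Input{}, files)
	if diags.HasErrors() {
		return nil, fmt.Errorf("preview template: %s", diags.Error())
	}

	// Tags whose key or value cannot be resolved to a known, non-null string
	// are reported separately; the real code converts them into diagnostics.
	if failed := output.WorkspaceTags.UnusableTags(); len(failed) > 0 {
		return nil, fmt.Errorf("%d workspace tags could not be evaluated", len(failed))
	}

	return output.WorkspaceTags.Tags(), nil
}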
@@ -25,6 +25,8 @@ type DynamicParameterTemplateParams struct {
	// TemplateID is used to update an existing template instead of creating a new one.
	TemplateID uuid.UUID

	Version func(request *codersdk.CreateTemplateVersionRequest)
}

func DynamicParameterTemplate(t *testing.T, client *codersdk.Client, org uuid.UUID, args DynamicParameterTemplateParams) (codersdk.Template, codersdk.TemplateVersion) {
@@ -47,6 +49,9 @@ func DynamicParameterTemplate(t *testing.T, client *codersdk.Client, org uuid.UU
		if args.TemplateID != uuid.Nil {
			request.TemplateID = args.TemplateID
		}
		if args.Version != nil {
			args.Version(request)
		}
	})
	AwaitTemplateVersionJobCompleted(t, client, version.ID)
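A hypothetical test snippet showing how the two new fields are meant to be used together; the fixture values (`orgID`, `existing.ID`) and the `ProvisionerTags` request field are assumptions, not part of this hunk:

	_, version := coderdtest.DynamicParameterTemplate(t, client, orgID, coderdtest.DynamicParameterTemplateParams{
		// Push a new version onto an existing template instead of creating one.
		TemplateID: existing.ID,
		// Mutate the create request before it is sent, e.g. to attach extra
		// provisioner tags (assumed field).
		Version: func(request *codersdk.CreateTemplateVersionRequest) {
			request.ProvisionerTags = map[string]string{"team": "dev"}
		},
	})
	_ = version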
@@ -10,7 +10,7 @@ import (
	"github.com/coder/coder/v2/codersdk"
)

func ParameterValidationError(diags hcl.Diagnostics) *DiagnosticError {
func parameterValidationError(diags hcl.Diagnostics) *DiagnosticError {
	return &DiagnosticError{
		Message:     "Unable to validate parameters",
		Diagnostics: diags,
@@ -18,9 +18,9 @@ func ParameterValidationError(diags hcl.Diagnostics) *DiagnosticError {
	}
}

func TagValidationError(diags hcl.Diagnostics) *DiagnosticError {
func tagValidationError(diags hcl.Diagnostics) *DiagnosticError {
	return &DiagnosticError{
		Message:          "Failed to parse workspace tags",
		Message:          "Unable to parse workspace tags",
		Diagnostics:      diags,
		KeyedDiagnostics: make(map[string]hcl.Diagnostics),
	}
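A sketch of how the now-unexported constructor is used to build up per-tag errors; it assumes the `DiagnosticError` methods that appear elsewhere in this commit (`Extend`, `HasError`, `Response`):

	de := tagValidationError(nil)
	de.Extend("region", hcl.Diagnostics{&hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "Tag value is not known, it likely refers to a variable that is not set or has no default.",
	}})
	if de.HasError() {
		code, resp := de.Response() // e.g. written back to the API caller via httpapi.Write
		_ = code
		_ = resp
	}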
@@ -243,7 +243,28 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
		return nil // already fetched
	}

	user, err := r.db.GetUserByID(ctx, ownerID)
	owner, err := WorkspaceOwner(ctx, r.db, r.data.templateVersion.OrganizationID, ownerID)
	if err != nil {
		return err
	}

	r.currentOwner = owner
	return nil
}

func (r *dynamicRenderer) Close() {
	r.once.Do(r.close)
}

func ProvisionerVersionSupportsDynamicParameters(version string) bool {
	major, minor, err := apiversion.Parse(version)
	// If the api version is not valid or less than 1.6, we need to use the static parameters
	useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6)
	return !useStaticParams
}

func WorkspaceOwner(ctx context.Context, db database.Store, org uuid.UUID, ownerID uuid.UUID) (*previewtypes.WorkspaceOwner, error) {
	user, err := db.GetUserByID(ctx, ownerID)
	if err != nil {
		// If the user failed to read, we also try to read the user from their
		// organization member. You only need to be able to read the organization member
@@ -252,37 +273,37 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
		// Only the terraform files can therefore leak more information than the
		// caller should have access to. All this info should be public assuming you can
		// read the user though.
		mem, err := database.ExpectOne(r.db.OrganizationMembers(ctx, database.OrganizationMembersParams{
			OrganizationID: r.data.templateVersion.OrganizationID,
		mem, err := database.ExpectOne(db.OrganizationMembers(ctx, database.OrganizationMembersParams{
			OrganizationID: org,
			UserID:         ownerID,
			IncludeSystem:  true,
		}))
		if err != nil {
			return xerrors.Errorf("fetch user: %w", err)
			return nil, xerrors.Errorf("fetch user: %w", err)
		}

		// Org member fetched, so use the provisioner context to fetch the user.
		//nolint:gocritic // Has the correct permissions, and matches the provisioning flow.
		user, err = r.db.GetUserByID(dbauthz.AsProvisionerd(ctx), mem.OrganizationMember.UserID)
		user, err = db.GetUserByID(dbauthz.AsProvisionerd(ctx), mem.OrganizationMember.UserID)
		if err != nil {
			return xerrors.Errorf("fetch user: %w", err)
			return nil, xerrors.Errorf("fetch user: %w", err)
		}
	}

	// nolint:gocritic // This is kind of the wrong query to use here, but it
	// matches how the provisioner currently works. We should figure out
	// something that needs less escalation but has the correct behavior.
	row, err := r.db.GetAuthorizationUserRoles(dbauthz.AsProvisionerd(ctx), ownerID)
	row, err := db.GetAuthorizationUserRoles(dbauthz.AsProvisionerd(ctx), ownerID)
	if err != nil {
		return xerrors.Errorf("user roles: %w", err)
		return nil, xerrors.Errorf("user roles: %w", err)
	}
	roles, err := row.RoleNames()
	if err != nil {
		return xerrors.Errorf("expand roles: %w", err)
		return nil, xerrors.Errorf("expand roles: %w", err)
	}
	ownerRoles := make([]previewtypes.WorkspaceOwnerRBACRole, 0, len(roles))
	for _, it := range roles {
		if it.OrganizationID != uuid.Nil && it.OrganizationID != r.data.templateVersion.OrganizationID {
		if it.OrganizationID != uuid.Nil && it.OrganizationID != org {
			continue
		}
		var orgID string
@@ -298,28 +319,28 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
	// The correct public key has to be sent. This will not be leaked
	// unless the template leaks it.
	// nolint:gocritic
	key, err := r.db.GetGitSSHKey(dbauthz.AsProvisionerd(ctx), ownerID)
	key, err := db.GetGitSSHKey(dbauthz.AsProvisionerd(ctx), ownerID)
	if err != nil && !xerrors.Is(err, sql.ErrNoRows) {
		return xerrors.Errorf("ssh key: %w", err)
		return nil, xerrors.Errorf("ssh key: %w", err)
	}

	// The groups need to be sent to preview. These groups are not exposed to the
	// user, unless the template does it through the parameters. Regardless, we need
	// the correct groups, and a user might not have read access.
	// nolint:gocritic
	groups, err := r.db.GetGroups(dbauthz.AsProvisionerd(ctx), database.GetGroupsParams{
		OrganizationID: r.data.templateVersion.OrganizationID,
	groups, err := db.GetGroups(dbauthz.AsProvisionerd(ctx), database.GetGroupsParams{
		OrganizationID: org,
		HasMemberID:    ownerID,
	})
	if err != nil {
		return xerrors.Errorf("groups: %w", err)
		return nil, xerrors.Errorf("groups: %w", err)
	}
	groupNames := make([]string, 0, len(groups))
	for _, it := range groups {
		groupNames = append(groupNames, it.Group.Name)
	}

	r.currentOwner = &previewtypes.WorkspaceOwner{
	return &previewtypes.WorkspaceOwner{
		ID:       user.ID.String(),
		Name:     user.Username,
		FullName: user.Name,
@@ -328,17 +349,5 @@ func (r *dynamicRenderer) getWorkspaceOwnerData(ctx context.Context, ownerID uui
		RBACRoles:    ownerRoles,
		SSHPublicKey: key.PublicKey,
		Groups:       groupNames,
	}
	return nil
}

func (r *dynamicRenderer) Close() {
	r.once.Do(r.close)
}

func ProvisionerVersionSupportsDynamicParameters(version string) bool {
	major, minor, err := apiversion.Parse(version)
	// If the api version is not valid or less than 1.6, we need to use the static parameters
	useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6)
	return !useStaticParams
	}, nil
}
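The version gate above reduces to a few concrete cases (assuming `apiversion.Parse` accepts "major.minor" strings):

	ProvisionerVersionSupportsDynamicParameters("1.5")   // false: pre-1.6 provisioners fall back to static parameters
	ProvisionerVersionSupportsDynamicParameters("1.6")   // true
	ProvisionerVersionSupportsDynamicParameters("2.0")   // true
	ProvisionerVersionSupportsDynamicParameters("bogus") // false: unparsable versions also use static parameters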
@@ -73,7 +73,7 @@ func ResolveParameters(
	// always be valid. If there is a case where this is not true, then this has to
	// be changed to allow the build to continue with a different set of values.

		return nil, ParameterValidationError(diags)
		return nil, parameterValidationError(diags)
	}

	// The user's input now needs to be validated against the parameters.
@@ -113,13 +113,13 @@ func ResolveParameters(
	// are fatal. Additional validation for immutability has to be done manually.
	output, diags = renderer.Render(ctx, ownerID, values.ValuesMap())
	if diags.HasErrors() {
		return nil, ParameterValidationError(diags)
		return nil, parameterValidationError(diags)
	}

	// parameterNames is going to be used to remove any excess values that were left
	// around without a parameter.
	parameterNames := make(map[string]struct{}, len(output.Parameters))
	parameterError := ParameterValidationError(nil)
	parameterError := parameterValidationError(nil)
	for _, parameter := range output.Parameters {
		parameterNames[parameter.Name] = struct{}{}
coderd/dynamicparameters/tags.go (new file, 100 lines)
@@ -0,0 +1,100 @@
package dynamicparameters

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"

	"github.com/coder/preview"
	previewtypes "github.com/coder/preview/types"
)

func CheckTags(output *preview.Output, diags hcl.Diagnostics) *DiagnosticError {
	de := tagValidationError(diags)
	failedTags := output.WorkspaceTags.UnusableTags()
	if len(failedTags) == 0 && !de.HasError() {
		return nil // No errors, all is good!
	}

	for _, tag := range failedTags {
		name := tag.KeyString()
		if name == previewtypes.UnknownStringValue {
			name = "unknown" // Best effort to get a name for the tag
		}
		de.Extend(name, failedTagDiagnostic(tag))
	}
	return de
}

// failedTagDiagnostic is a helper function that takes an invalid tag and
// returns an appropriate hcl diagnostic for it.
func failedTagDiagnostic(tag previewtypes.Tag) hcl.Diagnostics {
	const (
		key   = "key"
		value = "value"
	)

	diags := hcl.Diagnostics{}

	// TODO: It would be really nice to pull out the variable references to help identify the source of
	// the unknown or invalid tag.
	unknownErr := "Tag %s is not known, it likely refers to a variable that is not set or has no default."
	invalidErr := "Tag %s is not valid, it must be a non-null string value."

	if !tag.Key.Value.IsWhollyKnown() {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  fmt.Sprintf(unknownErr, key),
		})
	} else if !tag.Key.Valid() {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  fmt.Sprintf(invalidErr, key),
		})
	}

	if !tag.Value.Value.IsWhollyKnown() {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  fmt.Sprintf(unknownErr, value),
		})
	} else if !tag.Value.Valid() {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  fmt.Sprintf(invalidErr, value),
		})
	}

	if diags.HasErrors() {
		// Stop here if there are diags, as the diags manually created above are more
		// informative than the original tag's diagnostics.
		return diags
	}

	// If we reach here, decorate the original tag's diagnostics
	diagErr := "Tag %s: %s"
	if tag.Key.ValueDiags.HasErrors() {
		// add 'Tag key' prefix to each diagnostic
		for _, d := range tag.Key.ValueDiags {
			d.Summary = fmt.Sprintf(diagErr, key, d.Summary)
		}
	}
	diags = diags.Extend(tag.Key.ValueDiags)

	if tag.Value.ValueDiags.HasErrors() {
		// add 'Tag value' prefix to each diagnostic
		for _, d := range tag.Value.ValueDiags {
			d.Summary = fmt.Sprintf(diagErr, value, d.Summary)
		}
	}
	diags = diags.Extend(tag.Value.ValueDiags)

	if !diags.HasErrors() {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Tag is invalid for some unknown reason. Please check the tag's value and key.",
		})
	}

	return diags
}
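How `CheckTags` is consumed by the callers added later in this commit, in sketch form: preview the template (or render parameters during a build), then convert any tag problems into an HTTP error.

	output, diags := preview.Preview(ctx, preview.Input{Owner: *ownerData}, files)
	if tagErr := dynamicparameters.CheckTags(output, diags); tagErr != nil {
		code, resp := tagErr.Response()
		httpapi.Write(ctx, rw, code, resp)
		return
	}
	tags := output.WorkspaceTags.Tags()
	_ = tags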
coderd/dynamicparameters/tags_internal_test.go (new file, 667 lines)
@@ -0,0 +1,667 @@
|
||||
package dynamicparameters
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/afero"
|
||||
"github.com/spf13/afero/zipfs"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
archivefs "github.com/coder/coder/v2/archive/fs"
|
||||
"github.com/coder/preview"
|
||||
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
func Test_DynamicWorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
const (
|
||||
unknownTag = "Tag value is not known"
|
||||
invalidValueType = "Tag value is not valid"
|
||||
)
|
||||
|
||||
for _, tc := range []struct {
|
||||
name string
|
||||
files map[string]string
|
||||
expectTags map[string]string
|
||||
expectedFailedTags map[string]string
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "single text file",
|
||||
files: map[string]string{
|
||||
"file.txt": `
|
||||
hello world`,
|
||||
},
|
||||
expectTags: map[string]string{},
|
||||
},
|
||||
{
|
||||
name: "main.tf with no workspace_tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{},
|
||||
},
|
||||
{
|
||||
name: "main.tf with empty workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {}`,
|
||||
},
|
||||
expectTags: map[string]string{},
|
||||
},
|
||||
{
|
||||
name: "main.tf with valid workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
variable "unrelated" {
|
||||
type = bool
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with parameter that has default value from dynamic value",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
variable "az" {
|
||||
type = string
|
||||
default = "${""}${"a"}"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = var.az
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with parameter that has default value from another parameter",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = string
|
||||
default = "${""}${"a"}"
|
||||
}
|
||||
data "coder_parameter" "az2" {
|
||||
name = "az2"
|
||||
type = "string"
|
||||
default = data.coder_parameter.az.value
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az2.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{
|
||||
"platform": "kubernetes",
|
||||
"cluster": "developers",
|
||||
"region": "us",
|
||||
"az": "a",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "main.tf with multiple valid workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
variable "region2" {
|
||||
type = string
|
||||
default = "eu"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_parameter" "az2" {
|
||||
name = "az2"
|
||||
type = "string"
|
||||
default = "b"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}
|
||||
data "coder_workspace_tags" "more_tags" {
|
||||
tags = {
|
||||
"foo" = "bar"
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a", "foo": "bar"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with missing parameter default value for workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"cluster": "developers", "platform": "kubernetes", "region": "us"},
|
||||
expectedFailedTags: map[string]string{
|
||||
"az": "Tag value is not known, it likely refers to a variable that is not set or has no default.",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "main.tf with missing parameter default value outside workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_parameter" "notaz" {
|
||||
name = "notaz"
|
||||
type = "string"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with missing variable default value outside workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
variable "notregion" {
|
||||
type = string
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with disallowed data source for workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {
|
||||
name = "foobar"
|
||||
}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "local_file" "hostname" {
|
||||
filename = "/etc/hostname"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
"hostname" = data.local_file.hostname.content
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{
|
||||
"platform": "kubernetes",
|
||||
"cluster": "developers",
|
||||
"region": "us",
|
||||
"az": "a",
|
||||
},
|
||||
expectedFailedTags: map[string]string{
|
||||
"hostname": unknownTag,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "main.tf with disallowed resource for workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {
|
||||
name = "foobar"
|
||||
}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
"foobarbaz" = foo_bar.baz.name
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{
|
||||
"platform": "kubernetes",
|
||||
"cluster": "developers",
|
||||
"region": "us",
|
||||
"az": "a",
|
||||
"foobarbaz": "foobar",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "main.tf with allowed functions in workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {
|
||||
name = "foobar"
|
||||
}
|
||||
locals {
|
||||
some_path = pathexpand("file.txt")
|
||||
}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = try(split(".", var.region)[1], "placeholder")
|
||||
"az" = try(split(".", data.coder_parameter.az.value)[1], "placeholder")
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "placeholder", "az": "placeholder"},
|
||||
},
|
||||
{
|
||||
// Trying to use '~' in a path expand is not allowed, as there is
|
||||
// no concept of home directory in preview.
|
||||
name: "main.tf with disallowed functions in workspace tags",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {
|
||||
name = "foobar"
|
||||
}
|
||||
locals {
|
||||
some_path = pathexpand("file.txt")
|
||||
}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "region.us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = "az.a"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = try(split(".", var.region)[1], "placeholder")
|
||||
"az" = try(split(".", data.coder_parameter.az.value)[1], "placeholder")
|
||||
"some_path" = pathexpand("~/file.txt")
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{
|
||||
"platform": "kubernetes",
|
||||
"cluster": "developers",
|
||||
"region": "us",
|
||||
"az": "a",
|
||||
},
|
||||
expectedFailedTags: map[string]string{
|
||||
"some_path": unknownTag,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "supported types",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
variable "stringvar" {
|
||||
type = string
|
||||
default = "a"
|
||||
}
|
||||
variable "numvar" {
|
||||
type = number
|
||||
default = 1
|
||||
}
|
||||
variable "boolvar" {
|
||||
type = bool
|
||||
default = true
|
||||
}
|
||||
variable "listvar" {
|
||||
type = list(string)
|
||||
default = ["a"]
|
||||
}
|
||||
variable "mapvar" {
|
||||
type = map(string)
|
||||
default = {"a": "b"}
|
||||
}
|
||||
data "coder_parameter" "stringparam" {
|
||||
name = "stringparam"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_parameter" "numparam" {
|
||||
name = "numparam"
|
||||
type = "number"
|
||||
default = 1
|
||||
}
|
||||
data "coder_parameter" "boolparam" {
|
||||
name = "boolparam"
|
||||
type = "bool"
|
||||
default = true
|
||||
}
|
||||
data "coder_parameter" "listparam" {
|
||||
name = "listparam"
|
||||
type = "list(string)"
|
||||
default = "[\"a\", \"b\"]"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"stringvar" = var.stringvar
|
||||
"numvar" = var.numvar
|
||||
"boolvar" = var.boolvar
|
||||
"listvar" = var.listvar
|
||||
"mapvar" = var.mapvar
|
||||
"stringparam" = data.coder_parameter.stringparam.value
|
||||
"numparam" = data.coder_parameter.numparam.value
|
||||
"boolparam" = data.coder_parameter.boolparam.value
|
||||
"listparam" = data.coder_parameter.listparam.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{
|
||||
"stringvar": "a",
|
||||
"numvar": "1",
|
||||
"boolvar": "true",
|
||||
"stringparam": "a",
|
||||
"numparam": "1",
|
||||
"boolparam": "true",
|
||||
"listparam": `["a", "b"]`, // OK because params are cast to strings
|
||||
},
|
||||
expectedFailedTags: map[string]string{
|
||||
"listvar": invalidValueType,
|
||||
"mapvar": invalidValueType,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "overlapping var name",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
variable "unused" {
|
||||
type = map(string)
|
||||
default = {"a" : "b"}
|
||||
}
|
||||
variable "ab" {
|
||||
description = "This is a variable of type string"
|
||||
type = string
|
||||
default = "ab"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo": "bar",
|
||||
"a": var.a,
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"foo": "bar", "a": "1"},
|
||||
},
|
||||
} {
|
||||
t.Run(tc.name+"/tar", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
tarData := testutil.CreateTar(t, tc.files)
|
||||
|
||||
output, diags := preview.Preview(ctx, preview.Input{}, archivefs.FromTarReader(bytes.NewBuffer(tarData)))
|
||||
if tc.expectedError != "" {
|
||||
require.True(t, diags.HasErrors())
|
||||
require.Contains(t, diags.Error(), tc.expectedError)
|
||||
return
|
||||
}
|
||||
require.False(t, diags.HasErrors(), diags.Error())
|
||||
|
||||
tags := output.WorkspaceTags
|
||||
tagMap := tags.Tags()
|
||||
failedTags := tags.UnusableTags()
|
||||
assert.Equal(t, tc.expectTags, tagMap, "expected tags to match, must always provide something")
|
||||
for _, tag := range failedTags {
|
||||
verr := failedTagDiagnostic(tag)
|
||||
expectedErr, ok := tc.expectedFailedTags[tag.KeyString()]
|
||||
require.Truef(t, ok, "assertion for failed tag required: %s, %s", tag.KeyString(), verr.Error())
|
||||
assert.Contains(t, verr.Error(), expectedErr, tag.KeyString())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run(tc.name+"/zip", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
zipData := testutil.CreateZip(t, tc.files)
|
||||
|
||||
// get the zip fs
|
||||
r, err := zip.NewReader(bytes.NewReader(zipData), int64(len(zipData)))
|
||||
require.NoError(t, err)
|
||||
|
||||
output, diags := preview.Preview(ctx, preview.Input{}, afero.NewIOFS(zipfs.New(r)))
|
||||
if tc.expectedError != "" {
|
||||
require.True(t, diags.HasErrors())
|
||||
require.Contains(t, diags.Error(), tc.expectedError)
|
||||
return
|
||||
}
|
||||
require.False(t, diags.HasErrors(), diags.Error())
|
||||
|
||||
tags := output.WorkspaceTags
|
||||
tagMap := tags.Tags()
|
||||
failedTags := tags.UnusableTags()
|
||||
assert.Equal(t, tc.expectTags, tagMap, "expected tags to match, must always provide something")
|
||||
for _, tag := range failedTags {
|
||||
verr := failedTagDiagnostic(tag)
|
||||
expectedErr, ok := tc.expectedFailedTags[tag.KeyString()]
|
||||
assert.Truef(t, ok, "assertion for failed tag required: %s, %s", tag.KeyString(), verr.Error())
|
||||
assert.Contains(t, verr.Error(), expectedErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -70,6 +70,8 @@ func TestDynamicParametersOwnerSSHPublicKey(t *testing.T) {
	require.Equal(t, sshKey.PublicKey, preview.Parameters[0].Value.Value)
}

// TestDynamicParametersWithTerraformValues is for testing the websocket flow of
// dynamic parameters. No workspaces are created.
func TestDynamicParametersWithTerraformValues(t *testing.T) {
	t.Parallel()
@@ -1,6 +1,7 @@
package coderd

import (
	"bytes"
	"context"
	"crypto/sha256"
	"database/sql"
@@ -8,6 +9,8 @@ import (
	"encoding/json"
	"errors"
	"fmt"
	"io/fs"
	stdslog "log/slog"
	"net/http"
	"os"

@@ -18,6 +21,9 @@ import (
	"golang.org/x/xerrors"

	"cdr.dev/slog"
	archivefs "github.com/coder/coder/v2/archive/fs"
	"github.com/coder/coder/v2/coderd/dynamicparameters"
	"github.com/coder/preview"

	"github.com/coder/coder/v2/coderd/audit"
	"github.com/coder/coder/v2/coderd/database"
@@ -1464,8 +1470,9 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
		return
	}

	var dynamicTemplate bool
	if req.TemplateID != uuid.Nil {
		_, err := api.Database.GetTemplateByID(ctx, req.TemplateID)
		tpl, err := api.Database.GetTemplateByID(ctx, req.TemplateID)
		if httpapi.Is404Error(err) {
			httpapi.Write(ctx, rw, http.StatusNotFound, codersdk.Response{
				Message: "Template does not exist.",
@@ -1479,6 +1486,7 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
			})
			return
		}
		dynamicTemplate = !tpl.UseClassicParameterFlow
	}

	if req.ExampleID != "" && req.FileID != uuid.Nil {
@@ -1574,45 +1582,18 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
		}
	}

	// Try to parse template tags from the given file.
	tempDir, err := os.MkdirTemp(api.Options.CacheDir, "tfparse-*")
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "create tempdir: " + err.Error(),
		})
		return
	}
	defer func() {
		if err := os.RemoveAll(tempDir); err != nil {
			api.Logger.Error(ctx, "failed to remove temporary tfparse dir", slog.Error(err))
	var parsedTags map[string]string
	var ok bool
	if dynamicTemplate {
		parsedTags, ok = api.dynamicTemplateVersionTags(ctx, rw, organization.ID, apiKey.UserID, file)
		if !ok {
			return
		}
	} else {
		parsedTags, ok = api.classicTemplateVersionTags(ctx, rw, file)
		if !ok {
			return
		}
	}()

	if err := tfparse.WriteArchive(file.Data, file.Mimetype, tempDir); err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "extract archive to tempdir: " + err.Error(),
		})
		return
	}

	parser, diags := tfparse.New(tempDir, tfparse.WithLogger(api.Logger.Named("tfparse")))
	if diags.HasErrors() {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "parse module: " + diags.Error(),
		})
		return
	}

	parsedTags, err := parser.WorkspaceTagDefaults(ctx)
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "evaluate default values of workspace tags: " + err.Error(),
		})
		return
	}

	// Ensure the "owner" tag is properly applied in addition to request tags and coder_workspace_tags.
@@ -1781,6 +1762,105 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
		warnings))
}

func (api *API) dynamicTemplateVersionTags(ctx context.Context, rw http.ResponseWriter, orgID uuid.UUID, owner uuid.UUID, file database.File) (map[string]string, bool) {
	ownerData, err := dynamicparameters.WorkspaceOwner(ctx, api.Database, orgID, owner)
	if err != nil {
		if httpapi.Is404Error(err) {
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "Internal error checking workspace tags",
				Detail:  fmt.Sprintf("Owner not found, uuid=%s", owner.String()),
			})
			return nil, false
		}
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "fetch owner data: " + err.Error(),
		})
		return nil, false
	}

	var files fs.FS
	switch file.Mimetype {
	case "application/x-tar":
		files = archivefs.FromTarReader(bytes.NewBuffer(file.Data))
	case "application/zip":
		files, err = archivefs.FromZipReader(bytes.NewReader(file.Data), int64(len(file.Data)))
		if err != nil {
			httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
				Message: "Internal error checking workspace tags",
				Detail:  "extract zip archive: " + err.Error(),
			})
			return nil, false
		}
	default:
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "Unsupported file type for dynamic template version tags",
			Detail:  fmt.Sprintf("Mimetype %q is not supported for dynamic template version tags", file.Mimetype),
		})
		return nil, false
	}

	output, diags := preview.Preview(ctx, preview.Input{
		PlanJSON:        nil, // Template versions are before `terraform plan`
		ParameterValues: nil, // No user-specified parameters
		Owner:           *ownerData,
		Logger:          stdslog.New(stdslog.DiscardHandler),
	}, files)
	tagErr := dynamicparameters.CheckTags(output, diags)
	if tagErr != nil {
		code, resp := tagErr.Response()
		httpapi.Write(ctx, rw, code, resp)
		return nil, false
	}

	return output.WorkspaceTags.Tags(), true
}

func (api *API) classicTemplateVersionTags(ctx context.Context, rw http.ResponseWriter, file database.File) (map[string]string, bool) {
	// Try to parse template tags from the given file.
	tempDir, err := os.MkdirTemp(api.Options.CacheDir, "tfparse-*")
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "create tempdir: " + err.Error(),
		})
		return nil, false
	}
	defer func() {
		if err := os.RemoveAll(tempDir); err != nil {
			api.Logger.Error(ctx, "failed to remove temporary tfparse dir", slog.Error(err))
		}
	}()

	if err := tfparse.WriteArchive(file.Data, file.Mimetype, tempDir); err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "extract archive to tempdir: " + err.Error(),
		})
		return nil, false
	}

	parser, diags := tfparse.New(tempDir, tfparse.WithLogger(api.Logger.Named("tfparse")))
	if diags.HasErrors() {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "parse module: " + diags.Error(),
		})
		return nil, false
	}

	parsedTags, err := parser.WorkspaceTagDefaults(ctx)
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Internal error checking workspace tags",
			Detail:  "evaluate default values of workspace tags: " + err.Error(),
		})
		return nil, false
	}

	return parsedTags, true
}

// templateVersionResources returns the workspace agent resources associated
// with a template version. A template can specify more than one resource to be
// provisioned, each resource can have an agent that dials back to coderd. The
@@ -83,6 +83,7 @@ type Builder struct {
	parameterValues                      *[]string
	templateVersionPresetParameterValues *[]database.TemplateVersionPresetParameter
	parameterRender                      dynamicparameters.Renderer
	workspaceTags                        *map[string]string

	prebuiltWorkspaceBuildStage  sdkproto.PrebuiltWorkspaceBuildStage
	verifyNoLegacyParametersOnce bool
@@ -939,6 +940,76 @@ func (b *Builder) getLastBuildJob() (*database.ProvisionerJob, error) {
}

func (b *Builder) getProvisionerTags() (map[string]string, error) {
	if b.workspaceTags != nil {
		return *b.workspaceTags, nil
	}

	var tags map[string]string
	var err error

	if b.usingDynamicParameters() {
		tags, err = b.getDynamicProvisionerTags()
	} else {
		tags, err = b.getClassicProvisionerTags()
	}
	if err != nil {
		return nil, xerrors.Errorf("get provisioner tags: %w", err)
	}

	b.workspaceTags = &tags
	return *b.workspaceTags, nil
}

func (b *Builder) getDynamicProvisionerTags() (map[string]string, error) {
	// Step 1: Mutate template manually set version tags
	templateVersionJob, err := b.getTemplateVersionJob()
	if err != nil {
		return nil, BuildError{http.StatusInternalServerError, "failed to fetch template version job", err}
	}
	annotationTags := provisionersdk.MutateTags(b.workspace.OwnerID, templateVersionJob.Tags)

	tags := map[string]string{}
	for name, value := range annotationTags {
		tags[name] = value
	}

	// Step 2: Fetch tags from the template
	render, err := b.getDynamicParameterRenderer()
	if err != nil {
		return nil, BuildError{http.StatusInternalServerError, "failed to get dynamic parameter renderer", err}
	}

	names, values, err := b.getParameters()
	if err != nil {
		return nil, xerrors.Errorf("tags render: %w", err)
	}

	vals := make(map[string]string, len(names))
	for i, name := range names {
		if i >= len(values) {
			return nil, BuildError{
				http.StatusInternalServerError,
				fmt.Sprintf("parameter names and values mismatch, %d names & %d values", len(names), len(values)),
				xerrors.New("names and values mismatch"),
			}
		}
		vals[name] = values[i]
	}

	output, diags := render.Render(b.ctx, b.workspace.OwnerID, vals)
	tagErr := dynamicparameters.CheckTags(output, diags)
	if tagErr != nil {
		return nil, tagErr
	}

	for k, v := range output.WorkspaceTags.Tags() {
		tags[k] = v
	}

	return tags, nil
}

func (b *Builder) getClassicProvisionerTags() (map[string]string, error) {
	// Step 1: Mutate template version tags
	templateVersionJob, err := b.getTemplateVersionJob()
	if err != nil {