Merge remote-tracking branch 'origin/main' into jjs/presets

Sas Swart
2025-02-11 14:04:37 +00:00
14 changed files with 179 additions and 48 deletions

View File

@@ -28,6 +28,7 @@ type WorkspaceResourcesOptions struct {
 	Title          string
 	ServerVersion  string
 	ListeningPorts map[uuid.UUID]codersdk.WorkspaceAgentListeningPortsResponse
+	Devcontainers  map[uuid.UUID]codersdk.WorkspaceAgentListContainersResponse
 }
 
 // WorkspaceResources displays the connection status and tree-view of provided resources.
@@ -95,15 +96,11 @@ func WorkspaceResources(writer io.Writer, resources []codersdk.WorkspaceResource
 		// Display all agents associated with the resource.
 		for index, agent := range resource.Agents {
 			tableWriter.AppendRow(renderAgentRow(agent, index, totalAgents, options))
-			if options.ListeningPorts != nil {
-				if lp, ok := options.ListeningPorts[agent.ID]; ok && len(lp.Ports) > 0 {
-					tableWriter.AppendRow(table.Row{
-						fmt.Sprintf(" %s─ %s", renderPipe(index, totalAgents), "Open Ports"),
-					})
-					for _, port := range lp.Ports {
-						tableWriter.AppendRow(renderPortRow(port, index, totalAgents))
-					}
-				}
+			for _, row := range renderListeningPorts(options, agent.ID, index, totalAgents) {
+				tableWriter.AppendRow(row)
+			}
+			for _, row := range renderDevcontainers(options, agent.ID, index, totalAgents) {
+				tableWriter.AppendRow(row)
 			}
 		}
 		tableWriter.AppendSeparator()
@@ -137,10 +134,28 @@ func renderAgentRow(agent codersdk.WorkspaceAgent, index, totalAgents int, optio
 	return row
 }
 
-func renderPortRow(port codersdk.WorkspaceAgentListeningPort, index, totalPorts int) table.Row {
+func renderListeningPorts(wro WorkspaceResourcesOptions, agentID uuid.UUID, idx, total int) []table.Row {
+	var rows []table.Row
+	if wro.ListeningPorts == nil {
+		return []table.Row{}
+	}
+	lp, ok := wro.ListeningPorts[agentID]
+	if !ok || len(lp.Ports) == 0 {
+		return []table.Row{}
+	}
+	rows = append(rows, table.Row{
+		fmt.Sprintf(" %s─ Open Ports", renderPipe(idx, total)),
+	})
+	for idx, port := range lp.Ports {
+		rows = append(rows, renderPortRow(port, idx, len(lp.Ports)))
+	}
+	return rows
+}
+
+func renderPortRow(port codersdk.WorkspaceAgentListeningPort, idx, total int) table.Row {
 	var sb strings.Builder
 	_, _ = sb.WriteString(" ")
-	_, _ = sb.WriteString(renderPipe(index, totalPorts))
+	_, _ = sb.WriteString(renderPipe(idx, total))
 	_, _ = sb.WriteString("─ ")
 	_, _ = sb.WriteString(pretty.Sprintf(DefaultStyles.Code, "%5d/%s", port.Port, port.Network))
 	if port.ProcessName != "" {
@@ -149,6 +164,47 @@ func renderPortRow(port codersdk.WorkspaceAgentListeningPort, index, totalPorts
 	return table.Row{sb.String()}
 }
 
+func renderDevcontainers(wro WorkspaceResourcesOptions, agentID uuid.UUID, index, totalAgents int) []table.Row {
+	var rows []table.Row
+	if wro.Devcontainers == nil {
+		return []table.Row{}
+	}
+	dc, ok := wro.Devcontainers[agentID]
+	if !ok || len(dc.Containers) == 0 {
+		return []table.Row{}
+	}
+	rows = append(rows, table.Row{
+		fmt.Sprintf(" %s─ %s", renderPipe(index, totalAgents), "Devcontainers"),
+	})
+	for idx, container := range dc.Containers {
+		rows = append(rows, renderDevcontainerRow(container, idx, len(dc.Containers)))
+	}
+	return rows
+}
+
+func renderDevcontainerRow(container codersdk.WorkspaceAgentDevcontainer, index, total int) table.Row {
+	var row table.Row
+	var sb strings.Builder
+	_, _ = sb.WriteString(" ")
+	_, _ = sb.WriteString(renderPipe(index, total))
+	_, _ = sb.WriteString("─ ")
+	_, _ = sb.WriteString(pretty.Sprintf(DefaultStyles.Code, "%s", container.FriendlyName))
+	row = append(row, sb.String())
+	sb.Reset()
+	if container.Running {
+		_, _ = sb.WriteString(pretty.Sprintf(DefaultStyles.Keyword, "(%s)", container.Status))
+	} else {
+		_, _ = sb.WriteString(pretty.Sprintf(DefaultStyles.Error, "(%s)", container.Status))
+	}
+	row = append(row, sb.String())
+	sb.Reset()
+	// "health" is not applicable here.
+	row = append(row, sb.String())
+	_, _ = sb.WriteString(container.Image)
+	row = append(row, sb.String())
+	return row
+}
+
 func renderAgentStatus(agent codersdk.WorkspaceAgent) string {
 	switch agent.Status {
 	case codersdk.WorkspaceAgentConnecting:

View File

@@ -2565,6 +2565,8 @@ func parseExternalAuthProvidersFromEnv(prefix string, environ []string) ([]coder
 	return providers, nil
 }
 
+var reInvalidPortAfterHost = regexp.MustCompile(`invalid port ".+" after host`)
+
 // If the user provides a postgres URL with a password that contains special
 // characters, the URL will be invalid. We need to escape the password so that
 // the URL parse doesn't fail at the DB connector level.
@@ -2573,7 +2575,11 @@ func escapePostgresURLUserInfo(v string) (string, error) {
 	// I wish I could use errors.Is here, but this error is not declared as a
 	// variable in net/url. :(
 	if err != nil {
-		if strings.Contains(err.Error(), "net/url: invalid userinfo") {
+		// Warning: The parser may also fail with an "invalid port" error if the password contains special
+		// characters. It does not detect invalid user information but instead incorrectly reports an invalid port.
+		//
+		// See: https://github.com/coder/coder/issues/16319
+		if strings.Contains(err.Error(), "net/url: invalid userinfo") || reInvalidPortAfterHost.MatchString(err.Error()) {
 			// If the URL is invalid, we assume it is because the password contains
 			// special characters that need to be escaped.
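
Note: as a standalone illustration of the failure mode described above (not part of the change itself), the sketch below shows how a "#" in the password trips net/url's port validation rather than its userinfo validation, and how percent-encoding the password yields a URL that parses cleanly; the co#der credential mirrors the new test case in the next file.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// The "#" starts a URL fragment, so the authority is truncated to "coder:co"
	// and the trailing ":co" is reported as an invalid port, not as bad userinfo.
	_, err := url.Parse("postgres://coder:co#der@localhost:5432/coder")
	fmt.Println(err) // ... invalid port ":co" after host

	// Percent-encoding the password (what escapePostgresURLUserInfo does once the
	// error is recognized) produces a URL that parses without error.
	escaped := "postgres://coder:" + url.QueryEscape("co#der") + "@localhost:5432/coder"
	u, err := url.Parse(escaped)
	fmt.Println(u.String(), err) // postgres://coder:co%23der@localhost:5432/coder <nil>
}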

View File

@@ -351,13 +351,23 @@ func TestEscapePostgresURLUserInfo(t *testing.T) {
 			output: "",
 			err:    xerrors.New("parse postgres url: parse \"postgres://local host:5432/coder\": invalid character \" \" in host name"),
 		},
+		{
+			input:  "postgres://coder:co?der@localhost:5432/coder",
+			output: "postgres://coder:co%3Fder@localhost:5432/coder",
+			err:    nil,
+		},
+		{
+			input:  "postgres://coder:co#der@localhost:5432/coder",
+			output: "postgres://coder:co%23der@localhost:5432/coder",
+			err:    nil,
+		},
 	}
 	for _, tc := range testcases {
 		tc := tc
 		t.Run(tc.input, func(t *testing.T) {
 			t.Parallel()
 			o, err := escapePostgresURLUserInfo(tc.input)
-			require.Equal(t, tc.output, o)
+			assert.Equal(t, tc.output, o)
 			if tc.err != nil {
 				require.Error(t, err)
 				require.EqualValues(t, tc.err.Error(), err.Error())

View File

@@ -38,15 +38,18 @@ func (r *RootCmd) show() *serpent.Command {
 			}
 			if workspace.LatestBuild.Status == codersdk.WorkspaceStatusRunning {
 				// Get listening ports for each agent.
-				options.ListeningPorts = fetchListeningPorts(inv, client, workspace.LatestBuild.Resources...)
+				ports, devcontainers := fetchRuntimeResources(inv, client, workspace.LatestBuild.Resources...)
+				options.ListeningPorts = ports
+				options.Devcontainers = devcontainers
 			}
 			return cliui.WorkspaceResources(inv.Stdout, workspace.LatestBuild.Resources, options)
 		},
 	}
 }
 
-func fetchListeningPorts(inv *serpent.Invocation, client *codersdk.Client, resources ...codersdk.WorkspaceResource) map[uuid.UUID]codersdk.WorkspaceAgentListeningPortsResponse {
+func fetchRuntimeResources(inv *serpent.Invocation, client *codersdk.Client, resources ...codersdk.WorkspaceResource) (map[uuid.UUID]codersdk.WorkspaceAgentListeningPortsResponse, map[uuid.UUID]codersdk.WorkspaceAgentListContainersResponse) {
 	ports := make(map[uuid.UUID]codersdk.WorkspaceAgentListeningPortsResponse)
+	devcontainers := make(map[uuid.UUID]codersdk.WorkspaceAgentListContainersResponse)
 	var wg sync.WaitGroup
 	var mu sync.Mutex
 	for _, res := range resources {
@@ -65,8 +68,23 @@
 				ports[agent.ID] = lp
 				mu.Unlock()
 			}()
+			wg.Add(1)
+			go func() {
+				defer wg.Done()
+				dc, err := client.WorkspaceAgentListContainers(inv.Context(), agent.ID, map[string]string{
+					// Labels set by VSCode Remote Containers and @devcontainers/cli.
+					"devcontainer.config_file":  "",
+					"devcontainer.local_folder": "",
+				})
+				if err != nil {
+					cliui.Warnf(inv.Stderr, "Failed to get devcontainers for agent %s: %v", agent.Name, err)
+				}
+				mu.Lock()
+				devcontainers[agent.ID] = dc
+				mu.Unlock()
+			}()
 		}
 	}
 	wg.Wait()
-	return ports
+	return ports, devcontainers
 }

View File

@@ -860,7 +860,6 @@ func (s *MethodTestSuite) TestOrganization() {
 			rbac.ResourceOrganizationMember.InOrg(o.ID).WithID(u.ID), policy.ActionCreate)
 	}))
 	s.Run("InsertPreset", s.Subtest(func(db database.Store, check *expects) {
-		ctx := context.Background()
 		org := dbgen.Organization(s.T(), db, database.Organization{})
 		user := dbgen.User(s.T(), db, database.User{})
 		template := dbgen.Template(s.T(), db, database.Template{
@@ -887,11 +886,10 @@
 			JobID: job.ID,
 		})
 		insertPresetParams := database.InsertPresetParams{
+			ID:                uuid.New(),
 			TemplateVersionID: workspaceBuild.TemplateVersionID,
 			Name:              "test",
 		}
-		_, err := db.InsertPreset(ctx, insertPresetParams)
-		require.NoError(s.T(), err)
 		check.Args(insertPresetParams).Asserts(rbac.ResourceTemplate, policy.ActionUpdate)
 	}))
 	s.Run("InsertPresetParameters", s.Subtest(func(db database.Store, check *expects) {
@@ -931,8 +929,6 @@
 			Names:  []string{"test"},
 			Values: []string{"test"},
 		}
-		_, err = db.InsertPresetParameters(context.Background(), insertPresetParametersParams)
-		require.NoError(s.T(), err)
 		check.Args(insertPresetParametersParams).Asserts(rbac.ResourceTemplate, policy.ActionUpdate)
 	}))
 	s.Run("DeleteOrganizationMember", s.Subtest(func(db database.Store, check *expects) {
@@ -3821,11 +3817,13 @@ func (s *MethodTestSuite) TestSystemFunctions() {
 			CreatedBy: user.ID,
 		})
 		preset, err := db.InsertPreset(ctx, database.InsertPresetParams{
+			ID:                uuid.New(),
 			TemplateVersionID: templateVersion.ID,
 			Name:              "test",
 		})
 		require.NoError(s.T(), err)
 		_, err = db.InsertPresetParameters(ctx, database.InsertPresetParametersParams{
+			ID:                      uuid.New(),
 			TemplateVersionPresetID: preset.ID,
 			Names:                   []string{"test"},
 			Values:                  []string{"test"},

View File

@@ -1266,14 +1266,14 @@ COMMENT ON COLUMN template_version_parameters.display_order IS 'Specifies the or
 COMMENT ON COLUMN template_version_parameters.ephemeral IS 'The value of an ephemeral parameter will not be preserved between consecutive workspace builds.';
 
 CREATE TABLE template_version_preset_parameters (
-    id uuid DEFAULT gen_random_uuid() NOT NULL,
+    id uuid NOT NULL,
     template_version_preset_id uuid NOT NULL,
     name text NOT NULL,
     value text NOT NULL
 );
 
 CREATE TABLE template_version_presets (
-    id uuid DEFAULT gen_random_uuid() NOT NULL,
+    id uuid NOT NULL,
     template_version_id uuid NOT NULL,
     name text NOT NULL,
     created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL

View File

@@ -1,22 +1,17 @@
--- TODO (sasswart): add IF NOT EXISTS and other clauses to make the migration more robust
 CREATE TABLE template_version_presets
 (
-    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    id UUID PRIMARY KEY NOT NULL,
     template_version_id UUID NOT NULL,
     name TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    -- TODO (sasswart): Will auditing have any relevance to presets?
     FOREIGN KEY (template_version_id) REFERENCES template_versions (id) ON DELETE CASCADE
 );
 
 CREATE TABLE template_version_preset_parameters
 (
-    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    id UUID PRIMARY KEY NOT NULL,
     template_version_preset_id UUID NOT NULL,
     name TEXT NOT NULL,
-    -- TODO (sasswart): would it be beneficial to allow presets to still offer a choice for values?
-    -- This would allow an operator to avoid having to create many similar templates where only one or
-    -- a few values are different.
     value TEXT NOT NULL,
     FOREIGN KEY (template_version_preset_id) REFERENCES template_version_presets (id) ON DELETE CASCADE
 );
@@ -28,9 +23,6 @@ ALTER TABLE workspace_builds
     ADD CONSTRAINT workspace_builds_template_version_preset_id_fkey
     FOREIGN KEY (template_version_preset_id)
     REFERENCES template_version_presets (id)
-    -- TODO (sasswart): SET NULL might not be the best choice here. The rest of the hierarchy has ON DELETE CASCADE.
-    -- We don't want CASCADE here, because we don't want to delete the workspace build if the preset is deleted.
-    -- However, do we want to lose record of the preset id for a workspace build?
     ON DELETE SET NULL;
 
 -- Recreate the view to include the new column.

View File

@@ -5494,19 +5494,25 @@ func (q *sqlQuerier) GetPresetsByTemplateVersionID(ctx context.Context, template
 const insertPreset = `-- name: InsertPreset :one
 INSERT INTO
-	template_version_presets (template_version_id, name, created_at)
+	template_version_presets (id, template_version_id, name, created_at)
 VALUES
-	($1, $2, $3) RETURNING id, template_version_id, name, created_at
+	($1, $2, $3, $4) RETURNING id, template_version_id, name, created_at
 `
 
 type InsertPresetParams struct {
+	ID                uuid.UUID `db:"id" json:"id"`
 	TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"`
 	Name              string    `db:"name" json:"name"`
 	CreatedAt         time.Time `db:"created_at" json:"created_at"`
 }
 
 func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) (TemplateVersionPreset, error) {
-	row := q.db.QueryRowContext(ctx, insertPreset, arg.TemplateVersionID, arg.Name, arg.CreatedAt)
+	row := q.db.QueryRowContext(ctx, insertPreset,
+		arg.ID,
+		arg.TemplateVersionID,
+		arg.Name,
+		arg.CreatedAt,
+	)
 	var i TemplateVersionPreset
 	err := row.Scan(
 		&i.ID,
@@ -5519,22 +5525,29 @@ func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) (
 const insertPresetParameters = `-- name: InsertPresetParameters :many
 INSERT INTO
-	template_version_preset_parameters (template_version_preset_id, name, value)
+	template_version_preset_parameters (id, template_version_preset_id, name, value)
 SELECT
 	$1,
-	unnest($2 :: TEXT[]),
-	unnest($3 :: TEXT[])
+	$2,
+	unnest($3 :: TEXT[]),
+	unnest($4 :: TEXT[])
 RETURNING id, template_version_preset_id, name, value
 `
 
 type InsertPresetParametersParams struct {
+	ID                      uuid.UUID `db:"id" json:"id"`
 	TemplateVersionPresetID uuid.UUID `db:"template_version_preset_id" json:"template_version_preset_id"`
 	Names                   []string  `db:"names" json:"names"`
 	Values                  []string  `db:"values" json:"values"`
 }
 
 func (q *sqlQuerier) InsertPresetParameters(ctx context.Context, arg InsertPresetParametersParams) ([]TemplateVersionPresetParameter, error) {
-	rows, err := q.db.QueryContext(ctx, insertPresetParameters, arg.TemplateVersionPresetID, pq.Array(arg.Names), pq.Array(arg.Values))
+	rows, err := q.db.QueryContext(ctx, insertPresetParameters,
+		arg.ID,
+		arg.TemplateVersionPresetID,
+		pq.Array(arg.Names),
+		pq.Array(arg.Values),
+	)
 	if err != nil {
 		return nil, err
 	}

View File

@@ -1,13 +1,14 @@
 -- name: InsertPreset :one
 INSERT INTO
-	template_version_presets (template_version_id, name, created_at)
+	template_version_presets (id, template_version_id, name, created_at)
 VALUES
-	(@template_version_id, @name, @created_at) RETURNING *;
+	(@id, @template_version_id, @name, @created_at) RETURNING *;
 
 -- name: InsertPresetParameters :many
 INSERT INTO
-	template_version_preset_parameters (template_version_preset_id, name, value)
+	template_version_preset_parameters (id, template_version_preset_id, name, value)
 SELECT
+	@id,
 	@template_version_preset_id,
 	unnest(@names :: TEXT[]),
 	unnest(@values :: TEXT[])

View File

@@ -1830,6 +1830,7 @@ func InsertWorkspacePresetsAndParameters(ctx context.Context, logger slog.Logger
 func InsertWorkspacePresetAndParameters(ctx context.Context, db database.Store, templateVersionID uuid.UUID, protoPreset *sdkproto.Preset, t time.Time) error {
 	err := db.InTx(func(tx database.Store) error {
 		dbPreset, err := tx.InsertPreset(ctx, database.InsertPresetParams{
+			ID:                uuid.New(),
 			TemplateVersionID: templateVersionID,
 			Name:              protoPreset.Name,
 			CreatedAt:         t,
@@ -1845,6 +1846,7 @@ func InsertWorkspacePresetAndParameters(ctx context.Context, db database.Store,
 			presetParameterValues = append(presetParameterValues, parameter.Value)
 		}
 		_, err = tx.InsertPresetParameters(ctx, database.InsertPresetParametersParams{
+			ID:                      uuid.New(),
 			TemplateVersionPresetID: dbPreset.ID,
 			Names:                   presetParameterNames,
 			Values:                  presetParameterValues,

View File

@@ -8,9 +8,14 @@ import (
 // Subset returns true if all the keys of a are present
 // in b and have the same values.
+// If the corresponding value of a[k] is the zero value in
+// b, Subset will skip comparing that value.
+// This allows checking for the presence of map keys.
 func Subset[T, U comparable](a, b map[T]U) bool {
+	var uz U
 	for ka, va := range a {
-		if vb, ok := b[ka]; !ok || va != vb {
+		ignoreZeroValue := va == uz
+		if vb, ok := b[ka]; !ok || (!ignoreZeroValue && va != vb) {
 			return false
 		}
 	}
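
Note: a standalone sketch of the new zero-value behavior (the generic is inlined here because the package path is not shown in this view, and the trailing return true is restored from outside the hunk). An empty value in the filter map now means "the key must be present, with any value", which is what the empty-valued devcontainer label filters above rely on.

package main

import "fmt"

// Copied from the diff above so the example is self-contained.
func Subset[T, U comparable](a, b map[T]U) bool {
	var uz U
	for ka, va := range a {
		ignoreZeroValue := va == uz
		if vb, ok := b[ka]; !ok || (!ignoreZeroValue && va != vb) {
			return false
		}
	}
	return true
}

func main() {
	labels := map[string]string{
		"devcontainer.local_folder": "/home/coder/project",
		"com.coder.test":            "abc123",
	}

	// Empty value in the filter: only key presence is required, so this matches.
	fmt.Println(Subset(map[string]string{"devcontainer.local_folder": ""}, labels)) // true

	// Non-empty value: values must still be equal.
	fmt.Println(Subset(map[string]string{"com.coder.test": "other"}, labels)) // false

	// A key missing from the target still fails, even with an empty filter value.
	fmt.Println(Subset(map[string]string{"devcontainer.config_file": ""}, labels)) // false
}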

View File

@@ -11,8 +11,9 @@ func TestSubset(t *testing.T) {
 	t.Parallel()
 
 	for idx, tc := range []struct {
 		a map[string]string
 		b map[string]string
+		// expected value from Subset
 		expected bool
 	}{
 		{
@@ -50,6 +51,24 @@ func TestSubset(t *testing.T) {
 			b:        map[string]string{"a": "1", "b": "3"},
 			expected: false,
 		},
+		// Zero value
+		{
+			a:        map[string]string{"a": "1", "b": ""},
+			b:        map[string]string{"a": "1", "b": "3"},
+			expected: true,
+		},
+		// Zero value, but the other way round
+		{
+			a:        map[string]string{"a": "1", "b": "3"},
+			b:        map[string]string{"a": "1", "b": ""},
+			expected: false,
+		},
+		// Both zero values
+		{
+			a:        map[string]string{"a": "1", "b": ""},
+			b:        map[string]string{"a": "1", "b": ""},
+			expected: true,
+		},
 	} {
 		tc := tc
 		t.Run("#"+strconv.Itoa(idx), func(t *testing.T) {

View File

@@ -1076,7 +1076,8 @@ func TestWorkspaceAgentContainers(t *testing.T) {
 	pool, err := dockertest.NewPool("")
 	require.NoError(t, err, "Could not connect to docker")
 	testLabels := map[string]string{
-		"com.coder.test": uuid.New().String(),
+		"com.coder.test":  uuid.New().String(),
+		"com.coder.empty": "",
 	}
 	ct, err := pool.RunWithOptions(&dockertest.RunOptions{
 		Repository: "busybox",
@@ -1097,7 +1098,10 @@
 		Repository: "busybox",
 		Tag:        "latest",
 		Cmd:        []string{"sleep", "infinity"},
-		Labels:     map[string]string{"com.coder.test": "ignoreme"},
+		Labels: map[string]string{
+			"com.coder.test":  "ignoreme",
+			"com.coder.empty": "",
+		},
 	}, func(config *docker.HostConfig) {
 		config.AutoRemove = true
 		config.RestartPolicy = docker.RestartPolicy{Name: "no"}

View File

@@ -10,6 +10,7 @@ import (
 	"net/netip"
 	"net/url"
 	"reflect"
+	"sort"
 	"strconv"
 	"sync"
 	"time"
@@ -402,6 +403,9 @@ func (u *updater) createPeerUpdateLocked(update tailnet.WorkspaceUpdate) *PeerUp
 		for name := range agent.Hosts {
 			fqdn = append(fqdn, name.WithTrailingDot())
 		}
+		sort.Slice(fqdn, func(i, j int) bool {
+			return len(fqdn[i]) < len(fqdn[j])
+		})
 		out.DeletedAgents[i] = &Agent{
 			Id:   tailnet.UUIDToByteSlice(agent.ID),
 			Name: agent.Name,
@@ -424,6 +428,9 @@ func (u *updater) convertAgentsLocked(agents []*tailnet.Agent) []*Agent {
 		for name := range agent.Hosts {
 			fqdn = append(fqdn, name.WithTrailingDot())
 		}
+		sort.Slice(fqdn, func(i, j int) bool {
+			return len(fqdn[i]) < len(fqdn[j])
+		})
 		protoAgent := &Agent{
 			Id:   tailnet.UUIDToByteSlice(agent.ID),
 			Name: agent.Name,