mirror of https://github.com/coder/coder.git, synced 2025-07-03 16:13:58 +00:00
* feat: HA tailnet coordinator * fixup! feat: HA tailnet coordinator * fixup! feat: HA tailnet coordinator * remove printlns * close all connections on coordinator * impelement high availability feature * fixup! impelement high availability feature * fixup! impelement high availability feature * fixup! impelement high availability feature * fixup! impelement high availability feature * Add replicas * Add DERP meshing to arbitrary addresses * Move packages to highavailability folder * Move coordinator to high availability package * Add flags for HA * Rename to replicasync * Denest packages for replicas * Add test for multiple replicas * Fix coordination test * Add HA to the helm chart * Rename function pointer * Add warnings for HA * Add the ability to block endpoints * Add flag to disable P2P connections * Wow, I made the tests pass * Add replicas endpoint * Ensure close kills replica * Update sql * Add database latency to high availability * Pipe TLS to DERP mesh * Fix DERP mesh with TLS * Add tests for TLS * Fix replica sync TLS * Fix RootCA for replica meshing * Remove ID from replicasync * Fix getting certificates for meshing * Remove excessive locking * Fix linting * Store mesh key in the database * Fix replica key for tests * Fix types gen * Fix unlocking unlocked * Fix race in tests * Update enterprise/derpmesh/derpmesh.go Co-authored-by: Colin Adler <colin1adler@gmail.com> * Rename to syncReplicas * Reuse http client * Delete old replicas on a CRON * Fix race condition in connection tests * Fix linting * Fix nil type * Move pubsub to in-memory for twenty test * Add comment for configuration tweaking * Fix leak with transport * Fix close leak in derpmesh * Fix race when creating server * Remove handler update * Skip test on Windows * Fix DERP mesh test * Wrap HTTP handler replacement in mutex * Fix error message for relay * Fix API handler for normal tests * Fix speedtest * Fix replica resend * Fix derpmesh send * Ping async * Increase wait time of template version jobd * Fix race when closing replica sync * Add name to client * Log the derpmap being used * Don't connect if DERP is empty * Improve agent coordinator logging * Fix lock in coordinator * Fix relay addr * Fix race when updating durations * Fix client publish race * Run pubsub loop in a queue * Store agent nodes in order * Fix coordinator locking * Check for closed pipe Co-authored-by: Colin Adler <colin1adler@gmail.com>
725 lines
22 KiB
Go
package coderd_test

import (
    "bufio"
    "context"
    "encoding/json"
    "fmt"
    "net"
    "runtime"
    "strconv"
    "strings"
    "testing"
    "time"

    "github.com/google/uuid"
    "github.com/stretchr/testify/require"

    "cdr.dev/slog"
    "cdr.dev/slog/sloggers/slogtest"
    "github.com/coder/coder/agent"
    "github.com/coder/coder/coderd/coderdtest"
    "github.com/coder/coder/codersdk"
    "github.com/coder/coder/provisioner/echo"
    "github.com/coder/coder/provisionersdk/proto"
    "github.com/coder/coder/testutil"
)

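// TestWorkspaceAgent provisions a workspace with a single agent and verifies
// that the agent, including its configured directory, is reported back by the
// workspace and agent APIs.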
func TestWorkspaceAgent(t *testing.T) {
    t.Parallel()
    t.Run("Connect", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t, &coderdtest.Options{
            IncludeProvisionerDaemon: true,
        })
        user := coderdtest.CreateFirstUser(t, client)
        authToken := uuid.NewString()
        tmpDir := t.TempDir()
        version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
            Parse:           echo.ParseComplete,
            ProvisionDryRun: echo.ProvisionComplete,
            Provision: []*proto.Provision_Response{{
                Type: &proto.Provision_Response_Complete{
                    Complete: &proto.Provision_Complete{
                        Resources: []*proto.Resource{{
                            Name: "example",
                            Type: "aws_instance",
                            Agents: []*proto.Agent{{
                                Id:        uuid.NewString(),
                                Directory: tmpDir,
                                Auth: &proto.Agent_Token{
                                    Token: authToken,
                                },
                            }},
                        }},
                    },
                },
            }},
        })
        template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
        coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
        workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
        coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

        ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
        defer cancel()

        workspace, err := client.Workspace(ctx, workspace.ID)
        require.NoError(t, err)
        require.Equal(t, tmpDir, workspace.LatestBuild.Resources[0].Agents[0].Directory)
        _, err = client.WorkspaceAgent(ctx, workspace.LatestBuild.Resources[0].Agents[0].ID)
        require.NoError(t, err)
    })
}

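// TestWorkspaceAgentListen covers the agent coordinator endpoint: a running
// agent should be dialable over tailnet, and an agent authenticating against
// a build that is no longer the latest should be rejected.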
func TestWorkspaceAgentListen(t *testing.T) {
    t.Parallel()

    t.Run("Connect", func(t *testing.T) {
        t.Parallel()

        client := coderdtest.New(t, &coderdtest.Options{
            IncludeProvisionerDaemon: true,
        })
        user := coderdtest.CreateFirstUser(t, client)
        authToken := uuid.NewString()
        version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
            Parse:           echo.ParseComplete,
            ProvisionDryRun: echo.ProvisionComplete,
            Provision: []*proto.Provision_Response{{
                Type: &proto.Provision_Response_Complete{
                    Complete: &proto.Provision_Complete{
                        Resources: []*proto.Resource{{
                            Name: "example",
                            Type: "aws_instance",
                            Agents: []*proto.Agent{{
                                Id: uuid.NewString(),
                                Auth: &proto.Agent_Token{
                                    Token: authToken,
                                },
                            }},
                        }},
                    },
                },
            }},
        })
        template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
        coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
        workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
        coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

        agentClient := codersdk.New(client.URL)
        agentClient.SessionToken = authToken
        agentCloser := agent.New(agent.Options{
            FetchMetadata:     agentClient.WorkspaceAgentMetadata,
            CoordinatorDialer: agentClient.ListenWorkspaceAgentTailnet,
            Logger:            slogtest.Make(t, nil).Named("agent").Leveled(slog.LevelDebug),
        })
        defer func() {
            _ = agentCloser.Close()
        }()

        ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
        defer cancel()

        resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)
        conn, err := client.DialWorkspaceAgent(ctx, resources[0].Agents[0].ID, nil)
        require.NoError(t, err)
        defer func() {
            _ = conn.Close()
        }()
        require.Eventually(t, func() bool {
            _, err := conn.Ping(ctx)
            return err == nil
        }, testutil.WaitLong, testutil.IntervalFast)
    })

    t.Run("FailNonLatestBuild", func(t *testing.T) {
        t.Parallel()

        client := coderdtest.New(t, &coderdtest.Options{
            IncludeProvisionerDaemon: true,
        })

        user := coderdtest.CreateFirstUser(t, client)
        authToken := uuid.NewString()
        version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
            Parse:           echo.ParseComplete,
            ProvisionDryRun: echo.ProvisionComplete,
            Provision: []*proto.Provision_Response{{
                Type: &proto.Provision_Response_Complete{
                    Complete: &proto.Provision_Complete{
                        Resources: []*proto.Resource{{
                            Name: "example",
                            Type: "aws_instance",
                            Agents: []*proto.Agent{{
                                Id: uuid.NewString(),
                                Auth: &proto.Agent_Token{
                                    Token: authToken,
                                },
                            }},
                        }},
                    },
                },
            }},
        })

        template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
        coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
        workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
        coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

        version = coderdtest.UpdateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
            Parse:           echo.ParseComplete,
            ProvisionDryRun: echo.ProvisionComplete,
            Provision: []*proto.Provision_Response{{
                Type: &proto.Provision_Response_Complete{
                    Complete: &proto.Provision_Complete{
                        Resources: []*proto.Resource{{
                            Name: "example",
                            Type: "aws_instance",
                            Agents: []*proto.Agent{{
                                Id: uuid.NewString(),
                                Auth: &proto.Agent_Token{
                                    Token: uuid.NewString(),
                                },
                            }},
                        }},
                    },
                },
            }},
        }, template.ID)
        coderdtest.AwaitTemplateVersionJob(t, client, version.ID)

        ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
        defer cancel()

        stopBuild, err := client.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{
            TemplateVersionID: version.ID,
            Transition:        codersdk.WorkspaceTransitionStop,
        })
        require.NoError(t, err)
        coderdtest.AwaitWorkspaceBuildJob(t, client, stopBuild.ID)

        agentClient := codersdk.New(client.URL)
        agentClient.SessionToken = authToken

        _, err = agentClient.ListenWorkspaceAgentTailnet(ctx)
        require.Error(t, err)
        require.ErrorContains(t, err, "build is outdated")
    })
}

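// TestWorkspaceAgentTailnet dials an agent over tailnet and runs a command
// through an SSH session to confirm end-to-end connectivity.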
func TestWorkspaceAgentTailnet(t *testing.T) {
    t.Parallel()
    client, daemonCloser := coderdtest.NewWithProvisionerCloser(t, nil)
    user := coderdtest.CreateFirstUser(t, client)
    authToken := uuid.NewString()
    version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
        Parse:           echo.ParseComplete,
        ProvisionDryRun: echo.ProvisionComplete,
        Provision: []*proto.Provision_Response{{
            Type: &proto.Provision_Response_Complete{
                Complete: &proto.Provision_Complete{
                    Resources: []*proto.Resource{{
                        Name: "example",
                        Type: "aws_instance",
                        Agents: []*proto.Agent{{
                            Id: uuid.NewString(),
                            Auth: &proto.Agent_Token{
                                Token: authToken,
                            },
                        }},
                    }},
                },
            },
        }},
    })
    template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
    coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
    workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
    coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)
    daemonCloser.Close()

    agentClient := codersdk.New(client.URL)
    agentClient.SessionToken = authToken
    agentCloser := agent.New(agent.Options{
        FetchMetadata:     agentClient.WorkspaceAgentMetadata,
        CoordinatorDialer: agentClient.ListenWorkspaceAgentTailnet,
        Logger:            slogtest.Make(t, nil).Named("agent").Leveled(slog.LevelDebug),
    })
    defer agentCloser.Close()
    resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)

    ctx, cancelFunc := context.WithCancel(context.Background())
    defer cancelFunc()
    conn, err := client.DialWorkspaceAgent(ctx, resources[0].Agents[0].ID, &codersdk.DialWorkspaceAgentOptions{
        Logger: slogtest.Make(t, nil).Named("client").Leveled(slog.LevelDebug),
    })
    require.NoError(t, err)
    defer conn.Close()
    sshClient, err := conn.SSHClient()
    require.NoError(t, err)
    session, err := sshClient.NewSession()
    require.NoError(t, err)
    output, err := session.CombinedOutput("echo test")
    require.NoError(t, err)
    _ = session.Close()
    _ = sshClient.Close()
    _ = conn.Close()
    require.Equal(t, "test", strings.TrimSpace(string(output)))
}

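// TestWorkspaceAgentPTY exercises the reconnecting PTY endpoint by resizing
// the terminal, sending a command, and asserting that both the echoed command
// and its output appear in the stream.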
func TestWorkspaceAgentPTY(t *testing.T) {
    t.Parallel()
    if runtime.GOOS == "windows" {
        // This might be our implementation, or ConPTY itself.
        // It's difficult to find extensive tests for it, so
        // it seems like it could be either.
        t.Skip("ConPTY appears to be inconsistent on Windows.")
    }
    client := coderdtest.New(t, &coderdtest.Options{
        IncludeProvisionerDaemon: true,
    })
    user := coderdtest.CreateFirstUser(t, client)
    authToken := uuid.NewString()
    version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
        Parse:           echo.ParseComplete,
        ProvisionDryRun: echo.ProvisionComplete,
        Provision: []*proto.Provision_Response{{
            Type: &proto.Provision_Response_Complete{
                Complete: &proto.Provision_Complete{
                    Resources: []*proto.Resource{{
                        Name: "example",
                        Type: "aws_instance",
                        Agents: []*proto.Agent{{
                            Id: uuid.NewString(),
                            Auth: &proto.Agent_Token{
                                Token: authToken,
                            },
                        }},
                    }},
                },
            },
        }},
    })
    template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
    coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
    workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
    coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

    agentClient := codersdk.New(client.URL)
    agentClient.SessionToken = authToken
    agentCloser := agent.New(agent.Options{
        FetchMetadata:     agentClient.WorkspaceAgentMetadata,
        CoordinatorDialer: agentClient.ListenWorkspaceAgentTailnet,
        Logger:            slogtest.Make(t, nil).Named("agent").Leveled(slog.LevelDebug),
    })
    defer func() {
        _ = agentCloser.Close()
    }()
    resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)
    ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
    defer cancel()

    conn, err := client.WorkspaceAgentReconnectingPTY(ctx, resources[0].Agents[0].ID, uuid.New(), 80, 80, "/bin/bash")
    require.NoError(t, err)
    defer conn.Close()

    // First attempt to resize the TTY.
    // The websocket will close if it fails!
    data, err := json.Marshal(codersdk.ReconnectingPTYRequest{
        Height: 250,
        Width:  250,
    })
    require.NoError(t, err)
    _, err = conn.Write(data)
    require.NoError(t, err)
    bufRead := bufio.NewReader(conn)

    // Brief pause to reduce the likelihood that we send keystrokes while
    // the shell is simultaneously sending a prompt.
    time.Sleep(100 * time.Millisecond)

    data, err = json.Marshal(codersdk.ReconnectingPTYRequest{
        Data: "echo test\r\n",
    })
    require.NoError(t, err)
    _, err = conn.Write(data)
    require.NoError(t, err)

    expectLine := func(matcher func(string) bool) {
        for {
            line, err := bufRead.ReadString('\n')
            require.NoError(t, err)
            if matcher(line) {
                break
            }
        }
    }
    matchEchoCommand := func(line string) bool {
        return strings.Contains(line, "echo test")
    }
    matchEchoOutput := func(line string) bool {
        return strings.Contains(line, "test") && !strings.Contains(line, "echo")
    }

    expectLine(matchEchoCommand)
    expectLine(matchEchoOutput)
}

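// TestWorkspaceAgentListeningPorts verifies which TCP ports an agent reports
// as listening: expected ports appear, filtered and app-associated ports are
// hidden, and unsupported platforms return an empty list.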
func TestWorkspaceAgentListeningPorts(t *testing.T) {
    t.Parallel()

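    // setup creates a workspace with a single agent (optionally exposing the
    // given apps), starts the agent, and returns a client, the port coderd is
    // listening on, and the agent ID.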
    setup := func(t *testing.T, apps []*proto.App) (*codersdk.Client, uint16, uuid.UUID) {
        client := coderdtest.New(t, &coderdtest.Options{
            IncludeProvisionerDaemon: true,
        })
        coderdPort, err := strconv.Atoi(client.URL.Port())
        require.NoError(t, err)

        user := coderdtest.CreateFirstUser(t, client)
        authToken := uuid.NewString()
        version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
            Parse:           echo.ParseComplete,
            ProvisionDryRun: echo.ProvisionComplete,
            Provision: []*proto.Provision_Response{{
                Type: &proto.Provision_Response_Complete{
                    Complete: &proto.Provision_Complete{
                        Resources: []*proto.Resource{{
                            Name: "example",
                            Type: "aws_instance",
                            Agents: []*proto.Agent{{
                                Id: uuid.NewString(),
                                Auth: &proto.Agent_Token{
                                    Token: authToken,
                                },
                                Apps: apps,
                            }},
                        }},
                    },
                },
            }},
        })
        template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
        coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
        workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
        coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

        agentClient := codersdk.New(client.URL)
        agentClient.SessionToken = authToken
        agentCloser := agent.New(agent.Options{
            FetchMetadata:     agentClient.WorkspaceAgentMetadata,
            CoordinatorDialer: agentClient.ListenWorkspaceAgentTailnet,
            Logger:            slogtest.Make(t, nil).Named("agent").Leveled(slog.LevelDebug),
        })
        t.Cleanup(func() {
            _ = agentCloser.Close()
        })
        resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)

        return client, uint16(coderdPort), resources[0].Agents[0].ID
    }

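    // willFilterPort reports whether the agent is expected to hide the given
    // port from the listening-ports response.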
    willFilterPort := func(port int) bool {
        if port < codersdk.MinimumListeningPort || port > 65535 {
            return true
        }
        if _, ok := codersdk.IgnoredListeningPorts[uint16(port)]; ok {
            return true
        }

        return false
    }

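    // generateUnfilteredPort opens a local TCP listener on a port that the
    // agent should report.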
    generateUnfilteredPort := func(t *testing.T) (net.Listener, uint16) {
        var (
            l    net.Listener
            port uint16
        )
        require.Eventually(t, func() bool {
            var err error
            l, err = net.Listen("tcp", "localhost:0")
            if err != nil {
                return false
            }
            tcpAddr, _ := l.Addr().(*net.TCPAddr)
            if willFilterPort(tcpAddr.Port) {
                _ = l.Close()
                return false
            }
            t.Cleanup(func() {
                _ = l.Close()
            })

            port = uint16(tcpAddr.Port)
            return true
        }, testutil.WaitShort, testutil.IntervalFast)

        return l, port
    }

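    // generateFilteredPort opens a local TCP listener on a well-known ignored
    // port that the agent should omit from its response.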
    generateFilteredPort := func(t *testing.T) (net.Listener, uint16) {
        var (
            l    net.Listener
            port uint16
        )
        require.Eventually(t, func() bool {
            for ignoredPort := range codersdk.IgnoredListeningPorts {
                if ignoredPort < 1024 || ignoredPort == 5432 {
                    continue
                }

                var err error
                l, err = net.Listen("tcp", fmt.Sprintf("localhost:%d", ignoredPort))
                if err != nil {
                    continue
                }
                t.Cleanup(func() {
                    _ = l.Close()
                })

                port = ignoredPort
                return true
            }

            return false
        }, testutil.WaitShort, testutil.IntervalFast)

        return l, port
    }

    t.Run("LinuxAndWindows", func(t *testing.T) {
        t.Parallel()
        if runtime.GOOS != "linux" && runtime.GOOS != "windows" {
            t.Skip("only runs on linux and windows")
            return
        }

        t.Run("OK", func(t *testing.T) {
            t.Parallel()

            client, coderdPort, agentID := setup(t, nil)

            ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
            defer cancel()

            // Generate a random unfiltered port.
            l, lPort := generateUnfilteredPort(t)

            // List ports and ensure that the port we expect to see is there.
            res, err := client.WorkspaceAgentListeningPorts(ctx, agentID)
            require.NoError(t, err)

            var (
                expected = map[uint16]bool{
                    // expect the listener we made
                    lPort: false,
                    // expect the coderdtest server
                    coderdPort: false,
                }
            )
            for _, port := range res.Ports {
                if port.Network == codersdk.ListeningPortNetworkTCP {
                    if val, ok := expected[port.Port]; ok {
                        if val {
                            t.Fatalf("expected to find TCP port %d only once in response", port.Port)
                        }
                    }
                    expected[port.Port] = true
                }
            }
            for port, found := range expected {
                if !found {
                    t.Fatalf("expected to find TCP port %d in response", port)
                }
            }

            // Close the listener and check that the port is no longer in the response.
            require.NoError(t, l.Close())
            time.Sleep(2 * time.Second) // avoid cache
            res, err = client.WorkspaceAgentListeningPorts(ctx, agentID)
            require.NoError(t, err)

            for _, port := range res.Ports {
                if port.Network == codersdk.ListeningPortNetworkTCP && port.Port == lPort {
                    t.Fatalf("expected to not find TCP port %d in response", lPort)
                }
            }
        })

        t.Run("Filter", func(t *testing.T) {
            t.Parallel()

            // Generate an unfiltered port that we will create an app for and
            // should not exist in the response.
            _, appLPort := generateUnfilteredPort(t)
            app := &proto.App{
                Name: "test-app",
                Url:  fmt.Sprintf("http://localhost:%d", appLPort),
            }

            // Generate a filtered port that should not exist in the response.
            _, filteredLPort := generateFilteredPort(t)

            client, coderdPort, agentID := setup(t, []*proto.App{app})

            ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
            defer cancel()

            res, err := client.WorkspaceAgentListeningPorts(ctx, agentID)
            require.NoError(t, err)

            sawCoderdPort := false
            for _, port := range res.Ports {
                if port.Network == codersdk.ListeningPortNetworkTCP {
                    if port.Port == appLPort {
                        t.Fatalf("expected to not find TCP port (app port) %d in response", appLPort)
                    }
                    if port.Port == filteredLPort {
                        t.Fatalf("expected to not find TCP port (filtered port) %d in response", filteredLPort)
                    }
                    if port.Port == coderdPort {
                        sawCoderdPort = true
                    }
                }
            }
            if !sawCoderdPort {
                t.Fatalf("expected to find TCP port (coderd port) %d in response", coderdPort)
            }
        })
    })

    t.Run("Darwin", func(t *testing.T) {
        t.Parallel()
        if runtime.GOOS != "darwin" {
            t.Skip("only runs on darwin")
            return
        }

        client, _, agentID := setup(t, nil)

        ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
        defer cancel()

        // Create a TCP listener on a random port.
        l, err := net.Listen("tcp", "localhost:0")
        require.NoError(t, err)
        defer l.Close()

        // List ports and ensure that the list is empty because we're on darwin.
        res, err := client.WorkspaceAgentListeningPorts(ctx, agentID)
        require.NoError(t, err)
        require.Len(t, res.Ports, 0)
    })
}

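// TestWorkspaceAgentAppHealth checks app health reporting: posting health for
// unknown apps, apps without healthchecks, or with invalid values fails, while
// valid healthy and unhealthy updates are reflected by the apps endpoint.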
func TestWorkspaceAgentAppHealth(t *testing.T) {
    t.Parallel()
    client := coderdtest.New(t, &coderdtest.Options{
        IncludeProvisionerDaemon: true,
    })
    user := coderdtest.CreateFirstUser(t, client)
    authToken := uuid.NewString()
    apps := []*proto.App{
        {
            Name:    "code-server",
            Command: "some-command",
            Url:     "http://localhost:3000",
            Icon:    "/code.svg",
        },
        {
            Name:    "code-server-2",
            Command: "some-command",
            Url:     "http://localhost:3000",
            Icon:    "/code.svg",
            Healthcheck: &proto.Healthcheck{
                Url:       "http://localhost:3000",
                Interval:  5,
                Threshold: 6,
            },
        },
    }
    version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
        Parse: echo.ParseComplete,
        Provision: []*proto.Provision_Response{{
            Type: &proto.Provision_Response_Complete{
                Complete: &proto.Provision_Complete{
                    Resources: []*proto.Resource{{
                        Name: "example",
                        Type: "aws_instance",
                        Agents: []*proto.Agent{{
                            Id: uuid.NewString(),
                            Auth: &proto.Agent_Token{
                                Token: authToken,
                            },
                            Apps: apps,
                        }},
                    }},
                },
            },
        }},
    })
    coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
    template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
    workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
    coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

    ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
    defer cancel()

    agentClient := codersdk.New(client.URL)
    agentClient.SessionToken = authToken

    apiApps, err := agentClient.WorkspaceAgentApps(ctx)
    require.NoError(t, err)
    require.EqualValues(t, codersdk.WorkspaceAppHealthDisabled, apiApps[0].Health)
    require.EqualValues(t, codersdk.WorkspaceAppHealthInitializing, apiApps[1].Health)
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{})
    require.Error(t, err)
    // empty
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{})
    require.Error(t, err)
    // invalid name
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{
        Healths: map[string]codersdk.WorkspaceAppHealth{
            "bad-name": codersdk.WorkspaceAppHealthDisabled,
        },
    })
    require.Error(t, err)
    // healthcheck disabled
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{
        Healths: map[string]codersdk.WorkspaceAppHealth{
            "code-server": codersdk.WorkspaceAppHealthInitializing,
        },
    })
    require.Error(t, err)
    // invalid value
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{
        Healths: map[string]codersdk.WorkspaceAppHealth{
            "code-server-2": codersdk.WorkspaceAppHealth("bad-value"),
        },
    })
    require.Error(t, err)
    // update to healthy
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{
        Healths: map[string]codersdk.WorkspaceAppHealth{
            "code-server-2": codersdk.WorkspaceAppHealthHealthy,
        },
    })
    require.NoError(t, err)
    apiApps, err = agentClient.WorkspaceAgentApps(ctx)
    require.NoError(t, err)
    require.EqualValues(t, codersdk.WorkspaceAppHealthHealthy, apiApps[1].Health)
    // update to unhealthy
    err = agentClient.PostWorkspaceAgentAppHealth(ctx, codersdk.PostWorkspaceAppHealthsRequest{
        Healths: map[string]codersdk.WorkspaceAppHealth{
            "code-server-2": codersdk.WorkspaceAppHealthUnhealthy,
        },
    })
    require.NoError(t, err)
    apiApps, err = agentClient.WorkspaceAgentApps(ctx)
    require.NoError(t, err)
    require.EqualValues(t, codersdk.WorkspaceAppHealthUnhealthy, apiApps[1].Health)
}