Merge remote-tracking branch 'origin/dk/prebuilds' into jjs/prebuilds
Makefile (2 lines changed)
@@ -116,7 +116,7 @@ endif

 clean:
 	rm -rf build/ site/build/ site/out/
-	mkdir -p build/ site/out/bin/
+	mkdir -p build/
 	git restore site/out/
 .PHONY: clean

@@ -90,6 +90,8 @@ type Options struct {
 	BlockFileTransfer bool
 	Execer            agentexec.Execer
 	ContainerLister   agentcontainers.Lister
+
+	ExperimentalContainersEnabled bool
 }

 type Client interface {

@@ -190,6 +192,8 @@ func New(options Options) Agent {
 		metrics: newAgentMetrics(prometheusRegistry),
 		execer:  options.Execer,
 		lister:  options.ContainerLister,
+
+		experimentalDevcontainersEnabled: options.ExperimentalContainersEnabled,
 	}
 	// Initially, we have a closed channel, reflecting the fact that we are not initially connected.
 	// Each time we connect we replace the channel (while holding the closeMutex) with a new one

@@ -260,6 +264,8 @@ type agent struct {
 	metrics *agentMetrics
 	execer  agentexec.Execer
 	lister  agentcontainers.Lister
+
+	experimentalDevcontainersEnabled bool
 }

 func (a *agent) TailnetConn() *tailnet.Conn {

@@ -299,6 +305,9 @@ func (a *agent) init() {
 		a.sshServer,
 		a.metrics.connectionsTotal, a.metrics.reconnectingPTYErrors,
 		a.reconnectingPTYTimeout,
+		func(s *reconnectingpty.Server) {
+			s.ExperimentalContainersEnabled = a.experimentalDevcontainersEnabled
+		},
 	)
 	go a.runLoop()
 }
@@ -25,8 +25,14 @@ import (
 	"testing"
 	"time"

+	"go.uber.org/goleak"
+	"tailscale.com/net/speedtest"
+	"tailscale.com/tailcfg"
+
 	"github.com/bramvdbogaerde/go-scp"
 	"github.com/google/uuid"
+	"github.com/ory/dockertest/v3"
+	"github.com/ory/dockertest/v3/docker"
 	"github.com/pion/udp"
 	"github.com/pkg/sftp"
 	"github.com/prometheus/client_golang/prometheus"

@@ -34,15 +40,13 @@ import (
 	"github.com/spf13/afero"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
-	"go.uber.org/goleak"
 	"golang.org/x/crypto/ssh"
 	"golang.org/x/exp/slices"
 	"golang.org/x/xerrors"
-	"tailscale.com/net/speedtest"
-	"tailscale.com/tailcfg"

 	"cdr.dev/slog"
 	"cdr.dev/slog/sloggers/slogtest"

 	"github.com/coder/coder/v2/agent"
 	"github.com/coder/coder/v2/agent/agentssh"
 	"github.com/coder/coder/v2/agent/agenttest"
@@ -1761,6 +1765,74 @@ func TestAgent_ReconnectingPTY(t *testing.T) {
 	}
 }

+// This tests end-to-end functionality of connecting to a running container
+// and executing a command. It creates a real Docker container and runs a
+// command. As such, it does not run by default in CI.
+// You can run it manually as follows:
+//
+// CODER_TEST_USE_DOCKER=1 go test -count=1 ./agent -run TestAgent_ReconnectingPTYContainer
+func TestAgent_ReconnectingPTYContainer(t *testing.T) {
+	t.Parallel()
+	if os.Getenv("CODER_TEST_USE_DOCKER") != "1" {
+		t.Skip("Set CODER_TEST_USE_DOCKER=1 to run this test")
+	}
+
+	ctx := testutil.Context(t, testutil.WaitLong)
+
+	pool, err := dockertest.NewPool("")
+	require.NoError(t, err, "Could not connect to docker")
+	ct, err := pool.RunWithOptions(&dockertest.RunOptions{
+		Repository: "busybox",
+		Tag:        "latest",
+		Cmd:        []string{"sleep", "infnity"},
+	}, func(config *docker.HostConfig) {
+		config.AutoRemove = true
+		config.RestartPolicy = docker.RestartPolicy{Name: "no"}
+	})
+	require.NoError(t, err, "Could not start container")
+	t.Cleanup(func() {
+		err := pool.Purge(ct)
+		require.NoError(t, err, "Could not stop container")
+	})
+	// Wait for container to start
+	require.Eventually(t, func() bool {
+		ct, ok := pool.ContainerByName(ct.Container.Name)
+		return ok && ct.Container.State.Running
+	}, testutil.WaitShort, testutil.IntervalSlow, "Container did not start in time")
+
+	// nolint: dogsled
+	conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
+		o.ExperimentalContainersEnabled = true
+	})
+	ac, err := conn.ReconnectingPTY(ctx, uuid.New(), 80, 80, "/bin/sh", func(arp *workspacesdk.AgentReconnectingPTYInit) {
+		arp.Container = ct.Container.ID
+	})
+	require.NoError(t, err, "failed to create ReconnectingPTY")
+	defer ac.Close()
+	tr := testutil.NewTerminalReader(t, ac)
+
+	require.NoError(t, tr.ReadUntil(ctx, func(line string) bool {
+		return strings.Contains(line, "#") || strings.Contains(line, "$")
+	}), "find prompt")
+
+	require.NoError(t, json.NewEncoder(ac).Encode(workspacesdk.ReconnectingPTYRequest{
+		Data: "hostname\r",
+	}), "write hostname")
+	require.NoError(t, tr.ReadUntil(ctx, func(line string) bool {
+		return strings.Contains(line, "hostname")
+	}), "find hostname command")
+
+	require.NoError(t, tr.ReadUntil(ctx, func(line string) bool {
+		return strings.Contains(line, ct.Container.Config.Hostname)
+	}), "find hostname output")
+	require.NoError(t, json.NewEncoder(ac).Encode(workspacesdk.ReconnectingPTYRequest{
+		Data: "exit\r",
+	}), "write exit command")
+
+	// Wait for the connection to close.
+	require.ErrorIs(t, tr.ReadUntil(ctx, nil), io.EOF)
+}
+
 func TestAgent_Dial(t *testing.T) {
 	t.Parallel()

@@ -6,7 +6,6 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
-	"os"
 	"os/user"
 	"slices"
 	"sort"

@@ -15,6 +14,7 @@ import (
 	"time"

 	"github.com/coder/coder/v2/agent/agentexec"
+	"github.com/coder/coder/v2/agent/usershell"
 	"github.com/coder/coder/v2/codersdk"

 	"golang.org/x/exp/maps"

@@ -37,6 +37,7 @@ func NewDocker(execer agentexec.Execer) Lister {
 // DockerEnvInfoer is an implementation of agentssh.EnvInfoer that returns
 // information about a container.
 type DockerEnvInfoer struct {
+	usershell.SystemEnvInfo
 	container string
 	user      *user.User
 	userShell string

@@ -122,26 +123,13 @@ func EnvInfo(ctx context.Context, execer agentexec.Execer, container, containerU
 	return &dei, nil
 }

-func (dei *DockerEnvInfoer) CurrentUser() (*user.User, error) {
+func (dei *DockerEnvInfoer) User() (*user.User, error) {
 	// Clone the user so that the caller can't modify it
 	u := *dei.user
 	return &u, nil
 }

-func (*DockerEnvInfoer) Environ() []string {
-	// Return a clone of the environment so that the caller can't modify it
-	return os.Environ()
-}
-
-func (*DockerEnvInfoer) UserHomeDir() (string, error) {
-	// We default the working directory of the command to the user's home
-	// directory. Since this came from inside the container, we cannot guarantee
-	// that this exists on the host. Return the "real" home directory of the user
-	// instead.
-	return os.UserHomeDir()
-}
-
-func (dei *DockerEnvInfoer) UserShell(string) (string, error) {
+func (dei *DockerEnvInfoer) Shell(string) (string, error) {
 	return dei.userShell, nil
 }

@@ -502,15 +502,15 @@ func TestDockerEnvInfoer(t *testing.T) {
 			dei, err := EnvInfo(ctx, agentexec.DefaultExecer, ct.Container.ID, tt.containerUser)
 			require.NoError(t, err, "Expected no error from DockerEnvInfo()")

-			u, err := dei.CurrentUser()
+			u, err := dei.User()
 			require.NoError(t, err, "Expected no error from CurrentUser()")
 			require.Equal(t, tt.expectedUsername, u.Username, "Expected username to match")

-			hd, err := dei.UserHomeDir()
+			hd, err := dei.HomeDir()
 			require.NoError(t, err, "Expected no error from UserHomeDir()")
 			require.NotEmpty(t, hd, "Expected user homedir to be non-empty")

-			sh, err := dei.UserShell(tt.containerUser)
+			sh, err := dei.Shell(tt.containerUser)
 			require.NoError(t, err, "Expected no error from UserShell()")
 			require.Equal(t, tt.expectedUserShell, sh, "Expected user shell to match")

agent/agentrsa/key.go (new file, 87 lines)
@@ -0,0 +1,87 @@
+package agentrsa
+
+import (
+	"crypto/rsa"
+	"math/big"
+	"math/rand"
+)
+
+// GenerateDeterministicKey generates an RSA private key deterministically based on the provided seed.
+// This function uses a deterministic random source to generate the primes p and q, ensuring that the
+// same seed will always produce the same private key. The generated key is 2048 bits in size.
+//
+// Reference: https://pkg.go.dev/crypto/rsa#GenerateKey
+func GenerateDeterministicKey(seed int64) *rsa.PrivateKey {
+	// Since the standard lib purposefully does not generate
+	// deterministic rsa keys, we need to do it ourselves.
+
+	// Create deterministic random source
+	// nolint: gosec
+	deterministicRand := rand.New(rand.NewSource(seed))
+
+	// Use fixed values for p and q based on the seed
+	p := big.NewInt(0)
+	q := big.NewInt(0)
+	e := big.NewInt(65537) // Standard RSA public exponent
+
+	for {
+		// Generate deterministic primes using the seeded random
+		// Each prime should be ~1024 bits to get a 2048-bit key
+		for {
+			p.SetBit(p, 1024, 1) // Ensure it's large enough
+			for i := range 1024 {
+				if deterministicRand.Int63()%2 == 1 {
+					p.SetBit(p, i, 1)
+				} else {
+					p.SetBit(p, i, 0)
+				}
+			}
+			p1 := new(big.Int).Sub(p, big.NewInt(1))
+			if p.ProbablyPrime(20) && new(big.Int).GCD(nil, nil, e, p1).Cmp(big.NewInt(1)) == 0 {
+				break
+			}
+		}
+
+		for {
+			q.SetBit(q, 1024, 1) // Ensure it's large enough
+			for i := range 1024 {
+				if deterministicRand.Int63()%2 == 1 {
+					q.SetBit(q, i, 1)
+				} else {
+					q.SetBit(q, i, 0)
+				}
+			}
+			q1 := new(big.Int).Sub(q, big.NewInt(1))
+			if q.ProbablyPrime(20) && p.Cmp(q) != 0 && new(big.Int).GCD(nil, nil, e, q1).Cmp(big.NewInt(1)) == 0 {
+				break
+			}
+		}
+
+		// Calculate phi = (p-1) * (q-1)
+		p1 := new(big.Int).Sub(p, big.NewInt(1))
+		q1 := new(big.Int).Sub(q, big.NewInt(1))
+		phi := new(big.Int).Mul(p1, q1)
+
+		// Calculate private exponent d
+		d := new(big.Int).ModInverse(e, phi)
+		if d != nil {
+			// Calculate n = p * q
+			n := new(big.Int).Mul(p, q)
+
+			// Create the private key
+			privateKey := &rsa.PrivateKey{
+				PublicKey: rsa.PublicKey{
+					N: n,
+					E: int(e.Int64()),
+				},
+				D:      d,
+				Primes: []*big.Int{p, q},
+			}
+
+			// Compute precomputed values
+			privateKey.Precompute()
+
+			return privateKey
+		}
+	}
+}
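Editorial note (not part of the diff): a minimal sketch of how this new package is meant to be consumed, mirroring what `CoderSigner` in the `agentssh` hunks further down does. The seed value is illustrative; `gossh` is golang.org/x/crypto/ssh, which the agent already uses.

```go
package main

import (
	"fmt"

	gossh "golang.org/x/crypto/ssh"

	"github.com/coder/coder/v2/agent/agentrsa"
)

func main() {
	// The same seed always yields the same 2048-bit RSA key, so the agent can
	// present a stable SSH host key without persisting anything to disk.
	key := agentrsa.GenerateDeterministicKey(42)

	signer, err := gossh.NewSignerFromKey(key)
	if err != nil {
		panic(err)
	}
	// Running this twice prints the same fingerprint both times.
	fmt.Println(gossh.FingerprintSHA256(signer.PublicKey()))
}
```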
agent/agentrsa/key_test.go (new file, 50 lines)
@@ -0,0 +1,50 @@
+package agentrsa_test
+
+import (
+	"crypto/rsa"
+	"math/rand/v2"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+
+	"github.com/coder/coder/v2/agent/agentrsa"
+)
+
+func TestGenerateDeterministicKey(t *testing.T) {
+	t.Parallel()
+
+	key1 := agentrsa.GenerateDeterministicKey(1234)
+	key2 := agentrsa.GenerateDeterministicKey(1234)
+
+	assert.Equal(t, key1, key2)
+	assert.EqualExportedValues(t, key1, key2)
+}
+
+var result *rsa.PrivateKey
+
+func BenchmarkGenerateDeterministicKey(b *testing.B) {
+	var r *rsa.PrivateKey
+
+	for range b.N {
+		// always record the result of DeterministicPrivateKey to prevent
+		// the compiler eliminating the function call.
+		r = agentrsa.GenerateDeterministicKey(rand.Int64())
+	}
+
+	// always store the result to a package level variable
+	// so the compiler cannot eliminate the Benchmark itself.
+	result = r
+}
+
+func FuzzGenerateDeterministicKey(f *testing.F) {
+	testcases := []int64{0, 1234, 1010101010}
+	for _, tc := range testcases {
+		f.Add(tc) // Use f.Add to provide a seed corpus
+	}
+	f.Fuzz(func(t *testing.T, seed int64) {
+		key1 := agentrsa.GenerateDeterministicKey(seed)
+		key2 := agentrsa.GenerateDeterministicKey(seed)
+		assert.Equal(t, key1, key2)
+		assert.EqualExportedValues(t, key1, key2)
+	})
+}
@@ -3,12 +3,9 @@ package agentssh
 import (
 	"bufio"
 	"context"
-	"crypto/rsa"
 	"errors"
 	"fmt"
 	"io"
-	"math/big"
-	"math/rand"
 	"net"
 	"os"
 	"os/exec"

@@ -33,6 +30,7 @@ import (
 	"cdr.dev/slog"

 	"github.com/coder/coder/v2/agent/agentexec"
+	"github.com/coder/coder/v2/agent/agentrsa"
 	"github.com/coder/coder/v2/agent/usershell"
 	"github.com/coder/coder/v2/codersdk"
 	"github.com/coder/coder/v2/pty"
@@ -698,45 +696,6 @@ func (s *Server) sftpHandler(logger slog.Logger, session ssh.Session) {
 	_ = session.Exit(1)
 }

-// EnvInfoer encapsulates external information required by CreateCommand.
-type EnvInfoer interface {
-	// CurrentUser returns the current user.
-	CurrentUser() (*user.User, error)
-	// Environ returns the environment variables of the current process.
-	Environ() []string
-	// UserHomeDir returns the home directory of the current user.
-	UserHomeDir() (string, error)
-	// UserShell returns the shell of the given user.
-	UserShell(username string) (string, error)
-}
-
-type systemEnvInfoer struct{}
-
-var defaultEnvInfoer EnvInfoer = &systemEnvInfoer{}
-
-// DefaultEnvInfoer returns a default implementation of
-// EnvInfoer. This reads information using the default Go
-// implementations.
-func DefaultEnvInfoer() EnvInfoer {
-	return defaultEnvInfoer
-}
-
-func (systemEnvInfoer) CurrentUser() (*user.User, error) {
-	return user.Current()
-}
-
-func (systemEnvInfoer) Environ() []string {
-	return os.Environ()
-}
-
-func (systemEnvInfoer) UserHomeDir() (string, error) {
-	return userHomeDir()
-}
-
-func (systemEnvInfoer) UserShell(username string) (string, error) {
-	return usershell.Get(username)
-}
-
 // CreateCommand processes raw command input with OpenSSH-like behavior.
 // If the script provided is empty, it will default to the users shell.
 // This injects environment variables specified by the user at launch too.
@@ -744,17 +703,17 @@ func (systemEnvInfoer) UserShell(username string) (string, error) {
 // alternative implementations for the dependencies of CreateCommand.
 // This is useful when creating a command to be run in a separate environment
 // (for example, a Docker container). Pass in nil to use the default.
-func (s *Server) CreateCommand(ctx context.Context, script string, env []string, deps EnvInfoer) (*pty.Cmd, error) {
-	if deps == nil {
-		deps = DefaultEnvInfoer()
+func (s *Server) CreateCommand(ctx context.Context, script string, env []string, ei usershell.EnvInfoer) (*pty.Cmd, error) {
+	if ei == nil {
+		ei = &usershell.SystemEnvInfo{}
 	}
-	currentUser, err := deps.CurrentUser()
+	currentUser, err := ei.User()
 	if err != nil {
 		return nil, xerrors.Errorf("get current user: %w", err)
 	}
 	username := currentUser.Username

-	shell, err := deps.UserShell(username)
+	shell, err := ei.Shell(username)
 	if err != nil {
 		return nil, xerrors.Errorf("get user shell: %w", err)
 	}

@@ -802,7 +761,18 @@ func (s *Server) CreateCommand(ctx context.Context, script string, env []string,
 		}
 	}

-	cmd := s.Execer.PTYCommandContext(ctx, name, args...)
+	// Modify command prior to execution. This will usually be a no-op, but not
+	// always. For example, to run a command in a Docker container, we need to
+	// modify the command to be `docker exec -it <container> <command>`.
+	modifiedName, modifiedArgs := ei.ModifyCommand(name, args...)
+	// Log if the command was modified.
+	if modifiedName != name && slices.Compare(modifiedArgs, args) != 0 {
+		s.logger.Debug(ctx, "modified command",
+			slog.F("before", append([]string{name}, args...)),
+			slog.F("after", append([]string{modifiedName}, modifiedArgs...)),
+		)
+	}
+	cmd := s.Execer.PTYCommandContext(ctx, modifiedName, modifiedArgs...)
 	cmd.Dir = s.config.WorkingDirectory()

 	// If the metadata directory doesn't exist, we run the command

@@ -810,13 +780,13 @@ func (s *Server) CreateCommand(ctx context.Context, script string, env []string,
 	_, err = os.Stat(cmd.Dir)
 	if cmd.Dir == "" || err != nil {
 		// Default to user home if a directory is not set.
-		homedir, err := deps.UserHomeDir()
+		homedir, err := ei.HomeDir()
 		if err != nil {
 			return nil, xerrors.Errorf("get home dir: %w", err)
 		}
 		cmd.Dir = homedir
 	}
-	cmd.Env = append(deps.Environ(), env...)
+	cmd.Env = append(ei.Environ(), env...)
 	cmd.Env = append(cmd.Env, fmt.Sprintf("USER=%s", username))

 	// Set SSH connection environment variables (these are also set by OpenSSH
@@ -1120,75 +1090,7 @@ func CoderSigner(seed int64) (gossh.Signer, error) {
 	// Clients should ignore the host key when connecting.
 	// The agent needs to authenticate with coderd to SSH,
 	// so SSH authentication doesn't improve security.
+	coderHostKey := agentrsa.GenerateDeterministicKey(seed)
-	// Since the standard lib purposefully does not generate
-	// deterministic rsa keys, we need to do it ourselves.
-	coderHostKey := func() *rsa.PrivateKey {
-		// Create deterministic random source
-		// nolint: gosec
-		deterministicRand := rand.New(rand.NewSource(seed))
-
-		// Use fixed values for p and q based on the seed
-		p := big.NewInt(0)
-		q := big.NewInt(0)
-		e := big.NewInt(65537) // Standard RSA public exponent
-
-		// Generate deterministic primes using the seeded random
-		// Each prime should be ~1024 bits to get a 2048-bit key
-		for {
-			p.SetBit(p, 1024, 1) // Ensure it's large enough
-			for i := 0; i < 1024; i++ {
-				if deterministicRand.Int63()%2 == 1 {
-					p.SetBit(p, i, 1)
-				} else {
-					p.SetBit(p, i, 0)
-				}
-			}
-			if p.ProbablyPrime(20) {
-				break
-			}
-		}
-
-		for {
-			q.SetBit(q, 1024, 1) // Ensure it's large enough
-			for i := 0; i < 1024; i++ {
-				if deterministicRand.Int63()%2 == 1 {
-					q.SetBit(q, i, 1)
-				} else {
-					q.SetBit(q, i, 0)
-				}
-			}
-			if q.ProbablyPrime(20) && p.Cmp(q) != 0 {
-				break
-			}
-		}
-
-		// Calculate n = p * q
-		n := new(big.Int).Mul(p, q)
-
-		// Calculate phi = (p-1) * (q-1)
-		p1 := new(big.Int).Sub(p, big.NewInt(1))
-		q1 := new(big.Int).Sub(q, big.NewInt(1))
-		phi := new(big.Int).Mul(p1, q1)
-
-		// Calculate private exponent d
-		d := new(big.Int).ModInverse(e, phi)
-
-		// Create the private key
-		privateKey := &rsa.PrivateKey{
-			PublicKey: rsa.PublicKey{
-				N: n,
-				E: int(e.Int64()),
-			},
-			D:      d,
-			Primes: []*big.Int{p, q},
-		}
-
-		// Compute precomputed values
-		privateKey.Precompute()
-
-		return privateKey
-	}()
-
 	coderSigner, err := gossh.NewSignerFromKey(coderHostKey)
 	return coderSigner, err
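Editorial note (not part of the diff): the comment added to `CreateCommand` explains that `ModifyCommand` exists so a caller can rewrite a command into `docker exec -it <container> <command>`. A hypothetical sketch of such an implementation is below; the real container-aware one is `agentcontainers.DockerEnvInfoer`, whose `ModifyCommand` body is not shown in this commit, so the type and package names here are illustrative only.

```go
package example // hypothetical helper, not part of the commit

import "github.com/coder/coder/v2/agent/usershell"

// dockerExecEnvInfoer reuses the host defaults for user, environment, home
// directory, and shell, and only rewrites the command to run inside a container.
type dockerExecEnvInfoer struct {
	usershell.SystemEnvInfo // promoted User/Environ/HomeDir/Shell methods

	container string
}

func (d dockerExecEnvInfoer) ModifyCommand(name string, args ...string) (string, []string) {
	// docker exec -it <container> <command> [args...]
	return "docker", append([]string{"exec", "-it", d.container, name}, args...)
}

// Compile-time check that the sketch satisfies the new interface.
var _ usershell.EnvInfoer = dockerExecEnvInfoer{}
```

`CreateCommand` then feeds the rewritten name and arguments to `PTYCommandContext`, so callers only have to supply a different `EnvInfoer` to change where the command runs.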
@@ -124,7 +124,7 @@ type fakeEnvInfoer struct {
 	UserShellFn func(string) (string, error)
 }

-func (f *fakeEnvInfoer) CurrentUser() (u *user.User, err error) {
+func (f *fakeEnvInfoer) User() (u *user.User, err error) {
 	return f.CurrentUserFn()
 }

@@ -132,14 +132,18 @@ func (f *fakeEnvInfoer) Environ() []string {
 	return f.EnvironFn()
 }

-func (f *fakeEnvInfoer) UserHomeDir() (string, error) {
+func (f *fakeEnvInfoer) HomeDir() (string, error) {
 	return f.UserHomeDirFn()
 }

-func (f *fakeEnvInfoer) UserShell(u string) (string, error) {
+func (f *fakeEnvInfoer) Shell(u string) (string, error) {
 	return f.UserShellFn(u)
 }

+func (*fakeEnvInfoer) ModifyCommand(cmd string, args ...string) (string, []string) {
+	return cmd, args
+}
+
 func TestNewServer_CloseActiveConnections(t *testing.T) {
 	t.Parallel()

@@ -14,7 +14,9 @@ import (
 	"golang.org/x/xerrors"

 	"cdr.dev/slog"
+	"github.com/coder/coder/v2/agent/agentcontainers"
 	"github.com/coder/coder/v2/agent/agentssh"
+	"github.com/coder/coder/v2/agent/usershell"
 	"github.com/coder/coder/v2/codersdk/workspacesdk"
 )

@@ -26,20 +28,26 @@ type Server struct {
 	connCount        atomic.Int64
 	reconnectingPTYs sync.Map
 	timeout          time.Duration
+
+	ExperimentalContainersEnabled bool
 }

 // NewServer returns a new ReconnectingPTY server
 func NewServer(logger slog.Logger, commandCreator *agentssh.Server,
 	connectionsTotal prometheus.Counter, errorsTotal *prometheus.CounterVec,
-	timeout time.Duration,
+	timeout time.Duration, opts ...func(*Server),
 ) *Server {
-	return &Server{
+	s := &Server{
 		logger:           logger,
 		commandCreator:   commandCreator,
 		connectionsTotal: connectionsTotal,
 		errorsTotal:      errorsTotal,
 		timeout:          timeout,
 	}
+	for _, o := range opts {
+		o(s)
+	}
+	return s
 }

 func (s *Server) Serve(ctx, hardCtx context.Context, l net.Listener) (retErr error) {

@@ -116,7 +124,7 @@ func (s *Server) handleConn(ctx context.Context, logger slog.Logger, conn net.Co
 	}

 	connectionID := uuid.NewString()
-	connLogger := logger.With(slog.F("message_id", msg.ID), slog.F("connection_id", connectionID))
+	connLogger := logger.With(slog.F("message_id", msg.ID), slog.F("connection_id", connectionID), slog.F("container", msg.Container), slog.F("container_user", msg.ContainerUser))
 	connLogger.Debug(ctx, "starting handler")

 	defer func() {

@@ -158,8 +166,17 @@ func (s *Server) handleConn(ctx context.Context, logger slog.Logger, conn net.Co
 		}
 	}()

+	var ei usershell.EnvInfoer
+	if s.ExperimentalContainersEnabled && msg.Container != "" {
+		dei, err := agentcontainers.EnvInfo(ctx, s.commandCreator.Execer, msg.Container, msg.ContainerUser)
+		if err != nil {
+			return xerrors.Errorf("get container env info: %w", err)
+		}
+		ei = dei
+		s.logger.Info(ctx, "got container env info", slog.F("container", msg.Container))
+	}
 	// Empty command will default to the users shell!
-	cmd, err := s.commandCreator.CreateCommand(ctx, msg.Command, nil, nil)
+	cmd, err := s.commandCreator.CreateCommand(ctx, msg.Command, nil, ei)
 	if err != nil {
 		s.errorsTotal.WithLabelValues("create_command").Add(1)
 		return xerrors.Errorf("create command: %w", err)
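Editorial note (not part of the diff): `NewServer` now takes variadic `opts ...func(*Server)`, which is how the agent toggles the experimental container support without widening the constructor signature. A sketch of the call site, mirroring the `agent.init()` hunk earlier in this diff; the logger, SSH server, metrics, and timeout arguments are placeholders supplied by the caller.

```go
package example // illustrative wiring, not part of the commit

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"

	"cdr.dev/slog"

	"github.com/coder/coder/v2/agent/agentssh"
	"github.com/coder/coder/v2/agent/reconnectingpty"
)

// buildPTYServer shows the functional-options pattern introduced by this change.
func buildPTYServer(logger slog.Logger, sshSrv *agentssh.Server, total prometheus.Counter, errs *prometheus.CounterVec) *reconnectingpty.Server {
	return reconnectingpty.NewServer(
		logger,
		sshSrv,
		total, errs,
		30*time.Second, // reconnecting PTY timeout (placeholder value)
		func(s *reconnectingpty.Server) {
			s.ExperimentalContainersEnabled = true
		},
	)
}
```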
agent/usershell/usershell.go (new file, 66 lines)
@@ -0,0 +1,66 @@
+package usershell
+
+import (
+	"os"
+	"os/user"
+
+	"golang.org/x/xerrors"
+)
+
+// HomeDir returns the home directory of the current user, giving
+// priority to the $HOME environment variable.
+// Deprecated: use EnvInfoer.HomeDir() instead.
+func HomeDir() (string, error) {
+	// First we check the environment.
+	homedir, err := os.UserHomeDir()
+	if err == nil {
+		return homedir, nil
+	}
+
+	// As a fallback, we try the user information.
+	u, err := user.Current()
+	if err != nil {
+		return "", xerrors.Errorf("current user: %w", err)
+	}
+	return u.HomeDir, nil
+}
+
+// EnvInfoer encapsulates external information about the environment.
+type EnvInfoer interface {
+	// User returns the current user.
+	User() (*user.User, error)
+	// Environ returns the environment variables of the current process.
+	Environ() []string
+	// HomeDir returns the home directory of the current user.
+	HomeDir() (string, error)
+	// Shell returns the shell of the given user.
+	Shell(username string) (string, error)
+	// ModifyCommand modifies the command and arguments before execution based on
+	// the environment. This is useful for executing a command inside a container.
+	// In the default case, the command and arguments are returned unchanged.
+	ModifyCommand(name string, args ...string) (string, []string)
+}
+
+// SystemEnvInfo encapsulates the information about the environment
+// just using the default Go implementations.
+type SystemEnvInfo struct{}
+
+func (SystemEnvInfo) User() (*user.User, error) {
+	return user.Current()
+}
+
+func (SystemEnvInfo) Environ() []string {
+	return os.Environ()
+}
+
+func (SystemEnvInfo) HomeDir() (string, error) {
+	return HomeDir()
+}
+
+func (SystemEnvInfo) Shell(username string) (string, error) {
+	return Get(username)
+}
+
+func (SystemEnvInfo) ModifyCommand(name string, args ...string) (string, []string) {
+	return name, args
+}
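Editorial note (not part of the diff): because every `SystemEnvInfo` method uses a value receiver, the default implementation satisfies the new interface directly, and `DockerEnvInfoer` picks up the same methods by embedding it and overriding only the container-specific ones. A one-line compile-time check one could add (placement and test-package name are illustrative):

```go
package usershell_test // illustrative placement, not part of the commit

import "github.com/coder/coder/v2/agent/usershell"

// Fails to compile if SystemEnvInfo ever stops satisfying EnvInfoer.
var _ usershell.EnvInfoer = usershell.SystemEnvInfo{}
```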
@@ -10,6 +10,7 @@ import (
 )

 // Get returns the $SHELL environment variable.
+// Deprecated: use SystemEnvInfo.UserShell instead.
 func Get(username string) (string, error) {
 	// This command will output "UserShell: /bin/zsh" if successful, we
 	// can ignore the error since we have fallback behavior.

@@ -11,6 +11,7 @@ import (
 )

 // Get returns the /etc/passwd entry for the username provided.
+// Deprecated: use SystemEnvInfo.UserShell instead.
 func Get(username string) (string, error) {
 	contents, err := os.ReadFile("/etc/passwd")
 	if err != nil {

@@ -3,6 +3,7 @@ package usershell
 import "os/exec"

 // Get returns the command prompt binary name.
+// Deprecated: use SystemEnvInfo.UserShell instead.
 func Get(username string) (string, error) {
 	_, err := exec.LookPath("pwsh.exe")
 	if err == nil {
@@ -383,6 +383,7 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
 				BlockFileTransfer: blockFileTransfer,
 				Execer:            execer,
 				ContainerLister:   containerLister,
+
+				ExperimentalContainersEnabled: devcontainersEnabled,
 			})

 			promHandler := agent.PrometheusMetricsHandler(prometheusRegistry, logger)
@@ -32,6 +32,14 @@ func OverrideVSCodeConfigs(fs afero.Fs) error {
 		filepath.Join(xdg.DataHome, "code-server", "Machine", "settings.json"),
 		// vscode-remote's default configuration path.
 		filepath.Join(home, ".vscode-server", "data", "Machine", "settings.json"),
+		// vscode-insiders' default configuration path.
+		filepath.Join(home, ".vscode-insiders-server", "data", "Machine", "settings.json"),
+		// cursor default configuration path.
+		filepath.Join(home, ".cursor-server", "data", "Machine", "settings.json"),
+		// windsurf default configuration path.
+		filepath.Join(home, ".windsurf-server", "data", "Machine", "settings.json"),
+		// vscodium default configuration path.
+		filepath.Join(home, ".vscodium-server", "data", "Machine", "settings.json"),
 	} {
 		_, err := fs.Stat(configPath)
 		if err != nil {
@@ -1213,9 +1213,14 @@ func wrapTransportWithVersionMismatchCheck(rt http.RoundTripper, inv *serpent.In
 			return
 		}
 		upgradeMessage := defaultUpgradeMessage(semver.Canonical(serverVersion))
-		serverInfo, err := getBuildInfo(inv.Context())
-		if err == nil && serverInfo.UpgradeMessage != "" {
+		if serverInfo, err := getBuildInfo(inv.Context()); err == nil {
+			switch {
+			case serverInfo.UpgradeMessage != "":
 				upgradeMessage = serverInfo.UpgradeMessage
+			// The site-local `install.sh` was introduced in v2.19.0
+			case serverInfo.DashboardURL != "" && semver.Compare(semver.MajorMinor(serverVersion), "v2.19") >= 0:
+				upgradeMessage = fmt.Sprintf("download %s with: 'curl -fsSL %s/install.sh | sh'", serverVersion, serverInfo.DashboardURL)
+			}
 		}
 		fmtWarningText := "version mismatch: client %s, server %s\n%s"
 		fmtWarn := pretty.Sprint(cliui.DefaultStyles.Warn, fmtWarningText)
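Editorial note (not part of the diff): a quick illustration of why the `semver.MajorMinor` comparison gates the new `install.sh` fallback at v2.19. The version strings below are made up; `semver` is the golang.org/x/mod/semver package already imported here.

```go
package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

func main() {
	// Only the major.minor prefix is compared, so every patch release of
	// v2.19 and anything newer qualifies for the install.sh hint.
	for _, v := range []string{"v2.18.5", "v2.19.0", "v2.19.3", "v2.20.1"} {
		qualifies := semver.Compare(semver.MajorMinor(v), "v2.19") >= 0
		fmt.Printf("%-8s -> install.sh hint: %v\n", v, qualifies)
	}
}
```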
cli/server.go (168 lines changed)
|
|||||||
groupAllowList[group] = true
|
groupAllowList[group] = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
secondaryClaimsSrc := coderd.MergedClaimsSourceUserInfo
|
||||||
|
if !vals.OIDC.IgnoreUserInfo && vals.OIDC.UserInfoFromAccessToken {
|
||||||
|
return nil, xerrors.Errorf("to use 'oidc-access-token-claims', 'oidc-ignore-userinfo' must be set to 'false'")
|
||||||
|
}
|
||||||
|
if vals.OIDC.IgnoreUserInfo {
|
||||||
|
secondaryClaimsSrc = coderd.MergedClaimsSourceNone
|
||||||
|
}
|
||||||
|
if vals.OIDC.UserInfoFromAccessToken {
|
||||||
|
secondaryClaimsSrc = coderd.MergedClaimsSourceAccessToken
|
||||||
|
}
|
||||||
|
|
||||||
return &coderd.OIDCConfig{
|
return &coderd.OIDCConfig{
|
||||||
OAuth2Config: useCfg,
|
OAuth2Config: useCfg,
|
||||||
Provider: oidcProvider,
|
Provider: oidcProvider,
|
||||||
@ -187,7 +198,7 @@ func createOIDCConfig(ctx context.Context, logger slog.Logger, vals *codersdk.De
|
|||||||
NameField: vals.OIDC.NameField.String(),
|
NameField: vals.OIDC.NameField.String(),
|
||||||
EmailField: vals.OIDC.EmailField.String(),
|
EmailField: vals.OIDC.EmailField.String(),
|
||||||
AuthURLParams: vals.OIDC.AuthURLParams.Value,
|
AuthURLParams: vals.OIDC.AuthURLParams.Value,
|
||||||
IgnoreUserInfo: vals.OIDC.IgnoreUserInfo.Value(),
|
SecondaryClaims: secondaryClaimsSrc,
|
||||||
SignInText: vals.OIDC.SignInText.String(),
|
SignInText: vals.OIDC.SignInText.String(),
|
||||||
SignupsDisabledText: vals.OIDC.SignupsDisabledText.String(),
|
SignupsDisabledText: vals.OIDC.SignupsDisabledText.String(),
|
||||||
IconURL: vals.OIDC.IconURL.String(),
|
IconURL: vals.OIDC.IconURL.String(),
|
||||||
@@ -677,24 +688,6 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
 			}
 		}

-		if vals.OAuth2.Github.ClientSecret != "" || vals.OAuth2.Github.DeviceFlow.Value() {
-			options.GithubOAuth2Config, err = configureGithubOAuth2(
-				oauthInstrument,
-				vals.AccessURL.Value(),
-				vals.OAuth2.Github.ClientID.String(),
-				vals.OAuth2.Github.ClientSecret.String(),
-				vals.OAuth2.Github.DeviceFlow.Value(),
-				vals.OAuth2.Github.AllowSignups.Value(),
-				vals.OAuth2.Github.AllowEveryone.Value(),
-				vals.OAuth2.Github.AllowedOrgs,
-				vals.OAuth2.Github.AllowedTeams,
-				vals.OAuth2.Github.EnterpriseBaseURL.String(),
-			)
-			if err != nil {
-				return xerrors.Errorf("configure github oauth2: %w", err)
-			}
-		}
-
 		// As OIDC clients can be confidential or public,
 		// we should only check for a client id being set.
 		// The underlying library handles the case of no
@@ -782,6 +775,20 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
 			return xerrors.Errorf("set deployment id: %w", err)
 		}

+		githubOAuth2ConfigParams, err := getGithubOAuth2ConfigParams(ctx, options.Database, vals)
+		if err != nil {
+			return xerrors.Errorf("get github oauth2 config params: %w", err)
+		}
+		if githubOAuth2ConfigParams != nil {
+			options.GithubOAuth2Config, err = configureGithubOAuth2(
+				oauthInstrument,
+				githubOAuth2ConfigParams,
+			)
+			if err != nil {
+				return xerrors.Errorf("configure github oauth2: %w", err)
+			}
+		}
+
 		options.RuntimeConfig = runtimeconfig.NewManager()

 		// This should be output before the logs start streaming.
@@ -1832,25 +1839,101 @@ func configureCAPool(tlsClientCAFile string, tlsConfig *tls.Config) error {
 	return nil
 }

-// TODO: convert the argument list to a struct, it's easy to mix up the order of the arguments
-//
+const (
+	// Client ID for https://github.com/apps/coder
+	GithubOAuth2DefaultProviderClientID      = "Iv1.6a2b4b4aec4f4fe7"
+	GithubOAuth2DefaultProviderAllowEveryone = true
+	GithubOAuth2DefaultProviderDeviceFlow    = true
+)
+
+type githubOAuth2ConfigParams struct {
+	accessURL         *url.URL
+	clientID          string
+	clientSecret      string
+	deviceFlow        bool
+	allowSignups      bool
+	allowEveryone     bool
+	allowOrgs         []string
+	rawTeams          []string
+	enterpriseBaseURL string
+}
+
+func getGithubOAuth2ConfigParams(ctx context.Context, db database.Store, vals *codersdk.DeploymentValues) (*githubOAuth2ConfigParams, error) {
+	params := githubOAuth2ConfigParams{
+		accessURL:         vals.AccessURL.Value(),
+		clientID:          vals.OAuth2.Github.ClientID.String(),
+		clientSecret:      vals.OAuth2.Github.ClientSecret.String(),
+		deviceFlow:        vals.OAuth2.Github.DeviceFlow.Value(),
+		allowSignups:      vals.OAuth2.Github.AllowSignups.Value(),
+		allowEveryone:     vals.OAuth2.Github.AllowEveryone.Value(),
+		allowOrgs:         vals.OAuth2.Github.AllowedOrgs.Value(),
+		rawTeams:          vals.OAuth2.Github.AllowedTeams.Value(),
+		enterpriseBaseURL: vals.OAuth2.Github.EnterpriseBaseURL.String(),
+	}
+
+	// If the user manually configured the GitHub OAuth2 provider,
+	// we won't add the default configuration.
+	if params.clientID != "" || params.clientSecret != "" || params.enterpriseBaseURL != "" {
+		return &params, nil
+	}
+
+	// Check if the user manually disabled the default GitHub OAuth2 provider.
+	if !vals.OAuth2.Github.DefaultProviderEnable.Value() {
+		return nil, nil //nolint:nilnil
+	}
+
+	// Check if the deployment is eligible for the default GitHub OAuth2 provider.
+	// We want to enable it only for new deployments, and avoid enabling it
+	// if a deployment was upgraded from an older version.
+	// nolint:gocritic // Requires system privileges
+	defaultEligible, err := db.GetOAuth2GithubDefaultEligible(dbauthz.AsSystemRestricted(ctx))
+	if err != nil && !errors.Is(err, sql.ErrNoRows) {
+		return nil, xerrors.Errorf("get github default eligible: %w", err)
+	}
+	defaultEligibleNotSet := errors.Is(err, sql.ErrNoRows)
+
+	if defaultEligibleNotSet {
+		// nolint:gocritic // User count requires system privileges
+		userCount, err := db.GetUserCount(dbauthz.AsSystemRestricted(ctx))
+		if err != nil {
+			return nil, xerrors.Errorf("get user count: %w", err)
+		}
+		// We check if a deployment is new by checking if it has any users.
+		defaultEligible = userCount == 0
+		// nolint:gocritic // Requires system privileges
+		if err := db.UpsertOAuth2GithubDefaultEligible(dbauthz.AsSystemRestricted(ctx), defaultEligible); err != nil {
+			return nil, xerrors.Errorf("upsert github default eligible: %w", err)
+		}
+	}
+
+	if !defaultEligible {
+		return nil, nil //nolint:nilnil
+	}
+
+	params.clientID = GithubOAuth2DefaultProviderClientID
+	params.allowEveryone = GithubOAuth2DefaultProviderAllowEveryone
+	params.deviceFlow = GithubOAuth2DefaultProviderDeviceFlow
+
+	return &params, nil
+}
+
 //nolint:revive // Ignore flag-parameter: parameter 'allowEveryone' seems to be a control flag, avoid control coupling (revive)
-func configureGithubOAuth2(instrument *promoauth.Factory, accessURL *url.URL, clientID, clientSecret string, deviceFlow, allowSignups, allowEveryone bool, allowOrgs []string, rawTeams []string, enterpriseBaseURL string) (*coderd.GithubOAuth2Config, error) {
-	redirectURL, err := accessURL.Parse("/api/v2/users/oauth2/github/callback")
+func configureGithubOAuth2(instrument *promoauth.Factory, params *githubOAuth2ConfigParams) (*coderd.GithubOAuth2Config, error) {
+	redirectURL, err := params.accessURL.Parse("/api/v2/users/oauth2/github/callback")
 	if err != nil {
 		return nil, xerrors.Errorf("parse github oauth callback url: %w", err)
 	}
-	if allowEveryone && len(allowOrgs) > 0 {
+	if params.allowEveryone && len(params.allowOrgs) > 0 {
 		return nil, xerrors.New("allow everyone and allowed orgs cannot be used together")
 	}
-	if allowEveryone && len(rawTeams) > 0 {
+	if params.allowEveryone && len(params.rawTeams) > 0 {
 		return nil, xerrors.New("allow everyone and allowed teams cannot be used together")
 	}
-	if !allowEveryone && len(allowOrgs) == 0 {
+	if !params.allowEveryone && len(params.allowOrgs) == 0 {
 		return nil, xerrors.New("allowed orgs is empty: must specify at least one org or allow everyone")
 	}
-	allowTeams := make([]coderd.GithubOAuth2Team, 0, len(rawTeams))
-	for _, rawTeam := range rawTeams {
+	allowTeams := make([]coderd.GithubOAuth2Team, 0, len(params.rawTeams))
+	for _, rawTeam := range params.rawTeams {
 		parts := strings.SplitN(rawTeam, "/", 2)
 		if len(parts) != 2 {
 			return nil, xerrors.Errorf("github team allowlist is formatted incorrectly. got %s; wanted <organization>/<team>", rawTeam)
@@ -1862,8 +1945,8 @@ func configureGithubOAuth2(instrument *promoauth.Factory, accessURL *url.URL, cl
 	}

 	endpoint := xgithub.Endpoint
-	if enterpriseBaseURL != "" {
-		enterpriseURL, err := url.Parse(enterpriseBaseURL)
+	if params.enterpriseBaseURL != "" {
+		enterpriseURL, err := url.Parse(params.enterpriseBaseURL)
 		if err != nil {
 			return nil, xerrors.Errorf("parse enterprise base url: %w", err)
 		}

@@ -1882,8 +1965,8 @@ func configureGithubOAuth2(instrument *promoauth.Factory, accessURL *url.URL, cl
 	}

 	instrumentedOauth := instrument.NewGithub("github-login", &oauth2.Config{
-		ClientID:     clientID,
-		ClientSecret: clientSecret,
+		ClientID:     params.clientID,
+		ClientSecret: params.clientSecret,
 		Endpoint:     endpoint,
 		RedirectURL:  redirectURL.String(),
 		Scopes: []string{

@@ -1895,17 +1978,17 @@ func configureGithubOAuth2(instrument *promoauth.Factory, accessURL *url.URL, cl

 	createClient := func(client *http.Client, source promoauth.Oauth2Source) (*github.Client, error) {
 		client = instrumentedOauth.InstrumentHTTPClient(client, source)
-		if enterpriseBaseURL != "" {
-			return github.NewEnterpriseClient(enterpriseBaseURL, "", client)
+		if params.enterpriseBaseURL != "" {
+			return github.NewEnterpriseClient(params.enterpriseBaseURL, "", client)
 		}
 		return github.NewClient(client), nil
 	}

 	var deviceAuth *externalauth.DeviceAuth
-	if deviceFlow {
+	if params.deviceFlow {
 		deviceAuth = &externalauth.DeviceAuth{
 			Config:   instrumentedOauth,
-			ClientID: clientID,
+			ClientID: params.clientID,
 			TokenURL: endpoint.TokenURL,
 			Scopes:   []string{"read:user", "read:org", "user:email"},
 			CodeURL:  endpoint.DeviceAuthURL,

@@ -1914,9 +1997,9 @@ func configureGithubOAuth2(instrument *promoauth.Factory, accessURL *url.URL, cl

 	return &coderd.GithubOAuth2Config{
 		OAuth2Config:       instrumentedOauth,
-		AllowSignups:       allowSignups,
-		AllowEveryone:      allowEveryone,
-		AllowOrganizations: allowOrgs,
+		AllowSignups:       params.allowSignups,
+		AllowEveryone:      params.allowEveryone,
+		AllowOrganizations: params.allowOrgs,
 		AllowTeams:         allowTeams,
 		AuthenticatedUser: func(ctx context.Context, client *http.Client) (*github.User, error) {
 			api, err := createClient(client, promoauth.SourceGitAPIAuthUser)

@@ -1955,19 +2038,20 @@ func configureGithubOAuth2(instrument *promoauth.Factory, accessURL *url.URL, cl
 			team, _, err := api.Teams.GetTeamMembershipBySlug(ctx, org, teamSlug, username)
 			return team, err
 		},
-		DeviceFlowEnabled: deviceFlow,
+		DeviceFlowEnabled: params.deviceFlow,
 		ExchangeDeviceCode: func(ctx context.Context, deviceCode string) (*oauth2.Token, error) {
-			if !deviceFlow {
+			if !params.deviceFlow {
 				return nil, xerrors.New("device flow is not enabled")
 			}
 			return deviceAuth.ExchangeDeviceCode(ctx, deviceCode)
 		},
 		AuthorizeDevice: func(ctx context.Context) (*codersdk.ExternalAuthDevice, error) {
-			if !deviceFlow {
+			if !params.deviceFlow {
 				return nil, xerrors.New("device flow is not enabled")
 			}
 			return deviceAuth.AuthorizeDevice(ctx)
 		},
+		DefaultProviderConfigured: params.clientID == GithubOAuth2DefaultProviderClientID,
 	}, nil
 }

@@ -45,6 +45,8 @@ import (
 	"github.com/coder/coder/v2/cli/clitest"
 	"github.com/coder/coder/v2/cli/config"
 	"github.com/coder/coder/v2/coderd/coderdtest"
+	"github.com/coder/coder/v2/coderd/database"
+	"github.com/coder/coder/v2/coderd/database/dbgen"
 	"github.com/coder/coder/v2/coderd/database/dbtestutil"
 	"github.com/coder/coder/v2/coderd/database/migrations"
 	"github.com/coder/coder/v2/coderd/httpapi"
@@ -306,6 +308,145 @@ func TestServer(t *testing.T) {
require.Less(t, numLines, 20)
})

+t.Run("OAuth2GitHubDefaultProvider", func(t *testing.T) {
+type testCase struct {
+name string
+githubDefaultProviderEnabled string
+githubClientID string
+githubClientSecret string
+expectGithubEnabled bool
+expectGithubDefaultProviderConfigured bool
+createUserPreStart bool
+createUserPostRestart bool
+}

+runGitHubProviderTest := func(t *testing.T, tc testCase) {
+t.Parallel()
+if !dbtestutil.WillUsePostgres() {
+t.Skip("test requires postgres")
+}

+ctx, cancelFunc := context.WithCancel(testutil.Context(t, testutil.WaitLong))
+defer cancelFunc()

+dbURL, err := dbtestutil.Open(t)
+require.NoError(t, err)
+db, _ := dbtestutil.NewDB(t, dbtestutil.WithURL(dbURL))

+if tc.createUserPreStart {
+_ = dbgen.User(t, db, database.User{})
+}

+args := []string{
+"server",
+"--postgres-url", dbURL,
+"--http-address", ":0",
+"--access-url", "https://example.com",
+}
+if tc.githubClientID != "" {
+args = append(args, fmt.Sprintf("--oauth2-github-client-id=%s", tc.githubClientID))
+}
+if tc.githubClientSecret != "" {
+args = append(args, fmt.Sprintf("--oauth2-github-client-secret=%s", tc.githubClientSecret))
+}
+if tc.githubClientID != "" || tc.githubClientSecret != "" {
+args = append(args, "--oauth2-github-allow-everyone")
+}
+if tc.githubDefaultProviderEnabled != "" {
+args = append(args, fmt.Sprintf("--oauth2-github-default-provider-enable=%s", tc.githubDefaultProviderEnabled))
+}

+inv, cfg := clitest.New(t, args...)
+errChan := make(chan error, 1)
+go func() {
+errChan <- inv.WithContext(ctx).Run()
+}()
+accessURLChan := make(chan *url.URL, 1)
+go func() {
+accessURLChan <- waitAccessURL(t, cfg)
+}()

+var accessURL *url.URL
+select {
+case err := <-errChan:
+require.NoError(t, err)
+case accessURL = <-accessURLChan:
+require.NotNil(t, accessURL)
+}

+client := codersdk.New(accessURL)

+authMethods, err := client.AuthMethods(ctx)
+require.NoError(t, err)
+require.Equal(t, tc.expectGithubEnabled, authMethods.Github.Enabled)
+require.Equal(t, tc.expectGithubDefaultProviderConfigured, authMethods.Github.DefaultProviderConfigured)

+cancelFunc()
+select {
+case err := <-errChan:
+require.NoError(t, err)
+case <-time.After(testutil.WaitLong):
+t.Fatal("server did not exit")
+}

+if tc.createUserPostRestart {
+_ = dbgen.User(t, db, database.User{})
+}

+// Ensure that it stays at that setting after the server restarts.
+inv, cfg = clitest.New(t, args...)
+clitest.Start(t, inv)
+accessURL = waitAccessURL(t, cfg)
+client = codersdk.New(accessURL)

+ctx = testutil.Context(t, testutil.WaitLong)
+authMethods, err = client.AuthMethods(ctx)
+require.NoError(t, err)
+require.Equal(t, tc.expectGithubEnabled, authMethods.Github.Enabled)
+require.Equal(t, tc.expectGithubDefaultProviderConfigured, authMethods.Github.DefaultProviderConfigured)
+}

+for _, tc := range []testCase{
+{
+name: "NewDeployment",
+expectGithubEnabled: true,
+expectGithubDefaultProviderConfigured: true,
+createUserPreStart: false,
+createUserPostRestart: true,
+},
+{
+name: "ExistingDeployment",
+expectGithubEnabled: false,
+expectGithubDefaultProviderConfigured: false,
+createUserPreStart: true,
+createUserPostRestart: false,
+},
+{
+name: "ManuallyDisabled",
+githubDefaultProviderEnabled: "false",
+expectGithubEnabled: false,
+expectGithubDefaultProviderConfigured: false,
+},
+{
+name: "ConfiguredClientID",
+githubClientID: "123",
+expectGithubEnabled: true,
+expectGithubDefaultProviderConfigured: false,
+},
+{
+name: "ConfiguredClientSecret",
+githubClientSecret: "456",
+expectGithubEnabled: true,
+expectGithubDefaultProviderConfigured: false,
+},
+} {
+tc := tc
+t.Run(tc.name, func(t *testing.T) {
+runGitHubProviderTest(t, tc)
+})
+}
+})

// Validate that a warning is printed that it may not be externally
// reachable.
t.Run("LocalAccessURL", func(t *testing.T) {
cli/testdata/coder_server_--help.golden (vendored) | 3
@@ -498,6 +498,9 @@ OAUTH2 / GITHUB OPTIONS:
--oauth2-github-client-secret string, $CODER_OAUTH2_GITHUB_CLIENT_SECRET
Client secret for Login with GitHub.

+--oauth2-github-default-provider-enable bool, $CODER_OAUTH2_GITHUB_DEFAULT_PROVIDER_ENABLE (default: true)
+Enable the default GitHub OAuth2 provider managed by Coder.

--oauth2-github-device-flow bool, $CODER_OAUTH2_GITHUB_DEVICE_FLOW (default: false)
Enable device flow for Login with GitHub.

cli/testdata/server-config.yaml.golden (vendored) | 9
@@ -265,6 +265,9 @@ oauth2:
# Enable device flow for Login with GitHub.
# (default: false, type: bool)
deviceFlow: false
+# Enable the default GitHub OAuth2 provider managed by Coder.
+# (default: true, type: bool)
+defaultProviderEnable: true
# Organizations the user must be a member of to Login with GitHub.
# (default: <unset>, type: string-array)
allowedOrgs: []
@@ -329,6 +332,12 @@ oidc:
# Ignore the userinfo endpoint and only use the ID token for user information.
# (default: false, type: bool)
ignoreUserInfo: false
+# Source supplemental user claims from the 'access_token'. This assumes the token
+# is a jwt signed by the same issuer as the id_token. Using this requires setting
+# 'oidc-ignore-userinfo' to true. This setting is not compliant with the OIDC
+# specification and is not recommended. Use at your own risk.
+# (default: false, type: bool)
+accessTokenClaims: false
# This field must be set if using the organization sync feature. Set to the claim
# to be used for organizations.
# (default: <unset>, type: string)
coderd/apidoc/docs.go (generated) | 23
@@ -10331,7 +10331,7 @@ const docTemplate = `{
"type": "object",
"properties": {
"github": {
-"$ref": "#/definitions/codersdk.AuthMethod"
+"$ref": "#/definitions/codersdk.GithubAuthMethod"
},
"oidc": {
"$ref": "#/definitions/codersdk.OIDCAuthMethod"
@@ -11867,6 +11867,17 @@ const docTemplate = `{
}
}
},
+"codersdk.GithubAuthMethod": {
+"type": "object",
+"properties": {
+"default_provider_configured": {
+"type": "boolean"
+},
+"enabled": {
+"type": "boolean"
+}
+}
+},
"codersdk.Group": {
"type": "object",
"properties": {
@@ -12529,6 +12540,9 @@ const docTemplate = `{
"client_secret": {
"type": "string"
},
+"default_provider_enable": {
+"type": "boolean"
+},
"device_flow": {
"type": "boolean"
},
@@ -12679,6 +12693,7 @@ const docTemplate = `{
"type": "boolean"
},
"ignore_user_info": {
+"description": "IgnoreUserInfo \u0026 UserInfoFromAccessToken are mutually exclusive. Only 1\ncan be set to true. Ideally this would be an enum with 3 states, ['none',\n'userinfo', 'access_token']. However, for backward compatibility,\n` + "`" + `ignore_user_info` + "`" + ` must remain. And ` + "`" + `access_token` + "`" + ` is a niche, non-spec\ncompliant edge case. So it's use is rare, and should not be advised.",
"type": "boolean"
},
"issuer_url": {
@@ -12711,6 +12726,10 @@ const docTemplate = `{
"skip_issuer_checks": {
"type": "boolean"
},
+"source_user_info_from_access_token": {
+"description": "UserInfoFromAccessToken as mentioned above is an edge case. This allows\nsourcing the user_info from the access token itself instead of a user_info\nendpoint. This assumes the access token is a valid JWT with a set of claims to\nbe merged with the id_token.",
+"type": "boolean"
+},
"user_role_field": {
"type": "string"
},
@@ -13740,7 +13759,6 @@ const docTemplate = `{
"organization_member",
"provisioner_daemon",
"provisioner_jobs",
-"provisioner_keys",
"replicas",
"system",
"tailnet_coordinator",
@@ -13776,7 +13794,6 @@ const docTemplate = `{
"ResourceOrganizationMember",
"ResourceProvisionerDaemon",
"ResourceProvisionerJobs",
-"ResourceProvisionerKeys",
"ResourceReplicas",
"ResourceSystem",
"ResourceTailnetCoordinator",
coderd/apidoc/swagger.json (generated) | 23
@@ -9189,7 +9189,7 @@
"type": "object",
"properties": {
"github": {
-"$ref": "#/definitions/codersdk.AuthMethod"
+"$ref": "#/definitions/codersdk.GithubAuthMethod"
},
"oidc": {
"$ref": "#/definitions/codersdk.OIDCAuthMethod"
@@ -10652,6 +10652,17 @@
}
}
},
+"codersdk.GithubAuthMethod": {
+"type": "object",
+"properties": {
+"default_provider_configured": {
+"type": "boolean"
+},
+"enabled": {
+"type": "boolean"
+}
+}
+},
"codersdk.Group": {
"type": "object",
"properties": {
@@ -11265,6 +11276,9 @@
"client_secret": {
"type": "string"
},
+"default_provider_enable": {
+"type": "boolean"
+},
"device_flow": {
"type": "boolean"
},
@@ -11415,6 +11429,7 @@
"type": "boolean"
},
"ignore_user_info": {
+"description": "IgnoreUserInfo \u0026 UserInfoFromAccessToken are mutually exclusive. Only 1\ncan be set to true. Ideally this would be an enum with 3 states, ['none',\n'userinfo', 'access_token']. However, for backward compatibility,\n`ignore_user_info` must remain. And `access_token` is a niche, non-spec\ncompliant edge case. So it's use is rare, and should not be advised.",
"type": "boolean"
},
"issuer_url": {
@@ -11447,6 +11462,10 @@
"skip_issuer_checks": {
"type": "boolean"
},
+"source_user_info_from_access_token": {
+"description": "UserInfoFromAccessToken as mentioned above is an edge case. This allows\nsourcing the user_info from the access token itself instead of a user_info\nendpoint. This assumes the access token is a valid JWT with a set of claims to\nbe merged with the id_token.",
+"type": "boolean"
+},
"user_role_field": {
"type": "string"
},
@@ -12429,7 +12448,6 @@
"organization_member",
"provisioner_daemon",
"provisioner_jobs",
-"provisioner_keys",
"replicas",
"system",
"tailnet_coordinator",
@@ -12465,7 +12483,6 @@
"ResourceOrganizationMember",
"ResourceProvisionerDaemon",
"ResourceProvisionerJobs",
-"ResourceProvisionerKeys",
"ResourceReplicas",
"ResourceSystem",
"ResourceTailnetCoordinator",
@@ -105,6 +105,7 @@ type FakeIDP struct {
// "Authorized Redirect URLs". This can be used to emulate that.
hookValidRedirectURL func(redirectURL string) error
hookUserInfo func(email string) (jwt.MapClaims, error)
+hookAccessTokenJWT func(email string, exp time.Time) jwt.MapClaims
// defaultIDClaims is if a new client connects and we didn't preset
// some claims.
defaultIDClaims jwt.MapClaims
@@ -154,6 +155,12 @@ func WithMiddlewares(mws ...func(http.Handler) http.Handler) func(*FakeIDP) {
}
}

+func WithAccessTokenJWTHook(hook func(email string, exp time.Time) jwt.MapClaims) func(*FakeIDP) {
+return func(f *FakeIDP) {
+f.hookAccessTokenJWT = hook
+}
+}

func WithHookWellKnown(hook func(r *http.Request, j *ProviderJSON) error) func(*FakeIDP) {
return func(f *FakeIDP) {
f.hookWellKnown = hook
@@ -316,8 +323,7 @@ const (
func NewFakeIDP(t testing.TB, opts ...FakeIDPOpt) *FakeIDP {
t.Helper()

-block, _ := pem.Decode([]byte(testRSAPrivateKey))
-pkey, err := x509.ParsePKCS1PrivateKey(block.Bytes)
+pkey, err := FakeIDPKey()
require.NoError(t, err)

idp := &FakeIDP{
@@ -676,8 +682,13 @@ func (f *FakeIDP) newCode(state string) string {

// newToken enforces the access token exchanged is actually a valid access token
// created by the IDP.
-func (f *FakeIDP) newToken(email string, expires time.Time) string {
+func (f *FakeIDP) newToken(t testing.TB, email string, expires time.Time) string {
accessToken := uuid.NewString()
+if f.hookAccessTokenJWT != nil {
+claims := f.hookAccessTokenJWT(email, expires)
+accessToken = f.encodeClaims(t, claims)
+}

f.accessTokens.Store(accessToken, token{
issued: time.Now(),
email: email,
@@ -963,7 +974,7 @@ func (f *FakeIDP) httpHandler(t testing.TB) http.Handler {
email := getEmail(claims)
refreshToken := f.newRefreshTokens(email)
token := map[string]interface{}{
-"access_token": f.newToken(email, exp),
+"access_token": f.newToken(t, email, exp),
"refresh_token": refreshToken,
"token_type": "Bearer",
"expires_in": int64((f.defaultExpire).Seconds()),
@@ -1468,6 +1479,7 @@ func (f *FakeIDP) internalOIDCConfig(ctx context.Context, t testing.TB, scopes [
UsernameField: "preferred_username",
EmailField: "email",
AuthURLParams: map[string]string{"access_type": "offline"},
+SecondaryClaims: coderd.MergedClaimsSourceUserInfo,
}

for _, opt := range opts {
@@ -1552,3 +1564,8 @@ d8h4Ht09E+f3nhTEc87mODkl7WJZpHL6V2sORfeq/eIkds+H6CJ4hy5w/bSw8tjf
sz9Di8sGIaUbLZI2rd0CQQCzlVwEtRtoNCyMJTTrkgUuNufLP19RZ5FpyXxBO5/u
QastnN77KfUwdj3SJt44U/uh1jAIv4oSLBr8HYUkbnI8
-----END RSA PRIVATE KEY-----`

+func FakeIDPKey() (*rsa.PrivateKey, error) {
+block, _ := pem.Decode([]byte(testRSAPrivateKey))
+return x509.ParsePKCS1PrivateKey(block.Bytes)
+}
@@ -3,9 +3,8 @@
// Query functions are generated using sqlc.
//
// To modify the database schema:
-// 1. Add a new migration using "create_migration.sh" in database/migrations/
-// 2. Run "make coderd/database/generate" in the root to generate models.
-// 3. Add/Edit queries in "query.sql" and run "make coderd/database/generate" to create Go code.
+// 1. Add a new migration using "create_migration.sh" in database/migrations/ and run "make gen" to generate models.
+// 2. Add/Edit queries in "query.sql" and run "make gen" to create Go code.
package database

import (
@@ -1,5 +1,3 @@
-//go:build linux
-
package database_test

import (
@@ -324,7 +324,6 @@ var (
rbac.ResourceOrganization.Type: {policy.ActionCreate, policy.ActionRead},
rbac.ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionDelete, policy.ActionRead},
rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate},
-rbac.ResourceProvisionerKeys.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionDelete},
rbac.ResourceUser.Type: rbac.ResourceUser.AvailableActions(),
rbac.ResourceWorkspaceDormant.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStop},
rbac.ResourceWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionSSH},
@@ -1311,10 +1310,6 @@ func (q *querier) DeleteOldWorkspaceAgentStats(ctx context.Context) error {
return q.db.DeleteOldWorkspaceAgentStats(ctx)
}

-func (q *querier) DeleteOrganization(ctx context.Context, id uuid.UUID) error {
-return deleteQ(q.log, q.auth, q.db.GetOrganizationByID, q.db.DeleteOrganization)(ctx, id)
-}
-
func (q *querier) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error {
return deleteQ[database.OrganizationMember](q.log, q.auth, func(ctx context.Context, arg database.DeleteOrganizationMemberParams) (database.OrganizationMember, error) {
member, err := database.ExpectOne(q.OrganizationMembers(ctx, database.OrganizationMembersParams(arg)))
@@ -1859,6 +1854,13 @@ func (q *querier) GetNotificationsSettings(ctx context.Context) (string, error)
return q.db.GetNotificationsSettings(ctx)
}

+func (q *querier) GetOAuth2GithubDefaultEligible(ctx context.Context) (bool, error) {
+if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
+return false, err
+}
+return q.db.GetOAuth2GithubDefaultEligible(ctx)
+}

func (q *querier) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2App); err != nil {
return database.OAuth2ProviderApp{}, err
@@ -1935,7 +1937,7 @@ func (q *querier) GetOrganizationByID(ctx context.Context, id uuid.UUID) (databa
return fetch(q.log, q.auth, q.db.GetOrganizationByID)(ctx, id)
}

-func (q *querier) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) {
+func (q *querier) GetOrganizationByName(ctx context.Context, name database.GetOrganizationByNameParams) (database.Organization, error) {
return fetch(q.log, q.auth, q.db.GetOrganizationByName)(ctx, name)
}

@@ -1952,7 +1954,7 @@ func (q *querier) GetOrganizations(ctx context.Context, args database.GetOrganiz
return fetchWithPostFilter(q.auth, policy.ActionRead, fetch)(ctx, nil)
}

-func (q *querier) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) {
+func (q *querier) GetOrganizationsByUserID(ctx context.Context, userID database.GetOrganizationsByUserIDParams) ([]database.Organization, error) {
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetOrganizationsByUserID)(ctx, userID)
}

@@ -3240,7 +3242,7 @@ func (q *querier) InsertProvisionerJobTimings(ctx context.Context, arg database.
}

func (q *querier) InsertProvisionerKey(ctx context.Context, arg database.InsertProvisionerKeyParams) (database.ProvisionerKey, error) {
-return insert(q.log, q.auth, rbac.ResourceProvisionerKeys.InOrg(arg.OrganizationID).WithID(arg.ID), q.db.InsertProvisionerKey)(ctx, arg)
+return insert(q.log, q.auth, rbac.ResourceProvisionerDaemon.InOrg(arg.OrganizationID).WithID(arg.ID), q.db.InsertProvisionerKey)(ctx, arg)
}

func (q *querier) InsertReplica(ctx context.Context, arg database.InsertReplicaParams) (database.Replica, error) {
@@ -3781,6 +3783,16 @@ func (q *querier) UpdateOrganization(ctx context.Context, arg database.UpdateOrg
return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateOrganization)(ctx, arg)
}

+func (q *querier) UpdateOrganizationDeletedByID(ctx context.Context, arg database.UpdateOrganizationDeletedByIDParams) error {
+deleteF := func(ctx context.Context, id uuid.UUID) error {
+return q.db.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+ID: id,
+UpdatedAt: dbtime.Now(),
+})
+}
+return deleteQ(q.log, q.auth, q.db.GetOrganizationByID, deleteF)(ctx, arg.ID)
+}

func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerDaemon); err != nil {
return err
@@ -4474,6 +4486,13 @@ func (q *querier) UpsertNotificationsSettings(ctx context.Context, value string)
return q.db.UpsertNotificationsSettings(ctx, value)
}

+func (q *querier) UpsertOAuth2GithubDefaultEligible(ctx context.Context, eligible bool) error {
+if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
+return err
+}
+return q.db.UpsertOAuth2GithubDefaultEligible(ctx, eligible)
+}

func (q *querier) UpsertOAuthSigningKey(ctx context.Context, value string) error {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
return err
@@ -815,7 +815,7 @@ func (s *MethodTestSuite) TestOrganization() {
}))
s.Run("GetOrganizationByName", s.Subtest(func(db database.Store, check *expects) {
o := dbgen.Organization(s.T(), db, database.Organization{})
-check.Args(o.Name).Asserts(o, policy.ActionRead).Returns(o)
+check.Args(database.GetOrganizationByNameParams{Name: o.Name, Deleted: o.Deleted}).Asserts(o, policy.ActionRead).Returns(o)
}))
s.Run("GetOrganizationIDsByMemberIDs", s.Subtest(func(db database.Store, check *expects) {
oa := dbgen.Organization(s.T(), db, database.Organization{})
@@ -839,7 +839,7 @@ func (s *MethodTestSuite) TestOrganization() {
_ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID, OrganizationID: a.ID})
b := dbgen.Organization(s.T(), db, database.Organization{})
_ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID, OrganizationID: b.ID})
-check.Args(u.ID).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b))
+check.Args(database.GetOrganizationsByUserIDParams{UserID: u.ID, Deleted: false}).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b))
}))
s.Run("InsertOrganization", s.Subtest(func(db database.Store, check *expects) {
check.Args(database.InsertOrganizationParams{
@@ -960,13 +960,14 @@ func (s *MethodTestSuite) TestOrganization() {
Name: "something-different",
}).Asserts(o, policy.ActionUpdate)
}))
-s.Run("DeleteOrganization", s.Subtest(func(db database.Store, check *expects) {
+s.Run("UpdateOrganizationDeletedByID", s.Subtest(func(db database.Store, check *expects) {
o := dbgen.Organization(s.T(), db, database.Organization{
Name: "doomed",
})
-check.Args(
-o.ID,
-).Asserts(o, policy.ActionDelete)
+check.Args(database.UpdateOrganizationDeletedByIDParams{
+ID: o.ID,
+UpdatedAt: o.UpdatedAt,
+}).Asserts(o, policy.ActionDelete).Returns()
}))
s.Run("OrganizationMembers", s.Subtest(func(db database.Store, check *expects) {
o := dbgen.Organization(s.T(), db, database.Organization{})
@@ -4404,6 +4405,12 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Value: "value",
}).Asserts(rbac.ResourceSystem, policy.ActionUpdate)
}))
+s.Run("GetOAuth2GithubDefaultEligible", s.Subtest(func(db database.Store, check *expects) {
+check.Args().Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Errors(sql.ErrNoRows)
+}))
+s.Run("UpsertOAuth2GithubDefaultEligible", s.Subtest(func(db database.Store, check *expects) {
+check.Args(true).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate)
+}))
}

func (s *MethodTestSuite) TestNotifications() {
@@ -254,6 +254,7 @@ type data struct {
announcementBanners []byte
healthSettings []byte
notificationsSettings []byte
+oauth2GithubDefaultEligible *bool
applicationName string
logoURL string
appSecurityKey string
@@ -2161,19 +2162,6 @@ func (q *FakeQuerier) DeleteOldWorkspaceAgentStats(_ context.Context) error {
return nil
}

-func (q *FakeQuerier) DeleteOrganization(_ context.Context, id uuid.UUID) error {
-q.mutex.Lock()
-defer q.mutex.Unlock()
-
-for i, org := range q.organizations {
-if org.ID == id && !org.IsDefault {
-q.organizations = append(q.organizations[:i], q.organizations[i+1:]...)
-return nil
-}
-}
-return sql.ErrNoRows
-}
-
func (q *FakeQuerier) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error {
err := validateDatabaseType(arg)
if err != nil {
@@ -3532,6 +3520,16 @@ func (q *FakeQuerier) GetNotificationsSettings(_ context.Context) (string, error
return string(q.notificationsSettings), nil
}

+func (q *FakeQuerier) GetOAuth2GithubDefaultEligible(_ context.Context) (bool, error) {
+q.mutex.RLock()
+defer q.mutex.RUnlock()

+if q.oauth2GithubDefaultEligible == nil {
+return false, sql.ErrNoRows
+}
+return *q.oauth2GithubDefaultEligible, nil
+}

func (q *FakeQuerier) GetOAuth2ProviderAppByID(_ context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) {
q.mutex.Lock()
defer q.mutex.Unlock()
@@ -3692,12 +3690,12 @@ func (q *FakeQuerier) GetOrganizationByID(_ context.Context, id uuid.UUID) (data
return q.getOrganizationByIDNoLock(id)
}

-func (q *FakeQuerier) GetOrganizationByName(_ context.Context, name string) (database.Organization, error) {
+func (q *FakeQuerier) GetOrganizationByName(_ context.Context, params database.GetOrganizationByNameParams) (database.Organization, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()

for _, organization := range q.organizations {
-if organization.Name == name {
+if organization.Name == params.Name && organization.Deleted == params.Deleted {
return organization, nil
}
}
@@ -3744,17 +3742,17 @@ func (q *FakeQuerier) GetOrganizations(_ context.Context, args database.GetOrgan
return tmp, nil
}

-func (q *FakeQuerier) GetOrganizationsByUserID(_ context.Context, userID uuid.UUID) ([]database.Organization, error) {
+func (q *FakeQuerier) GetOrganizationsByUserID(_ context.Context, arg database.GetOrganizationsByUserIDParams) ([]database.Organization, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()

organizations := make([]database.Organization, 0)
for _, organizationMember := range q.organizationMembers {
-if organizationMember.UserID != userID {
+if organizationMember.UserID != arg.UserID {
continue
}
for _, organization := range q.organizations {
-if organization.ID != organizationMember.OrganizationID {
+if organization.ID != organizationMember.OrganizationID || organization.Deleted != arg.Deleted {
continue
}
organizations = append(organizations, organization)
@@ -9851,6 +9849,26 @@ func (q *FakeQuerier) UpdateOrganization(_ context.Context, arg database.UpdateO
return database.Organization{}, sql.ErrNoRows
}

+func (q *FakeQuerier) UpdateOrganizationDeletedByID(_ context.Context, arg database.UpdateOrganizationDeletedByIDParams) error {
+if err := validateDatabaseType(arg); err != nil {
+return err
+}

+q.mutex.Lock()
+defer q.mutex.Unlock()

+for index, organization := range q.organizations {
+if organization.ID != arg.ID || organization.IsDefault {
+continue
+}
+organization.Deleted = true
+organization.UpdatedAt = arg.UpdatedAt
+q.organizations[index] = organization
+return nil
+}
+return sql.ErrNoRows
+}

func (q *FakeQuerier) UpdateProvisionerDaemonLastSeenAt(_ context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error {
err := validateDatabaseType(arg)
if err != nil {
@@ -11176,6 +11194,14 @@ func (q *FakeQuerier) UpsertNotificationsSettings(_ context.Context, data string
return nil
}

+func (q *FakeQuerier) UpsertOAuth2GithubDefaultEligible(_ context.Context, eligible bool) error {
+q.mutex.Lock()
+defer q.mutex.Unlock()

+q.oauth2GithubDefaultEligible = &eligible
+return nil
+}

func (q *FakeQuerier) UpsertOAuthSigningKey(_ context.Context, value string) error {
q.mutex.Lock()
defer q.mutex.Unlock()
@@ -77,6 +77,16 @@ func (m queryMetricsStore) InTx(f func(database.Store) error, options *database.
return m.dbMetrics.InTx(f, options)
}

+func (m queryMetricsStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error {
+start := time.Now()
+r0 := m.s.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+ID: id,
+UpdatedAt: time.Now(),
+})
+m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds())
+return r0
+}

func (m queryMetricsStore) AcquireLock(ctx context.Context, pgAdvisoryXactLock int64) error {
start := time.Now()
err := m.s.AcquireLock(ctx, pgAdvisoryXactLock)
@@ -336,13 +346,6 @@ func (m queryMetricsStore) DeleteOldWorkspaceAgentStats(ctx context.Context) err
return err
}

-func (m queryMetricsStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error {
-start := time.Now()
-r0 := m.s.DeleteOrganization(ctx, id)
-m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds())
-return r0
-}
-
func (m queryMetricsStore) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error {
start := time.Now()
r0 := m.s.DeleteOrganizationMember(ctx, arg)
@@ -875,6 +878,13 @@ func (m queryMetricsStore) GetNotificationsSettings(ctx context.Context) (string
return r0, r1
}

+func (m queryMetricsStore) GetOAuth2GithubDefaultEligible(ctx context.Context) (bool, error) {
+start := time.Now()
+r0, r1 := m.s.GetOAuth2GithubDefaultEligible(ctx)
+m.queryLatencies.WithLabelValues("GetOAuth2GithubDefaultEligible").Observe(time.Since(start).Seconds())
+return r0, r1
+}

func (m queryMetricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) {
start := time.Now()
r0, r1 := m.s.GetOAuth2ProviderAppByID(ctx, id)
@@ -952,7 +962,7 @@ func (m queryMetricsStore) GetOrganizationByID(ctx context.Context, id uuid.UUID
return organization, err
}

-func (m queryMetricsStore) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) {
+func (m queryMetricsStore) GetOrganizationByName(ctx context.Context, name database.GetOrganizationByNameParams) (database.Organization, error) {
start := time.Now()
organization, err := m.s.GetOrganizationByName(ctx, name)
m.queryLatencies.WithLabelValues("GetOrganizationByName").Observe(time.Since(start).Seconds())
@@ -973,7 +983,7 @@ func (m queryMetricsStore) GetOrganizations(ctx context.Context, args database.G
return organizations, err
}

-func (m queryMetricsStore) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) {
+func (m queryMetricsStore) GetOrganizationsByUserID(ctx context.Context, userID database.GetOrganizationsByUserIDParams) ([]database.Organization, error) {
start := time.Now()
organizations, err := m.s.GetOrganizationsByUserID(ctx, userID)
m.queryLatencies.WithLabelValues("GetOrganizationsByUserID").Observe(time.Since(start).Seconds())
@@ -2408,6 +2418,13 @@ func (m queryMetricsStore) UpdateOrganization(ctx context.Context, arg database.
return r0, r1
}

+func (m queryMetricsStore) UpdateOrganizationDeletedByID(ctx context.Context, arg database.UpdateOrganizationDeletedByIDParams) error {
+start := time.Now()
+r0 := m.s.UpdateOrganizationDeletedByID(ctx, arg)
+m.queryLatencies.WithLabelValues("UpdateOrganizationDeletedByID").Observe(time.Since(start).Seconds())
+return r0
+}

func (m queryMetricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error {
start := time.Now()
r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg)
@@ -2849,6 +2866,13 @@ func (m queryMetricsStore) UpsertNotificationsSettings(ctx context.Context, valu
return r0
}

+func (m queryMetricsStore) UpsertOAuth2GithubDefaultEligible(ctx context.Context, eligible bool) error {
+start := time.Now()
+r0 := m.s.UpsertOAuth2GithubDefaultEligible(ctx, eligible)
+m.queryLatencies.WithLabelValues("UpsertOAuth2GithubDefaultEligible").Observe(time.Since(start).Seconds())
+return r0
+}

func (m queryMetricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error {
start := time.Now()
r0 := m.s.UpsertOAuthSigningKey(ctx, value)
@@ -572,20 +572,6 @@ func (mr *MockStoreMockRecorder) DeleteOldWorkspaceAgentStats(ctx any) *gomock.C
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOldWorkspaceAgentStats", reflect.TypeOf((*MockStore)(nil).DeleteOldWorkspaceAgentStats), ctx)
}

-// DeleteOrganization mocks base method.
-func (m *MockStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error {
-m.ctrl.T.Helper()
-ret := m.ctrl.Call(m, "DeleteOrganization", ctx, id)
-ret0, _ := ret[0].(error)
-return ret0
-}
-
-// DeleteOrganization indicates an expected call of DeleteOrganization.
-func (mr *MockStoreMockRecorder) DeleteOrganization(ctx, id any) *gomock.Call {
-mr.mock.ctrl.T.Helper()
-return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOrganization", reflect.TypeOf((*MockStore)(nil).DeleteOrganization), ctx, id)
-}
-
// DeleteOrganizationMember mocks base method.
func (m *MockStore) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error {
m.ctrl.T.Helper()
@@ -1791,6 +1777,21 @@ func (mr *MockStoreMockRecorder) GetNotificationsSettings(ctx any) *gomock.Call
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNotificationsSettings", reflect.TypeOf((*MockStore)(nil).GetNotificationsSettings), ctx)
}

+// GetOAuth2GithubDefaultEligible mocks base method.
+func (m *MockStore) GetOAuth2GithubDefaultEligible(ctx context.Context) (bool, error) {
+m.ctrl.T.Helper()
+ret := m.ctrl.Call(m, "GetOAuth2GithubDefaultEligible", ctx)
+ret0, _ := ret[0].(bool)
+ret1, _ := ret[1].(error)
+return ret0, ret1
+}

+// GetOAuth2GithubDefaultEligible indicates an expected call of GetOAuth2GithubDefaultEligible.
+func (mr *MockStoreMockRecorder) GetOAuth2GithubDefaultEligible(ctx any) *gomock.Call {
+mr.mock.ctrl.T.Helper()
+return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOAuth2GithubDefaultEligible", reflect.TypeOf((*MockStore)(nil).GetOAuth2GithubDefaultEligible), ctx)
+}

// GetOAuth2ProviderAppByID mocks base method.
func (m *MockStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) {
m.ctrl.T.Helper()
@@ -1957,18 +1958,18 @@ func (mr *MockStoreMockRecorder) GetOrganizationByID(ctx, id any) *gomock.Call {
}

// GetOrganizationByName mocks base method.
-func (m *MockStore) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) {
+func (m *MockStore) GetOrganizationByName(ctx context.Context, arg database.GetOrganizationByNameParams) (database.Organization, error) {
m.ctrl.T.Helper()
-ret := m.ctrl.Call(m, "GetOrganizationByName", ctx, name)
+ret := m.ctrl.Call(m, "GetOrganizationByName", ctx, arg)
ret0, _ := ret[0].(database.Organization)
ret1, _ := ret[1].(error)
return ret0, ret1
}

// GetOrganizationByName indicates an expected call of GetOrganizationByName.
-func (mr *MockStoreMockRecorder) GetOrganizationByName(ctx, name any) *gomock.Call {
+func (mr *MockStoreMockRecorder) GetOrganizationByName(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
-return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationByName", reflect.TypeOf((*MockStore)(nil).GetOrganizationByName), ctx, name)
+return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationByName", reflect.TypeOf((*MockStore)(nil).GetOrganizationByName), ctx, arg)
}

// GetOrganizationIDsByMemberIDs mocks base method.
@@ -2002,18 +2003,18 @@ func (mr *MockStoreMockRecorder) GetOrganizations(ctx, arg any) *gomock.Call {
}

// GetOrganizationsByUserID mocks base method.
-func (m *MockStore) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) {
+func (m *MockStore) GetOrganizationsByUserID(ctx context.Context, arg database.GetOrganizationsByUserIDParams) ([]database.Organization, error) {
m.ctrl.T.Helper()
-ret := m.ctrl.Call(m, "GetOrganizationsByUserID", ctx, userID)
+ret := m.ctrl.Call(m, "GetOrganizationsByUserID", ctx, arg)
ret0, _ := ret[0].([]database.Organization)
ret1, _ := ret[1].(error)
return ret0, ret1
}

// GetOrganizationsByUserID indicates an expected call of GetOrganizationsByUserID.
-func (mr *MockStoreMockRecorder) GetOrganizationsByUserID(ctx, userID any) *gomock.Call {
+func (mr *MockStoreMockRecorder) GetOrganizationsByUserID(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
-return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationsByUserID", reflect.TypeOf((*MockStore)(nil).GetOrganizationsByUserID), ctx, userID)
+return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationsByUserID", reflect.TypeOf((*MockStore)(nil).GetOrganizationsByUserID), ctx, arg)
}

// GetParameterSchemasByJobID mocks base method.
@@ -5129,6 +5130,20 @@ func (mr *MockStoreMockRecorder) UpdateOrganization(ctx, arg any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOrganization", reflect.TypeOf((*MockStore)(nil).UpdateOrganization), ctx, arg)
}

+// UpdateOrganizationDeletedByID mocks base method.
+func (m *MockStore) UpdateOrganizationDeletedByID(ctx context.Context, arg database.UpdateOrganizationDeletedByIDParams) error {
+m.ctrl.T.Helper()
+ret := m.ctrl.Call(m, "UpdateOrganizationDeletedByID", ctx, arg)
+ret0, _ := ret[0].(error)
+return ret0
+}

+// UpdateOrganizationDeletedByID indicates an expected call of UpdateOrganizationDeletedByID.
+func (mr *MockStoreMockRecorder) UpdateOrganizationDeletedByID(ctx, arg any) *gomock.Call {
+mr.mock.ctrl.T.Helper()
+return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOrganizationDeletedByID", reflect.TypeOf((*MockStore)(nil).UpdateOrganizationDeletedByID), ctx, arg)
+}

// UpdateProvisionerDaemonLastSeenAt mocks base method.
func (m *MockStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error {
m.ctrl.T.Helper()
@@ -6026,6 +6041,20 @@ func (mr *MockStoreMockRecorder) UpsertNotificationsSettings(ctx, value any) *go
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertNotificationsSettings", reflect.TypeOf((*MockStore)(nil).UpsertNotificationsSettings), ctx, value)
}

+// UpsertOAuth2GithubDefaultEligible mocks base method.
+func (m *MockStore) UpsertOAuth2GithubDefaultEligible(ctx context.Context, eligible bool) error {
+m.ctrl.T.Helper()
+ret := m.ctrl.Call(m, "UpsertOAuth2GithubDefaultEligible", ctx, eligible)
+ret0, _ := ret[0].(error)
+return ret0
+}

+// UpsertOAuth2GithubDefaultEligible indicates an expected call of UpsertOAuth2GithubDefaultEligible.
+func (mr *MockStoreMockRecorder) UpsertOAuth2GithubDefaultEligible(ctx, eligible any) *gomock.Call {
+mr.mock.ctrl.T.Helper()
+return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertOAuth2GithubDefaultEligible", reflect.TypeOf((*MockStore)(nil).UpsertOAuth2GithubDefaultEligible), ctx, eligible)
+}

// UpsertOAuthSigningKey mocks base method.
func (m *MockStore) UpsertOAuthSigningKey(ctx context.Context, value string) error {
m.ctrl.T.Helper()
@@ -1,5 +1,3 @@
-//go:build linux
-
package dbtestutil_test

import (
@@ -21,6 +19,9 @@ func TestMain(m *testing.M) {

func TestOpen(t *testing.T) {
t.Parallel()
+if !dbtestutil.WillUsePostgres() {
+t.Skip("this test requires postgres")
+}

connect, err := dbtestutil.Open(t)
require.NoError(t, err)
@@ -35,6 +36,9 @@ func TestOpen(t *testing.T) {

func TestOpen_InvalidDBFrom(t *testing.T) {
t.Parallel()
+if !dbtestutil.WillUsePostgres() {
+t.Skip("this test requires postgres")
+}

_, err := dbtestutil.Open(t, dbtestutil.WithDBFrom("__invalid__"))
require.Error(t, err)
@@ -44,6 +48,9 @@ func TestOpen_InvalidDBFrom(t *testing.T) {

func TestOpen_ValidDBFrom(t *testing.T) {
t.Parallel()
+if !dbtestutil.WillUsePostgres() {
+t.Skip("this test requires postgres")
+}

// first check if we can create a new template db
dsn, err := dbtestutil.Open(t, dbtestutil.WithDBFrom(""))
80
coderd/database/dump.sql
generated
@@ -438,6 +438,74 @@ BEGIN
 END;
 $$;
 
+CREATE FUNCTION protect_deleting_organizations() RETURNS trigger
+    LANGUAGE plpgsql
+    AS $$
+DECLARE
+    workspace_count int;
+    template_count int;
+    group_count int;
+    member_count int;
+    provisioner_keys_count int;
+BEGIN
+    workspace_count := (
+        SELECT count(*) as count FROM workspaces
+        WHERE
+            workspaces.organization_id = OLD.id
+            AND workspaces.deleted = false
+    );
+
+    template_count := (
+        SELECT count(*) as count FROM templates
+        WHERE
+            templates.organization_id = OLD.id
+            AND templates.deleted = false
+    );
+
+    group_count := (
+        SELECT count(*) as count FROM groups
+        WHERE
+            groups.organization_id = OLD.id
+    );
+
+    member_count := (
+        SELECT count(*) as count FROM organization_members
+        WHERE
+            organization_members.organization_id = OLD.id
+    );
+
+    provisioner_keys_count := (
+        Select count(*) as count FROM provisioner_keys
+        WHERE
+            provisioner_keys.organization_id = OLD.id
+    );
+
+    -- Fail the deletion if one of the following:
+    -- * the organization has 1 or more workspaces
+    -- * the organization has 1 or more templates
+    -- * the organization has 1 or more groups other than "Everyone" group
+    -- * the organization has 1 or more members other than the organization owner
+    -- * the organization has 1 or more provisioner keys
+
+    IF (workspace_count + template_count + provisioner_keys_count) > 0 THEN
+        RAISE EXCEPTION 'cannot delete organization: organization has % workspaces, % templates, and % provisioner keys that must be deleted first', workspace_count, template_count, provisioner_keys_count;
+    END IF;
+
+    IF (group_count) > 1 THEN
+        RAISE EXCEPTION 'cannot delete organization: organization has % groups that must be deleted first', group_count - 1;
+    END IF;
+
+    -- Allow 1 member to exist, because you cannot remove yourself. You can
+    -- remove everyone else. Ideally, we only omit the member that matches
+    -- the user_id of the caller, however in a trigger, the caller is unknown.
+    IF (member_count) > 1 THEN
+        RAISE EXCEPTION 'cannot delete organization: organization has % members that must be deleted first', member_count - 1;
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
 CREATE FUNCTION provisioner_tagset_contains(provisioner_tags tagset, job_tags tagset) RETURNS boolean
     LANGUAGE plpgsql
     AS $$
@@ -970,7 +1038,8 @@ CREATE TABLE organizations (
     updated_at timestamp with time zone NOT NULL,
     is_default boolean DEFAULT false NOT NULL,
     display_name text NOT NULL,
-    icon text DEFAULT ''::text NOT NULL
+    icon text DEFAULT ''::text NOT NULL,
+    deleted boolean DEFAULT false NOT NULL
 );
 
 CREATE TABLE parameter_schemas (
@@ -2114,9 +2183,6 @@ ALTER TABLE ONLY oauth2_provider_apps
 ALTER TABLE ONLY organization_members
     ADD CONSTRAINT organization_members_pkey PRIMARY KEY (organization_id, user_id);
 
-ALTER TABLE ONLY organizations
-    ADD CONSTRAINT organizations_name UNIQUE (name);
-
 ALTER TABLE ONLY organizations
     ADD CONSTRAINT organizations_pkey PRIMARY KEY (id);
 
@@ -2308,9 +2374,7 @@ CREATE INDEX idx_organization_member_organization_id_uuid ON organization_member
 
 CREATE INDEX idx_organization_member_user_id_uuid ON organization_members USING btree (user_id);
 
-CREATE UNIQUE INDEX idx_organization_name ON organizations USING btree (name);
-
-CREATE UNIQUE INDEX idx_organization_name_lower ON organizations USING btree (lower(name));
+CREATE UNIQUE INDEX idx_organization_name_lower ON organizations USING btree (lower(name)) WHERE (deleted = false);
 
 CREATE UNIQUE INDEX idx_provisioner_daemons_org_name_owner_key ON provisioner_daemons USING btree (organization_id, name, lower(COALESCE((tags ->> 'owner'::text), ''::text)));
 
@@ -2528,6 +2592,8 @@ CREATE OR REPLACE VIEW workspace_prebuilds AS
 
 CREATE TRIGGER inhibit_enqueue_if_disabled BEFORE INSERT ON notification_messages FOR EACH ROW EXECUTE FUNCTION inhibit_enqueue_if_disabled();
 
+CREATE TRIGGER protect_deleting_organizations BEFORE UPDATE ON organizations FOR EACH ROW WHEN (((new.deleted = true) AND (old.deleted = false))) EXECUTE FUNCTION protect_deleting_organizations();
+
 CREATE TRIGGER remove_organization_member_custom_role BEFORE DELETE ON custom_roles FOR EACH ROW EXECUTE FUNCTION remove_organization_member_role();
 
 COMMENT ON TRIGGER remove_organization_member_custom_role ON custom_roles IS 'When a custom_role is deleted, this trigger removes the role from all organization members.';
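To see how the new trigger behaves at runtime, a small standalone sketch follows; the DSN and the organization ID are placeholders, and it assumes a database with this schema applied:

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq"
)

func main() {
	// Placeholder DSN; point it at a database that has this dump applied.
	db, err := sql.Open("postgres", "postgres://coder:coder@localhost:5432/coder?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Soft-deleting an organization that still owns workspaces, templates, or
	// provisioner keys makes the BEFORE UPDATE trigger raise an exception.
	_, err = db.Exec(
		`UPDATE organizations SET deleted = true, updated_at = now() WHERE id = $1 AND is_default = false`,
		"00000000-0000-0000-0000-000000000000", // placeholder organization ID
	)
	if err != nil {
		fmt.Println("soft delete blocked:", err)
	}
}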
@@ -0,0 +1,12 @@
+DROP INDEX IF EXISTS idx_organization_name_lower;
+
+CREATE UNIQUE INDEX IF NOT EXISTS idx_organization_name ON organizations USING btree (name);
+CREATE UNIQUE INDEX IF NOT EXISTS idx_organization_name_lower ON organizations USING btree (lower(name));
+
+ALTER TABLE ONLY organizations
+    ADD CONSTRAINT organizations_name UNIQUE (name);
+
+DROP TRIGGER IF EXISTS protect_deleting_organizations ON organizations;
+DROP FUNCTION IF EXISTS protect_deleting_organizations;
+
+ALTER TABLE organizations DROP COLUMN deleted;
@@ -0,0 +1,85 @@
+ALTER TABLE organizations ADD COLUMN deleted boolean DEFAULT FALSE NOT NULL;
+
+DROP INDEX IF EXISTS idx_organization_name;
+DROP INDEX IF EXISTS idx_organization_name_lower;
+
+CREATE UNIQUE INDEX IF NOT EXISTS idx_organization_name_lower ON organizations USING btree (lower(name))
+    where deleted = false;
+
+ALTER TABLE ONLY organizations
+    DROP CONSTRAINT IF EXISTS organizations_name;
+
+CREATE FUNCTION protect_deleting_organizations()
+    RETURNS TRIGGER AS
+$$
+DECLARE
+    workspace_count int;
+    template_count int;
+    group_count int;
+    member_count int;
+    provisioner_keys_count int;
+BEGIN
+    workspace_count := (
+        SELECT count(*) as count FROM workspaces
+        WHERE
+            workspaces.organization_id = OLD.id
+            AND workspaces.deleted = false
+    );
+
+    template_count := (
+        SELECT count(*) as count FROM templates
+        WHERE
+            templates.organization_id = OLD.id
+            AND templates.deleted = false
+    );
+
+    group_count := (
+        SELECT count(*) as count FROM groups
+        WHERE
+            groups.organization_id = OLD.id
+    );
+
+    member_count := (
+        SELECT count(*) as count FROM organization_members
+        WHERE
+            organization_members.organization_id = OLD.id
+    );
+
+    provisioner_keys_count := (
+        Select count(*) as count FROM provisioner_keys
+        WHERE
+            provisioner_keys.organization_id = OLD.id
+    );
+
+    -- Fail the deletion if one of the following:
+    -- * the organization has 1 or more workspaces
+    -- * the organization has 1 or more templates
+    -- * the organization has 1 or more groups other than "Everyone" group
+    -- * the organization has 1 or more members other than the organization owner
+    -- * the organization has 1 or more provisioner keys
+
+    IF (workspace_count + template_count + provisioner_keys_count) > 0 THEN
+        RAISE EXCEPTION 'cannot delete organization: organization has % workspaces, % templates, and % provisioner keys that must be deleted first', workspace_count, template_count, provisioner_keys_count;
+    END IF;
+
+    IF (group_count) > 1 THEN
+        RAISE EXCEPTION 'cannot delete organization: organization has % groups that must be deleted first', group_count - 1;
+    END IF;
+
+    -- Allow 1 member to exist, because you cannot remove yourself. You can
+    -- remove everyone else. Ideally, we only omit the member that matches
+    -- the user_id of the caller, however in a trigger, the caller is unknown.
+    IF (member_count) > 1 THEN
+        RAISE EXCEPTION 'cannot delete organization: organization has % members that must be deleted first', member_count - 1;
+    END IF;
+
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Trigger to protect organizations from being soft deleted with existing resources
+CREATE TRIGGER protect_deleting_organizations
+    BEFORE UPDATE ON organizations
+    FOR EACH ROW
+    WHEN (NEW.deleted = true AND OLD.deleted = false)
+    EXECUTE FUNCTION protect_deleting_organizations();
@@ -1,5 +1,3 @@
-//go:build linux
-
 package migrations_test
 
 import (
@@ -277,8 +277,10 @@ func (p GetEligibleProvisionerDaemonsByProvisionerJobIDsRow) RBACObject() rbac.O
 	return p.ProvisionerDaemon.RBACObject()
 }
 
+// RBACObject for a provisioner key is the same as a provisioner daemon.
+// Keys == provisioners from a RBAC perspective.
 func (p ProvisionerKey) RBACObject() rbac.Object {
-	return rbac.ResourceProvisionerKeys.
+	return rbac.ResourceProvisionerDaemon.
 		WithID(p.ID).
 		InOrg(p.OrganizationID)
 }
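A short sketch of what this mapping means for authorization callers; the authorizer, subject, and helper name below are assumed inputs for illustration, not part of this change:

package example

import (
	"context"

	"github.com/coder/coder/v2/coderd/database"
	"github.com/coder/coder/v2/coderd/rbac"
	"github.com/coder/coder/v2/coderd/rbac/policy"
)

// canReadProvisionerKey shows that a provisioner key now authorizes against
// the provisioner_daemon resource scoped to the key's organization.
func canReadProvisionerKey(ctx context.Context, auth rbac.Authorizer, subject rbac.Subject, key database.ProvisionerKey) bool {
	// key.RBACObject() == rbac.ResourceProvisionerDaemon.WithID(key.ID).InOrg(key.OrganizationID)
	return auth.Authorize(ctx, subject, policy.ActionRead, key.RBACObject()) == nil
}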
@@ -2675,6 +2675,7 @@ type Organization struct {
 	IsDefault bool `db:"is_default" json:"is_default"`
 	DisplayName string `db:"display_name" json:"display_name"`
 	Icon string `db:"icon" json:"icon"`
+	Deleted bool `db:"deleted" json:"deleted"`
 }
 
 type OrganizationMember struct {
@@ -1,5 +1,3 @@
-//go:build linux
-
 package pubsub_test
 
 import (
@@ -96,7 +96,6 @@ type sqlcQuerier interface {
 	// Logs can take up a lot of space, so it's important we clean up frequently.
 	DeleteOldWorkspaceAgentLogs(ctx context.Context, threshold time.Time) error
 	DeleteOldWorkspaceAgentStats(ctx context.Context) error
-	DeleteOrganization(ctx context.Context, id uuid.UUID) error
 	DeleteOrganizationMember(ctx context.Context, arg DeleteOrganizationMemberParams) error
 	DeleteProvisionerKey(ctx context.Context, id uuid.UUID) error
 	DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error
@@ -188,6 +187,7 @@ type sqlcQuerier interface {
 	GetNotificationTemplateByID(ctx context.Context, id uuid.UUID) (NotificationTemplate, error)
 	GetNotificationTemplatesByKind(ctx context.Context, kind NotificationTemplateKind) ([]NotificationTemplate, error)
 	GetNotificationsSettings(ctx context.Context) (string, error)
+	GetOAuth2GithubDefaultEligible(ctx context.Context) (bool, error)
 	GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderApp, error)
 	GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderAppCode, error)
 	GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (OAuth2ProviderAppCode, error)
@@ -199,10 +199,10 @@ type sqlcQuerier interface {
 	GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]GetOAuth2ProviderAppsByUserIDRow, error)
 	GetOAuthSigningKey(ctx context.Context) (string, error)
 	GetOrganizationByID(ctx context.Context, id uuid.UUID) (Organization, error)
-	GetOrganizationByName(ctx context.Context, name string) (Organization, error)
+	GetOrganizationByName(ctx context.Context, arg GetOrganizationByNameParams) (Organization, error)
 	GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) ([]GetOrganizationIDsByMemberIDsRow, error)
 	GetOrganizations(ctx context.Context, arg GetOrganizationsParams) ([]Organization, error)
-	GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]Organization, error)
+	GetOrganizationsByUserID(ctx context.Context, arg GetOrganizationsByUserIDParams) ([]Organization, error)
 	GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]ParameterSchema, error)
 	GetPrebuildMetrics(ctx context.Context) ([]GetPrebuildMetricsRow, error)
 	GetPrebuildsInProgress(ctx context.Context) ([]GetPrebuildsInProgressRow, error)
@@ -492,6 +492,7 @@ type sqlcQuerier interface {
 	UpdateOAuth2ProviderAppByID(ctx context.Context, arg UpdateOAuth2ProviderAppByIDParams) (OAuth2ProviderApp, error)
 	UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg UpdateOAuth2ProviderAppSecretByIDParams) (OAuth2ProviderAppSecret, error)
 	UpdateOrganization(ctx context.Context, arg UpdateOrganizationParams) (Organization, error)
+	UpdateOrganizationDeletedByID(ctx context.Context, arg UpdateOrganizationDeletedByIDParams) error
 	UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg UpdateProvisionerDaemonLastSeenAtParams) error
 	UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error
 	UpdateProvisionerJobWithCancelByID(ctx context.Context, arg UpdateProvisionerJobWithCancelByIDParams) error
@@ -560,6 +561,7 @@ type sqlcQuerier interface {
 	// Insert or update notification report generator logs with recent activity.
 	UpsertNotificationReportGeneratorLog(ctx context.Context, arg UpsertNotificationReportGeneratorLogParams) error
 	UpsertNotificationsSettings(ctx context.Context, value string) error
+	UpsertOAuth2GithubDefaultEligible(ctx context.Context, eligible bool) error
 	UpsertOAuthSigningKey(ctx context.Context, value string) error
 	UpsertProvisionerDaemon(ctx context.Context, arg UpsertProvisionerDaemonParams) (ProvisionerDaemon, error)
 	UpsertRuntimeConfig(ctx context.Context, arg UpsertRuntimeConfigParams) error
@@ -1,5 +1,3 @@
-//go:build linux
-
 package database_test
 
 import (
@@ -21,6 +19,7 @@ import (
 	"github.com/coder/coder/v2/coderd/database"
 	"github.com/coder/coder/v2/coderd/database/db2sdk"
 	"github.com/coder/coder/v2/coderd/database/dbauthz"
+	"github.com/coder/coder/v2/coderd/database/dbfake"
 	"github.com/coder/coder/v2/coderd/database/dbgen"
 	"github.com/coder/coder/v2/coderd/database/dbtestutil"
 	"github.com/coder/coder/v2/coderd/database/dbtime"
@@ -2916,6 +2915,136 @@ func TestGetUserStatusCounts(t *testing.T) {
 	}
 }
 
+func TestOrganizationDeleteTrigger(t *testing.T) {
+	t.Parallel()
+
+	if !dbtestutil.WillUsePostgres() {
+		t.SkipNow()
+	}
+
+	t.Run("WorkspaceExists", func(t *testing.T) {
+		t.Parallel()
+		db, _ := dbtestutil.NewDB(t)
+
+		orgA := dbfake.Organization(t, db).Do()
+
+		user := dbgen.User(t, db, database.User{})
+
+		dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
+			OrganizationID: orgA.Org.ID,
+			OwnerID:        user.ID,
+		}).Do()
+
+		ctx := testutil.Context(t, testutil.WaitShort)
+		err := db.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+			UpdatedAt: dbtime.Now(),
+			ID:        orgA.Org.ID,
+		})
+		require.Error(t, err)
+		// cannot delete organization: organization has 1 workspaces and 1 templates that must be deleted first
+		require.ErrorContains(t, err, "cannot delete organization")
+		require.ErrorContains(t, err, "has 1 workspaces")
+		require.ErrorContains(t, err, "1 templates")
+	})
+
+	t.Run("TemplateExists", func(t *testing.T) {
+		t.Parallel()
+		db, _ := dbtestutil.NewDB(t)
+
+		orgA := dbfake.Organization(t, db).Do()
+
+		user := dbgen.User(t, db, database.User{})
+
+		dbgen.Template(t, db, database.Template{
+			OrganizationID: orgA.Org.ID,
+			CreatedBy:      user.ID,
+		})
+
+		ctx := testutil.Context(t, testutil.WaitShort)
+		err := db.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+			UpdatedAt: dbtime.Now(),
+			ID:        orgA.Org.ID,
+		})
+		require.Error(t, err)
+		// cannot delete organization: organization has 0 workspaces and 1 templates that must be deleted first
+		require.ErrorContains(t, err, "cannot delete organization")
+		require.ErrorContains(t, err, "has 0 workspaces")
+		require.ErrorContains(t, err, "1 templates")
+	})
+
+	t.Run("ProvisionerKeyExists", func(t *testing.T) {
+		t.Parallel()
+		db, _ := dbtestutil.NewDB(t)
+
+		orgA := dbfake.Organization(t, db).Do()
+
+		dbgen.ProvisionerKey(t, db, database.ProvisionerKey{
+			OrganizationID: orgA.Org.ID,
+		})
+
+		ctx := testutil.Context(t, testutil.WaitShort)
+		err := db.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+			UpdatedAt: dbtime.Now(),
+			ID:        orgA.Org.ID,
+		})
+		require.Error(t, err)
+		// cannot delete organization: organization has 1 provisioner keys that must be deleted first
+		require.ErrorContains(t, err, "cannot delete organization")
+		require.ErrorContains(t, err, "1 provisioner keys")
+	})
+
+	t.Run("GroupExists", func(t *testing.T) {
+		t.Parallel()
+		db, _ := dbtestutil.NewDB(t)
+
+		orgA := dbfake.Organization(t, db).Do()
+
+		dbgen.Group(t, db, database.Group{
+			OrganizationID: orgA.Org.ID,
+		})
+
+		ctx := testutil.Context(t, testutil.WaitShort)
+		err := db.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+			UpdatedAt: dbtime.Now(),
+			ID:        orgA.Org.ID,
+		})
+		require.Error(t, err)
+		// cannot delete organization: organization has 1 groups that must be deleted first
+		require.ErrorContains(t, err, "cannot delete organization")
+		require.ErrorContains(t, err, "has 1 groups")
+	})
+
+	t.Run("MemberExists", func(t *testing.T) {
+		t.Parallel()
+		db, _ := dbtestutil.NewDB(t)
+
+		orgA := dbfake.Organization(t, db).Do()
+
+		userA := dbgen.User(t, db, database.User{})
+		userB := dbgen.User(t, db, database.User{})
+
+		dbgen.OrganizationMember(t, db, database.OrganizationMember{
+			OrganizationID: orgA.Org.ID,
+			UserID:         userA.ID,
+		})
+
+		dbgen.OrganizationMember(t, db, database.OrganizationMember{
+			OrganizationID: orgA.Org.ID,
+			UserID:         userB.ID,
+		})
+
+		ctx := testutil.Context(t, testutil.WaitShort)
+		err := db.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+			UpdatedAt: dbtime.Now(),
+			ID:        orgA.Org.ID,
+		})
+		require.Error(t, err)
+		// cannot delete organization: organization has 1 members that must be deleted first
+		require.ErrorContains(t, err, "cannot delete organization")
+		require.ErrorContains(t, err, "has 1 members")
+	})
+}
+
 func requireUsersMatch(t testing.TB, expected []database.User, found []database.GetUsersRow, msg string) {
 	t.Helper()
 	require.ElementsMatch(t, expected, database.ConvertUserRows(found), msg)
@@ -5066,22 +5066,9 @@ func (q *sqlQuerier) UpdateMemberRoles(ctx context.Context, arg UpdateMemberRole
 	return i, err
 }
 
-const deleteOrganization = `-- name: DeleteOrganization :exec
-DELETE FROM
-	organizations
-WHERE
-	id = $1 AND
-	is_default = false
-`
-
-func (q *sqlQuerier) DeleteOrganization(ctx context.Context, id uuid.UUID) error {
-	_, err := q.db.ExecContext(ctx, deleteOrganization, id)
-	return err
-}
-
 const getDefaultOrganization = `-- name: GetDefaultOrganization :one
 SELECT
-	id, name, description, created_at, updated_at, is_default, display_name, icon
+	id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 FROM
 	organizations
 WHERE
@@ -5102,13 +5089,14 @@ func (q *sqlQuerier) GetDefaultOrganization(ctx context.Context) (Organization,
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	)
 	return i, err
 }
 
 const getOrganizationByID = `-- name: GetOrganizationByID :one
 SELECT
-	id, name, description, created_at, updated_at, is_default, display_name, icon
+	id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 FROM
 	organizations
 WHERE
@@ -5127,23 +5115,31 @@ func (q *sqlQuerier) GetOrganizationByID(ctx context.Context, id uuid.UUID) (Org
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	)
 	return i, err
 }
 
 const getOrganizationByName = `-- name: GetOrganizationByName :one
 SELECT
-	id, name, description, created_at, updated_at, is_default, display_name, icon
+	id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 FROM
 	organizations
 WHERE
-	LOWER("name") = LOWER($1)
+	-- Optionally include deleted organizations
+	deleted = $1 AND
+	LOWER("name") = LOWER($2)
 LIMIT
 	1
 `
 
-func (q *sqlQuerier) GetOrganizationByName(ctx context.Context, name string) (Organization, error) {
-	row := q.db.QueryRowContext(ctx, getOrganizationByName, name)
+type GetOrganizationByNameParams struct {
+	Deleted bool `db:"deleted" json:"deleted"`
+	Name string `db:"name" json:"name"`
+}
+
+func (q *sqlQuerier) GetOrganizationByName(ctx context.Context, arg GetOrganizationByNameParams) (Organization, error) {
+	row := q.db.QueryRowContext(ctx, getOrganizationByName, arg.Deleted, arg.Name)
 	var i Organization
 	err := row.Scan(
 		&i.ID,
@@ -5154,37 +5150,40 @@ func (q *sqlQuerier) GetOrganizationByName(ctx context.Context, name string) (Or
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	)
 	return i, err
 }
 
 const getOrganizations = `-- name: GetOrganizations :many
 SELECT
-	id, name, description, created_at, updated_at, is_default, display_name, icon
+	id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 FROM
 	organizations
 WHERE
-	true
+	-- Optionally include deleted organizations
+	deleted = $1
 	-- Filter by ids
 	AND CASE
-		WHEN array_length($1 :: uuid[], 1) > 0 THEN
-			id = ANY($1)
+		WHEN array_length($2 :: uuid[], 1) > 0 THEN
+			id = ANY($2)
 		ELSE true
 	END
 	AND CASE
-		WHEN $2::text != '' THEN
-			LOWER("name") = LOWER($2)
+		WHEN $3::text != '' THEN
+			LOWER("name") = LOWER($3)
 		ELSE true
 	END
 `
 
 type GetOrganizationsParams struct {
+	Deleted bool `db:"deleted" json:"deleted"`
 	IDs []uuid.UUID `db:"ids" json:"ids"`
 	Name string `db:"name" json:"name"`
 }
 
 func (q *sqlQuerier) GetOrganizations(ctx context.Context, arg GetOrganizationsParams) ([]Organization, error) {
-	rows, err := q.db.QueryContext(ctx, getOrganizations, pq.Array(arg.IDs), arg.Name)
+	rows, err := q.db.QueryContext(ctx, getOrganizations, arg.Deleted, pq.Array(arg.IDs), arg.Name)
 	if err != nil {
 		return nil, err
 	}
@@ -5201,6 +5200,7 @@ func (q *sqlQuerier) GetOrganizations(ctx context.Context, arg GetOrganizationsP
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	); err != nil {
 		return nil, err
 	}
@@ -5217,10 +5217,12 @@ func (q *sqlQuerier) GetOrganizations(ctx context.Context, arg GetOrganizationsP
 
 const getOrganizationsByUserID = `-- name: GetOrganizationsByUserID :many
 SELECT
-	id, name, description, created_at, updated_at, is_default, display_name, icon
+	id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 FROM
 	organizations
 WHERE
+	-- Optionally include deleted organizations
+	deleted = $2 AND
 	id = ANY(
 		SELECT
 			organization_id
@@ -5231,8 +5233,13 @@ WHERE
 	)
 `
 
-func (q *sqlQuerier) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]Organization, error) {
-	rows, err := q.db.QueryContext(ctx, getOrganizationsByUserID, userID)
+type GetOrganizationsByUserIDParams struct {
+	UserID uuid.UUID `db:"user_id" json:"user_id"`
+	Deleted bool `db:"deleted" json:"deleted"`
+}
+
+func (q *sqlQuerier) GetOrganizationsByUserID(ctx context.Context, arg GetOrganizationsByUserIDParams) ([]Organization, error) {
+	rows, err := q.db.QueryContext(ctx, getOrganizationsByUserID, arg.UserID, arg.Deleted)
 	if err != nil {
 		return nil, err
 	}
@@ -5249,6 +5256,7 @@ func (q *sqlQuerier) GetOrganizationsByUserID(ctx context.Context, userID uuid.U
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	); err != nil {
 		return nil, err
 	}
@@ -5268,7 +5276,7 @@ INSERT INTO
 	organizations (id, "name", display_name, description, icon, created_at, updated_at, is_default)
 VALUES
 	-- If no organizations exist, and this is the first, make it the default.
-	($1, $2, $3, $4, $5, $6, $7, (SELECT TRUE FROM organizations LIMIT 1) IS NULL) RETURNING id, name, description, created_at, updated_at, is_default, display_name, icon
+	($1, $2, $3, $4, $5, $6, $7, (SELECT TRUE FROM organizations LIMIT 1) IS NULL) RETURNING id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 `
 
 type InsertOrganizationParams struct {
@@ -5301,6 +5309,7 @@ func (q *sqlQuerier) InsertOrganization(ctx context.Context, arg InsertOrganizat
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	)
 	return i, err
 }
@@ -5316,7 +5325,7 @@ SET
 	icon = $5
 WHERE
 	id = $6
-RETURNING id, name, description, created_at, updated_at, is_default, display_name, icon
+RETURNING id, name, description, created_at, updated_at, is_default, display_name, icon, deleted
 `
 
 type UpdateOrganizationParams struct {
@@ -5347,10 +5356,31 @@ func (q *sqlQuerier) UpdateOrganization(ctx context.Context, arg UpdateOrganizat
 		&i.IsDefault,
 		&i.DisplayName,
 		&i.Icon,
+		&i.Deleted,
 	)
 	return i, err
 }
 
+const updateOrganizationDeletedByID = `-- name: UpdateOrganizationDeletedByID :exec
+UPDATE organizations
+SET
+	deleted = true,
+	updated_at = $1
+WHERE
+	id = $2 AND
+	is_default = false
+`
+
+type UpdateOrganizationDeletedByIDParams struct {
+	UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
+	ID uuid.UUID `db:"id" json:"id"`
+}
+
+func (q *sqlQuerier) UpdateOrganizationDeletedByID(ctx context.Context, arg UpdateOrganizationDeletedByIDParams) error {
+	_, err := q.db.ExecContext(ctx, updateOrganizationDeletedByID, arg.UpdatedAt, arg.ID)
+	return err
+}
+
 const getParameterSchemasByJobID = `-- name: GetParameterSchemasByJobID :many
 SELECT
 	id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type, index
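A minimal caller sketch for the new soft-delete query; the helper name and the error wrapping are illustrative, assuming the usual database.Store interface and dbtime helper:

package example

import (
	"context"

	"github.com/google/uuid"
	"golang.org/x/xerrors"

	"github.com/coder/coder/v2/coderd/database"
	"github.com/coder/coder/v2/coderd/database/dbtime"
)

// softDeleteOrganization marks an organization as deleted; the
// protect_deleting_organizations trigger rejects the update while the
// organization still owns workspaces, templates, groups, members, or keys.
func softDeleteOrganization(ctx context.Context, store database.Store, orgID uuid.UUID) error {
	err := store.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
		ID:        orgID,
		UpdatedAt: dbtime.Now(),
	})
	if err != nil {
		return xerrors.Errorf("soft delete organization: %w", err)
	}
	return nil
}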
@@ -8395,6 +8425,23 @@ func (q *sqlQuerier) GetNotificationsSettings(ctx context.Context) (string, erro
 	return notifications_settings, err
 }
 
+const getOAuth2GithubDefaultEligible = `-- name: GetOAuth2GithubDefaultEligible :one
+SELECT
+	CASE
+		WHEN value = 'true' THEN TRUE
+		ELSE FALSE
+	END
+FROM site_configs
+WHERE key = 'oauth2_github_default_eligible'
+`
+
+func (q *sqlQuerier) GetOAuth2GithubDefaultEligible(ctx context.Context) (bool, error) {
+	row := q.db.QueryRowContext(ctx, getOAuth2GithubDefaultEligible)
+	var column_1 bool
+	err := row.Scan(&column_1)
+	return column_1, err
+}
+
 const getOAuthSigningKey = `-- name: GetOAuthSigningKey :one
 SELECT value FROM site_configs WHERE key = 'oauth_signing_key'
 `
@@ -8538,6 +8585,28 @@ func (q *sqlQuerier) UpsertNotificationsSettings(ctx context.Context, value stri
 	return err
 }
 
+const upsertOAuth2GithubDefaultEligible = `-- name: UpsertOAuth2GithubDefaultEligible :exec
+INSERT INTO site_configs (key, value)
+VALUES (
+	'oauth2_github_default_eligible',
+	CASE
+		WHEN $1::bool THEN 'true'
+		ELSE 'false'
+	END
+)
+ON CONFLICT (key) DO UPDATE
+SET value = CASE
+	WHEN $1::bool THEN 'true'
+	ELSE 'false'
+END
+WHERE site_configs.key = 'oauth2_github_default_eligible'
+`
+
+func (q *sqlQuerier) UpsertOAuth2GithubDefaultEligible(ctx context.Context, eligible bool) error {
+	_, err := q.db.ExecContext(ctx, upsertOAuth2GithubDefaultEligible, eligible)
+	return err
+}
+
 const upsertOAuthSigningKey = `-- name: UpsertOAuthSigningKey :exec
 INSERT INTO site_configs (key, value) VALUES ('oauth_signing_key', $1)
 ON CONFLICT (key) DO UPDATE set value = $1 WHERE site_configs.key = 'oauth_signing_key'
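For completeness, a small sketch of how the new site-config pair might be used together; the helper name is illustrative only:

package example

import (
	"context"

	"github.com/coder/coder/v2/coderd/database"
)

// setGithubDefaultEligible records the eligibility flag and reads it back;
// the query normalizes the stored 'true'/'false' string to a bool.
func setGithubDefaultEligible(ctx context.Context, store database.Store, eligible bool) (bool, error) {
	if err := store.UpsertOAuth2GithubDefaultEligible(ctx, eligible); err != nil {
		return false, err
	}
	return store.GetOAuth2GithubDefaultEligible(ctx)
}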
@@ -14,7 +14,8 @@ SELECT
 FROM
 	organizations
 WHERE
-	true
+	-- Optionally include deleted organizations
+	deleted = @deleted
 	-- Filter by ids
 	AND CASE
 		WHEN array_length(@ids :: uuid[], 1) > 0 THEN
@@ -42,6 +43,8 @@ SELECT
 FROM
 	organizations
 WHERE
+	-- Optionally include deleted organizations
+	deleted = @deleted AND
 	LOWER("name") = LOWER(@name)
 LIMIT
 	1;
@@ -52,6 +55,8 @@ SELECT
 FROM
 	organizations
 WHERE
+	-- Optionally include deleted organizations
+	deleted = @deleted AND
 	id = ANY(
 		SELECT
 			organization_id
@@ -81,9 +86,12 @@ WHERE
 	id = @id
 RETURNING *;
 
--- name: DeleteOrganization :exec
-DELETE FROM
-	organizations
+-- name: UpdateOrganizationDeletedByID :exec
+UPDATE organizations
+SET
+	deleted = true,
+	updated_at = @updated_at
 WHERE
-	id = $1 AND
+	id = @id AND
 	is_default = false;
 
@@ -107,3 +107,27 @@ ON CONFLICT (key) DO UPDATE SET value = $2 WHERE site_configs.key = $1;
 DELETE FROM site_configs
 WHERE site_configs.key = $1;
 
+-- name: GetOAuth2GithubDefaultEligible :one
+SELECT
+	CASE
+		WHEN value = 'true' THEN TRUE
+		ELSE FALSE
+	END
+FROM site_configs
+WHERE key = 'oauth2_github_default_eligible';
+
+-- name: UpsertOAuth2GithubDefaultEligible :exec
+INSERT INTO site_configs (key, value)
+VALUES (
+	'oauth2_github_default_eligible',
+	CASE
+		WHEN sqlc.arg(eligible)::bool THEN 'true'
+		ELSE 'false'
+	END
+)
+ON CONFLICT (key) DO UPDATE
+SET value = CASE
+	WHEN sqlc.arg(eligible)::bool THEN 'true'
+	ELSE 'false'
+END
+WHERE site_configs.key = 'oauth2_github_default_eligible';
@@ -38,7 +38,6 @@ const (
 	UniqueOauth2ProviderAppsNameKey UniqueConstraint = "oauth2_provider_apps_name_key" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_name_key UNIQUE (name);
 	UniqueOauth2ProviderAppsPkey UniqueConstraint = "oauth2_provider_apps_pkey" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_pkey PRIMARY KEY (id);
 	UniqueOrganizationMembersPkey UniqueConstraint = "organization_members_pkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_pkey PRIMARY KEY (organization_id, user_id);
-	UniqueOrganizationsName UniqueConstraint = "organizations_name" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_name UNIQUE (name);
 	UniqueOrganizationsPkey UniqueConstraint = "organizations_pkey" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_pkey PRIMARY KEY (id);
 	UniqueParameterSchemasJobIDNameKey UniqueConstraint = "parameter_schemas_job_id_name_key" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_job_id_name_key UNIQUE (job_id, name);
 	UniqueParameterSchemasPkey UniqueConstraint = "parameter_schemas_pkey" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_pkey PRIMARY KEY (id);
@@ -96,8 +95,7 @@ const (
 	UniqueWorkspacesPkey UniqueConstraint = "workspaces_pkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id);
 	UniqueIndexAPIKeyName UniqueConstraint = "idx_api_key_name" // CREATE UNIQUE INDEX idx_api_key_name ON api_keys USING btree (user_id, token_name) WHERE (login_type = 'token'::login_type);
 	UniqueIndexCustomRolesNameLower UniqueConstraint = "idx_custom_roles_name_lower" // CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name));
-	UniqueIndexOrganizationName UniqueConstraint = "idx_organization_name" // CREATE UNIQUE INDEX idx_organization_name ON organizations USING btree (name);
-	UniqueIndexOrganizationNameLower UniqueConstraint = "idx_organization_name_lower" // CREATE UNIQUE INDEX idx_organization_name_lower ON organizations USING btree (lower(name));
+	UniqueIndexOrganizationNameLower UniqueConstraint = "idx_organization_name_lower" // CREATE UNIQUE INDEX idx_organization_name_lower ON organizations USING btree (lower(name)) WHERE (deleted = false);
 	UniqueIndexProvisionerDaemonsOrgNameOwnerKey UniqueConstraint = "idx_provisioner_daemons_org_name_owner_key" // CREATE UNIQUE INDEX idx_provisioner_daemons_org_name_owner_key ON provisioner_daemons USING btree (organization_id, name, lower(COALESCE((tags ->> 'owner'::text), ''::text)));
 	UniqueIndexUsersEmail UniqueConstraint = "idx_users_email" // CREATE UNIQUE INDEX idx_users_email ON users USING btree (email) WHERE (deleted = false);
 	UniqueIndexUsersUsername UniqueConstraint = "idx_users_username" // CREATE UNIQUE INDEX idx_users_username ON users USING btree (username) WHERE (deleted = false);
@@ -73,7 +73,10 @@ func ExtractOrganizationParam(db database.Store) func(http.Handler) http.Handler
 			if err == nil {
 				organization, dbErr = db.GetOrganizationByID(ctx, id)
 			} else {
-				organization, dbErr = db.GetOrganizationByName(ctx, arg)
+				organization, dbErr = db.GetOrganizationByName(ctx, database.GetOrganizationByNameParams{
+					Name: arg,
+					Deleted: false,
+				})
 			}
 		}
 		if httpapi.Is404Error(dbErr) {
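Callers that used to pass a bare name now need the params struct; a small lookup sketch under that assumption, with the helper name purely illustrative:

package example

import (
	"context"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/coderd/database"
)

// lookupOrganization resolves an organization by ID or name, excluding
// soft-deleted organizations the same way the middleware above does.
func lookupOrganization(ctx context.Context, store database.Store, nameOrID string) (database.Organization, error) {
	if id, err := uuid.Parse(nameOrID); err == nil {
		return store.GetOrganizationByID(ctx, id)
	}
	return store.GetOrganizationByName(ctx, database.GetOrganizationByNameParams{
		Name:    nameOrID,
		Deleted: false,
	})
}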
@@ -97,7 +97,10 @@ func (s AGPLIDPSync) SyncOrganizations(ctx context.Context, tx database.Store, u
 		return xerrors.Errorf("organization claims: %w", err)
 	}
 
-	existingOrgs, err := tx.GetOrganizationsByUserID(ctx, user.ID)
+	existingOrgs, err := tx.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{
+		UserID: user.ID,
+		Deleted: false,
+	})
 	if err != nil {
 		return xerrors.Errorf("failed to get user organizations: %w", err)
 	}
@@ -3,10 +3,8 @@ package prebuilds
 import (
 	"context"
 
-	"github.com/google/uuid"
-	"golang.org/x/xerrors"
-
 	"github.com/coder/coder/v2/coderd/database"
+	"github.com/google/uuid"
 )
 
 type Claimer interface {
@@ -17,7 +15,8 @@ type Claimer interface {
 type AGPLPrebuildClaimer struct{}
 
 func (c AGPLPrebuildClaimer) Claim(context.Context, database.Store, uuid.UUID, string, uuid.UUID) (*uuid.UUID, error) {
-	return nil, xerrors.Errorf("not entitled to claim prebuilds")
+	// Not entitled to claim prebuilds in AGPL version.
+	return nil, nil
 }
 
 func (c AGPLPrebuildClaimer) Initiator() uuid.UUID {
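Since the AGPL claimer now returns (nil, nil) rather than an error, callers can treat a nil ID as "no prebuild claimed" and fall back to a regular build; a sketch of that handling, with the package path and parameter meanings assumed from the interface signature above:

package example

import (
	"context"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/coderd/database"
	"github.com/coder/coder/v2/coderd/prebuilds"
)

// maybeClaimPrebuild returns a claimed workspace ID, or nil when nothing was
// claimed (including the not-entitled AGPL case), so the caller can proceed
// with a normal build instead of treating it as a failure.
func maybeClaimPrebuild(ctx context.Context, store database.Store, c prebuilds.Claimer, userID uuid.UUID, name string, presetID uuid.UUID) (*uuid.UUID, error) {
	claimedID, err := c.Claim(ctx, store, userID, name, presetID)
	if err != nil {
		return nil, err
	}
	return claimedID, nil
}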
@@ -206,8 +206,8 @@ var (
 
 	// ResourceProvisionerDaemon
 	// Valid Actions
-	// - "ActionCreate" :: create a provisioner daemon
-	// - "ActionDelete" :: delete a provisioner daemon
+	// - "ActionCreate" :: create a provisioner daemon/key
+	// - "ActionDelete" :: delete a provisioner daemon/key
 	// - "ActionRead" :: read provisioner daemon
 	// - "ActionUpdate" :: update a provisioner daemon
 	ResourceProvisionerDaemon = Object{
@@ -221,15 +221,6 @@ var (
 		Type: "provisioner_jobs",
 	}
 
-	// ResourceProvisionerKeys
-	// Valid Actions
-	// - "ActionCreate" :: create a provisioner key
-	// - "ActionDelete" :: delete a provisioner key
-	// - "ActionRead" :: read provisioner keys
-	ResourceProvisionerKeys = Object{
-		Type: "provisioner_keys",
-	}
-
 	// ResourceReplicas
 	// Valid Actions
 	// - "ActionRead" :: read replicas
@@ -355,7 +346,6 @@ func AllResources() []Objecter {
 		ResourceOrganizationMember,
 		ResourceProvisionerDaemon,
 		ResourceProvisionerJobs,
-		ResourceProvisionerKeys,
 		ResourceReplicas,
 		ResourceSystem,
 		ResourceTailnetCoordinator,
@@ -162,11 +162,11 @@ var RBACPermissions = map[string]PermissionDefinition{
 	},
 	"provisioner_daemon": {
 		Actions: map[Action]ActionDefinition{
-			ActionCreate: actDef("create a provisioner daemon"),
+			ActionCreate: actDef("create a provisioner daemon/key"),
 			// TODO: Move to use?
 			ActionRead: actDef("read provisioner daemon"),
 			ActionUpdate: actDef("update a provisioner daemon"),
-			ActionDelete: actDef("delete a provisioner daemon"),
+			ActionDelete: actDef("delete a provisioner daemon/key"),
 		},
 	},
 	"provisioner_jobs": {
@@ -174,13 +174,6 @@ var RBACPermissions = map[string]PermissionDefinition{
 			ActionRead: actDef("read provisioner jobs"),
 		},
 	},
-	"provisioner_keys": {
-		Actions: map[Action]ActionDefinition{
-			ActionCreate: actDef("create a provisioner key"),
-			ActionRead: actDef("read provisioner keys"),
-			ActionDelete: actDef("delete a provisioner key"),
-		},
-	},
 	"organization": {
 		Actions: map[Action]ActionDefinition{
 			ActionCreate: actDef("create an organization"),
@@ -556,15 +556,6 @@ func TestRolePermissions(t *testing.T) {
 				false: {setOtherOrg, memberMe, userAdmin, orgUserAdmin, orgAuditor},
 			},
 		},
-		{
-			Name: "ProvisionerKeys",
-			Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionDelete},
-			Resource: rbac.ResourceProvisionerKeys.InOrg(orgID),
-			AuthorizeMap: map[bool][]hasAuthSubjects{
-				true: {owner, orgAdmin},
-				false: {setOtherOrg, memberMe, orgMemberMe, userAdmin, templateAdmin, orgTemplateAdmin, orgUserAdmin, orgAuditor},
-			},
-		},
 		{
 			Name: "ProvisionerJobs",
 			Actions: []policy.Action{policy.ActionRead},
@@ -258,7 +258,9 @@ func parseOrganization(ctx context.Context, db database.Store, parser *httpapi.Q
 	if err == nil {
 		return organizationID, nil
 	}
-	organization, err := db.GetOrganizationByName(ctx, v)
+	organization, err := db.GetOrganizationByName(ctx, database.GetOrganizationByNameParams{
+		Name: v, Deleted: false,
+	})
 	if err != nil {
 		return uuid.Nil, xerrors.Errorf("organization %q either does not exist, or you are unauthorized to view it", v)
 	}
@@ -947,6 +947,7 @@ func ConvertUser(dbUser database.User) User {
 		CreatedAt: dbUser.CreatedAt,
 		Status: dbUser.Status,
 		GithubComUserID: dbUser.GithubComUserID.Int64,
+		LoginType: string(dbUser.LoginType),
 	}
 }
 
@@ -1149,6 +1150,8 @@ type User struct {
 	RBACRoles []string `json:"rbac_roles"`
 	Status database.UserStatus `json:"status"`
 	GithubComUserID int64 `json:"github_com_user_id"`
+	// Omitempty for backwards compatibility.
+	LoginType string `json:"login_type,omitempty"`
 }
 
 type Group struct {
@@ -27,6 +27,7 @@ import (
 	"github.com/coder/coder/v2/coderd/cryptokeys"
 	"github.com/coder/coder/v2/coderd/idpsync"
 	"github.com/coder/coder/v2/coderd/jwtutils"
+	"github.com/coder/coder/v2/coderd/telemetry"
 	"github.com/coder/coder/v2/coderd/util/ptr"
 
 	"github.com/coder/coder/v2/coderd/apikey"
@@ -46,6 +47,14 @@ import (
 	"github.com/coder/coder/v2/cryptorand"
 )
 
+type MergedClaimsSource string
+
+var (
+	MergedClaimsSourceNone        MergedClaimsSource = "none"
+	MergedClaimsSourceUserInfo    MergedClaimsSource = "user_info"
+	MergedClaimsSourceAccessToken MergedClaimsSource = "access_token"
+)
+
 const (
 	userAuthLoggerName      = "userauth"
 	OAuthConvertCookieValue = "coder_oauth_convert_jwt"
@@ -756,6 +765,8 @@ type GithubOAuth2Config struct {
 	AllowEveryone      bool
 	AllowOrganizations []string
 	AllowTeams         []GithubOAuth2Team
+
+	DefaultProviderConfigured bool
 }
 
 func (c *GithubOAuth2Config) Exchange(ctx context.Context, code string, opts ...oauth2.AuthCodeOption) (*oauth2.Token, error) {
@@ -797,7 +808,10 @@ func (api *API) userAuthMethods(rw http.ResponseWriter, r *http.Request) {
 		Password: codersdk.AuthMethod{
 			Enabled: !api.DeploymentValues.DisablePasswordAuth.Value(),
 		},
-		Github: codersdk.AuthMethod{Enabled: api.GithubOAuth2Config != nil},
+		Github: codersdk.GithubAuthMethod{
+			Enabled:                   api.GithubOAuth2Config != nil,
+			DefaultProviderConfigured: api.GithubOAuth2Config != nil && api.GithubOAuth2Config.DefaultProviderConfigured,
+		},
 		OIDC: codersdk.OIDCAuthMethod{
 			AuthMethod: codersdk.AuthMethod{Enabled: api.OIDCConfig != nil},
 			SignInText: signInText,
@@ -1046,6 +1060,10 @@ func (api *API) userOAuth2Github(rw http.ResponseWriter, r *http.Request) {
 	defer params.CommitAuditLogs()
 	if err != nil {
 		if httpErr := idpsync.IsHTTPError(err); httpErr != nil {
+			// In the device flow, the error page is rendered client-side.
+			if api.GithubOAuth2Config.DeviceFlowEnabled && httpErr.RenderStaticPage {
+				httpErr.RenderStaticPage = false
+			}
 			httpErr.Write(rw, r)
 			return
 		}
@@ -1116,11 +1134,13 @@ type OIDCConfig struct {
 	// AuthURLParams are additional parameters to be passed to the OIDC provider
 	// when requesting an access token.
 	AuthURLParams map[string]string
-	// IgnoreUserInfo causes Coder to only use claims from the ID token to
-	// process OIDC logins. This is useful if the OIDC provider does not
-	// support the userinfo endpoint, or if the userinfo endpoint causes
-	// undesirable behavior.
-	IgnoreUserInfo bool
+	// SecondaryClaims indicates where to source additional claim information from.
+	// The standard is either 'MergedClaimsSourceNone' or 'MergedClaimsSourceUserInfo'.
+	//
+	// The OIDC compliant way is to use the userinfo endpoint. This option
+	// is useful when the userinfo endpoint does not exist or causes undesirable
+	// behavior.
+	SecondaryClaims MergedClaimsSource
 	// SignInText is the text to display on the OIDC login button
 	SignInText string
 	// IconURL points to the URL of an icon to display on the OIDC login button
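The `SecondaryClaims` field replaces the old `IgnoreUserInfo` boolean with a three-way choice. Below is a minimal sketch of selecting each source from the `coderd` package; the field and constant names come from the hunks above, and all other `OIDCConfig` fields are left at their zero values purely for illustration.

```go
package main

import "github.com/coder/coder/v2/coderd"

func main() {
	// Spec-compliant default: merge supplementary claims from the userinfo endpoint.
	cfg := coderd.OIDCConfig{
		SecondaryClaims: coderd.MergedClaimsSourceUserInfo,
	}

	// Equivalent of the old IgnoreUserInfo=true: rely on the ID token alone.
	cfg.SecondaryClaims = coderd.MergedClaimsSourceNone

	// Niche, non-spec-compliant option: source claims from a JWT access token.
	cfg.SecondaryClaims = coderd.MergedClaimsSourceAccessToken

	_ = cfg
}
```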
@@ -1216,50 +1236,39 @@ func (api *API) userOIDC(rw http.ResponseWriter, r *http.Request) {
 	// Some providers (e.g. ADFS) do not support custom OIDC claims in the
 	// UserInfo endpoint, so we allow users to disable it and only rely on the
 	// ID token.
-	userInfoClaims := make(map[string]interface{})
+	//
 	// If user info is skipped, the idtokenClaims are the claims.
 	mergedClaims := idtokenClaims
-	if !api.OIDCConfig.IgnoreUserInfo {
-		userInfo, err := api.OIDCConfig.Provider.UserInfo(ctx, oauth2.StaticTokenSource(state.Token))
-		if err == nil {
-			err = userInfo.Claims(&userInfoClaims)
-			if err != nil {
-				logger.Error(ctx, "oauth2: unable to unmarshal user info claims", slog.Error(err))
-				httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
-					Message: "Failed to unmarshal user info claims.",
-					Detail:  err.Error(),
-				})
-				return
-			}
-			logger.Debug(ctx, "got oidc claims",
-				slog.F("source", "userinfo"),
-				slog.F("claim_fields", claimFields(userInfoClaims)),
-				slog.F("blank", blankFields(userInfoClaims)),
-			)
-
-			// Merge the claims from the ID token and the UserInfo endpoint.
-			// Information from UserInfo takes precedence.
-			mergedClaims = mergeClaims(idtokenClaims, userInfoClaims)
-
-			// Log all of the field names after merging.
-			logger.Debug(ctx, "got oidc claims",
-				slog.F("source", "merged"),
-				slog.F("claim_fields", claimFields(mergedClaims)),
-				slog.F("blank", blankFields(mergedClaims)),
-			)
-		} else if !strings.Contains(err.Error(), "user info endpoint is not supported by this provider") {
-			logger.Error(ctx, "oauth2: unable to obtain user information claims", slog.Error(err))
-			httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
-				Message: "Failed to obtain user information claims.",
-				Detail:  "The attempt to fetch claims via the UserInfo endpoint failed: " + err.Error(),
-			})
-			return
-		} else {
-			// The OIDC provider does not support the UserInfo endpoint.
-			// This is not an error, but we should log it as it may mean
-			// that some claims are missing.
-			logger.Warn(ctx, "OIDC provider does not support the user info endpoint, ensure that all required claims are present in the id_token")
-		}
-	}
+	supplementaryClaims := make(map[string]interface{})
+	switch api.OIDCConfig.SecondaryClaims {
+	case MergedClaimsSourceUserInfo:
+		supplementaryClaims, ok = api.userInfoClaims(ctx, rw, state, logger)
+		if !ok {
+			return
+		}
+
+		// The precedence ordering is userInfoClaims > idTokenClaims.
+		// Note: Unsure why exactly this is the case. idTokenClaims feels more
+		// important?
+		mergedClaims = mergeClaims(idtokenClaims, supplementaryClaims)
+	case MergedClaimsSourceAccessToken:
+		supplementaryClaims, ok = api.accessTokenClaims(ctx, rw, state, logger)
+		if !ok {
+			return
+		}
+		// idTokenClaims take priority over accessTokenClaims. The order should
+		// not matter. It is just safer to assume idTokenClaims is the truth,
+		// and accessTokenClaims are supplemental.
+		mergedClaims = mergeClaims(supplementaryClaims, idtokenClaims)
+	case MergedClaimsSourceNone:
+		// noop, keep the userInfoClaims empty
+	default:
+		// This should never happen and is a developer error
+		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
+			Message: "Invalid source for secondary user claims.",
+			Detail:  fmt.Sprintf("invalid source: %q", api.OIDCConfig.SecondaryClaims),
+		})
+		return // Invalid MergedClaimsSource
+	}
 
 	usernameRaw, ok := mergedClaims[api.OIDCConfig.UsernameField]
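The switch above leans on `mergeClaims` giving precedence to its second argument: userinfo claims override the ID token, while access-token claims are only supplemental to it. The `mergeClaims` helper itself is not part of this diff, so the sketch below illustrates the assumed semantics with a stand-in implementation.

```go
package main

import "fmt"

// mergeClaimsSketch is an illustrative stand-in, not the coder implementation:
// keys from "precedence" overwrite keys from "base".
func mergeClaimsSketch(base, precedence map[string]interface{}) map[string]interface{} {
	merged := make(map[string]interface{}, len(base)+len(precedence))
	for k, v := range base {
		merged[k] = v
	}
	for k, v := range precedence {
		merged[k] = v
	}
	return merged
}

func main() {
	idToken := map[string]interface{}{"sub": "abc", "email": "id@example.com"}
	userInfo := map[string]interface{}{"email": "userinfo@example.com"}
	accessToken := map[string]interface{}{"email": "token@example.com"}

	// MergedClaimsSourceUserInfo: userinfo wins over the ID token.
	fmt.Println(mergeClaimsSketch(idToken, userInfo)["email"]) // userinfo@example.com

	// MergedClaimsSourceAccessToken: the ID token wins over access-token claims.
	fmt.Println(mergeClaimsSketch(accessToken, idToken)["email"]) // id@example.com
}
```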
@@ -1413,7 +1422,7 @@ func (api *API) userOIDC(rw http.ResponseWriter, r *http.Request) {
 		RoleSync: roleSync,
 		UserClaims: database.UserLinkClaims{
 			IDTokenClaims:  idtokenClaims,
-			UserInfoClaims: userInfoClaims,
+			UserInfoClaims: supplementaryClaims,
 			MergedClaims:   mergedClaims,
 		},
 	}).SetInitAuditRequest(func(params *audit.RequestParams) (*audit.Request[database.User], func()) {
@@ -1447,6 +1456,68 @@ func (api *API) userOIDC(rw http.ResponseWriter, r *http.Request) {
 	http.Redirect(rw, r, redirect, http.StatusTemporaryRedirect)
 }
 
+func (api *API) accessTokenClaims(ctx context.Context, rw http.ResponseWriter, state httpmw.OAuth2State, logger slog.Logger) (accessTokenClaims map[string]interface{}, ok bool) {
+	// Assume the access token is a jwt, and signed by the provider.
+	accessToken, err := api.OIDCConfig.Verifier.Verify(ctx, state.Token.AccessToken)
+	if err != nil {
+		logger.Error(ctx, "oauth2: unable to verify access token as secondary claims source", slog.Error(err))
+		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
+			Message: "Failed to verify access token.",
+			Detail:  fmt.Sprintf("sourcing secondary claims from access token: %s", err.Error()),
+		})
+		return nil, false
+	}
+
+	rawClaims := make(map[string]any)
+	err = accessToken.Claims(&rawClaims)
+	if err != nil {
+		logger.Error(ctx, "oauth2: unable to unmarshal access token claims", slog.Error(err))
+		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
+			Message: "Failed to unmarshal access token claims.",
+			Detail:  err.Error(),
+		})
+		return nil, false
+	}
+
+	return rawClaims, true
+}
+
+func (api *API) userInfoClaims(ctx context.Context, rw http.ResponseWriter, state httpmw.OAuth2State, logger slog.Logger) (userInfoClaims map[string]interface{}, ok bool) {
+	userInfoClaims = make(map[string]interface{})
+	userInfo, err := api.OIDCConfig.Provider.UserInfo(ctx, oauth2.StaticTokenSource(state.Token))
+	if err == nil {
+		err = userInfo.Claims(&userInfoClaims)
+		if err != nil {
+			logger.Error(ctx, "oauth2: unable to unmarshal user info claims", slog.Error(err))
+			httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
+				Message: "Failed to unmarshal user info claims.",
+				Detail:  err.Error(),
+			})
+			return nil, false
+		}
+		logger.Debug(ctx, "got oidc claims",
+			slog.F("source", "userinfo"),
+			slog.F("claim_fields", claimFields(userInfoClaims)),
+			slog.F("blank", blankFields(userInfoClaims)),
+		)
+	} else if !strings.Contains(err.Error(), "user info endpoint is not supported by this provider") {
+		logger.Error(ctx, "oauth2: unable to obtain user information claims", slog.Error(err))
+		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
+			Message: "Failed to obtain user information claims.",
+			Detail:  "The attempt to fetch claims via the UserInfo endpoint failed: " + err.Error(),
+		})
+		return nil, false
+	} else {
+		// The OIDC provider does not support the UserInfo endpoint.
+		// This is not an error, but we should log it as it may mean
+		// that some claims are missing.
+		logger.Warn(ctx, "OIDC provider does not support the user info endpoint, ensure that all required claims are present in the id_token",
+			slog.Error(err),
+		)
+	}
+	return userInfoClaims, true
+}
+
 // claimFields returns the sorted list of fields in the claims map.
 func claimFields(claims map[string]interface{}) []string {
 	fields := []string{}
@@ -1573,7 +1644,17 @@ func (api *API) oauthLogin(r *http.Request, params *oauthLoginParams) ([]*http.C
 		isConvertLoginType = true
 	}
 
-	if user.ID == uuid.Nil && !params.AllowSignups {
+	// nolint:gocritic // Getting user count is a system function.
+	userCount, err := tx.GetUserCount(dbauthz.AsSystemRestricted(ctx))
+	if err != nil {
+		return xerrors.Errorf("unable to fetch user count: %w", err)
+	}
+
+	// Allow the first user to sign up with OIDC, regardless of
+	// whether signups are enabled or not.
+	allowSignup := userCount == 0 || params.AllowSignups
+
+	if user.ID == uuid.Nil && !allowSignup {
 		signupsDisabledText := "Please contact your Coder administrator to request access."
 		if api.OIDCConfig != nil && api.OIDCConfig.SignupsDisabledText != "" {
 			signupsDisabledText = render.HTMLFromMarkdown(api.OIDCConfig.SignupsDisabledText)
@@ -1634,6 +1715,12 @@ func (api *API) oauthLogin(r *http.Request, params *oauthLoginParams) ([]*http.C
 			return xerrors.Errorf("unable to fetch default organization: %w", err)
 		}
 
+		rbacRoles := []string{}
+		// If this is the first user, add the owner role.
+		if userCount == 0 {
+			rbacRoles = append(rbacRoles, rbac.RoleOwner().String())
+		}
+
 		//nolint:gocritic
 		user, err = api.CreateUser(dbauthz.AsSystemRestricted(ctx), tx, CreateUserRequest{
 			CreateUserRequestWithOrgs: codersdk.CreateUserRequestWithOrgs{
@@ -1648,10 +1735,20 @@ func (api *API) oauthLogin(r *http.Request, params *oauthLoginParams) ([]*http.C
 			},
 			LoginType:          params.LoginType,
 			accountCreatorName: "oauth",
+			RBACRoles:          rbacRoles,
 		})
 		if err != nil {
 			return xerrors.Errorf("create user: %w", err)
 		}
+
+		if userCount == 0 {
+			telemetryUser := telemetry.ConvertUser(user)
+			// The email is not anonymized for the first user.
+			telemetryUser.Email = &user.Email
+			api.Telemetry.Report(&telemetry.Snapshot{
+				Users: []telemetry.User{telemetryUser},
+			})
+		}
 	}
 
 	// Activate dormant user on sign-in
@@ -22,6 +22,7 @@ import (
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+	"go.uber.org/atomic"
 	"golang.org/x/oauth2"
 	"golang.org/x/xerrors"
 
@@ -61,7 +62,7 @@ func TestOIDCOauthLoginWithExisting(t *testing.T) {
 
 	cfg := fake.OIDCConfig(t, nil, func(cfg *coderd.OIDCConfig) {
 		cfg.AllowSignups = true
-		cfg.IgnoreUserInfo = true
+		cfg.SecondaryClaims = coderd.MergedClaimsSourceNone
 	})
 
 	client, _, api := coderdtest.NewWithAPI(t, &coderdtest.Options{
@@ -254,11 +255,20 @@ func TestUserOAuth2Github(t *testing.T) {
 	})
 	t.Run("BlockSignups", func(t *testing.T) {
 		t.Parallel()
+
+		db, ps := dbtestutil.NewDB(t)
+
+		id := atomic.NewInt64(100)
+		login := atomic.NewString("testuser")
+		email := atomic.NewString("testuser@coder.com")
+
 		client := coderdtest.New(t, &coderdtest.Options{
+			Database: db,
+			Pubsub:   ps,
 			GithubOAuth2Config: &coderd.GithubOAuth2Config{
 				OAuth2Config:       &testutil.OAuth2Config{},
 				AllowOrganizations: []string{"coder"},
-				ListOrganizationMemberships: func(ctx context.Context, client *http.Client) ([]*github.Membership, error) {
+				ListOrganizationMemberships: func(_ context.Context, _ *http.Client) ([]*github.Membership, error) {
 					return []*github.Membership{{
 						State: &stateActive,
 						Organization: &github.Organization{
@@ -266,16 +276,19 @@ func TestUserOAuth2Github(t *testing.T) {
 						},
 					}}, nil
 				},
-				AuthenticatedUser: func(ctx context.Context, client *http.Client) (*github.User, error) {
+				AuthenticatedUser: func(_ context.Context, _ *http.Client) (*github.User, error) {
+					id := id.Load()
+					login := login.Load()
 					return &github.User{
-						ID:    github.Int64(100),
-						Login: github.String("testuser"),
+						ID:    &id,
+						Login: &login,
 						Name:  github.String("The Right Honorable Sir Test McUser"),
 					}, nil
 				},
-				ListEmails: func(ctx context.Context, client *http.Client) ([]*github.UserEmail, error) {
+				ListEmails: func(_ context.Context, _ *http.Client) ([]*github.UserEmail, error) {
+					email := email.Load()
 					return []*github.UserEmail{{
-						Email:    github.String("testuser@coder.com"),
+						Email:    &email,
 						Verified: github.Bool(true),
 						Primary:  github.Bool(true),
 					}}, nil
@@ -283,8 +296,23 @@ func TestUserOAuth2Github(t *testing.T) {
 				},
 		})
 
+		// The first user in a deployment with signups disabled will be allowed to sign up,
+		// but all the other users will not.
 		resp := oauth2Callback(t, client)
+		require.Equal(t, http.StatusTemporaryRedirect, resp.StatusCode)
+
+		ctx := testutil.Context(t, testutil.WaitLong)
+
+		// nolint:gocritic // Unit test
+		count, err := db.GetUserCount(dbauthz.AsSystemRestricted(ctx))
+		require.NoError(t, err)
+		require.Equal(t, int64(1), count)
+
+		id.Store(101)
+		email.Store("someotheruser@coder.com")
+		login.Store("someotheruser")
+
+		resp = oauth2Callback(t, client)
 		require.Equal(t, http.StatusForbidden, resp.StatusCode)
 	})
 	t.Run("MultiLoginNotAllowed", func(t *testing.T) {
@@ -979,6 +1007,7 @@ func TestUserOIDC(t *testing.T) {
 		Name              string
 		IDTokenClaims     jwt.MapClaims
 		UserInfoClaims    jwt.MapClaims
+		AccessTokenClaims jwt.MapClaims
 		AllowSignups      bool
 		EmailDomain       []string
 		AssertUser        func(t testing.TB, u codersdk.User)
@@ -986,6 +1015,8 @@ func TestUserOIDC(t *testing.T) {
 		AssertResponse      func(t testing.TB, resp *http.Response)
 		IgnoreEmailVerified bool
 		IgnoreUserInfo      bool
+		UseAccessToken      bool
+		PrecreateFirstUser  bool
 	}{
 		{
 			Name: "NoSub",
@@ -995,6 +1026,32 @@ func TestUserOIDC(t *testing.T) {
 			AllowSignups: true,
 			StatusCode:   http.StatusBadRequest,
 		},
+		{
+			Name: "AccessTokenMerge",
+			IDTokenClaims: jwt.MapClaims{
+				"sub": uuid.NewString(),
+			},
+			AccessTokenClaims: jwt.MapClaims{
+				"email": "kyle@kwc.io",
+			},
+			IgnoreUserInfo: true,
+			AllowSignups:   true,
+			UseAccessToken: true,
+			StatusCode:     http.StatusOK,
+			AssertUser: func(t testing.TB, u codersdk.User) {
+				assert.Equal(t, "kyle@kwc.io", u.Email)
+			},
+		},
+		{
+			Name: "AccessTokenMergeNotJWT",
+			IDTokenClaims: jwt.MapClaims{
+				"sub": uuid.NewString(),
+			},
+			IgnoreUserInfo: true,
+			AllowSignups:   true,
+			UseAccessToken: true,
+			StatusCode:     http.StatusBadRequest,
+		},
 		{
 			Name: "EmailOnly",
 			IDTokenClaims: jwt.MapClaims{
@@ -1123,6 +1180,16 @@ func TestUserOIDC(t *testing.T) {
 				"sub": uuid.NewString(),
 			},
 			StatusCode: http.StatusForbidden,
+			PrecreateFirstUser: true,
+		},
+		{
+			Name: "FirstSignup",
+			IDTokenClaims: jwt.MapClaims{
+				"email":          "kyle@kwc.io",
+				"email_verified": true,
+				"sub":            uuid.NewString(),
+			},
+			StatusCode: http.StatusOK,
 		},
 		{
 			Name: "UsernameFromEmail",
@@ -1377,18 +1444,32 @@ func TestUserOIDC(t *testing.T) {
 		tc := tc
 		t.Run(tc.Name, func(t *testing.T) {
 			t.Parallel()
-			fake := oidctest.NewFakeIDP(t,
+			opts := []oidctest.FakeIDPOpt{
 				oidctest.WithRefresh(func(_ string) error {
 					return xerrors.New("refreshing token should never occur")
 				}),
 				oidctest.WithServing(),
 				oidctest.WithStaticUserInfo(tc.UserInfoClaims),
-			)
+			}
+
+			if tc.AccessTokenClaims != nil && len(tc.AccessTokenClaims) > 0 {
+				opts = append(opts, oidctest.WithAccessTokenJWTHook(func(email string, exp time.Time) jwt.MapClaims {
+					return tc.AccessTokenClaims
+				}))
+			}
+
+			fake := oidctest.NewFakeIDP(t, opts...)
 			cfg := fake.OIDCConfig(t, nil, func(cfg *coderd.OIDCConfig) {
 				cfg.AllowSignups = tc.AllowSignups
 				cfg.EmailDomain = tc.EmailDomain
 				cfg.IgnoreEmailVerified = tc.IgnoreEmailVerified
-				cfg.IgnoreUserInfo = tc.IgnoreUserInfo
+				cfg.SecondaryClaims = coderd.MergedClaimsSourceUserInfo
+				if tc.IgnoreUserInfo {
+					cfg.SecondaryClaims = coderd.MergedClaimsSourceNone
+				}
+				if tc.UseAccessToken {
+					cfg.SecondaryClaims = coderd.MergedClaimsSourceAccessToken
+				}
 				cfg.NameField = "name"
 			})
 
@@ -1401,6 +1482,15 @@ func TestUserOIDC(t *testing.T) {
 			})
 			numLogs := len(auditor.AuditLogs())
 
+			ctx := testutil.Context(t, testutil.WaitShort)
+			if tc.PrecreateFirstUser {
+				owner.CreateFirstUser(ctx, codersdk.CreateFirstUserRequest{
+					Email:    "precreated@coder.com",
+					Username: "precreated",
+					Password: "SomeSecurePassword!",
+				})
+			}
+
 			client, resp := fake.AttemptLogin(t, owner, tc.IDTokenClaims)
 			numLogs++ // add an audit log for login
 			require.Equal(t, tc.StatusCode, resp.StatusCode)
@@ -1408,8 +1498,6 @@ func TestUserOIDC(t *testing.T) {
 				tc.AssertResponse(t, resp)
 			}
 
-			ctx := testutil.Context(t, testutil.WaitShort)
-
 			if tc.AssertUser != nil {
 				user, err := client.User(ctx, "me")
 				require.NoError(t, err)
@@ -118,6 +118,8 @@ func (api *API) firstUser(rw http.ResponseWriter, r *http.Request) {
 // @Success 201 {object} codersdk.CreateFirstUserResponse
 // @Router /users/first [post]
 func (api *API) postFirstUser(rw http.ResponseWriter, r *http.Request) {
+	// The first user can also be created via oidc, so if making changes to the flow,
+	// ensure that the oidc flow is also updated.
 	ctx := r.Context()
 	var createUser codersdk.CreateFirstUserRequest
 	if !httpapi.Read(ctx, rw, r, &createUser) {
@@ -198,6 +200,7 @@ func (api *API) postFirstUser(rw http.ResponseWriter, r *http.Request) {
 			OrganizationIDs: []uuid.UUID{defaultOrg.ID},
 		},
 		LoginType:          database.LoginTypePassword,
+		RBACRoles:          []string{rbac.RoleOwner().String()},
 		accountCreatorName: "coder",
 	})
 	if err != nil {
@@ -225,23 +228,6 @@ func (api *API) postFirstUser(rw http.ResponseWriter, r *http.Request) {
 		Users: []telemetry.User{telemetryUser},
 	})
-
-	// TODO: @emyrk this currently happens outside the database tx used to create
-	// the user. Maybe I add this ability to grant roles in the createUser api
-	// and add some rbac bypass when calling api functions this way??
-	// Add the admin role to this first user.
-	//nolint:gocritic // needed to create first user
-	_, err = api.Database.UpdateUserRoles(dbauthz.AsSystemRestricted(ctx), database.UpdateUserRolesParams{
-		GrantedRoles: []string{rbac.RoleOwner().String()},
-		ID:           user.ID,
-	})
-	if err != nil {
-		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
-			Message: "Internal error updating user's roles.",
-			Detail:  err.Error(),
-		})
-		return
-	}
 
 	httpapi.Write(ctx, rw, http.StatusCreated, codersdk.CreateFirstUserResponse{
 		UserID:         user.ID,
 		OrganizationID: defaultOrg.ID,
@@ -1286,7 +1272,10 @@ func (api *API) organizationsByUser(rw http.ResponseWriter, r *http.Request) {
 	ctx := r.Context()
 	user := httpmw.UserParam(r)
 
-	organizations, err := api.Database.GetOrganizationsByUserID(ctx, user.ID)
+	organizations, err := api.Database.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{
+		UserID:  user.ID,
+		Deleted: false,
+	})
 	if errors.Is(err, sql.ErrNoRows) {
 		err = nil
 		organizations = []database.Organization{}
@@ -1324,7 +1313,10 @@ func (api *API) organizationsByUser(rw http.ResponseWriter, r *http.Request) {
 func (api *API) organizationByUserAndName(rw http.ResponseWriter, r *http.Request) {
 	ctx := r.Context()
 	organizationName := chi.URLParam(r, "organizationname")
-	organization, err := api.Database.GetOrganizationByName(ctx, organizationName)
+	organization, err := api.Database.GetOrganizationByName(ctx, database.GetOrganizationByNameParams{
+		Name:    organizationName,
+		Deleted: false,
+	})
 	if httpapi.Is404Error(err) {
 		httpapi.ResourceNotFound(rw)
 		return
@@ -1345,6 +1337,7 @@ type CreateUserRequest struct {
 	LoginType          database.LoginType
 	SkipNotifications  bool
 	accountCreatorName string
+	RBACRoles          []string
 }
 
 func (api *API) CreateUser(ctx context.Context, store database.Store, req CreateUserRequest) (database.User, error) {
@@ -1354,6 +1347,13 @@ func (api *API) CreateUser(ctx context.Context, store database.Store, req Create
 		return database.User{}, xerrors.Errorf("invalid username %q: %w", req.Username, usernameValid)
 	}
 
+	// If the caller didn't specify rbac roles, default to
+	// a member of the site.
+	rbacRoles := []string{}
+	if req.RBACRoles != nil {
+		rbacRoles = req.RBACRoles
+	}
+
 	var user database.User
 	err := store.InTx(func(tx database.Store) error {
 		orgRoles := make([]string, 0)
@@ -1370,8 +1370,7 @@ func (api *API) CreateUser(ctx context.Context, store database.Store, req Create
 			CreatedAt:      dbtime.Now(),
 			UpdatedAt:      dbtime.Now(),
 			HashedPassword: []byte{},
-			// All new users are defaulted to members of the site.
-			RBACRoles: []string{},
+			RBACRoles: rbacRoles,
 			LoginType: req.LoginType,
 			Status:    status,
 		}
@@ -1431,6 +1430,10 @@ func (api *API) CreateUser(ctx context.Context, store database.Store, req Create
 	}
 
 	for _, u := range userAdmins {
+		if u.ID == user.ID {
+			// If the new user is an admin, don't notify them about themselves.
+			continue
+		}
 		if _, err := api.NotificationsEnqueuer.EnqueueWithData(
 			// nolint:gocritic // Need notifier actor to enqueue notifications
 			dbauthz.AsNotifier(ctx),

@@ -653,6 +653,8 @@ func (s *Server) workspaceAgentPTY(rw http.ResponseWriter, r *http.Request) {
 	reconnect := parser.RequiredNotEmpty("reconnect").UUID(values, uuid.New(), "reconnect")
 	height := parser.UInt(values, 80, "height")
 	width := parser.UInt(values, 80, "width")
+	container := parser.String(values, "", "container")
+	containerUser := parser.String(values, "", "container_user")
 	if len(parser.Errors) > 0 {
 		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
 			Message: "Invalid query parameters.",
@@ -690,7 +692,10 @@ func (s *Server) workspaceAgentPTY(rw http.ResponseWriter, r *http.Request) {
 	}
 	defer release()
 	log.Debug(ctx, "dialed workspace agent")
-	ptNetConn, err := agentConn.ReconnectingPTY(ctx, reconnect, uint16(height), uint16(width), r.URL.Query().Get("command"))
+	ptNetConn, err := agentConn.ReconnectingPTY(ctx, reconnect, uint16(height), uint16(width), r.URL.Query().Get("command"), func(arp *workspacesdk.AgentReconnectingPTYInit) {
+		arp.Container = container
+		arp.ContainerUser = containerUser
+	})
 	if err != nil {
 		log.Debug(ctx, "dial reconnecting pty server in workspace agent", slog.Error(err))
 		_ = conn.Close(websocket.StatusInternalError, httpapi.WebsocketCloseSprintf("dial: %s", err))

@@ -510,6 +510,7 @@ type OAuth2GithubConfig struct {
 	ClientID              serpent.String      `json:"client_id" typescript:",notnull"`
 	ClientSecret          serpent.String      `json:"client_secret" typescript:",notnull"`
 	DeviceFlow            serpent.Bool        `json:"device_flow" typescript:",notnull"`
+	DefaultProviderEnable serpent.Bool        `json:"default_provider_enable" typescript:",notnull"`
 	AllowedOrgs           serpent.StringArray `json:"allowed_orgs" typescript:",notnull"`
 	AllowedTeams          serpent.StringArray `json:"allowed_teams" typescript:",notnull"`
 	AllowSignups          serpent.Bool        `json:"allow_signups" typescript:",notnull"`
@@ -532,7 +533,17 @@ type OIDCConfig struct {
 	NameField     serpent.String                    `json:"name_field" typescript:",notnull"`
 	EmailField    serpent.String                    `json:"email_field" typescript:",notnull"`
 	AuthURLParams serpent.Struct[map[string]string] `json:"auth_url_params" typescript:",notnull"`
+	// IgnoreUserInfo & UserInfoFromAccessToken are mutually exclusive. Only 1
+	// can be set to true. Ideally this would be an enum with 3 states, ['none',
+	// 'userinfo', 'access_token']. However, for backward compatibility,
+	// `ignore_user_info` must remain. And `access_token` is a niche, non-spec
+	// compliant edge case. So it's use is rare, and should not be advised.
 	IgnoreUserInfo serpent.Bool `json:"ignore_user_info" typescript:",notnull"`
+	// UserInfoFromAccessToken as mentioned above is an edge case. This allows
+	// sourcing the user_info from the access token itself instead of a user_info
+	// endpoint. This assumes the access token is a valid JWT with a set of claims to
+	// be merged with the id_token.
+	UserInfoFromAccessToken serpent.Bool `json:"source_user_info_from_access_token" typescript:",notnull"`
 	OrganizationField         serpent.String                         `json:"organization_field" typescript:",notnull"`
 	OrganizationMapping       serpent.Struct[map[string][]uuid.UUID] `json:"organization_mapping" typescript:",notnull"`
 	OrganizationAssignDefault serpent.Bool                           `json:"organization_assign_default" typescript:",notnull"`
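The two booleans above describe three usable states. How they collapse into the internal `MergedClaimsSource` happens in server start-up code that is not part of this diff, so the mapping below is an assumption, sketched for illustration only.

```go
package main

import "fmt"

type mergedClaimsSource string

const (
	sourceNone        mergedClaimsSource = "none"
	sourceUserInfo    mergedClaimsSource = "user_info"
	sourceAccessToken mergedClaimsSource = "access_token"
)

// secondaryClaimsSource sketches the documented intent: access-token sourcing
// is the niche override, ignore_user_info disables the userinfo endpoint, and
// the default remains the spec-compliant userinfo merge. Invalid combinations
// are presumably rejected at start-up, which is not modeled here.
func secondaryClaimsSource(ignoreUserInfo, userInfoFromAccessToken bool) mergedClaimsSource {
	switch {
	case userInfoFromAccessToken:
		return sourceAccessToken
	case ignoreUserInfo:
		return sourceNone
	default:
		return sourceUserInfo
	}
}

func main() {
	fmt.Println(secondaryClaimsSource(false, false)) // user_info
	fmt.Println(secondaryClaimsSource(true, false))  // none
	fmt.Println(secondaryClaimsSource(true, true))   // access_token
}
```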
@@ -1596,6 +1607,16 @@ func (c *DeploymentValues) Options() serpent.OptionSet {
 			YAML:    "deviceFlow",
 			Default: "false",
 		},
+		{
+			Name:        "OAuth2 GitHub Default Provider Enable",
+			Description: "Enable the default GitHub OAuth2 provider managed by Coder.",
+			Flag:        "oauth2-github-default-provider-enable",
+			Env:         "CODER_OAUTH2_GITHUB_DEFAULT_PROVIDER_ENABLE",
+			Value:       &c.OAuth2.Github.DefaultProviderEnable,
+			Group:       &deploymentGroupOAuth2GitHub,
+			YAML:        "defaultProviderEnable",
+			Default:     "true",
+		},
 		{
 			Name:        "OAuth2 GitHub Allowed Orgs",
 			Description: "Organizations the user must be a member of to Login with GitHub.",
@@ -1777,6 +1798,23 @@ func (c *DeploymentValues) Options() serpent.OptionSet {
 			Group: &deploymentGroupOIDC,
 			YAML:  "ignoreUserInfo",
 		},
+		{
+			Name: "OIDC Access Token Claims",
+			// This is a niche edge case that should not be advertised. Alternatives should
+			// be investigated before turning this on. A properly configured IdP should
+			// always have a userinfo endpoint which is preferred.
+			Hidden: true,
+			Description: "Source supplemental user claims from the 'access_token'. This assumes the " +
+				"token is a jwt signed by the same issuer as the id_token. Using this requires setting " +
+				"'oidc-ignore-userinfo' to true. This setting is not compliant with the OIDC specification " +
+				"and is not recommended. Use at your own risk.",
+			Flag:    "oidc-access-token-claims",
+			Env:     "CODER_OIDC_ACCESS_TOKEN_CLAIMS",
+			Default: "false",
+			Value:   &c.OIDC.UserInfoFromAccessToken,
+			Group:   &deploymentGroupOIDC,
+			YAML:    "accessTokenClaims",
+		},
 		{
 			Name: "OIDC Organization Field",
 			Description: "This field must be set if using the organization sync feature." +
@@ -28,7 +28,6 @@ const (
 	ResourceOrganizationMember RBACResource = "organization_member"
 	ResourceProvisionerDaemon  RBACResource = "provisioner_daemon"
 	ResourceProvisionerJobs    RBACResource = "provisioner_jobs"
-	ResourceProvisionerKeys    RBACResource = "provisioner_keys"
 	ResourceReplicas           RBACResource = "replicas"
 	ResourceSystem             RBACResource = "system"
 	ResourceTailnetCoordinator RBACResource = "tailnet_coordinator"
@@ -85,7 +84,6 @@ var RBACResourceActions = map[RBACResource][]RBACAction{
 	ResourceOrganizationMember: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
 	ResourceProvisionerDaemon:  {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
 	ResourceProvisionerJobs:    {ActionRead},
-	ResourceProvisionerKeys:    {ActionCreate, ActionDelete, ActionRead},
 	ResourceReplicas:           {ActionRead},
 	ResourceSystem:             {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
 	ResourceTailnetCoordinator: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
@@ -277,7 +277,7 @@ type OAuthConversionResponse struct {
 type AuthMethods struct {
 	TermsOfServiceURL string           `json:"terms_of_service_url,omitempty"`
 	Password          AuthMethod       `json:"password"`
-	Github            AuthMethod       `json:"github"`
+	Github            GithubAuthMethod `json:"github"`
 	OIDC              OIDCAuthMethod   `json:"oidc"`
 }
 
@@ -289,6 +289,11 @@ type UserLoginType struct {
 	LoginType LoginType `json:"login_type"`
 }
 
+type GithubAuthMethod struct {
+	Enabled                   bool `json:"enabled"`
+	DefaultProviderConfigured bool `json:"default_provider_configured"`
+}
+
 type OIDCAuthMethod struct {
 	AuthMethod
 	SignInText string `json:"signInText"`
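`default_provider_configured` lets a client distinguish a deployment-specific GitHub OAuth2 app from the Coder-managed default one. A hedged sketch of reading it through the existing `AuthMethods` client call (the deployment URL is a placeholder):

```go
package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/coder/coder/v2/codersdk"
)

func main() {
	// Placeholder deployment URL for illustration.
	u, err := url.Parse("https://coder.example.com")
	if err != nil {
		log.Fatal(err)
	}
	client := codersdk.New(u)

	methods, err := client.AuthMethods(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	if methods.Github.Enabled && methods.Github.DefaultProviderConfigured {
		fmt.Println("GitHub login is backed by the Coder-managed default OAuth2 app")
	}
}
```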
@@ -93,6 +93,24 @@ type AgentReconnectingPTYInit struct {
 	Height  uint16
 	Width   uint16
 	Command string
+	// Container, if set, will attempt to exec into a running container visible to the agent.
+	// This should be a unique container ID (implementation-dependent).
+	Container string
+	// ContainerUser, if set, will set the target user when execing into a container.
+	// This can be a username or UID, depending on the underlying implementation.
+	// This is ignored if Container is not set.
+	ContainerUser string
+}
+
+// AgentReconnectingPTYInitOption is a functional option for AgentReconnectingPTYInit.
+type AgentReconnectingPTYInitOption func(*AgentReconnectingPTYInit)
+
+// AgentReconnectingPTYInitWithContainer sets the container and container user for the reconnecting PTY session.
+func AgentReconnectingPTYInitWithContainer(container, containerUser string) AgentReconnectingPTYInitOption {
+	return func(init *AgentReconnectingPTYInit) {
+		init.Container = container
+		init.ContainerUser = containerUser
+	}
 }
 
 // ReconnectingPTYRequest is sent from the client to the server
|
|||||||
// ReconnectingPTY spawns a new reconnecting terminal session.
|
// ReconnectingPTY spawns a new reconnecting terminal session.
|
||||||
// `ReconnectingPTYRequest` should be JSON marshaled and written to the returned net.Conn.
|
// `ReconnectingPTYRequest` should be JSON marshaled and written to the returned net.Conn.
|
||||||
// Raw terminal output will be read from the returned net.Conn.
|
// Raw terminal output will be read from the returned net.Conn.
|
||||||
func (c *AgentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, width uint16, command string) (net.Conn, error) {
|
func (c *AgentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, width uint16, command string, initOpts ...AgentReconnectingPTYInitOption) (net.Conn, error) {
|
||||||
ctx, span := tracing.StartSpan(ctx)
|
ctx, span := tracing.StartSpan(ctx)
|
||||||
defer span.End()
|
defer span.End()
|
||||||
|
|
||||||
@@ -119,12 +137,16 @@ func (c *AgentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, w
 	if err != nil {
 		return nil, err
 	}
-	data, err := json.Marshal(AgentReconnectingPTYInit{
+	rptyInit := AgentReconnectingPTYInit{
 		ID:      id,
 		Height:  height,
 		Width:   width,
 		Command: command,
-	})
+	}
+	for _, o := range initOpts {
+		o(&rptyInit)
+	}
+	data, err := json.Marshal(rptyInit)
 	if err != nil {
 		_ = conn.Close()
 		return nil, err
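With the option plumbing above, callers that hold an `AgentConn` can target a container without touching the JSON encoding themselves. A hedged usage sketch: the connected `AgentConn` is assumed to come from the usual dialing path, and the container ID and user are placeholders.

```go
package example

import (
	"context"
	"net"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/codersdk/workspacesdk"
)

// openContainerPTY starts a reconnecting PTY inside a container visible to the
// agent. "my-container-id" and "root" are placeholder values.
func openContainerPTY(ctx context.Context, conn *workspacesdk.AgentConn) (net.Conn, error) {
	return conn.ReconnectingPTY(ctx, uuid.New(), 24, 80, "/bin/sh",
		workspacesdk.AgentReconnectingPTYInitWithContainer("my-container-id", "root"),
	)
}
```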
@@ -12,12 +12,14 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/google/uuid"
-	"golang.org/x/xerrors"
 	"tailscale.com/tailcfg"
 	"tailscale.com/wgengine/capture"
 
+	"github.com/google/uuid"
+	"golang.org/x/xerrors"
+
 	"cdr.dev/slog"
 
 	"github.com/coder/coder/v2/codersdk"
 	"github.com/coder/coder/v2/tailnet"
 	"github.com/coder/coder/v2/tailnet/proto"
@@ -305,6 +307,16 @@ type WorkspaceAgentReconnectingPTYOpts struct {
 	// issue-reconnecting-pty-signed-token endpoint. If set, the session token
 	// on the client will not be sent.
 	SignedToken string
+
+	// Experimental: Container, if set, will attempt to exec into a running container
+	// visible to the agent. This should be a unique container ID
+	// (implementation-dependent).
+	// ContainerUser is the user as which to exec into the container.
+	// NOTE: This feature is currently experimental and is currently "opt-in".
+	// In order to use this feature, the agent must have the environment variable
+	// CODER_AGENT_DEVCONTAINERS_ENABLE set to "true".
+	Container     string
+	ContainerUser string
 }
 
 // AgentReconnectingPTY spawns a PTY that reconnects using the token provided.
@@ -320,6 +332,12 @@ func (c *Client) AgentReconnectingPTY(ctx context.Context, opts WorkspaceAgentRe
 	q.Set("width", strconv.Itoa(int(opts.Width)))
 	q.Set("height", strconv.Itoa(int(opts.Height)))
 	q.Set("command", opts.Command)
+	if opts.Container != "" {
+		q.Set("container", opts.Container)
+	}
+	if opts.ContainerUser != "" {
+		q.Set("container_user", opts.ContainerUser)
+	}
 	// If we're using a signed token, set the query parameter.
 	if opts.SignedToken != "" {
 		q.Set(codersdk.SignedAppTokenQueryParameter, opts.SignedToken)
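The same container parameters are available on the server-proxied path through `WorkspaceAgentReconnectingPTYOpts`. A hedged sketch follows; fields not shown in this hunk, such as `AgentID` and `Reconnect`, are assumed from the surrounding struct, the IDs and container name are placeholders, and the agent must run with `CODER_AGENT_DEVCONTAINERS_ENABLE=true`.

```go
package example

import (
	"context"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/codersdk/workspacesdk"
)

// attachContainerTerminal opens a reconnecting PTY in a container via the
// workspacesdk client. Error handling beyond closing the connection is elided.
func attachContainerTerminal(ctx context.Context, client *workspacesdk.Client, agentID uuid.UUID) error {
	conn, err := client.AgentReconnectingPTY(ctx, workspacesdk.WorkspaceAgentReconnectingPTYOpts{
		AgentID:   agentID,
		Reconnect: uuid.New(),
		Width:     80,
		Height:    24,
		Command:   "/bin/sh",
		// New in this change: exec into a running container visible to the agent.
		Container:     "my-container-id",
		ContainerUser: "coder",
	})
	if err != nil {
		return err
	}
	return conn.Close()
}
```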
@@ -159,17 +159,17 @@ Database migrations are managed with
 To add new migrations, use the following command:
 
 ```shell
-./coderd/database/migrations/create_fixture.sh my name
+./coderd/database/migrations/create_migration.sh my name
 /home/coder/src/coder/coderd/database/migrations/000070_my_name.up.sql
 /home/coder/src/coder/coderd/database/migrations/000070_my_name.down.sql
 ```
 
-Run "make gen" to generate models.
-
 Then write queries into the generated `.up.sql` and `.down.sql` files and commit
 them into the repository. The down script should make a best-effort to retain as
 much data as possible.
+
+Run `make gen` to generate models.
 
 #### Database fixtures (for testing migrations)
 
 There are two types of fixtures that are used to test that migrations don't
@ -23,7 +23,7 @@ We track the following resources:
|
|||||||
| NotificationsSettings<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody> | <tr><td>id</td><td>false</td></tr><tr><td>notifier_paused</td><td>true</td></tr></tbody></table> |
|
| NotificationsSettings<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody> | <tr><td>id</td><td>false</td></tr><tr><td>notifier_paused</td><td>true</td></tr></tbody></table> |
|
||||||
| OAuth2ProviderApp<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody> | <tr><td>callback_url</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>name</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr></tbody></table> |
|
| OAuth2ProviderApp<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody> | <tr><td>callback_url</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>name</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr></tbody></table> |
|
||||||
| OAuth2ProviderAppSecret<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody> | <tr><td>app_id</td><td>false</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>display_secret</td><td>false</td></tr><tr><td>hashed_secret</td><td>false</td></tr><tr><td>id</td><td>false</td></tr><tr><td>last_used_at</td><td>false</td></tr><tr><td>secret_prefix</td><td>false</td></tr></tbody></table> |
|
 | OAuth2ProviderAppSecret<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>app_id</td><td>false</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>display_secret</td><td>false</td></tr><tr><td>hashed_secret</td><td>false</td></tr><tr><td>id</td><td>false</td></tr><tr><td>last_used_at</td><td>false</td></tr><tr><td>secret_prefix</td><td>false</td></tr></tbody></table> |
-| Organization<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>created_at</td><td>false</td></tr><tr><td>description</td><td>true</td></tr><tr><td>display_name</td><td>true</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>is_default</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>updated_at</td><td>true</td></tr></tbody></table> |
+| Organization<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>created_at</td><td>false</td></tr><tr><td>deleted</td><td>true</td></tr><tr><td>description</td><td>true</td></tr><tr><td>display_name</td><td>true</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>is_default</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>updated_at</td><td>true</td></tr></tbody></table> |
 | OrganizationSyncSettings<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>assign_default</td><td>true</td></tr><tr><td>field</td><td>true</td></tr><tr><td>mapping</td><td>true</td></tr></tbody></table> |
 | RoleSyncSettings<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>field</td><td>true</td></tr><tr><td>mapping</td><td>true</td></tr></tbody></table> |
 | Template<br><i>write, delete</i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>active_version_id</td><td>true</td></tr><tr><td>activity_bump</td><td>true</td></tr><tr><td>allow_user_autostart</td><td>true</td></tr><tr><td>allow_user_autostop</td><td>true</td></tr><tr><td>allow_user_cancel_workspace_jobs</td><td>true</td></tr><tr><td>autostart_block_days_of_week</td><td>true</td></tr><tr><td>autostop_requirement_days_of_week</td><td>true</td></tr><tr><td>autostop_requirement_weeks</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>created_by</td><td>true</td></tr><tr><td>created_by_avatar_url</td><td>false</td></tr><tr><td>created_by_username</td><td>false</td></tr><tr><td>default_ttl</td><td>true</td></tr><tr><td>deleted</td><td>false</td></tr><tr><td>deprecated</td><td>true</td></tr><tr><td>description</td><td>true</td></tr><tr><td>display_name</td><td>true</td></tr><tr><td>failure_ttl</td><td>true</td></tr><tr><td>group_acl</td><td>true</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>true</td></tr><tr><td>max_port_sharing_level</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>organization_display_name</td><td>false</td></tr><tr><td>organization_icon</td><td>false</td></tr><tr><td>organization_id</td><td>false</td></tr><tr><td>organization_name</td><td>false</td></tr><tr><td>provisioner</td><td>true</td></tr><tr><td>require_active_version</td><td>true</td></tr><tr><td>time_til_dormant</td><td>true</td></tr><tr><td>time_til_dormant_autodelete</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr><tr><td>user_acl</td><td>true</td></tr></tbody></table> |
(binary image changed, not shown; 47 KiB before, 72 KiB after)
docs/reference/api/general.md (generated, 2 changes)

@@ -328,6 +328,7 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \
 ],
 "client_id": "string",
 "client_secret": "string",
+"default_provider_enable": true,
 "device_flow": true,
 "enterprise_base_url": "string"
 }

@@ -376,6 +377,7 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \
 "sign_in_text": "string",
 "signups_disabled_text": "string",
 "skip_issuer_checks": true,
+"source_user_info_from_access_token": true,
 "user_role_field": "string",
 "user_role_mapping": {},
 "user_roles_default": [
docs/reference/api/members.md (generated, 5 changes)

@@ -203,7 +203,6 @@ Status Code **200**
 | `resource_type` | `organization_member` |
 | `resource_type` | `provisioner_daemon` |
 | `resource_type` | `provisioner_jobs` |
-| `resource_type` | `provisioner_keys` |
 | `resource_type` | `replicas` |
 | `resource_type` | `system` |
 | `resource_type` | `tailnet_coordinator` |

@@ -366,7 +365,6 @@ Status Code **200**
 | `resource_type` | `organization_member` |
 | `resource_type` | `provisioner_daemon` |
 | `resource_type` | `provisioner_jobs` |
-| `resource_type` | `provisioner_keys` |
 | `resource_type` | `replicas` |
 | `resource_type` | `system` |
 | `resource_type` | `tailnet_coordinator` |

@@ -529,7 +527,6 @@ Status Code **200**
 | `resource_type` | `organization_member` |
 | `resource_type` | `provisioner_daemon` |
 | `resource_type` | `provisioner_jobs` |
-| `resource_type` | `provisioner_keys` |
 | `resource_type` | `replicas` |
 | `resource_type` | `system` |
 | `resource_type` | `tailnet_coordinator` |

@@ -661,7 +658,6 @@ Status Code **200**
 | `resource_type` | `organization_member` |
 | `resource_type` | `provisioner_daemon` |
 | `resource_type` | `provisioner_jobs` |
-| `resource_type` | `provisioner_keys` |
 | `resource_type` | `replicas` |
 | `resource_type` | `system` |
 | `resource_type` | `tailnet_coordinator` |

@@ -925,7 +921,6 @@ Status Code **200**
 | `resource_type` | `organization_member` |
 | `resource_type` | `provisioner_daemon` |
 | `resource_type` | `provisioner_jobs` |
-| `resource_type` | `provisioner_keys` |
 | `resource_type` | `replicas` |
 | `resource_type` | `system` |
 | `resource_type` | `tailnet_coordinator` |
docs/reference/api/schemas.md (generated, 37 changes)

@@ -787,6 +787,7 @@
 ```json
 {
 "github": {
+"default_provider_configured": true,
 "enabled": true
 },
 "oidc": {

@@ -804,8 +805,8 @@
 ### Properties

 | Name | Type | Required | Restrictions | Description |
-|------------------------|----------------------------------------------------|----------|--------------|-------------|
-| `github` | [codersdk.AuthMethod](#codersdkauthmethod) | false | | |
+|------------------------|--------------------------------------------------------|----------|--------------|-------------|
+| `github` | [codersdk.GithubAuthMethod](#codersdkgithubauthmethod) | false | | |
 | `oidc` | [codersdk.OIDCAuthMethod](#codersdkoidcauthmethod) | false | | |
 | `password` | [codersdk.AuthMethod](#codersdkauthmethod) | false | | |
 | `terms_of_service_url` | string | false | | |

@@ -1981,6 +1982,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
 ],
 "client_id": "string",
 "client_secret": "string",
+"default_provider_enable": true,
 "device_flow": true,
 "enterprise_base_url": "string"
 }

@@ -2029,6 +2031,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
 "sign_in_text": "string",
 "signups_disabled_text": "string",
 "skip_issuer_checks": true,
+"source_user_info_from_access_token": true,
 "user_role_field": "string",
 "user_role_mapping": {},
 "user_roles_default": [

@@ -2452,6 +2455,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
 ],
 "client_id": "string",
 "client_secret": "string",
+"default_provider_enable": true,
 "device_flow": true,
 "enterprise_base_url": "string"
 }

@@ -2500,6 +2504,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
 "sign_in_text": "string",
 "signups_disabled_text": "string",
 "skip_issuer_checks": true,
+"source_user_info_from_access_token": true,
 "user_role_field": "string",
 "user_role_mapping": {},
 "user_roles_default": [

@@ -3104,6 +3109,22 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 | `updated_at` | string | false | | |
 | `user_id` | string | false | | |

+## codersdk.GithubAuthMethod
+
+```json
+{
+"default_provider_configured": true,
+"enabled": true
+}
+```
+
+### Properties
+
+| Name | Type | Required | Restrictions | Description |
+|-------------------------------|---------|----------|--------------|-------------|
+| `default_provider_configured` | boolean | false | | |
+| `enabled` | boolean | false | | |
+
 ## codersdk.Group

 ```json

@@ -3810,6 +3831,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 ],
 "client_id": "string",
 "client_secret": "string",
+"default_provider_enable": true,
 "device_flow": true,
 "enterprise_base_url": "string"
 }

@@ -3836,6 +3858,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 ],
 "client_id": "string",
 "client_secret": "string",
+"default_provider_enable": true,
 "device_flow": true,
 "enterprise_base_url": "string"
 }

@@ -3844,13 +3867,14 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 ### Properties

 | Name | Type | Required | Restrictions | Description |
-|-----------------------|-----------------|----------|--------------|-------------|
+|---------------------------|-----------------|----------|--------------|-------------|
 | `allow_everyone` | boolean | false | | |
 | `allow_signups` | boolean | false | | |
 | `allowed_orgs` | array of string | false | | |
 | `allowed_teams` | array of string | false | | |
 | `client_id` | string | false | | |
 | `client_secret` | string | false | | |
+| `default_provider_enable` | boolean | false | | |
 | `device_flow` | boolean | false | | |
 | `enterprise_base_url` | string | false | | |

@@ -3999,6 +4023,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 "sign_in_text": "string",
 "signups_disabled_text": "string",
 "skip_issuer_checks": true,
+"source_user_info_from_access_token": true,
 "user_role_field": "string",
 "user_role_mapping": {},
 "user_roles_default": [

@@ -4011,7 +4036,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 ### Properties

 | Name | Type | Required | Restrictions | Description |
-|-------------------------------|----------------------------------|----------|--------------|----------------------------------------------------------------------------------|
+|--------------------------------------|----------------------------------|----------|--------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | `allow_signups` | boolean | false | | |
 | `auth_url_params` | object | false | | |
 | `client_cert_file` | string | false | | |

@@ -4027,7 +4052,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 | `groups_field` | string | false | | |
 | `icon_url` | [serpent.URL](#serpenturl) | false | | |
 | `ignore_email_verified` | boolean | false | | |
-| `ignore_user_info` | boolean | false | | |
+| `ignore_user_info` | boolean | false | | Ignore user info & UserInfoFromAccessToken are mutually exclusive. Only 1 can be set to true. Ideally this would be an enum with 3 states, ['none', 'userinfo', 'access_token']. However, for backward compatibility, `ignore_user_info` must remain. And `access_token` is a niche, non-spec compliant edge case. So it's use is rare, and should not be advised. |
 | `issuer_url` | string | false | | |
 | `name_field` | string | false | | |
 | `organization_assign_default` | boolean | false | | |

@@ -4037,6 +4062,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 | `sign_in_text` | string | false | | |
 | `signups_disabled_text` | string | false | | |
 | `skip_issuer_checks` | boolean | false | | |
+| `source_user_info_from_access_token` | boolean | false | | Source user info from access token as mentioned above is an edge case. This allows sourcing the user_info from the access token itself instead of a user_info endpoint. This assumes the access token is a valid JWT with a set of claims to be merged with the id_token. |
 | `user_role_field` | string | false | | |
 | `user_role_mapping` | object | false | | |
 | `user_roles_default` | array of string | false | | |

@@ -5126,7 +5152,6 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
 | `organization_member` |
 | `provisioner_daemon` |
 | `provisioner_jobs` |
-| `provisioner_keys` |
 | `replicas` |
 | `system` |
 | `tailnet_coordinator` |
docs/reference/api/users.md (generated, 1 change)

@@ -159,6 +159,7 @@ curl -X GET http://coder-server:8080/api/v2/users/authmethods \
 ```json
 {
 "github": {
+"default_provider_configured": true,
 "enabled": true
 },
 "oidc": {
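The new `default_provider_configured` field lets a client tell whether GitHub login is backed by Coder's built-in OAuth2 app or by an explicitly configured one. A minimal TypeScript sketch of consuming the endpoint documented above (the field names mirror the generated docs; the fetch wrapper and message strings are illustrative assumptions, not part of this change):

```ts
// Sketch only: the types mirror the documented /api/v2/users/authmethods
// response; the surrounding logic is hypothetical.
interface GithubAuthMethod {
  enabled: boolean;
  default_provider_configured: boolean;
}

interface AuthMethodsResponse {
  github: GithubAuthMethod;
}

export async function describeGithubLogin(baseUrl: string): Promise<string> {
  const res = await fetch(`${baseUrl}/api/v2/users/authmethods`);
  const methods = (await res.json()) as AuthMethodsResponse;
  if (!methods.github.enabled) {
    return "GitHub login is disabled";
  }
  // default_provider_configured distinguishes the default provider managed
  // by Coder from an explicitly configured GitHub OAuth2 app.
  return methods.github.default_provider_configured
    ? "GitHub login uses Coder's default OAuth2 provider"
    : "GitHub login uses a custom OAuth2 app";
}
```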
docs/reference/cli/server.md (generated, 11 changes)

@@ -373,6 +373,17 @@ Client secret for Login with GitHub.

 Enable device flow for Login with GitHub.

+### --oauth2-github-default-provider-enable
+
+| | |
+|-------------|-----------------------------------------------------------|
+| Type | <code>bool</code> |
+| Environment | <code>$CODER_OAUTH2_GITHUB_DEFAULT_PROVIDER_ENABLE</code> |
+| YAML | <code>oauth2.github.defaultProviderEnable</code> |
+| Default | <code>true</code> |
+
+Enable the default GitHub OAuth2 provider managed by Coder.
+
 ### --oauth2-github-allowed-orgs

 | | |
@@ -82,18 +82,22 @@ persistent environment from your main device, a tablet, or your phone.

 ## Configure Coder with a new Workspace

-1. If you're running Coder locally, go to <http://0.0.0.0:3001/setup>.
+1. Coder will attempt to open the setup page in your browser. If it doesn't open
+   automatically, go to <http://localhost:3000>.
+
    - If you get a browser warning similar to `Secure Site Not Available`, you
      can ignore the warning and continue to the setup page.

-   If your Coder server is on a network or cloud device, locate the message in
-   your terminal that reads,
-   `View the Web UI: https://<CUSTOM-STRING>.<TUNNEL>.try.coder.app`. The server
-   begins to stream logs immediately and you might have to scroll up to find it.
+   If your Coder server is on a network or cloud device, or you are having
+   trouble viewing the page, locate the web UI URL in Coder logs in your
+   terminal. It looks like `https://<CUSTOM-STRING>.<TUNNEL>.try.coder.app`.
+   It's one of the first lines of output, so you might have to scroll up to find
+   it.

-1. On the **Welcome to Coder** page, enter the information to create an admin
-   user, then select **Create account**.
+1. On the **Welcome to Coder** page, to use your GitHub account to log in,
+   select **Continue with GitHub**.
+   You can also enter an email and password to create a new admin account on
+   the Coder deployment:

    _Welcome
    to Coder - Create admin user_
@@ -160,6 +160,7 @@ RUN apt-get update --quiet && apt-get install --yes \
 kubectl \
 language-pack-en \
 less \
+libgbm-dev \
 libssl-dev \
 lsb-release \
 man \
@@ -91,14 +91,22 @@ data "coder_parameter" "res_mon_memory_threshold" {
   default     = 80
   description = "The memory usage threshold used in resources monitoring to trigger notifications."
   mutable     = true
+  validation {
+    min = 0
+    max = 100
+  }
 }

 data "coder_parameter" "res_mon_volume_threshold" {
   type        = "number"
   name        = "Volume usage threshold"
-  default     = 80
+  default     = 90
   description = "The volume usage threshold used in resources monitoring to trigger notifications."
   mutable     = true
+  validation {
+    min = 0
+    max = 100
+  }
 }

 data "coder_parameter" "res_mon_volume_path" {

@@ -342,6 +350,7 @@ resource "coder_agent" "dev" {
     while ! [[ -f "${local.repo_dir}/site/package.json" ]]; do
       sleep 1
     done
+    cd "${local.repo_dir}" && make clean
     cd "${local.repo_dir}/site" && pnpm install && pnpm playwright:install
   EOT
 }
@@ -276,6 +276,7 @@ var auditableResourcesTypes = map[any]map[string]Action{
 		"id":          ActionIgnore,
 		"name":        ActionTrack,
 		"description": ActionTrack,
+		"deleted":     ActionTrack,
 		"created_at":  ActionIgnore,
 		"updated_at":  ActionTrack,
 		"is_default":  ActionTrack,
@@ -499,6 +499,9 @@ OAUTH2 / GITHUB OPTIONS:
      --oauth2-github-client-secret string, $CODER_OAUTH2_GITHUB_CLIENT_SECRET
           Client secret for Login with GitHub.

+     --oauth2-github-default-provider-enable bool, $CODER_OAUTH2_GITHUB_DEFAULT_PROVIDER_ENABLE (default: true)
+          Enable the default GitHub OAuth2 provider managed by Coder.
+
      --oauth2-github-device-flow bool, $CODER_OAUTH2_GITHUB_DEVICE_FLOW (default: false)
           Enable device flow for Login with GitHub.

@@ -75,10 +75,6 @@ func TestEnterpriseAuditLogs(t *testing.T) {
 		require.Equal(t, int64(1), alogs.Count)
 		require.Len(t, alogs.AuditLogs, 1)

-		require.Equal(t, &codersdk.MinimalOrganization{
-			ID: o.ID,
-		}, alogs.AuditLogs[0].Organization)
-
 		// OrganizationID is deprecated, but make sure it is set.
 		require.Equal(t, o.ID, alogs.AuditLogs[0].OrganizationID)

@@ -440,7 +440,10 @@ func (api *API) groups(rw http.ResponseWriter, r *http.Request) {
 	parser := httpapi.NewQueryParamParser()
 	// Organization selector can be an org ID or name
 	filter.OrganizationID = parser.UUIDorName(r.URL.Query(), uuid.Nil, "organization", func(orgName string) (uuid.UUID, error) {
-		org, err := api.Database.GetOrganizationByName(ctx, orgName)
+		org, err := api.Database.GetOrganizationByName(ctx, database.GetOrganizationByNameParams{
+			Name:    orgName,
+			Deleted: false,
+		})
 		if err != nil {
 			return uuid.Nil, xerrors.Errorf("organization %q not found", orgName)
 		}
@@ -150,7 +150,16 @@ func (api *API) deleteOrganization(rw http.ResponseWriter, r *http.Request) {
 		return
 	}

-	err := api.Database.DeleteOrganization(ctx, organization.ID)
+	err := api.Database.InTx(func(tx database.Store) error {
+		err := tx.UpdateOrganizationDeletedByID(ctx, database.UpdateOrganizationDeletedByIDParams{
+			ID:        organization.ID,
+			UpdatedAt: dbtime.Now(),
+		})
+		if err != nil {
+			return xerrors.Errorf("delete organization: %w", err)
+		}
+		return nil
+	}, nil)
 	if err != nil {
 		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
 			Message: "Internal error deleting organization.",

@@ -204,7 +213,10 @@ func (api *API) postOrganizations(rw http.ResponseWriter, r *http.Request) {
 		return
 	}

-	_, err := api.Database.GetOrganizationByName(ctx, req.Name)
+	_, err := api.Database.GetOrganizationByName(ctx, database.GetOrganizationByNameParams{
+		Name:    req.Name,
+		Deleted: false,
+	})
 	if err == nil {
 		httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{
 			Message: "Organization already exists with that name.",
@@ -11,6 +11,7 @@ import (
 	"github.com/stretchr/testify/require"

 	"cdr.dev/slog/sloggers/slogtest"
+
 	"github.com/coder/coder/v2/coderd/database"
 	"github.com/coder/coder/v2/coderd/database/dbgen"
 	"github.com/coder/coder/v2/coderd/database/dbtestutil"

@@ -20,6 +21,10 @@ import (
 func TestMetricsCollector(t *testing.T) {
 	t.Parallel()

+	if !dbtestutil.WillUsePostgres() {
+		t.Skip("this test requires postgres")
+	}
+
 	db, _ := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())

 	org := dbgen.Organization(t, db, database.Organization{})
@@ -147,9 +147,13 @@ func (api *API) putOrgRoles(rw http.ResponseWriter, r *http.Request) {
 			UUID:  organization.ID,
 			Valid: true,
 		},
-		SitePermissions: db2sdk.List(req.SitePermissions, sdkPermissionToDB),
-		OrgPermissions:  db2sdk.List(req.OrganizationPermissions, sdkPermissionToDB),
-		UserPermissions: db2sdk.List(req.UserPermissions, sdkPermissionToDB),
+		// Invalid permissions are filtered out. If this is changed
+		// to throw an error, then the story of a previously valid role
+		// now being invalid has to be addressed. Coder can change permissions,
+		// objects, and actions at any time.
+		SitePermissions: db2sdk.List(filterInvalidPermissions(req.SitePermissions), sdkPermissionToDB),
+		OrgPermissions:  db2sdk.List(filterInvalidPermissions(req.OrganizationPermissions), sdkPermissionToDB),
+		UserPermissions: db2sdk.List(filterInvalidPermissions(req.UserPermissions), sdkPermissionToDB),
 	})
 	if httpapi.Is404Error(err) {
 		httpapi.ResourceNotFound(rw)

@@ -247,6 +251,23 @@ func (api *API) deleteOrgRole(rw http.ResponseWriter, r *http.Request) {
 	httpapi.Write(ctx, rw, http.StatusNoContent, nil)
 }

+func filterInvalidPermissions(permissions []codersdk.Permission) []codersdk.Permission {
+	// Filter out any invalid permissions
+	var validPermissions []codersdk.Permission
+	for _, permission := range permissions {
+		err := rbac.Permission{
+			Negate:       permission.Negate,
+			ResourceType: string(permission.ResourceType),
+			Action:       policy.Action(permission.Action),
+		}.Valid()
+		if err != nil {
+			continue
+		}
+		validPermissions = append(validPermissions, permission)
+	}
+	return validPermissions
+}
+
 func sdkPermissionToDB(p codersdk.Permission) database.CustomRolePermission {
 	return database.CustomRolePermission{
 		Negate: p.Negate,
go.mod (2 changes)

@@ -117,7 +117,7 @@ require (
 	github.com/go-chi/cors v1.2.1
 	github.com/go-chi/httprate v0.14.1
 	github.com/go-chi/render v1.0.1
-	github.com/go-jose/go-jose/v4 v4.0.2
+	github.com/go-jose/go-jose/v4 v4.0.5
 	github.com/go-logr/logr v1.4.2
 	github.com/go-playground/validator/v10 v10.25.0
 	github.com/gofrs/flock v0.12.0
go.sum (4 changes)

@@ -365,8 +365,8 @@ github.com/go-chi/render v1.0.1 h1:4/5tis2cKaNdnv9zFLfXzcquC9HbeZgCnxGnKrltBS8=
 github.com/go-chi/render v1.0.1/go.mod h1:pq4Rr7HbnsdaeHagklXub+p6Wd16Af5l9koip1OvJns=
 github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
 github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
-github.com/go-jose/go-jose/v4 v4.0.2 h1:R3l3kkBds16bO7ZFAEEcofK0MkrAJt3jlJznWZG0nvk=
-github.com/go-jose/go-jose/v4 v4.0.2/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY=
+github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE=
+github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA=
 github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
 github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
 github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
@@ -11,6 +11,7 @@ import (
 	"time"

 	"github.com/golang-jwt/jwt/v4"
+	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"

 	"cdr.dev/slog"

@@ -88,6 +89,7 @@ func RunIDP() func(t *testing.T) {
 		// This is a static set of auth fields. Might be beneficial to make flags
 		// to allow different values here. This is only required for using the
 		// testIDP as primary auth. External auth does not ever fetch these fields.
+		"sub":                uuid.MustParse("26c6a19c-b9b8-493b-a991-88a4c3310314"),
 		"email":              "oidc_member@coder.com",
 		"preferred_username": "oidc_member",
 		"email_verified":     true,
@@ -24,16 +24,22 @@ export const users = {
 		password: defaultPassword,
 		email: "admin@coder.com",
 	},
+	templateAdmin: {
+		username: "template-admin",
+		password: defaultPassword,
+		email: "templateadmin@coder.com",
+		roles: ["Template Admin"],
+	},
 	auditor: {
 		username: "auditor",
 		password: defaultPassword,
 		email: "auditor@coder.com",
 		roles: ["Template Admin", "Auditor"],
 	},
-	user: {
-		username: "user",
+	member: {
+		username: "member",
 		password: defaultPassword,
-		email: "user@coder.com",
+		email: "member@coder.com",
 	},
 } satisfies Record<
 	string,
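With the `user` account renamed to `member` and a dedicated `templateAdmin` account added, specs can pick the least-privileged account they need. A hedged sketch of how a spec might use the renamed entry (the form labels, flow, and relative import path are assumptions for illustration; only the `users` entries themselves come from this change):

```ts
// Illustrative only: selectors and flow are assumed, not taken from the repo.
import { test } from "@playwright/test";
import { users } from "../constants";

test("sign in as an ordinary member", async ({ page }) => {
  await page.goto("/login");
  await page.getByLabel(/email/i).fill(users.member.email);
  await page.getByLabel(/password/i).fill(users.member.password);
  await page.getByRole("button", { name: /sign in/i }).click();
});
```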
@ -1,6 +1,8 @@
|
|||||||
import { type ChildProcess, exec, spawn } from "node:child_process";
|
import { type ChildProcess, exec, spawn } from "node:child_process";
|
||||||
import { randomUUID } from "node:crypto";
|
import { randomUUID } from "node:crypto";
|
||||||
|
import fs from "node:fs";
|
||||||
import net from "node:net";
|
import net from "node:net";
|
||||||
|
import * as os from "node:os";
|
||||||
import path from "node:path";
|
import path from "node:path";
|
||||||
import { Duplex } from "node:stream";
|
import { Duplex } from "node:stream";
|
||||||
import { type BrowserContext, type Page, expect, test } from "@playwright/test";
|
import { type BrowserContext, type Page, expect, test } from "@playwright/test";
|
||||||
@ -10,6 +12,7 @@ import type {
|
|||||||
WorkspaceBuildParameter,
|
WorkspaceBuildParameter,
|
||||||
} from "api/typesGenerated";
|
} from "api/typesGenerated";
|
||||||
import express from "express";
|
import express from "express";
|
||||||
|
import JSZip from "jszip";
|
||||||
import capitalize from "lodash/capitalize";
|
import capitalize from "lodash/capitalize";
|
||||||
import * as ssh from "ssh2";
|
import * as ssh from "ssh2";
|
||||||
import { TarWriter } from "utils/tar";
|
import { TarWriter } from "utils/tar";
|
||||||
@ -150,7 +153,6 @@ export const createWorkspace = async (
|
|||||||
await page.getByRole("button", { name: /create workspace/i }).click();
|
await page.getByRole("button", { name: /create workspace/i }).click();
|
||||||
|
|
||||||
const user = currentUser(page);
|
const user = currentUser(page);
|
||||||
|
|
||||||
await expectUrl(page).toHavePathName(`/@${user.username}/${name}`);
|
await expectUrl(page).toHavePathName(`/@${user.username}/${name}`);
|
||||||
|
|
||||||
await page.waitForSelector("[data-testid='build-status'] >> text=Running", {
|
await page.waitForSelector("[data-testid='build-status'] >> text=Running", {
|
||||||
@ -165,12 +167,10 @@ export const verifyParameters = async (
|
|||||||
richParameters: RichParameter[],
|
richParameters: RichParameter[],
|
||||||
expectedBuildParameters: WorkspaceBuildParameter[],
|
expectedBuildParameters: WorkspaceBuildParameter[],
|
||||||
) => {
|
) => {
|
||||||
await page.goto(`/@admin/${workspaceName}/settings/parameters`, {
|
const user = currentUser(page);
|
||||||
|
await page.goto(`/@${user.username}/${workspaceName}/settings/parameters`, {
|
||||||
waitUntil: "domcontentloaded",
|
waitUntil: "domcontentloaded",
|
||||||
});
|
});
|
||||||
await expectUrl(page).toHavePathName(
|
|
||||||
`/@admin/${workspaceName}/settings/parameters`,
|
|
||||||
);
|
|
||||||
|
|
||||||
for (const buildParameter of expectedBuildParameters) {
|
for (const buildParameter of expectedBuildParameters) {
|
||||||
const richParameter = richParameters.find(
|
const richParameter = richParameters.find(
|
||||||
@ -356,10 +356,10 @@ export const sshIntoWorkspace = async (
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const stopWorkspace = async (page: Page, workspaceName: string) => {
|
export const stopWorkspace = async (page: Page, workspaceName: string) => {
|
||||||
await page.goto(`/@admin/${workspaceName}`, {
|
const user = currentUser(page);
|
||||||
|
await page.goto(`/@${user.username}/${workspaceName}`, {
|
||||||
waitUntil: "domcontentloaded",
|
waitUntil: "domcontentloaded",
|
||||||
});
|
});
|
||||||
await expectUrl(page).toHavePathName(`/@admin/${workspaceName}`);
|
|
||||||
|
|
||||||
await page.getByTestId("workspace-stop-button").click();
|
await page.getByTestId("workspace-stop-button").click();
|
||||||
|
|
||||||
@ -375,10 +375,10 @@ export const buildWorkspaceWithParameters = async (
|
|||||||
buildParameters: WorkspaceBuildParameter[] = [],
|
buildParameters: WorkspaceBuildParameter[] = [],
|
||||||
confirm = false,
|
confirm = false,
|
||||||
) => {
|
) => {
|
||||||
await page.goto(`/@admin/${workspaceName}`, {
|
const user = currentUser(page);
|
||||||
|
await page.goto(`/@${user.username}/${workspaceName}`, {
|
||||||
waitUntil: "domcontentloaded",
|
waitUntil: "domcontentloaded",
|
||||||
});
|
});
|
||||||
await expectUrl(page).toHavePathName(`/@admin/${workspaceName}`);
|
|
||||||
|
|
||||||
await page.getByTestId("build-parameters-button").click();
|
await page.getByTestId("build-parameters-button").click();
|
||||||
|
|
||||||
@ -993,10 +993,10 @@ export const updateWorkspace = async (
|
|||||||
richParameters: RichParameter[] = [],
|
richParameters: RichParameter[] = [],
|
||||||
buildParameters: WorkspaceBuildParameter[] = [],
|
buildParameters: WorkspaceBuildParameter[] = [],
|
||||||
) => {
|
) => {
|
||||||
await page.goto(`/@admin/${workspaceName}`, {
|
const user = currentUser(page);
|
||||||
|
await page.goto(`/@${user.username}/${workspaceName}`, {
|
||||||
waitUntil: "domcontentloaded",
|
waitUntil: "domcontentloaded",
|
||||||
});
|
});
|
||||||
await expectUrl(page).toHavePathName(`/@admin/${workspaceName}`);
|
|
||||||
|
|
||||||
await page.getByTestId("workspace-update-button").click();
|
await page.getByTestId("workspace-update-button").click();
|
||||||
await page.getByTestId("confirm-button").click();
|
await page.getByTestId("confirm-button").click();
|
||||||
@ -1015,12 +1015,10 @@ export const updateWorkspaceParameters = async (
|
|||||||
richParameters: RichParameter[] = [],
|
richParameters: RichParameter[] = [],
|
||||||
buildParameters: WorkspaceBuildParameter[] = [],
|
buildParameters: WorkspaceBuildParameter[] = [],
|
||||||
) => {
|
) => {
|
||||||
await page.goto(`/@admin/${workspaceName}/settings/parameters`, {
|
const user = currentUser(page);
|
||||||
|
await page.goto(`/@${user.username}/${workspaceName}/settings/parameters`, {
|
||||||
waitUntil: "domcontentloaded",
|
waitUntil: "domcontentloaded",
|
||||||
});
|
});
|
||||||
await expectUrl(page).toHavePathName(
|
|
||||||
`/@admin/${workspaceName}/settings/parameters`,
|
|
||||||
);
|
|
||||||
|
|
||||||
await fillParameters(page, richParameters, buildParameters);
|
await fillParameters(page, richParameters, buildParameters);
|
||||||
await page.getByRole("button", { name: /submit and restart/i }).click();
|
await page.getByRole("button", { name: /submit and restart/i }).click();
|
||||||
@ -1044,11 +1042,14 @@ export async function openTerminalWindow(
|
|||||||
|
|
||||||
// Specify that the shell should be `bash`, to prevent inheriting a shell that
|
// Specify that the shell should be `bash`, to prevent inheriting a shell that
|
||||||
// isn't POSIX compatible, such as Fish.
|
// isn't POSIX compatible, such as Fish.
|
||||||
|
const user = currentUser(page);
|
||||||
const commandQuery = `?command=${encodeURIComponent("/usr/bin/env bash")}`;
|
const commandQuery = `?command=${encodeURIComponent("/usr/bin/env bash")}`;
|
||||||
await expectUrl(terminal).toHavePathName(
|
await expectUrl(terminal).toHavePathName(
|
||||||
`/@admin/${workspaceName}.${agentName}/terminal`,
|
`/@${user.username}/${workspaceName}.${agentName}/terminal`,
|
||||||
|
);
|
||||||
|
await terminal.goto(
|
||||||
|
`/@${user.username}/${workspaceName}.${agentName}/terminal${commandQuery}`,
|
||||||
);
|
);
|
||||||
await terminal.goto(`/@admin/${workspaceName}.dev/terminal${commandQuery}`);
|
|
||||||
|
|
||||||
return terminal;
|
return terminal;
|
||||||
}
|
}
|
||||||
@ -1100,7 +1101,7 @@ export async function createUser(
|
|||||||
// Give them a role
|
// Give them a role
|
||||||
await addedRow.getByLabel("Edit user roles").click();
|
await addedRow.getByLabel("Edit user roles").click();
|
||||||
for (const role of roles) {
|
for (const role of roles) {
|
||||||
await page.getByText(role, { exact: true }).click();
|
await page.getByRole("group").getByText(role, { exact: true }).click();
|
||||||
}
|
}
|
||||||
await page.mouse.click(10, 10); // close the popover by clicking outside of it
|
await page.mouse.click(10, 10); // close the popover by clicking outside of it
|
||||||
|
|
||||||
@ -1129,3 +1130,84 @@ export async function createOrganization(page: Page): Promise<{
|
|||||||
|
|
||||||
return { name, displayName, description };
|
return { name, displayName, description };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO: convert to test fixture and dispose after each test.
|
||||||
|
export async function importTemplate(
|
||||||
|
page: Page,
|
||||||
|
templateName: string,
|
||||||
|
files: string[],
|
||||||
|
orgName = defaultOrganizationName,
|
||||||
|
): Promise<string> {
|
||||||
|
// Create a ZIP from the given input files.
|
||||||
|
const tmpdir = fs.mkdtempSync(path.join(os.tmpdir(), templateName));
|
||||||
|
const templatePath = path.join(tmpdir, `${templateName}.zip`);
|
||||||
|
await createZIP(templatePath, files);
|
||||||
|
|
||||||
|
// Create new template.
|
||||||
|
await page.goto("/templates/new", { waitUntil: "domcontentloaded" });
|
||||||
|
await page.getByTestId("drop-zone").click();
|
||||||
|
|
||||||
|
// Select the template file.
|
||||||
|
const [fileChooser] = await Promise.all([
|
||||||
|
page.waitForEvent("filechooser"),
|
||||||
|
page.getByTestId("drop-zone").click(),
|
||||||
|
]);
|
||||||
|
await fileChooser.setFiles(templatePath);
|
||||||
|
|
||||||
|
// Set name and submit.
|
||||||
|
await page.locator("input[name=name]").fill(templateName);
|
||||||
|
|
||||||
|
// If the organization picker is present on the page, select the default
|
||||||
|
// organization.
|
||||||
|
const orgPicker = page.getByLabel("Belongs to *");
|
||||||
|
const organizationsEnabled = await orgPicker.isVisible();
|
||||||
|
if (organizationsEnabled) {
|
||||||
|
if (orgName !== defaultOrganizationName) {
|
||||||
|
throw new Error(
|
||||||
|
`No provisioners registered for ${orgName}, creating this template will fail`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
await orgPicker.click();
|
||||||
|
await page.getByText(orgName, { exact: true }).click();
|
||||||
|
}
|
||||||
|
|
||||||
|
await page.getByRole("button", { name: "Save" }).click();
|
||||||
|
|
||||||
|
await page.waitForURL(`/templates/${orgName}/${templateName}/files`, {
|
||||||
|
timeout: 120_000,
|
||||||
|
});
|
||||||
|
return templateName;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createZIP(
|
||||||
|
outpath: string,
|
||||||
|
inputFiles: string[],
|
||||||
|
): Promise<{ path: string; length: number }> {
|
||||||
|
const zip = new JSZip();
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
for (const file of inputFiles) {
|
||||||
|
if (!fs.existsSync(file)) {
|
||||||
|
console.warn(`${file} not found, not including in zip`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
found = true;
|
||||||
|
|
||||||
|
const contents = fs.readFileSync(file);
|
||||||
|
zip.file(path.basename(file), contents);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!found) {
|
||||||
|
throw new Error(`no files found to zip into ${outpath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
zip
|
||||||
|
.generateNodeStream({ type: "nodebuffer", streamFiles: true })
|
||||||
|
.pipe(fs.createWriteStream(outpath));
|
||||||
|
|
||||||
|
return {
|
||||||
|
path: outpath,
|
||||||
|
length: zip.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
@ -31,7 +31,7 @@ export default defineConfig({
|
|||||||
],
|
],
|
||||||
reporter: [["./reporter.ts"]],
|
reporter: [["./reporter.ts"]],
|
||||||
use: {
|
use: {
|
||||||
actionTimeout: 5000,
|
actionTimeout: 60_000,
|
||||||
baseURL: `http://localhost:${coderPort}`,
|
baseURL: `http://localhost:${coderPort}`,
|
||||||
video: "retain-on-failure",
|
video: "retain-on-failure",
|
||||||
...(wsEndpoint
|
...(wsEndpoint
|
||||||
@ -111,7 +111,7 @@ export default defineConfig({
|
|||||||
gitAuth.validatePath,
|
gitAuth.validatePath,
|
||||||
),
|
),
|
||||||
CODER_PPROF_ADDRESS: `127.0.0.1:${coderdPProfPort}`,
|
CODER_PPROF_ADDRESS: `127.0.0.1:${coderdPProfPort}`,
|
||||||
CODER_EXPERIMENTS: `${e2eFakeExperiment1},${e2eFakeExperiment2}`,
|
CODER_EXPERIMENTS: `${e2eFakeExperiment1},${e2eFakeExperiment2},${process.env.CODER_EXPERIMENTS}`,
|
||||||
|
|
||||||
// Tests for Deployment / User Authentication / OIDC
|
// Tests for Deployment / User Authentication / OIDC
|
||||||
CODER_OIDC_ISSUER_URL: "https://accounts.google.com",
|
CODER_OIDC_ISSUER_URL: "https://accounts.google.com",
|
||||||
@ -122,6 +122,8 @@ export default defineConfig({
|
|||||||
CODER_OIDC_SIGN_IN_TEXT: "Hello",
|
CODER_OIDC_SIGN_IN_TEXT: "Hello",
|
||||||
CODER_OIDC_ICON_URL: "/icon/google.svg",
|
CODER_OIDC_ICON_URL: "/icon/google.svg",
|
||||||
},
|
},
|
||||||
reuseExistingServer: false,
|
reuseExistingServer: process.env.CODER_E2E_REUSE_EXISTING_SERVER
|
||||||
|
? Boolean(process.env.CODER_E2E_REUSE_EXISTING_SERVER)
|
||||||
|
: false,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
@ -16,7 +16,6 @@ test("setup deployment", async ({ page }) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Setup first user
|
// Setup first user
|
||||||
await page.getByLabel(Language.usernameLabel).fill(users.admin.username);
|
|
||||||
await page.getByLabel(Language.emailLabel).fill(users.admin.email);
|
await page.getByLabel(Language.emailLabel).fill(users.admin.email);
|
||||||
await page.getByLabel(Language.passwordLabel).fill(users.admin.password);
|
await page.getByLabel(Language.passwordLabel).fill(users.admin.password);
|
||||||
await page.getByTestId("create").click();
|
await page.getByTestId("create").click();
|
||||||
|
@ -36,7 +36,7 @@ export default function () {
|
|||||||
throw new Error(msg);
|
throw new Error(msg);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!process.env.CI) {
|
if (!process.env.CI && !process.env.CODER_E2E_REUSE_EXISTING_SERVER) {
|
||||||
console.info("==> make site/e2e/bin/coder");
|
console.info("==> make site/e2e/bin/coder");
|
||||||
execSync("make site/e2e/bin/coder", {
|
execSync("make site/e2e/bin/coder", {
|
||||||
cwd: path.join(__dirname, "../../../"),
|
cwd: path.join(__dirname, "../../../"),
|
||||||
|
@ -52,5 +52,6 @@ test("create and delete organization", async ({ page }) => {
|
|||||||
const dialog = page.getByTestId("dialog");
|
const dialog = page.getByTestId("dialog");
|
||||||
await dialog.getByLabel("Name").fill(newName);
|
await dialog.getByLabel("Name").fill(newName);
|
||||||
await dialog.getByRole("button", { name: "Delete" }).click();
|
await dialog.getByRole("button", { name: "Delete" }).click();
|
||||||
await expect(page.getByText("Organization deleted.")).toBeVisible();
|
await page.waitForTimeout(1000);
|
||||||
|
await expect(page.getByText("Organization deleted")).toBeVisible();
|
||||||
});
|
});
|
||||||
|
151
site/e2e/tests/presets/basic-presets-with-prebuild/main.tf
Normal file
151
site/e2e/tests/presets/basic-presets-with-prebuild/main.tf
Normal file
@ -0,0 +1,151 @@
|
|||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
coder = {
|
||||||
|
source = "coder/coder"
|
||||||
|
version = "2.1.3"
|
||||||
|
}
|
||||||
|
docker = {
|
||||||
|
source = "kreuzwerker/docker"
|
||||||
|
version = "3.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "docker_socket" {
|
||||||
|
default = ""
|
||||||
|
description = "(Optional) Docker socket URI"
|
||||||
|
type = string
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "docker" {
|
||||||
|
# Defaulting to null if the variable is an empty string lets us have an optional variable without having to set our own default
|
||||||
|
host = var.docker_socket != "" ? var.docker_socket : null
|
||||||
|
}
|
||||||
|
|
||||||
|
data "coder_provisioner" "me" {}
|
||||||
|
data "coder_workspace" "me" {}
|
||||||
|
data "coder_workspace_owner" "me" {}
|
||||||
|
|
||||||
|
data "coder_workspace_preset" "goland" {
|
||||||
|
name = "I Like GoLand"
|
||||||
|
parameters = {
|
||||||
|
"jetbrains_ide" = "GO"
|
||||||
|
}
|
||||||
|
prebuilds {
|
||||||
|
instances = 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data "coder_workspace_preset" "python" {
|
||||||
|
name = "Some Like PyCharm"
|
||||||
|
parameters = {
|
||||||
|
"jetbrains_ide" = "PY"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "coder_agent" "main" {
|
||||||
|
arch = data.coder_provisioner.me.arch
|
||||||
|
os = "linux"
|
||||||
|
startup_script = <<-EOT
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Prepare user home with default files on first start!
|
||||||
|
if [ ! -f ~/.init_done ]; then
|
||||||
|
cp -rT /etc/skel ~
|
||||||
|
touch ~/.init_done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${data.coder_workspace.me.prebuild_count}" -eq 1 ]]; then
|
||||||
|
touch ~/.prebuild_note
|
||||||
|
fi
|
||||||
|
EOT
|
||||||
|
|
||||||
|
# These environment variables allow you to make Git commits right away after creating a
|
||||||
|
# workspace. Note that they take precedence over configuration defined in ~/.gitconfig!
|
||||||
|
# You can remove this block if you'd prefer to configure Git manually or using
|
||||||
|
# dotfiles. (see docs/dotfiles.md)
|
||||||
|
env = {
|
||||||
|
GIT_AUTHOR_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||||
|
GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}"
|
||||||
|
GIT_COMMITTER_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||||
|
GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# The following metadata blocks are optional. They are used to display
|
||||||
|
# information about your workspace in the dashboard. You can remove them
|
||||||
|
# if you don't want to display any information.
|
||||||
|
# For basic resources, you can use the `coder stat` command.
|
||||||
|
# If you need more control, you can write your own script.
|
||||||
|
metadata {
|
||||||
|
display_name = "Was Prebuild"
|
||||||
|
key = "prebuild"
|
||||||
|
script = "[[ -e ~/.prebuild_note ]] && echo 'Yes' || echo 'No'"
|
||||||
|
interval = 10
|
||||||
|
timeout = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata {
|
||||||
|
display_name = "Hostname"
|
||||||
|
key = "hostname"
|
||||||
|
script = "hostname"
|
||||||
|
interval = 10
|
||||||
|
timeout = 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# See https://registry.coder.com/modules/jetbrains-gateway
|
||||||
|
module "jetbrains_gateway" {
|
||||||
|
count = data.coder_workspace.me.start_count
|
||||||
|
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||||
|
|
||||||
|
# JetBrains IDEs to make available for the user to select
|
||||||
|
jetbrains_ides = ["IU", "PY", "WS", "PS", "RD", "CL", "GO", "RM"]
|
||||||
|
default = "IU"
|
||||||
|
|
||||||
|
# Default folder to open when starting a JetBrains IDE
|
||||||
|
folder = "/home/coder"
|
||||||
|
|
||||||
|
# This ensures that the latest version of the module gets downloaded, you can also pin the module version to prevent breaking changes in production.
|
||||||
|
version = ">= 1.0.0"
|
||||||
|
|
||||||
|
agent_id = coder_agent.main.id
|
||||||
|
agent_name = "main"
|
||||||
|
order = 2
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "docker_volume" "home_volume" {
|
||||||
|
name = "coder-${data.coder_workspace.me.id}-home"
|
||||||
|
# Protect the volume from being deleted due to changes in attributes.
|
||||||
|
lifecycle {
|
||||||
|
ignore_changes = all
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "docker_container" "workspace" {
|
||||||
|
lifecycle {
|
||||||
|
ignore_changes = all
|
||||||
|
}
|
||||||
|
|
||||||
|
network_mode = "host"
|
||||||
|
|
||||||
|
count = data.coder_workspace.me.start_count
|
||||||
|
image = "codercom/enterprise-base:ubuntu"
|
||||||
|
# Uses lower() to avoid Docker restriction on container names.
|
||||||
|
name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}"
|
||||||
|
# Hostname makes the shell more user friendly: coder@my-workspace:~$
|
||||||
|
hostname = data.coder_workspace.me.name
|
||||||
|
# Use the docker gateway if the access URL is 127.0.0.1
|
||||||
|
entrypoint = [
|
||||||
|
"sh", "-c", replace(coder_agent.main.init_script, "/localhost|127\\.0\\.0\\.1/", "host.docker.internal")
|
||||||
|
]
|
||||||
|
env = ["CODER_AGENT_TOKEN=${coder_agent.main.token}"]
|
||||||
|
host {
|
||||||
|
host = "host.docker.internal"
|
||||||
|
ip = "host-gateway"
|
||||||
|
}
|
||||||
|
volumes {
|
||||||
|
container_path = "/home/coder"
|
||||||
|
volume_name = docker_volume.home_volume.name
|
||||||
|
read_only = false
|
||||||
|
}
|
||||||
|
}
|
146
site/e2e/tests/presets/basic-presets/main.tf
Normal file
146
site/e2e/tests/presets/basic-presets/main.tf
Normal file
@ -0,0 +1,146 @@
terraform {
  required_providers {
    coder = {
      source  = "coder/coder"
      version = "2.1.3"
    }
    docker = {
      source  = "kreuzwerker/docker"
      version = "3.0.2"
    }
  }
}

variable "docker_socket" {
  default     = ""
  description = "(Optional) Docker socket URI"
  type        = string
}

provider "docker" {
  # Defaulting to null if the variable is an empty string lets us have an optional variable without having to set our own default
  host = var.docker_socket != "" ? var.docker_socket : null
}

data "coder_provisioner" "me" {}
data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

data "coder_workspace_preset" "goland" {
  name = "I Like GoLand"
  parameters = {
    "jetbrains_ide" = "GO"
  }
}

data "coder_workspace_preset" "python" {
  name = "Some Like PyCharm"
  parameters = {
    "jetbrains_ide" = "PY"
  }
}

resource "coder_agent" "main" {
  arch = data.coder_provisioner.me.arch
  os   = "linux"
  startup_script = <<-EOT
    set -e

    # Prepare user home with default files on first start!
    if [ ! -f ~/.init_done ]; then
      cp -rT /etc/skel ~
      touch ~/.init_done
    fi

    # Add any commands that should be executed at workspace startup (e.g install requirements, start a program, etc) here
  EOT

  # These environment variables allow you to make Git commits right away after creating a
  # workspace. Note that they take precedence over configuration defined in ~/.gitconfig!
  # You can remove this block if you'd prefer to configure Git manually or using
  # dotfiles. (see docs/dotfiles.md)
  env = {
    GIT_AUTHOR_NAME     = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
    GIT_AUTHOR_EMAIL    = "${data.coder_workspace_owner.me.email}"
    GIT_COMMITTER_NAME  = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
    GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}"
  }

  # The following metadata blocks are optional. They are used to display
  # information about your workspace in the dashboard. You can remove them
  # if you don't want to display any information.
  # For basic resources, you can use the `coder stat` command.
  # If you need more control, you can write your own script.
  metadata {
    display_name = "Is Prebuild"
    key          = "prebuild"
    script       = "echo ${data.coder_workspace.me.prebuild_count}"
    interval     = 10
    timeout      = 1
  }

  metadata {
    display_name = "Hostname"
    key          = "hostname"
    script       = "hostname"
    interval     = 10
    timeout      = 1
  }
}

# See https://registry.coder.com/modules/jetbrains-gateway
module "jetbrains_gateway" {
  count  = data.coder_workspace.me.start_count
  source = "registry.coder.com/modules/jetbrains-gateway/coder"

  # JetBrains IDEs to make available for the user to select
  jetbrains_ides = ["IU", "PY", "WS", "PS", "RD", "CL", "GO", "RM"]
  default        = "IU"

  # Default folder to open when starting a JetBrains IDE
  folder = "/home/coder"

  # This ensures that the latest version of the module gets downloaded, you can also pin the module version to prevent breaking changes in production.
  version = ">= 1.0.0"

  agent_id   = coder_agent.main.id
  agent_name = "main"
  order      = 2
}

resource "docker_volume" "home_volume" {
  name = "coder-${data.coder_workspace.me.id}-home"
  # Protect the volume from being deleted due to changes in attributes.
  lifecycle {
    ignore_changes = all
  }
}

resource "docker_container" "workspace" {
  lifecycle {
    ignore_changes = all
  }

  network_mode = "host"

  count = data.coder_workspace.me.start_count
  image = "codercom/enterprise-base:ubuntu"
  # Uses lower() to avoid Docker restriction on container names.
  name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}"
  # Hostname makes the shell more user friendly: coder@my-workspace:~$
  hostname = data.coder_workspace.me.name
  # Use the docker gateway if the access URL is 127.0.0.1
  entrypoint = [
    "sh", "-c", replace(coder_agent.main.init_script, "/localhost|127\\.0\\.0\\.1/", "host.docker.internal")
  ]
  env = ["CODER_AGENT_TOKEN=${coder_agent.main.token}"]
  host {
    host = "host.docker.internal"
    ip   = "host-gateway"
  }
  volumes {
    container_path = "/home/coder"
    volume_name    = docker_volume.home_volume.name
    read_only      = false
  }
}
site/e2e/tests/presets/prebuilds.spec.ts (new file, +156)
@@ -0,0 +1,156 @@
import path from "node:path";
import { type Locator, expect, test } from "@playwright/test";
import {
  currentUser,
  importTemplate,
  login,
  randomName,
  requiresLicense,
} from "../../helpers";
import { beforeCoderTest } from "../../hooks";

test.beforeEach(async ({ page }) => {
  beforeCoderTest(page);
  await login(page);
});

const waitForBuildTimeout = 120_000; // Builds can take a while, let's give them at most 2m.

const templateFiles = [
  path.join(__dirname, "basic-presets-with-prebuild/main.tf"),
  path.join(__dirname, "basic-presets-with-prebuild/.terraform.lock.hcl"),
];

const expectedPrebuilds = 2;

// NOTE: requires the `workspace-prebuilds` experiment enabled!
test("create template with desired prebuilds", async ({ page, baseURL }) => {
  requiresLicense();

  // Create new template.
  const templateName = randomName();
  await importTemplate(page, templateName, templateFiles);

  await page.goto(
    `/workspaces?filter=owner:prebuilds%20template:${templateName}&page=1`,
    { waitUntil: "domcontentloaded" },
  );

  // Wait for prebuilds to show up.
  const prebuilds = page.getByTestId(/^workspace-.+$/);
  await waitForExpectedCount(prebuilds, expectedPrebuilds);

  // Wait for prebuilds to start.
  const runningPrebuilds = page
    .getByTestId("build-status")
    .getByText("Running");
  await waitForExpectedCount(runningPrebuilds, expectedPrebuilds);
});

// NOTE: requires the `workspace-prebuilds` experiment enabled!
test("claim prebuild matching selected preset", async ({ page, baseURL }) => {
  test.setTimeout(300_000);

  requiresLicense();

  // Create new template.
  const templateName = randomName();
  await importTemplate(page, templateName, templateFiles);

  await page.goto(
    `/workspaces?filter=owner:prebuilds%20template:${templateName}&page=1`,
    { waitUntil: "domcontentloaded" },
  );

  // Wait for prebuilds to show up.
  const prebuilds = page.getByTestId(/^workspace-.+$/);
  await waitForExpectedCount(prebuilds, expectedPrebuilds);

  const previousWorkspaceNames = await Promise.all(
    (await prebuilds.all()).map((value) => {
      return value.getByText(/prebuild-.+/).textContent();
    }),
  );

  // Wait for prebuilds to start.
  let runningPrebuilds = page.getByTestId("build-status").getByText("Running");
  await waitForExpectedCount(runningPrebuilds, expectedPrebuilds);

  // Open the first prebuild.
  await runningPrebuilds.first().click();
  await page.waitForURL(/\/@prebuilds\/prebuild-.+/);

  // Wait for the prebuild to become ready so it's eligible to be claimed.
  await page.getByTestId("agent-status-ready").waitFor({ timeout: 60_000 });

  // Create a new workspace using the same preset as one of the prebuilds.
  await page.goto(`/templates/coder/${templateName}/workspace`, {
    waitUntil: "domcontentloaded",
  });

  // Visit workspace creation page for new template.
  await page.goto(`/templates/default/${templateName}/workspace`, {
    waitUntil: "domcontentloaded",
  });

  // Choose a preset.
  await page.locator('button[aria-label="Preset"]').click();
  // Choose the GoLand preset.
  const preset = page.getByText("I Like GoLand");
  await expect(preset).toBeVisible();
  await preset.click();

  // Create a workspace.
  const workspaceName = randomName();
  await page.locator("input[name=name]").fill(workspaceName);
  await page.getByRole("button", { name: "Create workspace" }).click();

  // Wait for the workspace build display to be navigated to.
  const user = currentUser(page);
  await page.waitForURL(`/@${user.username}/${workspaceName}`, {
    timeout: waitForBuildTimeout, // Account for workspace build time.
  });

  // Validate the workspace metadata that it was indeed a claimed prebuild.
  const indicator = page.getByText("Was Prebuild");
  await indicator.waitFor({ timeout: 60_000 });
  const text = indicator.locator("xpath=..").getByText("Yes");
  await text.waitFor({ timeout: 30_000 });

  // Navigate back to prebuilds page to see that a new prebuild replaced the claimed one.
  await page.goto(
    `/workspaces?filter=owner:prebuilds%20template:${templateName}&page=1`,
    { waitUntil: "domcontentloaded" },
  );

  // Wait for prebuilds to show up.
  const newPrebuilds = page.getByTestId(/^workspace-.+$/);
  await waitForExpectedCount(newPrebuilds, expectedPrebuilds);

  const currentWorkspaceNames = await Promise.all(
    (await newPrebuilds.all()).map((value) => {
      return value.getByText(/prebuild-.+/).textContent();
    }),
  );

  // Ensure the prebuilds have changed.
  expect(currentWorkspaceNames).not.toEqual(previousWorkspaceNames);

  // Wait for prebuilds to start.
  runningPrebuilds = page.getByTestId("build-status").getByText("Running");
  await waitForExpectedCount(runningPrebuilds, expectedPrebuilds);
});

function waitForExpectedCount(prebuilds: Locator, expectedCount: number) {
  return expect
    .poll(
      async () => {
        return (await prebuilds.all()).length === expectedCount;
      },
      {
        intervals: [100],
        timeout: waitForBuildTimeout,
      },
    )
    .toBe(true);
}
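
The waitForExpectedCount helper above polls Locator.all() with expect.poll until the number of matching workspaces equals the expected count. For reference only (not part of this commit), Playwright's built-in toHaveCount assertion retries on its own, so an equivalent check could be written roughly as the sketch below; waitForExpectedCountAlt is a hypothetical name.

// Illustrative alternative only; not part of the diff above.
import { expect, type Locator } from "@playwright/test";

const waitForBuildTimeout = 120_000; // same budget the spec uses for builds

async function waitForExpectedCountAlt(items: Locator, expectedCount: number) {
  // toHaveCount is a web-first assertion: it re-queries the locator until the
  // element count matches or the timeout elapses.
  await expect(items).toHaveCount(expectedCount, { timeout: waitForBuildTimeout });
}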
site/e2e/tests/presets/presets.spec.ts (new file, +59)
@@ -0,0 +1,59 @@
import path from "node:path";
import { expect, test } from "@playwright/test";
import { currentUser, importTemplate, login, randomName } from "../../helpers";
import { beforeCoderTest } from "../../hooks";

test.beforeEach(async ({ page }) => {
  beforeCoderTest(page);
  await login(page);
});

test("create template with preset and use in workspace", async ({
  page,
  baseURL,
}) => {
  test.setTimeout(300_000);

  // Create new template.
  const templateName = randomName();
  await importTemplate(page, templateName, [
    path.join(__dirname, "basic-presets/main.tf"),
    path.join(__dirname, "basic-presets/.terraform.lock.hcl"),
  ]);

  // Visit workspace creation page for new template.
  await page.goto(`/templates/default/${templateName}/workspace`, {
    waitUntil: "domcontentloaded",
  });

  await page.locator('button[aria-label="Preset"]').click();

  const preset1 = page.getByText("I Like GoLand");
  const preset2 = page.getByText("Some Like PyCharm");

  await expect(preset1).toBeVisible();
  await expect(preset2).toBeVisible();

  // Choose the GoLand preset.
  await preset1.click();

  // Validate the preset was applied correctly.
  await expect(page.locator('input[value="GO"]')).toBeChecked();

  // Create a workspace.
  const workspaceName = randomName();
  await page.locator("input[name=name]").fill(workspaceName);
  await page.getByRole("button", { name: "Create workspace" }).click();

  // Wait for the workspace build display to be navigated to.
  const user = currentUser(page);
  await page.waitForURL(`/@${user.username}/${workspaceName}`, {
    timeout: 120_000, // Account for workspace build time.
  });

  // Visit workspace settings page.
  await page.goto(`/@${user.username}/${workspaceName}/settings/parameters`);

  // Validate the preset was applied correctly.
  await expect(page.locator('input[value="GO"]')).toBeChecked();
});
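
Both presets.spec.ts and prebuilds.spec.ts repeat the same two steps to pick a preset: open the button labelled "Preset", then click the preset's display name. If that sequence were ever factored into the shared e2e helpers, a sketch could look like the following; selectPreset is an assumed name, not something this commit adds.

// Hypothetical helper, not part of the diff above.
import { expect, type Page } from "@playwright/test";

async function selectPreset(page: Page, presetName: string) {
  // Open the preset dropdown on the workspace creation page.
  await page.locator('button[aria-label="Preset"]').click();
  // Click the preset by its visible display name, e.g. "I Like GoLand".
  const preset = page.getByText(presetName);
  await expect(preset).toBeVisible();
  await preset.click();
}

Each spec would then call, for example, await selectPreset(page, "I Like GoLand"); before filling in the workspace name.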
@@ -16,7 +16,7 @@ let template!: string;
 
 test.beforeAll(async ({ browser }) => {
   const page = await (await browser.newContext()).newPage();
-  await login(page);
+  await login(page, users.templateAdmin);
 
   const richParameters: RichParameter[] = [
     { ...emptyParameter, name: "repo", type: "string" },
@@ -29,7 +29,7 @@ test.beforeAll(async ({ browser }) => {
 
 test.beforeEach(async ({ page }) => {
   beforeCoderTest(page);
-  await login(page, users.user);
+  await login(page, users.member);
 });
 
 test("create workspace in auto mode", async ({ page }) => {
@@ -40,7 +40,7 @@ test("create workspace in auto mode", async ({ page }) => {
       waitUntil: "domcontentloaded",
     },
   );
-  await expect(page).toHaveTitle(`${users.user.username}/${name} - Coder`);
+  await expect(page).toHaveTitle(`${users.member.username}/${name} - Coder`);
 });
 
 test("use an existing workspace that matches the `match` parameter instead of creating a new one", async ({
@@ -54,7 +54,7 @@ test("use an existing workspace that matches the `match` parameter instead of cr
     },
   );
   await expect(page).toHaveTitle(
-    `${users.user.username}/${prevWorkspace} - Coder`,
+    `${users.member.username}/${prevWorkspace} - Coder`,
   );
 });
 
Some files were not shown because too many files have changed in this diff.