diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index bf1428df6c..7b47532ed4 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1021,7 +1021,10 @@ jobs:
if: github.ref == 'refs/heads/main' && needs.changes.outputs.docs-only == 'false' && !github.event.pull_request.head.repo.fork
runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-22.04' }}
permissions:
- packages: write # Needed to push images to ghcr.io
+ # Necessary to push docker images to ghcr.io.
+ packages: write
+ # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage)
+ id-token: write
env:
DOCKER_CLI_EXPERIMENTAL: "enabled"
outputs:
@@ -1050,12 +1053,44 @@ jobs:
- name: Setup Go
uses: ./.github/actions/setup-go
+ # Necessary for signing Windows binaries.
+ - name: Setup Java
+ uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0
+ with:
+ distribution: "zulu"
+ java-version: "11.0"
+
+ - name: Install go-winres
+ run: go install github.com/tc-hib/go-winres@d743268d7ea168077ddd443c4240562d4f5e8c3e # v0.3.3
+
- name: Install nfpm
run: go install github.com/goreleaser/nfpm/v2/cmd/nfpm@v2.35.1
- name: Install zstd
run: sudo apt-get install -y zstd
+ - name: Setup Windows EV Signing Certificate
+ run: |
+ set -euo pipefail
+ touch /tmp/ev_cert.pem
+ chmod 600 /tmp/ev_cert.pem
+ echo "$EV_SIGNING_CERT" > /tmp/ev_cert.pem
+ wget https://github.com/ebourg/jsign/releases/download/6.0/jsign-6.0.jar -O /tmp/jsign-6.0.jar
+ env:
+ EV_SIGNING_CERT: ${{ secrets.EV_SIGNING_CERT }}
+
+ # Setup GCloud for signing Windows binaries.
+ - name: Authenticate to Google Cloud
+ id: gcloud_auth
+ uses: google-github-actions/auth@71f986410dfbc7added4569d411d040a91dc6935 # v2.1.8
+ with:
+ workload_identity_provider: ${{ secrets.GCP_CODE_SIGNING_WORKLOAD_ID_PROVIDER }}
+ service_account: ${{ secrets.GCP_CODE_SIGNING_SERVICE_ACCOUNT }}
+ token_format: "access_token"
+
+ - name: Setup GCloud SDK
+ uses: google-github-actions/setup-gcloud@77e7a554d41e2ee56fc945c52dfd3f33d12def9a # v2.1.4
+
- name: Download dylibs
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
@@ -1082,6 +1117,18 @@ jobs:
build/coder_linux_{amd64,arm64,armv7} \
build/coder_"$version"_windows_amd64.zip \
build/coder_"$version"_linux_amd64.{tar.gz,deb}
+ env:
+ # The Windows slim binary must be signed for Coder Desktop to accept
+ # it. The darwin executables don't need to be signed, but the dylibs
+ # do (see above).
+ CODER_SIGN_WINDOWS: "1"
+ CODER_WINDOWS_RESOURCES: "1"
+ EV_KEY: ${{ secrets.EV_KEY }}
+ EV_KEYSTORE: ${{ secrets.EV_KEYSTORE }}
+ EV_TSA_URL: ${{ secrets.EV_TSA_URL }}
+ EV_CERTIFICATE_PATH: /tmp/ev_cert.pem
+ GCLOUD_ACCESS_TOKEN: ${{ steps.gcloud_auth.outputs.access_token }}
+ JSIGN_PATH: /tmp/jsign-6.0.jar
- name: Build Linux Docker images
id: build-docker
@@ -1183,10 +1230,10 @@ jobs:
uses: google-github-actions/setup-gcloud@77e7a554d41e2ee56fc945c52dfd3f33d12def9a # v2.1.4
- name: Set up Flux CLI
- uses: fluxcd/flux2/action@af67405ee43a6cd66e0b73f4b3802e8583f9d961 # v2.5.0
+ uses: fluxcd/flux2/action@8d5f40dca5aa5d3c0fc3414457dda15a0ac92fa4 # v2.5.1
with:
# Keep this and the github action up to date with the version of flux installed in dogfood cluster
- version: "2.2.1"
+ version: "2.5.1"
- name: Get Cluster Credentials
uses: google-github-actions/get-gke-credentials@7a108e64ed8546fe38316b4086e91da13f4785e1 # v2.3.1
@@ -1219,6 +1266,8 @@ jobs:
kubectl --namespace coder rollout status deployment/coder
kubectl --namespace coder rollout restart deployment/coder-provisioner
kubectl --namespace coder rollout status deployment/coder-provisioner
+ kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged
+ kubectl --namespace coder rollout status deployment/coder-provisioner-tagged
deploy-wsproxies:
runs-on: ubuntu-latest
diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml
index 3965aeab34..2168be9c6b 100644
--- a/.github/workflows/nightly-gauntlet.yaml
+++ b/.github/workflows/nightly-gauntlet.yaml
@@ -20,6 +20,7 @@ jobs:
# even if some of the preceding steps are slow.
timeout-minutes: 25
strategy:
+ fail-fast: false
matrix:
os:
- macos-latest
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 89b4e4e84a..614b3542d5 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -223,21 +223,12 @@ jobs:
distribution: "zulu"
java-version: "11.0"
+ - name: Install go-winres
+ run: go install github.com/tc-hib/go-winres@d743268d7ea168077ddd443c4240562d4f5e8c3e # v0.3.3
+
- name: Install nsis and zstd
run: sudo apt-get install -y nsis zstd
- - name: Download dylibs
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- with:
- name: dylibs
- path: ./build
-
- - name: Insert dylibs
- run: |
- mv ./build/*amd64.dylib ./site/out/bin/coder-vpn-darwin-amd64.dylib
- mv ./build/*arm64.dylib ./site/out/bin/coder-vpn-darwin-arm64.dylib
- mv ./build/*arm64.h ./site/out/bin/coder-vpn-darwin-dylib.h
-
- name: Install nfpm
run: |
set -euo pipefail
@@ -294,6 +285,18 @@ jobs:
- name: Setup GCloud SDK
uses: google-github-actions/setup-gcloud@77e7a554d41e2ee56fc945c52dfd3f33d12def9a # v2.1.4
+ - name: Download dylibs
+ uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+ with:
+ name: dylibs
+ path: ./build
+
+ - name: Insert dylibs
+ run: |
+ mv ./build/*amd64.dylib ./site/out/bin/coder-vpn-darwin-amd64.dylib
+ mv ./build/*arm64.dylib ./site/out/bin/coder-vpn-darwin-arm64.dylib
+ mv ./build/*arm64.h ./site/out/bin/coder-vpn-darwin-dylib.h
+
- name: Build binaries
run: |
set -euo pipefail
@@ -310,6 +313,7 @@ jobs:
env:
CODER_SIGN_WINDOWS: "1"
CODER_SIGN_DARWIN: "1"
+ CODER_WINDOWS_RESOURCES: "1"
AC_CERTIFICATE_FILE: /tmp/apple_cert.p12
AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt
AC_APIKEY_ISSUER_ID: ${{ secrets.AC_APIKEY_ISSUER_ID }}
diff --git a/agent/agent.go b/agent/agent.go
index 4e0c6fbb40..135d1c6dad 100644
--- a/agent/agent.go
+++ b/agent/agent.go
@@ -8,11 +8,13 @@ import (
"fmt"
"hash/fnv"
"io"
+ "net"
"net/http"
"net/netip"
"os"
"os/user"
"path/filepath"
+ "slices"
"sort"
"strconv"
"strings"
@@ -25,9 +27,9 @@ import (
"github.com/prometheus/common/expfmt"
"github.com/spf13/afero"
"go.uber.org/atomic"
- "golang.org/x/exp/slices"
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
+ "google.golang.org/protobuf/types/known/timestamppb"
"tailscale.com/net/speedtest"
"tailscale.com/tailcfg"
"tailscale.com/types/netlogtype"
@@ -91,7 +93,7 @@ type Options struct {
Execer agentexec.Execer
ContainerLister agentcontainers.Lister
- ExperimentalContainersEnabled bool
+ ExperimentalDevcontainersEnabled bool
}
type Client interface {
@@ -155,7 +157,7 @@ func New(options Options) Agent {
options.Execer = agentexec.DefaultExecer
}
if options.ContainerLister == nil {
- options.ContainerLister = agentcontainers.NewDocker(options.Execer)
+ options.ContainerLister = agentcontainers.NoopLister{}
}
hardCtx, hardCancel := context.WithCancel(context.Background())
@@ -179,6 +181,7 @@ func New(options Options) Agent {
lifecycleUpdate: make(chan struct{}, 1),
lifecycleReported: make(chan codersdk.WorkspaceAgentLifecycle, 1),
lifecycleStates: []agentsdk.PostLifecycleRequest{{State: codersdk.WorkspaceAgentLifecycleCreated}},
+ reportConnectionsUpdate: make(chan struct{}, 1),
ignorePorts: options.IgnorePorts,
portCacheDuration: options.PortCacheDuration,
reportMetadataInterval: options.ReportMetadataInterval,
@@ -193,7 +196,7 @@ func New(options Options) Agent {
execer: options.Execer,
lister: options.ContainerLister,
- experimentalDevcontainersEnabled: options.ExperimentalContainersEnabled,
+ experimentalDevcontainersEnabled: options.ExperimentalDevcontainersEnabled,
}
// Initially, we have a closed channel, reflecting the fact that we are not initially connected.
// Each time we connect we replace the channel (while holding the closeMutex) with a new one
@@ -254,6 +257,10 @@ type agent struct {
lifecycleStates []agentsdk.PostLifecycleRequest
lifecycleLastReportedIndex int // Keeps track of the last lifecycle state we successfully reported.
+ reportConnectionsUpdate chan struct{}
+ reportConnectionsMu sync.Mutex
+ reportConnections []*proto.ReportConnectionRequest
+
network *tailnet.Conn
statsReporter *statsReporter
logSender *agentsdk.LogSender
@@ -281,6 +288,26 @@ func (a *agent) init() {
UpdateEnv: a.updateCommandEnv,
WorkingDirectory: func() string { return a.manifest.Load().Directory },
BlockFileTransfer: a.blockFileTransfer,
+ ReportConnection: func(id uuid.UUID, magicType agentssh.MagicSessionType, ip string) func(code int, reason string) {
+ var connectionType proto.Connection_Type
+ switch magicType {
+ case agentssh.MagicSessionTypeSSH:
+ connectionType = proto.Connection_SSH
+ case agentssh.MagicSessionTypeVSCode:
+ connectionType = proto.Connection_VSCODE
+ case agentssh.MagicSessionTypeJetBrains:
+ connectionType = proto.Connection_JETBRAINS
+ case agentssh.MagicSessionTypeUnknown:
+ connectionType = proto.Connection_TYPE_UNSPECIFIED
+ default:
+ a.logger.Error(a.hardCtx, "unhandled magic session type when reporting connection", slog.F("magic_type", magicType))
+ connectionType = proto.Connection_TYPE_UNSPECIFIED
+ }
+
+ return a.reportConnection(id, connectionType, ip)
+ },
+
+ ExperimentalDevContainersEnabled: a.experimentalDevcontainersEnabled,
})
if err != nil {
panic(err)
@@ -303,10 +330,13 @@ func (a *agent) init() {
a.reconnectingPTYServer = reconnectingpty.NewServer(
a.logger.Named("reconnecting-pty"),
a.sshServer,
+ func(id uuid.UUID, ip string) func(code int, reason string) {
+ return a.reportConnection(id, proto.Connection_RECONNECTING_PTY, ip)
+ },
a.metrics.connectionsTotal, a.metrics.reconnectingPTYErrors,
a.reconnectingPTYTimeout,
func(s *reconnectingpty.Server) {
- s.ExperimentalContainersEnabled = a.experimentalDevcontainersEnabled
+ s.ExperimentalDevcontainersEnabled = a.experimentalDevcontainersEnabled
},
)
go a.runLoop()
@@ -717,6 +747,124 @@ func (a *agent) setLifecycle(state codersdk.WorkspaceAgentLifecycle) {
}
}
+// reportConnectionsLoop reports connections to the agent for auditing.
+func (a *agent) reportConnectionsLoop(ctx context.Context, aAPI proto.DRPCAgentClient24) error {
+ for {
+ select {
+ case <-a.reportConnectionsUpdate:
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+
+ for {
+ a.reportConnectionsMu.Lock()
+ if len(a.reportConnections) == 0 {
+ a.reportConnectionsMu.Unlock()
+ break
+ }
+ payload := a.reportConnections[0]
+ // Release the lock while we send the payload; this is safe
+ // since we only append to the slice.
+ a.reportConnectionsMu.Unlock()
+
+ logger := a.logger.With(slog.F("payload", payload))
+ logger.Debug(ctx, "reporting connection")
+ _, err := aAPI.ReportConnection(ctx, payload)
+ if err != nil {
+ return xerrors.Errorf("failed to report connection: %w", err)
+ }
+
+ logger.Debug(ctx, "successfully reported connection")
+
+ // Remove the payload we sent.
+ a.reportConnectionsMu.Lock()
+ a.reportConnections[0] = nil // Release the pointer from the underlying array.
+ a.reportConnections = a.reportConnections[1:]
+ a.reportConnectionsMu.Unlock()
+ }
+ }
+}
+
+const (
+ // reportConnectionBufferLimit limits the number of connection reports we
+ // buffer to avoid growing the buffer indefinitely. This should not happen
+ // unless the agent has lost connection to coderd for a long time or if
+ // the agent is being spammed with connections.
+ //
+ // If we assume ~150 bytes per connection report, this would be around 300KB
+ // of memory, which seems acceptable. We could reduce this if necessary by
+ // not using the proto struct directly.
+ reportConnectionBufferLimit = 2048
+)
+
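+// reportConnection queues a connect report for the given connection and
+// returns a function that queues the corresponding disconnect report once
+// the connection is closed.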
+func (a *agent) reportConnection(id uuid.UUID, connectionType proto.Connection_Type, ip string) (disconnected func(code int, reason string)) {
+ // Remove the port from the IP because ports are not supported in coderd.
+ if host, _, err := net.SplitHostPort(ip); err != nil {
+ a.logger.Error(a.hardCtx, "split host and port for connection report failed", slog.F("ip", ip), slog.Error(err))
+ } else {
+ // Best effort.
+ ip = host
+ }
+
+ a.reportConnectionsMu.Lock()
+ defer a.reportConnectionsMu.Unlock()
+
+ if len(a.reportConnections) >= reportConnectionBufferLimit {
+ a.logger.Warn(a.hardCtx, "connection report buffer limit reached, dropping connect",
+ slog.F("limit", reportConnectionBufferLimit),
+ slog.F("connection_id", id),
+ slog.F("connection_type", connectionType),
+ slog.F("ip", ip),
+ )
+ } else {
+ a.reportConnections = append(a.reportConnections, &proto.ReportConnectionRequest{
+ Connection: &proto.Connection{
+ Id: id[:],
+ Action: proto.Connection_CONNECT,
+ Type: connectionType,
+ Timestamp: timestamppb.New(time.Now()),
+ Ip: ip,
+ StatusCode: 0,
+ Reason: nil,
+ },
+ })
+ select {
+ case a.reportConnectionsUpdate <- struct{}{}:
+ default:
+ }
+ }
+
+ return func(code int, reason string) {
+ a.reportConnectionsMu.Lock()
+ defer a.reportConnectionsMu.Unlock()
+ if len(a.reportConnections) >= reportConnectionBufferLimit {
+ a.logger.Warn(a.hardCtx, "connection report buffer limit reached, dropping disconnect",
+ slog.F("limit", reportConnectionBufferLimit),
+ slog.F("connection_id", id),
+ slog.F("connection_type", connectionType),
+ slog.F("ip", ip),
+ )
+ return
+ }
+
+ a.reportConnections = append(a.reportConnections, &proto.ReportConnectionRequest{
+ Connection: &proto.Connection{
+ Id: id[:],
+ Action: proto.Connection_DISCONNECT,
+ Type: connectionType,
+ Timestamp: timestamppb.New(time.Now()),
+ Ip: ip,
+ StatusCode: int32(code), //nolint:gosec
+ Reason: &reason,
+ },
+ })
+ select {
+ case a.reportConnectionsUpdate <- struct{}{}:
+ default:
+ }
+ }
+}
+
// fetchServiceBannerLoop fetches the service banner on an interval. It will
// not be fetched immediately; the expectation is that it is primed elsewhere
// (and must be done before the session actually starts).
@@ -827,6 +975,10 @@ func (a *agent) run() (retErr error) {
return resourcesmonitor.Start(ctx)
})
+ // Connection reports are part of auditing; we should keep sending them
+ // during graceful shutdown via gracefulShutdownBehaviorRemain.
+ connMan.startAgentAPI("report connections", gracefulShutdownBehaviorRemain, a.reportConnectionsLoop)
+
// channels to sync goroutines below
// handle manifest
// |
@@ -1208,19 +1360,22 @@ func (a *agent) createTailnet(
return nil, xerrors.Errorf("update host signer: %w", err)
}
- sshListener, err := network.Listen("tcp", ":"+strconv.Itoa(workspacesdk.AgentSSHPort))
- if err != nil {
- return nil, xerrors.Errorf("listen on the ssh port: %w", err)
- }
- defer func() {
+ for _, port := range []int{workspacesdk.AgentSSHPort, workspacesdk.AgentStandardSSHPort} {
+ sshListener, err := network.Listen("tcp", ":"+strconv.Itoa(port))
if err != nil {
- _ = sshListener.Close()
+ return nil, xerrors.Errorf("listen on the ssh port (%v): %w", port, err)
+ }
+ // nolint:revive // We do want to run the deferred functions when createTailnet returns.
+ defer func() {
+ if err != nil {
+ _ = sshListener.Close()
+ }
+ }()
+ if err = a.trackGoroutine(func() {
+ _ = a.sshServer.Serve(sshListener)
+ }); err != nil {
+ return nil, err
}
- }()
- if err = a.trackGoroutine(func() {
- _ = a.sshServer.Serve(sshListener)
- }); err != nil {
- return nil, err
}
reconnectingPTYListener, err := network.Listen("tcp", ":"+strconv.Itoa(workspacesdk.AgentReconnectingPTYPort))
diff --git a/agent/agent_test.go b/agent/agent_test.go
index 935309e98d..d6c8e4d976 100644
--- a/agent/agent_test.go
+++ b/agent/agent_test.go
@@ -19,6 +19,7 @@ import (
"path/filepath"
"regexp"
"runtime"
+ "slices"
"strconv"
"strings"
"sync/atomic"
@@ -41,7 +42,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/ssh"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
@@ -65,38 +65,48 @@ func TestMain(m *testing.M) {
goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
+var sshPorts = []uint16{workspacesdk.AgentSSHPort, workspacesdk.AgentStandardSSHPort}
+
// NOTE: These tests only work when your default shell is bash for some reason.
func TestAgent_Stats_SSH(t *testing.T) {
t.Parallel()
- ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
- defer cancel()
- //nolint:dogsled
- conn, _, stats, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+ for _, port := range sshPorts {
+ port := port
+ t.Run(fmt.Sprintf("(:%d)", port), func(t *testing.T) {
+ t.Parallel()
- sshClient, err := conn.SSHClient(ctx)
- require.NoError(t, err)
- defer sshClient.Close()
- session, err := sshClient.NewSession()
- require.NoError(t, err)
- defer session.Close()
- stdin, err := session.StdinPipe()
- require.NoError(t, err)
- err = session.Shell()
- require.NoError(t, err)
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
- var s *proto.Stats
- require.Eventuallyf(t, func() bool {
- var ok bool
- s, ok = <-stats
- return ok && s.ConnectionCount > 0 && s.RxBytes > 0 && s.TxBytes > 0 && s.SessionCountSsh == 1
- }, testutil.WaitLong, testutil.IntervalFast,
- "never saw stats: %+v", s,
- )
- _ = stdin.Close()
- err = session.Wait()
- require.NoError(t, err)
+ //nolint:dogsled
+ conn, _, stats, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+
+ sshClient, err := conn.SSHClientOnPort(ctx, port)
+ require.NoError(t, err)
+ defer sshClient.Close()
+ session, err := sshClient.NewSession()
+ require.NoError(t, err)
+ defer session.Close()
+ stdin, err := session.StdinPipe()
+ require.NoError(t, err)
+ err = session.Shell()
+ require.NoError(t, err)
+
+ var s *proto.Stats
+ require.Eventuallyf(t, func() bool {
+ var ok bool
+ s, ok = <-stats
+ return ok && s.ConnectionCount > 0 && s.RxBytes > 0 && s.TxBytes > 0 && s.SessionCountSsh == 1
+ }, testutil.WaitLong, testutil.IntervalFast,
+ "never saw stats: %+v", s,
+ )
+ _ = stdin.Close()
+ err = session.Wait()
+ require.NoError(t, err)
+ })
+ }
}
func TestAgent_Stats_ReconnectingPTY(t *testing.T) {
@@ -163,7 +173,7 @@ func TestAgent_Stats_Magic(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
//nolint:dogsled
- conn, _, stats, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+ conn, agentClient, stats, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
sshClient, err := conn.SSHClient(ctx)
require.NoError(t, err)
defer sshClient.Close()
@@ -193,6 +203,8 @@ func TestAgent_Stats_Magic(t *testing.T) {
_ = stdin.Close()
err = session.Wait()
require.NoError(t, err)
+
+ assertConnectionReport(t, agentClient, proto.Connection_VSCODE, 0, "")
})
t.Run("TracksJetBrains", func(t *testing.T) {
@@ -229,7 +241,7 @@ func TestAgent_Stats_Magic(t *testing.T) {
remotePort := sc.Text()
//nolint:dogsled
- conn, _, stats, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+ conn, agentClient, stats, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
sshClient, err := conn.SSHClient(ctx)
require.NoError(t, err)
@@ -265,20 +277,30 @@ func TestAgent_Stats_Magic(t *testing.T) {
}, testutil.WaitLong, testutil.IntervalFast,
"never saw stats after conn closes",
)
+
+ assertConnectionReport(t, agentClient, proto.Connection_JETBRAINS, 0, "")
})
}
func TestAgent_SessionExec(t *testing.T) {
t.Parallel()
- session := setupSSHSession(t, agentsdk.Manifest{}, codersdk.ServiceBannerConfig{}, nil)
- command := "echo test"
- if runtime.GOOS == "windows" {
- command = "cmd.exe /c echo test"
+ for _, port := range sshPorts {
+ port := port
+ t.Run(fmt.Sprintf("(:%d)", port), func(t *testing.T) {
+ t.Parallel()
+
+ session := setupSSHSessionOnPort(t, agentsdk.Manifest{}, codersdk.ServiceBannerConfig{}, nil, port)
+
+ command := "echo test"
+ if runtime.GOOS == "windows" {
+ command = "cmd.exe /c echo test"
+ }
+ output, err := session.Output(command)
+ require.NoError(t, err)
+ require.Equal(t, "test", strings.TrimSpace(string(output)))
+ })
}
- output, err := session.Output(command)
- require.NoError(t, err)
- require.Equal(t, "test", strings.TrimSpace(string(output)))
}
//nolint:tparallel // Sub tests need to run sequentially.
@@ -388,25 +410,33 @@ func TestAgent_SessionTTYShell(t *testing.T) {
// it seems like it could be either.
t.Skip("ConPTY appears to be inconsistent on Windows.")
}
- session := setupSSHSession(t, agentsdk.Manifest{}, codersdk.ServiceBannerConfig{}, nil)
- command := "sh"
- if runtime.GOOS == "windows" {
- command = "cmd.exe"
+
+ for _, port := range sshPorts {
+ port := port
+ t.Run(fmt.Sprintf("(%d)", port), func(t *testing.T) {
+ t.Parallel()
+
+ session := setupSSHSessionOnPort(t, agentsdk.Manifest{}, codersdk.ServiceBannerConfig{}, nil, port)
+ command := "sh"
+ if runtime.GOOS == "windows" {
+ command = "cmd.exe"
+ }
+ err := session.RequestPty("xterm", 128, 128, ssh.TerminalModes{})
+ require.NoError(t, err)
+ ptty := ptytest.New(t)
+ session.Stdout = ptty.Output()
+ session.Stderr = ptty.Output()
+ session.Stdin = ptty.Input()
+ err = session.Start(command)
+ require.NoError(t, err)
+ _ = ptty.Peek(ctx, 1) // wait for the prompt
+ ptty.WriteLine("echo test")
+ ptty.ExpectMatch("test")
+ ptty.WriteLine("exit")
+ err = session.Wait()
+ require.NoError(t, err)
+ })
}
- err := session.RequestPty("xterm", 128, 128, ssh.TerminalModes{})
- require.NoError(t, err)
- ptty := ptytest.New(t)
- session.Stdout = ptty.Output()
- session.Stderr = ptty.Output()
- session.Stdin = ptty.Input()
- err = session.Start(command)
- require.NoError(t, err)
- _ = ptty.Peek(ctx, 1) // wait for the prompt
- ptty.WriteLine("echo test")
- ptty.ExpectMatch("test")
- ptty.WriteLine("exit")
- err = session.Wait()
- require.NoError(t, err)
}
func TestAgent_SessionTTYExitCode(t *testing.T) {
@@ -600,37 +630,41 @@ func TestAgent_Session_TTY_MOTD_Update(t *testing.T) {
//nolint:dogsled // Allow the blank identifiers.
conn, client, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, setSBInterval)
- sshClient, err := conn.SSHClient(ctx)
- require.NoError(t, err)
- t.Cleanup(func() {
- _ = sshClient.Close()
- })
-
//nolint:paralleltest // These tests need to swap the banner func.
- for i, test := range tests {
- test := test
- t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
- // Set new banner func and wait for the agent to call it to update the
- // banner.
- ready := make(chan struct{}, 2)
- client.SetAnnouncementBannersFunc(func() ([]codersdk.BannerConfig, error) {
- select {
- case ready <- struct{}{}:
- default:
- }
- return []codersdk.BannerConfig{test.banner}, nil
- })
- <-ready
- <-ready // Wait for two updates to ensure the value has propagated.
+ for _, port := range sshPorts {
+ port := port
- session, err := sshClient.NewSession()
- require.NoError(t, err)
- t.Cleanup(func() {
- _ = session.Close()
- })
-
- testSessionOutput(t, session, test.expected, test.unexpected, nil)
+ sshClient, err := conn.SSHClientOnPort(ctx, port)
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = sshClient.Close()
})
+
+ for i, test := range tests {
+ test := test
+ t.Run(fmt.Sprintf("(:%d)/%d", port, i), func(t *testing.T) {
+ // Set new banner func and wait for the agent to call it to update the
+ // banner.
+ ready := make(chan struct{}, 2)
+ client.SetAnnouncementBannersFunc(func() ([]codersdk.BannerConfig, error) {
+ select {
+ case ready <- struct{}{}:
+ default:
+ }
+ return []codersdk.BannerConfig{test.banner}, nil
+ })
+ <-ready
+ <-ready // Wait for two updates to ensure the value has propagated.
+
+ session, err := sshClient.NewSession()
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = session.Close()
+ })
+
+ testSessionOutput(t, session, test.expected, test.unexpected, nil)
+ })
+ }
}
}
@@ -922,7 +956,7 @@ func TestAgent_SFTP(t *testing.T) {
home = "/" + strings.ReplaceAll(home, "\\", "/")
}
//nolint:dogsled
- conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+ conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
sshClient, err := conn.SSHClient(ctx)
require.NoError(t, err)
defer sshClient.Close()
@@ -945,6 +979,10 @@ func TestAgent_SFTP(t *testing.T) {
require.NoError(t, err)
_, err = os.Stat(tempFile)
require.NoError(t, err)
+
+ // Close the client to trigger disconnect event.
+ _ = client.Close()
+ assertConnectionReport(t, agentClient, proto.Connection_SSH, 0, "")
}
func TestAgent_SCP(t *testing.T) {
@@ -954,7 +992,7 @@ func TestAgent_SCP(t *testing.T) {
defer cancel()
//nolint:dogsled
- conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+ conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
sshClient, err := conn.SSHClient(ctx)
require.NoError(t, err)
defer sshClient.Close()
@@ -967,6 +1005,10 @@ func TestAgent_SCP(t *testing.T) {
require.NoError(t, err)
_, err = os.Stat(tempFile)
require.NoError(t, err)
+
+ // Close the client to trigger disconnect event.
+ scpClient.Close()
+ assertConnectionReport(t, agentClient, proto.Connection_SSH, 0, "")
}
func TestAgent_FileTransferBlocked(t *testing.T) {
@@ -991,7 +1033,7 @@ func TestAgent_FileTransferBlocked(t *testing.T) {
defer cancel()
//nolint:dogsled
- conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
+ conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
o.BlockFileTransfer = true
})
sshClient, err := conn.SSHClient(ctx)
@@ -1000,6 +1042,8 @@ func TestAgent_FileTransferBlocked(t *testing.T) {
_, err = sftp.NewClient(sshClient)
require.Error(t, err)
assertFileTransferBlocked(t, err.Error())
+
+ assertConnectionReport(t, agentClient, proto.Connection_SSH, agentssh.BlockedFileTransferErrorCode, "")
})
t.Run("SCP with go-scp package", func(t *testing.T) {
@@ -1009,7 +1053,7 @@ func TestAgent_FileTransferBlocked(t *testing.T) {
defer cancel()
//nolint:dogsled
- conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
+ conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
o.BlockFileTransfer = true
})
sshClient, err := conn.SSHClient(ctx)
@@ -1022,6 +1066,8 @@ func TestAgent_FileTransferBlocked(t *testing.T) {
err = scpClient.CopyFile(context.Background(), strings.NewReader("hello world"), tempFile, "0755")
require.Error(t, err)
assertFileTransferBlocked(t, err.Error())
+
+ assertConnectionReport(t, agentClient, proto.Connection_SSH, agentssh.BlockedFileTransferErrorCode, "")
})
t.Run("Forbidden commands", func(t *testing.T) {
@@ -1035,7 +1081,7 @@ func TestAgent_FileTransferBlocked(t *testing.T) {
defer cancel()
//nolint:dogsled
- conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
+ conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
o.BlockFileTransfer = true
})
sshClient, err := conn.SSHClient(ctx)
@@ -1057,6 +1103,8 @@ func TestAgent_FileTransferBlocked(t *testing.T) {
msg, err := io.ReadAll(stdout)
require.NoError(t, err)
assertFileTransferBlocked(t, string(msg))
+
+ assertConnectionReport(t, agentClient, proto.Connection_SSH, agentssh.BlockedFileTransferErrorCode, "")
})
}
})
@@ -1665,8 +1713,16 @@ func TestAgent_ReconnectingPTY(t *testing.T) {
defer cancel()
//nolint:dogsled
- conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
+ conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
id := uuid.New()
+
+ // Test that the connection is reported. This must be tested on the
+ // first connection because assertConnectionReport verifies the first
+ // connect/disconnect report pair.
+ netConn0, err := conn.ReconnectingPTY(ctx, id, 80, 80, "bash --norc")
+ require.NoError(t, err)
+ _ = netConn0.Close()
+ assertConnectionReport(t, agentClient, proto.Connection_RECONNECTING_PTY, 0, "")
+
// --norc disables executing .bashrc, which is often used to customize the bash prompt
netConn1, err := conn.ReconnectingPTY(ctx, id, 80, 80, "bash --norc")
require.NoError(t, err)
@@ -1802,7 +1858,7 @@ func TestAgent_ReconnectingPTYContainer(t *testing.T) {
// nolint: dogsled
conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
- o.ExperimentalContainersEnabled = true
+ o.ExperimentalDevcontainersEnabled = true
})
ac, err := conn.ReconnectingPTY(ctx, uuid.New(), 80, 80, "/bin/sh", func(arp *workspacesdk.AgentReconnectingPTYInit) {
arp.Container = ct.Container.ID
@@ -2385,6 +2441,17 @@ func setupSSHSession(
banner codersdk.BannerConfig,
prepareFS func(fs afero.Fs),
opts ...func(*agenttest.Client, *agent.Options),
+) *ssh.Session {
+ return setupSSHSessionOnPort(t, manifest, banner, prepareFS, workspacesdk.AgentSSHPort, opts...)
+}
+
+func setupSSHSessionOnPort(
+ t *testing.T,
+ manifest agentsdk.Manifest,
+ banner codersdk.BannerConfig,
+ prepareFS func(fs afero.Fs),
+ port uint16,
+ opts ...func(*agenttest.Client, *agent.Options),
) *ssh.Session {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
@@ -2398,7 +2465,7 @@ func setupSSHSession(
if prepareFS != nil {
prepareFS(fs)
}
- sshClient, err := conn.SSHClient(ctx)
+ sshClient, err := conn.SSHClientOnPort(ctx, port)
require.NoError(t, err)
t.Cleanup(func() {
_ = sshClient.Close()
@@ -2763,3 +2830,35 @@ func requireEcho(t *testing.T, conn net.Conn) {
require.NoError(t, err)
require.Equal(t, "test", string(b))
}
+
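+// assertConnectionReport waits for a connect/disconnect report pair from the
+// fake agent API and verifies the connection type, status code and (optional)
+// disconnect reason.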
+func assertConnectionReport(t testing.TB, agentClient *agenttest.Client, connectionType proto.Connection_Type, status int, reason string) {
+ t.Helper()
+
+ var reports []*proto.ReportConnectionRequest
+ if !assert.Eventually(t, func() bool {
+ reports = agentClient.GetConnectionReports()
+ return len(reports) >= 2
+ }, testutil.WaitMedium, testutil.IntervalFast, "waiting for 2 connection reports or more; got %d", len(reports)) {
+ return
+ }
+
+ assert.Len(t, reports, 2, "want 2 connection reports")
+
+ assert.Equal(t, proto.Connection_CONNECT, reports[0].GetConnection().GetAction(), "first report should be connect")
+ assert.Equal(t, proto.Connection_DISCONNECT, reports[1].GetConnection().GetAction(), "second report should be disconnect")
+ assert.Equal(t, connectionType, reports[0].GetConnection().GetType(), "connect type should be %s", connectionType)
+ assert.Equal(t, connectionType, reports[1].GetConnection().GetType(), "disconnect type should be %s", connectionType)
+ t1 := reports[0].GetConnection().GetTimestamp().AsTime()
+ t2 := reports[1].GetConnection().GetTimestamp().AsTime()
+ assert.True(t, t1.Before(t2) || t1.Equal(t2), "connect timestamp should be before or equal to disconnect timestamp")
+ assert.NotEmpty(t, reports[0].GetConnection().GetIp(), "connect ip should not be empty")
+ assert.NotEmpty(t, reports[1].GetConnection().GetIp(), "disconnect ip should not be empty")
+ assert.Equal(t, 0, int(reports[0].GetConnection().GetStatusCode()), "connect status code should be 0")
+ assert.Equal(t, status, int(reports[1].GetConnection().GetStatusCode()), "disconnect status code should be %d", status)
+ assert.Equal(t, "", reports[0].GetConnection().GetReason(), "connect reason should be empty")
+ if reason != "" {
+ assert.Contains(t, reports[1].GetConnection().GetReason(), reason, "disconnect reason should contain %s", reason)
+ } else {
+ t.Logf("connection report disconnect reason: %s", reports[1].GetConnection().GetReason())
+ }
+}
diff --git a/agent/agentcontainers/containers_dockercli.go b/agent/agentcontainers/containers_dockercli.go
index 27e5f835d5..5218153bde 100644
--- a/agent/agentcontainers/containers_dockercli.go
+++ b/agent/agentcontainers/containers_dockercli.go
@@ -253,11 +253,16 @@ func (dcl *DockerCLILister) List(ctx context.Context) (codersdk.WorkspaceAgentLi
return codersdk.WorkspaceAgentListContainersResponse{}, xerrors.Errorf("scan docker ps output: %w", err)
}
+ res := codersdk.WorkspaceAgentListContainersResponse{
+ Containers: make([]codersdk.WorkspaceAgentDevcontainer, 0, len(ids)),
+ Warnings: make([]string, 0),
+ }
dockerPsStderr := strings.TrimSpace(stderrBuf.String())
+ if dockerPsStderr != "" {
+ res.Warnings = append(res.Warnings, dockerPsStderr)
+ }
if len(ids) == 0 {
- return codersdk.WorkspaceAgentListContainersResponse{
- Warnings: []string{dockerPsStderr},
- }, nil
+ return res, nil
}
// now we can get the detailed information for each container
@@ -273,13 +278,10 @@ func (dcl *DockerCLILister) List(ctx context.Context) (codersdk.WorkspaceAgentLi
return codersdk.WorkspaceAgentListContainersResponse{}, xerrors.Errorf("run docker inspect: %w", err)
}
- res := codersdk.WorkspaceAgentListContainersResponse{
- Containers: make([]codersdk.WorkspaceAgentDevcontainer, len(ins)),
- }
- for idx, in := range ins {
+ for _, in := range ins {
out, warns := convertDockerInspect(in)
res.Warnings = append(res.Warnings, warns...)
- res.Containers[idx] = out
+ res.Containers = append(res.Containers, out)
}
if dockerPsStderr != "" {
diff --git a/agent/agentssh/agentssh.go b/agent/agentssh/agentssh.go
index 3b09df0e38..816bdf5555 100644
--- a/agent/agentssh/agentssh.go
+++ b/agent/agentssh/agentssh.go
@@ -12,6 +12,7 @@ import (
"os/user"
"path/filepath"
"runtime"
+ "slices"
"strings"
"sync"
"time"
@@ -24,11 +25,11 @@ import (
"github.com/spf13/afero"
"go.uber.org/atomic"
gossh "golang.org/x/crypto/ssh"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
+ "github.com/coder/coder/v2/agent/agentcontainers"
"github.com/coder/coder/v2/agent/agentexec"
"github.com/coder/coder/v2/agent/agentrsa"
"github.com/coder/coder/v2/agent/usershell"
@@ -60,6 +61,14 @@ const (
// MagicSessionTypeEnvironmentVariable is used to track the purpose behind an SSH connection.
// This is stripped from any commands being executed, and is counted towards connection stats.
MagicSessionTypeEnvironmentVariable = "CODER_SSH_SESSION_TYPE"
+ // ContainerEnvironmentVariable is used to specify the target container for an SSH connection.
+ // This is stripped from any commands being executed.
+ // Only available if CODER_AGENT_DEVCONTAINERS_ENABLE=true.
+ ContainerEnvironmentVariable = "CODER_CONTAINER"
+ // ContainerUserEnvironmentVariable is used to specify the container user for
+ // an SSH connection.
+ // Only available if CODER_AGENT_DEVCONTAINERS_ENABLE=true.
+ ContainerUserEnvironmentVariable = "CODER_CONTAINER_USER"
)
// MagicSessionType enums.
@@ -78,6 +87,8 @@ const (
// BlockedFileTransferCommands contains a list of restricted file transfer commands.
var BlockedFileTransferCommands = []string{"nc", "rsync", "scp", "sftp"}
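+// reportConnectionFunc is called when a new connection is established and
+// returns a function that is called with the exit code and reason when the
+// connection is closed.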
+type reportConnectionFunc func(id uuid.UUID, sessionType MagicSessionType, ip string) (disconnected func(code int, reason string))
+
// Config sets configuration parameters for the agent SSH server.
type Config struct {
// MaxTimeout sets the absolute connection timeout, none if empty. If set to
@@ -100,6 +111,11 @@ type Config struct {
X11DisplayOffset *int
// BlockFileTransfer restricts use of file transfer applications.
BlockFileTransfer bool
+ // ReportConnection is called for each new connection and returns a
+ // function to report its disconnection; used for connection auditing.
+ ReportConnection reportConnectionFunc
+ // Experimental: allow connecting to running containers if
+ // CODER_AGENT_DEVCONTAINERS_ENABLE=true.
+ ExperimentalDevContainersEnabled bool
}
type Server struct {
@@ -152,6 +168,9 @@ func NewServer(ctx context.Context, logger slog.Logger, prometheusRegistry *prom
return home
}
}
+ if config.ReportConnection == nil {
+ config.ReportConnection = func(uuid.UUID, MagicSessionType, string) func(int, string) { return func(int, string) {} }
+ }
forwardHandler := &ssh.ForwardedTCPHandler{}
unixForwardHandler := newForwardedUnixHandler(logger)
@@ -174,7 +193,7 @@ func NewServer(ctx context.Context, logger slog.Logger, prometheusRegistry *prom
ChannelHandlers: map[string]ssh.ChannelHandler{
"direct-tcpip": func(srv *ssh.Server, conn *gossh.ServerConn, newChan gossh.NewChannel, ctx ssh.Context) {
// Wrapper is designed to find and track JetBrains Gateway connections.
- wrapped := NewJetbrainsChannelWatcher(ctx, s.logger, newChan, &s.connCountJetBrains)
+ wrapped := NewJetbrainsChannelWatcher(ctx, s.logger, s.config.ReportConnection, newChan, &s.connCountJetBrains)
ssh.DirectTCPIPHandler(srv, conn, wrapped, ctx)
},
"direct-streamlocal@openssh.com": directStreamLocalHandler,
@@ -288,6 +307,51 @@ func extractMagicSessionType(env []string) (magicType MagicSessionType, rawType
})
}
+// sessionCloseTracker is a wrapper around Session that tracks the exit code.
+type sessionCloseTracker struct {
+ ssh.Session
+ exitOnce sync.Once
+ code atomic.Int64
+}
+
+var _ ssh.Session = &sessionCloseTracker{}
+
+func (s *sessionCloseTracker) track(code int) {
+ s.exitOnce.Do(func() {
+ s.code.Store(int64(code))
+ })
+}
+
+func (s *sessionCloseTracker) exitCode() int {
+ return int(s.code.Load())
+}
+
+func (s *sessionCloseTracker) Exit(code int) error {
+ s.track(code)
+ return s.Session.Exit(code)
+}
+
+func (s *sessionCloseTracker) Close() error {
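+ // Closing without an explicit Exit is recorded as a non-zero exit code.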
+ s.track(1)
+ return s.Session.Close()
+}
+
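+// extractContainerInfo returns the target container and container user
+// specified via the ContainerEnvironmentVariable and
+// ContainerUserEnvironmentVariable entries in env, and strips both variables
+// from the returned environment.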
+func extractContainerInfo(env []string) (container, containerUser string, filteredEnv []string) {
+ for _, kv := range env {
+ if strings.HasPrefix(kv, ContainerEnvironmentVariable+"=") {
+ container = strings.TrimPrefix(kv, ContainerEnvironmentVariable+"=")
+ }
+
+ if strings.HasPrefix(kv, ContainerUserEnvironmentVariable+"=") {
+ containerUser = strings.TrimPrefix(kv, ContainerUserEnvironmentVariable+"=")
+ }
+ }
+
+ return container, containerUser, slices.DeleteFunc(env, func(kv string) bool {
+ return strings.HasPrefix(kv, ContainerEnvironmentVariable+"=") || strings.HasPrefix(kv, ContainerUserEnvironmentVariable+"=")
+ })
+}
+
func (s *Server) sessionHandler(session ssh.Session) {
ctx := session.Context()
id := uuid.New()
@@ -300,16 +364,23 @@ func (s *Server) sessionHandler(session ssh.Session) {
)
logger.Info(ctx, "handling ssh session")
+ env := session.Environ()
+ magicType, magicTypeRaw, env := extractMagicSessionType(env)
+
if !s.trackSession(session, true) {
+ reason := "unable to accept new session, server is closing"
+ // Report connection attempt even if we couldn't accept it.
+ disconnected := s.config.ReportConnection(id, magicType, session.RemoteAddr().String())
+ defer disconnected(1, reason)
+
+ logger.Info(ctx, reason)
// See (*Server).Close() for why we call Close instead of Exit.
_ = session.Close()
- logger.Info(ctx, "unable to accept new session, server is closing")
return
}
defer s.trackSession(session, false)
- env := session.Environ()
- magicType, magicTypeRaw, env := extractMagicSessionType(env)
+ reportSession := true
switch magicType {
case MagicSessionTypeVSCode:
@@ -318,6 +389,7 @@ func (s *Server) sessionHandler(session ssh.Session) {
case MagicSessionTypeJetBrains:
// Do nothing here because JetBrains launches hundreds of ssh sessions.
// We instead track JetBrains in the single persistent tcp forwarding channel.
+ reportSession = false
case MagicSessionTypeSSH:
s.connCountSSHSession.Add(1)
defer s.connCountSSHSession.Add(-1)
@@ -325,6 +397,20 @@ func (s *Server) sessionHandler(session ssh.Session) {
logger.Warn(ctx, "invalid magic ssh session type specified", slog.F("raw_type", magicTypeRaw))
}
+ closeCause := func(string) {}
+ if reportSession {
+ var reason string
+ closeCause = func(r string) { reason = r }
+
+ scr := &sessionCloseTracker{Session: session}
+ session = scr
+
+ disconnected := s.config.ReportConnection(id, magicType, session.RemoteAddr().String())
+ defer func() {
+ disconnected(scr.exitCode(), reason)
+ }()
+ }
+
if s.fileTransferBlocked(session) {
s.logger.Warn(ctx, "file transfer blocked", slog.F("session_subsystem", session.Subsystem()), slog.F("raw_command", session.RawCommand()))
@@ -333,17 +419,35 @@ func (s *Server) sessionHandler(session ssh.Session) {
errorMessage := fmt.Sprintf("\x02%s\n", BlockedFileTransferErrorMessage)
_, _ = session.Write([]byte(errorMessage))
}
+ closeCause("file transfer blocked")
_ = session.Exit(BlockedFileTransferErrorCode)
return
}
+ container, containerUser, env := extractContainerInfo(env)
+ if container != "" {
+ s.logger.Debug(ctx, "container info",
+ slog.F("container", container),
+ slog.F("container_user", containerUser),
+ )
+ }
+
switch ss := session.Subsystem(); ss {
case "":
case "sftp":
- s.sftpHandler(logger, session)
+ if s.config.ExperimentalDevContainersEnabled && container != "" {
+ closeCause("sftp not yet supported with containers")
+ _ = session.Exit(1)
+ return
+ }
+ err := s.sftpHandler(logger, session)
+ if err != nil {
+ closeCause(err.Error())
+ }
return
default:
logger.Warn(ctx, "unsupported subsystem", slog.F("subsystem", ss))
+ closeCause(fmt.Sprintf("unsupported subsystem: %s", ss))
_ = session.Exit(1)
return
}
@@ -352,14 +456,15 @@ func (s *Server) sessionHandler(session ssh.Session) {
if hasX11 {
display, handled := s.x11Handler(session.Context(), x11)
if !handled {
- _ = session.Exit(1)
logger.Error(ctx, "x11 handler failed")
+ closeCause("x11 handler failed")
+ _ = session.Exit(1)
return
}
env = append(env, fmt.Sprintf("DISPLAY=localhost:%d.%d", display, x11.ScreenNumber))
}
- err := s.sessionStart(logger, session, env, magicType)
+ err := s.sessionStart(logger, session, env, magicType, container, containerUser)
var exitError *exec.ExitError
if xerrors.As(err, &exitError) {
code := exitError.ExitCode()
@@ -380,6 +485,8 @@ func (s *Server) sessionHandler(session ssh.Session) {
slog.F("exit_code", code),
)
+ closeCause(fmt.Sprintf("process exited with error status: %d", exitError.ExitCode()))
+
// TODO(mafredri): For signal exit, there's also an "exit-signal"
// request (session.Exit sends "exit-status"), however, since it's
// not implemented on the session interface and not used by
@@ -391,6 +498,7 @@ func (s *Server) sessionHandler(session ssh.Session) {
logger.Warn(ctx, "ssh session failed", slog.Error(err))
// This exit code is designed to be unlikely to be confused for a legit exit code
// from the process.
+ closeCause(err.Error())
_ = session.Exit(MagicSessionErrorCode)
return
}
@@ -429,18 +537,27 @@ func (s *Server) fileTransferBlocked(session ssh.Session) bool {
return false
}
-func (s *Server) sessionStart(logger slog.Logger, session ssh.Session, env []string, magicType MagicSessionType) (retErr error) {
+func (s *Server) sessionStart(logger slog.Logger, session ssh.Session, env []string, magicType MagicSessionType, container, containerUser string) (retErr error) {
ctx := session.Context()
magicTypeLabel := magicTypeMetricLabel(magicType)
sshPty, windowSize, isPty := session.Pty()
+ ptyLabel := "no"
+ if isPty {
+ ptyLabel = "yes"
+ }
- cmd, err := s.CreateCommand(ctx, session.RawCommand(), env, nil)
- if err != nil {
- ptyLabel := "no"
- if isPty {
- ptyLabel = "yes"
+ var ei usershell.EnvInfoer
+ var err error
+ if s.config.ExperimentalDevContainersEnabled && container != "" {
+ ei, err = agentcontainers.EnvInfo(ctx, s.Execer, container, containerUser)
+ if err != nil {
+ s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, ptyLabel, "container_env_info").Add(1)
+ return err
}
+ }
+ cmd, err := s.CreateCommand(ctx, session.RawCommand(), env, ei)
+ if err != nil {
s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, ptyLabel, "create_command").Add(1)
return err
}
@@ -448,11 +565,6 @@ func (s *Server) sessionStart(logger slog.Logger, session ssh.Session, env []str
if ssh.AgentRequested(session) {
l, err := ssh.NewAgentListener()
if err != nil {
- ptyLabel := "no"
- if isPty {
- ptyLabel = "yes"
- }
-
s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, ptyLabel, "listener").Add(1)
return xerrors.Errorf("new agent listener: %w", err)
}
@@ -650,7 +762,7 @@ func handleSignal(logger slog.Logger, ssig ssh.Signal, signaler interface{ Signa
}
}
-func (s *Server) sftpHandler(logger slog.Logger, session ssh.Session) {
+func (s *Server) sftpHandler(logger slog.Logger, session ssh.Session) error {
s.metrics.sftpConnectionsTotal.Add(1)
ctx := session.Context()
@@ -674,7 +786,7 @@ func (s *Server) sftpHandler(logger slog.Logger, session ssh.Session) {
server, err := sftp.NewServer(session, opts...)
if err != nil {
logger.Debug(ctx, "initialize sftp server", slog.Error(err))
- return
+ return xerrors.Errorf("initialize sftp server: %w", err)
}
defer server.Close()
@@ -689,11 +801,12 @@ func (s *Server) sftpHandler(logger slog.Logger, session ssh.Session) {
// code but `scp` on macOS does (when using the default
// SFTP backend).
_ = session.Exit(0)
- return
+ return nil
}
logger.Warn(ctx, "sftp server closed with error", slog.Error(err))
s.metrics.sftpServerErrors.Add(1)
_ = session.Exit(1)
+ return xerrors.Errorf("sftp server closed with error: %w", err)
}
// CreateCommand processes raw command input with OpenSSH-like behavior.
diff --git a/agent/agentssh/jetbrainstrack.go b/agent/agentssh/jetbrainstrack.go
index 534f2899b1..9b2fdf83b2 100644
--- a/agent/agentssh/jetbrainstrack.go
+++ b/agent/agentssh/jetbrainstrack.go
@@ -6,6 +6,7 @@ import (
"sync"
"github.com/gliderlabs/ssh"
+ "github.com/google/uuid"
"go.uber.org/atomic"
gossh "golang.org/x/crypto/ssh"
@@ -28,9 +29,11 @@ type JetbrainsChannelWatcher struct {
gossh.NewChannel
jetbrainsCounter *atomic.Int64
logger slog.Logger
+ originAddr string
+ reportConnection reportConnectionFunc
}
-func NewJetbrainsChannelWatcher(ctx ssh.Context, logger slog.Logger, newChannel gossh.NewChannel, counter *atomic.Int64) gossh.NewChannel {
+func NewJetbrainsChannelWatcher(ctx ssh.Context, logger slog.Logger, reportConnection reportConnectionFunc, newChannel gossh.NewChannel, counter *atomic.Int64) gossh.NewChannel {
d := localForwardChannelData{}
if err := gossh.Unmarshal(newChannel.ExtraData(), &d); err != nil {
// If the data fails to unmarshal, do nothing.
@@ -61,12 +64,17 @@ func NewJetbrainsChannelWatcher(ctx ssh.Context, logger slog.Logger, newChannel
NewChannel: newChannel,
jetbrainsCounter: counter,
logger: logger.With(slog.F("destination_port", d.DestPort)),
+ originAddr: d.OriginAddr,
+ reportConnection: reportConnection,
}
}
func (w *JetbrainsChannelWatcher) Accept() (gossh.Channel, <-chan *gossh.Request, error) {
+ disconnected := w.reportConnection(uuid.New(), MagicSessionTypeJetBrains, w.originAddr)
+
c, r, err := w.NewChannel.Accept()
if err != nil {
+ disconnected(1, err.Error())
return c, r, err
}
w.jetbrainsCounter.Add(1)
@@ -77,6 +85,7 @@ func (w *JetbrainsChannelWatcher) Accept() (gossh.Channel, <-chan *gossh.Request
Channel: c,
done: func() {
w.jetbrainsCounter.Add(-1)
+ disconnected(0, "")
// nolint: gocritic // JetBrains is a proper noun and should be capitalized
w.logger.Debug(context.Background(), "JetBrains watcher channel closed")
},
diff --git a/agent/agenttest/client.go b/agent/agenttest/client.go
index f176691632..e0a3bc4edd 100644
--- a/agent/agenttest/client.go
+++ b/agent/agenttest/client.go
@@ -3,6 +3,7 @@ package agenttest
import (
"context"
"io"
+ "slices"
"sync"
"sync/atomic"
"testing"
@@ -12,7 +13,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/exp/maps"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"google.golang.org/protobuf/types/known/durationpb"
"google.golang.org/protobuf/types/known/emptypb"
@@ -159,20 +159,24 @@ func (c *Client) SetLogsChannel(ch chan<- *agentproto.BatchCreateLogsRequest) {
c.fakeAgentAPI.SetLogsChannel(ch)
}
+func (c *Client) GetConnectionReports() []*agentproto.ReportConnectionRequest {
+ return c.fakeAgentAPI.GetConnectionReports()
+}
+
type FakeAgentAPI struct {
sync.Mutex
t testing.TB
logger slog.Logger
- manifest *agentproto.Manifest
- startupCh chan *agentproto.Startup
- statsCh chan *agentproto.Stats
- appHealthCh chan *agentproto.BatchUpdateAppHealthRequest
- logsCh chan<- *agentproto.BatchCreateLogsRequest
- lifecycleStates []codersdk.WorkspaceAgentLifecycle
- metadata map[string]agentsdk.Metadata
- timings []*agentproto.Timing
- connections []*agentproto.Connection
+ manifest *agentproto.Manifest
+ startupCh chan *agentproto.Startup
+ statsCh chan *agentproto.Stats
+ appHealthCh chan *agentproto.BatchUpdateAppHealthRequest
+ logsCh chan<- *agentproto.BatchCreateLogsRequest
+ lifecycleStates []codersdk.WorkspaceAgentLifecycle
+ metadata map[string]agentsdk.Metadata
+ timings []*agentproto.Timing
+ connectionReports []*agentproto.ReportConnectionRequest
getAnnouncementBannersFunc func() ([]codersdk.BannerConfig, error)
getResourcesMonitoringConfigurationFunc func() (*agentproto.GetResourcesMonitoringConfigurationResponse, error)
@@ -349,12 +353,18 @@ func (f *FakeAgentAPI) ScriptCompleted(_ context.Context, req *agentproto.Worksp
func (f *FakeAgentAPI) ReportConnection(_ context.Context, req *agentproto.ReportConnectionRequest) (*emptypb.Empty, error) {
f.Lock()
- f.connections = append(f.connections, req.GetConnection())
+ f.connectionReports = append(f.connectionReports, req)
f.Unlock()
return &emptypb.Empty{}, nil
}
+func (f *FakeAgentAPI) GetConnectionReports() []*agentproto.ReportConnectionRequest {
+ f.Lock()
+ defer f.Unlock()
+ return slices.Clone(f.connectionReports)
+}
+
func NewFakeAgentAPI(t testing.TB, logger slog.Logger, manifest *agentproto.Manifest, statsCh chan *agentproto.Stats) *FakeAgentAPI {
return &FakeAgentAPI{
t: t,
diff --git a/agent/reconnectingpty/buffered.go b/agent/reconnectingpty/buffered.go
index 6f314333a7..fb3c9907f4 100644
--- a/agent/reconnectingpty/buffered.go
+++ b/agent/reconnectingpty/buffered.go
@@ -5,11 +5,11 @@ import (
"errors"
"io"
"net"
+ "slices"
"time"
"github.com/armon/circbuf"
"github.com/prometheus/client_golang/prometheus"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
diff --git a/agent/reconnectingpty/server.go b/agent/reconnectingpty/server.go
index ab4ce854c7..33ed76a73c 100644
--- a/agent/reconnectingpty/server.go
+++ b/agent/reconnectingpty/server.go
@@ -20,26 +20,35 @@ import (
"github.com/coder/coder/v2/codersdk/workspacesdk"
)
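+// reportConnectionFunc reports a new reconnecting PTY connection and returns
+// a function to report its disconnection.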
+type reportConnectionFunc func(id uuid.UUID, ip string) (disconnected func(code int, reason string))
+
type Server struct {
logger slog.Logger
connectionsTotal prometheus.Counter
errorsTotal *prometheus.CounterVec
commandCreator *agentssh.Server
+ reportConnection reportConnectionFunc
connCount atomic.Int64
reconnectingPTYs sync.Map
timeout time.Duration
- ExperimentalContainersEnabled bool
+ ExperimentalDevcontainersEnabled bool
}
// NewServer returns a new ReconnectingPTY server
-func NewServer(logger slog.Logger, commandCreator *agentssh.Server,
+func NewServer(logger slog.Logger, commandCreator *agentssh.Server, reportConnection reportConnectionFunc,
connectionsTotal prometheus.Counter, errorsTotal *prometheus.CounterVec,
timeout time.Duration, opts ...func(*Server),
) *Server {
+ if reportConnection == nil {
+ reportConnection = func(uuid.UUID, string) func(int, string) {
+ return func(int, string) {}
+ }
+ }
s := &Server{
logger: logger,
commandCreator: commandCreator,
+ reportConnection: reportConnection,
connectionsTotal: connectionsTotal,
errorsTotal: errorsTotal,
timeout: timeout,
@@ -67,20 +76,31 @@ func (s *Server) Serve(ctx, hardCtx context.Context, l net.Listener) (retErr err
slog.F("local", conn.LocalAddr().String()))
clog.Info(ctx, "accepted conn")
wg.Add(1)
+ disconnected := s.reportConnection(uuid.New(), conn.RemoteAddr().String())
closed := make(chan struct{})
go func() {
+ defer wg.Done()
select {
case <-closed:
case <-hardCtx.Done():
+ disconnected(1, "server shut down")
_ = conn.Close()
}
- wg.Done()
}()
wg.Add(1)
go func() {
defer close(closed)
defer wg.Done()
- _ = s.handleConn(ctx, clog, conn)
+ err := s.handleConn(ctx, clog, conn)
+ if err != nil {
+ if ctx.Err() != nil {
+ disconnected(1, "server shutting down")
+ } else {
+ disconnected(1, err.Error())
+ }
+ } else {
+ disconnected(0, "")
+ }
}()
}
wg.Wait()
@@ -167,7 +187,7 @@ func (s *Server) handleConn(ctx context.Context, logger slog.Logger, conn net.Co
}()
var ei usershell.EnvInfoer
- if s.ExperimentalContainersEnabled && msg.Container != "" {
+ if s.ExperimentalDevcontainersEnabled && msg.Container != "" {
dei, err := agentcontainers.EnvInfo(ctx, s.commandCreator.Execer, msg.Container, msg.ContainerUser)
if err != nil {
return xerrors.Errorf("get container env info: %w", err)
diff --git a/agent/usershell/usershell_darwin.go b/agent/usershell/usershell_darwin.go
index 5f221bc43e..acc990db83 100644
--- a/agent/usershell/usershell_darwin.go
+++ b/agent/usershell/usershell_darwin.go
@@ -18,7 +18,7 @@ func Get(username string) (string, error) {
return "", xerrors.Errorf("username is nonlocal path: %s", username)
}
//nolint: gosec // input checked above
- out, _ := exec.Command("dscl", ".", "-read", filepath.Join("/Users", username), "UserShell").Output()
+ out, _ := exec.Command("dscl", ".", "-read", filepath.Join("/Users", username), "UserShell").Output() //nolint:gocritic
s, ok := strings.CutPrefix(string(out), "UserShell: ")
if ok {
return strings.TrimSpace(s), nil
diff --git a/buildinfo/resources/.gitignore b/buildinfo/resources/.gitignore
new file mode 100644
index 0000000000..40679b193b
--- /dev/null
+++ b/buildinfo/resources/.gitignore
@@ -0,0 +1 @@
+*.syso
diff --git a/buildinfo/resources/resources.go b/buildinfo/resources/resources.go
new file mode 100644
index 0000000000..cd1e3e70af
--- /dev/null
+++ b/buildinfo/resources/resources.go
@@ -0,0 +1,8 @@
+// This package is used for embedding .syso resource files into the binary
+// during build and does not contain any code. During build, .syso files will be
+// dropped in this directory and then removed after the build completes.
+//
+// This package must be imported by all binaries for this to work.
+//
+// See build_go.sh for more details.
+package resources
diff --git a/cli/agent.go b/cli/agent.go
index 8676381f9e..2dba78ccdc 100644
--- a/cli/agent.go
+++ b/cli/agent.go
@@ -41,22 +41,23 @@ import (
func (r *RootCmd) workspaceAgent() *serpent.Command {
var (
- auth string
- logDir string
- scriptDataDir string
- pprofAddress string
- noReap bool
- sshMaxTimeout time.Duration
- tailnetListenPort int64
- prometheusAddress string
- debugAddress string
- slogHumanPath string
- slogJSONPath string
- slogStackdriverPath string
- blockFileTransfer bool
- agentHeaderCommand string
- agentHeader []string
- devcontainersEnabled bool
+ auth string
+ logDir string
+ scriptDataDir string
+ pprofAddress string
+ noReap bool
+ sshMaxTimeout time.Duration
+ tailnetListenPort int64
+ prometheusAddress string
+ debugAddress string
+ slogHumanPath string
+ slogJSONPath string
+ slogStackdriverPath string
+ blockFileTransfer bool
+ agentHeaderCommand string
+ agentHeader []string
+
+ experimentalDevcontainersEnabled bool
)
cmd := &serpent.Command{
Use: "agent",
@@ -321,7 +322,7 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
}
var containerLister agentcontainers.Lister
- if !devcontainersEnabled {
+ if !experimentalDevcontainersEnabled {
logger.Info(ctx, "agent devcontainer detection not enabled")
containerLister = &agentcontainers.NoopLister{}
} else {
@@ -383,7 +384,7 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
BlockFileTransfer: blockFileTransfer,
Execer: execer,
ContainerLister: containerLister,
- ExperimentalContainersEnabled: devcontainersEnabled,
+ ExperimentalDevcontainersEnabled: experimentalDevcontainersEnabled,
})
promHandler := agent.PrometheusMetricsHandler(prometheusRegistry, logger)
@@ -528,7 +529,7 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
Default: "false",
Env: "CODER_AGENT_DEVCONTAINERS_ENABLE",
Description: "Allow the agent to automatically detect running devcontainers.",
- Value: serpent.BoolOf(&devcontainersEnabled),
+ Value: serpent.BoolOf(&experimentalDevcontainersEnabled),
},
}
diff --git a/cli/configssh.go b/cli/configssh.go
index a7aed33eba..b3c29f711b 100644
--- a/cli/configssh.go
+++ b/cli/configssh.go
@@ -11,6 +11,7 @@ import (
"os"
"path/filepath"
"runtime"
+ "slices"
"strconv"
"strings"
@@ -19,7 +20,6 @@ import (
"github.com/pkg/diff"
"github.com/pkg/diff/write"
"golang.org/x/exp/constraints"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/cli/cliui"
diff --git a/cli/create.go b/cli/create.go
index f3709314cd..bb2e8dde02 100644
--- a/cli/create.go
+++ b/cli/create.go
@@ -4,11 +4,11 @@ import (
"context"
"fmt"
"io"
+ "slices"
"strings"
"time"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/coder/pretty"
diff --git a/cli/dotfiles_test.go b/cli/dotfiles_test.go
index 2f16929cc2..002f001e04 100644
--- a/cli/dotfiles_test.go
+++ b/cli/dotfiles_test.go
@@ -17,6 +17,10 @@ import (
func TestDotfiles(t *testing.T) {
t.Parallel()
+ // This test will time out if the user has commit signing enabled.
+ if _, gpgTTYFound := os.LookupEnv("GPG_TTY"); gpgTTYFound {
+ t.Skip("GPG_TTY is set, skipping test to avoid hanging")
+ }
t.Run("MissingArg", func(t *testing.T) {
t.Parallel()
inv, _ := clitest.New(t, "dotfiles")
diff --git a/cli/exp.go b/cli/exp.go
index 5c72d0f9fc..2339da8631 100644
--- a/cli/exp.go
+++ b/cli/exp.go
@@ -14,6 +14,7 @@ func (r *RootCmd) expCmd() *serpent.Command {
r.scaletestCmd(),
r.errorExample(),
r.promptExample(),
+ r.rptyCommand(),
},
}
return cmd
diff --git a/cli/errors.go b/cli/exp_errors.go
similarity index 100%
rename from cli/errors.go
rename to cli/exp_errors.go
diff --git a/cli/errors_test.go b/cli/exp_errors_test.go
similarity index 100%
rename from cli/errors_test.go
rename to cli/exp_errors_test.go
diff --git a/cli/prompts.go b/cli/exp_prompts.go
similarity index 100%
rename from cli/prompts.go
rename to cli/exp_prompts.go
diff --git a/cli/exp_rpty.go b/cli/exp_rpty.go
new file mode 100644
index 0000000000..ddfdc15ece
--- /dev/null
+++ b/cli/exp_rpty.go
@@ -0,0 +1,216 @@
+package cli
+
+import (
+ "bufio"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/google/uuid"
+ "github.com/mattn/go-isatty"
+ "golang.org/x/term"
+ "golang.org/x/xerrors"
+
+ "github.com/coder/coder/v2/cli/cliui"
+ "github.com/coder/coder/v2/codersdk"
+ "github.com/coder/coder/v2/codersdk/workspacesdk"
+ "github.com/coder/coder/v2/pty"
+ "github.com/coder/serpent"
+)
+
+func (r *RootCmd) rptyCommand() *serpent.Command {
+ var (
+ client = new(codersdk.Client)
+ args handleRPTYArgs
+ )
+
+ cmd := &serpent.Command{
+ Handler: func(inv *serpent.Invocation) error {
+ if r.disableDirect {
+ return xerrors.New("direct connections are disabled, but you can try websocat ;-)")
+ }
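+ // The first argument names the workspace; any remaining arguments form the command to run in the remote PTY.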
+ args.NamedWorkspace = inv.Args[0]
+ args.Command = inv.Args[1:]
+ return handleRPTY(inv, client, args)
+ },
+ Long: "Establish an RPTY session with a workspace/agent. This uses the same mechanism as the Web Terminal.",
+ Middleware: serpent.Chain(
+ serpent.RequireRangeArgs(1, -1),
+ r.InitClient(client),
+ ),
+ Options: []serpent.Option{
+ {
+ Name: "container",
+ Description: "The container name or ID to connect to.",
+ Flag: "container",
+ FlagShorthand: "c",
+ Default: "",
+ Value: serpent.StringOf(&args.Container),
+ },
+ {
+ Name: "container-user",
+ Description: "The user to connect as.",
+ Flag: "container-user",
+ FlagShorthand: "u",
+ Default: "",
+ Value: serpent.StringOf(&args.ContainerUser),
+ },
+ {
+ Name: "reconnect",
+ Description: "The reconnect ID to use.",
+ Flag: "reconnect",
+ FlagShorthand: "r",
+ Default: "",
+ Value: serpent.StringOf(&args.ReconnectID),
+ },
+ },
+ Short: "Establish an RPTY session with a workspace/agent.",
+ Use: "rpty",
+ }
+
+ return cmd
+}
+
+type handleRPTYArgs struct {
+ Command []string
+ Container string
+ ContainerUser string
+ NamedWorkspace string
+ ReconnectID string
+}
+
+func handleRPTY(inv *serpent.Invocation, client *codersdk.Client, args handleRPTYArgs) error {
+ ctx, cancel := context.WithCancel(inv.Context())
+ defer cancel()
+
+ var reconnectID uuid.UUID
+ if args.ReconnectID != "" {
+ rid, err := uuid.Parse(args.ReconnectID)
+ if err != nil {
+ return xerrors.Errorf("invalid reconnect ID: %w", err)
+ }
+ reconnectID = rid
+ } else {
+ reconnectID = uuid.New()
+ }
+ ws, agt, err := getWorkspaceAndAgent(ctx, inv, client, true, args.NamedWorkspace)
+ if err != nil {
+ return err
+ }
+
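+ // If a container was requested, resolve it to an ID by matching the name or ID against the agent's containers.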
+ var ctID string
+ if args.Container != "" {
+ cts, err := client.WorkspaceAgentListContainers(ctx, agt.ID, nil)
+ if err != nil {
+ return err
+ }
+ for _, ct := range cts.Containers {
+ if ct.FriendlyName == args.Container || ct.ID == args.Container {
+ ctID = ct.ID
+ break
+ }
+ }
+ if ctID == "" {
+ return xerrors.Errorf("container %q not found", args.Container)
+ }
+ }
+
+ if err := cliui.Agent(ctx, inv.Stderr, agt.ID, cliui.AgentOptions{
+ FetchInterval: 0,
+ Fetch: client.WorkspaceAgent,
+ Wait: false,
+ }); err != nil {
+ return err
+ }
+
+ // Get the width and height of the terminal.
+ var termWidth, termHeight uint16
+ stdoutFile, validOut := inv.Stdout.(*os.File)
+ if validOut && isatty.IsTerminal(stdoutFile.Fd()) {
+ w, h, err := term.GetSize(int(stdoutFile.Fd()))
+ if err == nil {
+ //nolint: gosec
+ termWidth, termHeight = uint16(w), uint16(h)
+ }
+ }
+
+ // Set stdin to raw mode so that control characters work.
+ stdinFile, validIn := inv.Stdin.(*os.File)
+ if validIn && isatty.IsTerminal(stdinFile.Fd()) {
+ inState, err := pty.MakeInputRaw(stdinFile.Fd())
+ if err != nil {
+ return xerrors.Errorf("failed to set input terminal to raw mode: %w", err)
+ }
+ defer func() {
+ _ = pty.RestoreTerminal(stdinFile.Fd(), inState)
+ }()
+ }
+
+ conn, err := workspacesdk.New(client).AgentReconnectingPTY(ctx, workspacesdk.WorkspaceAgentReconnectingPTYOpts{
+ AgentID: agt.ID,
+ Reconnect: reconnectID,
+ Command: strings.Join(args.Command, " "),
+ Container: ctID,
+ ContainerUser: args.ContainerUser,
+ Width: termWidth,
+ Height: termHeight,
+ })
+ if err != nil {
+ return xerrors.Errorf("open reconnecting PTY: %w", err)
+ }
+ defer conn.Close()
+
+ cliui.Infof(inv.Stderr, "Connected to %s (agent id: %s)", args.NamedWorkspace, agt.ID)
+ cliui.Infof(inv.Stderr, "Reconnect ID: %s", reconnectID)
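+ // Record workspace usage for this reconnecting PTY session until the connection closes.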
+ closeUsage := client.UpdateWorkspaceUsageWithBodyContext(ctx, ws.ID, codersdk.PostWorkspaceUsageRequest{
+ AgentID: agt.ID,
+ AppName: codersdk.UsageAppNameReconnectingPty,
+ })
+ defer closeUsage()
+
+ br := bufio.NewScanner(inv.Stdin)
+ // Split on bytes, otherwise you have to send a newline to flush the buffer.
+ br.Split(bufio.ScanBytes)
+ je := json.NewEncoder(conn)
+
+ go func() {
+ for br.Scan() {
+ if err := je.Encode(map[string]string{
+ "data": br.Text(),
+ }); err != nil {
+ return
+ }
+ }
+ }()
+
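+ // Forward local terminal resize events to the remote PTY for the lifetime of the connection.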
+ windowChange := listenWindowSize(ctx)
+ go func() {
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case <-windowChange:
+ }
+ width, height, err := term.GetSize(int(stdoutFile.Fd()))
+ if err != nil {
+ continue
+ }
+ if err := je.Encode(map[string]int{
+ "width": width,
+ "height": height,
+ }); err != nil {
+ cliui.Errorf(inv.Stderr, "Failed to send window size: %v", err)
+ }
+ }
+ }()
+
+ _, _ = io.Copy(inv.Stdout, conn)
+ cancel()
+ _ = conn.Close()
+ _, _ = fmt.Fprintf(inv.Stderr, "Connection closed\n")
+
+ return nil
+}
diff --git a/cli/exp_rpty_test.go b/cli/exp_rpty_test.go
new file mode 100644
index 0000000000..bfede8213d
--- /dev/null
+++ b/cli/exp_rpty_test.go
@@ -0,0 +1,114 @@
+package cli_test
+
+import (
+ "fmt"
+ "runtime"
+ "testing"
+
+ "github.com/ory/dockertest/v3"
+ "github.com/ory/dockertest/v3/docker"
+
+ "github.com/coder/coder/v2/agent"
+ "github.com/coder/coder/v2/agent/agentcontainers"
+ "github.com/coder/coder/v2/agent/agenttest"
+ "github.com/coder/coder/v2/cli/clitest"
+ "github.com/coder/coder/v2/coderd/coderdtest"
+ "github.com/coder/coder/v2/pty/ptytest"
+ "github.com/coder/coder/v2/testutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestExpRpty(t *testing.T) {
+ t.Parallel()
+
+ t.Run("OK", func(t *testing.T) {
+ t.Parallel()
+
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ inv, root := clitest.New(t, "exp", "rpty", workspace.Name)
+ clitest.SetupConfig(t, client, root)
+ pty := ptytest.New(t).Attach(inv)
+
+ ctx := testutil.Context(t, testutil.WaitLong)
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ _ = agenttest.New(t, client.URL, agentToken)
+ _ = coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait()
+
+ pty.ExpectMatch(fmt.Sprintf("Connected to %s", workspace.Name))
+ pty.WriteLine("exit")
+ <-cmdDone
+ })
+
+ t.Run("NotFound", func(t *testing.T) {
+ t.Parallel()
+
+ client, _, _ := setupWorkspaceForAgent(t)
+ inv, root := clitest.New(t, "exp", "rpty", "not-found")
+ clitest.SetupConfig(t, client, root)
+
+ ctx := testutil.Context(t, testutil.WaitShort)
+ err := inv.WithContext(ctx).Run()
+ require.ErrorContains(t, err, "not found")
+ })
+
+ t.Run("Container", func(t *testing.T) {
+ t.Parallel()
+ // Skip this test on non-Linux platforms since it requires Docker
+ if runtime.GOOS != "linux" {
+ t.Skip("Skipping test on non-Linux platform")
+ }
+
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ ctx := testutil.Context(t, testutil.WaitLong)
+ pool, err := dockertest.NewPool("")
+ require.NoError(t, err, "Could not connect to docker")
+ ct, err := pool.RunWithOptions(&dockertest.RunOptions{
+ Repository: "busybox",
+ Tag: "latest",
+ Cmd: []string{"sleep", "infinity"},
+ }, func(config *docker.HostConfig) {
+ config.AutoRemove = true
+ config.RestartPolicy = docker.RestartPolicy{Name: "no"}
+ })
+ require.NoError(t, err, "Could not start container")
+ // Wait for container to start
+ require.Eventually(t, func() bool {
+ ct, ok := pool.ContainerByName(ct.Container.Name)
+ return ok && ct.Container.State.Running
+ }, testutil.WaitShort, testutil.IntervalSlow, "Container did not start in time")
+ t.Cleanup(func() {
+ err := pool.Purge(ct)
+ require.NoError(t, err, "Could not stop container")
+ })
+
+ _ = agenttest.New(t, client.URL, agentToken, func(o *agent.Options) {
+ o.ExperimentalDevcontainersEnabled = true
+ o.ContainerLister = agentcontainers.NewDocker(o.Execer)
+ })
+ _ = coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait()
+
+ inv, root := clitest.New(t, "exp", "rpty", workspace.Name, "-c", ct.Container.ID)
+ clitest.SetupConfig(t, client, root)
+ pty := ptytest.New(t).Attach(inv)
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ pty.ExpectMatch(fmt.Sprintf("Connected to %s", workspace.Name))
+ pty.ExpectMatch("Reconnect ID: ")
+ pty.ExpectMatch(" #")
+ pty.WriteLine("hostname")
+ pty.ExpectMatch(ct.Container.Config.Hostname)
+ pty.WriteLine("exit")
+ <-cmdDone
+ })
+}
diff --git a/cli/exp_scaletest.go b/cli/exp_scaletest.go
index a7bd0f396b..a844a7e8c6 100644
--- a/cli/exp_scaletest.go
+++ b/cli/exp_scaletest.go
@@ -12,6 +12,7 @@ import (
"net/url"
"os"
"os/signal"
+ "slices"
"strconv"
"strings"
"sync"
@@ -21,7 +22,6 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"go.opentelemetry.io/otel/trace"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
diff --git a/cli/provisioners.go b/cli/provisioners.go
index 08d96493b8..5dd3a70361 100644
--- a/cli/provisioners.go
+++ b/cli/provisioners.go
@@ -39,6 +39,7 @@ func (r *RootCmd) provisionerList() *serpent.Command {
cliui.TableFormat([]provisionerDaemonRow{}, []string{"name", "organization", "status", "key name", "created at", "last seen at", "version", "tags"}),
cliui.JSONFormat(),
)
+ limit int64
)
cmd := &serpent.Command{
@@ -57,7 +58,9 @@ func (r *RootCmd) provisionerList() *serpent.Command {
return xerrors.Errorf("current organization: %w", err)
}
- daemons, err := client.OrganizationProvisionerDaemons(ctx, org.ID, nil)
+ daemons, err := client.OrganizationProvisionerDaemons(ctx, org.ID, &codersdk.OrganizationProvisionerDaemonsOptions{
+ Limit: int(limit),
+ })
if err != nil {
return xerrors.Errorf("list provisioner daemons: %w", err)
}
@@ -86,6 +89,17 @@ func (r *RootCmd) provisionerList() *serpent.Command {
},
}
+ cmd.Options = append(cmd.Options, []serpent.Option{
+ {
+ Flag: "limit",
+ FlagShorthand: "l",
+ Env: "CODER_PROVISIONER_LIST_LIMIT",
+ Description: "Limit the number of provisioners returned.",
+ Default: "50",
+ Value: serpent.Int64Of(&limit),
+ },
+ }...)
+
orgContext.AttachOptions(cmd)
formatter.AttachOptions(&cmd.Options)
diff --git a/cli/root.go b/cli/root.go
index 09044ad3e2..816d7b769e 100644
--- a/cli/root.go
+++ b/cli/root.go
@@ -17,6 +17,7 @@ import (
"path/filepath"
"runtime"
"runtime/trace"
+ "slices"
"strings"
"sync"
"syscall"
@@ -25,7 +26,6 @@ import (
"github.com/mattn/go-isatty"
"github.com/mitchellh/go-wordwrap"
- "golang.org/x/exp/slices"
"golang.org/x/mod/semver"
"golang.org/x/xerrors"
diff --git a/cli/server.go b/cli/server.go
index 933ab64ab2..745794a236 100644
--- a/cli/server.go
+++ b/cli/server.go
@@ -1911,8 +1911,10 @@ func getGithubOAuth2ConfigParams(ctx context.Context, db database.Store, vals *c
}
params.clientID = GithubOAuth2DefaultProviderClientID
- params.allowEveryone = GithubOAuth2DefaultProviderAllowEveryone
params.deviceFlow = GithubOAuth2DefaultProviderDeviceFlow
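+ // Only fall back to allowing everyone when no GitHub organizations are explicitly allowed.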
+ if len(params.allowOrgs) == 0 {
+ params.allowEveryone = GithubOAuth2DefaultProviderAllowEveryone
+ }
return &params, nil
}
diff --git a/cli/server_test.go b/cli/server_test.go
index d4031faf94..64ad535ea3 100644
--- a/cli/server_test.go
+++ b/cli/server_test.go
@@ -314,6 +314,7 @@ func TestServer(t *testing.T) {
githubDefaultProviderEnabled string
githubClientID string
githubClientSecret string
+ allowedOrg string
expectGithubEnabled bool
expectGithubDefaultProviderConfigured bool
createUserPreStart bool
@@ -355,7 +356,9 @@ func TestServer(t *testing.T) {
if tc.githubDefaultProviderEnabled != "" {
args = append(args, fmt.Sprintf("--oauth2-github-default-provider-enable=%s", tc.githubDefaultProviderEnabled))
}
-
+ if tc.allowedOrg != "" {
+ args = append(args, fmt.Sprintf("--oauth2-github-allowed-orgs=%s", tc.allowedOrg))
+ }
inv, cfg := clitest.New(t, args...)
errChan := make(chan error, 1)
go func() {
@@ -439,6 +442,12 @@ func TestServer(t *testing.T) {
expectGithubEnabled: true,
expectGithubDefaultProviderConfigured: false,
},
+ {
+ name: "AllowedOrg",
+ allowedOrg: "coder",
+ expectGithubEnabled: true,
+ expectGithubDefaultProviderConfigured: true,
+ },
} {
tc := tc
t.Run(tc.name, func(t *testing.T) {
diff --git a/cli/ssh.go b/cli/ssh.go
index 884c5500d7..da84a7886b 100644
--- a/cli/ssh.go
+++ b/cli/ssh.go
@@ -34,6 +34,7 @@ import (
"cdr.dev/slog"
"cdr.dev/slog/sloggers/sloghuman"
+ "github.com/coder/coder/v2/agent/agentssh"
"github.com/coder/coder/v2/cli/cliui"
"github.com/coder/coder/v2/cli/cliutil"
"github.com/coder/coder/v2/coderd/autobuild/notify"
@@ -76,6 +77,9 @@ func (r *RootCmd) ssh() *serpent.Command {
appearanceConfig codersdk.AppearanceConfig
networkInfoDir string
networkInfoInterval time.Duration
+
+ containerName string
+ containerUser string
)
client := new(codersdk.Client)
cmd := &serpent.Command{
@@ -282,6 +286,34 @@ func (r *RootCmd) ssh() *serpent.Command {
}
conn.AwaitReachable(ctx)
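+ // If a specific container was requested, verify it exists on the agent before starting the SSH session.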
+ if containerName != "" {
+ cts, err := client.WorkspaceAgentListContainers(ctx, workspaceAgent.ID, nil)
+ if err != nil {
+ return xerrors.Errorf("list containers: %w", err)
+ }
+ if len(cts.Containers) == 0 {
+ cliui.Info(inv.Stderr, "No containers found!")
+ cliui.Info(inv.Stderr, "Tip: Agent container integration is experimental and not enabled by default.")
+ cliui.Info(inv.Stderr, " To enable it, set CODER_AGENT_DEVCONTAINERS_ENABLE=true in your template.")
+ return nil
+ }
+ var found bool
+ for _, c := range cts.Containers {
+ if c.FriendlyName == containerName || c.ID == containerName {
+ found = true
+ break
+ }
+ }
+ if !found {
+ availableContainers := make([]string, len(cts.Containers))
+ for i, c := range cts.Containers {
+ availableContainers[i] = c.FriendlyName
+ }
+ cliui.Errorf(inv.Stderr, "Container not found: %q\nAvailable containers: %v", containerName, availableContainers)
+ return nil
+ }
+ }
+
stopPolling := tryPollWorkspaceAutostop(ctx, client, workspace)
defer stopPolling()
@@ -454,6 +486,17 @@ func (r *RootCmd) ssh() *serpent.Command {
}
}
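+ // Tell the agent which container (and user) to target by setting the environment variables it recognizes.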
+ if containerName != "" {
+ for k, v := range map[string]string{
+ agentssh.ContainerEnvironmentVariable: containerName,
+ agentssh.ContainerUserEnvironmentVariable: containerUser,
+ } {
+ if err := sshSession.Setenv(k, v); err != nil {
+ return xerrors.Errorf("setenv: %w", err)
+ }
+ }
+ }
+
err = sshSession.RequestPty("xterm-256color", 128, 128, gossh.TerminalModes{})
if err != nil {
return xerrors.Errorf("request pty: %w", err)
@@ -594,6 +637,19 @@ func (r *RootCmd) ssh() *serpent.Command {
Default: "5s",
Value: serpent.DurationOf(&networkInfoInterval),
},
+ {
+ Flag: "container",
+ FlagShorthand: "c",
+ Description: "Specifies a container inside the workspace to connect to.",
+ Value: serpent.StringOf(&containerName),
+ Hidden: true, // Hidden until this feature is at least in beta.
+ },
+ {
+ Flag: "container-user",
+ Description: "When connecting to a container, specifies the user to connect as.",
+ Value: serpent.StringOf(&containerUser),
+ Hidden: true, // Hidden until this feature is at least in beta.
+ },
sshDisableAutostartOption(serpent.BoolOf(&disableAutostart)),
}
return cmd
diff --git a/cli/ssh_test.go b/cli/ssh_test.go
index d20278bbf7..1fd4069ae3 100644
--- a/cli/ssh_test.go
+++ b/cli/ssh_test.go
@@ -24,15 +24,20 @@ import (
"time"
"github.com/google/uuid"
+ "github.com/ory/dockertest/v3"
+ "github.com/ory/dockertest/v3/docker"
"github.com/spf13/afero"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "go.uber.org/mock/gomock"
"golang.org/x/crypto/ssh"
gosshagent "golang.org/x/crypto/ssh/agent"
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/agent"
+ "github.com/coder/coder/v2/agent/agentcontainers"
+ "github.com/coder/coder/v2/agent/agentcontainers/acmock"
"github.com/coder/coder/v2/agent/agentssh"
"github.com/coder/coder/v2/agent/agenttest"
agentproto "github.com/coder/coder/v2/agent/proto"
@@ -1924,6 +1929,121 @@ Expire-Date: 0
<-cmdDone
}
+func TestSSH_Container(t *testing.T) {
+ t.Parallel()
+ if runtime.GOOS != "linux" {
+ t.Skip("Skipping test on non-Linux platform")
+ }
+
+ t.Run("OK", func(t *testing.T) {
+ t.Parallel()
+
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ ctx := testutil.Context(t, testutil.WaitLong)
+ pool, err := dockertest.NewPool("")
+ require.NoError(t, err, "Could not connect to docker")
+ ct, err := pool.RunWithOptions(&dockertest.RunOptions{
+ Repository: "busybox",
+ Tag: "latest",
+ Cmd: []string{"sleep", "infinity"},
+ }, func(config *docker.HostConfig) {
+ config.AutoRemove = true
+ config.RestartPolicy = docker.RestartPolicy{Name: "no"}
+ })
+ require.NoError(t, err, "Could not start container")
+ // Wait for container to start
+ require.Eventually(t, func() bool {
+ ct, ok := pool.ContainerByName(ct.Container.Name)
+ return ok && ct.Container.State.Running
+ }, testutil.WaitShort, testutil.IntervalSlow, "Container did not start in time")
+ t.Cleanup(func() {
+ err := pool.Purge(ct)
+ require.NoError(t, err, "Could not stop container")
+ })
+
+ _ = agenttest.New(t, client.URL, agentToken, func(o *agent.Options) {
+ o.ExperimentalDevcontainersEnabled = true
+ o.ContainerLister = agentcontainers.NewDocker(o.Execer)
+ })
+ _ = coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait()
+
+ inv, root := clitest.New(t, "ssh", workspace.Name, "-c", ct.Container.ID)
+ clitest.SetupConfig(t, client, root)
+ ptty := ptytest.New(t).Attach(inv)
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ ptty.ExpectMatch(" #")
+ ptty.WriteLine("hostname")
+ ptty.ExpectMatch(ct.Container.Config.Hostname)
+ ptty.WriteLine("exit")
+ <-cmdDone
+ })
+
+ t.Run("NotFound", func(t *testing.T) {
+ t.Parallel()
+
+ ctx := testutil.Context(t, testutil.WaitShort)
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ ctrl := gomock.NewController(t)
+ mLister := acmock.NewMockLister(ctrl)
+ _ = agenttest.New(t, client.URL, agentToken, func(o *agent.Options) {
+ o.ExperimentalDevcontainersEnabled = true
+ o.ContainerLister = mLister
+ })
+ _ = coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait()
+
+ mLister.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{
+ Containers: []codersdk.WorkspaceAgentDevcontainer{
+ {
+ ID: uuid.NewString(),
+ FriendlyName: "something_completely_different",
+ },
+ },
+ Warnings: nil,
+ }, nil)
+
+ cID := uuid.NewString()
+ inv, root := clitest.New(t, "ssh", workspace.Name, "-c", cID)
+ clitest.SetupConfig(t, client, root)
+ ptty := ptytest.New(t).Attach(inv)
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ ptty.ExpectMatch(fmt.Sprintf("Container not found: %q", cID))
+ ptty.ExpectMatch("Available containers: [something_completely_different]")
+ <-cmdDone
+ })
+
+ t.Run("NotEnabled", func(t *testing.T) {
+ t.Parallel()
+
+ ctx := testutil.Context(t, testutil.WaitShort)
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ _ = agenttest.New(t, client.URL, agentToken)
+ _ = coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait()
+
+ inv, root := clitest.New(t, "ssh", workspace.Name, "-c", uuid.NewString())
+ clitest.SetupConfig(t, client, root)
+ ptty := ptytest.New(t).Attach(inv)
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ ptty.ExpectMatch("No containers found!")
+ ptty.ExpectMatch("Tip: Agent container integration is experimental and not enabled by default.")
+ <-cmdDone
+ })
+}
+
// tGoContext runs fn in a goroutine passing a context that will be
// canceled on test completion and wait until fn has finished executing.
// Done and cancel are returned for optionally waiting until completion
diff --git a/cli/testdata/coder_provisioner_list_--help.golden b/cli/testdata/coder_provisioner_list_--help.golden
index 111eb8315b..ac889fb6dc 100644
--- a/cli/testdata/coder_provisioner_list_--help.golden
+++ b/cli/testdata/coder_provisioner_list_--help.golden
@@ -14,6 +14,9 @@ OPTIONS:
-c, --column [id|organization id|created at|last seen at|name|version|api version|tags|key name|status|current job id|current job status|current job template name|current job template icon|current job template display name|previous job id|previous job status|previous job template name|previous job template icon|previous job template display name|organization] (default: name,organization,status,key name,created at,last seen at,version,tags)
Columns to display in table output.
+ -l, --limit int, $CODER_PROVISIONER_LIST_LIMIT (default: 50)
+ Limit the number of provisioners returned.
+
-o, --output table|json (default: table)
Output format.
diff --git a/cli/tokens.go b/cli/tokens.go
index d132547576..7873882e3a 100644
--- a/cli/tokens.go
+++ b/cli/tokens.go
@@ -3,10 +3,10 @@ package cli
import (
"fmt"
"os"
+ "slices"
"strings"
"time"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/cli/cliui"
diff --git a/cmd/coder/main.go b/cmd/coder/main.go
index 1c22d578d7..27918798b3 100644
--- a/cmd/coder/main.go
+++ b/cmd/coder/main.go
@@ -8,6 +8,7 @@ import (
tea "github.com/charmbracelet/bubbletea"
"github.com/coder/coder/v2/agent/agentexec"
+ _ "github.com/coder/coder/v2/buildinfo/resources"
"github.com/coder/coder/v2/cli"
)
diff --git a/coderd/agentapi/lifecycle.go b/coderd/agentapi/lifecycle.go
index 5dd5e7b0c1..6bb3fedc51 100644
--- a/coderd/agentapi/lifecycle.go
+++ b/coderd/agentapi/lifecycle.go
@@ -3,10 +3,10 @@ package agentapi
import (
"context"
"database/sql"
+ "slices"
"time"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/mod/semver"
"golang.org/x/xerrors"
"google.golang.org/protobuf/types/known/timestamppb"
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go
index 133c65cb25..c67573ec73 100644
--- a/coderd/apidoc/docs.go
+++ b/coderd/apidoc/docs.go
@@ -13709,6 +13709,7 @@ const docTemplate = `{
"read",
"read_personal",
"ssh",
+ "unassign",
"update",
"update_personal",
"use",
@@ -13724,6 +13725,7 @@ const docTemplate = `{
"ActionRead",
"ActionReadPersonal",
"ActionSSH",
+ "ActionUnassign",
"ActionUpdate",
"ActionUpdatePersonal",
"ActionUse",
@@ -13748,6 +13750,7 @@ const docTemplate = `{
"group",
"group_member",
"idpsync_settings",
+ "inbox_notification",
"license",
"notification_message",
"notification_preference",
@@ -13783,6 +13786,7 @@ const docTemplate = `{
"ResourceGroup",
"ResourceGroupMember",
"ResourceIdpsyncSettings",
+ "ResourceInboxNotification",
"ResourceLicense",
"ResourceNotificationMessage",
"ResourceNotificationPreference",
diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json
index c8325a9d5f..dc6c141e1a 100644
--- a/coderd/apidoc/swagger.json
+++ b/coderd/apidoc/swagger.json
@@ -12398,6 +12398,7 @@
"read",
"read_personal",
"ssh",
+ "unassign",
"update",
"update_personal",
"use",
@@ -12413,6 +12414,7 @@
"ActionRead",
"ActionReadPersonal",
"ActionSSH",
+ "ActionUnassign",
"ActionUpdate",
"ActionUpdatePersonal",
"ActionUse",
@@ -12437,6 +12439,7 @@
"group",
"group_member",
"idpsync_settings",
+ "inbox_notification",
"license",
"notification_message",
"notification_preference",
@@ -12472,6 +12475,7 @@
"ResourceGroup",
"ResourceGroupMember",
"ResourceIdpsyncSettings",
+ "ResourceInboxNotification",
"ResourceLicense",
"ResourceNotificationMessage",
"ResourceNotificationPreference",
diff --git a/coderd/audit.go b/coderd/audit.go
index 72be70754c..ce932c9143 100644
--- a/coderd/audit.go
+++ b/coderd/audit.go
@@ -367,6 +367,26 @@ func (api *API) auditLogIsResourceDeleted(ctx context.Context, alog database.Get
api.Logger.Error(ctx, "unable to fetch workspace", slog.Error(err))
}
return workspace.Deleted
+ case database.ResourceTypeWorkspaceAgent:
+ // We use workspace as a proxy for workspace agents.
+ workspace, err := api.Database.GetWorkspaceByAgentID(ctx, alog.AuditLog.ResourceID)
+ if err != nil {
+ if xerrors.Is(err, sql.ErrNoRows) {
+ return true
+ }
+ api.Logger.Error(ctx, "unable to fetch workspace", slog.Error(err))
+ }
+ return workspace.Deleted
+ case database.ResourceTypeWorkspaceApp:
+ // We use workspace as a proxy for workspace apps.
+ workspace, err := api.Database.GetWorkspaceByWorkspaceAppID(ctx, alog.AuditLog.ResourceID)
+ if err != nil {
+ if xerrors.Is(err, sql.ErrNoRows) {
+ return true
+ }
+ api.Logger.Error(ctx, "unable to fetch workspace", slog.Error(err))
+ }
+ return workspace.Deleted
case database.ResourceTypeOauth2ProviderApp:
_, err := api.Database.GetOAuth2ProviderAppByID(ctx, alog.AuditLog.ResourceID)
if xerrors.Is(err, sql.ErrNoRows) {
@@ -429,6 +449,26 @@ func (api *API) auditLogResourceLink(ctx context.Context, alog database.GetAudit
return fmt.Sprintf("/@%s/%s/builds/%s",
workspaceOwner.Username, additionalFields.WorkspaceName, additionalFields.BuildNumber)
+ case database.ResourceTypeWorkspaceAgent:
+ if additionalFields.WorkspaceOwner != "" && additionalFields.WorkspaceName != "" {
+ return fmt.Sprintf("/@%s/%s", additionalFields.WorkspaceOwner, additionalFields.WorkspaceName)
+ }
+ workspace, getWorkspaceErr := api.Database.GetWorkspaceByAgentID(ctx, alog.AuditLog.ResourceID)
+ if getWorkspaceErr != nil {
+ return ""
+ }
+ return fmt.Sprintf("/@%s/%s", workspace.OwnerUsername, workspace.Name)
+
+ case database.ResourceTypeWorkspaceApp:
+ if additionalFields.WorkspaceOwner != "" && additionalFields.WorkspaceName != "" {
+ return fmt.Sprintf("/@%s/%s", additionalFields.WorkspaceOwner, additionalFields.WorkspaceName)
+ }
+ workspace, getWorkspaceErr := api.Database.GetWorkspaceByWorkspaceAppID(ctx, alog.AuditLog.ResourceID)
+ if getWorkspaceErr != nil {
+ return ""
+ }
+ return fmt.Sprintf("/@%s/%s", workspace.OwnerUsername, workspace.Name)
+
case database.ResourceTypeOauth2ProviderApp:
return fmt.Sprintf("/deployment/oauth2-provider/apps/%s", alog.AuditLog.ResourceID)
diff --git a/coderd/audit/audit.go b/coderd/audit/audit.go
index 097b0c6f49..a965c27a00 100644
--- a/coderd/audit/audit.go
+++ b/coderd/audit/audit.go
@@ -2,11 +2,11 @@ package audit
import (
"context"
+ "slices"
"sync"
"testing"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"github.com/coder/coder/v2/coderd/database"
)
diff --git a/coderd/coderd.go b/coderd/coderd.go
index 186336753d..d7d8873046 100644
--- a/coderd/coderd.go
+++ b/coderd/coderd.go
@@ -424,6 +424,7 @@ func New(options *Options) *API {
metricsCache := metricscache.New(
options.Database,
options.Logger.Named("metrics_cache"),
+ options.Clock,
metricscache.Intervals{
TemplateBuildTimes: options.MetricsCacheRefreshInterval,
DeploymentStats: options.AgentStatsRefreshInterval,
diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go
index 2249e0c9f3..53cd272b32 100644
--- a/coderd/database/db2sdk/db2sdk.go
+++ b/coderd/database/db2sdk/db2sdk.go
@@ -5,13 +5,13 @@ import (
"encoding/json"
"fmt"
"net/url"
+ "slices"
"sort"
"strconv"
"strings"
"time"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"tailscale.com/tailcfg"
diff --git a/coderd/database/dbauthz/customroles_test.go b/coderd/database/dbauthz/customroles_test.go
index c5d40b0323..815d6629f6 100644
--- a/coderd/database/dbauthz/customroles_test.go
+++ b/coderd/database/dbauthz/customroles_test.go
@@ -34,11 +34,12 @@ func TestInsertCustomRoles(t *testing.T) {
}
}
- canAssignRole := rbac.Role{
+ canCreateCustomRole := rbac.Role{
Identifier: rbac.RoleIdentifier{Name: "can-assign"},
DisplayName: "",
Site: rbac.Permissions(map[string][]policy.Action{
- rbac.ResourceAssignRole.Type: {policy.ActionRead, policy.ActionCreate},
+ rbac.ResourceAssignRole.Type: {policy.ActionRead},
+ rbac.ResourceAssignOrgRole.Type: {policy.ActionRead, policy.ActionCreate},
}),
}
@@ -61,17 +62,15 @@ func TestInsertCustomRoles(t *testing.T) {
return all
}
- orgID := uuid.NullUUID{
- UUID: uuid.New(),
- Valid: true,
- }
+ orgID := uuid.New()
+
testCases := []struct {
name string
subject rbac.ExpandableRoles
// Perms to create on new custom role
- organizationID uuid.NullUUID
+ organizationID uuid.UUID
site []codersdk.Permission
org []codersdk.Permission
user []codersdk.Permission
@@ -79,19 +78,21 @@ func TestInsertCustomRoles(t *testing.T) {
}{
{
// No roles, so no assign role
- name: "no-roles",
- subject: rbac.RoleIdentifiers{},
- errorContains: "forbidden",
+ name: "no-roles",
+ organizationID: orgID,
+ subject: rbac.RoleIdentifiers{},
+ errorContains: "forbidden",
},
{
// This works because the new role has 0 perms
- name: "empty",
- subject: merge(canAssignRole),
+ name: "empty",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole),
},
{
name: "mixed-scopes",
- subject: merge(canAssignRole, rbac.RoleOwner()),
organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleOwner()),
site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}),
@@ -101,27 +102,30 @@ func TestInsertCustomRoles(t *testing.T) {
errorContains: "organization roles specify site or user permissions",
},
{
- name: "invalid-action",
- subject: merge(canAssignRole, rbac.RoleOwner()),
- site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
+ name: "invalid-action",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleOwner()),
+ org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
// Action does not go with resource
codersdk.ResourceWorkspace: {codersdk.ActionViewInsights},
}),
errorContains: "invalid action",
},
{
- name: "invalid-resource",
- subject: merge(canAssignRole, rbac.RoleOwner()),
- site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
+ name: "invalid-resource",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleOwner()),
+ org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
"foobar": {codersdk.ActionViewInsights},
}),
errorContains: "invalid resource",
},
{
// Not allowing these at this time.
- name: "negative-permission",
- subject: merge(canAssignRole, rbac.RoleOwner()),
- site: []codersdk.Permission{
+ name: "negative-permission",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleOwner()),
+ org: []codersdk.Permission{
{
Negate: true,
ResourceType: codersdk.ResourceWorkspace,
@@ -131,89 +135,69 @@ func TestInsertCustomRoles(t *testing.T) {
errorContains: "no negative permissions",
},
{
- name: "wildcard", // not allowed
- subject: merge(canAssignRole, rbac.RoleOwner()),
- site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
+ name: "wildcard", // not allowed
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleOwner()),
+ org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {"*"},
}),
errorContains: "no wildcard symbols",
},
// escalation checks
{
- name: "read-workspace-escalation",
- subject: merge(canAssignRole),
- site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
+ name: "read-workspace-escalation",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole),
+ org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}),
errorContains: "not allowed to grant this permission",
},
{
- name: "read-workspace-outside-org",
- organizationID: uuid.NullUUID{
- UUID: uuid.New(),
- Valid: true,
- },
- subject: merge(canAssignRole, rbac.ScopedRoleOrgAdmin(orgID.UUID)),
+ name: "read-workspace-outside-org",
+ organizationID: uuid.New(),
+ subject: merge(canCreateCustomRole, rbac.ScopedRoleOrgAdmin(orgID)),
org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}),
- errorContains: "forbidden",
+ errorContains: "not allowed to grant this permission",
},
{
name: "user-escalation",
// These roles do not grant user perms
- subject: merge(canAssignRole, rbac.ScopedRoleOrgAdmin(orgID.UUID)),
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.ScopedRoleOrgAdmin(orgID)),
user: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}),
- errorContains: "not allowed to grant this permission",
+ errorContains: "organization roles specify site or user permissions",
},
{
- name: "template-admin-escalation",
- subject: merge(canAssignRole, rbac.RoleTemplateAdmin()),
+ name: "site-escalation",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleTemplateAdmin()),
site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
- codersdk.ResourceWorkspace: {codersdk.ActionRead}, // ok!
codersdk.ResourceDeploymentConfig: {codersdk.ActionUpdate}, // not ok!
}),
- user: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
- codersdk.ResourceWorkspace: {codersdk.ActionRead}, // ok!
- }),
- errorContains: "deployment_config",
+ errorContains: "organization roles specify site or user permissions",
},
// ok!
{
- name: "read-workspace-template-admin",
- subject: merge(canAssignRole, rbac.RoleTemplateAdmin()),
- site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
+ name: "read-workspace-template-admin",
+ organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.RoleTemplateAdmin()),
+ org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}),
},
{
name: "read-workspace-in-org",
- subject: merge(canAssignRole, rbac.ScopedRoleOrgAdmin(orgID.UUID)),
organizationID: orgID,
+ subject: merge(canCreateCustomRole, rbac.ScopedRoleOrgAdmin(orgID)),
org: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}),
},
- {
- name: "user-perms",
- // This is weird, but is ok
- subject: merge(canAssignRole, rbac.RoleMember()),
- user: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
- codersdk.ResourceWorkspace: {codersdk.ActionRead},
- }),
- },
- {
- name: "site+user-perms",
- subject: merge(canAssignRole, rbac.RoleMember(), rbac.RoleTemplateAdmin()),
- site: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
- codersdk.ResourceWorkspace: {codersdk.ActionRead},
- }),
- user: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
- codersdk.ResourceWorkspace: {codersdk.ActionRead},
- }),
- },
}
for _, tc := range testCases {
@@ -234,7 +218,7 @@ func TestInsertCustomRoles(t *testing.T) {
_, err := az.InsertCustomRole(ctx, database.InsertCustomRoleParams{
Name: "test-role",
DisplayName: "",
- OrganizationID: tc.organizationID,
+ OrganizationID: uuid.NullUUID{UUID: tc.organizationID, Valid: true},
SitePermissions: db2sdk.List(tc.site, convertSDKPerm),
OrgPermissions: db2sdk.List(tc.org, convertSDKPerm),
UserPermissions: db2sdk.List(tc.user, convertSDKPerm),
@@ -249,11 +233,11 @@ func TestInsertCustomRoles(t *testing.T) {
LookupRoles: []database.NameOrganizationPair{
{
Name: "test-role",
- OrganizationID: tc.organizationID.UUID,
+ OrganizationID: tc.organizationID,
},
},
ExcludeOrgRoles: false,
- OrganizationID: uuid.UUID{},
+ OrganizationID: uuid.Nil,
})
require.NoError(t, err)
require.Len(t, roles, 1)
diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go
index f7366af00b..877c0d4788 100644
--- a/coderd/database/dbauthz/dbauthz.go
+++ b/coderd/database/dbauthz/dbauthz.go
@@ -5,13 +5,13 @@ import (
"database/sql"
"encoding/json"
"errors"
+ "slices"
"strings"
"sync/atomic"
"testing"
"time"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/open-policy-agent/opa/topdown"
@@ -281,6 +281,7 @@ var (
DisplayName: "Notifier",
Site: rbac.Permissions(map[string][]policy.Action{
rbac.ResourceNotificationMessage.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
+ rbac.ResourceInboxNotification.Type: {policy.ActionCreate},
}),
Org: map[string][]rbac.Permission{},
User: []rbac.Permission{},
@@ -747,7 +748,7 @@ func (*querier) convertToDeploymentRoles(names []string) []rbac.RoleIdentifier {
}
// canAssignRoles handles assigning built in and custom roles.
-func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, removed []rbac.RoleIdentifier) error {
+func (q *querier) canAssignRoles(ctx context.Context, orgID uuid.UUID, added, removed []rbac.RoleIdentifier) error {
actor, ok := ActorFromContext(ctx)
if !ok {
return NoActorError
@@ -755,12 +756,14 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r
roleAssign := rbac.ResourceAssignRole
shouldBeOrgRoles := false
- if orgID != nil {
- roleAssign = rbac.ResourceAssignOrgRole.InOrg(*orgID)
+ if orgID != uuid.Nil {
+ roleAssign = rbac.ResourceAssignOrgRole.InOrg(orgID)
shouldBeOrgRoles = true
}
- grantedRoles := append(added, removed...)
+ grantedRoles := make([]rbac.RoleIdentifier, 0, len(added)+len(removed))
+ grantedRoles = append(grantedRoles, added...)
+ grantedRoles = append(grantedRoles, removed...)
customRoles := make([]rbac.RoleIdentifier, 0)
// Validate that the roles being assigned are valid.
for _, r := range grantedRoles {
@@ -774,11 +777,11 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r
}
if shouldBeOrgRoles {
- if orgID == nil {
+ if orgID == uuid.Nil {
return xerrors.Errorf("should never happen, orgID is nil, but trying to assign an organization role")
}
- if r.OrganizationID != *orgID {
+ if r.OrganizationID != orgID {
return xerrors.Errorf("attempted to assign role from a different org, role %q to %q", r, orgID.String())
}
}
@@ -824,7 +827,7 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r
}
if len(removed) > 0 {
- if err := q.authorizeContext(ctx, policy.ActionDelete, roleAssign); err != nil {
+ if err := q.authorizeContext(ctx, policy.ActionUnassign, roleAssign); err != nil {
return err
}
}
@@ -1133,11 +1136,23 @@ func (q *querier) CleanTailnetTunnels(ctx context.Context) error {
return q.db.CleanTailnetTunnels(ctx)
}
+func (q *querier) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) {
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceInboxNotification.WithOwner(userID.String())); err != nil {
+ return 0, err
+ }
+ return q.db.CountUnreadInboxNotificationsByUserID(ctx, userID)
+}
+
// TODO: Handle org scoped lookups
func (q *querier) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) {
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceAssignRole); err != nil {
+ roleObject := rbac.ResourceAssignRole
+ if arg.OrganizationID != uuid.Nil {
+ roleObject = rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID)
+ }
+ if err := q.authorizeContext(ctx, policy.ActionRead, roleObject); err != nil {
return nil, err
}
+
return q.db.CustomRoles(ctx, arg)
}
@@ -1194,14 +1209,11 @@ func (q *querier) DeleteCryptoKey(ctx context.Context, arg database.DeleteCrypto
}
func (q *querier) DeleteCustomRole(ctx context.Context, arg database.DeleteCustomRoleParams) error {
- if arg.OrganizationID.UUID != uuid.Nil {
- if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID.UUID)); err != nil {
- return err
- }
- } else {
- if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceAssignRole); err != nil {
- return err
- }
+ if !arg.OrganizationID.Valid || arg.OrganizationID.UUID == uuid.Nil {
+ return NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}
+ }
+ if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID.UUID)); err != nil {
+ return err
}
return q.db.DeleteCustomRole(ctx, arg)
@@ -1435,6 +1447,17 @@ func (q *querier) FetchMemoryResourceMonitorsByAgentID(ctx context.Context, agen
return q.db.FetchMemoryResourceMonitorsByAgentID(ctx, agentID)
}
+func (q *querier) FetchMemoryResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.WorkspaceAgentMemoryResourceMonitor, error) {
+ // Ideally, we would return a list of monitors that the user has access to. However, that check would need to
+ // be implemented similarly to GetWorkspaces, which is more complex than what we're doing here. Since this query
+ // was introduced for telemetry, we perform a simpler check.
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceWorkspaceAgentResourceMonitor); err != nil {
+ return nil, err
+ }
+
+ return q.db.FetchMemoryResourceMonitorsUpdatedAfter(ctx, updatedAt)
+}
+
func (q *querier) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceNotificationMessage); err != nil {
return database.FetchNewMessageMetadataRow{}, err
@@ -1456,6 +1479,17 @@ func (q *querier) FetchVolumesResourceMonitorsByAgentID(ctx context.Context, age
return q.db.FetchVolumesResourceMonitorsByAgentID(ctx, agentID)
}
+func (q *querier) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.WorkspaceAgentVolumeResourceMonitor, error) {
+ // Ideally, we would return a list of monitors that the user has access to. However, that check would need to
+ // be implemented similarly to GetWorkspaces, which is more complex than what we're doing here. Since this query
+ // was introduced for telemetry, we perform a simpler check.
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceWorkspaceAgentResourceMonitor); err != nil {
+ return nil, err
+ }
+
+ return q.db.FetchVolumesResourceMonitorsUpdatedAfter(ctx, updatedAt)
+}
+
func (q *querier) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) {
return fetch(q.log, q.auth, q.db.GetAPIKeyByID)(ctx, id)
}
@@ -1695,6 +1729,10 @@ func (q *querier) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]dat
return q.db.GetFileTemplates(ctx, fileID)
}
+func (q *querier) GetFilteredInboxNotificationsByUserID(ctx context.Context, arg database.GetFilteredInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetFilteredInboxNotificationsByUserID)(ctx, arg)
+}
+
func (q *querier) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
return fetchWithAction(q.log, q.auth, policy.ActionReadPersonal, q.db.GetGitSSHKey)(ctx, userID)
}
@@ -1754,6 +1792,14 @@ func (q *querier) GetHungProvisionerJobs(ctx context.Context, hungSince time.Tim
return q.db.GetHungProvisionerJobs(ctx, hungSince)
}
+func (q *querier) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) {
+ return fetchWithAction(q.log, q.auth, policy.ActionRead, q.db.GetInboxNotificationByID)(ctx, id)
+}
+
+func (q *querier) GetInboxNotificationsByUserID(ctx context.Context, userID database.GetInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetInboxNotificationsByUserID)(ctx, userID)
+}
+
func (q *querier) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
if _, err := fetch(q.log, q.auth, q.db.GetWorkspaceByID)(ctx, arg.WorkspaceID); err != nil {
return database.JfrogXrayScan{}, err
@@ -3046,14 +3092,11 @@ func (q *querier) InsertCryptoKey(ctx context.Context, arg database.InsertCrypto
func (q *querier) InsertCustomRole(ctx context.Context, arg database.InsertCustomRoleParams) (database.CustomRole, error) {
// Org and site role upsert share the same query. So switch the assertion based on the org uuid.
- if arg.OrganizationID.UUID != uuid.Nil {
- if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID.UUID)); err != nil {
- return database.CustomRole{}, err
- }
- } else {
- if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceAssignRole); err != nil {
- return database.CustomRole{}, err
- }
+ if !arg.OrganizationID.Valid || arg.OrganizationID.UUID == uuid.Nil {
+ return database.CustomRole{}, NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}
+ }
+ if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID.UUID)); err != nil {
+ return database.CustomRole{}, err
}
if err := q.customRoleCheck(ctx, database.CustomRole{
@@ -3116,6 +3159,10 @@ func (q *querier) InsertGroupMember(ctx context.Context, arg database.InsertGrou
return update(q.log, q.auth, fetch, q.db.InsertGroupMember)(ctx, arg)
}
+func (q *querier) InsertInboxNotification(ctx context.Context, arg database.InsertInboxNotificationParams) (database.InboxNotification, error) {
+ return insert(q.log, q.auth, rbac.ResourceInboxNotification.WithOwner(arg.UserID.String()), q.db.InsertInboxNotification)(ctx, arg)
+}
+
func (q *querier) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) {
if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceLicense); err != nil {
return database.License{}, err
@@ -3183,7 +3230,7 @@ func (q *querier) InsertOrganizationMember(ctx context.Context, arg database.Ins
// All roles are added roles. Org member is always implied.
addedRoles := append(orgRoles, rbac.ScopedRoleOrgMember(arg.OrganizationID))
- err = q.canAssignRoles(ctx, &arg.OrganizationID, addedRoles, []rbac.RoleIdentifier{})
+ err = q.canAssignRoles(ctx, arg.OrganizationID, addedRoles, []rbac.RoleIdentifier{})
if err != nil {
return database.OrganizationMember{}, err
}
@@ -3314,7 +3361,7 @@ func (q *querier) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg dat
func (q *querier) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) {
// Always check if the assigned roles can actually be assigned by this actor.
impliedRoles := append([]rbac.RoleIdentifier{rbac.RoleMember()}, q.convertToDeploymentRoles(arg.RBACRoles)...)
- err := q.canAssignRoles(ctx, nil, impliedRoles, []rbac.RoleIdentifier{})
+ err := q.canAssignRoles(ctx, uuid.Nil, impliedRoles, []rbac.RoleIdentifier{})
if err != nil {
return database.User{}, err
}
@@ -3652,14 +3699,11 @@ func (q *querier) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.Upd
}
func (q *querier) UpdateCustomRole(ctx context.Context, arg database.UpdateCustomRoleParams) (database.CustomRole, error) {
- if arg.OrganizationID.UUID != uuid.Nil {
- if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID.UUID)); err != nil {
- return database.CustomRole{}, err
- }
- } else {
- if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceAssignRole); err != nil {
- return database.CustomRole{}, err
- }
+ if !arg.OrganizationID.Valid || arg.OrganizationID.UUID == uuid.Nil {
+ return database.CustomRole{}, NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}
+ }
+ if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceAssignOrgRole.InOrg(arg.OrganizationID.UUID)); err != nil {
+ return database.CustomRole{}, err
}
if err := q.customRoleCheck(ctx, database.CustomRole{
@@ -3713,6 +3757,14 @@ func (q *querier) UpdateInactiveUsersToDormant(ctx context.Context, lastSeenAfte
return q.db.UpdateInactiveUsersToDormant(ctx, lastSeenAfter)
}
+func (q *querier) UpdateInboxNotificationReadStatus(ctx context.Context, args database.UpdateInboxNotificationReadStatusParams) error {
+ fetchFunc := func(ctx context.Context, args database.UpdateInboxNotificationReadStatusParams) (database.InboxNotification, error) {
+ return q.db.GetInboxNotificationByID(ctx, args.ID)
+ }
+
+ return update(q.log, q.auth, fetchFunc, q.db.UpdateInboxNotificationReadStatus)(ctx, args)
+}
+
func (q *querier) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
// Authorized fetch will check that the actor has read access to the org member since the org member is returned.
member, err := database.ExpectOne(q.OrganizationMembers(ctx, database.OrganizationMembersParams{
@@ -3739,7 +3791,7 @@ func (q *querier) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemb
impliedTypes := append(scopedGranted, rbac.ScopedRoleOrgMember(arg.OrgID))
added, removed := rbac.ChangeRoleSet(originalRoles, impliedTypes)
- err = q.canAssignRoles(ctx, &arg.OrgID, added, removed)
+ err = q.canAssignRoles(ctx, arg.OrgID, added, removed)
if err != nil {
return database.OrganizationMember{}, err
}
@@ -4146,7 +4198,7 @@ func (q *querier) UpdateUserRoles(ctx context.Context, arg database.UpdateUserRo
impliedTypes := append(q.convertToDeploymentRoles(arg.GrantedRoles), rbac.RoleMember())
// If the changeset is nothing, less rbac checks need to be done.
added, removed := rbac.ChangeRoleSet(q.convertToDeploymentRoles(user.RBACRoles), impliedTypes)
- err = q.canAssignRoles(ctx, nil, added, removed)
+ err = q.canAssignRoles(ctx, uuid.Nil, added, removed)
if err != nil {
return database.User{}, err
}
diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go
index 108a8166d1..a2ac739042 100644
--- a/coderd/database/dbauthz/dbauthz_test.go
+++ b/coderd/database/dbauthz/dbauthz_test.go
@@ -1011,7 +1011,7 @@ func (s *MethodTestSuite) TestOrganization() {
Asserts(
mem, policy.ActionRead,
rbac.ResourceAssignOrgRole.InOrg(o.ID), policy.ActionAssign, // org-mem
- rbac.ResourceAssignOrgRole.InOrg(o.ID), policy.ActionDelete, // org-admin
+ rbac.ResourceAssignOrgRole.InOrg(o.ID), policy.ActionUnassign, // org-admin
).Returns(out)
}))
}
@@ -1619,7 +1619,7 @@ func (s *MethodTestSuite) TestUser() {
}).Asserts(
u, policy.ActionRead,
rbac.ResourceAssignRole, policy.ActionAssign,
- rbac.ResourceAssignRole, policy.ActionDelete,
+ rbac.ResourceAssignRole, policy.ActionUnassign,
).Returns(o)
}))
s.Run("AllUserIDs", s.Subtest(func(db database.Store, check *expects) {
@@ -1653,30 +1653,28 @@ func (s *MethodTestSuite) TestUser() {
check.Args(database.DeleteCustomRoleParams{
Name: customRole.Name,
}).Asserts(
- rbac.ResourceAssignRole, policy.ActionDelete)
+ // fails immediately, missing organization id
+ ).Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")})
}))
s.Run("Blank/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) {
dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
- customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{})
+ customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{
+ OrganizationID: uuid.NullUUID{UUID: uuid.New(), Valid: true},
+ })
// Blank is no perms in the role
check.Args(database.UpdateCustomRoleParams{
Name: customRole.Name,
DisplayName: "Test Name",
+ OrganizationID: customRole.OrganizationID,
SitePermissions: nil,
OrgPermissions: nil,
UserPermissions: nil,
- }).Asserts(rbac.ResourceAssignRole, policy.ActionUpdate).ErrorsWithPG(sql.ErrNoRows)
+ }).Asserts(rbac.ResourceAssignOrgRole.InOrg(customRole.OrganizationID.UUID), policy.ActionUpdate)
}))
s.Run("SitePermissions/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) {
- customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{
- OrganizationID: uuid.NullUUID{
- UUID: uuid.Nil,
- Valid: false,
- },
- })
check.Args(database.UpdateCustomRoleParams{
- Name: customRole.Name,
- OrganizationID: customRole.OrganizationID,
+ Name: "",
+ OrganizationID: uuid.NullUUID{UUID: uuid.Nil, Valid: false},
DisplayName: "Test Name",
SitePermissions: db2sdk.List(codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead, codersdk.ActionUpdate, codersdk.ActionDelete, codersdk.ActionViewInsights},
@@ -1686,17 +1684,8 @@ func (s *MethodTestSuite) TestUser() {
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}), convertSDKPerm),
}).Asserts(
- // First check
- rbac.ResourceAssignRole, policy.ActionUpdate,
- // Escalation checks
- rbac.ResourceTemplate, policy.ActionCreate,
- rbac.ResourceTemplate, policy.ActionRead,
- rbac.ResourceTemplate, policy.ActionUpdate,
- rbac.ResourceTemplate, policy.ActionDelete,
- rbac.ResourceTemplate, policy.ActionViewInsights,
-
- rbac.ResourceWorkspace.WithOwner(testActorID.String()), policy.ActionRead,
- ).ErrorsWithPG(sql.ErrNoRows)
+ // fails immediately, missing organization id
+ ).Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")})
}))
s.Run("OrgPermissions/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) {
orgID := uuid.New()
@@ -1726,13 +1715,15 @@ func (s *MethodTestSuite) TestUser() {
}))
s.Run("Blank/InsertCustomRole", s.Subtest(func(db database.Store, check *expects) {
// Blank is no perms in the role
+ orgID := uuid.New()
check.Args(database.InsertCustomRoleParams{
Name: "test",
DisplayName: "Test Name",
+ OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true},
SitePermissions: nil,
OrgPermissions: nil,
UserPermissions: nil,
- }).Asserts(rbac.ResourceAssignRole, policy.ActionCreate)
+ }).Asserts(rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionCreate)
}))
s.Run("SitePermissions/InsertCustomRole", s.Subtest(func(db database.Store, check *expects) {
check.Args(database.InsertCustomRoleParams{
@@ -1746,17 +1737,8 @@ func (s *MethodTestSuite) TestUser() {
codersdk.ResourceWorkspace: {codersdk.ActionRead},
}), convertSDKPerm),
}).Asserts(
- // First check
- rbac.ResourceAssignRole, policy.ActionCreate,
- // Escalation checks
- rbac.ResourceTemplate, policy.ActionCreate,
- rbac.ResourceTemplate, policy.ActionRead,
- rbac.ResourceTemplate, policy.ActionUpdate,
- rbac.ResourceTemplate, policy.ActionDelete,
- rbac.ResourceTemplate, policy.ActionViewInsights,
-
- rbac.ResourceWorkspace.WithOwner(testActorID.String()), policy.ActionRead,
- )
+ // fails immediately, missing organization id
+ ).Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")})
}))
s.Run("OrgPermissions/InsertCustomRole", s.Subtest(func(db database.Store, check *expects) {
orgID := uuid.New()
@@ -4484,6 +4466,141 @@ func (s *MethodTestSuite) TestNotifications() {
Disableds: []bool{true, false},
}).Asserts(rbac.ResourceNotificationPreference.WithOwner(user.ID.String()), policy.ActionUpdate)
}))
+
+ s.Run("GetInboxNotificationsByUserID", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+
+ notifID := uuid.New()
+
+ notif := dbgen.NotificationInbox(s.T(), db, database.InsertInboxNotificationParams{
+ ID: notifID,
+ UserID: u.ID,
+ TemplateID: notifications.TemplateWorkspaceAutoUpdated,
+ Title: "test title",
+ Content: "test content notification",
+ Icon: "https://coder.com/favicon.ico",
+ Actions: json.RawMessage("{}"),
+ })
+
+ check.Args(database.GetInboxNotificationsByUserIDParams{
+ UserID: u.ID,
+ ReadStatus: database.InboxNotificationReadStatusAll,
+ }).Asserts(rbac.ResourceInboxNotification.WithID(notifID).WithOwner(u.ID.String()), policy.ActionRead).Returns([]database.InboxNotification{notif})
+ }))
+
+ s.Run("GetFilteredInboxNotificationsByUserID", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+
+ notifID := uuid.New()
+
+ targets := []uuid.UUID{u.ID, notifications.TemplateWorkspaceAutoUpdated}
+
+ notif := dbgen.NotificationInbox(s.T(), db, database.InsertInboxNotificationParams{
+ ID: notifID,
+ UserID: u.ID,
+ TemplateID: notifications.TemplateWorkspaceAutoUpdated,
+ Targets: targets,
+ Title: "test title",
+ Content: "test content notification",
+ Icon: "https://coder.com/favicon.ico",
+ Actions: json.RawMessage("{}"),
+ })
+
+ check.Args(database.GetFilteredInboxNotificationsByUserIDParams{
+ UserID: u.ID,
+ Templates: []uuid.UUID{notifications.TemplateWorkspaceAutoUpdated},
+ Targets: []uuid.UUID{u.ID},
+ ReadStatus: database.InboxNotificationReadStatusAll,
+ }).Asserts(rbac.ResourceInboxNotification.WithID(notifID).WithOwner(u.ID.String()), policy.ActionRead).Returns([]database.InboxNotification{notif})
+ }))
+
+ s.Run("GetInboxNotificationByID", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+
+ notifID := uuid.New()
+
+ targets := []uuid.UUID{u.ID, notifications.TemplateWorkspaceAutoUpdated}
+
+ notif := dbgen.NotificationInbox(s.T(), db, database.InsertInboxNotificationParams{
+ ID: notifID,
+ UserID: u.ID,
+ TemplateID: notifications.TemplateWorkspaceAutoUpdated,
+ Targets: targets,
+ Title: "test title",
+ Content: "test content notification",
+ Icon: "https://coder.com/favicon.ico",
+ Actions: json.RawMessage("{}"),
+ })
+
+ check.Args(notifID).Asserts(rbac.ResourceInboxNotification.WithID(notifID).WithOwner(u.ID.String()), policy.ActionRead).Returns(notif)
+ }))
+
+ s.Run("CountUnreadInboxNotificationsByUserID", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+
+ notifID := uuid.New()
+
+ targets := []uuid.UUID{u.ID, notifications.TemplateWorkspaceAutoUpdated}
+
+ _ = dbgen.NotificationInbox(s.T(), db, database.InsertInboxNotificationParams{
+ ID: notifID,
+ UserID: u.ID,
+ TemplateID: notifications.TemplateWorkspaceAutoUpdated,
+ Targets: targets,
+ Title: "test title",
+ Content: "test content notification",
+ Icon: "https://coder.com/favicon.ico",
+ Actions: json.RawMessage("{}"),
+ })
+
+ check.Args(u.ID).Asserts(rbac.ResourceInboxNotification.WithOwner(u.ID.String()), policy.ActionRead).Returns(int64(1))
+ }))
+
+ s.Run("InsertInboxNotification", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+
+ notifID := uuid.New()
+
+ targets := []uuid.UUID{u.ID, notifications.TemplateWorkspaceAutoUpdated}
+
+ check.Args(database.InsertInboxNotificationParams{
+ ID: notifID,
+ UserID: u.ID,
+ TemplateID: notifications.TemplateWorkspaceAutoUpdated,
+ Targets: targets,
+ Title: "test title",
+ Content: "test content notification",
+ Icon: "https://coder.com/favicon.ico",
+ Actions: json.RawMessage("{}"),
+ }).Asserts(rbac.ResourceInboxNotification.WithOwner(u.ID.String()), policy.ActionCreate)
+ }))
+
+ s.Run("UpdateInboxNotificationReadStatus", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+
+ notifID := uuid.New()
+
+ targets := []uuid.UUID{u.ID, notifications.TemplateWorkspaceAutoUpdated}
+ readAt := dbtestutil.NowInDefaultTimezone()
+
+ notif := dbgen.NotificationInbox(s.T(), db, database.InsertInboxNotificationParams{
+ ID: notifID,
+ UserID: u.ID,
+ TemplateID: notifications.TemplateWorkspaceAutoUpdated,
+ Targets: targets,
+ Title: "test title",
+ Content: "test content notification",
+ Icon: "https://coder.com/favicon.ico",
+ Actions: json.RawMessage("{}"),
+ })
+
+ notif.ReadAt = sql.NullTime{Time: readAt, Valid: true}
+
+ check.Args(database.UpdateInboxNotificationReadStatusParams{
+ ID: notifID,
+ ReadAt: sql.NullTime{Time: readAt, Valid: true},
+ }).Asserts(rbac.ResourceInboxNotification.WithID(notifID).WithOwner(u.ID.String()), policy.ActionUpdate)
+ }))
}
func (s *MethodTestSuite) TestOAuth2ProviderApps() {
@@ -4802,6 +4919,14 @@ func (s *MethodTestSuite) TestResourcesMonitor() {
}).Asserts(rbac.ResourceWorkspaceAgentResourceMonitor, policy.ActionUpdate)
}))
+ s.Run("FetchMemoryResourceMonitorsUpdatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ check.Args(dbtime.Now()).Asserts(rbac.ResourceWorkspaceAgentResourceMonitor, policy.ActionRead)
+ }))
+
+ s.Run("FetchVolumesResourceMonitorsUpdatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ check.Args(dbtime.Now()).Asserts(rbac.ResourceWorkspaceAgentResourceMonitor, policy.ActionRead)
+ }))
+
s.Run("FetchMemoryResourceMonitorsByAgentID", s.Subtest(func(db database.Store, check *expects) {
agt, w := createAgent(s.T(), db)
diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go
index 9c4ebbe8bb..3810fcb505 100644
--- a/coderd/database/dbgen/dbgen.go
+++ b/coderd/database/dbgen/dbgen.go
@@ -450,6 +450,22 @@ func OrganizationMember(t testing.TB, db database.Store, orig database.Organizat
return mem
}
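+// NotificationInbox inserts an inbox notification for tests, backfilling most unset fields of orig with generated defaults.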
+func NotificationInbox(t testing.TB, db database.Store, orig database.InsertInboxNotificationParams) database.InboxNotification {
+ t.Helper()
+ notification, err := db.InsertInboxNotification(genCtx, database.InsertInboxNotificationParams{
+ ID: takeFirst(orig.ID, uuid.New()),
+ UserID: takeFirst(orig.UserID, uuid.New()),
+ TemplateID: takeFirst(orig.TemplateID, uuid.New()),
+ Targets: takeFirstSlice(orig.Targets, []uuid.UUID{}),
+ Title: takeFirst(orig.Title, testutil.GetRandomName(t)),
+ Content: takeFirst(orig.Content, testutil.GetRandomName(t)),
+ Icon: takeFirst(orig.Icon, ""),
+ Actions: orig.Actions,
+ CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()),
+ })
+ require.NoError(t, err, "insert notification")
+ return notification
+}
+
func Group(t testing.TB, db database.Store, orig database.Group) database.Group {
t.Helper()
diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go
index 5e92f9560c..38c30031d9 100644
--- a/coderd/database/dbmem/dbmem.go
+++ b/coderd/database/dbmem/dbmem.go
@@ -10,6 +10,7 @@ import (
"math"
"reflect"
"regexp"
+ "slices"
"sort"
"strings"
"sync"
@@ -19,7 +20,6 @@ import (
"github.com/lib/pq"
"golang.org/x/exp/constraints"
"golang.org/x/exp/maps"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/coderd/notifications/types"
@@ -67,6 +67,7 @@ func New() database.Store {
gitSSHKey: make([]database.GitSSHKey, 0),
notificationMessages: make([]database.NotificationMessage, 0),
notificationPreferences: make([]database.NotificationPreference, 0),
+ InboxNotification: make([]database.InboxNotification, 0),
parameterSchemas: make([]database.ParameterSchema, 0),
provisionerDaemons: make([]database.ProvisionerDaemon, 0),
provisionerKeys: make([]database.ProvisionerKey, 0),
@@ -206,6 +207,7 @@ type data struct {
notificationMessages []database.NotificationMessage
notificationPreferences []database.NotificationPreference
notificationReportGeneratorLogs []database.NotificationReportGeneratorLog
+ InboxNotification []database.InboxNotification
oauth2ProviderApps []database.OAuth2ProviderApp
oauth2ProviderAppSecrets []database.OAuth2ProviderAppSecret
oauth2ProviderAppCodes []database.OAuth2ProviderAppCode
@@ -269,7 +271,7 @@ type data struct {
presetParameters []database.TemplateVersionPresetParameter
}
-func tryPercentile(fs []float64, p float64) float64 {
+func tryPercentileCont(fs []float64, p float64) float64 {
if len(fs) == 0 {
return -1
}
@@ -282,6 +284,14 @@ func tryPercentile(fs []float64, p float64) float64 {
return fs[lower] + (fs[upper]-fs[lower])*(pos-float64(lower))
}
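+// tryPercentileDisc mirrors SQL's PERCENTILE_DISC: it sorts fs and returns an
+// actual element at the requested percentile p (no interpolation, unlike
+// tryPercentileCont), or -1 when fs is empty.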
+func tryPercentileDisc(fs []float64, p float64) float64 {
+ if len(fs) == 0 {
+ return -1
+ }
+ sort.Float64s(fs)
+ return fs[max(int(math.Ceil(float64(len(fs))*p/100-1)), 0)]
+}
+
func validateDatabaseTypeWithValid(v reflect.Value) (handled bool, err error) {
if v.Kind() == reflect.Struct {
return false, nil
@@ -1139,7 +1149,119 @@ func getOwnerFromTags(tags map[string]string) string {
return ""
}
-func (q *FakeQuerier) getProvisionerJobsByIDsWithQueuePositionLocked(_ context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) {
+// provisionerTagsetContains reports whether daemonTags contains every key-value pair in jobTags.
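+// For example, daemon tags {"environment": "on-prem", "os": "linux"} contain job tags {"os": "linux"}, but not {"os": "windows"}.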
+func provisionerTagsetContains(daemonTags, jobTags map[string]string) bool {
+ for jobKey, jobValue := range jobTags {
+ if daemonValue, exists := daemonTags[jobKey]; !exists || daemonValue != jobValue {
+ return false
+ }
+ }
+ return true
+}
+
+// getProvisionerJobsByIDsWithQueuePositionLockedTagBasedQueue mimics the
+// tag-aware GetProvisionerJobsByIDsWithQueuePosition SQL query in pure Go.
+func (q *FakeQuerier) getProvisionerJobsByIDsWithQueuePositionLockedTagBasedQueue(_ context.Context, jobIDs []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) {
+ // Step 1: Filter provisionerJobs based on jobIDs
+ filteredJobs := make(map[uuid.UUID]database.ProvisionerJob)
+ for _, job := range q.provisionerJobs {
+ for _, id := range jobIDs {
+ if job.ID == id {
+ filteredJobs[job.ID] = job
+ }
+ }
+ }
+
+ // Step 2: Identify pending jobs
+ pendingJobs := make(map[uuid.UUID]database.ProvisionerJob)
+ for _, job := range q.provisionerJobs {
+ if job.JobStatus == database.ProvisionerJobStatusPending {
+ pendingJobs[job.ID] = job
+ }
+ }
+
+ // Step 3: Identify pending jobs that have a matching provisioner
+ matchedJobs := make(map[uuid.UUID]struct{})
+ for _, job := range pendingJobs {
+ for _, daemon := range q.provisionerDaemons {
+ if provisionerTagsetContains(daemon.Tags, job.Tags) {
+ matchedJobs[job.ID] = struct{}{}
+ break
+ }
+ }
+ }
+
+ // Step 4: Rank pending jobs per provisioner
+ jobRanks := make(map[uuid.UUID][]database.ProvisionerJob)
+ for _, job := range pendingJobs {
+ for _, daemon := range q.provisionerDaemons {
+ if provisionerTagsetContains(daemon.Tags, job.Tags) {
+ jobRanks[daemon.ID] = append(jobRanks[daemon.ID], job)
+ }
+ }
+ }
+
+ // Sort jobs per provisioner by CreatedAt
+ for daemonID := range jobRanks {
+ sort.Slice(jobRanks[daemonID], func(i, j int) bool {
+ return jobRanks[daemonID][i].CreatedAt.Before(jobRanks[daemonID][j].CreatedAt)
+ })
+ }
+
+ // Step 5: Compute queue position & max queue size across all provisioners
+ jobQueueStats := make(map[uuid.UUID]database.GetProvisionerJobsByIDsWithQueuePositionRow)
+ for _, jobs := range jobRanks {
+ queueSize := int64(len(jobs)) // Queue size per provisioner
+ for i, job := range jobs {
+ queuePosition := int64(i + 1)
+
+ // If the job already exists, update only if this queuePosition is better
+ if existing, exists := jobQueueStats[job.ID]; exists {
+ jobQueueStats[job.ID] = database.GetProvisionerJobsByIDsWithQueuePositionRow{
+ ID: job.ID,
+ CreatedAt: job.CreatedAt,
+ ProvisionerJob: job,
+ QueuePosition: min(existing.QueuePosition, queuePosition),
+ QueueSize: max(existing.QueueSize, queueSize), // Take the maximum queue size across provisioners
+ }
+ } else {
+ jobQueueStats[job.ID] = database.GetProvisionerJobsByIDsWithQueuePositionRow{
+ ID: job.ID,
+ CreatedAt: job.CreatedAt,
+ ProvisionerJob: job,
+ QueuePosition: queuePosition,
+ QueueSize: queueSize,
+ }
+ }
+ }
+ }
+
+ // Step 6: Compute the final results with minimal checks
+ var results []database.GetProvisionerJobsByIDsWithQueuePositionRow
+ for _, job := range filteredJobs {
+ // If the job has a computed rank, use it
+ if rank, found := jobQueueStats[job.ID]; found {
+ results = append(results, rank)
+ } else {
+ // Otherwise, return (0,0) for non-pending jobs and unranked pending jobs
+ results = append(results, database.GetProvisionerJobsByIDsWithQueuePositionRow{
+ ID: job.ID,
+ CreatedAt: job.CreatedAt,
+ ProvisionerJob: job,
+ QueuePosition: 0,
+ QueueSize: 0,
+ })
+ }
+ }
+
+ // Step 7: Sort results by CreatedAt
+ sort.Slice(results, func(i, j int) bool {
+ return results[i].CreatedAt.Before(results[j].CreatedAt)
+ })
+
+ return results, nil
+}
+
+func (q *FakeQuerier) getProvisionerJobsByIDsWithQueuePositionLockedGlobalQueue(_ context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) {
// WITH pending_jobs AS (
// SELECT
// id, created_at
@@ -1602,6 +1724,26 @@ func (*FakeQuerier) CleanTailnetTunnels(context.Context) error {
return ErrUnimplemented
}
+func (q *FakeQuerier) CountUnreadInboxNotificationsByUserID(_ context.Context, userID uuid.UUID) (int64, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ var count int64
+ for _, notification := range q.InboxNotification {
+ if notification.UserID != userID {
+ continue
+ }
+
+ if notification.ReadAt.Valid {
+ continue
+ }
+
+ count++
+ }
+
+ return count, nil
+}
+
func (q *FakeQuerier) CustomRoles(_ context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) {
q.mutex.Lock()
defer q.mutex.Unlock()
@@ -2365,6 +2507,19 @@ func (q *FakeQuerier) FetchMemoryResourceMonitorsByAgentID(_ context.Context, ag
return database.WorkspaceAgentMemoryResourceMonitor{}, sql.ErrNoRows
}
+func (q *FakeQuerier) FetchMemoryResourceMonitorsUpdatedAfter(_ context.Context, updatedAt time.Time) ([]database.WorkspaceAgentMemoryResourceMonitor, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ monitors := []database.WorkspaceAgentMemoryResourceMonitor{}
+ for _, monitor := range q.workspaceAgentMemoryResourceMonitors {
+ if monitor.UpdatedAt.After(updatedAt) {
+ monitors = append(monitors, monitor)
+ }
+ }
+ return monitors, nil
+}
+
func (q *FakeQuerier) FetchNewMessageMetadata(_ context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) {
err := validateDatabaseType(arg)
if err != nil {
@@ -2409,6 +2564,19 @@ func (q *FakeQuerier) FetchVolumesResourceMonitorsByAgentID(_ context.Context, a
return monitors, nil
}
+func (q *FakeQuerier) FetchVolumesResourceMonitorsUpdatedAfter(_ context.Context, updatedAt time.Time) ([]database.WorkspaceAgentVolumeResourceMonitor, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ monitors := []database.WorkspaceAgentVolumeResourceMonitor{}
+ for _, monitor := range q.workspaceAgentVolumeResourceMonitors {
+ if monitor.UpdatedAt.After(updatedAt) {
+ monitors = append(monitors, monitor)
+ }
+ }
+ return monitors, nil
+}
+
func (q *FakeQuerier) GetAPIKeyByID(_ context.Context, id string) (database.APIKey, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -2794,8 +2962,8 @@ func (q *FakeQuerier) GetDeploymentWorkspaceAgentStats(_ context.Context, create
latencies = append(latencies, agentStat.ConnectionMedianLatencyMS)
}
- stat.WorkspaceConnectionLatency50 = tryPercentile(latencies, 50)
- stat.WorkspaceConnectionLatency95 = tryPercentile(latencies, 95)
+ stat.WorkspaceConnectionLatency50 = tryPercentileCont(latencies, 50)
+ stat.WorkspaceConnectionLatency95 = tryPercentileCont(latencies, 95)
return stat, nil
}
@@ -2843,8 +3011,8 @@ func (q *FakeQuerier) GetDeploymentWorkspaceAgentUsageStats(_ context.Context, c
stat.WorkspaceTxBytes += agentStat.TxBytes
latencies = append(latencies, agentStat.ConnectionMedianLatencyMS)
}
- stat.WorkspaceConnectionLatency50 = tryPercentile(latencies, 50)
- stat.WorkspaceConnectionLatency95 = tryPercentile(latencies, 95)
+ stat.WorkspaceConnectionLatency50 = tryPercentileCont(latencies, 50)
+ stat.WorkspaceConnectionLatency95 = tryPercentileCont(latencies, 95)
for _, agentStat := range sessions {
stat.SessionCountVSCode += agentStat.SessionCountVSCode
@@ -3126,6 +3294,45 @@ func (q *FakeQuerier) GetFileTemplates(_ context.Context, id uuid.UUID) ([]datab
return rows, nil
}
+func (q *FakeQuerier) GetFilteredInboxNotificationsByUserID(_ context.Context, arg database.GetFilteredInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ notifications := make([]database.InboxNotification, 0)
+ for _, notification := range q.InboxNotification {
+ if notification.UserID != arg.UserID {
+ continue
+ }
+
+ // Template filter: when templates are provided, the notification's
+ // template must be one of them (template_id = ANY(@templates)).
+ if len(arg.Templates) > 0 && !slices.Contains(arg.Templates, notification.TemplateID) {
+ continue
+ }
+
+ // Target filter: the notification must contain every requested target
+ // (targets @> @targets).
+ containsAllTargets := true
+ for _, target := range arg.Targets {
+ if !slices.Contains(notification.Targets, target) {
+ containsAllTargets = false
+ break
+ }
+ }
+ if !containsAllTargets {
+ continue
+ }
+
+ notifications = append(notifications, notification)
+ }
+
+ return notifications, nil
+}
+
func (q *FakeQuerier) GetGitSSHKey(_ context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -3324,6 +3531,33 @@ func (q *FakeQuerier) GetHungProvisionerJobs(_ context.Context, hungSince time.T
return hungJobs, nil
}
+func (q *FakeQuerier) GetInboxNotificationByID(_ context.Context, id uuid.UUID) (database.InboxNotification, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ for _, notification := range q.InboxNotification {
+ if notification.ID == id {
+ return notification, nil
+ }
+ }
+
+ return database.InboxNotification{}, sql.ErrNoRows
+}
+
+func (q *FakeQuerier) GetInboxNotificationsByUserID(_ context.Context, params database.GetInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ notifications := make([]database.InboxNotification, 0)
+ for _, notification := range q.InboxNotification {
+ if notification.UserID == params.UserID {
+ notifications = append(notifications, notification)
+ }
+ }
+
+ return notifications, nil
+}
+
func (q *FakeQuerier) GetJFrogXrayScanByWorkspaceAndAgentID(_ context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
err := validateDatabaseType(arg)
if err != nil {
@@ -4085,7 +4319,7 @@ func (q *FakeQuerier) GetProvisionerDaemonsWithStatusByOrganization(ctx context.
}
slices.SortFunc(rows, func(a, b database.GetProvisionerDaemonsWithStatusByOrganizationRow) int {
- return a.ProvisionerDaemon.CreatedAt.Compare(b.ProvisionerDaemon.CreatedAt)
+ return b.ProvisionerDaemon.CreatedAt.Compare(a.ProvisionerDaemon.CreatedAt)
})
if arg.Limit.Valid && arg.Limit.Int32 > 0 && len(rows) > int(arg.Limit.Int32) {
@@ -4153,7 +4387,7 @@ func (q *FakeQuerier) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Conte
if ids == nil {
ids = []uuid.UUID{}
}
- return q.getProvisionerJobsByIDsWithQueuePositionLocked(ctx, ids)
+ return q.getProvisionerJobsByIDsWithQueuePositionLockedTagBasedQueue(ctx, ids)
}
func (q *FakeQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(ctx context.Context, arg database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams) ([]database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow, error) {
@@ -4222,7 +4456,7 @@ func (q *FakeQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePosition
LIMIT
sqlc.narg('limit')::int;
*/
- rowsWithQueuePosition, err := q.getProvisionerJobsByIDsWithQueuePositionLocked(ctx, nil)
+ rowsWithQueuePosition, err := q.getProvisionerJobsByIDsWithQueuePositionLockedGlobalQueue(ctx, nil)
if err != nil {
return nil, err
}
@@ -5003,9 +5237,9 @@ func (q *FakeQuerier) GetTemplateAverageBuildTime(ctx context.Context, arg datab
}
var row database.GetTemplateAverageBuildTimeRow
- row.Delete50, row.Delete95 = tryPercentile(deleteTimes, 50), tryPercentile(deleteTimes, 95)
- row.Stop50, row.Stop95 = tryPercentile(stopTimes, 50), tryPercentile(stopTimes, 95)
- row.Start50, row.Start95 = tryPercentile(startTimes, 50), tryPercentile(startTimes, 95)
+ row.Delete50, row.Delete95 = tryPercentileDisc(deleteTimes, 50), tryPercentileDisc(deleteTimes, 95)
+ row.Stop50, row.Stop95 = tryPercentileDisc(stopTimes, 50), tryPercentileDisc(stopTimes, 95)
+ row.Start50, row.Start95 = tryPercentileDisc(startTimes, 50), tryPercentileDisc(startTimes, 95)
return row, nil
}
@@ -6044,8 +6278,8 @@ func (q *FakeQuerier) GetUserLatencyInsights(_ context.Context, arg database.Get
Username: user.Username,
AvatarURL: user.AvatarURL,
TemplateIDs: seenTemplatesByUserID[userID],
- WorkspaceConnectionLatency50: tryPercentile(latencies, 50),
- WorkspaceConnectionLatency95: tryPercentile(latencies, 95),
+ WorkspaceConnectionLatency50: tryPercentileCont(latencies, 50),
+ WorkspaceConnectionLatency95: tryPercentileCont(latencies, 95),
}
rows = append(rows, row)
}
@@ -6689,8 +6923,8 @@ func (q *FakeQuerier) GetWorkspaceAgentStats(_ context.Context, createdAfter tim
if !ok {
continue
}
- stat.WorkspaceConnectionLatency50 = tryPercentile(latencies, 50)
- stat.WorkspaceConnectionLatency95 = tryPercentile(latencies, 95)
+ stat.WorkspaceConnectionLatency50 = tryPercentileCont(latencies, 50)
+ stat.WorkspaceConnectionLatency95 = tryPercentileCont(latencies, 95)
statByAgent[stat.AgentID] = stat
}
@@ -6827,8 +7061,8 @@ func (q *FakeQuerier) GetWorkspaceAgentUsageStats(_ context.Context, createdAt t
for key, latencies := range latestAgentLatencies {
val, ok := latestAgentStats[key]
if ok {
- val.WorkspaceConnectionLatency50 = tryPercentile(latencies, 50)
- val.WorkspaceConnectionLatency95 = tryPercentile(latencies, 95)
+ val.WorkspaceConnectionLatency50 = tryPercentileCont(latencies, 50)
+ val.WorkspaceConnectionLatency95 = tryPercentileCont(latencies, 95)
}
latestAgentStats[key] = val
}
@@ -6938,7 +7172,7 @@ func (q *FakeQuerier) GetWorkspaceAgentUsageStatsAndLabels(_ context.Context, cr
}
// WHERE usage = true AND created_at > now() - '1 minute'::interval
// GROUP BY user_id, agent_id, workspace_id
- if agentStat.Usage && agentStat.CreatedAt.After(time.Now().Add(-time.Minute)) {
+ if agentStat.Usage && agentStat.CreatedAt.After(dbtime.Now().Add(-time.Minute)) {
val, ok := latestAgentStats[key]
if !ok {
latestAgentStats[key] = agentStat
@@ -7977,6 +8211,30 @@ func (q *FakeQuerier) InsertGroupMember(_ context.Context, arg database.InsertGr
return nil
}
+func (q *FakeQuerier) InsertInboxNotification(_ context.Context, arg database.InsertInboxNotificationParams) (database.InboxNotification, error) {
+ if err := validateDatabaseType(arg); err != nil {
+ return database.InboxNotification{}, err
+ }
+
+ q.mutex.Lock()
+ defer q.mutex.Unlock()
+
+ notification := database.InboxNotification{
+ ID: arg.ID,
+ UserID: arg.UserID,
+ TemplateID: arg.TemplateID,
+ Targets: arg.Targets,
+ Title: arg.Title,
+ Content: arg.Content,
+ Icon: arg.Icon,
+ Actions: arg.Actions,
+ CreatedAt: arg.CreatedAt,
+ }
+
+ q.InboxNotification = append(q.InboxNotification, notification)
+ return notification, nil
+}
+
func (q *FakeQuerier) InsertLicense(
_ context.Context, arg database.InsertLicenseParams,
) (database.License, error) {
@@ -9700,6 +9958,24 @@ func (q *FakeQuerier) UpdateInactiveUsersToDormant(_ context.Context, params dat
return updated, nil
}
+func (q *FakeQuerier) UpdateInboxNotificationReadStatus(_ context.Context, arg database.UpdateInboxNotificationReadStatusParams) error {
+ err := validateDatabaseType(arg)
+ if err != nil {
+ return err
+ }
+
+ q.mutex.Lock()
+ defer q.mutex.Unlock()
+
+ for i := range q.InboxNotification {
+ if q.InboxNotification[i].ID == arg.ID {
+ q.InboxNotification[i].ReadAt = arg.ReadAt
+ }
+ }
+
+ return nil
+}
+
func (q *FakeQuerier) UpdateMemberRoles(_ context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
if err := validateDatabaseType(arg); err != nil {
return database.OrganizationMember{}, err
diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go
index b0309f9f2e..fbf4a3cae6 100644
--- a/coderd/database/dbmetrics/dbmetrics.go
+++ b/coderd/database/dbmetrics/dbmetrics.go
@@ -2,11 +2,11 @@ package dbmetrics
import (
"context"
+ "slices"
"strconv"
"time"
"github.com/prometheus/client_golang/prometheus"
- "golang.org/x/exp/slices"
"cdr.dev/slog"
"github.com/coder/coder/v2/coderd/database"
diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go
index c6b5c422ee..8a2923465a 100644
--- a/coderd/database/dbmetrics/querymetrics.go
+++ b/coderd/database/dbmetrics/querymetrics.go
@@ -5,11 +5,11 @@ package dbmetrics
import (
"context"
+ "slices"
"time"
"github.com/google/uuid"
"github.com/prometheus/client_golang/prometheus"
- "golang.org/x/exp/slices"
"cdr.dev/slog"
"github.com/coder/coder/v2/coderd/database"
@@ -185,6 +185,13 @@ func (m queryMetricsStore) CleanTailnetTunnels(ctx context.Context) error {
return r0
}
+func (m queryMetricsStore) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) {
+ start := time.Now()
+ r0, r1 := m.s.CountUnreadInboxNotificationsByUserID(ctx, userID)
+ m.queryLatencies.WithLabelValues("CountUnreadInboxNotificationsByUserID").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) {
start := time.Now()
r0, r1 := m.s.CustomRoles(ctx, arg)
@@ -451,6 +458,13 @@ func (m queryMetricsStore) FetchMemoryResourceMonitorsByAgentID(ctx context.Cont
return r0, r1
}
+func (m queryMetricsStore) FetchMemoryResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.WorkspaceAgentMemoryResourceMonitor, error) {
+ start := time.Now()
+ r0, r1 := m.s.FetchMemoryResourceMonitorsUpdatedAfter(ctx, updatedAt)
+ m.queryLatencies.WithLabelValues("FetchMemoryResourceMonitorsUpdatedAfter").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) {
start := time.Now()
r0, r1 := m.s.FetchNewMessageMetadata(ctx, arg)
@@ -465,6 +479,13 @@ func (m queryMetricsStore) FetchVolumesResourceMonitorsByAgentID(ctx context.Con
return r0, r1
}
+func (m queryMetricsStore) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.WorkspaceAgentVolumeResourceMonitor, error) {
+ start := time.Now()
+ r0, r1 := m.s.FetchVolumesResourceMonitorsUpdatedAfter(ctx, updatedAt)
+ m.queryLatencies.WithLabelValues("FetchVolumesResourceMonitorsUpdatedAfter").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) {
start := time.Now()
apiKey, err := m.s.GetAPIKeyByID(ctx, id)
@@ -717,6 +738,13 @@ func (m queryMetricsStore) GetFileTemplates(ctx context.Context, fileID uuid.UUI
return rows, err
}
+func (m queryMetricsStore) GetFilteredInboxNotificationsByUserID(ctx context.Context, arg database.GetFilteredInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetFilteredInboxNotificationsByUserID(ctx, arg)
+ m.queryLatencies.WithLabelValues("GetFilteredInboxNotificationsByUserID").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
start := time.Now()
key, err := m.s.GetGitSSHKey(ctx, userID)
@@ -780,6 +808,20 @@ func (m queryMetricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince
return jobs, err
}
+func (m queryMetricsStore) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetInboxNotificationByID(ctx, id)
+ m.queryLatencies.WithLabelValues("GetInboxNotificationByID").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
+func (m queryMetricsStore) GetInboxNotificationsByUserID(ctx context.Context, userID database.GetInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetInboxNotificationsByUserID(ctx, userID)
+ m.queryLatencies.WithLabelValues("GetInboxNotificationsByUserID").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
start := time.Now()
r0, r1 := m.s.GetJFrogXrayScanByWorkspaceAndAgentID(ctx, arg)
@@ -1914,6 +1956,13 @@ func (m queryMetricsStore) InsertGroupMember(ctx context.Context, arg database.I
return err
}
+func (m queryMetricsStore) InsertInboxNotification(ctx context.Context, arg database.InsertInboxNotificationParams) (database.InboxNotification, error) {
+ start := time.Now()
+ r0, r1 := m.s.InsertInboxNotification(ctx, arg)
+ m.queryLatencies.WithLabelValues("InsertInboxNotification").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) {
start := time.Now()
license, err := m.s.InsertLicense(ctx, arg)
@@ -2376,6 +2425,13 @@ func (m queryMetricsStore) UpdateInactiveUsersToDormant(ctx context.Context, las
return r0, r1
}
+func (m queryMetricsStore) UpdateInboxNotificationReadStatus(ctx context.Context, arg database.UpdateInboxNotificationReadStatusParams) error {
+ start := time.Now()
+ r0 := m.s.UpdateInboxNotificationReadStatus(ctx, arg)
+ m.queryLatencies.WithLabelValues("UpdateInboxNotificationReadStatus").Observe(time.Since(start).Seconds())
+ return r0
+}
+
func (m queryMetricsStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
start := time.Now()
member, err := m.s.UpdateMemberRoles(ctx, arg)
diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go
index 55877f2493..dc08a629b7 100644
--- a/coderd/database/dbmock/dbmock.go
+++ b/coderd/database/dbmock/dbmock.go
@@ -247,6 +247,21 @@ func (mr *MockStoreMockRecorder) CleanTailnetTunnels(ctx any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CleanTailnetTunnels", reflect.TypeOf((*MockStore)(nil).CleanTailnetTunnels), ctx)
}
+// CountUnreadInboxNotificationsByUserID mocks base method.
+func (m *MockStore) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "CountUnreadInboxNotificationsByUserID", ctx, userID)
+ ret0, _ := ret[0].(int64)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// CountUnreadInboxNotificationsByUserID indicates an expected call of CountUnreadInboxNotificationsByUserID.
+func (mr *MockStoreMockRecorder) CountUnreadInboxNotificationsByUserID(ctx, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountUnreadInboxNotificationsByUserID", reflect.TypeOf((*MockStore)(nil).CountUnreadInboxNotificationsByUserID), ctx, userID)
+}
+
// CustomRoles mocks base method.
func (m *MockStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) {
m.ctrl.T.Helper()
@@ -787,6 +802,21 @@ func (mr *MockStoreMockRecorder) FetchMemoryResourceMonitorsByAgentID(ctx, agent
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchMemoryResourceMonitorsByAgentID", reflect.TypeOf((*MockStore)(nil).FetchMemoryResourceMonitorsByAgentID), ctx, agentID)
}
+// FetchMemoryResourceMonitorsUpdatedAfter mocks base method.
+func (m *MockStore) FetchMemoryResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.WorkspaceAgentMemoryResourceMonitor, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "FetchMemoryResourceMonitorsUpdatedAfter", ctx, updatedAt)
+ ret0, _ := ret[0].([]database.WorkspaceAgentMemoryResourceMonitor)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// FetchMemoryResourceMonitorsUpdatedAfter indicates an expected call of FetchMemoryResourceMonitorsUpdatedAfter.
+func (mr *MockStoreMockRecorder) FetchMemoryResourceMonitorsUpdatedAfter(ctx, updatedAt any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchMemoryResourceMonitorsUpdatedAfter", reflect.TypeOf((*MockStore)(nil).FetchMemoryResourceMonitorsUpdatedAfter), ctx, updatedAt)
+}
+
// FetchNewMessageMetadata mocks base method.
func (m *MockStore) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) {
m.ctrl.T.Helper()
@@ -817,6 +847,21 @@ func (mr *MockStoreMockRecorder) FetchVolumesResourceMonitorsByAgentID(ctx, agen
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchVolumesResourceMonitorsByAgentID", reflect.TypeOf((*MockStore)(nil).FetchVolumesResourceMonitorsByAgentID), ctx, agentID)
}
+// FetchVolumesResourceMonitorsUpdatedAfter mocks base method.
+func (m *MockStore) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.WorkspaceAgentVolumeResourceMonitor, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "FetchVolumesResourceMonitorsUpdatedAfter", ctx, updatedAt)
+ ret0, _ := ret[0].([]database.WorkspaceAgentVolumeResourceMonitor)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// FetchVolumesResourceMonitorsUpdatedAfter indicates an expected call of FetchVolumesResourceMonitorsUpdatedAfter.
+func (mr *MockStoreMockRecorder) FetchVolumesResourceMonitorsUpdatedAfter(ctx, updatedAt any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchVolumesResourceMonitorsUpdatedAfter", reflect.TypeOf((*MockStore)(nil).FetchVolumesResourceMonitorsUpdatedAfter), ctx, updatedAt)
+}
+
// GetAPIKeyByID mocks base method.
func (m *MockStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) {
m.ctrl.T.Helper()
@@ -1432,6 +1477,21 @@ func (mr *MockStoreMockRecorder) GetFileTemplates(ctx, fileID any) *gomock.Call
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFileTemplates", reflect.TypeOf((*MockStore)(nil).GetFileTemplates), ctx, fileID)
}
+// GetFilteredInboxNotificationsByUserID mocks base method.
+func (m *MockStore) GetFilteredInboxNotificationsByUserID(ctx context.Context, arg database.GetFilteredInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetFilteredInboxNotificationsByUserID", ctx, arg)
+ ret0, _ := ret[0].([]database.InboxNotification)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetFilteredInboxNotificationsByUserID indicates an expected call of GetFilteredInboxNotificationsByUserID.
+func (mr *MockStoreMockRecorder) GetFilteredInboxNotificationsByUserID(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFilteredInboxNotificationsByUserID", reflect.TypeOf((*MockStore)(nil).GetFilteredInboxNotificationsByUserID), ctx, arg)
+}
+
// GetGitSSHKey mocks base method.
func (m *MockStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
m.ctrl.T.Helper()
@@ -1567,6 +1627,36 @@ func (mr *MockStoreMockRecorder) GetHungProvisionerJobs(ctx, updatedAt any) *gom
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHungProvisionerJobs", reflect.TypeOf((*MockStore)(nil).GetHungProvisionerJobs), ctx, updatedAt)
}
+// GetInboxNotificationByID mocks base method.
+func (m *MockStore) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetInboxNotificationByID", ctx, id)
+ ret0, _ := ret[0].(database.InboxNotification)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetInboxNotificationByID indicates an expected call of GetInboxNotificationByID.
+func (mr *MockStoreMockRecorder) GetInboxNotificationByID(ctx, id any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetInboxNotificationByID", reflect.TypeOf((*MockStore)(nil).GetInboxNotificationByID), ctx, id)
+}
+
+// GetInboxNotificationsByUserID mocks base method.
+func (m *MockStore) GetInboxNotificationsByUserID(ctx context.Context, arg database.GetInboxNotificationsByUserIDParams) ([]database.InboxNotification, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetInboxNotificationsByUserID", ctx, arg)
+ ret0, _ := ret[0].([]database.InboxNotification)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetInboxNotificationsByUserID indicates an expected call of GetInboxNotificationsByUserID.
+func (mr *MockStoreMockRecorder) GetInboxNotificationsByUserID(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetInboxNotificationsByUserID", reflect.TypeOf((*MockStore)(nil).GetInboxNotificationsByUserID), ctx, arg)
+}
+
// GetJFrogXrayScanByWorkspaceAndAgentID mocks base method.
func (m *MockStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
m.ctrl.T.Helper()
@@ -4037,6 +4127,21 @@ func (mr *MockStoreMockRecorder) InsertGroupMember(ctx, arg any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertGroupMember", reflect.TypeOf((*MockStore)(nil).InsertGroupMember), ctx, arg)
}
+// InsertInboxNotification mocks base method.
+func (m *MockStore) InsertInboxNotification(ctx context.Context, arg database.InsertInboxNotificationParams) (database.InboxNotification, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "InsertInboxNotification", ctx, arg)
+ ret0, _ := ret[0].(database.InboxNotification)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// InsertInboxNotification indicates an expected call of InsertInboxNotification.
+func (mr *MockStoreMockRecorder) InsertInboxNotification(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertInboxNotification", reflect.TypeOf((*MockStore)(nil).InsertInboxNotification), ctx, arg)
+}
+
// InsertLicense mocks base method.
func (m *MockStore) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) {
m.ctrl.T.Helper()
@@ -5041,6 +5146,20 @@ func (mr *MockStoreMockRecorder) UpdateInactiveUsersToDormant(ctx, arg any) *gom
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateInactiveUsersToDormant", reflect.TypeOf((*MockStore)(nil).UpdateInactiveUsersToDormant), ctx, arg)
}
+// UpdateInboxNotificationReadStatus mocks base method.
+func (m *MockStore) UpdateInboxNotificationReadStatus(ctx context.Context, arg database.UpdateInboxNotificationReadStatusParams) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpdateInboxNotificationReadStatus", ctx, arg)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// UpdateInboxNotificationReadStatus indicates an expected call of UpdateInboxNotificationReadStatus.
+func (mr *MockStoreMockRecorder) UpdateInboxNotificationReadStatus(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateInboxNotificationReadStatus", reflect.TypeOf((*MockStore)(nil).UpdateInboxNotificationReadStatus), ctx, arg)
+}
+
// UpdateMemberRoles mocks base method.
func (m *MockStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
m.ctrl.T.Helper()
diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go
index 3b21b1076c..2422bcc91d 100644
--- a/coderd/database/dbpurge/dbpurge_test.go
+++ b/coderd/database/dbpurge/dbpurge_test.go
@@ -7,6 +7,7 @@ import (
"database/sql"
"encoding/json"
"fmt"
+ "slices"
"testing"
"time"
@@ -14,7 +15,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
- "golang.org/x/exp/slices"
"cdr.dev/slog"
"cdr.dev/slog/sloggers/slogtest"
diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql
index c2479e6e15..14841812ad 100644
--- a/coderd/database/dump.sql
+++ b/coderd/database/dump.sql
@@ -66,6 +66,12 @@ CREATE TYPE group_source AS ENUM (
'oidc'
);
+CREATE TYPE inbox_notification_read_status AS ENUM (
+ 'all',
+ 'unread',
+ 'read'
+);
+
CREATE TYPE log_level AS ENUM (
'trace',
'debug',
@@ -902,6 +908,19 @@ CREATE VIEW group_members_expanded AS
COMMENT ON VIEW group_members_expanded IS 'Joins group members with user information, organization ID, group name. Includes both regular group members and organization members (as part of the "Everyone" group).';
+CREATE TABLE inbox_notifications (
+ id uuid NOT NULL,
+ user_id uuid NOT NULL,
+ template_id uuid NOT NULL,
+ targets uuid[],
+ title text NOT NULL,
+ content text NOT NULL,
+ icon text NOT NULL,
+ actions jsonb NOT NULL,
+ read_at timestamp with time zone,
+ created_at timestamp with time zone DEFAULT now() NOT NULL
+);
+
CREATE TABLE jfrog_xray_scans (
agent_id uuid NOT NULL,
workspace_id uuid NOT NULL,
@@ -2132,6 +2151,9 @@ ALTER TABLE ONLY groups
ALTER TABLE ONLY groups
ADD CONSTRAINT groups_pkey PRIMARY KEY (id);
+ALTER TABLE ONLY inbox_notifications
+ ADD CONSTRAINT inbox_notifications_pkey PRIMARY KEY (id);
+
ALTER TABLE ONLY jfrog_xray_scans
ADD CONSTRAINT jfrog_xray_scans_pkey PRIMARY KEY (agent_id, workspace_id);
@@ -2368,6 +2390,10 @@ CREATE INDEX idx_custom_roles_id ON custom_roles USING btree (id);
CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name));
+CREATE INDEX idx_inbox_notifications_user_id_read_at ON inbox_notifications USING btree (user_id, read_at);
+
+CREATE INDEX idx_inbox_notifications_user_id_template_id_targets ON inbox_notifications USING btree (user_id, template_id, targets);
+
CREATE INDEX idx_notification_messages_status ON notification_messages USING btree (status);
CREATE INDEX idx_organization_member_organization_id_uuid ON organization_members USING btree (organization_id);
@@ -2380,6 +2406,8 @@ CREATE UNIQUE INDEX idx_provisioner_daemons_org_name_owner_key ON provisioner_da
COMMENT ON INDEX idx_provisioner_daemons_org_name_owner_key IS 'Allow unique provisioner daemon names by organization and user';
+CREATE INDEX idx_provisioner_jobs_status ON provisioner_jobs USING btree (job_status);
+
CREATE INDEX idx_tailnet_agents_coordinator ON tailnet_agents USING btree (coordinator_id);
CREATE INDEX idx_tailnet_clients_coordinator ON tailnet_clients USING btree (coordinator_id);
@@ -2650,6 +2678,12 @@ ALTER TABLE ONLY group_members
ALTER TABLE ONLY groups
ADD CONSTRAINT groups_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
+ALTER TABLE ONLY inbox_notifications
+ ADD CONSTRAINT inbox_notifications_template_id_fkey FOREIGN KEY (template_id) REFERENCES notification_templates(id) ON DELETE CASCADE;
+
+ALTER TABLE ONLY inbox_notifications
+ ADD CONSTRAINT inbox_notifications_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY jfrog_xray_scans
ADD CONSTRAINT jfrog_xray_scans_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go
index f292ec8204..b284b42435 100644
--- a/coderd/database/foreign_key_constraint.go
+++ b/coderd/database/foreign_key_constraint.go
@@ -14,6 +14,8 @@ const (
ForeignKeyGroupMembersGroupID ForeignKeyConstraint = "group_members_group_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_group_id_fkey FOREIGN KEY (group_id) REFERENCES groups(id) ON DELETE CASCADE;
ForeignKeyGroupMembersUserID ForeignKeyConstraint = "group_members_user_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyGroupsOrganizationID ForeignKeyConstraint = "groups_organization_id_fkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
+ ForeignKeyInboxNotificationsTemplateID ForeignKeyConstraint = "inbox_notifications_template_id_fkey" // ALTER TABLE ONLY inbox_notifications ADD CONSTRAINT inbox_notifications_template_id_fkey FOREIGN KEY (template_id) REFERENCES notification_templates(id) ON DELETE CASCADE;
+ ForeignKeyInboxNotificationsUserID ForeignKeyConstraint = "inbox_notifications_user_id_fkey" // ALTER TABLE ONLY inbox_notifications ADD CONSTRAINT inbox_notifications_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyJfrogXrayScansAgentID ForeignKeyConstraint = "jfrog_xray_scans_agent_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyJfrogXrayScansWorkspaceID ForeignKeyConstraint = "jfrog_xray_scans_workspace_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE;
ForeignKeyNotificationMessagesNotificationTemplateID ForeignKeyConstraint = "notification_messages_notification_template_id_fkey" // ALTER TABLE ONLY notification_messages ADD CONSTRAINT notification_messages_notification_template_id_fkey FOREIGN KEY (notification_template_id) REFERENCES notification_templates(id) ON DELETE CASCADE;
diff --git a/coderd/database/gentest/modelqueries_test.go b/coderd/database/gentest/modelqueries_test.go
index 52a99b5440..1025aaf324 100644
--- a/coderd/database/gentest/modelqueries_test.go
+++ b/coderd/database/gentest/modelqueries_test.go
@@ -5,11 +5,11 @@ import (
"go/ast"
"go/parser"
"go/token"
+ "slices"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
)
// TestCustomQueriesSynced makes sure the manual custom queries in modelqueries.go
diff --git a/coderd/database/migrations/000297_notifications_inbox.down.sql b/coderd/database/migrations/000297_notifications_inbox.down.sql
new file mode 100644
index 0000000000..9d39b226c8
--- /dev/null
+++ b/coderd/database/migrations/000297_notifications_inbox.down.sql
@@ -0,0 +1,3 @@
+DROP TABLE IF EXISTS inbox_notifications;
+
+DROP TYPE IF EXISTS inbox_notification_read_status;
diff --git a/coderd/database/migrations/000297_notifications_inbox.up.sql b/coderd/database/migrations/000297_notifications_inbox.up.sql
new file mode 100644
index 0000000000..c3754c5367
--- /dev/null
+++ b/coderd/database/migrations/000297_notifications_inbox.up.sql
@@ -0,0 +1,17 @@
+CREATE TYPE inbox_notification_read_status AS ENUM ('all', 'unread', 'read');
+
+CREATE TABLE inbox_notifications (
+ id UUID PRIMARY KEY,
+ user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+ template_id UUID NOT NULL REFERENCES notification_templates(id) ON DELETE CASCADE,
+ targets UUID[],
+ title TEXT NOT NULL,
+ content TEXT NOT NULL,
+ icon TEXT NOT NULL,
+ actions JSONB NOT NULL,
+ read_at TIMESTAMP WITH TIME ZONE,
+ created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
+);
+
+CREATE INDEX idx_inbox_notifications_user_id_read_at ON inbox_notifications(user_id, read_at);
+CREATE INDEX idx_inbox_notifications_user_id_template_id_targets ON inbox_notifications(user_id, template_id, targets);
diff --git a/coderd/database/migrations/000298_provisioner_jobs_status_idx.down.sql b/coderd/database/migrations/000298_provisioner_jobs_status_idx.down.sql
new file mode 100644
index 0000000000..e7e976e0e2
--- /dev/null
+++ b/coderd/database/migrations/000298_provisioner_jobs_status_idx.down.sql
@@ -0,0 +1 @@
+DROP INDEX idx_provisioner_jobs_status;
diff --git a/coderd/database/migrations/000298_provisioner_jobs_status_idx.up.sql b/coderd/database/migrations/000298_provisioner_jobs_status_idx.up.sql
new file mode 100644
index 0000000000..8a13752324
--- /dev/null
+++ b/coderd/database/migrations/000298_provisioner_jobs_status_idx.up.sql
@@ -0,0 +1 @@
+CREATE INDEX idx_provisioner_jobs_status ON provisioner_jobs USING btree (job_status);
diff --git a/coderd/database/migrations/000297_system_user.down.sql b/coderd/database/migrations/000299_system_user.down.sql
similarity index 100%
rename from coderd/database/migrations/000297_system_user.down.sql
rename to coderd/database/migrations/000299_system_user.down.sql
diff --git a/coderd/database/migrations/000297_system_user.up.sql b/coderd/database/migrations/000299_system_user.up.sql
similarity index 100%
rename from coderd/database/migrations/000297_system_user.up.sql
rename to coderd/database/migrations/000299_system_user.up.sql
diff --git a/coderd/database/migrations/000298_prebuilds.down.sql b/coderd/database/migrations/000300_prebuilds.down.sql
similarity index 100%
rename from coderd/database/migrations/000298_prebuilds.down.sql
rename to coderd/database/migrations/000300_prebuilds.down.sql
diff --git a/coderd/database/migrations/000298_prebuilds.up.sql b/coderd/database/migrations/000300_prebuilds.up.sql
similarity index 100%
rename from coderd/database/migrations/000298_prebuilds.up.sql
rename to coderd/database/migrations/000300_prebuilds.up.sql
diff --git a/coderd/database/migrations/000299_preset_prebuilds.down.sql b/coderd/database/migrations/000301_preset_prebuilds.down.sql
similarity index 100%
rename from coderd/database/migrations/000299_preset_prebuilds.down.sql
rename to coderd/database/migrations/000301_preset_prebuilds.down.sql
diff --git a/coderd/database/migrations/000299_preset_prebuilds.up.sql b/coderd/database/migrations/000301_preset_prebuilds.up.sql
similarity index 100%
rename from coderd/database/migrations/000299_preset_prebuilds.up.sql
rename to coderd/database/migrations/000301_preset_prebuilds.up.sql
diff --git a/coderd/database/migrations/migrate_test.go b/coderd/database/migrations/migrate_test.go
index bd347af0be..62e301a422 100644
--- a/coderd/database/migrations/migrate_test.go
+++ b/coderd/database/migrations/migrate_test.go
@@ -6,6 +6,7 @@ import (
"fmt"
"os"
"path/filepath"
+ "slices"
"sync"
"testing"
@@ -17,7 +18,6 @@ import (
"github.com/lib/pq"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
- "golang.org/x/exp/slices"
"golang.org/x/sync/errgroup"
"github.com/coder/coder/v2/coderd/database/dbtestutil"
diff --git a/coderd/database/migrations/testdata/fixtures/000297_notifications_inbox.up.sql b/coderd/database/migrations/testdata/fixtures/000297_notifications_inbox.up.sql
new file mode 100644
index 0000000000..fb4cecf096
--- /dev/null
+++ b/coderd/database/migrations/testdata/fixtures/000297_notifications_inbox.up.sql
@@ -0,0 +1,25 @@
+INSERT INTO
+ inbox_notifications (
+ id,
+ user_id,
+ template_id,
+ targets,
+ title,
+ content,
+ icon,
+ actions,
+ read_at,
+ created_at
+ )
+ VALUES (
+ '68b396aa-7f53-4bf1-b8d8-4cbf5fa244e5', -- uuid
+ '5755e622-fadd-44ca-98da-5df070491844', -- uuid
+ 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', -- uuid
+ ARRAY[]::UUID[], -- uuid[]
+ 'Test Notification',
+ 'This is a test notification',
+ 'https://test.coder.com/favicon.ico',
+ '{}',
+ '2025-01-01 00:00:00',
+ '2025-01-01 00:00:00'
+ );
diff --git a/coderd/database/modelmethods.go b/coderd/database/modelmethods.go
index 803cfbf01c..d9013b1f08 100644
--- a/coderd/database/modelmethods.go
+++ b/coderd/database/modelmethods.go
@@ -168,6 +168,12 @@ func (TemplateVersion) RBACObject(template Template) rbac.Object {
return template.RBACObject()
}
+func (i InboxNotification) RBACObject() rbac.Object {
+ return rbac.ResourceInboxNotification.
+ WithID(i.ID).
+ WithOwner(i.UserID.String())
+}
+
// RBACObjectNoTemplate is for orphaned template versions.
func (v TemplateVersion) RBACObjectNoTemplate() rbac.Object {
return rbac.ResourceTemplate.InOrg(v.OrganizationID)
diff --git a/coderd/database/models.go b/coderd/database/models.go
index 20b0154e38..f19018b9d1 100644
--- a/coderd/database/models.go
+++ b/coderd/database/models.go
@@ -543,6 +543,67 @@ func AllGroupSourceValues() []GroupSource {
}
}
+type InboxNotificationReadStatus string
+
+const (
+ InboxNotificationReadStatusAll InboxNotificationReadStatus = "all"
+ InboxNotificationReadStatusUnread InboxNotificationReadStatus = "unread"
+ InboxNotificationReadStatusRead InboxNotificationReadStatus = "read"
+)
+
+func (e *InboxNotificationReadStatus) Scan(src interface{}) error {
+ switch s := src.(type) {
+ case []byte:
+ *e = InboxNotificationReadStatus(s)
+ case string:
+ *e = InboxNotificationReadStatus(s)
+ default:
+ return fmt.Errorf("unsupported scan type for InboxNotificationReadStatus: %T", src)
+ }
+ return nil
+}
+
+type NullInboxNotificationReadStatus struct {
+ InboxNotificationReadStatus InboxNotificationReadStatus `json:"inbox_notification_read_status"`
+ Valid bool `json:"valid"` // Valid is true if InboxNotificationReadStatus is not NULL
+}
+
+// Scan implements the Scanner interface.
+func (ns *NullInboxNotificationReadStatus) Scan(value interface{}) error {
+ if value == nil {
+ ns.InboxNotificationReadStatus, ns.Valid = "", false
+ return nil
+ }
+ ns.Valid = true
+ return ns.InboxNotificationReadStatus.Scan(value)
+}
+
+// Value implements the driver Valuer interface.
+func (ns NullInboxNotificationReadStatus) Value() (driver.Value, error) {
+ if !ns.Valid {
+ return nil, nil
+ }
+ return string(ns.InboxNotificationReadStatus), nil
+}
+
+func (e InboxNotificationReadStatus) Valid() bool {
+ switch e {
+ case InboxNotificationReadStatusAll,
+ InboxNotificationReadStatusUnread,
+ InboxNotificationReadStatusRead:
+ return true
+ }
+ return false
+}
+
+func AllInboxNotificationReadStatusValues() []InboxNotificationReadStatus {
+ return []InboxNotificationReadStatus{
+ InboxNotificationReadStatusAll,
+ InboxNotificationReadStatusUnread,
+ InboxNotificationReadStatusRead,
+ }
+}
+
type LogLevel string
const (
@@ -2557,6 +2618,19 @@ type GroupMemberTable struct {
GroupID uuid.UUID `db:"group_id" json:"group_id"`
}
+type InboxNotification struct {
+ ID uuid.UUID `db:"id" json:"id"`
+ UserID uuid.UUID `db:"user_id" json:"user_id"`
+ TemplateID uuid.UUID `db:"template_id" json:"template_id"`
+ Targets []uuid.UUID `db:"targets" json:"targets"`
+ Title string `db:"title" json:"title"`
+ Content string `db:"content" json:"content"`
+ Icon string `db:"icon" json:"icon"`
+ Actions json.RawMessage `db:"actions" json:"actions"`
+ ReadAt sql.NullTime `db:"read_at" json:"read_at"`
+ CreatedAt time.Time `db:"created_at" json:"created_at"`
+}
+
type JfrogXrayScan struct {
AgentID uuid.UUID `db:"agent_id" json:"agent_id"`
WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"`
diff --git a/coderd/database/querier.go b/coderd/database/querier.go
index 20e5b29e27..f0e05ada67 100644
--- a/coderd/database/querier.go
+++ b/coderd/database/querier.go
@@ -65,6 +65,7 @@ type sqlcQuerier interface {
CleanTailnetCoordinators(ctx context.Context) error
CleanTailnetLostPeers(ctx context.Context) error
CleanTailnetTunnels(ctx context.Context) error
+ CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error)
CustomRoles(ctx context.Context, arg CustomRolesParams) ([]CustomRole, error)
DeleteAPIKeyByID(ctx context.Context, id string) error
DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error
@@ -114,9 +115,11 @@ type sqlcQuerier interface {
EnqueueNotificationMessage(ctx context.Context, arg EnqueueNotificationMessageParams) error
FavoriteWorkspace(ctx context.Context, id uuid.UUID) error
FetchMemoryResourceMonitorsByAgentID(ctx context.Context, agentID uuid.UUID) (WorkspaceAgentMemoryResourceMonitor, error)
+ FetchMemoryResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]WorkspaceAgentMemoryResourceMonitor, error)
// This is used to build up the notification_message's JSON payload.
FetchNewMessageMetadata(ctx context.Context, arg FetchNewMessageMetadataParams) (FetchNewMessageMetadataRow, error)
FetchVolumesResourceMonitorsByAgentID(ctx context.Context, agentID uuid.UUID) ([]WorkspaceAgentVolumeResourceMonitor, error)
+ FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]WorkspaceAgentVolumeResourceMonitor, error)
GetAPIKeyByID(ctx context.Context, id string) (APIKey, error)
// there is no unique constraint on empty token names
GetAPIKeyByName(ctx context.Context, arg GetAPIKeyByNameParams) (APIKey, error)
@@ -160,6 +163,14 @@ type sqlcQuerier interface {
GetFileByID(ctx context.Context, id uuid.UUID) (File, error)
// Get all templates that use a file.
GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]GetFileTemplatesRow, error)
+ // Fetches inbox notifications for a user filtered by templates and targets
+ // param user_id: The user ID
+ // param templates: The template IDs to filter by - the template_id = ANY(@templates::UUID[]) condition checks if the template_id is in the @templates array
+ // param targets: The target IDs to filter by - the targets @> COALESCE(@targets, ARRAY[]::UUID[]) condition checks if the targets array (from the DB) contains all the elements in the @targets array
+ // param read_status: The read status to filter by - can be any of 'ALL', 'UNREAD', 'READ'
+ // param created_at_opt: The created_at timestamp to filter by. This parameter is used for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
+ // param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
+ GetFilteredInboxNotificationsByUserID(ctx context.Context, arg GetFilteredInboxNotificationsByUserIDParams) ([]InboxNotification, error)
GetGitSSHKey(ctx context.Context, userID uuid.UUID) (GitSSHKey, error)
GetGroupByID(ctx context.Context, id uuid.UUID) (Group, error)
GetGroupByOrgAndName(ctx context.Context, arg GetGroupByOrgAndNameParams) (Group, error)
@@ -172,6 +183,13 @@ type sqlcQuerier interface {
GetGroups(ctx context.Context, arg GetGroupsParams) ([]GetGroupsRow, error)
GetHealthSettings(ctx context.Context) (string, error)
GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]ProvisionerJob, error)
+ GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (InboxNotification, error)
+ // Fetches inbox notifications for a user filtered by templates and targets
+ // param user_id: The user ID
+ // param read_status: The read status to filter by - can be any of 'all', 'unread', 'read'
+ // param created_at_opt: The created_at timestamp to filter by. This parameter is used for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
+ // param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
+ GetInboxNotificationsByUserID(ctx context.Context, arg GetInboxNotificationsByUserIDParams) ([]InboxNotification, error)
GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg GetJFrogXrayScanByWorkspaceAndAgentIDParams) (JfrogXrayScan, error)
GetLastUpdateCheck(ctx context.Context) (string, error)
GetLatestCryptoKeyByFeature(ctx context.Context, feature CryptoKeyFeature) (CryptoKey, error)
@@ -402,6 +420,7 @@ type sqlcQuerier interface {
InsertGitSSHKey(ctx context.Context, arg InsertGitSSHKeyParams) (GitSSHKey, error)
InsertGroup(ctx context.Context, arg InsertGroupParams) (Group, error)
InsertGroupMember(ctx context.Context, arg InsertGroupMemberParams) error
+ InsertInboxNotification(ctx context.Context, arg InsertInboxNotificationParams) (InboxNotification, error)
InsertLicense(ctx context.Context, arg InsertLicenseParams) (License, error)
InsertMemoryResourceMonitor(ctx context.Context, arg InsertMemoryResourceMonitorParams) (WorkspaceAgentMemoryResourceMonitor, error)
// Inserts any group by name that does not exist. All new groups are given
@@ -486,6 +505,7 @@ type sqlcQuerier interface {
UpdateGitSSHKey(ctx context.Context, arg UpdateGitSSHKeyParams) (GitSSHKey, error)
UpdateGroupByID(ctx context.Context, arg UpdateGroupByIDParams) (Group, error)
UpdateInactiveUsersToDormant(ctx context.Context, arg UpdateInactiveUsersToDormantParams) ([]UpdateInactiveUsersToDormantRow, error)
+ UpdateInboxNotificationReadStatus(ctx context.Context, arg UpdateInboxNotificationReadStatusParams) error
UpdateMemberRoles(ctx context.Context, arg UpdateMemberRolesParams) (OrganizationMember, error)
UpdateMemoryResourceMonitor(ctx context.Context, arg UpdateMemoryResourceMonitorParams) error
UpdateNotificationTemplateMethodByID(ctx context.Context, arg UpdateNotificationTemplateMethodByIDParams) (NotificationTemplate, error)
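The new inbox-notification methods take their filters through a single params struct. As a rough usage sketch (assuming the generated `database` package and a seeded `db` store; the `database.InboxNotificationReadStatus("unread")` conversion stands in for whichever generated constant sqlc emits), a caller filtering by template and target could look like this:

```go
// Sketch only: fetch up to 25 unread notifications for a user, scoped to one
// template and one target, newest first (the query orders by created_at DESC).
notifs, err := db.GetFilteredInboxNotificationsByUserID(ctx, database.GetFilteredInboxNotificationsByUserIDParams{
	UserID:     user.ID,
	Templates:  []uuid.UUID{template.ID},  // matched via template_id = ANY(@templates)
	Targets:    []uuid.UUID{workspace.ID}, // matched via targets @> @targets
	ReadStatus: database.InboxNotificationReadStatus("unread"),
	// CreatedAtOpt left as the zero value: no pagination cursor, first page.
	// LimitOpt left as 0: the query falls back to the default limit of 25.
})
if err != nil {
	return err
}
for _, n := range notifs {
	fmt.Println(n.Title, n.ReadAt.Valid)
}
```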
diff --git a/coderd/database/querier_test.go b/coderd/database/querier_test.go
index 5d3e65bb51..ecf9a59c0a 100644
--- a/coderd/database/querier_test.go
+++ b/coderd/database/querier_test.go
@@ -1257,6 +1257,15 @@ func TestQueuePosition(t *testing.T) {
time.Sleep(time.Millisecond)
}
+ // Create default provisioner daemon:
+ dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
+ Name: "default_provisioner",
+ Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
+ // Ensure the `tags` field is NOT NULL for the default provisioner;
+ // otherwise, it won't be able to pick up any jobs.
+ Tags: database.StringMap{},
+ })
+
queued, err := db.GetProvisionerJobsByIDsWithQueuePosition(ctx, jobIDs)
require.NoError(t, err)
require.Len(t, queued, jobCount)
@@ -2159,6 +2168,307 @@ func TestExpectOne(t *testing.T) {
func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
t.Parallel()
+
+ now := dbtime.Now()
+ ctx := testutil.Context(t, testutil.WaitShort)
+
+ testCases := []struct {
+ name string
+ jobTags []database.StringMap
+ daemonTags []database.StringMap
+ queueSizes []int64
+ queuePositions []int64
+ // GetProvisionerJobsByIDsWithQueuePosition takes jobIDs as a parameter.
+ // If skipJobIDs is empty, all jobs are passed to the function; otherwise, the specified jobs are skipped.
+ // NOTE: Skipping job IDs means they will be excluded from the result,
+ // but this should not affect the queue position or queue size of other jobs.
+ skipJobIDs map[int]struct{}
+ }{
+ // Baseline test case
+ {
+ name: "test-case-1",
+ jobTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ },
+ queueSizes: []int64{2, 2, 0},
+ queuePositions: []int64{1, 1, 0},
+ },
+ // Includes an additional provisioner
+ {
+ name: "test-case-2",
+ jobTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{3, 3, 3},
+ queuePositions: []int64{1, 1, 3},
+ },
+ // Skips job at index 0
+ {
+ name: "test-case-3",
+ jobTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{3, 3},
+ queuePositions: []int64{1, 3},
+ skipJobIDs: map[int]struct{}{
+ 0: {},
+ },
+ },
+ // Skips job at index 1
+ {
+ name: "test-case-4",
+ jobTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{3, 3},
+ queuePositions: []int64{1, 3},
+ skipJobIDs: map[int]struct{}{
+ 1: {},
+ },
+ },
+ // Skips job at index 2
+ {
+ name: "test-case-5",
+ jobTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{3, 3},
+ queuePositions: []int64{1, 1},
+ skipJobIDs: map[int]struct{}{
+ 2: {},
+ },
+ },
+ // Skips jobs at indexes 0 and 2
+ {
+ name: "test-case-6",
+ jobTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{3},
+ queuePositions: []int64{1},
+ skipJobIDs: map[int]struct{}{
+ 0: {},
+ 2: {},
+ },
+ },
+ // Includes two additional jobs that any provisioner can execute.
+ {
+ name: "test-case-7",
+ jobTags: []database.StringMap{
+ {},
+ {},
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{5, 5, 5, 5, 5},
+ queuePositions: []int64{1, 2, 3, 3, 5},
+ },
+ // Includes two additional jobs that any provisioner can execute, but they are intentionally skipped.
+ {
+ name: "test-case-8",
+ jobTags: []database.StringMap{
+ {},
+ {},
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "c": "3"},
+ },
+ daemonTags: []database.StringMap{
+ {"a": "1", "b": "2"},
+ {"a": "1"},
+ {"a": "1", "b": "2", "c": "3"},
+ },
+ queueSizes: []int64{5, 5, 5},
+ queuePositions: []int64{3, 3, 5},
+ skipJobIDs: map[int]struct{}{
+ 0: {},
+ 1: {},
+ },
+ },
+ // N jobs (1 job with 0 tags) & 0 provisioners exist
+ {
+ name: "test-case-9",
+ jobTags: []database.StringMap{
+ {},
+ {"a": "1"},
+ {"b": "2"},
+ },
+ daemonTags: []database.StringMap{},
+ queueSizes: []int64{0, 0, 0},
+ queuePositions: []int64{0, 0, 0},
+ },
+ // N jobs (1 job with 0 tags) & N provisioners
+ {
+ name: "test-case-10",
+ jobTags: []database.StringMap{
+ {},
+ {"a": "1"},
+ {"b": "2"},
+ },
+ daemonTags: []database.StringMap{
+ {},
+ {"a": "1"},
+ {"b": "2"},
+ },
+ queueSizes: []int64{2, 2, 2},
+ queuePositions: []int64{1, 2, 2},
+ },
+ // (N + 1) jobs (1 job with 0 tags) & N provisioners
+ // 1 job not matching any provisioner (first in the list)
+ {
+ name: "test-case-11",
+ jobTags: []database.StringMap{
+ {"c": "3"},
+ {},
+ {"a": "1"},
+ {"b": "2"},
+ },
+ daemonTags: []database.StringMap{
+ {},
+ {"a": "1"},
+ {"b": "2"},
+ },
+ queueSizes: []int64{0, 2, 2, 2},
+ queuePositions: []int64{0, 1, 2, 2},
+ },
+ // 0 jobs & 0 provisioners
+ {
+ name: "test-case-12",
+ jobTags: []database.StringMap{},
+ daemonTags: []database.StringMap{},
+ queueSizes: nil, // TODO(yevhenii): should it be empty array instead?
+ queuePositions: nil,
+ },
+ }
+
+ for _, tc := range testCases {
+ tc := tc // Capture loop variable to avoid data races
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
+ db, _ := dbtestutil.NewDB(t)
+
+ // Create provisioner jobs based on provided tags:
+ allJobs := make([]database.ProvisionerJob, len(tc.jobTags))
+ for idx, tags := range tc.jobTags {
+ // Make sure jobs are stored in the correct order: the first job should have the earliest createdAt timestamp.
+ // Example for 3 jobs:
+ // job_1 createdAt: now - 3 minutes
+ // job_2 createdAt: now - 2 minutes
+ // job_3 createdAt: now - 1 minute
+ timeOffsetInMinutes := len(tc.jobTags) - idx
+ timeOffset := time.Duration(timeOffsetInMinutes) * time.Minute
+ createdAt := now.Add(-timeOffset)
+
+ allJobs[idx] = dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: createdAt,
+ Tags: tags,
+ })
+ }
+
+ // Create provisioner daemons based on provided tags:
+ for idx, tags := range tc.daemonTags {
+ dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
+ Name: fmt.Sprintf("prov_%v", idx),
+ Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
+ Tags: tags,
+ })
+ }
+
+ // Assert invariant: the jobs are in pending status
+ for idx, job := range allJobs {
+ require.Equal(t, database.ProvisionerJobStatusPending, job.JobStatus, "expected job %d to have status %s", idx, database.ProvisionerJobStatusPending)
+ }
+
+ filteredJobs := make([]database.ProvisionerJob, 0)
+ filteredJobIDs := make([]uuid.UUID, 0)
+ for idx, job := range allJobs {
+ if _, skip := tc.skipJobIDs[idx]; skip {
+ continue
+ }
+
+ filteredJobs = append(filteredJobs, job)
+ filteredJobIDs = append(filteredJobIDs, job.ID)
+ }
+
+ // When: we fetch the jobs by their IDs
+ actualJobs, err := db.GetProvisionerJobsByIDsWithQueuePosition(ctx, filteredJobIDs)
+ require.NoError(t, err)
+ require.Len(t, actualJobs, len(filteredJobs), "should return all unskipped jobs")
+
+ // Then: the jobs should be returned in the correct order (sorted by createdAt)
+ sort.Slice(filteredJobs, func(i, j int) bool {
+ return filteredJobs[i].CreatedAt.Before(filteredJobs[j].CreatedAt)
+ })
+ for idx, job := range actualJobs {
+ assert.EqualValues(t, filteredJobs[idx], job.ProvisionerJob)
+ }
+
+ // Then: the queue size should be set correctly
+ var queueSizes []int64
+ for _, job := range actualJobs {
+ queueSizes = append(queueSizes, job.QueueSize)
+ }
+ assert.EqualValues(t, tc.queueSizes, queueSizes, "expected queue sizes to be set correctly")
+
+ // Then: the queue position should be set correctly:
+ var queuePositions []int64
+ for _, job := range actualJobs {
+ queuePositions = append(queuePositions, job.QueuePosition)
+ }
+ assert.EqualValues(t, tc.queuePositions, queuePositions, "expected queue positions to be set correctly")
+ })
+ }
+}
+
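The expected queue sizes in the table above follow from the tag-matching rule the query relies on: a provisioner daemon can pick up a job only when the daemon's tag set contains every tag on the job. A hypothetical Go helper that roughly mirrors what `provisioner_tagset_contains` does on the SQL side (written here only to make the test expectations easier to read, not the production implementation) would be:

```go
// tagsetContains reports whether the daemon's tags are a superset of the job's tags.
// Illustrative only; the real check is the SQL function provisioner_tagset_contains,
// which may include extra handling not shown here.
func tagsetContains(daemonTags, jobTags map[string]string) bool {
	for k, v := range jobTags {
		if daemonTags[k] != v {
			return false
		}
	}
	return true
}

// Applied to test-case-1:
//   daemon {"a":"1","b":"2"} serves jobs {"a":"1","b":"2"} and {"a":"1"} -> queue size 2
//   no daemon serves {"a":"1","c":"3"}                                   -> queue size 0
```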
+func TestGetProvisionerJobsByIDsWithQueuePosition_MixedStatuses(t *testing.T) {
+ t.Parallel()
if !dbtestutil.WillUsePostgres() {
t.SkipNow()
}
@@ -2167,7 +2477,7 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
now := dbtime.Now()
ctx := testutil.Context(t, testutil.WaitShort)
- // Given the following provisioner jobs:
+ // Create the following provisioner jobs:
allJobs := []database.ProvisionerJob{
// Pending. This will be the last in the queue because
// it was created most recently.
@@ -2177,6 +2487,9 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{},
CompletedAt: sql.NullTime{},
Error: sql.NullString{},
+ // Ensure the `tags` field is NOT NULL for both provisioner jobs and provisioner daemons;
+ // otherwise, provisioner daemons won't be able to pick up any jobs.
+ Tags: database.StringMap{},
}),
// Another pending. This will come first in the queue
@@ -2187,6 +2500,7 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{},
CompletedAt: sql.NullTime{},
Error: sql.NullString{},
+ Tags: database.StringMap{},
}),
// Running
@@ -2196,6 +2510,7 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{},
CompletedAt: sql.NullTime{},
Error: sql.NullString{},
+ Tags: database.StringMap{},
}),
// Succeeded
@@ -2205,6 +2520,7 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{},
CompletedAt: sql.NullTime{Valid: true, Time: now},
Error: sql.NullString{},
+ Tags: database.StringMap{},
}),
// Canceling
@@ -2214,6 +2530,7 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{Valid: true, Time: now},
CompletedAt: sql.NullTime{},
Error: sql.NullString{},
+ Tags: database.StringMap{},
}),
// Canceled
@@ -2223,6 +2540,7 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{Valid: true, Time: now},
CompletedAt: sql.NullTime{Valid: true, Time: now},
Error: sql.NullString{},
+ Tags: database.StringMap{},
}),
// Failed
@@ -2232,9 +2550,17 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
CanceledAt: sql.NullTime{},
CompletedAt: sql.NullTime{},
Error: sql.NullString{String: "failed", Valid: true},
+ Tags: database.StringMap{},
}),
}
+ // Create default provisioner daemon:
+ dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
+ Name: "default_provisioner",
+ Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
+ Tags: database.StringMap{},
+ })
+
// Assert invariant: the jobs are in the expected order
require.Len(t, allJobs, 7, "expected 7 jobs")
for idx, status := range []database.ProvisionerJobStatus{
@@ -2259,22 +2585,123 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) {
require.NoError(t, err)
require.Len(t, actualJobs, len(allJobs), "should return all jobs")
- // Then: the jobs should be returned in the correct order (by IDs in the input slice)
+ // Then: the jobs should be returned in the correct order (sorted by createdAt)
+ sort.Slice(allJobs, func(i, j int) bool {
+ return allJobs[i].CreatedAt.Before(allJobs[j].CreatedAt)
+ })
for idx, job := range actualJobs {
assert.EqualValues(t, allJobs[idx], job.ProvisionerJob)
}
// Then: the queue size should be set correctly
+ var queueSizes []int64
for _, job := range actualJobs {
- assert.EqualValues(t, job.QueueSize, 2, "should have queue size 2")
+ queueSizes = append(queueSizes, job.QueueSize)
}
+ assert.EqualValues(t, []int64{0, 0, 0, 0, 0, 2, 2}, queueSizes, "expected queue sizes to be set correctly")
// Then: the queue position should be set correctly:
var queuePositions []int64
for _, job := range actualJobs {
queuePositions = append(queuePositions, job.QueuePosition)
}
- assert.EqualValues(t, []int64{2, 1, 0, 0, 0, 0, 0}, queuePositions, "expected queue positions to be set correctly")
+ assert.EqualValues(t, []int64{0, 0, 0, 0, 0, 1, 2}, queuePositions, "expected queue positions to be set correctly")
+}
+
+func TestGetProvisionerJobsByIDsWithQueuePosition_OrderValidation(t *testing.T) {
+ t.Parallel()
+
+ db, _ := dbtestutil.NewDB(t)
+ now := dbtime.Now()
+ ctx := testutil.Context(t, testutil.WaitShort)
+
+ // Create the following provisioner jobs:
+ allJobs := []database.ProvisionerJob{
+ dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: now.Add(-4 * time.Minute),
+ // Ensure the `tags` field is NOT NULL for both provisioner jobs and provisioner daemons;
+ // otherwise, provisioner daemons won't be able to pick up any jobs.
+ Tags: database.StringMap{},
+ }),
+
+ dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: now.Add(-5 * time.Minute),
+ Tags: database.StringMap{},
+ }),
+
+ dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: now.Add(-6 * time.Minute),
+ Tags: database.StringMap{},
+ }),
+
+ dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: now.Add(-3 * time.Minute),
+ Tags: database.StringMap{},
+ }),
+
+ dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: now.Add(-2 * time.Minute),
+ Tags: database.StringMap{},
+ }),
+
+ dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ CreatedAt: now.Add(-1 * time.Minute),
+ Tags: database.StringMap{},
+ }),
+ }
+
+ // Create default provisioner daemon:
+ dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
+ Name: "default_provisioner",
+ Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
+ Tags: database.StringMap{},
+ })
+
+ // Assert invariant: the jobs are in the expected order
+ require.Len(t, allJobs, 6, "expected 6 jobs")
+ for idx, status := range []database.ProvisionerJobStatus{
+ database.ProvisionerJobStatusPending,
+ database.ProvisionerJobStatusPending,
+ database.ProvisionerJobStatusPending,
+ database.ProvisionerJobStatusPending,
+ database.ProvisionerJobStatusPending,
+ database.ProvisionerJobStatusPending,
+ } {
+ require.Equal(t, status, allJobs[idx].JobStatus, "expected job %d to have status %s", idx, status)
+ }
+
+ var jobIDs []uuid.UUID
+ for _, job := range allJobs {
+ jobIDs = append(jobIDs, job.ID)
+ }
+
+ // When: we fetch the jobs by their IDs
+ actualJobs, err := db.GetProvisionerJobsByIDsWithQueuePosition(ctx, jobIDs)
+ require.NoError(t, err)
+ require.Len(t, actualJobs, len(allJobs), "should return all jobs")
+
+ // Then: the jobs should be returned in the correct order (sorted by createdAt)
+ sort.Slice(allJobs, func(i, j int) bool {
+ return allJobs[i].CreatedAt.Before(allJobs[j].CreatedAt)
+ })
+ for idx, job := range actualJobs {
+ assert.EqualValues(t, allJobs[idx], job.ProvisionerJob)
+ assert.EqualValues(t, allJobs[idx].CreatedAt, job.ProvisionerJob.CreatedAt)
+ }
+
+ // Then: the queue size should be set correctly
+ var queueSizes []int64
+ for _, job := range actualJobs {
+ queueSizes = append(queueSizes, job.QueueSize)
+ }
+ assert.EqualValues(t, []int64{6, 6, 6, 6, 6, 6}, queueSizes, "expected queue sizes to be set correctly")
+
+ // Then: the queue position should be set correctly:
+ var queuePositions []int64
+ for _, job := range actualJobs {
+ queuePositions = append(queuePositions, job.QueuePosition)
+ }
+ assert.EqualValues(t, []int64{1, 2, 3, 4, 5, 6}, queuePositions, "expected queue positions to be set correctly")
}
func TestGroupRemovalTrigger(t *testing.T) {
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go
index e4d70d576f..cf34699378 100644
--- a/coderd/database/queries.sql.go
+++ b/coderd/database/queries.sql.go
@@ -4298,6 +4298,243 @@ func (q *sqlQuerier) UpsertNotificationReportGeneratorLog(ctx context.Context, a
return err
}
+const countUnreadInboxNotificationsByUserID = `-- name: CountUnreadInboxNotificationsByUserID :one
+SELECT COUNT(*) FROM inbox_notifications WHERE user_id = $1 AND read_at IS NULL
+`
+
+func (q *sqlQuerier) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) {
+ row := q.db.QueryRowContext(ctx, countUnreadInboxNotificationsByUserID, userID)
+ var count int64
+ err := row.Scan(&count)
+ return count, err
+}
+
+const getFilteredInboxNotificationsByUserID = `-- name: GetFilteredInboxNotificationsByUserID :many
+SELECT id, user_id, template_id, targets, title, content, icon, actions, read_at, created_at FROM inbox_notifications WHERE
+ user_id = $1 AND
+ template_id = ANY($2::UUID[]) AND
+ targets @> COALESCE($3, ARRAY[]::UUID[]) AND
+ ($4::inbox_notification_read_status = 'all' OR ($4::inbox_notification_read_status = 'unread' AND read_at IS NULL) OR ($4::inbox_notification_read_status = 'read' AND read_at IS NOT NULL)) AND
+ ($5::TIMESTAMPTZ = '0001-01-01 00:00:00Z' OR created_at < $5::TIMESTAMPTZ)
+ ORDER BY created_at DESC
+ LIMIT (COALESCE(NULLIF($6 :: INT, 0), 25))
+`
+
+type GetFilteredInboxNotificationsByUserIDParams struct {
+ UserID uuid.UUID `db:"user_id" json:"user_id"`
+ Templates []uuid.UUID `db:"templates" json:"templates"`
+ Targets []uuid.UUID `db:"targets" json:"targets"`
+ ReadStatus InboxNotificationReadStatus `db:"read_status" json:"read_status"`
+ CreatedAtOpt time.Time `db:"created_at_opt" json:"created_at_opt"`
+ LimitOpt int32 `db:"limit_opt" json:"limit_opt"`
+}
+
+// Fetches inbox notifications for a user filtered by templates and targets
+// param user_id: The user ID
+// param templates: The template IDs to filter by - the template_id = ANY(@templates::UUID[]) condition checks if the template_id is in the @templates array
+// param targets: The target IDs to filter by - the targets @> COALESCE(@targets, ARRAY[]::UUID[]) condition checks if the targets array (from the DB) contains all the elements in the @targets array
+// param read_status: The read status to filter by - can be any of 'all', 'unread', 'read'
+// param created_at_opt: The created_at timestamp to filter by. This parameter is used for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
+// param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
+func (q *sqlQuerier) GetFilteredInboxNotificationsByUserID(ctx context.Context, arg GetFilteredInboxNotificationsByUserIDParams) ([]InboxNotification, error) {
+ rows, err := q.db.QueryContext(ctx, getFilteredInboxNotificationsByUserID,
+ arg.UserID,
+ pq.Array(arg.Templates),
+ pq.Array(arg.Targets),
+ arg.ReadStatus,
+ arg.CreatedAtOpt,
+ arg.LimitOpt,
+ )
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []InboxNotification
+ for rows.Next() {
+ var i InboxNotification
+ if err := rows.Scan(
+ &i.ID,
+ &i.UserID,
+ &i.TemplateID,
+ pq.Array(&i.Targets),
+ &i.Title,
+ &i.Content,
+ &i.Icon,
+ &i.Actions,
+ &i.ReadAt,
+ &i.CreatedAt,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Close(); err != nil {
+ return nil, err
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const getInboxNotificationByID = `-- name: GetInboxNotificationByID :one
+SELECT id, user_id, template_id, targets, title, content, icon, actions, read_at, created_at FROM inbox_notifications WHERE id = $1
+`
+
+func (q *sqlQuerier) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (InboxNotification, error) {
+ row := q.db.QueryRowContext(ctx, getInboxNotificationByID, id)
+ var i InboxNotification
+ err := row.Scan(
+ &i.ID,
+ &i.UserID,
+ &i.TemplateID,
+ pq.Array(&i.Targets),
+ &i.Title,
+ &i.Content,
+ &i.Icon,
+ &i.Actions,
+ &i.ReadAt,
+ &i.CreatedAt,
+ )
+ return i, err
+}
+
+const getInboxNotificationsByUserID = `-- name: GetInboxNotificationsByUserID :many
+SELECT id, user_id, template_id, targets, title, content, icon, actions, read_at, created_at FROM inbox_notifications WHERE
+ user_id = $1 AND
+ ($2::inbox_notification_read_status = 'all' OR ($2::inbox_notification_read_status = 'unread' AND read_at IS NULL) OR ($2::inbox_notification_read_status = 'read' AND read_at IS NOT NULL)) AND
+ ($3::TIMESTAMPTZ = '0001-01-01 00:00:00Z' OR created_at < $3::TIMESTAMPTZ)
+ ORDER BY created_at DESC
+ LIMIT (COALESCE(NULLIF($4 :: INT, 0), 25))
+`
+
+type GetInboxNotificationsByUserIDParams struct {
+ UserID uuid.UUID `db:"user_id" json:"user_id"`
+ ReadStatus InboxNotificationReadStatus `db:"read_status" json:"read_status"`
+ CreatedAtOpt time.Time `db:"created_at_opt" json:"created_at_opt"`
+ LimitOpt int32 `db:"limit_opt" json:"limit_opt"`
+}
+
+// Fetches inbox notifications for a user filtered by templates and targets
+// param user_id: The user ID
+// param read_status: The read status to filter by - can be any of 'all', 'unread', 'read'
+// param created_at_opt: The created_at timestamp to filter by. This parameter is used for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
+// param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
+func (q *sqlQuerier) GetInboxNotificationsByUserID(ctx context.Context, arg GetInboxNotificationsByUserIDParams) ([]InboxNotification, error) {
+ rows, err := q.db.QueryContext(ctx, getInboxNotificationsByUserID,
+ arg.UserID,
+ arg.ReadStatus,
+ arg.CreatedAtOpt,
+ arg.LimitOpt,
+ )
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []InboxNotification
+ for rows.Next() {
+ var i InboxNotification
+ if err := rows.Scan(
+ &i.ID,
+ &i.UserID,
+ &i.TemplateID,
+ pq.Array(&i.Targets),
+ &i.Title,
+ &i.Content,
+ &i.Icon,
+ &i.Actions,
+ &i.ReadAt,
+ &i.CreatedAt,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Close(); err != nil {
+ return nil, err
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const insertInboxNotification = `-- name: InsertInboxNotification :one
+INSERT INTO
+ inbox_notifications (
+ id,
+ user_id,
+ template_id,
+ targets,
+ title,
+ content,
+ icon,
+ actions,
+ created_at
+ )
+VALUES
+ ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, user_id, template_id, targets, title, content, icon, actions, read_at, created_at
+`
+
+type InsertInboxNotificationParams struct {
+ ID uuid.UUID `db:"id" json:"id"`
+ UserID uuid.UUID `db:"user_id" json:"user_id"`
+ TemplateID uuid.UUID `db:"template_id" json:"template_id"`
+ Targets []uuid.UUID `db:"targets" json:"targets"`
+ Title string `db:"title" json:"title"`
+ Content string `db:"content" json:"content"`
+ Icon string `db:"icon" json:"icon"`
+ Actions json.RawMessage `db:"actions" json:"actions"`
+ CreatedAt time.Time `db:"created_at" json:"created_at"`
+}
+
+func (q *sqlQuerier) InsertInboxNotification(ctx context.Context, arg InsertInboxNotificationParams) (InboxNotification, error) {
+ row := q.db.QueryRowContext(ctx, insertInboxNotification,
+ arg.ID,
+ arg.UserID,
+ arg.TemplateID,
+ pq.Array(arg.Targets),
+ arg.Title,
+ arg.Content,
+ arg.Icon,
+ arg.Actions,
+ arg.CreatedAt,
+ )
+ var i InboxNotification
+ err := row.Scan(
+ &i.ID,
+ &i.UserID,
+ &i.TemplateID,
+ pq.Array(&i.Targets),
+ &i.Title,
+ &i.Content,
+ &i.Icon,
+ &i.Actions,
+ &i.ReadAt,
+ &i.CreatedAt,
+ )
+ return i, err
+}
+
+const updateInboxNotificationReadStatus = `-- name: UpdateInboxNotificationReadStatus :exec
+UPDATE
+ inbox_notifications
+SET
+ read_at = $1
+WHERE
+ id = $2
+`
+
+type UpdateInboxNotificationReadStatusParams struct {
+ ReadAt sql.NullTime `db:"read_at" json:"read_at"`
+ ID uuid.UUID `db:"id" json:"id"`
+}
+
+func (q *sqlQuerier) UpdateInboxNotificationReadStatus(ctx context.Context, arg UpdateInboxNotificationReadStatusParams) error {
+ _, err := q.db.ExecContext(ctx, updateInboxNotificationReadStatus, arg.ReadAt, arg.ID)
+ return err
+}
+
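UpdateInboxNotificationReadStatus is also the path for marking a notification unread again, since an invalid (NULL) `read_at` clears the timestamp. A minimal sketch of the read/unread round trip, assuming a `db` store and an existing notification `notif`:

```go
// Mark the notification as read "now".
err := db.UpdateInboxNotificationReadStatus(ctx, database.UpdateInboxNotificationReadStatusParams{
	ID:     notif.ID,
	ReadAt: sql.NullTime{Time: dbtime.Now(), Valid: true},
})
if err != nil {
	return err
}

// The unread counter only counts rows where read_at IS NULL, so it drops by one.
unread, err := db.CountUnreadInboxNotificationsByUserID(ctx, notif.UserID)
if err != nil {
	return err
}
_ = unread

// Mark it unread again by clearing read_at.
err = db.UpdateInboxNotificationReadStatus(ctx, database.UpdateInboxNotificationReadStatusParams{
	ID:     notif.ID,
	ReadAt: sql.NullTime{}, // NULL read_at => unread
})
```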
const deleteOAuth2ProviderAppByID = `-- name: DeleteOAuth2ProviderAppByID :exec
DELETE FROM oauth2_provider_apps WHERE id = $1
`
@@ -6170,7 +6407,7 @@ WHERE
AND (COALESCE(array_length($3::uuid[], 1), 0) = 0 OR pd.id = ANY($3::uuid[]))
AND ($4::tagset = 'null'::tagset OR provisioner_tagset_contains(pd.tags::tagset, $4::tagset))
ORDER BY
- pd.created_at ASC
+ pd.created_at DESC
LIMIT
$5::int
`
@@ -6715,45 +6952,69 @@ func (q *sqlQuerier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUI
}
const getProvisionerJobsByIDsWithQueuePosition = `-- name: GetProvisionerJobsByIDsWithQueuePosition :many
-WITH pending_jobs AS (
- SELECT
- id, created_at
- FROM
- provisioner_jobs
- WHERE
- started_at IS NULL
- AND
- canceled_at IS NULL
- AND
- completed_at IS NULL
- AND
- error IS NULL
+WITH filtered_provisioner_jobs AS (
+ -- Step 1: Filter provisioner_jobs
+ SELECT
+ id, created_at
+ FROM
+ provisioner_jobs
+ WHERE
+ id = ANY($1 :: uuid [ ]) -- Apply filter early to reduce dataset size before expensive JOIN
),
-queue_position AS (
- SELECT
- id,
- ROW_NUMBER() OVER (ORDER BY created_at ASC) AS queue_position
- FROM
- pending_jobs
+pending_jobs AS (
+ -- Step 2: Extract only pending jobs
+ SELECT
+ id, created_at, tags
+ FROM
+ provisioner_jobs
+ WHERE
+ job_status = 'pending'
),
-queue_size AS (
- SELECT COUNT(*) AS count FROM pending_jobs
+ranked_jobs AS (
+ -- Step 3: Rank only pending jobs based on provisioner availability
+ SELECT
+ pj.id,
+ pj.created_at,
+ ROW_NUMBER() OVER (PARTITION BY pd.id ORDER BY pj.created_at ASC) AS queue_position,
+ COUNT(*) OVER (PARTITION BY pd.id) AS queue_size
+ FROM
+ pending_jobs pj
+ INNER JOIN provisioner_daemons pd
+ ON provisioner_tagset_contains(pd.tags, pj.tags) -- Join only on the small pending set
+),
+final_jobs AS (
+ -- Step 4: Compute best queue position and max queue size per job
+ SELECT
+ fpj.id,
+ fpj.created_at,
+ COALESCE(MIN(rj.queue_position), 0) :: BIGINT AS queue_position, -- Best queue position across provisioners
+ COALESCE(MAX(rj.queue_size), 0) :: BIGINT AS queue_size -- Max queue size across provisioners
+ FROM
+ filtered_provisioner_jobs fpj -- Use the pre-filtered dataset instead of full provisioner_jobs
+ LEFT JOIN ranked_jobs rj
+ ON fpj.id = rj.id -- Join with the ranking jobs CTE to assign a rank to each specified provisioner job.
+ GROUP BY
+ fpj.id, fpj.created_at
)
SELECT
+ -- Step 5: Final SELECT with INNER JOIN provisioner_jobs
+ fj.id,
+ fj.created_at,
pj.id, pj.created_at, pj.updated_at, pj.started_at, pj.canceled_at, pj.completed_at, pj.error, pj.organization_id, pj.initiator_id, pj.provisioner, pj.storage_method, pj.type, pj.input, pj.worker_id, pj.file_id, pj.tags, pj.error_code, pj.trace_metadata, pj.job_status,
- COALESCE(qp.queue_position, 0) AS queue_position,
- COALESCE(qs.count, 0) AS queue_size
+ fj.queue_position,
+ fj.queue_size
FROM
- provisioner_jobs pj
-LEFT JOIN
- queue_position qp ON qp.id = pj.id
-LEFT JOIN
- queue_size qs ON TRUE
-WHERE
- pj.id = ANY($1 :: uuid [ ])
+ final_jobs fj
+ INNER JOIN provisioner_jobs pj
+ ON fj.id = pj.id -- Ensure we retrieve full details from ` + "`" + `provisioner_jobs` + "`" + `.
+ -- JOIN with pj is required for sqlc.embed(pj) to compile successfully.
+ORDER BY
+ fj.created_at
`
type GetProvisionerJobsByIDsWithQueuePositionRow struct {
+ ID uuid.UUID `db:"id" json:"id"`
+ CreatedAt time.Time `db:"created_at" json:"created_at"`
ProvisionerJob ProvisionerJob `db:"provisioner_job" json:"provisioner_job"`
QueuePosition int64 `db:"queue_position" json:"queue_position"`
QueueSize int64 `db:"queue_size" json:"queue_size"`
@@ -6769,6 +7030,8 @@ func (q *sqlQuerier) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Contex
for rows.Next() {
var i GetProvisionerJobsByIDsWithQueuePositionRow
if err := rows.Scan(
+ &i.ID,
+ &i.CreatedAt,
&i.ProvisionerJob.ID,
&i.ProvisionerJob.CreatedAt,
&i.ProvisionerJob.UpdatedAt,
@@ -8100,25 +8363,25 @@ SELECT
FROM
custom_roles
WHERE
- true
- -- @lookup_roles will filter for exact (role_name, org_id) pairs
- -- To do this manually in SQL, you can construct an array and cast it:
- -- cast(ARRAY[('customrole','ece79dac-926e-44ca-9790-2ff7c5eb6e0c')] AS name_organization_pair[])
- AND CASE WHEN array_length($1 :: name_organization_pair[], 1) > 0 THEN
- -- Using 'coalesce' to avoid troubles with null literals being an empty string.
- (name, coalesce(organization_id, '00000000-0000-0000-0000-000000000000' ::uuid)) = ANY ($1::name_organization_pair[])
- ELSE true
- END
- -- This allows fetching all roles, or just site wide roles
- AND CASE WHEN $2 :: boolean THEN
- organization_id IS null
+ true
+ -- @lookup_roles will filter for exact (role_name, org_id) pairs
+ -- To do this manually in SQL, you can construct an array and cast it:
+ -- cast(ARRAY[('customrole','ece79dac-926e-44ca-9790-2ff7c5eb6e0c')] AS name_organization_pair[])
+ AND CASE WHEN array_length($1 :: name_organization_pair[], 1) > 0 THEN
+ -- Using 'coalesce' to avoid troubles with null literals being an empty string.
+ (name, coalesce(organization_id, '00000000-0000-0000-0000-000000000000' ::uuid)) = ANY ($1::name_organization_pair[])
ELSE true
- END
- -- Allows fetching all roles to a particular organization
- AND CASE WHEN $3 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
- organization_id = $3
- ELSE true
- END
+ END
+ -- This allows fetching all roles, or just site wide roles
+ AND CASE WHEN $2 :: boolean THEN
+ organization_id IS null
+ ELSE true
+ END
+ -- Allows fetching all roles to a particular organization
+ AND CASE WHEN $3 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
+ organization_id = $3
+ ELSE true
+ END
`
type CustomRolesParams struct {
@@ -8191,16 +8454,16 @@ INSERT INTO
updated_at
)
VALUES (
- -- Always force lowercase names
- lower($1),
- $2,
- $3,
- $4,
- $5,
- $6,
- now(),
- now()
- )
+ -- Always force lowercase names
+ lower($1),
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ now(),
+ now()
+)
RETURNING name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at, organization_id, id
`
@@ -12478,6 +12741,46 @@ func (q *sqlQuerier) FetchMemoryResourceMonitorsByAgentID(ctx context.Context, a
return i, err
}
+const fetchMemoryResourceMonitorsUpdatedAfter = `-- name: FetchMemoryResourceMonitorsUpdatedAfter :many
+SELECT
+ agent_id, enabled, threshold, created_at, updated_at, state, debounced_until
+FROM
+ workspace_agent_memory_resource_monitors
+WHERE
+ updated_at > $1
+`
+
+func (q *sqlQuerier) FetchMemoryResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]WorkspaceAgentMemoryResourceMonitor, error) {
+ rows, err := q.db.QueryContext(ctx, fetchMemoryResourceMonitorsUpdatedAfter, updatedAt)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []WorkspaceAgentMemoryResourceMonitor
+ for rows.Next() {
+ var i WorkspaceAgentMemoryResourceMonitor
+ if err := rows.Scan(
+ &i.AgentID,
+ &i.Enabled,
+ &i.Threshold,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ &i.State,
+ &i.DebouncedUntil,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Close(); err != nil {
+ return nil, err
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
const fetchVolumesResourceMonitorsByAgentID = `-- name: FetchVolumesResourceMonitorsByAgentID :many
SELECT
agent_id, enabled, threshold, path, created_at, updated_at, state, debounced_until
@@ -12519,6 +12822,47 @@ func (q *sqlQuerier) FetchVolumesResourceMonitorsByAgentID(ctx context.Context,
return items, nil
}
+const fetchVolumesResourceMonitorsUpdatedAfter = `-- name: FetchVolumesResourceMonitorsUpdatedAfter :many
+SELECT
+ agent_id, enabled, threshold, path, created_at, updated_at, state, debounced_until
+FROM
+ workspace_agent_volume_resource_monitors
+WHERE
+ updated_at > $1
+`
+
+func (q *sqlQuerier) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]WorkspaceAgentVolumeResourceMonitor, error) {
+ rows, err := q.db.QueryContext(ctx, fetchVolumesResourceMonitorsUpdatedAfter, updatedAt)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []WorkspaceAgentVolumeResourceMonitor
+ for rows.Next() {
+ var i WorkspaceAgentVolumeResourceMonitor
+ if err := rows.Scan(
+ &i.AgentID,
+ &i.Enabled,
+ &i.Threshold,
+ &i.Path,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ &i.State,
+ &i.DebouncedUntil,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Close(); err != nil {
+ return nil, err
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
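The two new `...UpdatedAfter` queries are shaped for incremental syncs: a caller remembers when it last polled and asks only for monitors touched since then. A rough polling sketch (the `lastSync` and `pollStartedAt` bookkeeping is the caller's responsibility and is assumed here, not part of this diff):

```go
// Incrementally pull monitors changed since the previous poll.
memMonitors, err := db.FetchMemoryResourceMonitorsUpdatedAfter(ctx, lastSync)
if err != nil {
	return err
}
volMonitors, err := db.FetchVolumesResourceMonitorsUpdatedAfter(ctx, lastSync)
if err != nil {
	return err
}
// Advance the cursor only after both fetches succeed, so nothing is skipped
// on a partial failure. (Using the poll start time is a simplification; a
// real caller might prefer the max updated_at observed in the results.)
lastSync = pollStartedAt
_ = memMonitors
_ = volMonitors
```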
const insertMemoryResourceMonitor = `-- name: InsertMemoryResourceMonitor :one
INSERT INTO
workspace_agent_memory_resource_monitors (
@@ -16596,13 +16940,11 @@ func (q *sqlQuerier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspace
}
const getWorkspaceUniqueOwnerCountByTemplateIDs = `-- name: GetWorkspaceUniqueOwnerCountByTemplateIDs :many
-SELECT
- template_id, COUNT(DISTINCT owner_id) AS unique_owners_sum
-FROM
- workspaces
-WHERE
- template_id = ANY($1 :: uuid[]) AND deleted = false
-GROUP BY template_id
+SELECT templates.id AS template_id, COUNT(DISTINCT workspaces.owner_id) AS unique_owners_sum
+FROM templates
+LEFT JOIN workspaces ON workspaces.template_id = templates.id AND workspaces.deleted = false
+WHERE templates.id = ANY($1 :: uuid[])
+GROUP BY templates.id
`
type GetWorkspaceUniqueOwnerCountByTemplateIDsRow struct {
diff --git a/coderd/database/queries/notificationsinbox.sql b/coderd/database/queries/notificationsinbox.sql
new file mode 100644
index 0000000000..cdaf1cf78c
--- /dev/null
+++ b/coderd/database/queries/notificationsinbox.sql
@@ -0,0 +1,59 @@
+-- name: GetInboxNotificationsByUserID :many
+-- Fetches inbox notifications for a user filtered by templates and targets
+-- param user_id: The user ID
+-- param read_status: The read status to filter by - can be any of 'all', 'unread', 'read'
+-- param created_at_opt: The created_at timestamp to filter by. This parameter is used for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
+-- param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
+SELECT * FROM inbox_notifications WHERE
+ user_id = @user_id AND
+ (@read_status::inbox_notification_read_status = 'all' OR (@read_status::inbox_notification_read_status = 'unread' AND read_at IS NULL) OR (@read_status::inbox_notification_read_status = 'read' AND read_at IS NOT NULL)) AND
+ (@created_at_opt::TIMESTAMPTZ = '0001-01-01 00:00:00Z' OR created_at < @created_at_opt::TIMESTAMPTZ)
+ ORDER BY created_at DESC
+ LIMIT (COALESCE(NULLIF(@limit_opt :: INT, 0), 25));
+
+-- name: GetFilteredInboxNotificationsByUserID :many
+-- Fetches inbox notifications for a user filtered by templates and targets
+-- param user_id: The user ID
+-- param templates: The template IDs to filter by - the template_id = ANY(@templates::UUID[]) condition checks if the template_id is in the @templates array
+-- param targets: The target IDs to filter by - the targets @> COALESCE(@targets, ARRAY[]::UUID[]) condition checks if the targets array (from the DB) contains all the elements in the @targets array
+-- param read_status: The read status to filter by - can be any of 'all', 'unread', 'read'
+-- param created_at_opt: The created_at timestamp to filter by. This parameter is used for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
+-- param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
+SELECT * FROM inbox_notifications WHERE
+ user_id = @user_id AND
+ template_id = ANY(@templates::UUID[]) AND
+ targets @> COALESCE(@targets, ARRAY[]::UUID[]) AND
+ (@read_status::inbox_notification_read_status = 'all' OR (@read_status::inbox_notification_read_status = 'unread' AND read_at IS NULL) OR (@read_status::inbox_notification_read_status = 'read' AND read_at IS NOT NULL)) AND
+ (@created_at_opt::TIMESTAMPTZ = '0001-01-01 00:00:00Z' OR created_at < @created_at_opt::TIMESTAMPTZ)
+ ORDER BY created_at DESC
+ LIMIT (COALESCE(NULLIF(@limit_opt :: INT, 0), 25));
+
+-- name: GetInboxNotificationByID :one
+SELECT * FROM inbox_notifications WHERE id = $1;
+
+-- name: CountUnreadInboxNotificationsByUserID :one
+SELECT COUNT(*) FROM inbox_notifications WHERE user_id = $1 AND read_at IS NULL;
+
+-- name: InsertInboxNotification :one
+INSERT INTO
+ inbox_notifications (
+ id,
+ user_id,
+ template_id,
+ targets,
+ title,
+ content,
+ icon,
+ actions,
+ created_at
+ )
+VALUES
+ ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *;
+
+-- name: UpdateInboxNotificationReadStatus :exec
+UPDATE
+ inbox_notifications
+SET
+ read_at = $1
+WHERE
+ id = $2;
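The `created_at_opt` parameter implements keyset pagination rather than OFFSET: the next page is requested by passing the `created_at` of the last notification from the previous page. A sketch of paging through a user's inbox (assuming a generated `db` store; the `database.InboxNotificationReadStatus("all")` conversion stands in for the generated constant):

```go
// First page: zero-value CreatedAtOpt and LimitOpt, so the query returns the
// 25 most recent notifications.
page1, err := db.GetInboxNotificationsByUserID(ctx, database.GetInboxNotificationsByUserIDParams{
	UserID:     userID,
	ReadStatus: database.InboxNotificationReadStatus("all"),
})
if err != nil {
	return err
}
if len(page1) == 0 {
	return nil
}

// Next page: everything created strictly before the last row we already have.
cursor := page1[len(page1)-1].CreatedAt
page2, err := db.GetInboxNotificationsByUserID(ctx, database.GetInboxNotificationsByUserIDParams{
	UserID:       userID,
	ReadStatus:   database.InboxNotificationReadStatus("all"),
	CreatedAtOpt: cursor,
})
if err != nil {
	return err
}
_ = page2
```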
diff --git a/coderd/database/queries/provisionerdaemons.sql b/coderd/database/queries/provisionerdaemons.sql
index ab1668e537..4f7c7a8b22 100644
--- a/coderd/database/queries/provisionerdaemons.sql
+++ b/coderd/database/queries/provisionerdaemons.sql
@@ -111,7 +111,7 @@ WHERE
AND (COALESCE(array_length(@ids::uuid[], 1), 0) = 0 OR pd.id = ANY(@ids::uuid[]))
AND (@tags::tagset = 'null'::tagset OR provisioner_tagset_contains(pd.tags::tagset, @tags::tagset))
ORDER BY
- pd.created_at ASC
+ pd.created_at DESC
LIMIT
sqlc.narg('limit')::int;
diff --git a/coderd/database/queries/provisionerjobs.sql b/coderd/database/queries/provisionerjobs.sql
index 592b228af2..2d544aedb9 100644
--- a/coderd/database/queries/provisionerjobs.sql
+++ b/coderd/database/queries/provisionerjobs.sql
@@ -50,42 +50,64 @@ WHERE
id = ANY(@ids :: uuid [ ]);
-- name: GetProvisionerJobsByIDsWithQueuePosition :many
-WITH pending_jobs AS (
- SELECT
- id, created_at
- FROM
- provisioner_jobs
- WHERE
- started_at IS NULL
- AND
- canceled_at IS NULL
- AND
- completed_at IS NULL
- AND
- error IS NULL
+WITH filtered_provisioner_jobs AS (
+ -- Step 1: Filter provisioner_jobs
+ SELECT
+ id, created_at
+ FROM
+ provisioner_jobs
+ WHERE
+ id = ANY(@ids :: uuid [ ]) -- Apply filter early to reduce dataset size before expensive JOIN
),
-queue_position AS (
- SELECT
- id,
- ROW_NUMBER() OVER (ORDER BY created_at ASC) AS queue_position
- FROM
- pending_jobs
+pending_jobs AS (
+ -- Step 2: Extract only pending jobs
+ SELECT
+ id, created_at, tags
+ FROM
+ provisioner_jobs
+ WHERE
+ job_status = 'pending'
),
-queue_size AS (
- SELECT COUNT(*) AS count FROM pending_jobs
+ranked_jobs AS (
+ -- Step 3: Rank only pending jobs based on provisioner availability
+ SELECT
+ pj.id,
+ pj.created_at,
+ ROW_NUMBER() OVER (PARTITION BY pd.id ORDER BY pj.created_at ASC) AS queue_position,
+ COUNT(*) OVER (PARTITION BY pd.id) AS queue_size
+ FROM
+ pending_jobs pj
+ INNER JOIN provisioner_daemons pd
+ ON provisioner_tagset_contains(pd.tags, pj.tags) -- Join only on the small pending set
+),
+final_jobs AS (
+ -- Step 4: Compute best queue position and max queue size per job
+ SELECT
+ fpj.id,
+ fpj.created_at,
+ COALESCE(MIN(rj.queue_position), 0) :: BIGINT AS queue_position, -- Best queue position across provisioners
+ COALESCE(MAX(rj.queue_size), 0) :: BIGINT AS queue_size -- Max queue size across provisioners
+ FROM
+ filtered_provisioner_jobs fpj -- Use the pre-filtered dataset instead of full provisioner_jobs
+ LEFT JOIN ranked_jobs rj
+ ON fpj.id = rj.id -- Join with the ranking jobs CTE to assign a rank to each specified provisioner job.
+ GROUP BY
+ fpj.id, fpj.created_at
)
SELECT
+ -- Step 5: Final SELECT with INNER JOIN provisioner_jobs
+ fj.id,
+ fj.created_at,
sqlc.embed(pj),
- COALESCE(qp.queue_position, 0) AS queue_position,
- COALESCE(qs.count, 0) AS queue_size
+ fj.queue_position,
+ fj.queue_size
FROM
- provisioner_jobs pj
-LEFT JOIN
- queue_position qp ON qp.id = pj.id
-LEFT JOIN
- queue_size qs ON TRUE
-WHERE
- pj.id = ANY(@ids :: uuid [ ]);
+ final_jobs fj
+ INNER JOIN provisioner_jobs pj
+ ON fj.id = pj.id -- Ensure we retrieve full details from `provisioner_jobs`.
+ -- JOIN with pj is required for sqlc.embed(pj) to compile successfully.
+ORDER BY
+ fj.created_at;
-- name: GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner :many
WITH pending_jobs AS (
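Because queue positions are now computed per matching provisioner and then collapsed with MIN/MAX, the result for a single job depends on which daemons can serve it, not on the global pending count. A sketch reproducing test-case-1 from the querier tests (reusing the dbgen helpers and test imports seen elsewhere in this diff) makes the semantics concrete:

```go
// Two daemons: {"a":"1","b":"2"} and {"a":"1"}; three pending jobs (oldest first).
now := dbtime.Now()
for i, tags := range []database.StringMap{{"a": "1", "b": "2"}, {"a": "1"}} {
	dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
		Name:         fmt.Sprintf("prov_%d", i),
		Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
		Tags:         tags,
	})
}
var ids []uuid.UUID
for i, tags := range []database.StringMap{{"a": "1", "b": "2"}, {"a": "1"}, {"a": "1", "c": "3"}} {
	job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
		CreatedAt: now.Add(time.Duration(i-3) * time.Minute), // keep creation order stable
		Tags:      tags,
	})
	ids = append(ids, job.ID)
}

rows, err := db.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids)
require.NoError(t, err)
// Job 3 matches no daemon, so its queue position and size are both 0. Jobs 1 and 2
// are each first in line for some daemon, so both report position 1 (MIN across
// matching daemons) with queue size 2 (MAX backlog among matching daemons).
_ = rows
```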
diff --git a/coderd/database/queries/roles.sql b/coderd/database/queries/roles.sql
index 7246ddb6de..ee5d35d91a 100644
--- a/coderd/database/queries/roles.sql
+++ b/coderd/database/queries/roles.sql
@@ -4,25 +4,25 @@ SELECT
FROM
custom_roles
WHERE
- true
- -- @lookup_roles will filter for exact (role_name, org_id) pairs
- -- To do this manually in SQL, you can construct an array and cast it:
- -- cast(ARRAY[('customrole','ece79dac-926e-44ca-9790-2ff7c5eb6e0c')] AS name_organization_pair[])
- AND CASE WHEN array_length(@lookup_roles :: name_organization_pair[], 1) > 0 THEN
- -- Using 'coalesce' to avoid troubles with null literals being an empty string.
- (name, coalesce(organization_id, '00000000-0000-0000-0000-000000000000' ::uuid)) = ANY (@lookup_roles::name_organization_pair[])
- ELSE true
- END
- -- This allows fetching all roles, or just site wide roles
- AND CASE WHEN @exclude_org_roles :: boolean THEN
- organization_id IS null
+ true
+ -- @lookup_roles will filter for exact (role_name, org_id) pairs
+ -- To do this manually in SQL, you can construct an array and cast it:
+ -- cast(ARRAY[('customrole','ece79dac-926e-44ca-9790-2ff7c5eb6e0c')] AS name_organization_pair[])
+ AND CASE WHEN array_length(@lookup_roles :: name_organization_pair[], 1) > 0 THEN
+ -- Using 'coalesce' to avoid troubles with null literals being an empty string.
+ (name, coalesce(organization_id, '00000000-0000-0000-0000-000000000000' ::uuid)) = ANY (@lookup_roles::name_organization_pair[])
ELSE true
- END
- -- Allows fetching all roles to a particular organization
- AND CASE WHEN @organization_id :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
- organization_id = @organization_id
- ELSE true
- END
+ END
+ -- This allows fetching all roles, or just site wide roles
+ AND CASE WHEN @exclude_org_roles :: boolean THEN
+ organization_id IS null
+ ELSE true
+ END
+ -- Allows fetching all roles to a particular organization
+ AND CASE WHEN @organization_id :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
+ organization_id = @organization_id
+ ELSE true
+ END
;
-- name: DeleteCustomRole :exec
@@ -46,16 +46,16 @@ INSERT INTO
updated_at
)
VALUES (
- -- Always force lowercase names
- lower(@name),
- @display_name,
- @organization_id,
- @site_permissions,
- @org_permissions,
- @user_permissions,
- now(),
- now()
- )
+ -- Always force lowercase names
+ lower(@name),
+ @display_name,
+ @organization_id,
+ @site_permissions,
+ @org_permissions,
+ @user_permissions,
+ now(),
+ now()
+)
RETURNING *;
-- name: UpdateCustomRole :one
diff --git a/coderd/database/queries/workspaceagentresourcemonitors.sql b/coderd/database/queries/workspaceagentresourcemonitors.sql
index 84ee5c67b3..50e7e818f7 100644
--- a/coderd/database/queries/workspaceagentresourcemonitors.sql
+++ b/coderd/database/queries/workspaceagentresourcemonitors.sql
@@ -1,3 +1,19 @@
+-- name: FetchVolumesResourceMonitorsUpdatedAfter :many
+SELECT
+ *
+FROM
+ workspace_agent_volume_resource_monitors
+WHERE
+ updated_at > $1;
+
+-- name: FetchMemoryResourceMonitorsUpdatedAfter :many
+SELECT
+ *
+FROM
+ workspace_agent_memory_resource_monitors
+WHERE
+ updated_at > $1;
+
-- name: FetchMemoryResourceMonitorsByAgentID :one
SELECT
*
diff --git a/coderd/database/queries/workspaces.sql b/coderd/database/queries/workspaces.sql
index cb0d11e8a8..4ec74c066f 100644
--- a/coderd/database/queries/workspaces.sql
+++ b/coderd/database/queries/workspaces.sql
@@ -415,13 +415,11 @@ WHERE
ORDER BY created_at DESC;
-- name: GetWorkspaceUniqueOwnerCountByTemplateIDs :many
-SELECT
- template_id, COUNT(DISTINCT owner_id) AS unique_owners_sum
-FROM
- workspaces
-WHERE
- template_id = ANY(@template_ids :: uuid[]) AND deleted = false
-GROUP BY template_id;
+SELECT templates.id AS template_id, COUNT(DISTINCT workspaces.owner_id) AS unique_owners_sum
+FROM templates
+LEFT JOIN workspaces ON workspaces.template_id = templates.id AND workspaces.deleted = false
+WHERE templates.id = ANY(@template_ids :: uuid[])
+GROUP BY templates.id;
-- name: InsertWorkspace :one
INSERT INTO
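The rewritten query starts from `templates` and LEFT JOINs `workspaces`, so a template with no non-deleted workspaces now comes back as a row with a zero count instead of being silently omitted. A hedged sketch of the behavior change (the field names on the generated row struct are assumed, not confirmed by this diff):

```go
// template has no workspaces yet.
rows, err := db.GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx, []uuid.UUID{template.ID})
require.NoError(t, err)

// Old query: len(rows) == 0, the template was simply missing from the result.
// New query: the template is present with a count of zero.
require.Len(t, rows, 1)
require.Equal(t, template.ID, rows[0].TemplateID)
require.EqualValues(t, 0, rows[0].UniqueOwnersSum)
```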
diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go
index dca42f4a66..a8d46ae7ab 100644
--- a/coderd/database/unique_constraint.go
+++ b/coderd/database/unique_constraint.go
@@ -21,6 +21,7 @@ const (
UniqueGroupMembersUserIDGroupIDKey UniqueConstraint = "group_members_user_id_group_id_key" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_user_id_group_id_key UNIQUE (user_id, group_id);
UniqueGroupsNameOrganizationIDKey UniqueConstraint = "groups_name_organization_id_key" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_name_organization_id_key UNIQUE (name, organization_id);
UniqueGroupsPkey UniqueConstraint = "groups_pkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_pkey PRIMARY KEY (id);
+ UniqueInboxNotificationsPkey UniqueConstraint = "inbox_notifications_pkey" // ALTER TABLE ONLY inbox_notifications ADD CONSTRAINT inbox_notifications_pkey PRIMARY KEY (id);
UniqueJfrogXrayScansPkey UniqueConstraint = "jfrog_xray_scans_pkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_pkey PRIMARY KEY (agent_id, workspace_id);
UniqueLicensesJWTKey UniqueConstraint = "licenses_jwt_key" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_jwt_key UNIQUE (jwt);
UniqueLicensesPkey UniqueConstraint = "licenses_pkey" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_pkey PRIMARY KEY (id);
diff --git a/coderd/debug.go b/coderd/debug.go
index a34e211ef0..0ae62282a2 100644
--- a/coderd/debug.go
+++ b/coderd/debug.go
@@ -7,10 +7,10 @@ import (
"encoding/json"
"fmt"
"net/http"
+ "slices"
"time"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
diff --git a/coderd/devtunnel/servers.go b/coderd/devtunnel/servers.go
index 498ba74e42..79be97db87 100644
--- a/coderd/devtunnel/servers.go
+++ b/coderd/devtunnel/servers.go
@@ -2,11 +2,11 @@ package devtunnel
import (
"runtime"
+ "slices"
"sync"
"time"
ping "github.com/prometheus-community/pro-bing"
- "golang.org/x/exp/slices"
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
diff --git a/coderd/entitlements/entitlements.go b/coderd/entitlements/entitlements.go
index e141a861a9..6bbe32ade4 100644
--- a/coderd/entitlements/entitlements.go
+++ b/coderd/entitlements/entitlements.go
@@ -4,10 +4,10 @@ import (
"context"
"encoding/json"
"net/http"
+ "slices"
"sync"
"time"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/codersdk"
diff --git a/coderd/healthcheck/database.go b/coderd/healthcheck/database.go
index 275124c5b1..97b4783231 100644
--- a/coderd/healthcheck/database.go
+++ b/coderd/healthcheck/database.go
@@ -2,10 +2,9 @@ package healthcheck
import (
"context"
+ "slices"
"time"
- "golang.org/x/exp/slices"
-
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/healthcheck/health"
"github.com/coder/coder/v2/codersdk/healthsdk"
diff --git a/coderd/healthcheck/derphealth/derp.go b/coderd/healthcheck/derphealth/derp.go
index f74db243cb..fa24ebe757 100644
--- a/coderd/healthcheck/derphealth/derp.go
+++ b/coderd/healthcheck/derphealth/derp.go
@@ -6,12 +6,12 @@ import (
"net"
"net/netip"
"net/url"
+ "slices"
"strings"
"sync"
"sync/atomic"
"time"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"tailscale.com/derp"
"tailscale.com/derp/derphttp"
diff --git a/coderd/httpapi/httpapi.go b/coderd/httpapi/httpapi.go
index a9687d58a0..d5895dcbf8 100644
--- a/coderd/httpapi/httpapi.go
+++ b/coderd/httpapi/httpapi.go
@@ -151,11 +151,13 @@ func ResourceNotFound(rw http.ResponseWriter) {
Write(context.Background(), rw, http.StatusNotFound, ResourceNotFoundResponse)
}
+var ResourceForbiddenResponse = codersdk.Response{
+ Message: "Forbidden.",
+ Detail: "You don't have permission to view this content. If you believe this is a mistake, please contact your administrator or try signing in with different credentials.",
+}
+
func Forbidden(rw http.ResponseWriter) {
- Write(context.Background(), rw, http.StatusForbidden, codersdk.Response{
- Message: "Forbidden.",
- Detail: "You don't have permission to view this content. If you believe this is a mistake, please contact your administrator or try signing in with different credentials.",
- })
+ Write(context.Background(), rw, http.StatusForbidden, ResourceForbiddenResponse)
}
func InternalServerError(rw http.ResponseWriter, err error) {
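Exporting the canonical response as `ResourceForbiddenResponse` lets callers, tests in particular, compare against the shared message instead of duplicating the string. A small sketch of how a test might use it (`err` stands for the error returned by a request the caller is not allowed to make; that setup is assumed):

```go
// Assert that the request was rejected with the standard forbidden response.
var apiErr *codersdk.Error
require.ErrorAs(t, err, &apiErr)
require.Equal(t, http.StatusForbidden, apiErr.StatusCode())
require.Equal(t, httpapi.ResourceForbiddenResponse.Message, apiErr.Message)
```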
diff --git a/coderd/httpmw/apikey_test.go b/coderd/httpmw/apikey_test.go
index c2e69eb7ae..bd979e8823 100644
--- a/coderd/httpmw/apikey_test.go
+++ b/coderd/httpmw/apikey_test.go
@@ -9,6 +9,7 @@ import (
"net"
"net/http"
"net/http/httptest"
+ "slices"
"strings"
"sync/atomic"
"testing"
@@ -17,7 +18,6 @@ import (
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
"golang.org/x/oauth2"
"github.com/coder/coder/v2/coderd/database"
diff --git a/coderd/idpsync/group_test.go b/coderd/idpsync/group_test.go
index 2baafd53ff..7fbfd3bfe4 100644
--- a/coderd/idpsync/group_test.go
+++ b/coderd/idpsync/group_test.go
@@ -4,12 +4,12 @@ import (
"context"
"database/sql"
"regexp"
+ "slices"
"testing"
"github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog/sloggers/slogtest"
diff --git a/coderd/idpsync/role.go b/coderd/idpsync/role.go
index 5cb0ac1725..22e0edc3bc 100644
--- a/coderd/idpsync/role.go
+++ b/coderd/idpsync/role.go
@@ -3,10 +3,10 @@ package idpsync
import (
"context"
"encoding/json"
+ "slices"
"github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
diff --git a/coderd/idpsync/role_test.go b/coderd/idpsync/role_test.go
index 45e9edd6c1..7d68644214 100644
--- a/coderd/idpsync/role_test.go
+++ b/coderd/idpsync/role_test.go
@@ -3,13 +3,13 @@ package idpsync_test
import (
"context"
"encoding/json"
+ "slices"
"testing"
"github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
- "golang.org/x/exp/slices"
"cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/v2/coderd/database"
diff --git a/coderd/insights.go b/coderd/insights.go
index 9c9fdcfa3c..9f2bbf5d8b 100644
--- a/coderd/insights.go
+++ b/coderd/insights.go
@@ -5,18 +5,17 @@ import (
"database/sql"
"fmt"
"net/http"
+ "slices"
"strings"
"time"
- "github.com/coder/coder/v2/coderd/database/dbtime"
-
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/db2sdk"
+ "github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/rbac/policy"
diff --git a/coderd/members.go b/coderd/members.go
index 97950b19e9..c89b4c9c09 100644
--- a/coderd/members.go
+++ b/coderd/members.go
@@ -323,7 +323,7 @@ func convertOrganizationMembers(ctx context.Context, db database.Store, mems []d
customRoles, err := db.CustomRoles(ctx, database.CustomRolesParams{
LookupRoles: roleLookup,
ExcludeOrgRoles: false,
- OrganizationID: uuid.UUID{},
+ OrganizationID: uuid.Nil,
})
if err != nil {
// We are missing the display names, but that is not absolutely required. So just
diff --git a/coderd/metricscache/metricscache.go b/coderd/metricscache/metricscache.go
index 3452ef2cce..9a18400c8d 100644
--- a/coderd/metricscache/metricscache.go
+++ b/coderd/metricscache/metricscache.go
@@ -15,6 +15,7 @@ import (
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/codersdk"
+ "github.com/coder/quartz"
"github.com/coder/retry"
)
@@ -26,6 +27,7 @@ import (
type Cache struct {
database database.Store
log slog.Logger
+ clock quartz.Clock
intervals Intervals
templateWorkspaceOwners atomic.Pointer[map[uuid.UUID]int]
@@ -45,7 +47,7 @@ type Intervals struct {
DeploymentStats time.Duration
}
-func New(db database.Store, log slog.Logger, intervals Intervals, usage bool) *Cache {
+func New(db database.Store, log slog.Logger, clock quartz.Clock, intervals Intervals, usage bool) *Cache {
if intervals.TemplateBuildTimes <= 0 {
intervals.TemplateBuildTimes = time.Hour
}
@@ -55,6 +57,7 @@ func New(db database.Store, log slog.Logger, intervals Intervals, usage bool) *C
ctx, cancel := context.WithCancel(context.Background())
c := &Cache{
+ clock: clock,
database: db,
intervals: intervals,
log: log,
@@ -104,7 +107,7 @@ func (c *Cache) refreshTemplateBuildTimes(ctx context.Context) error {
Valid: true,
},
StartTime: sql.NullTime{
- Time: dbtime.Time(time.Now().AddDate(0, 0, -30)),
+ Time: dbtime.Time(c.clock.Now().AddDate(0, 0, -30)),
Valid: true,
},
})
@@ -131,7 +134,7 @@ func (c *Cache) refreshTemplateBuildTimes(ctx context.Context) error {
func (c *Cache) refreshDeploymentStats(ctx context.Context) error {
var (
- from = dbtime.Now().Add(-15 * time.Minute)
+ from = c.clock.Now().Add(-15 * time.Minute)
agentStats database.GetDeploymentWorkspaceAgentStatsRow
err error
)
@@ -155,8 +158,8 @@ func (c *Cache) refreshDeploymentStats(ctx context.Context) error {
}
c.deploymentStatsResponse.Store(&codersdk.DeploymentStats{
AggregatedFrom: from,
- CollectedAt: dbtime.Now(),
- NextUpdateAt: dbtime.Now().Add(c.intervals.DeploymentStats),
+ CollectedAt: dbtime.Time(c.clock.Now()),
+ NextUpdateAt: dbtime.Time(c.clock.Now().Add(c.intervals.DeploymentStats)),
Workspaces: codersdk.WorkspaceDeploymentStats{
Pending: workspaceStats.PendingWorkspaces,
Building: workspaceStats.BuildingWorkspaces,
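
Side note on the hunk above: the cache no longer reads wall-clock time directly, so callers choose between real and mocked time via the injected `quartz.Clock`. Below is a minimal illustrative sketch (not part of the patch) of wiring a mock clock in a test, using only the constructor signature from this hunk plus helpers that appear in the test changes that follow:

```go
package metricscache_test

import (
	"testing"
	"time"

	"github.com/coder/coder/v2/coderd/database/dbtestutil"
	"github.com/coder/coder/v2/coderd/metricscache"
	"github.com/coder/coder/v2/testutil"
	"github.com/coder/quartz"
)

// Sketch: pin the cache's clock so the 30-day build-time window and the
// 15-minute deployment-stats window computed above are deterministic.
func TestCache_MockClock_Sketch(t *testing.T) {
	t.Parallel()

	db, _ := dbtestutil.NewDB(t)

	clock := quartz.NewMock(t)
	clock.Set(time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC))

	cache := metricscache.New(db, testutil.Logger(t), clock, metricscache.Intervals{
		TemplateBuildTimes: testutil.IntervalFast,
		DeploymentStats:    testutil.IntervalFast,
	}, false)
	defer cache.Close()
}
```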
diff --git a/coderd/metricscache/metricscache_test.go b/coderd/metricscache/metricscache_test.go
index 24b22d012c..b825bc6454 100644
--- a/coderd/metricscache/metricscache_test.go
+++ b/coderd/metricscache/metricscache_test.go
@@ -4,42 +4,68 @@ import (
"context"
"database/sql"
"encoding/json"
+ "sync/atomic"
"testing"
"time"
"github.com/google/uuid"
+ "github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/require"
+ "cdr.dev/slog"
"github.com/coder/coder/v2/coderd/database"
+ "github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/database/dbgen"
- "github.com/coder/coder/v2/coderd/database/dbmem"
- "github.com/coder/coder/v2/coderd/database/dbtime"
+ "github.com/coder/coder/v2/coderd/database/dbtestutil"
"github.com/coder/coder/v2/coderd/metricscache"
+ "github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
+ "github.com/coder/quartz"
)
func date(year, month, day int) time.Time {
return time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC)
}
+func newMetricsCache(t *testing.T, log slog.Logger, clock quartz.Clock, intervals metricscache.Intervals, usage bool) (*metricscache.Cache, database.Store) {
+ t.Helper()
+
+ accessControlStore := &atomic.Pointer[dbauthz.AccessControlStore]{}
+ var acs dbauthz.AccessControlStore = dbauthz.AGPLTemplateAccessControlStore{}
+ accessControlStore.Store(&acs)
+
+ var (
+ auth = rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry())
+ db, _ = dbtestutil.NewDB(t)
+ dbauth = dbauthz.New(db, auth, log, accessControlStore)
+ cache = metricscache.New(dbauth, log, clock, intervals, usage)
+ )
+
+ t.Cleanup(func() { cache.Close() })
+
+ return cache, db
+}
+
func TestCache_TemplateWorkspaceOwners(t *testing.T) {
t.Parallel()
var ()
var (
- db = dbmem.New()
- cache = metricscache.New(db, testutil.Logger(t), metricscache.Intervals{
+ log = testutil.Logger(t)
+ clock = quartz.NewReal()
+ cache, db = newMetricsCache(t, log, clock, metricscache.Intervals{
TemplateBuildTimes: testutil.IntervalFast,
}, false)
)
- defer cache.Close()
-
+ org := dbgen.Organization(t, db, database.Organization{})
user1 := dbgen.User(t, db, database.User{})
user2 := dbgen.User(t, db, database.User{})
template := dbgen.Template(t, db, database.Template{
- Provisioner: database.ProvisionerTypeEcho,
+ OrganizationID: org.ID,
+ Provisioner: database.ProvisionerTypeEcho,
+ CreatedBy: user1.ID,
})
require.Eventuallyf(t, func() bool {
count, ok := cache.TemplateWorkspaceOwners(template.ID)
@@ -49,8 +75,9 @@ func TestCache_TemplateWorkspaceOwners(t *testing.T) {
)
dbgen.Workspace(t, db, database.WorkspaceTable{
- TemplateID: template.ID,
- OwnerID: user1.ID,
+ OrganizationID: org.ID,
+ TemplateID: template.ID,
+ OwnerID: user1.ID,
})
require.Eventuallyf(t, func() bool {
@@ -61,8 +88,9 @@ func TestCache_TemplateWorkspaceOwners(t *testing.T) {
)
workspace2 := dbgen.Workspace(t, db, database.WorkspaceTable{
- TemplateID: template.ID,
- OwnerID: user2.ID,
+ OrganizationID: org.ID,
+ TemplateID: template.ID,
+ OwnerID: user2.ID,
})
require.Eventuallyf(t, func() bool {
@@ -74,8 +102,9 @@ func TestCache_TemplateWorkspaceOwners(t *testing.T) {
// 3rd workspace should not be counted since we have the same owner as workspace2.
dbgen.Workspace(t, db, database.WorkspaceTable{
- TemplateID: template.ID,
- OwnerID: user1.ID,
+ OrganizationID: org.ID,
+ TemplateID: template.ID,
+ OwnerID: user1.ID,
})
db.UpdateWorkspaceDeletedByID(context.Background(), database.UpdateWorkspaceDeletedByIDParams{
@@ -149,7 +178,7 @@ func TestCache_BuildTime(t *testing.T) {
},
},
transition: database.WorkspaceTransitionStop,
- }, want{30 * 1000, true},
+ }, want{10 * 1000, true},
},
{
"three/delete", args{
@@ -176,67 +205,57 @@ func TestCache_BuildTime(t *testing.T) {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
- ctx := context.Background()
var (
- db = dbmem.New()
- cache = metricscache.New(db, testutil.Logger(t), metricscache.Intervals{
+ log = testutil.Logger(t)
+ clock = quartz.NewMock(t)
+ cache, db = newMetricsCache(t, log, clock, metricscache.Intervals{
TemplateBuildTimes: testutil.IntervalFast,
}, false)
)
- defer cache.Close()
+ clock.Set(someDay)
- id := uuid.New()
- err := db.InsertTemplate(ctx, database.InsertTemplateParams{
- ID: id,
- Provisioner: database.ProvisionerTypeEcho,
- MaxPortSharingLevel: database.AppSharingLevelOwner,
- })
- require.NoError(t, err)
- template, err := db.GetTemplateByID(ctx, id)
- require.NoError(t, err)
+ org := dbgen.Organization(t, db, database.Organization{})
+ user := dbgen.User(t, db, database.User{})
- templateVersionID := uuid.New()
- err = db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{
- ID: templateVersionID,
- TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
+ template := dbgen.Template(t, db, database.Template{
+ CreatedBy: user.ID,
+ OrganizationID: org.ID,
+ })
+
+ templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{
+ OrganizationID: org.ID,
+ CreatedBy: user.ID,
+ TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
+ })
+
+ workspace := dbgen.Workspace(t, db, database.WorkspaceTable{
+ OrganizationID: org.ID,
+ OwnerID: user.ID,
+ TemplateID: template.ID,
})
- require.NoError(t, err)
gotStats := cache.TemplateBuildTimeStats(template.ID)
requireBuildTimeStatsEmpty(t, gotStats)
- for _, row := range tt.args.rows {
- _, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{
- ID: uuid.New(),
- Provisioner: database.ProvisionerTypeEcho,
- StorageMethod: database.ProvisionerStorageMethodFile,
- Type: database.ProvisionerJobTypeWorkspaceBuild,
+ for buildNumber, row := range tt.args.rows {
+ job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
+ OrganizationID: org.ID,
+ InitiatorID: user.ID,
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ StartedAt: sql.NullTime{Time: row.startedAt, Valid: true},
+ CompletedAt: sql.NullTime{Time: row.completedAt, Valid: true},
})
- require.NoError(t, err)
- job, err := db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{
- StartedAt: sql.NullTime{Time: row.startedAt, Valid: true},
- Types: []database.ProvisionerType{
- database.ProvisionerTypeEcho,
- },
- })
- require.NoError(t, err)
-
- err = db.InsertWorkspaceBuild(ctx, database.InsertWorkspaceBuildParams{
- TemplateVersionID: templateVersionID,
+ dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{
+ BuildNumber: int32(1 + buildNumber),
+ WorkspaceID: workspace.ID,
+ InitiatorID: user.ID,
+ TemplateVersionID: templateVersion.ID,
JobID: job.ID,
Transition: tt.args.transition,
- Reason: database.BuildReasonInitiator,
})
- require.NoError(t, err)
-
- err = db.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{
- ID: job.ID,
- CompletedAt: sql.NullTime{Time: row.completedAt, Valid: true},
- })
- require.NoError(t, err)
}
if tt.want.loads {
@@ -274,15 +293,18 @@ func TestCache_BuildTime(t *testing.T) {
func TestCache_DeploymentStats(t *testing.T) {
t.Parallel()
- db := dbmem.New()
- cache := metricscache.New(db, testutil.Logger(t), metricscache.Intervals{
- DeploymentStats: testutil.IntervalFast,
- }, false)
- defer cache.Close()
+
+ var (
+ log = testutil.Logger(t)
+ clock = quartz.NewMock(t)
+ cache, db = newMetricsCache(t, log, clock, metricscache.Intervals{
+ DeploymentStats: testutil.IntervalFast,
+ }, false)
+ )
err := db.InsertWorkspaceAgentStats(context.Background(), database.InsertWorkspaceAgentStatsParams{
ID: []uuid.UUID{uuid.New()},
- CreatedAt: []time.Time{dbtime.Now()},
+ CreatedAt: []time.Time{clock.Now()},
WorkspaceID: []uuid.UUID{uuid.New()},
UserID: []uuid.UUID{uuid.New()},
TemplateID: []uuid.UUID{uuid.New()},
diff --git a/coderd/notifications_test.go b/coderd/notifications_test.go
index 2e8d851522..d504648692 100644
--- a/coderd/notifications_test.go
+++ b/coderd/notifications_test.go
@@ -2,10 +2,10 @@ package coderd_test
import (
"net/http"
+ "slices"
"testing"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
"github.com/coder/serpent"
diff --git a/coderd/prometheusmetrics/insights/metricscollector.go b/coderd/prometheusmetrics/insights/metricscollector.go
index 7dcf6025f2..f7ecb06e96 100644
--- a/coderd/prometheusmetrics/insights/metricscollector.go
+++ b/coderd/prometheusmetrics/insights/metricscollector.go
@@ -2,12 +2,12 @@ package insights
import (
"context"
+ "slices"
"sync/atomic"
"time"
"github.com/google/uuid"
"github.com/prometheus/client_golang/prometheus"
- "golang.org/x/exp/slices"
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
diff --git a/coderd/provisionerdaemons_test.go b/coderd/provisionerdaemons_test.go
index d6d1138f7a..249da9d6bc 100644
--- a/coderd/provisionerdaemons_test.go
+++ b/coderd/provisionerdaemons_test.go
@@ -159,8 +159,8 @@ func TestProvisionerDaemons(t *testing.T) {
})
require.NoError(t, err)
require.Len(t, daemons, 2)
- require.Equal(t, pd1.ID, daemons[0].ID)
- require.Equal(t, pd2.ID, daemons[1].ID)
+ require.Equal(t, pd1.ID, daemons[1].ID)
+ require.Equal(t, pd2.ID, daemons[0].ID)
})
t.Run("Tags", func(t *testing.T) {
diff --git a/coderd/provisionerdserver/acquirer.go b/coderd/provisionerdserver/acquirer.go
index 4c2fe6b1d4..a655edebfd 100644
--- a/coderd/provisionerdserver/acquirer.go
+++ b/coderd/provisionerdserver/acquirer.go
@@ -4,13 +4,13 @@ import (
"context"
"database/sql"
"encoding/json"
+ "slices"
"strings"
"sync"
"time"
"github.com/cenkalti/backoff/v4"
"github.com/google/uuid"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"cdr.dev/slog"
diff --git a/coderd/provisionerdserver/acquirer_test.go b/coderd/provisionerdserver/acquirer_test.go
index 6e4d6a4ff7..22794c7265 100644
--- a/coderd/provisionerdserver/acquirer_test.go
+++ b/coderd/provisionerdserver/acquirer_test.go
@@ -5,6 +5,7 @@ import (
"database/sql"
"encoding/json"
"fmt"
+ "slices"
"strings"
"sync"
"testing"
@@ -15,7 +16,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
- "golang.org/x/exp/slices"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbmem"
diff --git a/coderd/provisionerdserver/provisionerdserver.go b/coderd/provisionerdserver/provisionerdserver.go
index b1ae055324..90bb76be56 100644
--- a/coderd/provisionerdserver/provisionerdserver.go
+++ b/coderd/provisionerdserver/provisionerdserver.go
@@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"reflect"
+ "slices"
"sort"
"strconv"
"strings"
@@ -22,7 +23,6 @@ import (
semconv "go.opentelemetry.io/otel/semconv/v1.14.0"
"go.opentelemetry.io/otel/trace"
"golang.org/x/exp/maps"
- "golang.org/x/exp/slices"
"golang.org/x/oauth2"
"golang.org/x/xerrors"
protobuf "google.golang.org/protobuf/proto"
@@ -608,6 +608,19 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo
})
}
+ roles, err := s.Database.GetAuthorizationUserRoles(ctx, owner.ID)
+ if err != nil {
+ return nil, failJob(fmt.Sprintf("get owner authorization roles: %s", err))
+ }
+ ownerRbacRoles := []*sdkproto.Role{}
+ for _, role := range roles.Roles {
+ if s.OrganizationID == uuid.Nil {
+ ownerRbacRoles = append(ownerRbacRoles, &sdkproto.Role{Name: role, OrgId: ""})
+ continue
+ }
+ ownerRbacRoles = append(ownerRbacRoles, &sdkproto.Role{Name: role, OrgId: s.OrganizationID.String()})
+ }
+
protoJob.Type = &proto.AcquiredJob_WorkspaceBuild_{
WorkspaceBuild: &proto.AcquiredJob_WorkspaceBuild{
WorkspaceBuildId: workspaceBuild.ID.String(),
@@ -635,6 +648,7 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo
WorkspaceOwnerSshPrivateKey: ownerSSHPrivateKey,
WorkspaceBuildId: workspaceBuild.ID.String(),
WorkspaceOwnerLoginType: string(owner.LoginType),
+ WorkspaceOwnerRbacRoles: ownerRbacRoles,
IsPrebuild: input.IsPrebuild,
RunningWorkspaceAgentToken: runningWorkspaceAgentToken,
},
diff --git a/coderd/provisionerdserver/provisionerdserver_test.go b/coderd/provisionerdserver/provisionerdserver_test.go
index cc73089e82..4d147a48f6 100644
--- a/coderd/provisionerdserver/provisionerdserver_test.go
+++ b/coderd/provisionerdserver/provisionerdserver_test.go
@@ -377,6 +377,7 @@ func TestAcquireJob(t *testing.T) {
WorkspaceOwnerSshPrivateKey: sshKey.PrivateKey,
WorkspaceBuildId: build.ID.String(),
WorkspaceOwnerLoginType: string(user.LoginType),
+ WorkspaceOwnerRbacRoles: []*sdkproto.Role{{Name: "member", OrgId: pd.OrganizationID.String()}},
},
},
})
diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go
index e1fefada0f..47b8c58a6f 100644
--- a/coderd/rbac/object_gen.go
+++ b/coderd/rbac/object_gen.go
@@ -27,22 +27,21 @@ var (
// ResourceAssignOrgRole
// Valid Actions
- // - "ActionAssign" :: ability to assign org scoped roles
- // - "ActionCreate" :: ability to create/delete custom roles within an organization
- // - "ActionDelete" :: ability to delete org scoped roles
- // - "ActionRead" :: view what roles are assignable
- // - "ActionUpdate" :: ability to edit custom roles within an organization
+ // - "ActionAssign" :: assign org scoped roles
+ // - "ActionCreate" :: create/delete custom roles within an organization
+ // - "ActionDelete" :: delete roles within an organization
+ // - "ActionRead" :: view what roles are assignable within an organization
+ // - "ActionUnassign" :: unassign org scoped roles
+ // - "ActionUpdate" :: edit custom roles within an organization
ResourceAssignOrgRole = Object{
Type: "assign_org_role",
}
// ResourceAssignRole
// Valid Actions
- // - "ActionAssign" :: ability to assign roles
- // - "ActionCreate" :: ability to create/delete/edit custom roles
- // - "ActionDelete" :: ability to unassign roles
+ // - "ActionAssign" :: assign user roles
// - "ActionRead" :: view what roles are assignable
- // - "ActionUpdate" :: ability to edit custom roles
+ // - "ActionUnassign" :: unassign user roles
ResourceAssignRole = Object{
Type: "assign_role",
}
@@ -120,6 +119,15 @@ var (
Type: "idpsync_settings",
}
+ // ResourceInboxNotification
+ // Valid Actions
+ // - "ActionCreate" :: create inbox notifications
+ // - "ActionRead" :: read inbox notifications
+ // - "ActionUpdate" :: update inbox notifications
+ ResourceInboxNotification = Object{
+ Type: "inbox_notification",
+ }
+
// ResourceLicense
// Valid Actions
// - "ActionCreate" :: create a license
@@ -335,6 +343,7 @@ func AllResources() []Objecter {
ResourceGroup,
ResourceGroupMember,
ResourceIdpsyncSettings,
+ ResourceInboxNotification,
ResourceLicense,
ResourceNotificationMessage,
ResourceNotificationPreference,
@@ -367,6 +376,7 @@ func AllActions() []policy.Action {
policy.ActionRead,
policy.ActionReadPersonal,
policy.ActionSSH,
+ policy.ActionUnassign,
policy.ActionUpdate,
policy.ActionUpdatePersonal,
policy.ActionUse,
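
For orientation, the regenerated `ResourceInboxNotification` object and the `unassign` action can be referenced wherever permissions are declared. A hedged sketch (illustrative only), reusing the `Permission` field names visible in the roles.go hunk later in this patch:

```go
package example

import (
	"github.com/coder/coder/v2/coderd/rbac"
	"github.com/coder/coder/v2/coderd/rbac/policy"
)

// inboxAndUnassignPerms sketches permissions that reference the newly
// generated resource and action.
var inboxAndUnassignPerms = []rbac.Permission{
	// Read inbox notifications.
	{ResourceType: rbac.ResourceInboxNotification.Type, Action: policy.ActionRead},
	// Remove an org role assignment; distinct from ActionDelete, which now
	// means deleting the custom role definition itself.
	{ResourceType: rbac.ResourceAssignOrgRole.Type, Action: policy.ActionUnassign},
}
```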
diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go
index 2aae17badf..7f9736eaad 100644
--- a/coderd/rbac/policy/policy.go
+++ b/coderd/rbac/policy/policy.go
@@ -19,7 +19,8 @@ const (
ActionWorkspaceStart Action = "start"
ActionWorkspaceStop Action = "stop"
- ActionAssign Action = "assign"
+ ActionAssign Action = "assign"
+ ActionUnassign Action = "unassign"
ActionReadPersonal Action = "read_personal"
ActionUpdatePersonal Action = "update_personal"
@@ -221,20 +222,19 @@ var RBACPermissions = map[string]PermissionDefinition{
},
"assign_role": {
Actions: map[Action]ActionDefinition{
- ActionAssign: actDef("ability to assign roles"),
- ActionRead: actDef("view what roles are assignable"),
- ActionDelete: actDef("ability to unassign roles"),
- ActionCreate: actDef("ability to create/delete/edit custom roles"),
- ActionUpdate: actDef("ability to edit custom roles"),
+ ActionAssign: actDef("assign user roles"),
+ ActionUnassign: actDef("unassign user roles"),
+ ActionRead: actDef("view what roles are assignable"),
},
},
"assign_org_role": {
Actions: map[Action]ActionDefinition{
- ActionAssign: actDef("ability to assign org scoped roles"),
- ActionRead: actDef("view what roles are assignable"),
- ActionDelete: actDef("ability to delete org scoped roles"),
- ActionCreate: actDef("ability to create/delete custom roles within an organization"),
- ActionUpdate: actDef("ability to edit custom roles within an organization"),
+ ActionAssign: actDef("assign org scoped roles"),
+ ActionUnassign: actDef("unassign org scoped roles"),
+ ActionCreate: actDef("create/delete custom roles within an organization"),
+ ActionRead: actDef("view what roles are assignable within an organization"),
+ ActionUpdate: actDef("edit custom roles within an organization"),
+ ActionDelete: actDef("delete roles within an organization"),
},
},
"oauth2_app": {
@@ -280,6 +280,13 @@ var RBACPermissions = map[string]PermissionDefinition{
ActionUpdate: actDef("update notification preferences"),
},
},
+ "inbox_notification": {
+ Actions: map[Action]ActionDefinition{
+ ActionCreate: actDef("create inbox notifications"),
+ ActionRead: actDef("read inbox notifications"),
+ ActionUpdate: actDef("update inbox notifications"),
+ },
+ },
"crypto_key": {
Actions: map[Action]ActionDefinition{
ActionRead: actDef("read crypto keys"),
diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go
index 7c73301643..6b99cb4e87 100644
--- a/coderd/rbac/roles.go
+++ b/coderd/rbac/roles.go
@@ -27,11 +27,12 @@ const (
customSiteRole string = "custom-site-role"
customOrganizationRole string = "custom-organization-role"
- orgAdmin string = "organization-admin"
- orgMember string = "organization-member"
- orgAuditor string = "organization-auditor"
- orgUserAdmin string = "organization-user-admin"
- orgTemplateAdmin string = "organization-template-admin"
+ orgAdmin string = "organization-admin"
+ orgMember string = "organization-member"
+ orgAuditor string = "organization-auditor"
+ orgUserAdmin string = "organization-user-admin"
+ orgTemplateAdmin string = "organization-template-admin"
+ orgWorkspaceCreationBan string = "organization-workspace-creation-ban"
)
func init() {
@@ -159,6 +160,10 @@ func RoleOrgTemplateAdmin() string {
return orgTemplateAdmin
}
+func RoleOrgWorkspaceCreationBan() string {
+ return orgWorkspaceCreationBan
+}
+
// ScopedRoleOrgAdmin is the org role with the organization ID
func ScopedRoleOrgAdmin(organizationID uuid.UUID) RoleIdentifier {
return RoleIdentifier{Name: RoleOrgAdmin(), OrganizationID: organizationID}
@@ -181,6 +186,10 @@ func ScopedRoleOrgTemplateAdmin(organizationID uuid.UUID) RoleIdentifier {
return RoleIdentifier{Name: RoleOrgTemplateAdmin(), OrganizationID: organizationID}
}
+func ScopedRoleOrgWorkspaceCreationBan(organizationID uuid.UUID) RoleIdentifier {
+ return RoleIdentifier{Name: RoleOrgWorkspaceCreationBan(), OrganizationID: organizationID}
+}
+
func allPermsExcept(excepts ...Objecter) []Permission {
resources := AllResources()
var perms []Permission
@@ -298,7 +307,8 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
Identifier: RoleAuditor(),
DisplayName: "Auditor",
Site: Permissions(map[string][]policy.Action{
- ResourceAuditLog.Type: {policy.ActionRead},
+ ResourceAssignOrgRole.Type: {policy.ActionRead},
+ ResourceAuditLog.Type: {policy.ActionRead},
// Allow auditors to see the resources that audit logs reflect.
ResourceTemplate.Type: {policy.ActionRead, policy.ActionViewInsights},
ResourceUser.Type: {policy.ActionRead},
@@ -318,7 +328,8 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
Identifier: RoleTemplateAdmin(),
DisplayName: "Template Admin",
Site: Permissions(map[string][]policy.Action{
- ResourceTemplate.Type: ResourceTemplate.AvailableActions(),
+ ResourceAssignOrgRole.Type: {policy.ActionRead},
+ ResourceTemplate.Type: ResourceTemplate.AvailableActions(),
// CRUD all files, even those they did not upload.
ResourceFile.Type: {policy.ActionCreate, policy.ActionRead},
ResourceWorkspace.Type: {policy.ActionRead},
@@ -339,10 +350,10 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
Identifier: RoleUserAdmin(),
DisplayName: "User Admin",
Site: Permissions(map[string][]policy.Action{
- ResourceAssignRole.Type: {policy.ActionAssign, policy.ActionDelete, policy.ActionRead},
+ ResourceAssignRole.Type: {policy.ActionAssign, policy.ActionUnassign, policy.ActionRead},
// Need organization assign as well to create users. At present, creating a user
// will always assign them to some organization.
- ResourceAssignOrgRole.Type: {policy.ActionAssign, policy.ActionDelete, policy.ActionRead},
+ ResourceAssignOrgRole.Type: {policy.ActionAssign, policy.ActionUnassign, policy.ActionRead},
ResourceUser.Type: {
policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete,
policy.ActionUpdatePersonal, policy.ActionReadPersonal,
@@ -459,7 +470,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
Org: map[string][]Permission{
organizationID.String(): Permissions(map[string][]policy.Action{
// Assign, remove, and read roles in the organization.
- ResourceAssignOrgRole.Type: {policy.ActionAssign, policy.ActionDelete, policy.ActionRead},
+ ResourceAssignOrgRole.Type: {policy.ActionAssign, policy.ActionUnassign, policy.ActionRead},
ResourceOrganization.Type: {policy.ActionRead},
ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
ResourceGroup.Type: ResourceGroup.AvailableActions(),
@@ -496,6 +507,31 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
User: []Permission{},
}
},
+ // orgWorkspaceCreationBan prevents creating & deleting workspaces. This
+	// overrides any permissions granted at the org or user level. It accomplishes
+ // this by using negative permissions.
+ orgWorkspaceCreationBan: func(organizationID uuid.UUID) Role {
+ return Role{
+ Identifier: RoleIdentifier{Name: orgWorkspaceCreationBan, OrganizationID: organizationID},
+ DisplayName: "Organization Workspace Creation Ban",
+ Site: []Permission{},
+ Org: map[string][]Permission{
+ organizationID.String(): {
+ {
+ Negate: true,
+ ResourceType: ResourceWorkspace.Type,
+ Action: policy.ActionCreate,
+ },
+ {
+ Negate: true,
+ ResourceType: ResourceWorkspace.Type,
+ Action: policy.ActionDelete,
+ },
+ },
+ },
+ User: []Permission{},
+ }
+ },
}
}
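
A small illustrative sketch of how the new ban role would be attached alongside a normal membership (`rbac.ScopedRoleOrgMember` is assumed from the existing scoped helpers; only the ban identifier is added in this hunk). Because its permissions are negated, workspace create/delete is denied even when another assigned role allows it:

```go
package example

import (
	"github.com/google/uuid"

	"github.com/coder/coder/v2/coderd/rbac"
)

// orgRolesWithBan sketches pairing the ban with a normal org membership; the
// ban's negated permissions win over the member role's allows.
func orgRolesWithBan(orgID uuid.UUID) []rbac.RoleIdentifier {
	return []rbac.RoleIdentifier{
		rbac.ScopedRoleOrgMember(orgID),
		rbac.ScopedRoleOrgWorkspaceCreationBan(orgID),
	}
}
```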
@@ -506,44 +542,47 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
// map[actor_role][assign_role]
+To enable early access features:
+
+Use the [Coder CLI](../install/cli.md) `--experiments` flag to enable early access features:
+
+- Enable all early access features:
+
+ ```shell
+ coder server --experiments=*
+ ```
+
+- Enable multiple early access features:
+
+ ```shell
+ coder server --experiments=feature1,feature2
+ ```
+
+You can also use the `CODER_EXPERIMENTS` [environment variable](../admin/setup/index.md).
+
+You can opt out of a feature after you've enabled it.
+
+### -l, --limit
+
+|             |                                 |
+|-------------|---------------------------------|
+| Type        | int                             |
+| Environment | $CODER_PROVISIONER_LIST_LIMIT   |
+| Default     | 50                              |
+
+Limit the number of provisioners returned.
+
### -O, --org
| | |
diff --git a/enterprise/cli/provisionerdaemonstart.go b/enterprise/cli/provisionerdaemonstart.go
index 8d7d319d39..e0b3e00c63 100644
--- a/enterprise/cli/provisionerdaemonstart.go
+++ b/enterprise/cli/provisionerdaemonstart.go
@@ -225,7 +225,6 @@ func (r *RootCmd) provisionerDaemonStart() *serpent.Command {
}
srv := provisionerd.New(func(ctx context.Context) (provisionerdproto.DRPCProvisionerDaemonClient, error) {
return client.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: name,
Provisioners: []codersdk.ProvisionerType{
codersdk.ProvisionerTypeTerraform,
diff --git a/enterprise/cli/testdata/coder_provisioner_list_--help.golden b/enterprise/cli/testdata/coder_provisioner_list_--help.golden
index 111eb8315b..ac889fb6dc 100644
--- a/enterprise/cli/testdata/coder_provisioner_list_--help.golden
+++ b/enterprise/cli/testdata/coder_provisioner_list_--help.golden
@@ -14,6 +14,9 @@ OPTIONS:
-c, --column [id|organization id|created at|last seen at|name|version|api version|tags|key name|status|current job id|current job status|current job template name|current job template icon|current job template display name|previous job id|previous job status|previous job template name|previous job template icon|previous job template display name|organization] (default: name,organization,status,key name,created at,last seen at,version,tags)
Columns to display in table output.
+ -l, --limit int, $CODER_PROVISIONER_LIST_LIMIT (default: 50)
+ Limit the number of provisioners returned.
+
-o, --output table|json (default: table)
Output format.
diff --git a/enterprise/cmd/coder/main.go b/enterprise/cmd/coder/main.go
index 803903f390..217cca324b 100644
--- a/enterprise/cmd/coder/main.go
+++ b/enterprise/cmd/coder/main.go
@@ -8,6 +8,7 @@ import (
tea "github.com/charmbracelet/bubbletea"
"github.com/coder/coder/v2/agent/agentexec"
+ _ "github.com/coder/coder/v2/buildinfo/resources"
entcli "github.com/coder/coder/v2/enterprise/cli"
)
diff --git a/enterprise/coderd/coderdenttest/coderdenttest.go b/enterprise/coderd/coderdenttest/coderdenttest.go
index d76722b5ba..a72c8c0199 100644
--- a/enterprise/coderd/coderdenttest/coderdenttest.go
+++ b/enterprise/coderd/coderdenttest/coderdenttest.go
@@ -388,7 +388,6 @@ func newExternalProvisionerDaemon(t testing.TB, client *codersdk.Client, org uui
daemon := provisionerd.New(func(ctx context.Context) (provisionerdproto.DRPCProvisionerDaemonClient, error) {
return client.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.GetRandomName(t),
Organization: org,
Provisioners: []codersdk.ProvisionerType{provisionerType},
diff --git a/enterprise/coderd/license/license_test.go b/enterprise/coderd/license/license_test.go
index ad7fc68f58..b8b25b9535 100644
--- a/enterprise/coderd/license/license_test.go
+++ b/enterprise/coderd/license/license_test.go
@@ -3,13 +3,13 @@ package license_test
import (
"context"
"fmt"
+ "slices"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbmem"
diff --git a/enterprise/coderd/provisionerdaemons.go b/enterprise/coderd/provisionerdaemons.go
index f433543865..5b0f0ca197 100644
--- a/enterprise/coderd/provisionerdaemons.go
+++ b/enterprise/coderd/provisionerdaemons.go
@@ -175,11 +175,6 @@ func (api *API) provisionerDaemonServe(rw http.ResponseWriter, r *http.Request)
return
}
- id, _ := uuid.Parse(r.URL.Query().Get("id"))
- if id == uuid.Nil {
- id = uuid.New()
- }
-
provisionersMap := map[codersdk.ProvisionerType]struct{}{}
for _, provisioner := range r.URL.Query()["provisioner"] {
switch provisioner {
@@ -295,7 +290,7 @@ func (api *API) provisionerDaemonServe(rw http.ResponseWriter, r *http.Request)
api.AGPL.WebsocketWaitMutex.Unlock()
defer api.AGPL.WebsocketWaitGroup.Done()
- tep := telemetry.ConvertExternalProvisioner(id, tags, provisioners)
+ tep := telemetry.ConvertExternalProvisioner(daemon.ID, tags, provisioners)
api.Telemetry.Report(&telemetry.Snapshot{ExternalProvisioners: []telemetry.ExternalProvisioner{tep}})
defer func() {
tep.ShutdownAt = ptr.Ref(time.Now())
diff --git a/enterprise/coderd/provisionerdaemons_test.go b/enterprise/coderd/provisionerdaemons_test.go
index 0cd812b45c..a84213f718 100644
--- a/enterprise/coderd/provisionerdaemons_test.go
+++ b/enterprise/coderd/provisionerdaemons_test.go
@@ -50,7 +50,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
defer cancel()
daemonName := testutil.MustRandString(t, 63)
srv, err := templateAdminClient.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: daemonName,
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -180,7 +179,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
defer cancel()
daemonName := testutil.MustRandString(t, 63)
_, err := templateAdminClient.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: daemonName,
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -205,7 +203,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
_, err := another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -229,7 +226,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
_, err := another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -360,7 +356,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
req := codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -425,7 +420,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
another := codersdk.New(client.URL)
pd := provisionerd.New(func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) {
return another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -503,7 +497,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
_, err := another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 32),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -538,7 +531,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
defer cancel()
another := codersdk.New(client.URL)
_, err := another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -571,7 +563,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
defer cancel()
another := codersdk.New(client.URL)
_, err := another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -698,7 +689,6 @@ func TestProvisionerDaemonServe(t *testing.T) {
another := codersdk.New(client.URL)
srv, err := another.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: testutil.MustRandString(t, 63),
Organization: user.OrganizationID,
Provisioners: []codersdk.ProvisionerType{
@@ -758,7 +748,6 @@ func TestGetProvisionerDaemons(t *testing.T) {
defer cancel()
daemonName := testutil.MustRandString(t, 63)
srv, err := orgAdmin.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
- ID: uuid.New(),
Name: daemonName,
Organization: org.ID,
Provisioners: []codersdk.ProvisionerType{
diff --git a/enterprise/coderd/roles.go b/enterprise/coderd/roles.go
index d5af54a35b..30432af76c 100644
--- a/enterprise/coderd/roles.go
+++ b/enterprise/coderd/roles.go
@@ -127,8 +127,7 @@ func (api *API) putOrgRoles(rw http.ResponseWriter, r *http.Request) {
},
},
ExcludeOrgRoles: false,
- // Linter requires all fields to be set. This field is not actually required.
- OrganizationID: organization.ID,
+ OrganizationID: organization.ID,
})
// If it is a 404 (not found) error, ignore it.
if err != nil && !httpapi.Is404Error(err) {
diff --git a/enterprise/coderd/roles_test.go b/enterprise/coderd/roles_test.go
index 8bbf921805..57b66a3682 100644
--- a/enterprise/coderd/roles_test.go
+++ b/enterprise/coderd/roles_test.go
@@ -441,10 +441,11 @@ func TestListRoles(t *testing.T) {
return member.ListOrganizationRoles(ctx, owner.OrganizationID)
},
ExpectedRoles: convertRoles(map[rbac.RoleIdentifier]bool{
- {Name: codersdk.RoleOrganizationAdmin, OrganizationID: owner.OrganizationID}: false,
- {Name: codersdk.RoleOrganizationAuditor, OrganizationID: owner.OrganizationID}: false,
- {Name: codersdk.RoleOrganizationTemplateAdmin, OrganizationID: owner.OrganizationID}: false,
- {Name: codersdk.RoleOrganizationUserAdmin, OrganizationID: owner.OrganizationID}: false,
+ {Name: codersdk.RoleOrganizationAdmin, OrganizationID: owner.OrganizationID}: false,
+ {Name: codersdk.RoleOrganizationAuditor, OrganizationID: owner.OrganizationID}: false,
+ {Name: codersdk.RoleOrganizationTemplateAdmin, OrganizationID: owner.OrganizationID}: false,
+ {Name: codersdk.RoleOrganizationUserAdmin, OrganizationID: owner.OrganizationID}: false,
+ {Name: codersdk.RoleOrganizationWorkspaceCreationBan, OrganizationID: owner.OrganizationID}: false,
}),
},
{
@@ -473,10 +474,11 @@ func TestListRoles(t *testing.T) {
return orgAdmin.ListOrganizationRoles(ctx, owner.OrganizationID)
},
ExpectedRoles: convertRoles(map[rbac.RoleIdentifier]bool{
- {Name: codersdk.RoleOrganizationAdmin, OrganizationID: owner.OrganizationID}: true,
- {Name: codersdk.RoleOrganizationAuditor, OrganizationID: owner.OrganizationID}: true,
- {Name: codersdk.RoleOrganizationTemplateAdmin, OrganizationID: owner.OrganizationID}: true,
- {Name: codersdk.RoleOrganizationUserAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationAuditor, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationTemplateAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationUserAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationWorkspaceCreationBan, OrganizationID: owner.OrganizationID}: true,
}),
},
{
@@ -505,10 +507,11 @@ func TestListRoles(t *testing.T) {
return client.ListOrganizationRoles(ctx, owner.OrganizationID)
},
ExpectedRoles: convertRoles(map[rbac.RoleIdentifier]bool{
- {Name: codersdk.RoleOrganizationAdmin, OrganizationID: owner.OrganizationID}: true,
- {Name: codersdk.RoleOrganizationAuditor, OrganizationID: owner.OrganizationID}: true,
- {Name: codersdk.RoleOrganizationTemplateAdmin, OrganizationID: owner.OrganizationID}: true,
- {Name: codersdk.RoleOrganizationUserAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationAuditor, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationTemplateAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationUserAdmin, OrganizationID: owner.OrganizationID}: true,
+ {Name: codersdk.RoleOrganizationWorkspaceCreationBan, OrganizationID: owner.OrganizationID}: true,
}),
},
}
diff --git a/go.mod b/go.mod
index c00f069c1b..3d9d0653ea 100644
--- a/go.mod
+++ b/go.mod
@@ -36,7 +36,7 @@ replace github.com/tcnksm/go-httpstat => github.com/coder/go-httpstat v0.0.0-202
// There are a few minor changes we make to Tailscale that we're slowly upstreaming. Compare here:
// https://github.com/tailscale/tailscale/compare/main...coder:tailscale:main
-replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20250129014916-8086c871eae6
+replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20250227024825-c9983534152a
// This is replaced to include
// 1. a fix for a data race: c.f. https://github.com/tailscale/wireguard-go/pull/25
diff --git a/go.sum b/go.sum
index bc0da5e530..7b32aabd47 100644
--- a/go.sum
+++ b/go.sum
@@ -236,8 +236,8 @@ github.com/coder/serpent v0.10.0 h1:ofVk9FJXSek+SmL3yVE3GoArP83M+1tX+H7S4t8BSuM=
github.com/coder/serpent v0.10.0/go.mod h1:cZFW6/fP+kE9nd/oRkEHJpG6sXCtQ+AX7WMMEHv0Y3Q=
github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw=
github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ=
-github.com/coder/tailscale v1.1.1-0.20250129014916-8086c871eae6 h1:prDIwUcsSEKbs1Rc5FfdvtSfz2XGpW3FnJtWR+Mc7MY=
-github.com/coder/tailscale v1.1.1-0.20250129014916-8086c871eae6/go.mod h1:1ggFFdHTRjPRu9Yc1yA7nVHBYB50w9Ce7VIXNqcW6Ko=
+github.com/coder/tailscale v1.1.1-0.20250227024825-c9983534152a h1:18TQ03KlYrkW8hOohTQaDnlmkY1H9pDPGbZwOnUUmm8=
+github.com/coder/tailscale v1.1.1-0.20250227024825-c9983534152a/go.mod h1:1ggFFdHTRjPRu9Yc1yA7nVHBYB50w9Ce7VIXNqcW6Ko=
github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e h1:JNLPDi2P73laR1oAclY6jWzAbucf70ASAvf5mh2cME0=
github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI=
github.com/coder/terraform-provider-coder/v2 v2.1.4-0.20250211100915-129c295afed8 h1:qslh7kQytybvJHlqTI3XKUuFRnZWgvEjzZKq6e1aQ2M=
diff --git a/helm/coder/README.md b/helm/coder/README.md
index 015c2e7039..172f880c83 100644
--- a/helm/coder/README.md
+++ b/helm/coder/README.md
@@ -47,6 +47,10 @@ coder:
# This env enables the Prometheus metrics endpoint.
- name: CODER_PROMETHEUS_ADDRESS
value: "0.0.0.0:2112"
+ # For production deployments, we recommend configuring your own GitHub
+ # OAuth2 provider and disabling the default one.
+ - name: CODER_OAUTH2_GITHUB_DEFAULT_PROVIDER_ENABLE
+ value: "false"
tls:
secretNames:
- my-tls-secret-name
diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go
index 33aa3556a4..3e69439575 100644
--- a/provisioner/terraform/provision.go
+++ b/provisioner/terraform/provision.go
@@ -242,6 +242,11 @@ func provisionEnv(
return nil, xerrors.Errorf("marshal owner groups: %w", err)
}
+ ownerRbacRoles, err := json.Marshal(metadata.GetWorkspaceOwnerRbacRoles())
+ if err != nil {
+ return nil, xerrors.Errorf("marshal owner rbac roles: %w", err)
+ }
+
env = append(env,
"CODER_AGENT_URL="+metadata.GetCoderUrl(),
"CODER_WORKSPACE_TRANSITION="+strings.ToLower(metadata.GetWorkspaceTransition().String()),
@@ -254,6 +259,7 @@ func provisionEnv(
"CODER_WORKSPACE_OWNER_SSH_PUBLIC_KEY="+metadata.GetWorkspaceOwnerSshPublicKey(),
"CODER_WORKSPACE_OWNER_SSH_PRIVATE_KEY="+metadata.GetWorkspaceOwnerSshPrivateKey(),
"CODER_WORKSPACE_OWNER_LOGIN_TYPE="+metadata.GetWorkspaceOwnerLoginType(),
+ "CODER_WORKSPACE_OWNER_RBAC_ROLES="+string(ownerRbacRoles),
"CODER_WORKSPACE_ID="+metadata.GetWorkspaceId(),
"CODER_WORKSPACE_OWNER_ID="+metadata.GetWorkspaceOwnerId(),
"CODER_WORKSPACE_OWNER_SESSION_TOKEN="+metadata.GetWorkspaceOwnerSessionToken(),
diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go
index 50681f276c..cd09ea2adf 100644
--- a/provisioner/terraform/provision_test.go
+++ b/provisioner/terraform/provision_test.go
@@ -764,6 +764,53 @@ func TestProvision(t *testing.T) {
}},
},
},
+ {
+ Name: "workspace-owner-rbac-roles",
+ SkipReason: "field will be added in provider version 2.2.0",
+ Files: map[string]string{
+ "main.tf": `terraform {
+ required_providers {
+ coder = {
+ source = "coder/coder"
+ version = "2.2.0"
+ }
+ }
+ }
+
+ resource "null_resource" "example" {}
+ data "coder_workspace_owner" "me" {}
+ resource "coder_metadata" "example" {
+ resource_id = null_resource.example.id
+ item {
+ key = "rbac_roles_name"
+ value = data.coder_workspace_owner.me.rbac_roles[0].name
+ }
+ item {
+ key = "rbac_roles_org_id"
+ value = data.coder_workspace_owner.me.rbac_roles[0].org_id
+ }
+ }
+ `,
+ },
+ Request: &proto.PlanRequest{
+ Metadata: &proto.Metadata{
+ WorkspaceOwnerRbacRoles: []*proto.Role{{Name: "member", OrgId: ""}},
+ },
+ },
+ Response: &proto.PlanComplete{
+ Resources: []*proto.Resource{{
+ Name: "example",
+ Type: "null_resource",
+ Metadata: []*proto.Resource_Metadata{{
+ Key: "rbac_roles_name",
+ Value: "member",
+ }, {
+ Key: "rbac_roles_org_id",
+ Value: "",
+ }},
+ }},
+ },
+ },
}
for _, testCase := range testCases {
diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go
index 7453483108..e44afce39e 100644
--- a/provisionersdk/proto/provisioner.pb.go
+++ b/provisionersdk/proto/provisioner.pb.go
@@ -699,53 +699,6 @@ func (x *RichParameterValue) GetValue() string {
return ""
}
-type Prebuild struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- Instances int32 `protobuf:"varint,1,opt,name=instances,proto3" json:"instances,omitempty"`
-}
-
-func (x *Prebuild) Reset() {
- *x = Prebuild{}
- if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
-}
-
-func (x *Prebuild) String() string {
- return protoimpl.X.MessageStringOf(x)
-}
-
-func (*Prebuild) ProtoMessage() {}
-
-func (x *Prebuild) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5]
- if protoimpl.UnsafeEnabled && x != nil {
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- if ms.LoadMessageInfo() == nil {
- ms.StoreMessageInfo(mi)
- }
- return ms
- }
- return mi.MessageOf(x)
-}
-
-// Deprecated: Use Prebuild.ProtoReflect.Descriptor instead.
-func (*Prebuild) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5}
-}
-
-func (x *Prebuild) GetInstances() int32 {
- if x != nil {
- return x.Instances
- }
- return 0
-}
-
// Preset represents a set of preset parameters for a template version.
type Preset struct {
state protoimpl.MessageState
@@ -754,13 +707,12 @@ type Preset struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Parameters []*PresetParameter `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty"`
- Prebuild *Prebuild `protobuf:"bytes,3,opt,name=prebuild,proto3" json:"prebuild,omitempty"`
}
func (x *Preset) Reset() {
*x = Preset{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -773,7 +725,7 @@ func (x *Preset) String() string {
func (*Preset) ProtoMessage() {}
func (x *Preset) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -786,7 +738,7 @@ func (x *Preset) ProtoReflect() protoreflect.Message {
// Deprecated: Use Preset.ProtoReflect.Descriptor instead.
func (*Preset) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5}
}
func (x *Preset) GetName() string {
@@ -803,13 +755,6 @@ func (x *Preset) GetParameters() []*PresetParameter {
return nil
}
-func (x *Preset) GetPrebuild() *Prebuild {
- if x != nil {
- return x.Prebuild
- }
- return nil
-}
-
type PresetParameter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@@ -822,7 +767,7 @@ type PresetParameter struct {
func (x *PresetParameter) Reset() {
*x = PresetParameter{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -835,7 +780,7 @@ func (x *PresetParameter) String() string {
func (*PresetParameter) ProtoMessage() {}
func (x *PresetParameter) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -848,7 +793,7 @@ func (x *PresetParameter) ProtoReflect() protoreflect.Message {
// Deprecated: Use PresetParameter.ProtoReflect.Descriptor instead.
func (*PresetParameter) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{7}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6}
}
func (x *PresetParameter) GetName() string {
@@ -879,7 +824,7 @@ type VariableValue struct {
func (x *VariableValue) Reset() {
*x = VariableValue{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -892,7 +837,7 @@ func (x *VariableValue) String() string {
func (*VariableValue) ProtoMessage() {}
func (x *VariableValue) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -905,7 +850,7 @@ func (x *VariableValue) ProtoReflect() protoreflect.Message {
// Deprecated: Use VariableValue.ProtoReflect.Descriptor instead.
func (*VariableValue) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{8}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{7}
}
func (x *VariableValue) GetName() string {
@@ -942,7 +887,7 @@ type Log struct {
func (x *Log) Reset() {
*x = Log{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -955,7 +900,7 @@ func (x *Log) String() string {
func (*Log) ProtoMessage() {}
func (x *Log) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -968,7 +913,7 @@ func (x *Log) ProtoReflect() protoreflect.Message {
// Deprecated: Use Log.ProtoReflect.Descriptor instead.
func (*Log) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{9}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{8}
}
func (x *Log) GetLevel() LogLevel {
@@ -996,7 +941,7 @@ type InstanceIdentityAuth struct {
func (x *InstanceIdentityAuth) Reset() {
*x = InstanceIdentityAuth{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1009,7 +954,7 @@ func (x *InstanceIdentityAuth) String() string {
func (*InstanceIdentityAuth) ProtoMessage() {}
func (x *InstanceIdentityAuth) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1022,7 +967,7 @@ func (x *InstanceIdentityAuth) ProtoReflect() protoreflect.Message {
// Deprecated: Use InstanceIdentityAuth.ProtoReflect.Descriptor instead.
func (*InstanceIdentityAuth) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{10}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{9}
}
func (x *InstanceIdentityAuth) GetInstanceId() string {
@@ -1044,7 +989,7 @@ type ExternalAuthProviderResource struct {
func (x *ExternalAuthProviderResource) Reset() {
*x = ExternalAuthProviderResource{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[11]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1057,7 +1002,7 @@ func (x *ExternalAuthProviderResource) String() string {
func (*ExternalAuthProviderResource) ProtoMessage() {}
func (x *ExternalAuthProviderResource) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[11]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1070,7 +1015,7 @@ func (x *ExternalAuthProviderResource) ProtoReflect() protoreflect.Message {
// Deprecated: Use ExternalAuthProviderResource.ProtoReflect.Descriptor instead.
func (*ExternalAuthProviderResource) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{11}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{10}
}
func (x *ExternalAuthProviderResource) GetId() string {
@@ -1099,7 +1044,7 @@ type ExternalAuthProvider struct {
func (x *ExternalAuthProvider) Reset() {
*x = ExternalAuthProvider{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[12]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[11]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1112,7 +1057,7 @@ func (x *ExternalAuthProvider) String() string {
func (*ExternalAuthProvider) ProtoMessage() {}
func (x *ExternalAuthProvider) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[12]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[11]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1125,7 +1070,7 @@ func (x *ExternalAuthProvider) ProtoReflect() protoreflect.Message {
// Deprecated: Use ExternalAuthProvider.ProtoReflect.Descriptor instead.
func (*ExternalAuthProvider) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{12}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{11}
}
func (x *ExternalAuthProvider) GetId() string {
@@ -1178,7 +1123,7 @@ type Agent struct {
func (x *Agent) Reset() {
*x = Agent{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[13]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[12]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1191,7 +1136,7 @@ func (x *Agent) String() string {
func (*Agent) ProtoMessage() {}
func (x *Agent) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[13]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[12]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1204,7 +1149,7 @@ func (x *Agent) ProtoReflect() protoreflect.Message {
// Deprecated: Use Agent.ProtoReflect.Descriptor instead.
func (*Agent) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{13}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{12}
}
func (x *Agent) GetId() string {
@@ -1368,7 +1313,7 @@ type ResourcesMonitoring struct {
func (x *ResourcesMonitoring) Reset() {
*x = ResourcesMonitoring{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[14]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[13]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1381,7 +1326,7 @@ func (x *ResourcesMonitoring) String() string {
func (*ResourcesMonitoring) ProtoMessage() {}
func (x *ResourcesMonitoring) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[14]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[13]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1394,7 +1339,7 @@ func (x *ResourcesMonitoring) ProtoReflect() protoreflect.Message {
// Deprecated: Use ResourcesMonitoring.ProtoReflect.Descriptor instead.
func (*ResourcesMonitoring) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{14}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{13}
}
func (x *ResourcesMonitoring) GetMemory() *MemoryResourceMonitor {
@@ -1423,7 +1368,7 @@ type MemoryResourceMonitor struct {
func (x *MemoryResourceMonitor) Reset() {
*x = MemoryResourceMonitor{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[15]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[14]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1436,7 +1381,7 @@ func (x *MemoryResourceMonitor) String() string {
func (*MemoryResourceMonitor) ProtoMessage() {}
func (x *MemoryResourceMonitor) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[15]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[14]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1449,7 +1394,7 @@ func (x *MemoryResourceMonitor) ProtoReflect() protoreflect.Message {
// Deprecated: Use MemoryResourceMonitor.ProtoReflect.Descriptor instead.
func (*MemoryResourceMonitor) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{15}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{14}
}
func (x *MemoryResourceMonitor) GetEnabled() bool {
@@ -1479,7 +1424,7 @@ type VolumeResourceMonitor struct {
func (x *VolumeResourceMonitor) Reset() {
*x = VolumeResourceMonitor{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[16]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[15]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1492,7 +1437,7 @@ func (x *VolumeResourceMonitor) String() string {
func (*VolumeResourceMonitor) ProtoMessage() {}
func (x *VolumeResourceMonitor) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[16]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[15]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1505,7 +1450,7 @@ func (x *VolumeResourceMonitor) ProtoReflect() protoreflect.Message {
// Deprecated: Use VolumeResourceMonitor.ProtoReflect.Descriptor instead.
func (*VolumeResourceMonitor) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{16}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{15}
}
func (x *VolumeResourceMonitor) GetPath() string {
@@ -1544,7 +1489,7 @@ type DisplayApps struct {
func (x *DisplayApps) Reset() {
*x = DisplayApps{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[17]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[16]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1557,7 +1502,7 @@ func (x *DisplayApps) String() string {
func (*DisplayApps) ProtoMessage() {}
func (x *DisplayApps) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[17]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[16]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1570,7 +1515,7 @@ func (x *DisplayApps) ProtoReflect() protoreflect.Message {
// Deprecated: Use DisplayApps.ProtoReflect.Descriptor instead.
func (*DisplayApps) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{17}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{16}
}
func (x *DisplayApps) GetVscode() bool {
@@ -1620,7 +1565,7 @@ type Env struct {
func (x *Env) Reset() {
*x = Env{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[18]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[17]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1633,7 +1578,7 @@ func (x *Env) String() string {
func (*Env) ProtoMessage() {}
func (x *Env) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[18]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[17]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1646,7 +1591,7 @@ func (x *Env) ProtoReflect() protoreflect.Message {
// Deprecated: Use Env.ProtoReflect.Descriptor instead.
func (*Env) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{18}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{17}
}
func (x *Env) GetName() string {
@@ -1683,7 +1628,7 @@ type Script struct {
func (x *Script) Reset() {
*x = Script{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[19]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[18]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1696,7 +1641,7 @@ func (x *Script) String() string {
func (*Script) ProtoMessage() {}
func (x *Script) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[19]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[18]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1709,7 +1654,7 @@ func (x *Script) ProtoReflect() protoreflect.Message {
// Deprecated: Use Script.ProtoReflect.Descriptor instead.
func (*Script) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{19}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{18}
}
func (x *Script) GetDisplayName() string {
@@ -1800,7 +1745,7 @@ type App struct {
func (x *App) Reset() {
*x = App{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[20]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[19]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1813,7 +1758,7 @@ func (x *App) String() string {
func (*App) ProtoMessage() {}
func (x *App) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[20]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[19]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1826,7 +1771,7 @@ func (x *App) ProtoReflect() protoreflect.Message {
// Deprecated: Use App.ProtoReflect.Descriptor instead.
func (*App) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{20}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{19}
}
func (x *App) GetSlug() string {
@@ -1927,7 +1872,7 @@ type Healthcheck struct {
func (x *Healthcheck) Reset() {
*x = Healthcheck{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[21]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[20]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -1940,7 +1885,7 @@ func (x *Healthcheck) String() string {
func (*Healthcheck) ProtoMessage() {}
func (x *Healthcheck) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[21]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[20]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -1953,7 +1898,7 @@ func (x *Healthcheck) ProtoReflect() protoreflect.Message {
// Deprecated: Use Healthcheck.ProtoReflect.Descriptor instead.
func (*Healthcheck) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{21}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{20}
}
func (x *Healthcheck) GetUrl() string {
@@ -1997,7 +1942,7 @@ type Resource struct {
func (x *Resource) Reset() {
*x = Resource{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[22]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[21]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -2010,7 +1955,7 @@ func (x *Resource) String() string {
func (*Resource) ProtoMessage() {}
func (x *Resource) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[22]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[21]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -2023,7 +1968,7 @@ func (x *Resource) ProtoReflect() protoreflect.Message {
// Deprecated: Use Resource.ProtoReflect.Descriptor instead.
func (*Resource) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{22}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{21}
}
func (x *Resource) GetName() string {
@@ -2102,7 +2047,7 @@ type Module struct {
func (x *Module) Reset() {
*x = Module{}
if protoimpl.UnsafeEnabled {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[23]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[22]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -2115,7 +2060,7 @@ func (x *Module) String() string {
func (*Module) ProtoMessage() {}
func (x *Module) ProtoReflect() protoreflect.Message {
- mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[23]
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[22]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -2128,7 +2073,7 @@ func (x *Module) ProtoReflect() protoreflect.Message {
// Deprecated: Use Module.ProtoReflect.Descriptor instead.
func (*Module) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{23}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{22}
}
func (x *Module) GetSource() string {
@@ -2152,6 +2097,61 @@ func (x *Module) GetKey() string {
return ""
}
+type Role struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ OrgId string `protobuf:"bytes,2,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+}
+
+func (x *Role) Reset() {
+ *x = Role{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[23]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *Role) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Role) ProtoMessage() {}
+
+func (x *Role) ProtoReflect() protoreflect.Message {
+ mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[23]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use Role.ProtoReflect.Descriptor instead.
+func (*Role) Descriptor() ([]byte, []int) {
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{23}
+}
+
+func (x *Role) GetName() string {
+ if x != nil {
+ return x.Name
+ }
+ return ""
+}
+
+func (x *Role) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
// Metadata is information about a workspace used in the execution of a build
type Metadata struct {
state protoimpl.MessageState
@@ -2176,8 +2176,7 @@ type Metadata struct {
WorkspaceOwnerSshPrivateKey string `protobuf:"bytes,16,opt,name=workspace_owner_ssh_private_key,json=workspaceOwnerSshPrivateKey,proto3" json:"workspace_owner_ssh_private_key,omitempty"`
WorkspaceBuildId string `protobuf:"bytes,17,opt,name=workspace_build_id,json=workspaceBuildId,proto3" json:"workspace_build_id,omitempty"`
WorkspaceOwnerLoginType string `protobuf:"bytes,18,opt,name=workspace_owner_login_type,json=workspaceOwnerLoginType,proto3" json:"workspace_owner_login_type,omitempty"`
- IsPrebuild bool `protobuf:"varint,19,opt,name=is_prebuild,json=isPrebuild,proto3" json:"is_prebuild,omitempty"`
- RunningWorkspaceAgentToken string `protobuf:"bytes,20,opt,name=running_workspace_agent_token,json=runningWorkspaceAgentToken,proto3" json:"running_workspace_agent_token,omitempty"`
+ WorkspaceOwnerRbacRoles []*Role `protobuf:"bytes,19,rep,name=workspace_owner_rbac_roles,json=workspaceOwnerRbacRoles,proto3" json:"workspace_owner_rbac_roles,omitempty"`
}
func (x *Metadata) Reset() {
@@ -2338,18 +2337,11 @@ func (x *Metadata) GetWorkspaceOwnerLoginType() string {
return ""
}
-func (x *Metadata) GetIsPrebuild() bool {
+func (x *Metadata) GetWorkspaceOwnerRbacRoles() []*Role {
if x != nil {
- return x.IsPrebuild
+ return x.WorkspaceOwnerRbacRoles
}
- return false
-}
-
-func (x *Metadata) GetRunningWorkspaceAgentToken() string {
- if x != nil {
- return x.RunningWorkspaceAgentToken
- }
- return ""
+ return nil
}
// Config represents execution configuration shared by all subsequent requests in the Session
@@ -3242,7 +3234,7 @@ func (x *Agent_Metadata) ProtoReflect() protoreflect.Message {
// Deprecated: Use Agent_Metadata.ProtoReflect.Descriptor instead.
func (*Agent_Metadata) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{13, 0}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{12, 0}
}
func (x *Agent_Metadata) GetKey() string {
@@ -3327,7 +3319,7 @@ func (x *Resource_Metadata) ProtoReflect() protoreflect.Message {
// Deprecated: Use Resource_Metadata.ProtoReflect.Descriptor instead.
func (*Resource_Metadata) Descriptor() ([]byte, []int) {
- return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{22, 0}
+ return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{21, 0}
}
func (x *Resource_Metadata) GetKey() string {
@@ -3430,460 +3422,456 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{
0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e,
0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12,
0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05,
- 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x28, 0x0a, 0x08, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c,
- 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x22,
- 0x8d, 0x01, 0x0a, 0x06, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61,
- 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3c,
- 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03,
- 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72,
- 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
- 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x31, 0x0a, 0x08,
- 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15,
- 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65,
- 0x62, 0x75, 0x69, 0x6c, 0x64, 0x52, 0x08, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x22,
- 0x3b, 0x0a, 0x0f, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74,
- 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18,
- 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x57, 0x0a, 0x0d,
- 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a,
- 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
- 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69,
- 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73,
- 0x69, 0x74, 0x69, 0x76, 0x65, 0x22, 0x4a, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2b, 0x0a, 0x05,
- 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76,
- 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74,
- 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75,
- 0x74, 0x22, 0x37, 0x0a, 0x14, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x65,
- 0x6e, 0x74, 0x69, 0x74, 0x79, 0x41, 0x75, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x73,
- 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a,
- 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x22, 0x4a, 0x0a, 0x1c, 0x45, 0x78,
- 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70,
- 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70,
- 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0x49, 0x0a, 0x14, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e,
- 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x0e,
- 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x21,
- 0x0a, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65,
- 0x6e, 0x22, 0xf5, 0x07, 0x0a, 0x05, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69,
- 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e,
- 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12,
- 0x2d, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70,
+ 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x5a, 0x0a, 0x06, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x12,
+ 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e,
+ 0x61, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
+ 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
+ 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61,
+ 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
+ 0x73, 0x22, 0x3b, 0x0a, 0x0f, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d,
+ 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75,
+ 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x57,
+ 0x0a, 0x0d, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12,
+ 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e,
+ 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e,
+ 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65,
+ 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x22, 0x4a, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2b,
+ 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c,
+ 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6f,
+ 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74,
+ 0x70, 0x75, 0x74, 0x22, 0x37, 0x0a, 0x14, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49,
+ 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x41, 0x75, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x69,
+ 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x22, 0x4a, 0x0a, 0x1c,
+ 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0e, 0x0a, 0x02,
+ 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08,
+ 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08,
+ 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0x49, 0x0a, 0x14, 0x45, 0x78, 0x74, 0x65,
+ 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64,
+ 0x12, 0x21, 0x0a, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e,
+ 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f,
+ 0x6b, 0x65, 0x6e, 0x22, 0xf5, 0x07, 0x0a, 0x05, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a,
+ 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a,
+ 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
+ 0x65, 0x12, 0x2d, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b,
+ 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65,
+ 0x6e, 0x74, 0x2e, 0x45, 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x65, 0x6e, 0x76,
+ 0x12, 0x29, 0x0a, 0x10, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x79,
+ 0x73, 0x74, 0x65, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6f, 0x70, 0x65, 0x72,
+ 0x61, 0x74, 0x69, 0x6e, 0x67, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x22, 0x0a, 0x0c, 0x61,
+ 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x12,
+ 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x24, 0x0a,
+ 0x04, 0x61, 0x70, 0x70, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x52, 0x04, 0x61,
+ 0x70, 0x70, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x09, 0x20, 0x01,
+ 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x21, 0x0a, 0x0b, 0x69,
+ 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09,
+ 0x48, 0x00, 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x12, 0x3c,
+ 0x0a, 0x1a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x69, 0x6d,
+ 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x0b, 0x20, 0x01,
+ 0x28, 0x05, 0x52, 0x18, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69,
+ 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x2f, 0x0a, 0x13,
+ 0x74, 0x72, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x68, 0x6f, 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x5f,
+ 0x75, 0x72, 0x6c, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x74, 0x72, 0x6f, 0x75, 0x62,
+ 0x6c, 0x65, 0x73, 0x68, 0x6f, 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x55, 0x72, 0x6c, 0x12, 0x1b, 0x0a,
+ 0x09, 0x6d, 0x6f, 0x74, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x08, 0x6d, 0x6f, 0x74, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65,
+ 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x12, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70,
0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74,
- 0x2e, 0x45, 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x12, 0x29,
- 0x0a, 0x10, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x79, 0x73, 0x74,
- 0x65, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74,
- 0x69, 0x6e, 0x67, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x22, 0x0a, 0x0c, 0x61, 0x72, 0x63,
- 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x12, 0x1c, 0x0a,
- 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x24, 0x0a, 0x04, 0x61,
- 0x70, 0x70, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76,
- 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x52, 0x04, 0x61, 0x70, 0x70,
- 0x73, 0x12, 0x16, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09,
- 0x48, 0x00, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x21, 0x0a, 0x0b, 0x69, 0x6e, 0x73,
- 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00,
- 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x1a,
- 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f,
- 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05,
- 0x52, 0x18, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65,
- 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x2f, 0x0a, 0x13, 0x74, 0x72,
- 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x68, 0x6f, 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x75, 0x72,
- 0x6c, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x74, 0x72, 0x6f, 0x75, 0x62, 0x6c, 0x65,
- 0x73, 0x68, 0x6f, 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x55, 0x72, 0x6c, 0x12, 0x1b, 0x0a, 0x09, 0x6d,
- 0x6f, 0x74, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
- 0x6d, 0x6f, 0x74, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61,
- 0x64, 0x61, 0x74, 0x61, 0x18, 0x12, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x4d,
- 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
- 0x61, 0x12, 0x3b, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x61, 0x70, 0x70,
- 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
- 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70,
- 0x73, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70, 0x73, 0x12, 0x2d,
- 0x0a, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32,
- 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x53, 0x63,
- 0x72, 0x69, 0x70, 0x74, 0x52, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x12, 0x2f, 0x0a,
- 0x0a, 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x65, 0x6e, 0x76, 0x73, 0x18, 0x16, 0x20, 0x03, 0x28,
- 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e,
- 0x45, 0x6e, 0x76, 0x52, 0x09, 0x65, 0x78, 0x74, 0x72, 0x61, 0x45, 0x6e, 0x76, 0x73, 0x12, 0x14,
- 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x17, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f,
- 0x72, 0x64, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x73, 0x5f, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x18, 0x20, 0x01,
- 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72,
- 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f,
- 0x72, 0x69, 0x6e, 0x67, 0x52, 0x13, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d,
- 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x1a, 0xa3, 0x01, 0x0a, 0x08, 0x4d, 0x65,
- 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70,
- 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b,
- 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73,
- 0x63, 0x72, 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x63, 0x72,
- 0x69, 0x70, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18,
- 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12,
- 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03,
- 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64,
- 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x1a,
- 0x36, 0x0a, 0x08, 0x45, 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b,
- 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a,
- 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61,
- 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x61, 0x75, 0x74, 0x68, 0x4a,
- 0x04, 0x08, 0x0e, 0x10, 0x0f, 0x52, 0x12, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x62, 0x65, 0x66,
- 0x6f, 0x72, 0x65, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x79, 0x22, 0x8f, 0x01, 0x0a, 0x13, 0x52, 0x65,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e,
- 0x67, 0x12, 0x3a, 0x0a, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e,
- 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f,
- 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x52, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x3c, 0x0a,
- 0x07, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22,
- 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x6f, 0x6c,
- 0x75, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e, 0x69, 0x74,
- 0x6f, 0x72, 0x52, 0x07, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x22, 0x4f, 0x0a, 0x15, 0x4d,
- 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e,
- 0x69, 0x74, 0x6f, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18,
- 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1c,
- 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28,
- 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x63, 0x0a, 0x15,
- 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f,
- 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61,
- 0x62, 0x6c, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62,
- 0x6c, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64,
- 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c,
- 0x64, 0x22, 0xc6, 0x01, 0x0a, 0x0b, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70,
- 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x08, 0x52, 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x76, 0x73, 0x63,
- 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x01,
- 0x28, 0x08, 0x52, 0x0e, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x73, 0x69, 0x64, 0x65,
- 0x72, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x65, 0x62, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e,
- 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x77, 0x65, 0x62, 0x54, 0x65, 0x72,
- 0x6d, 0x69, 0x6e, 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x73, 0x68, 0x5f, 0x68, 0x65, 0x6c,
- 0x70, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x73, 0x68, 0x48, 0x65,
- 0x6c, 0x70, 0x65, 0x72, 0x12, 0x34, 0x0a, 0x16, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x66, 0x6f, 0x72,
- 0x77, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x18, 0x05,
- 0x20, 0x01, 0x28, 0x08, 0x52, 0x14, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x6f, 0x72, 0x77, 0x61, 0x72,
- 0x64, 0x69, 0x6e, 0x67, 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x22, 0x2f, 0x0a, 0x03, 0x45, 0x6e,
- 0x76, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x9f, 0x02, 0x0a, 0x06,
- 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61,
- 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69,
- 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f,
- 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x16, 0x0a,
+ 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64,
+ 0x61, 0x74, 0x61, 0x12, 0x3b, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x61,
+ 0x70, 0x70, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41,
+ 0x70, 0x70, 0x73, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70, 0x73,
+ 0x12, 0x2d, 0x0a, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28,
+ 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e,
+ 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x52, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x12,
+ 0x2f, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x65, 0x6e, 0x76, 0x73, 0x18, 0x16, 0x20,
+ 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
+ 0x72, 0x2e, 0x45, 0x6e, 0x76, 0x52, 0x09, 0x65, 0x78, 0x74, 0x72, 0x61, 0x45, 0x6e, 0x76, 0x73,
+ 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x17, 0x20, 0x01, 0x28, 0x03, 0x52,
+ 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x73, 0x5f, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x18,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
+ 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69,
+ 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x13, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x1a, 0xa3, 0x01, 0x0a, 0x08,
+ 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69,
+ 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a,
0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73,
- 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18, 0x04, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x12, 0x2c, 0x0a, 0x12, 0x73, 0x74, 0x61,
- 0x72, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x5f, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x18,
- 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x73, 0x74, 0x61, 0x72, 0x74, 0x42, 0x6c, 0x6f, 0x63,
- 0x6b, 0x73, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0c, 0x72, 0x75, 0x6e, 0x5f, 0x6f,
- 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x72,
- 0x75, 0x6e, 0x4f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x72, 0x75, 0x6e,
- 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x6f, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09,
- 0x72, 0x75, 0x6e, 0x4f, 0x6e, 0x53, 0x74, 0x6f, 0x70, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x69, 0x6d,
- 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x08, 0x20, 0x01,
- 0x28, 0x05, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e,
- 0x64, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x09,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6c, 0x6f, 0x67, 0x50, 0x61, 0x74, 0x68, 0x22, 0x94, 0x03,
- 0x0a, 0x03, 0x41, 0x70, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73,
- 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07,
- 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63,
- 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e,
- 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09,
- 0x73, 0x75, 0x62, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52,
- 0x09, 0x73, 0x75, 0x62, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x3a, 0x0a, 0x0b, 0x68, 0x65,
- 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x48, 0x65,
- 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x0b, 0x68, 0x65, 0x61, 0x6c, 0x74,
- 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x41, 0x0a, 0x0d, 0x73, 0x68, 0x61, 0x72, 0x69, 0x6e,
- 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e,
- 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x53,
- 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x0c, 0x73, 0x68, 0x61,
- 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x74,
- 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x65, 0x78, 0x74,
- 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x0a,
- 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x68,
- 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x68, 0x69, 0x64,
- 0x64, 0x65, 0x6e, 0x12, 0x2f, 0x0a, 0x07, 0x6f, 0x70, 0x65, 0x6e, 0x5f, 0x69, 0x6e, 0x18, 0x0c,
- 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
- 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x52, 0x06, 0x6f, 0x70,
- 0x65, 0x6e, 0x49, 0x6e, 0x22, 0x59, 0x0a, 0x0b, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68,
- 0x65, 0x63, 0x6b, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61,
- 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61,
- 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x03,
- 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22,
- 0x92, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04,
+ 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61,
+ 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61,
+ 0x6c, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01,
+ 0x28, 0x03, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6f,
+ 0x72, 0x64, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65,
+ 0x72, 0x1a, 0x36, 0x0a, 0x08, 0x45, 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
+ 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
+ 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05,
+ 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x61, 0x75, 0x74,
+ 0x68, 0x4a, 0x04, 0x08, 0x0e, 0x10, 0x0f, 0x52, 0x12, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x62,
+ 0x65, 0x66, 0x6f, 0x72, 0x65, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x79, 0x22, 0x8f, 0x01, 0x0a, 0x13,
+ 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72,
+ 0x69, 0x6e, 0x67, 0x12, 0x3a, 0x0a, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
+ 0x72, 0x2e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x52, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12,
+ 0x3c, 0x0a, 0x07, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b,
+ 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56,
+ 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e,
+ 0x69, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x22, 0x4f, 0x0a,
+ 0x15, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d,
+ 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65,
+ 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64,
+ 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x63,
+ 0x0a, 0x15, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65,
+ 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e,
+ 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f,
+ 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68,
+ 0x6f, 0x6c, 0x64, 0x22, 0xc6, 0x01, 0x0a, 0x0b, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41,
+ 0x70, 0x70, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x08, 0x52, 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x76,
+ 0x73, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x73, 0x69,
+ 0x64, 0x65, 0x72, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x65, 0x62, 0x5f, 0x74, 0x65, 0x72, 0x6d,
+ 0x69, 0x6e, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x77, 0x65, 0x62, 0x54,
+ 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x73, 0x68, 0x5f, 0x68,
+ 0x65, 0x6c, 0x70, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x73, 0x68,
+ 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x12, 0x34, 0x0a, 0x16, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x66,
+ 0x6f, 0x72, 0x77, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x65, 0x72,
+ 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x14, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x6f, 0x72, 0x77,
+ 0x61, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x22, 0x2f, 0x0a, 0x03,
+ 0x45, 0x6e, 0x76, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
+ 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x9f, 0x02,
+ 0x0a, 0x06, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70,
+ 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b,
+ 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69,
+ 0x63, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12,
+ 0x16, 0x0a, 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18,
+ 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x12, 0x2c, 0x0a, 0x12, 0x73,
+ 0x74, 0x61, 0x72, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x5f, 0x6c, 0x6f, 0x67, 0x69,
+ 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x73, 0x74, 0x61, 0x72, 0x74, 0x42, 0x6c,
+ 0x6f, 0x63, 0x6b, 0x73, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0c, 0x72, 0x75, 0x6e,
+ 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52,
+ 0x0a, 0x72, 0x75, 0x6e, 0x4f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x72,
+ 0x75, 0x6e, 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x6f, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08,
+ 0x52, 0x09, 0x72, 0x75, 0x6e, 0x4f, 0x6e, 0x53, 0x74, 0x6f, 0x70, 0x12, 0x27, 0x0a, 0x0f, 0x74,
+ 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x08,
+ 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63,
+ 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68,
+ 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6c, 0x6f, 0x67, 0x50, 0x61, 0x74, 0x68, 0x22,
+ 0x94, 0x03, 0x0a, 0x03, 0x41, 0x70, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x64,
+ 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18,
+ 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18,
+ 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63,
+ 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x1c,
+ 0x0a, 0x09, 0x73, 0x75, 0x62, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28,
+ 0x08, 0x52, 0x09, 0x73, 0x75, 0x62, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x3a, 0x0a, 0x0b,
+ 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e,
+ 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x0b, 0x68, 0x65, 0x61,
+ 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x41, 0x0a, 0x0d, 0x73, 0x68, 0x61, 0x72,
+ 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32,
+ 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70,
+ 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x0c, 0x73,
+ 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x65,
+ 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x65,
+ 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72,
+ 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x16, 0x0a,
+ 0x06, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x68,
+ 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, 0x2f, 0x0a, 0x07, 0x6f, 0x70, 0x65, 0x6e, 0x5f, 0x69, 0x6e,
+ 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69,
+ 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x52, 0x06,
+ 0x6f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x22, 0x59, 0x0a, 0x0b, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68,
+ 0x63, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72,
+ 0x76, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72,
+ 0x76, 0x61, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64,
+ 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c,
+ 0x64, 0x22, 0x92, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12,
+ 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
+ 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73,
+ 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69,
+ 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e,
+ 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04,
+ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
+ 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x61,
+ 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12,
+ 0x0a, 0x04, 0x68, 0x69, 0x64, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x04, 0x68, 0x69,
+ 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e,
+ 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x69,
+ 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x64,
+ 0x61, 0x69, 0x6c, 0x79, 0x5f, 0x63, 0x6f, 0x73, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x52,
+ 0x09, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x43, 0x6f, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x6f,
+ 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x0a, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x1a, 0x69, 0x0a, 0x08, 0x4d,
+ 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c,
+ 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12,
+ 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01,
+ 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x17, 0x0a,
+ 0x07, 0x69, 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06,
+ 0x69, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x4c, 0x0a, 0x06, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65,
+ 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73,
+ 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69,
+ 0x6f, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x03, 0x6b, 0x65, 0x79, 0x22, 0x31, 0x0a, 0x04, 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65,
- 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
- 0x74, 0x79, 0x70, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
- 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73,
- 0x12, 0x3a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x03,
- 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72,
- 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
- 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04,
- 0x68, 0x69, 0x64, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x04, 0x68, 0x69, 0x64, 0x65,
- 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
- 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65,
- 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x69, 0x6e, 0x73,
- 0x74, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x61, 0x69,
- 0x6c, 0x79, 0x5f, 0x63, 0x6f, 0x73, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x64,
- 0x61, 0x69, 0x6c, 0x79, 0x43, 0x6f, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x6f, 0x64, 0x75,
- 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d,
- 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x1a, 0x69, 0x0a, 0x08, 0x4d, 0x65, 0x74,
- 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, 0x0a,
- 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08,
- 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x69,
- 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x69, 0x73,
- 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x4c, 0x0a, 0x06, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x16,
- 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f,
- 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
- 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b,
- 0x65, 0x79, 0x22, 0x90, 0x08, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12,
- 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x53, 0x0a, 0x14,
- 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x69,
- 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
- 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x77, 0x6f,
+ 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49, 0x64, 0x22, 0xfc, 0x07, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61,
+ 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72,
+ 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72,
+ 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74,
+ 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32,
+ 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x57, 0x6f,
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f,
- 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6e,
- 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73,
- 0x70, 0x61, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b,
- 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65,
- 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x69,
- 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
- 0x63, 0x65, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
- 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72,
- 0x49, 0x64, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f,
- 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65,
- 0x72, 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61,
- 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74,
- 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x74,
- 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18,
- 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56,
- 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x21, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
- 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6f, 0x69, 0x64, 0x63, 0x5f, 0x61,
- 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65,
- 0x72, 0x4f, 0x69, 0x64, 0x63, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e,
- 0x12, 0x41, 0x0a, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77,
- 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, 0x65,
- 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
- 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x6f,
- 0x6b, 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f,
- 0x69, 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61,
- 0x74, 0x65, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
- 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e,
- 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
- 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73,
- 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
- 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x12, 0x42, 0x0a, 0x1e,
- 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f,
- 0x73, 0x73, 0x68, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x0f,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f,
- 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79,
- 0x12, 0x44, 0x0a, 0x1f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77,
- 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x5f,
- 0x6b, 0x65, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x73,
- 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x72, 0x69, 0x76,
- 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
- 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69,
- 0x6c, 0x64, 0x49, 0x64, 0x12, 0x3b, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
- 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x79,
- 0x70, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
- 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x54, 0x79, 0x70,
- 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x64,
- 0x18, 0x13, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x73, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69,
- 0x6c, 0x64, 0x12, 0x41, 0x0a, 0x1d, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x77, 0x6f,
- 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x6f,
- 0x6b, 0x65, 0x6e, 0x18, 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x72, 0x75, 0x6e, 0x6e, 0x69,
- 0x6e, 0x67, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x67, 0x65, 0x6e, 0x74,
- 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
- 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74,
- 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x32,
- 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f,
- 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70,
- 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76,
- 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70,
- 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65,
- 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73,
- 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69,
- 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72,
- 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56,
- 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64,
- 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65,
- 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61,
- 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70,
- 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61,
- 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
- 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
- 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03,
- 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14,
- 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76,
- 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xb5, 0x02, 0x0a, 0x0b, 0x50, 0x6c, 0x61,
- 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61,
- 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
- 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72,
- 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61,
- 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72,
- 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63,
- 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73,
- 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c,
- 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76,
- 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,
- 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56,
- 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61,
- 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73,
- 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69,
- 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74,
- 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72,
- 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73,
- 0x22, 0x85, 0x03, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74,
- 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a,
- 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b,
- 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52,
- 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61,
- 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65,
- 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76,
- 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c,
- 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75,
- 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74,
- 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70,
- 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e,
- 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f,
- 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65,
- 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65,
- 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x52,
- 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x22, 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70, 0x6c,
- 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61,
- 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
- 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xbe, 0x02, 0x0a, 0x0d,
- 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a,
- 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74,
- 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70,
- 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a,
- 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03,
- 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72,
- 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a,
- 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78,
- 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e,
- 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c,
- 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a,
- 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13,
- 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d,
- 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x22, 0xfa, 0x01, 0x0a,
- 0x06, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
- 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61,
- 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x03, 0x65, 0x6e, 0x64,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
- 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61,
- 0x6d, 0x70, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f,
- 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12,
- 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x06, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x74, 0x61,
- 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61,
- 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e,
- 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69,
- 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, 0x63,
- 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
- 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48,
- 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e,
- 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69,
- 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
- 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c,
- 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
- 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, 0x63,
- 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c,
- 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65,
- 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xd1, 0x01, 0x0a, 0x08, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
- 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, 0x05,
- 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43,
- 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65,
- 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19,
- 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61,
- 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61,
- 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b,
- 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41,
- 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05,
- 0x61, 0x70, 0x70, 0x6c, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x3f, 0x0a,
- 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41,
- 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12,
- 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52,
- 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b,
- 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65,
- 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d,
- 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12,
- 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41,
- 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44,
- 0x4f, 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d,
- 0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42,
- 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54,
- 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41,
- 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b,
- 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54,
- 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54,
- 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c,
- 0x45, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44,
- 0x10, 0x02, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
- 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70,
- 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72,
- 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a,
- 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65,
- 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
- 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+ 0x6e, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e,
+ 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
+ 0x61, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d,
+ 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a,
+ 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72,
+ 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
+ 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
+ 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x77, 0x6f,
+ 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72,
+ 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18,
+ 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
+ 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x73,
+ 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c,
+ 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
+ 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x74,
+ 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65,
+ 0x12, 0x29, 0x0a, 0x10, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72,
+ 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x65, 0x6d, 0x70,
+ 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x21, 0x77,
+ 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6f,
+ 0x69, 0x64, 0x63, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e,
+ 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
+ 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4f, 0x69, 0x64, 0x63, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73,
+ 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x41, 0x0a, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
+ 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e,
+ 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f,
+ 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x65, 0x73, 0x73,
+ 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70,
+ 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74,
+ 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72,
+ 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d,
+ 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
+ 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77,
+ 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67,
+ 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72,
+ 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70,
+ 0x73, 0x12, 0x42, 0x0a, 0x1e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f,
+ 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f,
+ 0x6b, 0x65, 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73,
+ 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c,
+ 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x44, 0x0a, 0x1f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
+ 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69,
+ 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b,
+ 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73,
+ 0x68, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x77,
+ 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69,
+ 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
+ 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64, 0x12, 0x3b, 0x0a, 0x1a, 0x77, 0x6f, 0x72,
+ 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67,
+ 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77,
+ 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67,
+ 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x4e, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
+ 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x72, 0x62, 0x61, 0x63, 0x5f, 0x72,
+ 0x6f, 0x6c, 0x65, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f,
+ 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x6f, 0x6c, 0x65, 0x52, 0x17, 0x77,
+ 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x52, 0x62, 0x61,
+ 0x63, 0x52, 0x6f, 0x6c, 0x65, 0x73, 0x22, 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61,
+ 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12,
+ 0x32, 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c,
+ 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65,
+ 0x76, 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75,
+ 0x65, 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d,
+ 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74,
+ 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,
+ 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
+ 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61,
+ 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
+ 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61,
+ 0x64, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d,
+ 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74,
+ 0x61, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d,
+ 0x70, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54,
+ 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
+ 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73,
+ 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
+ 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
+ 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05,
+ 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xb5, 0x02, 0x0a, 0x0b, 0x50, 0x6c,
+ 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74,
+ 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
+ 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15,
+ 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76,
+ 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61,
+ 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69,
+ 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65,
+ 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61,
+ 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f,
+ 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c,
+ 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,
+ 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e,
+ 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
+ 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75,
+ 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65,
+ 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x73, 0x22, 0x85, 0x03, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65,
+ 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a,
+ 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28,
+ 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e,
+ 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70,
+ 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74,
+ 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69,
+ 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f,
+ 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61,
+ 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41,
+ 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07,
+ 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69,
+ 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d,
+ 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70,
+ 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c,
+ 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, 0x72,
+ 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74,
+ 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x22, 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70,
+ 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74,
+ 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
+ 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xbe, 0x02, 0x0a,
+ 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14,
+ 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12,
+ 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20,
+ 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
+ 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52,
+ 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65,
+ 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f,
+ 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70,
+ 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72,
+ 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61,
+ 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d,
+ 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69,
+ 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x22, 0xfa, 0x01,
+ 0x0a, 0x06, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72,
+ 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
+ 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74,
+ 0x61, 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x03, 0x65, 0x6e,
+ 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
+ 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74,
+ 0x61, 0x6d, 0x70, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e,
+ 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x06, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x74,
+ 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74,
+ 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61,
+ 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
+ 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06,
+ 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f,
+ 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61,
+ 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73,
+ 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
+ 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70,
+ 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69,
+ 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75,
+ 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06,
+ 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70,
+ 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65,
+ 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63,
+ 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xd1, 0x01, 0x0a, 0x08, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
+ 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a,
+ 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70,
+ 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65,
+ 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73,
+ 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c,
+ 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c,
+ 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e,
+ 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52,
+ 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x3f,
+ 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52,
+ 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01,
+ 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41,
+ 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a,
+ 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76,
+ 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a,
+ 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01,
+ 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09,
+ 0x41, 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e,
+ 0x44, 0x4f, 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49,
+ 0x4d, 0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41,
+ 0x42, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
+ 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54,
+ 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12,
+ 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b,
+ 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53,
+ 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50,
+ 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45,
+ 0x44, 0x10, 0x02, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
+ 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75,
+ 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
+ 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30,
+ 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64,
+ 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
+ 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -3911,25 +3899,25 @@ var file_provisionersdk_proto_provisioner_proto_goTypes = []interface{}{
(*RichParameterOption)(nil), // 7: provisioner.RichParameterOption
(*RichParameter)(nil), // 8: provisioner.RichParameter
(*RichParameterValue)(nil), // 9: provisioner.RichParameterValue
- (*Prebuild)(nil), // 10: provisioner.Prebuild
- (*Preset)(nil), // 11: provisioner.Preset
- (*PresetParameter)(nil), // 12: provisioner.PresetParameter
- (*VariableValue)(nil), // 13: provisioner.VariableValue
- (*Log)(nil), // 14: provisioner.Log
- (*InstanceIdentityAuth)(nil), // 15: provisioner.InstanceIdentityAuth
- (*ExternalAuthProviderResource)(nil), // 16: provisioner.ExternalAuthProviderResource
- (*ExternalAuthProvider)(nil), // 17: provisioner.ExternalAuthProvider
- (*Agent)(nil), // 18: provisioner.Agent
- (*ResourcesMonitoring)(nil), // 19: provisioner.ResourcesMonitoring
- (*MemoryResourceMonitor)(nil), // 20: provisioner.MemoryResourceMonitor
- (*VolumeResourceMonitor)(nil), // 21: provisioner.VolumeResourceMonitor
- (*DisplayApps)(nil), // 22: provisioner.DisplayApps
- (*Env)(nil), // 23: provisioner.Env
- (*Script)(nil), // 24: provisioner.Script
- (*App)(nil), // 25: provisioner.App
- (*Healthcheck)(nil), // 26: provisioner.Healthcheck
- (*Resource)(nil), // 27: provisioner.Resource
- (*Module)(nil), // 28: provisioner.Module
+ (*Preset)(nil), // 10: provisioner.Preset
+ (*PresetParameter)(nil), // 11: provisioner.PresetParameter
+ (*VariableValue)(nil), // 12: provisioner.VariableValue
+ (*Log)(nil), // 13: provisioner.Log
+ (*InstanceIdentityAuth)(nil), // 14: provisioner.InstanceIdentityAuth
+ (*ExternalAuthProviderResource)(nil), // 15: provisioner.ExternalAuthProviderResource
+ (*ExternalAuthProvider)(nil), // 16: provisioner.ExternalAuthProvider
+ (*Agent)(nil), // 17: provisioner.Agent
+ (*ResourcesMonitoring)(nil), // 18: provisioner.ResourcesMonitoring
+ (*MemoryResourceMonitor)(nil), // 19: provisioner.MemoryResourceMonitor
+ (*VolumeResourceMonitor)(nil), // 20: provisioner.VolumeResourceMonitor
+ (*DisplayApps)(nil), // 21: provisioner.DisplayApps
+ (*Env)(nil), // 22: provisioner.Env
+ (*Script)(nil), // 23: provisioner.Script
+ (*App)(nil), // 24: provisioner.App
+ (*Healthcheck)(nil), // 25: provisioner.Healthcheck
+ (*Resource)(nil), // 26: provisioner.Resource
+ (*Module)(nil), // 27: provisioner.Module
+ (*Role)(nil), // 28: provisioner.Role
(*Metadata)(nil), // 29: provisioner.Metadata
(*Config)(nil), // 30: provisioner.Config
(*ParseRequest)(nil), // 31: provisioner.ParseRequest
@@ -3950,40 +3938,40 @@ var file_provisionersdk_proto_provisioner_proto_goTypes = []interface{}{
}
var file_provisionersdk_proto_provisioner_proto_depIdxs = []int32{
7, // 0: provisioner.RichParameter.options:type_name -> provisioner.RichParameterOption
- 12, // 1: provisioner.Preset.parameters:type_name -> provisioner.PresetParameter
- 10, // 2: provisioner.Preset.prebuild:type_name -> provisioner.Prebuild
- 0, // 3: provisioner.Log.level:type_name -> provisioner.LogLevel
- 42, // 4: provisioner.Agent.env:type_name -> provisioner.Agent.EnvEntry
- 25, // 5: provisioner.Agent.apps:type_name -> provisioner.App
- 41, // 6: provisioner.Agent.metadata:type_name -> provisioner.Agent.Metadata
- 22, // 7: provisioner.Agent.display_apps:type_name -> provisioner.DisplayApps
- 24, // 8: provisioner.Agent.scripts:type_name -> provisioner.Script
- 23, // 9: provisioner.Agent.extra_envs:type_name -> provisioner.Env
- 19, // 10: provisioner.Agent.resources_monitoring:type_name -> provisioner.ResourcesMonitoring
- 20, // 11: provisioner.ResourcesMonitoring.memory:type_name -> provisioner.MemoryResourceMonitor
- 21, // 12: provisioner.ResourcesMonitoring.volumes:type_name -> provisioner.VolumeResourceMonitor
- 26, // 13: provisioner.App.healthcheck:type_name -> provisioner.Healthcheck
- 1, // 14: provisioner.App.sharing_level:type_name -> provisioner.AppSharingLevel
- 2, // 15: provisioner.App.open_in:type_name -> provisioner.AppOpenIn
- 18, // 16: provisioner.Resource.agents:type_name -> provisioner.Agent
- 43, // 17: provisioner.Resource.metadata:type_name -> provisioner.Resource.Metadata
- 3, // 18: provisioner.Metadata.workspace_transition:type_name -> provisioner.WorkspaceTransition
+ 11, // 1: provisioner.Preset.parameters:type_name -> provisioner.PresetParameter
+ 0, // 2: provisioner.Log.level:type_name -> provisioner.LogLevel
+ 42, // 3: provisioner.Agent.env:type_name -> provisioner.Agent.EnvEntry
+ 24, // 4: provisioner.Agent.apps:type_name -> provisioner.App
+ 41, // 5: provisioner.Agent.metadata:type_name -> provisioner.Agent.Metadata
+ 21, // 6: provisioner.Agent.display_apps:type_name -> provisioner.DisplayApps
+ 23, // 7: provisioner.Agent.scripts:type_name -> provisioner.Script
+ 22, // 8: provisioner.Agent.extra_envs:type_name -> provisioner.Env
+ 18, // 9: provisioner.Agent.resources_monitoring:type_name -> provisioner.ResourcesMonitoring
+ 19, // 10: provisioner.ResourcesMonitoring.memory:type_name -> provisioner.MemoryResourceMonitor
+ 20, // 11: provisioner.ResourcesMonitoring.volumes:type_name -> provisioner.VolumeResourceMonitor
+ 25, // 12: provisioner.App.healthcheck:type_name -> provisioner.Healthcheck
+ 1, // 13: provisioner.App.sharing_level:type_name -> provisioner.AppSharingLevel
+ 2, // 14: provisioner.App.open_in:type_name -> provisioner.AppOpenIn
+ 17, // 15: provisioner.Resource.agents:type_name -> provisioner.Agent
+ 43, // 16: provisioner.Resource.metadata:type_name -> provisioner.Resource.Metadata
+ 3, // 17: provisioner.Metadata.workspace_transition:type_name -> provisioner.WorkspaceTransition
+ 28, // 18: provisioner.Metadata.workspace_owner_rbac_roles:type_name -> provisioner.Role
6, // 19: provisioner.ParseComplete.template_variables:type_name -> provisioner.TemplateVariable
44, // 20: provisioner.ParseComplete.workspace_tags:type_name -> provisioner.ParseComplete.WorkspaceTagsEntry
29, // 21: provisioner.PlanRequest.metadata:type_name -> provisioner.Metadata
9, // 22: provisioner.PlanRequest.rich_parameter_values:type_name -> provisioner.RichParameterValue
- 13, // 23: provisioner.PlanRequest.variable_values:type_name -> provisioner.VariableValue
- 17, // 24: provisioner.PlanRequest.external_auth_providers:type_name -> provisioner.ExternalAuthProvider
- 27, // 25: provisioner.PlanComplete.resources:type_name -> provisioner.Resource
+ 12, // 23: provisioner.PlanRequest.variable_values:type_name -> provisioner.VariableValue
+ 16, // 24: provisioner.PlanRequest.external_auth_providers:type_name -> provisioner.ExternalAuthProvider
+ 26, // 25: provisioner.PlanComplete.resources:type_name -> provisioner.Resource
8, // 26: provisioner.PlanComplete.parameters:type_name -> provisioner.RichParameter
- 16, // 27: provisioner.PlanComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource
+ 15, // 27: provisioner.PlanComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource
37, // 28: provisioner.PlanComplete.timings:type_name -> provisioner.Timing
- 28, // 29: provisioner.PlanComplete.modules:type_name -> provisioner.Module
- 11, // 30: provisioner.PlanComplete.presets:type_name -> provisioner.Preset
+ 27, // 29: provisioner.PlanComplete.modules:type_name -> provisioner.Module
+ 10, // 30: provisioner.PlanComplete.presets:type_name -> provisioner.Preset
29, // 31: provisioner.ApplyRequest.metadata:type_name -> provisioner.Metadata
- 27, // 32: provisioner.ApplyComplete.resources:type_name -> provisioner.Resource
+ 26, // 32: provisioner.ApplyComplete.resources:type_name -> provisioner.Resource
8, // 33: provisioner.ApplyComplete.parameters:type_name -> provisioner.RichParameter
- 16, // 34: provisioner.ApplyComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource
+ 15, // 34: provisioner.ApplyComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource
37, // 35: provisioner.ApplyComplete.timings:type_name -> provisioner.Timing
45, // 36: provisioner.Timing.start:type_name -> google.protobuf.Timestamp
45, // 37: provisioner.Timing.end:type_name -> google.protobuf.Timestamp
@@ -3993,7 +3981,7 @@ var file_provisionersdk_proto_provisioner_proto_depIdxs = []int32{
33, // 41: provisioner.Request.plan:type_name -> provisioner.PlanRequest
35, // 42: provisioner.Request.apply:type_name -> provisioner.ApplyRequest
38, // 43: provisioner.Request.cancel:type_name -> provisioner.CancelRequest
- 14, // 44: provisioner.Response.log:type_name -> provisioner.Log
+ 13, // 44: provisioner.Response.log:type_name -> provisioner.Log
32, // 45: provisioner.Response.parse:type_name -> provisioner.ParseComplete
34, // 46: provisioner.Response.plan:type_name -> provisioner.PlanComplete
36, // 47: provisioner.Response.apply:type_name -> provisioner.ApplyComplete
@@ -4073,18 +4061,6 @@ func file_provisionersdk_proto_provisioner_proto_init() {
}
}
file_provisionersdk_proto_provisioner_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Prebuild); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_provisionersdk_proto_provisioner_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Preset); i {
case 0:
return &v.state
@@ -4096,7 +4072,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PresetParameter); i {
case 0:
return &v.state
@@ -4108,7 +4084,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*VariableValue); i {
case 0:
return &v.state
@@ -4120,7 +4096,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Log); i {
case 0:
return &v.state
@@ -4132,7 +4108,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InstanceIdentityAuth); i {
case 0:
return &v.state
@@ -4144,7 +4120,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExternalAuthProviderResource); i {
case 0:
return &v.state
@@ -4156,7 +4132,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExternalAuthProvider); i {
case 0:
return &v.state
@@ -4168,7 +4144,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Agent); i {
case 0:
return &v.state
@@ -4180,7 +4156,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ResourcesMonitoring); i {
case 0:
return &v.state
@@ -4192,7 +4168,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MemoryResourceMonitor); i {
case 0:
return &v.state
@@ -4204,7 +4180,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*VolumeResourceMonitor); i {
case 0:
return &v.state
@@ -4216,7 +4192,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DisplayApps); i {
case 0:
return &v.state
@@ -4228,7 +4204,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Env); i {
case 0:
return &v.state
@@ -4240,7 +4216,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Script); i {
case 0:
return &v.state
@@ -4252,7 +4228,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*App); i {
case 0:
return &v.state
@@ -4264,7 +4240,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Healthcheck); i {
case 0:
return &v.state
@@ -4276,7 +4252,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Resource); i {
case 0:
return &v.state
@@ -4288,7 +4264,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
- file_provisionersdk_proto_provisioner_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} {
+ file_provisionersdk_proto_provisioner_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Module); i {
case 0:
return &v.state
@@ -4300,6 +4276,18 @@ func file_provisionersdk_proto_provisioner_proto_init() {
return nil
}
}
+ file_provisionersdk_proto_provisioner_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*Role); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
file_provisionersdk_proto_provisioner_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Metadata); i {
case 0:
@@ -4470,7 +4458,7 @@ func file_provisionersdk_proto_provisioner_proto_init() {
}
}
file_provisionersdk_proto_provisioner_proto_msgTypes[3].OneofWrappers = []interface{}{}
- file_provisionersdk_proto_provisioner_proto_msgTypes[13].OneofWrappers = []interface{}{
+ file_provisionersdk_proto_provisioner_proto_msgTypes[12].OneofWrappers = []interface{}{
(*Agent_Token)(nil),
(*Agent_InstanceId)(nil),
}
diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto
index 1db08ef835..b3edb3d0d0 100644
--- a/provisionersdk/proto/provisioner.proto
+++ b/provisionersdk/proto/provisioner.proto
@@ -260,6 +260,11 @@ enum WorkspaceTransition {
DESTROY = 2;
}
+message Role {
+ string name = 1;
+ string org_id = 2;
+}
+
// Metadata is information about a workspace used in the execution of a build
message Metadata {
string coder_url = 1;
@@ -280,8 +285,9 @@ message Metadata {
string workspace_owner_ssh_private_key = 16;
string workspace_build_id = 17;
string workspace_owner_login_type = 18;
- bool is_prebuild = 19;
- string running_workspace_agent_token = 20;
+ repeated Role workspace_owner_rbac_roles = 19;
+ bool is_prebuild = 20;
+ string running_workspace_agent_token = 21;
}
// Config represents execution configuration shared by all subsequent requests in the Session
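
The new Role message and the workspace_owner_rbac_roles field change what provisioners receive about the workspace owner, and the renumbering means is_prebuild and running_workspace_agent_token move to field numbers 20 and 21, so both sides of the protocol have to be built from the same proto revision. As a rough, hedged sketch only (the import path and Go field names assume the usual protoc-gen-go conventions for this package; the role names and org ID are placeholders, not values from this patch), populating the new field might look like:

```go
package main

import (
	"fmt"

	"github.com/coder/coder/v2/provisionersdk/proto"
)

func main() {
	// Placeholder role names and org ID; real values would come from
	// coderd's RBAC data for the workspace owner.
	meta := &proto.Metadata{
		WorkspaceOwnerRbacRoles: []*proto.Role{
			{Name: "owner"}, // site-wide role: org_id left empty
			{Name: "organization-admin", OrgId: "00000000-0000-0000-0000-000000000000"},
		},
	}
	fmt.Println(len(meta.GetWorkspaceOwnerRbacRoles())) // 2
}
```
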
diff --git a/pty/ptytest/ptytest.go b/pty/ptytest/ptytest.go
index a871a0ddca..3c86970ec0 100644
--- a/pty/ptytest/ptytest.go
+++ b/pty/ptytest/ptytest.go
@@ -8,6 +8,7 @@ import (
"io"
"regexp"
"runtime"
+ "slices"
"strings"
"sync"
"testing"
@@ -16,7 +17,6 @@ import (
"github.com/acarl005/stripansi"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/pty"
diff --git a/scaletest/workspacetraffic/run_test.go b/scaletest/workspacetraffic/run_test.go
index 980e0d62ed..fe3fd389df 100644
--- a/scaletest/workspacetraffic/run_test.go
+++ b/scaletest/workspacetraffic/run_test.go
@@ -7,6 +7,7 @@ import (
"net/http"
"net/http/httptest"
"runtime"
+ "slices"
"strings"
"sync"
"testing"
@@ -15,7 +16,6 @@ import (
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "golang.org/x/exp/slices"
"github.com/coder/coder/v2/agent/agenttest"
"github.com/coder/coder/v2/coderd/coderdtest"
diff --git a/scripts/build_go.sh b/scripts/build_go.sh
index 91fc3a1e4b..3e23e15d8b 100755
--- a/scripts/build_go.sh
+++ b/scripts/build_go.sh
@@ -36,17 +36,19 @@ source "$(dirname "${BASH_SOURCE[0]}")/lib.sh"
version=""
os="${GOOS:-linux}"
arch="${GOARCH:-amd64}"
+output_path=""
slim="${CODER_SLIM_BUILD:-0}"
+agpl="${CODER_BUILD_AGPL:-0}"
sign_darwin="${CODER_SIGN_DARWIN:-0}"
sign_windows="${CODER_SIGN_WINDOWS:-0}"
-bin_ident="com.coder.cli"
-output_path=""
-agpl="${CODER_BUILD_AGPL:-0}"
boringcrypto=${CODER_BUILD_BORINGCRYPTO:-0}
-debug=0
dylib=0
+windows_resources="${CODER_WINDOWS_RESOURCES:-0}"
+debug=0
-args="$(getopt -o "" -l version:,os:,arch:,output:,slim,agpl,sign-darwin,boringcrypto,dylib,debug -- "$@")"
+bin_ident="com.coder.cli"
+
+args="$(getopt -o "" -l version:,os:,arch:,output:,slim,agpl,sign-darwin,sign-windows,boringcrypto,dylib,windows-resources,debug -- "$@")"
eval set -- "$args"
while true; do
case "$1" in
@@ -79,6 +81,10 @@ while true; do
sign_darwin=1
shift
;;
+ --sign-windows)
+ sign_windows=1
+ shift
+ ;;
--boringcrypto)
boringcrypto=1
shift
@@ -87,6 +93,10 @@ while true; do
dylib=1
shift
;;
+ --windows-resources)
+ windows_resources=1
+ shift
+ ;;
--debug)
debug=1
shift
@@ -115,11 +125,13 @@ if [[ "$sign_darwin" == 1 ]]; then
dependencies rcodesign
requiredenvs AC_CERTIFICATE_FILE AC_CERTIFICATE_PASSWORD_FILE
fi
-
if [[ "$sign_windows" == 1 ]]; then
dependencies java
requiredenvs JSIGN_PATH EV_KEYSTORE EV_KEY EV_CERTIFICATE_PATH EV_TSA_URL GCLOUD_ACCESS_TOKEN
fi
+if [[ "$windows_resources" == 1 ]]; then
+ dependencies go-winres
+fi
ldflags=(
-X "'github.com/coder/coder/v2/buildinfo.tag=$version'"
@@ -204,10 +216,100 @@ if [[ "$boringcrypto" == 1 ]]; then
goexp="boringcrypto"
fi
+# On Windows, we use go-winres to embed the resources into the binary.
+if [[ "$windows_resources" == 1 ]] && [[ "$os" == "windows" ]]; then
+ # Convert the version to a format that Windows understands.
+ # Remove any trailing data after a "+" or "-".
+ version_windows=$version
+ version_windows="${version_windows%+*}"
+ version_windows="${version_windows%-*}"
+ # If there wasn't any extra data, add a .0 to the version. Otherwise, add
+ # a .1 so that non-release (development) builds can be distinguished from
+ # release builds.
+ non_release_build=0
+ if [[ "$version_windows" == "$version" ]]; then
+ version_windows+=".0"
+ else
+ version_windows+=".1"
+ non_release_build=1
+ fi
+
+ if [[ ! "$version_windows" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-1]$ ]]; then
+ error "Computed invalid windows version format: $version_windows"
+ fi
+
+ # File description changes based on slimness, AGPL status, and architecture.
+ file_description="Coder"
+ if [[ "$agpl" == 1 ]]; then
+ file_description+=" AGPL"
+ fi
+ if [[ "$slim" == 1 ]]; then
+ file_description+=" CLI"
+ fi
+ if [[ "$non_release_build" == 1 ]]; then
+ file_description+=" (development build)"
+ fi
+
+ # Because this writes to a file with the OS and arch in the filename, we
+ # don't support concurrent builds for the same OS and arch (regardless of
+ # slimness or AGPL status).
+ #
+ # This is fine since we only embed resources during dogfood and release
+ # builds, which use make (which will build all slim targets in parallel,
+ # then all non-slim targets in parallel).
+ expected_rsrc_file="./buildinfo/resources/resources_windows_${arch}.syso"
+ if [[ -f "$expected_rsrc_file" ]]; then
+ rm "$expected_rsrc_file"
+ fi
+ touch "$expected_rsrc_file"
+
+ pushd ./buildinfo/resources
+ GOARCH="$arch" go-winres simply \
+ --arch "$arch" \
+ --out "resources" \
+ --product-version "$version_windows" \
+ --file-version "$version_windows" \
+ --manifest "cli" \
+ --file-description "$file_description" \
+ --product-name "Coder" \
+ --copyright "Copyright $(date +%Y) Coder Technologies Inc." \
+ --original-filename "coder.exe" \
+ --icon ../../scripts/win-installer/coder.ico
+ popd
+
+ if [[ ! -f "$expected_rsrc_file" ]]; then
+ error "Failed to generate $expected_rsrc_file"
+ fi
+fi
+
+set +e
GOEXPERIMENT="$goexp" CGO_ENABLED="$cgo" GOOS="$os" GOARCH="$arch" GOARM="$arm_version" \
go build \
"${build_args[@]}" \
"$cmd_path" 1>&2
+exit_code=$?
+set -e
+
+# Clean up the resources file if it was generated.
+if [[ "$windows_resources" == 1 ]] && [[ "$os" == "windows" ]]; then
+ rm "$expected_rsrc_file"
+fi
+
+if [[ "$exit_code" != 0 ]]; then
+ exit "$exit_code"
+fi
+
+# If we did embed resources, verify that they were included.
+if [[ "$windows_resources" == 1 ]] && [[ "$os" == "windows" ]]; then
+ winres_dir=$(mktemp -d)
+ if ! go-winres extract --dir "$winres_dir" "$output_path" 1>&2; then
+ rm -rf "$winres_dir"
+ error "Compiled binary does not contain embedded resources"
+ fi
+ # If go-winres didn't return an error, it means it did find embedded
+ # resources.
+ rm -rf "$winres_dir"
+fi
if [[ "$sign_darwin" == 1 ]] && [[ "$os" == "darwin" ]]; then
execrelative ./sign_darwin.sh "$output_path" "$bin_ident" 1>&2
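
The Windows version derivation above is easiest to see with a worked example. This Go sketch mirrors the shell parameter expansions (strip from the last "+", then from the last "-", then append ".0" for a release build or ".1" otherwise); it assumes the incoming version string has no leading "v", matching the regex check in the script:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var winVerRe = regexp.MustCompile(`^[0-9]+\.[0-9]+\.[0-9]+\.[0-1]$`)

// windowsVersion mirrors build_go.sh: "2.21.3" -> "2.21.3.0",
// "2.21.3-devel+abc123" -> "2.21.3.1".
func windowsVersion(version string) (string, error) {
	trimmed := version
	if i := strings.LastIndex(trimmed, "+"); i >= 0 {
		trimmed = trimmed[:i] // ${version%+*}
	}
	if i := strings.LastIndex(trimmed, "-"); i >= 0 {
		trimmed = trimmed[:i] // ${version%-*}
	}
	if trimmed == version {
		trimmed += ".0" // release build
	} else {
		trimmed += ".1" // development build
	}
	if !winVerRe.MatchString(trimmed) {
		return "", fmt.Errorf("computed invalid windows version: %q", trimmed)
	}
	return trimmed, nil
}

func main() {
	for _, v := range []string{"2.21.3", "2.21.3-devel+abc123"} {
		win, err := windowsVersion(v)
		fmt.Println(v, "->", win, err)
	}
}
```
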
diff --git a/scripts/release/docs_update_experiments.sh b/scripts/release/docs_update_experiments.sh
index 8ed380a356..1c6afdb87b 100755
--- a/scripts/release/docs_update_experiments.sh
+++ b/scripts/release/docs_update_experiments.sh
@@ -94,7 +94,7 @@ parse_experiments() {
}
workdir=build/docs/experiments
-dest=docs/contributing/feature-stages.md
+dest=docs/about/feature-stages.md
log "Updating available experimental features in ${dest}"
diff --git a/site/e2e/api.ts b/site/e2e/api.ts
index 902485b7b1..0dc9e46831 100644
--- a/site/e2e/api.ts
+++ b/site/e2e/api.ts
@@ -3,8 +3,8 @@ import { expect } from "@playwright/test";
import { API, type DeploymentConfig } from "api/api";
import type { SerpentOption } from "api/typesGenerated";
import { formatDuration, intervalToDuration } from "date-fns";
-import { coderPort } from "./constants";
-import { findSessionToken, randomName } from "./helpers";
+import { coderPort, defaultPassword } from "./constants";
+import { type LoginOptions, findSessionToken, randomName } from "./helpers";
let currentOrgId: string;
@@ -29,14 +29,40 @@ export const createUser = async (...orgIds: string[]) => {
email: `${name}@coder.com`,
username: name,
name: name,
- password: "s3cure&password!",
+ password: defaultPassword,
login_type: "password",
organization_ids: orgIds,
user_status: null,
});
+
return user;
};
+export const createOrganizationMember = async (
+ orgRoles: Record<string, string[]>,
+