feat: Add "coder projects create" command (#246)
* Refactor parameter parsing to return nil values if none computed
* Refactor parameter to allow for hiding redisplay
* Refactor parameters to enable schema matching
* Refactor provisionerd to dynamically update parameter schemas
* Refactor job update for provisionerd
* Handle multiple states correctly when provisioning a project
* Add project import job resource table
* Basic creation flow works!
* Create project fully works!!!
* Only show job status if completed
* Add create workspace support
* Replace Netflix/go-expect with ActiveState
* Fix linting errors
* Use forked chzyer/readline
* Add create workspace CLI
* Add CLI test
* Move jobs to their own APIs
* Remove go-expect
* Fix requested changes
* Skip workspacecreate test on windows
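For orientation, the end-to-end import flow this commit wires up can be sketched from the client calls exercised in the new tests (UploadFile, CreateProjectImportJob, ProjectImportJob, ProjectImportJobSchemas). The sketch below is illustrative only, not code from the commit; error handling is trimmed and the polling mirrors the test helpers.

package example

import (
    "context"
    "time"

    "github.com/coder/coder/coderd"
    "github.com/coder/coder/codersdk"
    "github.com/coder/coder/database"
)

// importProject uploads a provisioner archive, starts an import job, waits for
// a provisioner daemon to complete it, and then reads the parsed schemas.
func importProject(ctx context.Context, client *codersdk.Client, organization string, archive []byte) error {
    // Content-addressed upload: the returned hash becomes the job's storage source.
    file, err := client.UploadFile(ctx, codersdk.ContentTypeTar, archive)
    if err != nil {
        return err
    }
    job, err := client.CreateProjectImportJob(ctx, organization, coderd.CreateProjectImportJobRequest{
        StorageMethod: database.ProvisionerStorageMethodFile,
        StorageSource: file.Hash,
        // The tests use the echo provisioner; terraform is the other accepted value.
        Provisioner: database.ProvisionerTypeEcho,
    })
    if err != nil {
        return err
    }
    // Poll until the job reports a completed status, as AwaitProjectImportJob does.
    for !job.Status.Completed() {
        time.Sleep(25 * time.Millisecond)
        job, err = client.ProjectImportJob(ctx, organization, job.ID)
        if err != nil {
            return err
        }
    }
    // Schemas, parameters, and resources are only served once the job completes.
    _, err = client.ProjectImportJobSchemas(ctx, organization, job.ID)
    return err
}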
@ -73,7 +73,6 @@ func New(options *Options) http.Handler {
r.Route("/{projectversion}", func(r chi.Router) {
r.Use(httpmw.ExtractProjectVersionParam(api.Database))
r.Get("/", api.projectVersionByOrganizationAndName)
r.Get("/parameters", api.projectVersionParametersByOrganizationAndName)
})
})
})
@ -108,24 +107,38 @@ func New(options *Options) http.Handler {
r.Post("/", api.postFiles)
})

r.Route("/provisioners", func(r chi.Router) {
r.Route("/daemons", func(r chi.Router) {
r.Get("/", api.provisionerDaemons)
r.Get("/serve", api.provisionerDaemonsServe)
r.Route("/projectimport/{organization}", func(r chi.Router) {
r.Use(
httpmw.ExtractAPIKey(options.Database, nil),
httpmw.ExtractOrganizationParam(options.Database),
)
r.Post("/", api.postProjectImportByOrganization)
r.Route("/{provisionerjob}", func(r chi.Router) {
r.Use(httpmw.ExtractProvisionerJobParam(options.Database))
r.Get("/", api.provisionerJobByID)
r.Get("/schemas", api.projectImportJobSchemasByID)
r.Get("/parameters", api.projectImportJobParametersByID)
r.Get("/resources", api.projectImportJobResourcesByID)
r.Get("/logs", api.provisionerJobLogsByID)
})
r.Route("/jobs/{organization}", func(r chi.Router) {
r.Use(
httpmw.ExtractAPIKey(options.Database, nil),
httpmw.ExtractOrganizationParam(options.Database),
)
r.Post("/import", api.postProvisionerImportJobByOrganization)
r.Route("/{provisionerjob}", func(r chi.Router) {
r.Use(httpmw.ExtractProvisionerJobParam(options.Database))
r.Get("/", api.provisionerJobByOrganization)
r.Get("/logs", api.provisionerJobLogsByID)
})
})

r.Route("/workspaceprovision/{organization}", func(r chi.Router) {
r.Use(
httpmw.ExtractAPIKey(options.Database, nil),
httpmw.ExtractOrganizationParam(options.Database),
)
r.Route("/{provisionerjob}", func(r chi.Router) {
r.Use(httpmw.ExtractProvisionerJobParam(options.Database))
r.Get("/", api.provisionerJobByID)
r.Get("/logs", api.provisionerJobLogsByID)
})
})

r.Route("/provisioners/daemons", func(r chi.Router) {
r.Get("/", api.provisionerDaemons)
r.Get("/serve", api.provisionerDaemonsServe)
})
})
r.NotFound(site.Handler(options.Logger).ServeHTTP)
return r
@ -28,8 +28,8 @@ import (
"github.com/coder/coder/provisionersdk/proto"
)

// New constructs a new coderd test instance. This returned Server
// should contain no side-effects.
// New constructs an in-memory coderd instance and returns
// the connected client.
func New(t *testing.T) *codersdk.Client {
// This can be hotswapped for a live database instance.
db := databasefake.New()
@ -117,20 +117,19 @@ func CreateInitialUser(t *testing.T, client *codersdk.Client) coderd.CreateIniti
Password: req.Password,
})
require.NoError(t, err)
err = client.SetSessionToken(login.SessionToken)
require.NoError(t, err)
client.SessionToken = login.SessionToken
return req
}

// CreateProjectImportProvisionerJob creates a project import provisioner job
// CreateProjectImportJob creates a project import provisioner job
// with the responses provided. It uses the "echo" provisioner for compatibility
// with testing.
func CreateProjectImportProvisionerJob(t *testing.T, client *codersdk.Client, organization string, res *echo.Responses) coderd.ProvisionerJob {
func CreateProjectImportJob(t *testing.T, client *codersdk.Client, organization string, res *echo.Responses) coderd.ProvisionerJob {
data, err := echo.Tar(res)
require.NoError(t, err)
file, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, data)
require.NoError(t, err)
job, err := client.CreateProjectVersionImportProvisionerJob(context.Background(), organization, coderd.CreateProjectImportJobRequest{
job, err := client.CreateProjectImportJob(context.Background(), organization, coderd.CreateProjectImportJobRequest{
StorageSource: file.Hash,
StorageMethod: database.ProvisionerStorageMethodFile,
Provisioner: database.ProvisionerTypeEcho,
@ -150,12 +149,24 @@ func CreateProject(t *testing.T, client *codersdk.Client, organization string, j
return project
}

// AwaitProvisionerJob awaits for a job to reach completed status.
func AwaitProvisionerJob(t *testing.T, client *codersdk.Client, organization string, job uuid.UUID) coderd.ProvisionerJob {
// AwaitProjectImportJob awaits for an import job to reach completed status.
func AwaitProjectImportJob(t *testing.T, client *codersdk.Client, organization string, job uuid.UUID) coderd.ProvisionerJob {
var provisionerJob coderd.ProvisionerJob
require.Eventually(t, func() bool {
var err error
provisionerJob, err = client.ProvisionerJob(context.Background(), organization, job)
provisionerJob, err = client.ProjectImportJob(context.Background(), organization, job)
require.NoError(t, err)
return provisionerJob.Status.Completed()
}, 3*time.Second, 25*time.Millisecond)
return provisionerJob
}

// AwaitWorkspaceProvisionJob awaits for a workspace provision job to reach completed status.
func AwaitWorkspaceProvisionJob(t *testing.T, client *codersdk.Client, organization string, job uuid.UUID) coderd.ProvisionerJob {
var provisionerJob coderd.ProvisionerJob
require.Eventually(t, func() bool {
var err error
provisionerJob, err = client.WorkspaceProvisionJob(context.Background(), organization, job)
require.NoError(t, err)
return provisionerJob.Status.Completed()
}, 3*time.Second, 25*time.Millisecond)
@ -22,8 +22,8 @@ func TestNew(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
closer := coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
history, err := client.CreateWorkspaceHistory(context.Background(), "me", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
@ -31,6 +31,6 @@ func TestNew(t *testing.T) {
Transition: database.WorkspaceTransitionStart,
})
require.NoError(t, err)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, history.ProvisionJobID)
coderdtest.AwaitWorkspaceProvisionJob(t, client, user.Organization, history.ProvisionJobID)
closer.Close()
}
@ -40,8 +40,18 @@ func (api *api) postFiles(rw http.ResponseWriter, r *http.Request) {
return
}
hashBytes := sha256.Sum256(data)
file, err := api.Database.InsertFile(r.Context(), database.InsertFileParams{
Hash: hex.EncodeToString(hashBytes[:]),
hash := hex.EncodeToString(hashBytes[:])
file, err := api.Database.GetFileByHash(r.Context(), hash)
if err == nil {
// The file already exists!
render.Status(r, http.StatusOK)
render.JSON(rw, r, UploadFileResponse{
Hash: file.Hash,
})
return
}
file, err = api.Database.InsertFile(r.Context(), database.InsertFileParams{
Hash: hash,
CreatedBy: apiKey.UserID,
CreatedAt: database.Now(),
Mimetype: contentType,
@ -27,4 +27,15 @@ func TestPostFiles(t *testing.T) {
_, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, make([]byte, 1024))
require.NoError(t, err)
})

t.Run("InsertAlreadyExists", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
_ = coderdtest.CreateInitialUser(t, client)
data := make([]byte, 1024)
_, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, data)
require.NoError(t, err)
_, err = client.UploadFile(context.Background(), codersdk.ContentTypeTar, data)
require.NoError(t, err)
})
}
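The dedup above keys uploads by the hex-encoded SHA-256 of their contents, which is why the InsertAlreadyExists test can upload the same bytes twice and get the same hash back. A standalone sketch of that content addressing (hypothetical helper, not part of the commit):

package example

import (
    "crypto/sha256"
    "encoding/hex"
)

// contentKey returns the key used to deduplicate uploads: identical bytes
// always hash to the same hex string, so a second upload can be answered
// with the existing record instead of inserting a new file.
func contentKey(data []byte) string {
    hashBytes := sha256.Sum256(data)
    return hex.EncodeToString(hashBytes[:])
}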
coderd/parameter/compute.go (new file, 215 lines)
@ -0,0 +1,215 @@
package parameter

import (
    "context"
    "database/sql"
    "errors"

    "github.com/google/uuid"
    "golang.org/x/xerrors"

    "github.com/coder/coder/database"
)

const (
    CoderUsername = "coder_username"
    CoderWorkspaceTransition = "coder_workspace_transition"
)

// ComputeScope targets identifiers to pull parameters from.
type ComputeScope struct {
    ProjectImportJobID uuid.UUID
    OrganizationID string
    UserID string
    ProjectID uuid.NullUUID
    WorkspaceID uuid.NullUUID
}

type ComputeOptions struct {
    // HideRedisplayValues removes the value from parameters that
    // come from schemas with RedisplayValue set to false.
    HideRedisplayValues bool
}

// ComputedValue represents a computed parameter value.
type ComputedValue struct {
    database.ParameterValue
    SchemaID uuid.UUID `json:"schema_id"`
    DefaultSourceValue bool `json:"default_source_value"`
}

// Compute accepts a scope in which parameter values are sourced.
// These sources are iterated in a hierarchical fashion to determine
// the runtime parameter values for schemas provided.
func Compute(ctx context.Context, db database.Store, scope ComputeScope, options *ComputeOptions) ([]ComputedValue, error) {
    if options == nil {
        options = &ComputeOptions{}
    }
    compute := &compute{
        options: options,
        db: db,
        computedParameterByName: map[string]ComputedValue{},
        parameterSchemasByName: map[string]database.ParameterSchema{},
    }

    // All parameters for the import job ID!
    parameterSchemas, err := db.GetParameterSchemasByJobID(ctx, scope.ProjectImportJobID)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
    }
    if err != nil {
        return nil, xerrors.Errorf("get project parameters: %w", err)
    }
    for _, parameterSchema := range parameterSchemas {
        compute.parameterSchemasByName[parameterSchema.Name] = parameterSchema
    }

    // Organization parameters come first!
    err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
        Scope: database.ParameterScopeOrganization,
        ScopeID: scope.OrganizationID,
    })
    if err != nil {
        return nil, err
    }

    // Job parameters come second!
    err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
        Scope: database.ParameterScopeImportJob,
        ScopeID: scope.ProjectImportJobID.String(),
    })
    if err != nil {
        return nil, err
    }

    // Default project parameter values come second!
    for _, parameterSchema := range parameterSchemas {
        if parameterSchema.DefaultSourceScheme == database.ParameterSourceSchemeNone {
            continue
        }
        if _, ok := compute.computedParameterByName[parameterSchema.Name]; ok {
            // We already have a value! No need to use the default.
            continue
        }

        switch parameterSchema.DefaultSourceScheme {
        case database.ParameterSourceSchemeData:
            // Inject a default value scoped to the import job ID.
            // This doesn't need to be inserted into the database,
            // because it's a dynamic value associated with the schema.
            err = compute.injectSingle(database.ParameterValue{
                ID: uuid.New(),
                CreatedAt: database.Now(),
                UpdatedAt: database.Now(),
                SourceScheme: database.ParameterSourceSchemeData,
                Name: parameterSchema.Name,
                DestinationScheme: parameterSchema.DefaultDestinationScheme,
                SourceValue: parameterSchema.DefaultSourceValue,
                Scope: database.ParameterScopeImportJob,
                ScopeID: scope.ProjectImportJobID.String(),
            }, true)
            if err != nil {
                return nil, xerrors.Errorf("insert default value: %w", err)
            }
        default:
            return nil, xerrors.Errorf("unsupported source scheme for project version parameter %q: %q", parameterSchema.Name, string(parameterSchema.DefaultSourceScheme))
        }
    }

    if scope.ProjectID.Valid {
        // Project parameters come third!
        err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
            Scope: database.ParameterScopeProject,
            ScopeID: scope.ProjectID.UUID.String(),
        })
        if err != nil {
            return nil, err
        }
    }

    // User parameters come fourth!
    err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
        Scope: database.ParameterScopeUser,
        ScopeID: scope.UserID,
    })
    if err != nil {
        return nil, err
    }

    if scope.WorkspaceID.Valid {
        // Workspace parameters come last!
        err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
            Scope: database.ParameterScopeWorkspace,
            ScopeID: scope.WorkspaceID.UUID.String(),
        })
        if err != nil {
            return nil, err
        }
    }

    values := make([]ComputedValue, 0, len(compute.computedParameterByName))
    for _, value := range compute.computedParameterByName {
        values = append(values, value)
    }
    return values, nil
}

type compute struct {
    options *ComputeOptions
    db database.Store
    computedParameterByName map[string]ComputedValue
    parameterSchemasByName map[string]database.ParameterSchema
}

// Validates and computes the value for parameters; setting the value on "parameterByName".
func (c *compute) injectScope(ctx context.Context, scopeParams database.GetParameterValuesByScopeParams) error {
    scopedParameters, err := c.db.GetParameterValuesByScope(ctx, scopeParams)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
    }
    if err != nil {
        return xerrors.Errorf("get %s parameters: %w", scopeParams.Scope, err)
    }

    for _, scopedParameter := range scopedParameters {
        err = c.injectSingle(scopedParameter, false)
        if err != nil {
            return xerrors.Errorf("inject single %q: %w", scopedParameter.Name, err)
        }
    }
    return nil
}

func (c *compute) injectSingle(scopedParameter database.ParameterValue, defaultValue bool) error {
    parameterSchema, hasParameterSchema := c.parameterSchemasByName[scopedParameter.Name]
    if !hasParameterSchema {
        // Don't inject parameters that aren't defined by the project.
        return nil
    }

    _, hasParameterValue := c.computedParameterByName[scopedParameter.Name]
    if hasParameterValue {
        if !parameterSchema.AllowOverrideSource &&
            // Users and workspaces cannot override anything on a project!
            (scopedParameter.Scope == database.ParameterScopeUser ||
                scopedParameter.Scope == database.ParameterScopeWorkspace) {
            return nil
        }
    }

    switch scopedParameter.SourceScheme {
    case database.ParameterSourceSchemeData:
        value := ComputedValue{
            ParameterValue: scopedParameter,
            SchemaID: parameterSchema.ID,
            DefaultSourceValue: defaultValue,
        }
        if c.options.HideRedisplayValues && !parameterSchema.RedisplayValue {
            value.SourceValue = ""
        }
        c.computedParameterByName[scopedParameter.Name] = value
    default:
        return xerrors.Errorf("unsupported source scheme: %q", string(parameterSchema.DefaultSourceScheme))
    }
    return nil
}
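A hedged usage sketch of the Compute function above: values resolve in order (organization, then import-job values, then schema defaults, then project, user, and workspace, with user and workspace overrides gated on AllowOverrideSource), and HideRedisplayValues blanks values whose schema forbids redisplay. The scope identifiers here are placeholders.

package example

import (
    "context"

    "github.com/google/uuid"

    "github.com/coder/coder/coderd/parameter"
    "github.com/coder/coder/database"
)

// computeForWorkspace resolves parameters for a workspace build without ever
// exposing values that schemas mark as non-redisplayable.
func computeForWorkspace(ctx context.Context, db database.Store, importJobID uuid.UUID, orgID, userID string, projectID, workspaceID uuid.UUID) ([]parameter.ComputedValue, error) {
    return parameter.Compute(ctx, db, parameter.ComputeScope{
        ProjectImportJobID: importJobID,
        OrganizationID: orgID,
        UserID: userID,
        ProjectID: uuid.NullUUID{UUID: projectID, Valid: true},
        WorkspaceID: uuid.NullUUID{UUID: workspaceID, Valid: true},
    }, &parameter.ComputeOptions{
        HideRedisplayValues: true, // strip SourceValue when RedisplayValue is false
    })
}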
coderd/parameter/compute_test.go (new file, 222 lines)
@ -0,0 +1,222 @@
package parameter_test

import (
    "context"
    "testing"

    "github.com/google/uuid"
    "github.com/stretchr/testify/require"

    "github.com/coder/coder/coderd/parameter"
    "github.com/coder/coder/cryptorand"
    "github.com/coder/coder/database"
    "github.com/coder/coder/database/databasefake"
)

func TestCompute(t *testing.T) {
    t.Parallel()
    generateScope := func() parameter.ComputeScope {
        return parameter.ComputeScope{
            ProjectImportJobID: uuid.New(),
            OrganizationID: uuid.NewString(),
            ProjectID: uuid.NullUUID{
                UUID: uuid.New(),
                Valid: true,
            },
            WorkspaceID: uuid.NullUUID{
                UUID: uuid.New(),
                Valid: true,
            },
            UserID: uuid.NewString(),
        }
    }
    type parameterOptions struct {
        AllowOverrideSource bool
        AllowOverrideDestination bool
        DefaultDestinationScheme database.ParameterDestinationScheme
        ProjectImportJobID uuid.UUID
    }
    generateParameter := func(t *testing.T, db database.Store, opts parameterOptions) database.ParameterSchema {
        if opts.DefaultDestinationScheme == "" {
            opts.DefaultDestinationScheme = database.ParameterDestinationSchemeEnvironmentVariable
        }
        name, err := cryptorand.String(8)
        require.NoError(t, err)
        sourceValue, err := cryptorand.String(8)
        require.NoError(t, err)
        param, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{
            ID: uuid.New(),
            Name: name,
            JobID: opts.ProjectImportJobID,
            DefaultSourceScheme: database.ParameterSourceSchemeData,
            DefaultSourceValue: sourceValue,
            AllowOverrideSource: opts.AllowOverrideSource,
            AllowOverrideDestination: opts.AllowOverrideDestination,
            DefaultDestinationScheme: opts.DefaultDestinationScheme,
        })
        require.NoError(t, err)
        return param
    }

    t.Run("NoValue", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        _, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{
            ID: uuid.New(),
            JobID: scope.ProjectImportJobID,
            Name: "hey",
            DefaultSourceScheme: database.ParameterSourceSchemeNone,
        })
        require.NoError(t, err)
        computed, err := parameter.Compute(context.Background(), db, scope, nil)
        require.NoError(t, err)
        require.Len(t, computed, 0)
    })

    t.Run("UseDefaultProjectValue", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameterSchema := generateParameter(t, db, parameterOptions{
            ProjectImportJobID: scope.ProjectImportJobID,
            DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable,
        })
        computed, err := parameter.Compute(context.Background(), db, scope, nil)
        require.NoError(t, err)
        require.Len(t, computed, 1)
        computedValue := computed[0]
        require.True(t, computedValue.DefaultSourceValue)
        require.Equal(t, database.ParameterScopeImportJob, computedValue.Scope)
        require.Equal(t, scope.ProjectImportJobID.String(), computedValue.ScopeID)
        require.Equal(t, computedValue.SourceValue, parameterSchema.DefaultSourceValue)
    })

    t.Run("OverrideOrganizationWithImportJob", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameterSchema := generateParameter(t, db, parameterOptions{
            ProjectImportJobID: scope.ProjectImportJobID,
        })
        _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameterSchema.Name,
            Scope: database.ParameterScopeOrganization,
            ScopeID: scope.OrganizationID,
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "firstnop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
        })
        require.NoError(t, err)

        value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameterSchema.Name,
            Scope: database.ParameterScopeImportJob,
            ScopeID: scope.ProjectImportJobID.String(),
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "secondnop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
        })
        require.NoError(t, err)

        computed, err := parameter.Compute(context.Background(), db, scope, nil)
        require.NoError(t, err)
        require.Len(t, computed, 1)
        require.Equal(t, false, computed[0].DefaultSourceValue)
        require.Equal(t, value.SourceValue, computed[0].SourceValue)
    })

    t.Run("ProjectOverridesProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameterSchema := generateParameter(t, db, parameterOptions{
            ProjectImportJobID: scope.ProjectImportJobID,
        })
        value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameterSchema.Name,
            Scope: database.ParameterScopeProject,
            ScopeID: scope.ProjectID.UUID.String(),
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "nop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
        })
        require.NoError(t, err)

        computed, err := parameter.Compute(context.Background(), db, scope, nil)
        require.NoError(t, err)
        require.Len(t, computed, 1)
        require.Equal(t, false, computed[0].DefaultSourceValue)
        require.Equal(t, value.SourceValue, computed[0].SourceValue)
    })

    t.Run("WorkspaceCannotOverwriteProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameterSchema := generateParameter(t, db, parameterOptions{
            ProjectImportJobID: scope.ProjectImportJobID,
        })
        _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameterSchema.Name,
            Scope: database.ParameterScopeWorkspace,
            ScopeID: scope.WorkspaceID.UUID.String(),
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "nop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
        })
        require.NoError(t, err)

        computed, err := parameter.Compute(context.Background(), db, scope, nil)
        require.NoError(t, err)
        require.Len(t, computed, 1)
        require.Equal(t, true, computed[0].DefaultSourceValue)
    })

    t.Run("WorkspaceOverwriteProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameterSchema := generateParameter(t, db, parameterOptions{
            AllowOverrideSource: true,
            ProjectImportJobID: scope.ProjectImportJobID,
        })
        _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameterSchema.Name,
            Scope: database.ParameterScopeWorkspace,
            ScopeID: scope.WorkspaceID.UUID.String(),
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "nop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
        })
        require.NoError(t, err)

        computed, err := parameter.Compute(context.Background(), db, scope, nil)
        require.NoError(t, err)
        require.Len(t, computed, 1)
        require.Equal(t, false, computed[0].DefaultSourceValue)
    })

    t.Run("HideRedisplay", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        _ = generateParameter(t, db, parameterOptions{
            ProjectImportJobID: scope.ProjectImportJobID,
            DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable,
        })
        computed, err := parameter.Compute(context.Background(), db, scope, &parameter.ComputeOptions{
            HideRedisplayValues: true,
        })
        require.NoError(t, err)
        require.Len(t, computed, 1)
        computedValue := computed[0]
        require.True(t, computedValue.DefaultSourceValue)
        require.Equal(t, computedValue.SourceValue, "")
    })
}
@ -1,107 +0,0 @@
package coderd

import (
    "database/sql"
    "errors"
    "fmt"
    "net/http"
    "time"

    "github.com/go-chi/render"
    "github.com/google/uuid"

    "github.com/coder/coder/database"
    "github.com/coder/coder/httpapi"
)

// CreateParameterValueRequest is used to create a new parameter value for a scope.
type CreateParameterValueRequest struct {
    Name string `json:"name" validate:"required"`
    SourceValue string `json:"source_value" validate:"required"`
    SourceScheme database.ParameterSourceScheme `json:"source_scheme" validate:"oneof=data,required"`
    DestinationScheme database.ParameterDestinationScheme `json:"destination_scheme" validate:"oneof=environment_variable provisioner_variable,required"`
    DestinationValue string `json:"destination_value" validate:"required"`
}

// ParameterValue represents a set value for the scope.
type ParameterValue struct {
    ID uuid.UUID `json:"id"`
    Name string `json:"name"`
    CreatedAt time.Time `json:"created_at"`
    UpdatedAt time.Time `json:"updated_at"`
    Scope database.ParameterScope `json:"scope"`
    ScopeID string `json:"scope_id"`
    SourceScheme database.ParameterSourceScheme `json:"source_scheme"`
    DestinationScheme database.ParameterDestinationScheme `json:"destination_scheme"`
    DestinationValue string `json:"destination_value"`
}

// Abstracts creating parameters into a single request/response format.
// Callers are in charge of validating the requester has permissions to
// perform the creation.
func postParameterValueForScope(rw http.ResponseWriter, r *http.Request, db database.Store, scope database.ParameterScope, scopeID string) {
    var createRequest CreateParameterValueRequest
    if !httpapi.Read(rw, r, &createRequest) {
        return
    }
    parameterValue, err := db.InsertParameterValue(r.Context(), database.InsertParameterValueParams{
        ID: uuid.New(),
        Name: createRequest.Name,
        CreatedAt: database.Now(),
        UpdatedAt: database.Now(),
        Scope: scope,
        ScopeID: scopeID,
        SourceScheme: createRequest.SourceScheme,
        SourceValue: createRequest.SourceValue,
        DestinationScheme: createRequest.DestinationScheme,
        DestinationValue: createRequest.DestinationValue,
    })
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("insert parameter value: %s", err),
        })
        return
    }

    render.Status(r, http.StatusCreated)
    render.JSON(rw, r, parameterValue)
}

// Abstracts returning parameters for a scope into a standardized
// request/response format. Callers are responsible for checking
// requester permissions.
func parametersForScope(rw http.ResponseWriter, r *http.Request, db database.Store, req database.GetParameterValuesByScopeParams) {
    parameterValues, err := db.GetParameterValuesByScope(r.Context(), req)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
        parameterValues = []database.ParameterValue{}
    }
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("get parameter values: %s", err),
        })
        return
    }

    apiParameterValues := make([]ParameterValue, 0, len(parameterValues))
    for _, parameterValue := range parameterValues {
        apiParameterValues = append(apiParameterValues, convertParameterValue(parameterValue))
    }

    render.Status(r, http.StatusOK)
    render.JSON(rw, r, apiParameterValues)
}

func convertParameterValue(parameterValue database.ParameterValue) ParameterValue {
    return ParameterValue{
        ID: parameterValue.ID,
        Name: parameterValue.Name,
        CreatedAt: parameterValue.CreatedAt,
        UpdatedAt: parameterValue.UpdatedAt,
        Scope: parameterValue.Scope,
        ScopeID: parameterValue.ScopeID,
        SourceScheme: parameterValue.SourceScheme,
        DestinationScheme: parameterValue.DestinationScheme,
        DestinationValue: parameterValue.DestinationValue,
    }
}
coderd/projectimport.go (new file, 185 lines)
@ -0,0 +1,185 @@
package coderd

import (
    "database/sql"
    "errors"
    "fmt"
    "net/http"

    "github.com/go-chi/render"
    "github.com/google/uuid"

    "github.com/coder/coder/coderd/parameter"
    "github.com/coder/coder/database"
    "github.com/coder/coder/httpapi"
    "github.com/coder/coder/httpmw"
)

// ParameterSchema represents a parameter parsed from project version source.
type ParameterSchema database.ParameterSchema

// ComputedParameterValue represents a computed parameter value.
type ComputedParameterValue parameter.ComputedValue

// ProjectImportJobResource is a resource created by a project import job.
type ProjectImportJobResource database.ProjectImportJobResource

// CreateProjectImportJobRequest provides options to create a project import job.
type CreateProjectImportJobRequest struct {
    StorageMethod database.ProvisionerStorageMethod `json:"storage_method" validate:"oneof=file,required"`
    StorageSource string `json:"storage_source" validate:"required"`
    Provisioner database.ProvisionerType `json:"provisioner" validate:"oneof=terraform echo,required"`
    // ParameterValues allows for additional parameters to be provided
    // during the dry-run provision stage.
    ParameterValues []CreateParameterValueRequest `json:"parameter_values"`
}

// Create a new project import job!
func (api *api) postProjectImportByOrganization(rw http.ResponseWriter, r *http.Request) {
    apiKey := httpmw.APIKey(r)
    organization := httpmw.OrganizationParam(r)
    var req CreateProjectImportJobRequest
    if !httpapi.Read(rw, r, &req) {
        return
    }
    file, err := api.Database.GetFileByHash(r.Context(), req.StorageSource)
    if errors.Is(err, sql.ErrNoRows) {
        httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
            Message: "file not found",
        })
        return
    }
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("get file: %s", err),
        })
        return
    }

    jobID := uuid.New()
    for _, parameterValue := range req.ParameterValues {
        _, err = api.Database.InsertParameterValue(r.Context(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameterValue.Name,
            CreatedAt: database.Now(),
            UpdatedAt: database.Now(),
            Scope: database.ParameterScopeImportJob,
            ScopeID: jobID.String(),
            SourceScheme: parameterValue.SourceScheme,
            SourceValue: parameterValue.SourceValue,
            DestinationScheme: parameterValue.DestinationScheme,
        })
        if err != nil {
            httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
                Message: fmt.Sprintf("insert parameter value: %s", err),
            })
            return
        }
    }

    job, err := api.Database.InsertProvisionerJob(r.Context(), database.InsertProvisionerJobParams{
        ID: jobID,
        CreatedAt: database.Now(),
        UpdatedAt: database.Now(),
        OrganizationID: organization.ID,
        InitiatorID: apiKey.UserID,
        Provisioner: req.Provisioner,
        StorageMethod: database.ProvisionerStorageMethodFile,
        StorageSource: file.Hash,
        Type: database.ProvisionerJobTypeProjectVersionImport,
        Input: []byte{'{', '}'},
    })
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("insert provisioner job: %s", err),
        })
        return
    }

    render.Status(r, http.StatusCreated)
    render.JSON(rw, r, convertProvisionerJob(job))
}

// Returns imported parameter schemas from a completed job!
func (api *api) projectImportJobSchemasByID(rw http.ResponseWriter, r *http.Request) {
    job := httpmw.ProvisionerJobParam(r)
    if !convertProvisionerJob(job).Status.Completed() {
        httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{
            Message: "Job hasn't completed!",
        })
        return
    }

    schemas, err := api.Database.GetParameterSchemasByJobID(r.Context(), job.ID)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
    }
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("list parameter schemas: %s", err),
        })
        return
    }
    if schemas == nil {
        schemas = []database.ParameterSchema{}
    }
    render.Status(r, http.StatusOK)
    render.JSON(rw, r, schemas)
}

// Returns computed parameters for an import job by ID.
func (api *api) projectImportJobParametersByID(rw http.ResponseWriter, r *http.Request) {
    apiKey := httpmw.APIKey(r)
    job := httpmw.ProvisionerJobParam(r)
    if !convertProvisionerJob(job).Status.Completed() {
        httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{
            Message: "Job hasn't completed!",
        })
        return
    }
    values, err := parameter.Compute(r.Context(), api.Database, parameter.ComputeScope{
        ProjectImportJobID: job.ID,
        OrganizationID: job.OrganizationID,
        UserID: apiKey.UserID,
    }, &parameter.ComputeOptions{
        // We *never* want to send the client secret parameter values.
        HideRedisplayValues: true,
    })
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("compute values: %s", err),
        })
        return
    }
    if values == nil {
        values = []parameter.ComputedValue{}
    }
    render.Status(r, http.StatusOK)
    render.JSON(rw, r, values)
}

// Returns resources for an import job by ID.
func (api *api) projectImportJobResourcesByID(rw http.ResponseWriter, r *http.Request) {
    job := httpmw.ProvisionerJobParam(r)
    if !convertProvisionerJob(job).Status.Completed() {
        httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{
            Message: "Job hasn't completed!",
        })
        return
    }
    resources, err := api.Database.GetProjectImportJobResourcesByJobID(r.Context(), job.ID)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
    }
    if err != nil {
        httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
            Message: fmt.Sprintf("get project import job resources: %s", err),
        })
        return
    }
    if resources == nil {
        resources = []database.ProjectImportJobResource{}
    }
    render.Status(r, http.StatusOK)
    render.JSON(rw, r, resources)
}
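The schemas, parameters, and resources handlers above answer 412 Precondition Failed until the provisioner job completes; the tests below detect that through codersdk.Error. A hedged sketch of that check, assuming only the client calls exercised in those tests:

package example

import (
    "context"
    "errors"
    "net/http"

    "github.com/google/uuid"

    "github.com/coder/coder/codersdk"
)

// importJobReady reports whether the import job's schema endpoint is ready to
// be read, treating a 412 Precondition Failed as "not yet".
func importJobReady(ctx context.Context, client *codersdk.Client, organization string, jobID uuid.UUID) (bool, error) {
    _, err := client.ProjectImportJobSchemas(ctx, organization, jobID)
    if err == nil {
        return true, nil
    }
    var apiErr *codersdk.Error
    if errors.As(err, &apiErr) && apiErr.StatusCode() == http.StatusPreconditionFailed {
        return false, nil // the job hasn't completed; poll and retry
    }
    return false, err
}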
coderd/projectimport_test.go (new file, 162 lines)
@ -0,0 +1,162 @@
package coderd_test

import (
    "context"
    "net/http"
    "testing"

    "github.com/coder/coder/coderd"
    "github.com/coder/coder/coderd/coderdtest"
    "github.com/coder/coder/codersdk"
    "github.com/coder/coder/database"
    "github.com/coder/coder/provisioner/echo"
    "github.com/coder/coder/provisionersdk/proto"
    "github.com/stretchr/testify/require"
)

func TestPostProjectImportByOrganization(t *testing.T) {
    t.Parallel()
    t.Run("FileNotFound", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        _, err := client.CreateProjectImportJob(context.Background(), user.Organization, coderd.CreateProjectImportJobRequest{
            StorageMethod: database.ProvisionerStorageMethodFile,
            StorageSource: "bananas",
            Provisioner: database.ProvisionerTypeEcho,
        })
        require.Error(t, err)
    })
    t.Run("Create", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        _ = coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
    })
}

func TestProjectImportJobSchemasByID(t *testing.T) {
    t.Parallel()
    t.Run("ListRunning", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
        _, err := client.ProjectImportJobSchemas(context.Background(), user.Organization, job.ID)
        var apiErr *codersdk.Error
        require.ErrorAs(t, err, &apiErr)
        require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode())
    })
    t.Run("List", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        coderdtest.NewProvisionerDaemon(t, client)
        job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
            Parse: []*proto.Parse_Response{{
                Type: &proto.Parse_Response_Complete{
                    Complete: &proto.Parse_Complete{
                        ParameterSchemas: []*proto.ParameterSchema{{
                            Name: "example",
                            DefaultDestination: &proto.ParameterDestination{
                                Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE,
                            },
                        }},
                    },
                },
            }},
            Provision: echo.ProvisionComplete,
        })
        coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
        schemas, err := client.ProjectImportJobSchemas(context.Background(), user.Organization, job.ID)
        require.NoError(t, err)
        require.NotNil(t, schemas)
        require.Len(t, schemas, 1)
    })
}

func TestProjectImportJobParametersByID(t *testing.T) {
    t.Parallel()
    t.Run("ListRunning", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
        _, err := client.ProjectImportJobSchemas(context.Background(), user.Organization, job.ID)
        var apiErr *codersdk.Error
        require.ErrorAs(t, err, &apiErr)
        require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode())
    })
    t.Run("List", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        coderdtest.NewProvisionerDaemon(t, client)
        job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
            Parse: []*proto.Parse_Response{{
                Type: &proto.Parse_Response_Complete{
                    Complete: &proto.Parse_Complete{
                        ParameterSchemas: []*proto.ParameterSchema{{
                            Name: "example",
                            RedisplayValue: true,
                            DefaultSource: &proto.ParameterSource{
                                Scheme: proto.ParameterSource_DATA,
                                Value: "hello",
                            },
                            DefaultDestination: &proto.ParameterDestination{
                                Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE,
                            },
                        }},
                    },
                },
            }},
            Provision: echo.ProvisionComplete,
        })
        coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
        params, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID)
        require.NoError(t, err)
        require.NotNil(t, params)
        require.Len(t, params, 1)
        require.Equal(t, "hello", params[0].SourceValue)
    })
}

func TestProjectImportJobResourcesByID(t *testing.T) {
    t.Parallel()
    t.Run("ListRunning", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
        _, err := client.ProjectImportJobResources(context.Background(), user.Organization, job.ID)
        var apiErr *codersdk.Error
        require.ErrorAs(t, err, &apiErr)
        require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode())
    })
    t.Run("List", func(t *testing.T) {
        t.Parallel()
        client := coderdtest.New(t)
        user := coderdtest.CreateInitialUser(t, client)
        coderdtest.NewProvisionerDaemon(t, client)
        job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
            Parse: echo.ParseComplete,
            Provision: []*proto.Provision_Response{{
                Type: &proto.Provision_Response_Complete{
                    Complete: &proto.Provision_Complete{
                        Resources: []*proto.Resource{{
                            Name: "some",
                            Type: "example",
                        }},
                    },
                },
            }},
        })
        coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
        resources, err := client.ProjectImportJobResources(context.Background(), user.Organization, job.ID)
        require.NoError(t, err)
        require.NotNil(t, resources)
        require.Len(t, resources, 2)
        require.Equal(t, "some", resources[0].Name)
        require.Equal(t, "example", resources[0].Type)
    })
}
@ -1,233 +0,0 @@
package projectparameter

import (
    "context"
    "database/sql"
    "errors"
    "fmt"

    "github.com/google/uuid"
    "golang.org/x/xerrors"

    "github.com/coder/coder/database"
    "github.com/coder/coder/provisionersdk/proto"
)

// Scope targets identifiers to pull parameters from.
type Scope struct {
    ImportJobID uuid.UUID
    OrganizationID string
    ProjectID uuid.NullUUID
    UserID sql.NullString
    WorkspaceID uuid.NullUUID
}

// Value represents a computed parameter.
type Value struct {
    Proto *proto.ParameterValue
    // DefaultValue is whether a default value for the scope
    // was consumed. This can only be true for projects.
    DefaultValue bool
    Scope database.ParameterScope
    ScopeID string
}

// Compute accepts a scope in which parameter values are sourced.
// These sources are iterated in a hierarchical fashion to determine
// the runtime parameter values for a project.
func Compute(ctx context.Context, db database.Store, scope Scope, additional ...database.ParameterValue) ([]Value, error) {
    compute := &compute{
        db: db,
        computedParameterByName: map[string]Value{},
        parameterSchemasByName: map[string]database.ParameterSchema{},
    }

    // All parameters for the import job ID!
    parameterSchemas, err := db.GetParameterSchemasByJobID(ctx, scope.ImportJobID)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
    }
    if err != nil {
        return nil, xerrors.Errorf("get project parameters: %w", err)
    }
    for _, projectVersionParameter := range parameterSchemas {
        compute.parameterSchemasByName[projectVersionParameter.Name] = projectVersionParameter
    }

    // Organization parameters come first!
    err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
        Scope: database.ParameterScopeOrganization,
        ScopeID: scope.OrganizationID,
    })
    if err != nil {
        return nil, err
    }

    // Default project parameter values come second!
    for _, projectVersionParameter := range parameterSchemas {
        if !projectVersionParameter.DefaultSourceValue.Valid {
            continue
        }
        if !projectVersionParameter.DefaultDestinationValue.Valid {
            continue
        }

        destinationScheme, err := convertDestinationScheme(projectVersionParameter.DefaultDestinationScheme)
        if err != nil {
            return nil, xerrors.Errorf("convert default destination scheme for project version parameter %q: %w", projectVersionParameter.Name, err)
        }

        switch projectVersionParameter.DefaultSourceScheme {
        case database.ParameterSourceSchemeData:
            compute.computedParameterByName[projectVersionParameter.Name] = Value{
                Proto: &proto.ParameterValue{
                    DestinationScheme: destinationScheme,
                    Name: projectVersionParameter.DefaultDestinationValue.String,
                    Value: projectVersionParameter.DefaultSourceValue.String,
                },
                DefaultValue: true,
                Scope: database.ParameterScopeProject,
                ScopeID: scope.ProjectID.UUID.String(),
            }
        default:
            return nil, xerrors.Errorf("unsupported source scheme for project version parameter %q: %q", projectVersionParameter.Name, string(projectVersionParameter.DefaultSourceScheme))
        }
    }

    if scope.ProjectID.Valid {
        // Project parameters come third!
        err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
            Scope: database.ParameterScopeProject,
            ScopeID: scope.ProjectID.UUID.String(),
        })
        if err != nil {
            return nil, err
        }
    }

    if scope.UserID.Valid {
        // User parameters come fourth!
        err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
            Scope: database.ParameterScopeUser,
            ScopeID: scope.UserID.String,
        })
        if err != nil {
            return nil, err
        }
    }

    if scope.WorkspaceID.Valid {
        // Workspace parameters come last!
        err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{
            Scope: database.ParameterScopeWorkspace,
            ScopeID: scope.WorkspaceID.UUID.String(),
        })
        if err != nil {
            return nil, err
        }
    }

    for _, parameterValue := range additional {
        err = compute.injectSingle(parameterValue)
        if err != nil {
            return nil, xerrors.Errorf("inject %q: %w", parameterValue.Name, err)
        }
    }

    for _, projectVersionParameter := range compute.parameterSchemasByName {
        if _, ok := compute.computedParameterByName[projectVersionParameter.Name]; ok {
            continue
        }
        return nil, NoValueError{
            ParameterID: projectVersionParameter.ID,
            ParameterName: projectVersionParameter.Name,
        }
    }

    values := make([]Value, 0, len(compute.computedParameterByName))
    for _, value := range compute.computedParameterByName {
        values = append(values, value)
    }
    return values, nil
}

type compute struct {
    db database.Store
    computedParameterByName map[string]Value
    parameterSchemasByName map[string]database.ParameterSchema
}

// Validates and computes the value for parameters; setting the value on "parameterByName".
func (c *compute) injectScope(ctx context.Context, scopeParams database.GetParameterValuesByScopeParams) error {
    scopedParameters, err := c.db.GetParameterValuesByScope(ctx, scopeParams)
    if errors.Is(err, sql.ErrNoRows) {
        err = nil
    }
    if err != nil {
        return xerrors.Errorf("get %s parameters: %w", scopeParams.Scope, err)
    }

    for _, scopedParameter := range scopedParameters {
        err = c.injectSingle(scopedParameter)
        if err != nil {
            return xerrors.Errorf("inject single %q: %w", scopedParameter.Name, err)
        }
    }
    return nil
}

func (c *compute) injectSingle(scopedParameter database.ParameterValue) error {
    parameterSchema, hasParameterSchema := c.parameterSchemasByName[scopedParameter.Name]
    if hasParameterSchema {
        // Don't inject parameters that aren't defined by the project.
        _, hasExistingParameter := c.computedParameterByName[scopedParameter.Name]
        if hasExistingParameter {
            // If a parameter already exists, check if this variable can override it.
            // Injection hierarchy is the responsibility of the caller. This check ensures
            // project parameters cannot be overridden if already set.
            if !parameterSchema.AllowOverrideSource && scopedParameter.Scope != database.ParameterScopeProject {
                return nil
            }
        }
    }

    destinationScheme, err := convertDestinationScheme(scopedParameter.DestinationScheme)
    if err != nil {
        return xerrors.Errorf("convert destination scheme: %w", err)
    }

    switch scopedParameter.SourceScheme {
    case database.ParameterSourceSchemeData:
        c.computedParameterByName[scopedParameter.Name] = Value{
            Proto: &proto.ParameterValue{
                DestinationScheme: destinationScheme,
                Name: scopedParameter.SourceValue,
                Value: scopedParameter.DestinationValue,
            },
        }
    default:
        return xerrors.Errorf("unsupported source scheme: %q", string(parameterSchema.DefaultSourceScheme))
    }
    return nil
}

// Converts the database destination scheme to the protobuf version.
func convertDestinationScheme(scheme database.ParameterDestinationScheme) (proto.ParameterDestination_Scheme, error) {
    switch scheme {
    case database.ParameterDestinationSchemeEnvironmentVariable:
        return proto.ParameterDestination_ENVIRONMENT_VARIABLE, nil
    case database.ParameterDestinationSchemeProvisionerVariable:
        return proto.ParameterDestination_PROVISIONER_VARIABLE, nil
    default:
        return 0, xerrors.Errorf("unsupported destination scheme: %q", scheme)
    }
}

type NoValueError struct {
    ParameterID uuid.UUID
    ParameterName string
}

func (e NoValueError) Error() string {
    return fmt.Sprintf("no value for parameter %q found", e.ParameterName)
}
@ -1,248 +0,0 @@
package projectparameter_test

import (
    "context"
    "database/sql"
    "testing"

    "github.com/google/uuid"
    "github.com/stretchr/testify/require"

    "github.com/coder/coder/coderd/projectparameter"
    "github.com/coder/coder/cryptorand"
    "github.com/coder/coder/database"
    "github.com/coder/coder/database/databasefake"
    "github.com/coder/coder/provisionersdk/proto"
)

func TestCompute(t *testing.T) {
    t.Parallel()
    generateScope := func() projectparameter.Scope {
        return projectparameter.Scope{
            ImportJobID: uuid.New(),
            OrganizationID: uuid.NewString(),
            ProjectID: uuid.NullUUID{
                UUID: uuid.New(),
                Valid: true,
            },
            WorkspaceID: uuid.NullUUID{
                UUID: uuid.New(),
                Valid: true,
            },
            UserID: sql.NullString{
                String: uuid.NewString(),
                Valid: true,
            },
        }
    }
    type projectParameterOptions struct {
        AllowOverrideSource bool
        AllowOverrideDestination bool
        DefaultDestinationScheme database.ParameterDestinationScheme
        ImportJobID uuid.UUID
    }
    generateProjectParameter := func(t *testing.T, db database.Store, opts projectParameterOptions) database.ParameterSchema {
        if opts.DefaultDestinationScheme == "" {
            opts.DefaultDestinationScheme = database.ParameterDestinationSchemeEnvironmentVariable
        }
        name, err := cryptorand.String(8)
        require.NoError(t, err)
        sourceValue, err := cryptorand.String(8)
        require.NoError(t, err)
        destinationValue, err := cryptorand.String(8)
        require.NoError(t, err)
        param, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{
            ID: uuid.New(),
            Name: name,
            JobID: opts.ImportJobID,
            DefaultSourceScheme: database.ParameterSourceSchemeData,
            DefaultSourceValue: sql.NullString{
                String: sourceValue,
                Valid: true,
            },
            DefaultDestinationValue: sql.NullString{
                String: destinationValue,
                Valid: true,
            },
            AllowOverrideSource: opts.AllowOverrideSource,
            AllowOverrideDestination: opts.AllowOverrideDestination,
            DefaultDestinationScheme: opts.DefaultDestinationScheme,
        })
        require.NoError(t, err)
        return param
    }

    t.Run("NoValue", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameter, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{
            ID: uuid.New(),
            JobID: scope.ImportJobID,
            Name: "hey",
        })
        require.NoError(t, err)

        _, err = projectparameter.Compute(context.Background(), db, scope)
        var noValueErr projectparameter.NoValueError
        require.ErrorAs(t, err, &noValueErr)
        require.Equal(t, parameter.ID.String(), noValueErr.ParameterID.String())
        require.Equal(t, parameter.Name, noValueErr.ParameterName)
    })

    t.Run("UseDefaultProjectValue", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameter := generateProjectParameter(t, db, projectParameterOptions{
            ImportJobID: scope.ImportJobID,
            DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable,
        })
        values, err := projectparameter.Compute(context.Background(), db, scope)
        require.NoError(t, err)
        require.Len(t, values, 1)
        value := values[0]
        require.True(t, value.DefaultValue)
        require.Equal(t, database.ParameterScopeProject, value.Scope)
        require.Equal(t, scope.ProjectID.UUID.String(), value.ScopeID)
        require.Equal(t, value.Proto.Name, parameter.DefaultDestinationValue.String)
        require.Equal(t, value.Proto.DestinationScheme, proto.ParameterDestination_PROVISIONER_VARIABLE)
        require.Equal(t, value.Proto.Value, parameter.DefaultSourceValue.String)
    })

    t.Run("OverrideOrganizationWithProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameter := generateProjectParameter(t, db, projectParameterOptions{
            ImportJobID: scope.ImportJobID,
        })
        _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameter.Name,
            Scope: database.ParameterScopeOrganization,
            ScopeID: scope.OrganizationID,
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "nop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
            DestinationValue: "organizationvalue",
        })
        require.NoError(t, err)

        values, err := projectparameter.Compute(context.Background(), db, scope)
        require.NoError(t, err)
        require.Len(t, values, 1)
        require.Equal(t, true, values[0].DefaultValue)
        require.Equal(t, parameter.DefaultSourceValue.String, values[0].Proto.Value)
    })

    t.Run("ProjectOverridesProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameter := generateProjectParameter(t, db, projectParameterOptions{
            ImportJobID: scope.ImportJobID,
        })
        value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameter.Name,
            Scope: database.ParameterScopeProject,
            ScopeID: scope.ProjectID.UUID.String(),
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "nop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
            DestinationValue: "projectvalue",
        })
        require.NoError(t, err)

        values, err := projectparameter.Compute(context.Background(), db, scope)
        require.NoError(t, err)
        require.Len(t, values, 1)
        require.Equal(t, false, values[0].DefaultValue)
        require.Equal(t, value.DestinationValue, values[0].Proto.Value)
    })

    t.Run("WorkspaceCannotOverwriteProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameter := generateProjectParameter(t, db, projectParameterOptions{
            ImportJobID: scope.ImportJobID,
        })
        _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
            ID: uuid.New(),
            Name: parameter.Name,
            Scope: database.ParameterScopeWorkspace,
            ScopeID: scope.WorkspaceID.UUID.String(),
            SourceScheme: database.ParameterSourceSchemeData,
            SourceValue: "nop",
            DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
            DestinationValue: "projectvalue",
        })
        require.NoError(t, err)

        values, err := projectparameter.Compute(context.Background(), db, scope)
        require.NoError(t, err)
        require.Len(t, values, 1)
        require.Equal(t, true, values[0].DefaultValue)
    })

    t.Run("WorkspaceOverwriteProjectDefault", func(t *testing.T) {
        t.Parallel()
        db := databasefake.New()
        scope := generateScope()
        parameter := generateProjectParameter(t, db, projectParameterOptions{
            AllowOverrideSource: true,
            ImportJobID: scope.ImportJobID,
        })
        _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
|
||||
ID: uuid.New(),
|
||||
Name: parameter.Name,
|
||||
Scope: database.ParameterScopeWorkspace,
|
||||
ScopeID: scope.WorkspaceID.UUID.String(),
|
||||
SourceScheme: database.ParameterSourceSchemeData,
|
||||
SourceValue: "nop",
|
||||
DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
|
||||
DestinationValue: "projectvalue",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
values, err := projectparameter.Compute(context.Background(), db, scope)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, values, 1)
|
||||
require.Equal(t, false, values[0].DefaultValue)
|
||||
})
|
||||
|
||||
t.Run("AdditionalOverwriteWorkspace", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := databasefake.New()
|
||||
scope := generateScope()
|
||||
parameter := generateProjectParameter(t, db, projectParameterOptions{
|
||||
AllowOverrideSource: true,
|
||||
ImportJobID: scope.ImportJobID,
|
||||
})
|
||||
_, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{
|
||||
ID: uuid.New(),
|
||||
Name: parameter.Name,
|
||||
Scope: database.ParameterScopeWorkspace,
|
||||
ScopeID: scope.WorkspaceID.UUID.String(),
|
||||
SourceScheme: database.ParameterSourceSchemeData,
|
||||
SourceValue: "nop",
|
||||
DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
|
||||
DestinationValue: "projectvalue",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
values, err := projectparameter.Compute(context.Background(), db, scope, database.ParameterValue{
|
||||
Name: parameter.Name,
|
||||
Scope: database.ParameterScopeUser,
|
||||
SourceScheme: database.ParameterSourceSchemeData,
|
||||
SourceValue: "nop",
|
||||
DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
|
||||
DestinationValue: "testing",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, values, 1)
|
||||
require.Equal(t, "testing", values[0].Proto.Value)
|
||||
})
}
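
Taken together, these cases pin down the precedence projectparameter.Compute applies: the schema's project default is used when nothing more specific is stored and even beats an organization-scoped value, a project-scoped value overrides the default, a workspace-scoped value only wins when the schema sets AllowOverrideSource, and values passed directly to Compute override anything stored. A rough sketch of a call outside the test helpers follows; ctx, db, and the ID variables are placeholders, not identifiers from this change.

scope := projectparameter.Scope{
	ImportJobID:    importJobID,
	OrganizationID: organizationID,
	ProjectID:      uuid.NullUUID{UUID: projectID, Valid: true},
	WorkspaceID:    uuid.NullUUID{UUID: workspaceID, Valid: true},
	UserID:         sql.NullString{String: userID, Valid: true},
}
// Extra values (the variadic argument) take precedence over everything stored.
values, err := projectparameter.Compute(ctx, db, scope)
if err != nil {
	// A projectparameter.NoValueError means a schema had neither a default
	// nor a stored value in any scope.
	return err
}
for _, value := range values {
	// value.DefaultValue reports whether the schema default was used;
	// value.Proto is the parameter handed to the provisioner.
	_ = value
}
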
@ -16,6 +16,17 @@ import (
|
||||
"github.com/coder/coder/httpmw"
|
||||
)

// ParameterValue represents a set value for the scope.
type ParameterValue database.ParameterValue

// CreateParameterValueRequest is used to create a new parameter value for a scope.
type CreateParameterValueRequest struct {
Name string `json:"name" validate:"required"`
SourceValue string `json:"source_value" validate:"required"`
SourceScheme database.ParameterSourceScheme `json:"source_scheme" validate:"oneof=data,required"`
DestinationScheme database.ParameterDestinationScheme `json:"destination_scheme" validate:"oneof=environment_variable provisioner_variable,required"`
}
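
As a concrete illustration, a request that satisfies these validations looks like the following; the names and values are arbitrary and mirror the ones used by the project parameter tests later in this change.

req := CreateParameterValueRequest{
	Name:              "somename",
	SourceValue:       "tomato",
	SourceScheme:      database.ParameterSourceSchemeData,                     // the only accepted source scheme
	DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, // or provisioner_variable
}
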
// Project is the JSON representation of a Coder project.
|
||||
// This type matches the database object for now, but is
|
||||
// abstracted for ease of change later on.
|
||||
@ -177,16 +188,60 @@ func (*api) projectByOrganization(rw http.ResponseWriter, r *http.Request) {
|
||||
// This should validate the calling user has permissions!
|
||||
func (api *api) postParametersByProject(rw http.ResponseWriter, r *http.Request) {
|
||||
project := httpmw.ProjectParam(r)
|
||||
var createRequest CreateParameterValueRequest
|
||||
if !httpapi.Read(rw, r, &createRequest) {
|
||||
return
|
||||
}
|
||||
parameterValue, err := api.Database.InsertParameterValue(r.Context(), database.InsertParameterValueParams{
|
||||
ID: uuid.New(),
|
||||
Name: createRequest.Name,
|
||||
CreatedAt: database.Now(),
|
||||
UpdatedAt: database.Now(),
|
||||
Scope: database.ParameterScopeProject,
|
||||
ScopeID: project.ID.String(),
|
||||
SourceScheme: createRequest.SourceScheme,
|
||||
SourceValue: createRequest.SourceValue,
|
||||
DestinationScheme: createRequest.DestinationScheme,
|
||||
})
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("insert parameter value: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
postParameterValueForScope(rw, r, api.Database, database.ParameterScopeProject, project.ID.String())
|
||||
render.Status(r, http.StatusCreated)
|
||||
render.JSON(rw, r, parameterValue)
|
||||
}
|
||||
|
||||
// Lists parameters for a project.
|
||||
func (api *api) parametersByProject(rw http.ResponseWriter, r *http.Request) {
|
||||
project := httpmw.ProjectParam(r)
|
||||
|
||||
parametersForScope(rw, r, api.Database, database.GetParameterValuesByScopeParams{
|
||||
parameterValues, err := api.Database.GetParameterValuesByScope(r.Context(), database.GetParameterValuesByScopeParams{
|
||||
Scope: database.ParameterScopeProject,
|
||||
ScopeID: project.ID.String(),
|
||||
})
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
parameterValues = []database.ParameterValue{}
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("get parameter values: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
apiParameterValues := make([]ParameterValue, 0, len(parameterValues))
|
||||
for _, parameterValue := range parameterValues {
|
||||
apiParameterValues = append(apiParameterValues, convertParameterValue(parameterValue))
|
||||
}
|
||||
|
||||
render.Status(r, http.StatusOK)
|
||||
render.JSON(rw, r, apiParameterValues)
|
||||
}
|
||||
|
||||
func convertParameterValue(parameterValue database.ParameterValue) ParameterValue {
|
||||
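// Strip the source value so it is not echoed back in API responses.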
parameterValue.SourceValue = ""
|
||||
return ParameterValue(parameterValue)
|
||||
}
|
||||
|
@ -30,7 +30,7 @@ func TestProjects(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
_ = coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
projects, err := client.Projects(context.Background(), "")
|
||||
require.NoError(t, err)
|
||||
@ -54,7 +54,7 @@ func TestProjectsByOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
_ = coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
projects, err := client.Projects(context.Background(), "")
|
||||
require.NoError(t, err)
|
||||
@ -68,7 +68,7 @@ func TestPostProjectsByOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
_ = coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
})
|
||||
|
||||
@ -76,7 +76,7 @@ func TestPostProjectsByOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.CreateProject(context.Background(), user.Organization, coderd.CreateProjectRequest{
|
||||
Name: project.Name,
|
||||
@ -94,7 +94,7 @@ func TestProjectByOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.Project(context.Background(), user.Organization, project.Name)
|
||||
require.NoError(t, err)
|
||||
@ -107,14 +107,13 @@ func TestPostParametersByProject(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.CreateProjectParameter(context.Background(), user.Organization, project.Name, coderd.CreateParameterValueRequest{
|
||||
Name: "somename",
|
||||
SourceValue: "tomato",
|
||||
SourceScheme: database.ParameterSourceSchemeData,
|
||||
DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
|
||||
DestinationValue: "moo",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
})
|
||||
@ -126,7 +125,7 @@ func TestParametersByProject(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
params, err := client.ProjectParameters(context.Background(), user.Organization, project.Name)
|
||||
require.NoError(t, err)
|
||||
@ -137,14 +136,13 @@ func TestParametersByProject(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.CreateProjectParameter(context.Background(), user.Organization, project.Name, coderd.CreateParameterValueRequest{
|
||||
Name: "example",
|
||||
SourceValue: "source-value",
|
||||
SourceScheme: database.ParameterSourceSchemeData,
|
||||
DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable,
|
||||
DestinationValue: "destination-value",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
params, err := client.ProjectParameters(context.Background(), user.Organization, project.Name)
|
||||
|
@ -26,26 +26,6 @@ type ProjectVersion struct {
|
||||
ImportJobID uuid.UUID `json:"import_job_id"`
|
||||
}
|
||||
|
||||
// ProjectVersionParameter represents a parameter parsed from project version source on creation.
|
||||
type ProjectVersionParameter struct {
|
||||
ID uuid.UUID `json:"id"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description,omitempty"`
|
||||
DefaultSourceScheme database.ParameterSourceScheme `json:"default_source_scheme,omitempty"`
|
||||
DefaultSourceValue string `json:"default_source_value,omitempty"`
|
||||
AllowOverrideSource bool `json:"allow_override_source"`
|
||||
DefaultDestinationScheme database.ParameterDestinationScheme `json:"default_destination_scheme,omitempty"`
|
||||
DefaultDestinationValue string `json:"default_destination_value,omitempty"`
|
||||
AllowOverrideDestination bool `json:"allow_override_destination"`
|
||||
DefaultRefresh string `json:"default_refresh"`
|
||||
RedisplayValue bool `json:"redisplay_value"`
|
||||
ValidationError string `json:"validation_error,omitempty"`
|
||||
ValidationCondition string `json:"validation_condition,omitempty"`
|
||||
ValidationTypeSystem database.ParameterTypeSystem `json:"validation_type_system,omitempty"`
|
||||
ValidationValueType string `json:"validation_value_type,omitempty"`
|
||||
}
|
||||
|
||||
// CreateProjectVersionRequest enables callers to create a new Project Version.
|
||||
type CreateProjectVersionRequest struct {
|
||||
ImportJobID uuid.UUID `json:"import_job_id" validate:"required"`
|
||||
@ -121,50 +101,6 @@ func (api *api) postProjectVersionByOrganization(rw http.ResponseWriter, r *http
|
||||
render.JSON(rw, r, convertProjectVersion(projectVersion))
|
||||
}
|
||||
|
||||
func (api *api) projectVersionParametersByOrganizationAndName(rw http.ResponseWriter, r *http.Request) {
|
||||
projectVersion := httpmw.ProjectVersionParam(r)
|
||||
job, err := api.Database.GetProvisionerJobByID(r.Context(), projectVersion.ImportJobID)
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("get provisioner job: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
apiJob := convertProvisionerJob(job)
|
||||
if !apiJob.Status.Completed() {
|
||||
httpapi.Write(rw, http.StatusPreconditionRequired, httpapi.Response{
|
||||
Message: fmt.Sprintf("import job hasn't completed: %s", apiJob.Status),
|
||||
})
|
||||
return
|
||||
}
|
||||
if apiJob.Status != ProvisionerJobStatusSucceeded {
|
||||
httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{
|
||||
Message: "import job wasn't successful. no parameters were parsed",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
parameters, err := api.Database.GetParameterSchemasByJobID(r.Context(), projectVersion.ImportJobID)
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
parameters = []database.ParameterSchema{}
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("get project parameters: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
apiParameters := make([]ProjectVersionParameter, 0, len(parameters))
|
||||
for _, parameter := range parameters {
|
||||
apiParameters = append(apiParameters, convertProjectParameter(parameter))
|
||||
}
|
||||
|
||||
render.Status(r, http.StatusOK)
|
||||
render.JSON(rw, r, apiParameters)
|
||||
}
|
||||
|
||||
func convertProjectVersion(version database.ProjectVersion) ProjectVersion {
|
||||
return ProjectVersion{
|
||||
ID: version.ID,
|
||||
@ -175,24 +111,3 @@ func convertProjectVersion(version database.ProjectVersion) ProjectVersion {
|
||||
ImportJobID: version.ImportJobID,
|
||||
}
|
||||
}
|
||||
|
||||
func convertProjectParameter(parameter database.ParameterSchema) ProjectVersionParameter {
|
||||
return ProjectVersionParameter{
|
||||
ID: parameter.ID,
|
||||
CreatedAt: parameter.CreatedAt,
|
||||
Name: parameter.Name,
|
||||
Description: parameter.Description,
|
||||
DefaultSourceScheme: parameter.DefaultSourceScheme,
|
||||
DefaultSourceValue: parameter.DefaultSourceValue.String,
|
||||
AllowOverrideSource: parameter.AllowOverrideSource,
|
||||
DefaultDestinationScheme: parameter.DefaultDestinationScheme,
|
||||
DefaultDestinationValue: parameter.DefaultDestinationValue.String,
|
||||
AllowOverrideDestination: parameter.AllowOverrideDestination,
|
||||
DefaultRefresh: parameter.DefaultRefresh,
|
||||
RedisplayValue: parameter.RedisplayValue,
|
||||
ValidationError: parameter.ValidationError,
|
||||
ValidationCondition: parameter.ValidationCondition,
|
||||
ValidationTypeSystem: parameter.ValidationTypeSystem,
|
||||
ValidationValueType: parameter.ValidationValueType,
|
||||
}
|
||||
}
|
||||
|
@ -2,16 +2,12 @@ package coderd_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/coder/coderd"
|
||||
"github.com/coder/coder/coderd/coderdtest"
|
||||
"github.com/coder/coder/codersdk"
|
||||
"github.com/coder/coder/provisioner/echo"
|
||||
"github.com/coder/coder/provisionersdk/proto"
|
||||
)
|
||||
|
||||
func TestProjectVersionsByOrganization(t *testing.T) {
|
||||
@ -20,7 +16,7 @@ func TestProjectVersionsByOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
versions, err := client.ProjectVersions(context.Background(), user.Organization, project.Name)
|
||||
require.NoError(t, err)
|
||||
@ -35,7 +31,7 @@ func TestProjectVersionByOrganizationAndName(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.ProjectVersion(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String())
|
||||
require.NoError(t, err)
|
||||
@ -48,7 +44,7 @@ func TestPostProjectVersionByOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.CreateProjectVersion(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{
|
||||
ImportJobID: job.ID,
|
||||
@ -56,57 +52,3 @@ func TestPostProjectVersionByOrganization(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestProjectVersionParametersByOrganizationAndName(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Run("NotImported", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
_, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String())
|
||||
var apiErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &apiErr)
|
||||
require.Equal(t, http.StatusPreconditionRequired, apiErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("FailedImport", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
_ = coderdtest.NewProvisionerDaemon(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
|
||||
Provision: []*proto.Provision_Response{{}},
|
||||
})
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
|
||||
_, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String())
|
||||
var apiErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &apiErr)
|
||||
require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode())
|
||||
})
|
||||
t.Run("List", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
_ = coderdtest.NewProvisionerDaemon(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
|
||||
Parse: []*proto.Parse_Response{{
|
||||
Type: &proto.Parse_Response_Complete{
|
||||
Complete: &proto.Parse_Complete{
|
||||
ParameterSchemas: []*proto.ParameterSchema{{
|
||||
Name: "example",
|
||||
}},
|
||||
},
|
||||
},
|
||||
}},
|
||||
Provision: echo.ProvisionComplete,
|
||||
})
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
|
||||
params, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String())
|
||||
require.NoError(t, err)
|
||||
require.Len(t, params, 1)
|
||||
})
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ import (
|
||||
|
||||
"cdr.dev/slog"
|
||||
|
||||
"github.com/coder/coder/coderd/projectparameter"
|
||||
"github.com/coder/coder/coderd/parameter"
|
||||
"github.com/coder/coder/database"
|
||||
"github.com/coder/coder/httpapi"
|
||||
"github.com/coder/coder/provisionerd/proto"
|
||||
@ -109,16 +109,6 @@ type workspaceProvisionJob struct {
|
||||
DryRun bool `json:"dry_run"`
|
||||
}
|
||||
|
||||
// The input for a "project_import" job.
|
||||
type projectVersionImportJob struct {
|
||||
OrganizationID string `json:"organization_id"`
|
||||
ProjectID uuid.UUID `json:"project_id"`
|
||||
|
||||
AdditionalParameters []database.ParameterValue `json:"parameters"`
|
||||
SkipParameterSchemas bool `json:"skip_parameter_schemas"`
|
||||
SkipResources bool `json:"skip_resources"`
|
||||
}
|
||||
|
||||
// Implementation of the provisioner daemon protobuf server.
|
||||
type provisionerdServer struct {
|
||||
ID uuid.UUID
|
||||
@ -205,36 +195,38 @@ func (server *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty
|
||||
if err != nil {
|
||||
return nil, failJob(fmt.Sprintf("get project: %s", err))
|
||||
}
|
||||
organization, err := server.Database.GetOrganizationByID(ctx, project.OrganizationID)
|
||||
if err != nil {
|
||||
return nil, failJob(fmt.Sprintf("get organization: %s", err))
|
||||
}
|
||||
|
||||
// Compute parameters for the workspace to consume.
|
||||
parameters, err := projectparameter.Compute(ctx, server.Database, projectparameter.Scope{
|
||||
ImportJobID: projectVersion.ImportJobID,
|
||||
OrganizationID: organization.ID,
|
||||
parameters, err := parameter.Compute(ctx, server.Database, parameter.ComputeScope{
|
||||
ProjectImportJobID: projectVersion.ImportJobID,
|
||||
OrganizationID: job.OrganizationID,
|
||||
ProjectID: uuid.NullUUID{
|
||||
UUID: project.ID,
|
||||
Valid: true,
|
||||
},
|
||||
UserID: sql.NullString{
|
||||
String: user.ID,
|
||||
Valid: true,
|
||||
},
|
||||
UserID: user.ID,
|
||||
WorkspaceID: uuid.NullUUID{
|
||||
UUID: workspace.ID,
|
||||
Valid: true,
|
||||
},
|
||||
})
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return nil, failJob(fmt.Sprintf("compute parameters: %s", err))
|
||||
}
|
||||
// Convert parameters to the protobuf type.
|
||||
protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters))
|
||||
for _, parameter := range parameters {
|
||||
protoParameters = append(protoParameters, parameter.Proto)
|
||||
for _, computedParameter := range parameters {
|
||||
converted, err := convertComputedParameterValue(computedParameter)
|
||||
if err != nil {
|
||||
return nil, failJob(fmt.Sprintf("convert parameter: %s", err))
|
||||
}
|
||||
protoParameters = append(protoParameters, converted)
|
||||
}
|
||||
protoParameters = append(protoParameters, &sdkproto.ParameterValue{
|
||||
DestinationScheme: sdkproto.ParameterDestination_PROVISIONER_VARIABLE,
|
||||
Name: parameter.CoderWorkspaceTransition,
|
||||
Value: string(workspaceHistory.Transition),
|
||||
})
|
||||
|
||||
protoJob.Type = &proto.AcquiredJob_WorkspaceProvision_{
|
||||
WorkspaceProvision: &proto.AcquiredJob_WorkspaceProvision{
|
||||
@ -245,40 +237,8 @@ func (server *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty
|
||||
},
|
||||
}
|
||||
case database.ProvisionerJobTypeProjectVersionImport:
|
||||
var input projectVersionImportJob
|
||||
err = json.Unmarshal(job.Input, &input)
|
||||
if err != nil {
|
||||
return nil, failJob(fmt.Sprintf("unmarshal job input %q: %s", job.Input, err))
|
||||
}
|
||||
|
||||
// Compute parameters for the workspace to consume.
|
||||
parameters, err := projectparameter.Compute(ctx, server.Database, projectparameter.Scope{
|
||||
ImportJobID: job.ID,
|
||||
OrganizationID: input.OrganizationID,
|
||||
ProjectID: uuid.NullUUID{
|
||||
UUID: input.ProjectID,
|
||||
Valid: input.ProjectID.String() != uuid.Nil.String(),
|
||||
},
|
||||
UserID: sql.NullString{
|
||||
String: user.ID,
|
||||
Valid: true,
|
||||
},
|
||||
}, input.AdditionalParameters...)
|
||||
if err != nil {
|
||||
return nil, failJob(fmt.Sprintf("compute parameters: %s", err))
|
||||
}
|
||||
// Convert parameters to the protobuf type.
|
||||
protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters))
|
||||
for _, parameter := range parameters {
|
||||
protoParameters = append(protoParameters, parameter.Proto)
|
||||
}
|
||||
|
||||
protoJob.Type = &proto.AcquiredJob_ProjectImport_{
|
||||
ProjectImport: &proto.AcquiredJob_ProjectImport{
|
||||
ParameterValues: protoParameters,
|
||||
SkipParameterSchemas: input.SkipParameterSchemas,
|
||||
SkipResources: input.SkipResources,
|
||||
},
|
||||
ProjectImport: &proto.AcquiredJob_ProjectImport{},
|
||||
}
|
||||
}
|
||||
switch job.StorageMethod {
|
||||
@ -295,45 +255,41 @@ func (server *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty
|
||||
return protoJob, err
|
||||
}
|
||||
|
||||
func (server *provisionerdServer) UpdateJob(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error {
|
||||
for {
|
||||
update, err := stream.Recv()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
parsedID, err := uuid.Parse(update.JobId)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("parse job id: %w", err)
|
||||
}
|
||||
job, err := server.Database.GetProvisionerJobByID(stream.Context(), parsedID)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("get job: %w", err)
|
||||
}
|
||||
if !job.WorkerID.Valid {
|
||||
return xerrors.New("job isn't running yet")
|
||||
}
|
||||
if job.WorkerID.UUID.String() != server.ID.String() {
|
||||
return xerrors.New("you don't own this job")
|
||||
}
|
||||
func (server *provisionerdServer) UpdateJob(ctx context.Context, request *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) {
|
||||
parsedID, err := uuid.Parse(request.JobId)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("parse job id: %w", err)
|
||||
}
|
||||
job, err := server.Database.GetProvisionerJobByID(ctx, parsedID)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("get job: %w", err)
|
||||
}
|
||||
if !job.WorkerID.Valid {
|
||||
return nil, xerrors.New("job isn't running yet")
|
||||
}
|
||||
if job.WorkerID.UUID.String() != server.ID.String() {
|
||||
return nil, xerrors.New("you don't own this job")
|
||||
}
|
||||
err = server.Database.UpdateProvisionerJobByID(ctx, database.UpdateProvisionerJobByIDParams{
|
||||
ID: parsedID,
|
||||
UpdatedAt: database.Now(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("update job: %w", err)
|
||||
}
|
||||
|
||||
err = server.Database.UpdateProvisionerJobByID(stream.Context(), database.UpdateProvisionerJobByIDParams{
|
||||
ID: parsedID,
|
||||
UpdatedAt: database.Now(),
|
||||
})
|
||||
if err != nil {
|
||||
return xerrors.Errorf("update job: %w", err)
|
||||
}
|
||||
if len(request.Logs) > 0 {
|
||||
insertParams := database.InsertProvisionerJobLogsParams{
|
||||
JobID: parsedID,
|
||||
}
|
||||
for _, log := range update.Logs {
|
||||
for _, log := range request.Logs {
|
||||
logLevel, err := convertLogLevel(log.Level)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("convert log level: %w", err)
|
||||
return nil, xerrors.Errorf("convert log level: %w", err)
|
||||
}
|
||||
logSource, err := convertLogSource(log.Source)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("convert log source: %w", err)
|
||||
return nil, xerrors.Errorf("convert log source: %w", err)
|
||||
}
|
||||
insertParams.ID = append(insertParams.ID, uuid.New())
|
||||
insertParams.CreatedAt = append(insertParams.CreatedAt, time.UnixMilli(log.CreatedAt))
|
||||
@ -343,17 +299,93 @@ func (server *provisionerdServer) UpdateJob(stream proto.DRPCProvisionerDaemon_U
|
||||
}
|
||||
logs, err := server.Database.InsertProvisionerJobLogs(context.Background(), insertParams)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("insert job logs: %w", err)
|
||||
return nil, xerrors.Errorf("insert job logs: %w", err)
|
||||
}
|
||||
data, err := json.Marshal(logs)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("marshal job log: %w", err)
|
||||
return nil, xerrors.Errorf("marshal job log: %w", err)
|
||||
}
|
||||
err = server.Pubsub.Publish(provisionerJobLogsChannel(parsedID), data)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("publish job log: %w", err)
|
||||
return nil, xerrors.Errorf("publish job log: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if len(request.ParameterSchemas) > 0 {
|
||||
for _, protoParameter := range request.ParameterSchemas {
|
||||
validationTypeSystem, err := convertValidationTypeSystem(protoParameter.ValidationTypeSystem)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert validation type system for %q: %w", protoParameter.Name, err)
|
||||
}
|
||||
|
||||
parameterSchema := database.InsertParameterSchemaParams{
|
||||
ID: uuid.New(),
|
||||
CreatedAt: database.Now(),
|
||||
JobID: job.ID,
|
||||
Name: protoParameter.Name,
|
||||
Description: protoParameter.Description,
|
||||
RedisplayValue: protoParameter.RedisplayValue,
|
||||
ValidationError: protoParameter.ValidationError,
|
||||
ValidationCondition: protoParameter.ValidationCondition,
|
||||
ValidationValueType: protoParameter.ValidationValueType,
|
||||
ValidationTypeSystem: validationTypeSystem,
|
||||
|
||||
DefaultSourceScheme: database.ParameterSourceSchemeNone,
|
||||
DefaultDestinationScheme: database.ParameterDestinationSchemeNone,
|
||||
|
||||
AllowOverrideDestination: protoParameter.AllowOverrideDestination,
|
||||
AllowOverrideSource: protoParameter.AllowOverrideSource,
|
||||
}
|
||||
|
||||
// It's possible a parameter doesn't define a default source!
|
||||
if protoParameter.DefaultSource != nil {
|
||||
parameterSourceScheme, err := convertParameterSourceScheme(protoParameter.DefaultSource.Scheme)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert parameter source scheme: %w", err)
|
||||
}
|
||||
parameterSchema.DefaultSourceScheme = parameterSourceScheme
|
||||
parameterSchema.DefaultSourceValue = protoParameter.DefaultSource.Value
|
||||
}
|
||||
|
||||
// It's possible a parameter doesn't define a default destination!
|
||||
if protoParameter.DefaultDestination != nil {
|
||||
parameterDestinationScheme, err := convertParameterDestinationScheme(protoParameter.DefaultDestination.Scheme)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert parameter destination scheme: %w", err)
|
||||
}
|
||||
parameterSchema.DefaultDestinationScheme = parameterDestinationScheme
|
||||
}
|
||||
|
||||
_, err = server.Database.InsertParameterSchema(ctx, parameterSchema)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("insert parameter schema: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
parameters, err := parameter.Compute(ctx, server.Database, parameter.ComputeScope{
|
||||
ProjectImportJobID: job.ID,
|
||||
OrganizationID: job.OrganizationID,
|
||||
UserID: job.InitiatorID,
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("compute parameters: %w", err)
|
||||
}
|
||||
// Convert parameters to the protobuf type.
|
||||
protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters))
|
||||
for _, computedParameter := range parameters {
|
||||
converted, err := convertComputedParameterValue(computedParameter)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert parameter: %s", err)
|
||||
}
|
||||
protoParameters = append(protoParameters, converted)
|
||||
}
|
||||
|
||||
return &proto.UpdateJobResponse{
|
||||
ParameterValues: protoParameters,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return &proto.UpdateJobResponse{}, nil
}
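
UpdateJob is now a plain unary RPC rather than a stream: the daemon reports buffered logs and any freshly parsed parameter schemas in a single request, and once schemas exist the response carries the computed parameter values back to the daemon. A rough sketch of the daemon-side call, where client, jobID, pendingLogs, and parsedSchemas are placeholder names for illustration only:

resp, err := client.UpdateJob(ctx, &proto.UpdateJobRequest{
	JobId:            jobID.String(),
	Logs:             pendingLogs,   // logs buffered since the last update
	ParameterSchemas: parsedSchemas, // schemas from the provisioner's parse output, if any
})
if err != nil {
	return err
}
// Populated once parameter schemas have been stored for the import job.
_ = resp.ParameterValues
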
func (server *provisionerdServer) CancelJob(ctx context.Context, cancelJob *proto.CancelledJob) (*proto.Empty, error) {
|
||||
@ -400,98 +432,48 @@ func (server *provisionerdServer) CompleteJob(ctx context.Context, completed *pr
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("get job by id: %w", err)
|
||||
}
|
||||
// TODO: Check if the worker ID matches!
|
||||
// If it doesn't, a provisioner daemon could be impersonating another job!
|
||||
if job.WorkerID.UUID.String() != server.ID.String() {
|
||||
return nil, xerrors.Errorf("you don't have permission to update this job")
|
||||
}
|
||||
|
||||
switch jobType := completed.Type.(type) {
|
||||
case *proto.CompletedJob_ProjectImport_:
|
||||
var input projectVersionImportJob
|
||||
err = json.Unmarshal(job.Input, &input)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("unmarshal job data: %w", err)
|
||||
for transition, resources := range map[database.WorkspaceTransition][]*sdkproto.Resource{
|
||||
database.WorkspaceTransitionStart: jobType.ProjectImport.StartResources,
|
||||
database.WorkspaceTransitionStop: jobType.ProjectImport.StopResources,
|
||||
} {
|
||||
for _, resource := range resources {
|
||||
server.Logger.Info(ctx, "inserting project import job resource",
|
||||
slog.F("job_id", job.ID.String()),
|
||||
slog.F("resource_name", resource.Name),
|
||||
slog.F("resource_type", resource.Type),
|
||||
slog.F("transition", transition))
|
||||
_, err = server.Database.InsertProjectImportJobResource(ctx, database.InsertProjectImportJobResourceParams{
|
||||
ID: uuid.New(),
|
||||
CreatedAt: database.Now(),
|
||||
JobID: jobID,
|
||||
Transition: transition,
|
||||
Type: resource.Type,
|
||||
Name: resource.Name,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("insert resource: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate that all parameters sent from the provisioner daemon
|
||||
// follow the protocol.
|
||||
parameterSchemas := make([]database.InsertParameterSchemaParams, 0, len(jobType.ProjectImport.ParameterSchemas))
|
||||
for _, protoParameter := range jobType.ProjectImport.ParameterSchemas {
|
||||
validationTypeSystem, err := convertValidationTypeSystem(protoParameter.ValidationTypeSystem)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert validation type system for %q: %w", protoParameter.Name, err)
|
||||
}
|
||||
|
||||
parameterSchema := database.InsertParameterSchemaParams{
|
||||
ID: uuid.New(),
|
||||
CreatedAt: database.Now(),
|
||||
JobID: job.ID,
|
||||
Name: protoParameter.Name,
|
||||
Description: protoParameter.Description,
|
||||
RedisplayValue: protoParameter.RedisplayValue,
|
||||
ValidationError: protoParameter.ValidationError,
|
||||
ValidationCondition: protoParameter.ValidationCondition,
|
||||
ValidationValueType: protoParameter.ValidationValueType,
|
||||
ValidationTypeSystem: validationTypeSystem,
|
||||
|
||||
DefaultSourceScheme: database.ParameterSourceSchemeNone,
|
||||
DefaultDestinationScheme: database.ParameterDestinationSchemeNone,
|
||||
|
||||
AllowOverrideDestination: protoParameter.AllowOverrideDestination,
|
||||
AllowOverrideSource: protoParameter.AllowOverrideSource,
|
||||
}
|
||||
|
||||
// It's possible a parameter doesn't define a default source!
|
||||
if protoParameter.DefaultSource != nil {
|
||||
parameterSourceScheme, err := convertParameterSourceScheme(protoParameter.DefaultSource.Scheme)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert parameter source scheme: %w", err)
|
||||
}
|
||||
parameterSchema.DefaultSourceScheme = parameterSourceScheme
|
||||
parameterSchema.DefaultSourceValue = sql.NullString{
|
||||
String: protoParameter.DefaultSource.Value,
|
||||
Valid: protoParameter.DefaultSource.Value != "",
|
||||
}
|
||||
}
|
||||
|
||||
// It's possible a parameter doesn't define a default destination!
|
||||
if protoParameter.DefaultDestination != nil {
|
||||
parameterDestinationScheme, err := convertParameterDestinationScheme(protoParameter.DefaultDestination.Scheme)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("convert parameter destination scheme: %w", err)
|
||||
}
|
||||
parameterSchema.DefaultDestinationScheme = parameterDestinationScheme
|
||||
parameterSchema.DefaultDestinationValue = sql.NullString{
|
||||
String: protoParameter.DefaultDestination.Value,
|
||||
Valid: protoParameter.DefaultDestination.Value != "",
|
||||
}
|
||||
}
|
||||
|
||||
parameterSchemas = append(parameterSchemas, parameterSchema)
|
||||
}
|
||||
|
||||
// This must occur in a transaction in case of failure.
|
||||
err = server.Database.InTx(func(db database.Store) error {
|
||||
err = db.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{
|
||||
ID: jobID,
|
||||
UpdatedAt: database.Now(),
|
||||
CompletedAt: sql.NullTime{
|
||||
Time: database.Now(),
|
||||
Valid: true,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return xerrors.Errorf("update provisioner job: %w", err)
|
||||
}
|
||||
// This could be a bulk-insert operation to improve performance.
|
||||
// See the "InsertWorkspaceHistoryLogs" query.
|
||||
for _, parameterSchema := range parameterSchemas {
|
||||
_, err = db.InsertParameterSchema(ctx, parameterSchema)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("insert parameter schema %q: %w", parameterSchema.Name, err)
|
||||
}
|
||||
}
|
||||
server.Logger.Debug(ctx, "marked import job as completed", slog.F("job_id", jobID))
|
||||
return nil
|
||||
err = server.Database.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{
|
||||
ID: jobID,
|
||||
UpdatedAt: database.Now(),
|
||||
CompletedAt: sql.NullTime{
|
||||
Time: database.Now(),
|
||||
Valid: true,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("update provisioner job: %w", err)
|
||||
}
|
||||
server.Logger.Debug(ctx, "marked import job as completed", slog.F("job_id", jobID))
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("complete job: %w", err)
|
||||
}
|
||||
@ -614,3 +596,21 @@ func convertLogSource(logSource proto.LogSource) (database.LogSource, error) {
|
||||
return database.LogSource(""), xerrors.Errorf("unknown log source: %d", logSource)
|
||||
}
|
||||
}
|
||||
|
||||
func convertComputedParameterValue(param parameter.ComputedValue) (*sdkproto.ParameterValue, error) {
|
||||
var scheme sdkproto.ParameterDestination_Scheme
|
||||
switch param.DestinationScheme {
|
||||
case database.ParameterDestinationSchemeEnvironmentVariable:
|
||||
scheme = sdkproto.ParameterDestination_ENVIRONMENT_VARIABLE
|
||||
case database.ParameterDestinationSchemeProvisionerVariable:
|
||||
scheme = sdkproto.ParameterDestination_PROVISIONER_VARIABLE
|
||||
default:
|
||||
return nil, xerrors.Errorf("unrecognized destination scheme: %q", param.DestinationScheme)
|
||||
}
|
||||
|
||||
return &sdkproto.ParameterValue{
|
||||
DestinationScheme: scheme,
|
||||
Name: param.Name,
|
||||
Value: param.SourceValue,
|
||||
}, nil
|
||||
}
|
||||
|
@ -11,11 +11,8 @@ import (
|
||||
)
|
||||
|
||||
func TestProvisionerDaemons(t *testing.T) {
|
||||
// Tests for properly processing specific job
|
||||
// types should be placed in their respective
|
||||
// resource location.
|
||||
//
|
||||
// eg. project import is a project-related job
|
||||
// Tests for properly processing specific job types should be placed
|
||||
// in their respective files.
|
||||
t.Parallel()
|
||||
|
||||
client := coderdtest.New(t)
|
||||
|
@ -1,196 +0,0 @@
|
||||
package coderd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/render"
|
||||
"github.com/google/uuid"
|
||||
|
||||
"cdr.dev/slog"
|
||||
"github.com/coder/coder/database"
|
||||
"github.com/coder/coder/httpapi"
|
||||
"github.com/coder/coder/httpmw"
|
||||
)
|
||||
|
||||
// ProvisionerJobLog represents a single log from a provisioner job.
|
||||
type ProvisionerJobLog struct {
|
||||
ID uuid.UUID
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
Source database.LogSource `json:"log_source"`
|
||||
Level database.LogLevel `json:"log_level"`
|
||||
Output string `json:"output"`
|
||||
}
|
||||
|
||||
// Returns provisioner logs based on query parameters.
|
||||
// The intended usage for a client to stream all logs (with JS API):
|
||||
// const timestamp = new Date().getTime();
|
||||
// 1. GET /logs?before=<timestamp>
|
||||
// 2. GET /logs?after=<timestamp>&follow
|
||||
// The combination of these responses should provide all current logs
|
||||
// to the consumer, and future logs are streamed in the follow request.
|
||||
func (api *api) provisionerJobLogsByID(rw http.ResponseWriter, r *http.Request) {
|
||||
follow := r.URL.Query().Has("follow")
|
||||
afterRaw := r.URL.Query().Get("after")
|
||||
beforeRaw := r.URL.Query().Get("before")
|
||||
if beforeRaw != "" && follow {
|
||||
httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
|
||||
Message: "before cannot be used with follow",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
var after time.Time
|
||||
// Only fetch logs created after the time provided.
|
||||
if afterRaw != "" {
|
||||
afterMS, err := strconv.ParseInt(afterRaw, 10, 64)
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
|
||||
Message: fmt.Sprintf("unable to parse after %q: %s", afterRaw, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
after = time.UnixMilli(afterMS)
|
||||
} else {
|
||||
if follow {
|
||||
after = database.Now()
|
||||
}
|
||||
}
|
||||
var before time.Time
|
||||
// Only fetch logs created before the time provided.
|
||||
if beforeRaw != "" {
|
||||
beforeMS, err := strconv.ParseInt(beforeRaw, 10, 64)
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
|
||||
Message: fmt.Sprintf("unable to parse before %q: %s", beforeRaw, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
before = time.UnixMilli(beforeMS)
|
||||
} else {
|
||||
before = database.Now()
|
||||
}
|
||||
|
||||
job := httpmw.ProvisionerJobParam(r)
|
||||
if !follow {
|
||||
logs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{
|
||||
JobID: job.ID,
|
||||
CreatedAfter: after,
|
||||
CreatedBefore: before,
|
||||
})
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("get provisioner logs: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
if logs == nil {
|
||||
logs = []database.ProvisionerJobLog{}
|
||||
}
|
||||
render.Status(r, http.StatusOK)
|
||||
render.JSON(rw, r, logs)
|
||||
return
|
||||
}
|
||||
|
||||
bufferedLogs := make(chan database.ProvisionerJobLog, 128)
|
||||
closeSubscribe, err := api.Pubsub.Subscribe(provisionerJobLogsChannel(job.ID), func(ctx context.Context, message []byte) {
|
||||
var logs []database.ProvisionerJobLog
|
||||
err := json.Unmarshal(message, &logs)
|
||||
if err != nil {
|
||||
api.Logger.Warn(r.Context(), fmt.Sprintf("invalid provisioner job log on channel %q: %s", provisionerJobLogsChannel(job.ID), err.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
for _, log := range logs {
|
||||
select {
|
||||
case bufferedLogs <- log:
|
||||
default:
|
||||
// If this overflows users could miss logs streaming. This can happen
|
||||
// if a database request takes a long amount of time, and we get a lot of logs.
|
||||
api.Logger.Warn(r.Context(), "provisioner job log overflowing channel")
|
||||
}
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("subscribe to provisioner job logs: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
defer closeSubscribe()
|
||||
|
||||
provisionerJobLogs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{
|
||||
JobID: job.ID,
|
||||
CreatedAfter: after,
|
||||
CreatedBefore: before,
|
||||
})
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
provisionerJobLogs = []database.ProvisionerJobLog{}
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprint("get provisioner job logs: %w", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// "follow" uses the ndjson format to stream data.
|
||||
// See: https://canjs.com/doc/can-ndjson-stream.html
|
||||
rw.Header().Set("Content-Type", "application/stream+json")
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
rw.(http.Flusher).Flush()
|
||||
|
||||
// The Go stdlib JSON encoder appends a newline character after message write.
|
||||
encoder := json.NewEncoder(rw)
|
||||
|
||||
for _, provisionerJobLog := range provisionerJobLogs {
|
||||
err = encoder.Encode(convertProvisionerJobLog(provisionerJobLog))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
ticker := time.NewTicker(250 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case <-r.Context().Done():
|
||||
return
|
||||
case log := <-bufferedLogs:
|
||||
err = encoder.Encode(convertProvisionerJobLog(log))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
rw.(http.Flusher).Flush()
|
||||
case <-ticker.C:
|
||||
job, err := api.Database.GetProvisionerJobByID(r.Context(), job.ID)
|
||||
if err != nil {
|
||||
api.Logger.Warn(r.Context(), "streaming job logs; checking if completed", slog.Error(err), slog.F("job_id", job.ID.String()))
|
||||
continue
|
||||
}
|
||||
if convertProvisionerJob(job).Status.Completed() {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func convertProvisionerJobLog(provisionerJobLog database.ProvisionerJobLog) ProvisionerJobLog {
|
||||
return ProvisionerJobLog{
|
||||
ID: provisionerJobLog.ID,
|
||||
CreatedAt: provisionerJobLog.CreatedAt,
|
||||
Source: provisionerJobLog.Source,
|
||||
Level: provisionerJobLog.Level,
|
||||
Output: provisionerJobLog.Output,
|
||||
}
|
||||
}
|
@ -1,133 +0,0 @@
|
||||
package coderd_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/coder/coderd"
|
||||
"github.com/coder/coder/coderd/coderdtest"
|
||||
"github.com/coder/coder/database"
|
||||
"github.com/coder/coder/provisioner/echo"
|
||||
"github.com/coder/coder/provisionersdk/proto"
|
||||
)
|
||||
|
||||
func TestProvisionerJobLogsByName(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Run("List", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
coderdtest.NewProvisionerDaemon(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
|
||||
Parse: echo.ParseComplete,
|
||||
Provision: []*proto.Provision_Response{{
|
||||
Type: &proto.Provision_Response_Log{
|
||||
Log: &proto.Log{
|
||||
Level: proto.LogLevel_INFO,
|
||||
Output: "log-output",
|
||||
},
|
||||
},
|
||||
}, {
|
||||
Type: &proto.Provision_Response_Complete{
|
||||
Complete: &proto.Provision_Complete{},
|
||||
},
|
||||
}},
|
||||
})
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
|
||||
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
|
||||
ProjectVersionID: project.ActiveVersionID,
|
||||
Transition: database.WorkspaceTransitionStart,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, history.ProvisionJobID)
|
||||
// Return the log after completion!
|
||||
logs, err := client.ProvisionerJobLogs(context.Background(), user.Organization, history.ProvisionJobID)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, logs)
|
||||
require.Len(t, logs, 1)
|
||||
})
|
||||
|
||||
t.Run("StreamAfterComplete", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
coderdtest.NewProvisionerDaemon(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
|
||||
Parse: echo.ParseComplete,
|
||||
Provision: []*proto.Provision_Response{{
|
||||
Type: &proto.Provision_Response_Log{
|
||||
Log: &proto.Log{
|
||||
Level: proto.LogLevel_INFO,
|
||||
Output: "log-output",
|
||||
},
|
||||
},
|
||||
}, {
|
||||
Type: &proto.Provision_Response_Complete{
|
||||
Complete: &proto.Provision_Complete{},
|
||||
},
|
||||
}},
|
||||
})
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
|
||||
before := time.Now().UTC()
|
||||
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
|
||||
ProjectVersionID: project.ActiveVersionID,
|
||||
Transition: database.WorkspaceTransitionStart,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, history.ProvisionJobID)
|
||||
|
||||
logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before)
|
||||
require.NoError(t, err)
|
||||
log, ok := <-logs
|
||||
require.True(t, ok)
|
||||
require.Equal(t, "log-output", log.Output)
|
||||
// Make sure the channel automatically closes!
|
||||
_, ok = <-logs
|
||||
require.False(t, ok)
|
||||
})
|
||||
|
||||
t.Run("StreamWhileRunning", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t)
|
||||
user := coderdtest.CreateInitialUser(t, client)
|
||||
coderdtest.NewProvisionerDaemon(t, client)
|
||||
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
|
||||
Parse: echo.ParseComplete,
|
||||
Provision: []*proto.Provision_Response{{
|
||||
Type: &proto.Provision_Response_Log{
|
||||
Log: &proto.Log{
|
||||
Level: proto.LogLevel_INFO,
|
||||
Output: "log-output",
|
||||
},
|
||||
},
|
||||
}, {
|
||||
Type: &proto.Provision_Response_Complete{
|
||||
Complete: &proto.Provision_Complete{},
|
||||
},
|
||||
}},
|
||||
})
|
||||
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
|
||||
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
|
||||
before := database.Now()
|
||||
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
|
||||
ProjectVersionID: project.ActiveVersionID,
|
||||
Transition: database.WorkspaceTransitionStart,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before)
|
||||
require.NoError(t, err)
|
||||
log := <-logs
|
||||
require.Equal(t, "log-output", log.Output)
|
||||
// Make sure the channel automatically closes!
|
||||
_, ok := <-logs
|
||||
require.False(t, ok)
|
||||
})
|
||||
}
|
@ -1,16 +1,20 @@
|
||||
package coderd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/render"
|
||||
"github.com/google/uuid"
|
||||
|
||||
"cdr.dev/slog"
|
||||
|
||||
"github.com/coder/coder/database"
|
||||
"github.com/coder/coder/httpapi"
|
||||
"github.com/coder/coder/httpmw"
|
||||
@ -44,78 +48,185 @@ type ProvisionerJob struct {
|
||||
WorkerID *uuid.UUID `json:"worker_id,omitempty"`
|
||||
}
|
||||
|
||||
type CreateProjectImportJobRequest struct {
|
||||
StorageMethod database.ProvisionerStorageMethod `json:"storage_method" validate:"oneof=file,required"`
|
||||
StorageSource string `json:"storage_source" validate:"required"`
|
||||
Provisioner database.ProvisionerType `json:"provisioner" validate:"oneof=terraform echo,required"`
|
||||
|
||||
AdditionalParameters []ParameterValue `json:"parameter_values"`
|
||||
SkipParameterSchemas bool `json:"skip_parameter_schemas"`
|
||||
SkipResources bool `json:"skip_resources"`
|
||||
// ProvisionerJobLog represents a single log from a provisioner job.
type ProvisionerJobLog struct {
ID uuid.UUID
CreatedAt time.Time `json:"created_at"`
Source database.LogSource `json:"log_source"`
Level database.LogLevel `json:"log_level"`
Output string `json:"output"`
}
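
The handler below documents the intended client flow: one request for history with ?before=<timestamp>, then a streaming request with ?after=<timestamp>&follow that is served as newline-delimited JSON until the job completes. A minimal sketch of such a consumer, assuming logsURL points at one of the .../{provisionerjob}/logs routes and the HTTP client is already authenticated:

func followJobLogs(client *http.Client, logsURL string) error {
	timestamp := time.Now().UnixMilli()

	// 1. Everything logged before "now".
	resp, err := client.Get(fmt.Sprintf("%s?before=%d", logsURL, timestamp))
	if err != nil {
		return err
	}
	var history []ProvisionerJobLog
	err = json.NewDecoder(resp.Body).Decode(&history)
	_ = resp.Body.Close()
	if err != nil {
		return err
	}

	// 2. Everything logged after "now", streamed until the job completes.
	resp, err = client.Get(fmt.Sprintf("%s?after=%d&follow", logsURL, timestamp))
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	decoder := json.NewDecoder(resp.Body)
	for {
		var entry ProvisionerJobLog
		if err := decoder.Decode(&entry); err != nil {
			// The server closes the stream once the job has completed.
			return nil
		}
		fmt.Println(entry.Output)
	}
}
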
func (*api) provisionerJobByOrganization(rw http.ResponseWriter, r *http.Request) {
|
||||
func (*api) provisionerJobByID(rw http.ResponseWriter, r *http.Request) {
|
||||
job := httpmw.ProvisionerJobParam(r)
|
||||
|
||||
render.Status(r, http.StatusOK)
|
||||
render.JSON(rw, r, convertProvisionerJob(job))
|
||||
}
|
||||
|
||||
func (api *api) postProvisionerImportJobByOrganization(rw http.ResponseWriter, r *http.Request) {
|
||||
apiKey := httpmw.APIKey(r)
|
||||
organization := httpmw.OrganizationParam(r)
|
||||
var req CreateProjectImportJobRequest
|
||||
if !httpapi.Read(rw, r, &req) {
|
||||
return
|
||||
}
|
||||
file, err := api.Database.GetFileByHash(r.Context(), req.StorageSource)
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
// Returns provisioner logs based on query parameters.
|
||||
// The intended usage for a client to stream all logs (with JS API):
|
||||
// const timestamp = new Date().getTime();
|
||||
// 1. GET /logs?before=<timestamp>
|
||||
// 2. GET /logs?after=<timestamp>&follow
|
||||
// The combination of these responses should provide all current logs
|
||||
// to the consumer, and future logs are streamed in the follow request.
|
||||
func (api *api) provisionerJobLogsByID(rw http.ResponseWriter, r *http.Request) {
|
||||
follow := r.URL.Query().Has("follow")
|
||||
afterRaw := r.URL.Query().Get("after")
|
||||
beforeRaw := r.URL.Query().Get("before")
|
||||
if beforeRaw != "" && follow {
|
||||
httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
|
||||
Message: "file not found",
|
||||
})
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("get file: %s", err),
|
||||
Message: "before cannot be used with follow",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
input, err := json.Marshal(projectVersionImportJob{
|
||||
// AdditionalParameters: req.AdditionalParameters,
|
||||
OrganizationID: organization.ID,
|
||||
SkipParameterSchemas: req.SkipParameterSchemas,
|
||||
SkipResources: req.SkipResources,
|
||||
var after time.Time
|
||||
// Only fetch logs created after the time provided.
|
||||
if afterRaw != "" {
|
||||
afterMS, err := strconv.ParseInt(afterRaw, 10, 64)
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
|
||||
Message: fmt.Sprintf("unable to parse after %q: %s", afterRaw, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
after = time.UnixMilli(afterMS)
|
||||
} else {
|
||||
if follow {
|
||||
after = database.Now()
|
||||
}
|
||||
}
|
||||
var before time.Time
|
||||
// Only fetch logs created before the time provided.
|
||||
if beforeRaw != "" {
|
||||
beforeMS, err := strconv.ParseInt(beforeRaw, 10, 64)
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{
|
||||
Message: fmt.Sprintf("unable to parse before %q: %s", beforeRaw, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
before = time.UnixMilli(beforeMS)
|
||||
} else {
|
||||
before = database.Now()
|
||||
}
|
||||
|
||||
job := httpmw.ProvisionerJobParam(r)
|
||||
if !follow {
|
||||
logs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{
|
||||
JobID: job.ID,
|
||||
CreatedAfter: after,
|
||||
CreatedBefore: before,
|
||||
})
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("get provisioner logs: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
if logs == nil {
|
||||
logs = []database.ProvisionerJobLog{}
|
||||
}
|
||||
render.Status(r, http.StatusOK)
|
||||
render.JSON(rw, r, logs)
|
||||
return
|
||||
}
|
||||
|
||||
bufferedLogs := make(chan database.ProvisionerJobLog, 128)
|
||||
closeSubscribe, err := api.Pubsub.Subscribe(provisionerJobLogsChannel(job.ID), func(ctx context.Context, message []byte) {
|
||||
var logs []database.ProvisionerJobLog
|
||||
err := json.Unmarshal(message, &logs)
|
||||
if err != nil {
|
||||
api.Logger.Warn(r.Context(), fmt.Sprintf("invalid provisioner job log on channel %q: %s", provisionerJobLogsChannel(job.ID), err.Error()))
|
||||
return
|
||||
}
|
||||
|
||||
for _, log := range logs {
|
||||
select {
|
||||
case bufferedLogs <- log:
|
||||
default:
|
||||
// If this overflows users could miss logs streaming. This can happen
|
||||
// if a database request takes a long amount of time, and we get a lot of logs.
|
||||
api.Logger.Warn(r.Context(), "provisioner job log overflowing channel")
|
||||
}
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("marshal job: %s", err),
|
||||
Message: fmt.Sprintf("subscribe to provisioner job logs: %s", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
defer closeSubscribe()
|
||||
|
||||
job, err := api.Database.InsertProvisionerJob(r.Context(), database.InsertProvisionerJobParams{
|
||||
ID: uuid.New(),
|
||||
CreatedAt: database.Now(),
|
||||
UpdatedAt: database.Now(),
|
||||
OrganizationID: organization.ID,
|
||||
InitiatorID: apiKey.UserID,
|
||||
Provisioner: req.Provisioner,
|
||||
StorageMethod: database.ProvisionerStorageMethodFile,
|
||||
StorageSource: file.Hash,
|
||||
Type: database.ProvisionerJobTypeProjectVersionImport,
|
||||
Input: input,
|
||||
provisionerJobLogs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{
|
||||
JobID: job.ID,
|
||||
CreatedAfter: after,
|
||||
CreatedBefore: before,
|
||||
})
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{
|
||||
Message: fmt.Sprintf("insert provisioner job: %s", err),
|
||||
Message: fmt.Sprint("get provisioner job logs: %w", err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
render.Status(r, http.StatusCreated)
|
||||
render.JSON(rw, r, convertProvisionerJob(job))
|
||||
// "follow" uses the ndjson format to stream data.
|
||||
// See: https://canjs.com/doc/can-ndjson-stream.html
|
||||
rw.Header().Set("Content-Type", "application/stream+json")
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
rw.(http.Flusher).Flush()
|
||||
|
||||
// The Go stdlib JSON encoder appends a newline character after message write.
|
||||
encoder := json.NewEncoder(rw)
|
||||
|
||||
for _, provisionerJobLog := range provisionerJobLogs {
|
||||
err = encoder.Encode(convertProvisionerJobLog(provisionerJobLog))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
ticker := time.NewTicker(250 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case <-r.Context().Done():
|
||||
return
|
||||
case log := <-bufferedLogs:
|
||||
err = encoder.Encode(convertProvisionerJobLog(log))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
rw.(http.Flusher).Flush()
|
||||
case <-ticker.C:
|
||||
job, err := api.Database.GetProvisionerJobByID(r.Context(), job.ID)
|
||||
if err != nil {
|
||||
api.Logger.Warn(r.Context(), "streaming job logs; checking if completed", slog.Error(err), slog.F("job_id", job.ID.String()))
|
||||
continue
|
||||
}
|
||||
if convertProvisionerJob(job).Status.Completed() {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
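The Subscribe callback above never blocks the pubsub delivery path: each log is pushed onto a bounded channel, and when the HTTP writer falls behind the message is dropped with a warning. A small standalone sketch of that non-blocking send pattern (illustrative only, not the handler itself):

package main

import "log"

// push hands a message to the streaming side without ever blocking the
// caller; when the buffer is full the message is dropped and a warning is
// logged, mirroring the overflow branch in the handler above.
func push(buffered chan<- string, msg string) {
	select {
	case buffered <- msg:
	default:
		log.Println("log channel overflowing, dropping message")
	}
}

func main() {
	buffered := make(chan string, 2)
	push(buffered, "one")
	push(buffered, "two")
	push(buffered, "three") // dropped: the buffer is full and nothing has read yet
	log.Println(<-buffered, <-buffered)
}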
func convertProvisionerJobLog(provisionerJobLog database.ProvisionerJobLog) ProvisionerJobLog {
return ProvisionerJobLog{
ID: provisionerJobLog.ID,
CreatedAt: provisionerJobLog.CreatedAt,
Source: provisionerJobLog.Source,
Level: provisionerJobLog.Level,
Output: provisionerJobLog.Output,
}
}

func convertProvisionerJob(provisionerJob database.ProvisionerJob) ProvisionerJob {
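When "follow" is set, the handler above streams newline-delimited JSON: each ProvisionerJobLog is encoded, flushed, and the connection stays open until the job completes. A hedged client-side sketch of consuming such a stream (the URL is a placeholder and the struct keeps only two of the fields defined above; this is not the codersdk client API):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// jobLog keeps only the fields this sketch prints; the real payload is the
// ProvisionerJobLog type defined above.
type jobLog struct {
	CreatedAt time.Time `json:"created_at"`
	Output    string    `json:"output"`
}

func main() {
	// "after" is Unix milliseconds, matching the strconv.ParseInt/time.UnixMilli
	// handling in the handler; "follow" switches the response to a stream.
	after := time.Now().UnixMilli()
	url := fmt.Sprintf("http://127.0.0.1:3000/PLACEHOLDER-job-logs-endpoint?after=%d&follow", after)
	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Each message is one JSON object terminated by a newline, so a streaming
	// decoder can read entries as they are flushed by the server.
	dec := json.NewDecoder(resp.Body)
	for {
		var entry jobLog
		if err := dec.Decode(&entry); err != nil {
			break // io.EOF once the job completes and the server closes the stream
		}
		fmt.Println(entry.CreatedAt, entry.Output)
	}
}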
@ -2,12 +2,17 @@ package coderd_test

import (
"context"
"net/http"
"testing"
"time"

"github.com/stretchr/testify/require"

"github.com/coder/coder/coderd"
"github.com/coder/coder/coderd/coderdtest"
"github.com/coder/coder/coderd/parameter"
"github.com/coder/coder/codersdk"
"github.com/coder/coder/database"
"github.com/coder/coder/provisioner/echo"
"github.com/coder/coder/provisionersdk/proto"
)
@ -20,7 +25,7 @@ func TestPostProvisionerImportJobByOrganization(t *testing.T) {
user := coderdtest.CreateInitialUser(t, client)
_ = coderdtest.NewProvisionerDaemon(t, client)
before := time.Now()
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: []*proto.Parse_Response{{
Type: &proto.Parse_Response_Complete{
Complete: &proto.Parse_Complete{
@ -39,7 +44,7 @@ func TestPostProvisionerImportJobByOrganization(t *testing.T) {
},
}},
})
logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, job.ID, before)
logs, err := client.ProjectImportJobLogsAfter(context.Background(), user.Organization, job.ID, before)
require.NoError(t, err)
for {
log, ok := <-logs
@ -49,4 +54,274 @@ func TestPostProvisionerImportJobByOrganization(t *testing.T) {
t.Log(log.Output)
}
})

t.Run("CreateWithParameters", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
_ = coderdtest.NewProvisionerDaemon(t, client)
data, err := echo.Tar(&echo.Responses{
Parse: []*proto.Parse_Response{{
Type: &proto.Parse_Response_Complete{
Complete: &proto.Parse_Complete{
ParameterSchemas: []*proto.ParameterSchema{{
Name: "test",
RedisplayValue: true,
}},
},
},
}},
Provision: echo.ProvisionComplete,
})
require.NoError(t, err)
file, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, data)
require.NoError(t, err)
job, err := client.CreateProjectImportJob(context.Background(), user.Organization, coderd.CreateProjectImportJobRequest{
StorageSource: file.Hash,
StorageMethod: database.ProvisionerStorageMethodFile,
Provisioner: database.ProvisionerTypeEcho,
ParameterValues: []coderd.CreateParameterValueRequest{{
Name: "test",
SourceValue: "somevalue",
SourceScheme: database.ParameterSourceSchemeData,
DestinationScheme: database.ParameterDestinationSchemeProvisionerVariable,
}},
})
require.NoError(t, err)
job = coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
values, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID)
require.NoError(t, err)
require.Equal(t, "somevalue", values[0].SourceValue)
})
}

func TestProvisionerJobParametersByID(t *testing.T) {
t.Parallel()
t.Run("NotImported", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
_, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID)
var apiErr *codersdk.Error
require.ErrorAs(t, err, &apiErr)
require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode())
})

t.Run("List", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
_ = coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: []*proto.Parse_Response{{
Type: &proto.Parse_Response_Complete{
Complete: &proto.Parse_Complete{
ParameterSchemas: []*proto.ParameterSchema{{
Name: "example",
DefaultSource: &proto.ParameterSource{
Scheme: proto.ParameterSource_DATA,
Value: "hello",
},
DefaultDestination: &proto.ParameterDestination{
Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE,
},
}},
},
},
}},
Provision: echo.ProvisionComplete,
})
job = coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
params, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID)
require.NoError(t, err)
require.Len(t, params, 1)
})

t.Run("ListNoRedisplay", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
_ = coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: []*proto.Parse_Response{{
Type: &proto.Parse_Response_Complete{
Complete: &proto.Parse_Complete{
ParameterSchemas: []*proto.ParameterSchema{{
Name: "example",
DefaultSource: &proto.ParameterSource{
Scheme: proto.ParameterSource_DATA,
Value: "tomato",
},
DefaultDestination: &proto.ParameterDestination{
Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE,
},
RedisplayValue: false,
}},
},
},
}},
Provision: echo.ProvisionComplete,
})
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
params, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID)
require.NoError(t, err)
require.Len(t, params, 1)
require.NotNil(t, params[0])
require.Equal(t, params[0].SourceValue, "")
})
}

func TestProvisionerJobResourcesByID(t *testing.T) {
t.Parallel()
t.Run("Something", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
_ = coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: []*proto.Parse_Response{{
Type: &proto.Parse_Response_Complete{
Complete: &proto.Parse_Complete{
ParameterSchemas: []*proto.ParameterSchema{{
Name: parameter.CoderWorkspaceTransition,
}},
},
},
}},
Provision: []*proto.Provision_Response{{
Type: &proto.Provision_Response_Complete{
Complete: &proto.Provision_Complete{
Resources: []*proto.Resource{{
Name: "hello",
Type: "ec2_instance",
}},
},
},
}},
})
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
resources, err := client.ProjectImportJobResources(context.Background(), user.Organization, job.ID)
require.NoError(t, err)
// One for start, and one for stop!
require.Len(t, resources, 2)
})
}

func TestProvisionerJobLogsByName(t *testing.T) {
t.Parallel()
t.Run("List", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: echo.ParseComplete,
Provision: []*proto.Provision_Response{{
Type: &proto.Provision_Response_Log{
Log: &proto.Log{
Level: proto.LogLevel_INFO,
Output: "log-output",
},
},
}, {
Type: &proto.Provision_Response_Complete{
Complete: &proto.Provision_Complete{},
},
}},
})
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
Transition: database.WorkspaceTransitionStart,
})
require.NoError(t, err)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, history.ProvisionJobID)
// Return the log after completion!
logs, err := client.WorkspaceProvisionJobLogsBefore(context.Background(), user.Organization, history.ProvisionJobID, time.Time{})
require.NoError(t, err)
require.NotNil(t, logs)
require.Len(t, logs, 1)
})

t.Run("StreamAfterComplete", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: echo.ParseComplete,
Provision: []*proto.Provision_Response{{
Type: &proto.Provision_Response_Log{
Log: &proto.Log{
Level: proto.LogLevel_INFO,
Output: "log-output",
},
},
}, {
Type: &proto.Provision_Response_Complete{
Complete: &proto.Provision_Complete{},
},
}},
})
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
before := time.Now().UTC()
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
Transition: database.WorkspaceTransitionStart,
})
require.NoError(t, err)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, history.ProvisionJobID)

logs, err := client.WorkspaceProvisionJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before)
require.NoError(t, err)
log, ok := <-logs
require.True(t, ok)
require.Equal(t, "log-output", log.Output)
// Make sure the channel automatically closes!
_, ok = <-logs
require.False(t, ok)
})

t.Run("StreamWhileRunning", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Parse: echo.ParseComplete,
Provision: []*proto.Provision_Response{{
Type: &proto.Provision_Response_Log{
Log: &proto.Log{
Level: proto.LogLevel_INFO,
Output: "log-output",
},
},
}, {
Type: &proto.Provision_Response_Complete{
Complete: &proto.Provision_Complete{},
},
}},
})
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
before := database.Now()
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
Transition: database.WorkspaceTransitionStart,
})
require.NoError(t, err)
logs, err := client.WorkspaceProvisionJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before)
require.NoError(t, err)
log := <-logs
require.Equal(t, "log-output", log.Output)
// Make sure the channel automatically closes!
_, ok := <-logs
require.False(t, ok)
})
}
@ -22,7 +22,7 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
_, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
@ -40,11 +40,11 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{
Provision: []*proto.Provision_Response{{}},
})
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
_, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
@ -61,9 +61,9 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
closeDaemon := coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
// Close here so workspace history doesn't process!
closeDaemon.Close()
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
@ -87,16 +87,16 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
firstHistory, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
Transition: database.WorkspaceTransitionStart,
})
require.NoError(t, err)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, firstHistory.ProvisionJobID)
coderdtest.AwaitWorkspaceProvisionJob(t, client, user.Organization, firstHistory.ProvisionJobID)
secondHistory, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
Transition: database.WorkspaceTransitionStart,
@ -117,7 +117,7 @@ func TestWorkspaceHistoryByUser(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
history, err := client.ListWorkspaceHistory(context.Background(), "me", workspace.Name)
@ -131,9 +131,9 @@ func TestWorkspaceHistoryByUser(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
_, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
ProjectVersionID: project.ActiveVersionID,
@ -152,8 +152,8 @@ func TestWorkspaceHistoryByName(t *testing.T) {
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
coderdtest.NewProvisionerDaemon(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
@ -29,7 +29,7 @@ func TestWorkspaces(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
_ = coderdtest.CreateWorkspace(t, client, "", project.ID)
workspaces, err := client.Workspaces(context.Background(), "")
@ -58,7 +58,7 @@ func TestPostWorkspaceByUser(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)

anotherUser := coderd.CreateUserRequest{
@ -73,7 +73,7 @@ func TestPostWorkspaceByUser(t *testing.T) {
Password: anotherUser.Password,
})
require.NoError(t, err)
err = client.SetSessionToken(token.SessionToken)
client.SessionToken = token.SessionToken
require.NoError(t, err)

_, err = client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{
@ -90,7 +90,7 @@ func TestPostWorkspaceByUser(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "", project.ID)
_, err := client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{
@ -107,7 +107,7 @@ func TestPostWorkspaceByUser(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
_ = coderdtest.CreateWorkspace(t, client, "", project.ID)
})
@ -117,7 +117,7 @@ func TestWorkspaceByUser(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspace := coderdtest.CreateWorkspace(t, client, "", project.ID)
_, err := client.Workspace(context.Background(), "", workspace.Name)
@ -130,7 +130,7 @@ func TestWorkspacesByProject(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
workspaces, err := client.WorkspacesByProject(context.Background(), user.Organization, project.Name)
require.NoError(t, err)
@ -141,7 +141,7 @@ func TestWorkspacesByProject(t *testing.T) {
t.Parallel()
client := coderdtest.New(t)
user := coderdtest.CreateInitialUser(t, client)
job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil)
job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil)
project := coderdtest.CreateProject(t, client, user.Organization, job.ID)
_ = coderdtest.CreateWorkspace(t, client, "", project.ID)
workspaces, err := client.WorkspacesByProject(context.Background(), user.Organization, project.Name)