Mirror of https://github.com/coder/coder.git, synced 2025-07-15 22:20:27 +00:00
ci: Replace DataDog CI with custom upload script (#169)
* ci: Replace DataDog CI with custom upload script. This will reduce CI time by ~6 minutes across all of our runners. It's a bit janky, but I believe worth the slight maintenance burden.
* Fix test race when job would complete too early
* Fix job cancellation override
* Fix race where provisioner job is inserted before project version
.github/workflows/coder.yaml (vendored, 42 changed lines)
@@ -159,45 +159,27 @@ jobs:
           -covermode=atomic -coverprofile="gotests.coverage"
           -timeout=3m -count=5 -race -short -parallel=2
 
+      - name: Upload DataDog Trace
+        if: (success() || failure()) && github.actor != 'dependabot[bot]'
+        env:
+          DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
+          DD_DATABASE: fake
+          GIT_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+        run: go run scripts/datadog-cireport/main.go gotests.xml
+
       - name: Test with PostgreSQL Database
         if: runner.os == 'Linux'
         run: DB=true gotestsum --junitfile="gotests.xml" --packages="./..." --
           -covermode=atomic -coverprofile="gotests.coverage" -timeout=3m
           -count=1 -race -parallel=2
 
-      - name: Setup Node for DataDog CLI
-        uses: actions/setup-node@v2
-        if: always() && github.actor != 'dependabot[bot]'
-        with:
-          node-version: "14"
-
-      - name: Cache DataDog CLI
-        if: always() && github.actor != 'dependabot[bot]'
-        uses: actions/cache@v2
-        with:
-          path: |
-            ~/.npm
-            %LocalAppData%\npm-cache
-          key: datadogci-
-          restore-keys: datadogci-
-
       - name: Upload DataDog Trace
-        if: always() && github.actor != 'dependabot[bot]'
-        # See: https://docs.datadoghq.com/continuous_integration/setup_tests/junit_upload/#collecting-environment-configuration-metadata
+        if: (success() || failure()) && github.actor != 'dependabot[bot]'
         env:
           DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
-          DD_GIT_REPOSITORY_URL: ${{ github.repositoryUrl }}
-          DD_GIT_BRANCH: ${{ github.head_ref }}
-          DD_GIT_COMMIT_SHA: ${{ github.sha }}
-          DD_GIT_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
-          DD_GIT_COMMIT_AUTHOR_NAME: ${{ github.event.head_commit.author.name }}
-          DD_GIT_COMMIT_AUTHOR_EMAIL: ${{ github.event.head_commit.author.email }}
-          DD_GIT_COMMIT_COMMITTER_NAME: ${{ github.event.head_commit.committer.name }}
-          DD_GIT_COMMIT_COMMITTER_EMAIL: ${{ github.event.head_commit.committer.email }}
-          DD_TAGS: ${{ format('os.platform:{0},os.architecture:{1}', runner.os, runner.arch) }}
-        run: |
-          npm install -g @datadog/datadog-ci
-          datadog-ci junit upload --service coder gotests.xml
+          DD_DATABASE: postgresql
+          GIT_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+        run: go run scripts/datadog-cireport/main.go gotests.xml
 
       - uses: codecov/codecov-action@v2
         if: github.actor != 'dependabot[bot]'
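The replacement step is the same for both test runs: each uploads its own gotests.xml, and only DD_DATABASE differs (fake for the short run, postgresql for the Linux database run), which the uploader forwards to DataDog as a test trait. The (success() || failure()) condition still uploads results when tests fail while continuing to skip Dependabot runs.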
@@ -31,3 +31,4 @@ ignore:
   - peerbroker/proto
   - provisionerd/proto
   - provisionersdk/proto
+  - scripts/datadog-cireport
@@ -125,16 +125,29 @@ func (api *api) postProjectVersionByOrganization(rw http.ResponseWriter, r *http
 	var provisionerJob database.ProvisionerJob
 	var projectVersion database.ProjectVersion
 	err = api.Database.InTx(func(db database.Store) error {
-		projectVersionID := uuid.New()
+		provisionerJobID := uuid.New()
+		projectVersion, err = api.Database.InsertProjectVersion(r.Context(), database.InsertProjectVersionParams{
+			ID:            uuid.New(),
+			ProjectID:     project.ID,
+			CreatedAt:     database.Now(),
+			UpdatedAt:     database.Now(),
+			Name:          namesgenerator.GetRandomName(1),
+			StorageMethod: createProjectVersion.StorageMethod,
+			StorageSource: createProjectVersion.StorageSource,
+			ImportJobID:   provisionerJobID,
+		})
+		if err != nil {
+			return xerrors.Errorf("insert project version: %s", err)
+		}
 		input, err := json.Marshal(projectImportJob{
-			ProjectVersionID: projectVersionID,
+			ProjectVersionID: projectVersion.ID,
 		})
 		if err != nil {
 			return xerrors.Errorf("marshal import job: %w", err)
 		}
 
 		provisionerJob, err = db.InsertProvisionerJob(r.Context(), database.InsertProvisionerJobParams{
-			ID:          uuid.New(),
+			ID:          provisionerJobID,
 			CreatedAt:   database.Now(),
 			UpdatedAt:   database.Now(),
 			InitiatorID: apiKey.UserID,
@@ -146,20 +159,6 @@ func (api *api) postProjectVersionByOrganization(rw http.ResponseWriter, r *http
 		if err != nil {
 			return xerrors.Errorf("insert provisioner job: %w", err)
 		}
-
-		projectVersion, err = api.Database.InsertProjectVersion(r.Context(), database.InsertProjectVersionParams{
-			ID:            projectVersionID,
-			ProjectID:     project.ID,
-			CreatedAt:     database.Now(),
-			UpdatedAt:     database.Now(),
-			Name:          namesgenerator.GetRandomName(1),
-			StorageMethod: createProjectVersion.StorageMethod,
-			StorageSource: createProjectVersion.StorageSource,
-			ImportJobID:   provisionerJob.ID,
-		})
-		if err != nil {
-			return xerrors.Errorf("insert project version: %s", err)
-		}
 		return nil
 	})
 	if err != nil {
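This reordering is the "provisioner job is inserted before project version" race fix from the commit message: the job ID is now generated up front, the project version that references it (via ImportJobID) is inserted first, and the provisioner job is inserted last, so a daemon that picks up the job can no longer observe a missing version. A minimal sketch of the ordering, with a hypothetical store interface standing in for the generated database package:

package sketch

import (
	"context"

	"github.com/google/uuid"
	"golang.org/x/xerrors"
)

// store is a stand-in for the generated database.Store; only the calls
// needed to show the insertion order are included.
type store interface {
	InsertProjectVersion(ctx context.Context, id, importJobID uuid.UUID) error
	InsertProvisionerJob(ctx context.Context, id uuid.UUID) error
}

// createVersionWithJob allocates the job ID up front, inserts the project
// version that references it, and only then inserts the provisioner job,
// so a daemon that acquires the job always finds the version it imports.
func createVersionWithJob(ctx context.Context, db store) error {
	provisionerJobID := uuid.New()
	err := db.InsertProjectVersion(ctx, uuid.New(), provisionerJobID)
	if err != nil {
		return xerrors.Errorf("insert project version: %w", err)
	}
	err = db.InsertProvisionerJob(ctx, provisionerJobID)
	if err != nil {
		return xerrors.Errorf("insert provisioner job: %w", err)
	}
	return nil
}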
@@ -377,6 +377,13 @@ func (server *provisionerdServer) CancelJob(ctx context.Context, cancelJob *prot
 	if err != nil {
 		return nil, xerrors.Errorf("parse job id: %w", err)
 	}
+	job, err := server.Database.GetProvisionerJobByID(ctx, jobID)
+	if err != nil {
+		return nil, xerrors.Errorf("get provisioner job: %w", err)
+	}
+	if job.CompletedAt.Valid {
+		return nil, xerrors.Errorf("job already completed")
+	}
 	err = server.Database.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{
 		ID: jobID,
 		CompletedAt: sql.NullTime{
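This lookup is the "job cancellation override" fix: cancellation marks a job complete through UpdateProvisionerJobWithCompleteByID, so without the check a cancel request could overwrite the completion data of a job that had already finished. The guard reduces to a sql.NullTime validity check; a minimal sketch with a hypothetical struct in place of database.ProvisionerJob:

package sketch

import (
	"database/sql"
	"time"

	"golang.org/x/xerrors"
)

// provisionerJob is a stand-in for database.ProvisionerJob.
type provisionerJob struct {
	CompletedAt sql.NullTime
}

// cancel refuses to touch a job that already finished, instead of
// overwriting its completion time with the cancellation time.
func cancel(job *provisionerJob) error {
	if job.CompletedAt.Valid {
		return xerrors.New("job already completed")
	}
	job.CompletedAt = sql.NullTime{Time: time.Now(), Valid: true}
	return nil
}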
@@ -126,17 +126,32 @@ func (api *api) postWorkspaceHistoryByUser(rw http.ResponseWriter, r *http.Reque
 	// This must happen in a transaction to ensure history can be inserted, and
 	// the prior history can update it's "after" column to point at the new.
 	err = api.Database.InTx(func(db database.Store) error {
-		// Generate the ID before-hand so the provisioner job is aware of it!
-		workspaceHistoryID := uuid.New()
+		provisionerJobID := uuid.New()
+		workspaceHistory, err = db.InsertWorkspaceHistory(r.Context(), database.InsertWorkspaceHistoryParams{
+			ID:               uuid.New(),
+			CreatedAt:        database.Now(),
+			UpdatedAt:        database.Now(),
+			WorkspaceID:      workspace.ID,
+			ProjectVersionID: projectVersion.ID,
+			BeforeID:         priorHistoryID,
+			Name:             namesgenerator.GetRandomName(1),
+			Initiator:        user.ID,
+			Transition:       createBuild.Transition,
+			ProvisionJobID:   provisionerJobID,
+		})
+		if err != nil {
+			return xerrors.Errorf("insert workspace history: %w", err)
+		}
+
 		input, err := json.Marshal(workspaceProvisionJob{
-			WorkspaceHistoryID: workspaceHistoryID,
+			WorkspaceHistoryID: workspaceHistory.ID,
 		})
 		if err != nil {
 			return xerrors.Errorf("marshal provision job: %w", err)
 		}
 
 		provisionerJob, err = db.InsertProvisionerJob(r.Context(), database.InsertProvisionerJobParams{
-			ID:          uuid.New(),
+			ID:          provisionerJobID,
 			CreatedAt:   database.Now(),
 			UpdatedAt:   database.Now(),
 			InitiatorID: user.ID,
@@ -149,22 +164,6 @@ func (api *api) postWorkspaceHistoryByUser(rw http.ResponseWriter, r *http.Reque
 			return xerrors.Errorf("insert provisioner job: %w", err)
 		}
 
-		workspaceHistory, err = db.InsertWorkspaceHistory(r.Context(), database.InsertWorkspaceHistoryParams{
-			ID:               workspaceHistoryID,
-			CreatedAt:        database.Now(),
-			UpdatedAt:        database.Now(),
-			WorkspaceID:      workspace.ID,
-			ProjectVersionID: projectVersion.ID,
-			BeforeID:         priorHistoryID,
-			Name:             namesgenerator.GetRandomName(1),
-			Initiator:        user.ID,
-			Transition:       createBuild.Transition,
-			ProvisionJobID:   provisionerJob.ID,
-		})
-		if err != nil {
-			return xerrors.Errorf("insert workspace history: %w", err)
-		}
-
 		if priorHistoryID.Valid {
 			// Update the prior history entries "after" column.
 			err = db.UpdateWorkspaceHistoryByID(r.Context(), database.UpdateWorkspaceHistoryByIDParams{
@@ -59,10 +59,12 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) {
 		t.Parallel()
 		client := coderdtest.New(t)
 		user := coderdtest.CreateInitialUser(t, client)
-		coderdtest.NewProvisionerDaemon(t, client)
+		closeDaemon := coderdtest.NewProvisionerDaemon(t, client)
 		project := coderdtest.CreateProject(t, client, user.Organization)
 		version := coderdtest.CreateProjectVersion(t, client, user.Organization, project.Name, nil)
 		coderdtest.AwaitProjectVersionImported(t, client, user.Organization, project.Name, version.Name)
+		// Close here so workspace history doesn't process!
+		closeDaemon.Close()
 		workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID)
 		_, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
 			ProjectVersionID: version.ID,
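Closing the daemon before the workspace history is created is the "test race when job would complete too early" fix from the commit message: with the daemon still running, the provisioner could process the new workspace history before the test's assertions ran.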
@@ -220,12 +220,13 @@ func TestFollowWorkspaceHistoryLogsAfter(t *testing.T) {
 		})
 		coderdtest.AwaitProjectVersionImported(t, client, user.Organization, project.Name, version.Name)
 		workspace := coderdtest.CreateWorkspace(t, client, "", project.ID)
+		after := database.Now()
 		history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{
 			ProjectVersionID: version.ID,
 			Transition:       database.WorkspaceTransitionCreate,
 		})
 		require.NoError(t, err)
-		logs, err := client.FollowWorkspaceHistoryLogsAfter(context.Background(), "", workspace.Name, history.Name, time.Time{})
+		logs, err := client.FollowWorkspaceHistoryLogsAfter(context.Background(), "", workspace.Name, history.Name, after)
 		require.NoError(t, err)
 		_, ok := <-logs
 		require.True(t, ok)
rules.go (7 changed lines)
@@ -10,18 +10,17 @@ func xerrors(m dsl.Matcher) {
 	m.Import("errors")
 	m.Import("fmt")
 	m.Import("golang.org/x/xerrors")
-	msg := "Use xerrors to provide additional stacktrace information!"
 
 	m.Match("fmt.Errorf($*args)").
 		Suggest("xerrors.New($args)").
-		Report(msg)
+		Report("Use xerrors to provide additional stacktrace information!")
 
 	m.Match("fmt.Errorf($*args)").
 		Suggest("xerrors.Errorf($args)").
-		Report(msg)
+		Report("Use xerrors to provide additional stacktrace information!")
 
 	m.Match("errors.New($msg)").
 		Where(m["msg"].Type.Is("string")).
 		Suggest("xerrors.New($msg)").
-		Report(msg)
+		Report("Use xerrors to provide additional stacktrace information!")
 }
scripts/datadog-cireport/main.go (new file, 184 lines)
@@ -0,0 +1,184 @@
package main

import (
	"bytes"
	"compress/gzip"
	"context"
	"encoding/json"
	"fmt"
	"log"
	"mime/multipart"
	"net/http"
	"net/textproto"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"runtime"
	"strings"
)

// The DataDog "cireport" API is not publicly documented,
// but implementation is available in their open-source CLI
// built for CI: https://github.com/DataDog/datadog-ci
//
// It's built using node, and took ~3 minutes to install and
// run on our Windows runner, and ~1 minute on all others.
//
// This script models that code as much as possible.
func main() {
	apiKey := os.Getenv("DATADOG_API_KEY")
	if apiKey == "" {
		log.Fatal("DATADOG_API_KEY must be set!")
	}
	if len(os.Args) <= 1 {
		log.Fatal("You must supply a filename to upload!")
	}

	// Code (almost) verbatim translated from:
	// https://github.com/DataDog/datadog-ci/blob/78d0da28e1c1af44333deabf1c9486e2ad66b8af/src/helpers/ci.ts#L194-L229
	var (
		githubServerURL  = os.Getenv("GITHUB_SERVER_URL")
		githubRepository = os.Getenv("GITHUB_REPOSITORY")
		githubSHA        = os.Getenv("GITHUB_SHA")
		githubRunID      = os.Getenv("GITHUB_RUN_ID")
		pipelineURL      = fmt.Sprintf("%s/%s/actions/runs/%s", githubServerURL, githubRepository, githubRunID)
		jobURL           = fmt.Sprintf("%s/%s/commit/%s/checks", githubServerURL, githubRepository, githubSHA)
	)
	if os.Getenv("GITHUB_RUN_ATTEMPT") != "" {
		pipelineURL += fmt.Sprintf("/attempts/%s", os.Getenv("GITHUB_RUN_ATTEMPT"))
	}

	commitMessage, err := exec.Command("git", "log", "-1", "--pretty=format:%s").CombinedOutput()
	if err != nil {
		log.Fatalf("Get commit message: %s", err)
	}
	commitData, err := exec.Command("git", "show", "-s", "--format=%an,%ae,%ad,%cn,%ce,%cd").CombinedOutput()
	if err != nil {
		log.Fatalf("Get commit data: %s", err)
	}
	commitParts := strings.Split(string(commitData), ",")

	tags := map[string]string{
		"service":              "coder",
		"_dd.cireport_version": "2",

		"test.traits": fmt.Sprintf(`{"database":["%s"]}`, os.Getenv("DD_DATABASE")),

		// Additional tags found in DataDog docs. See:
		// https://docs.datadoghq.com/continuous_integration/setup_tests/junit_upload/#collecting-environment-configuration-metadata
		"os.platform":     runtime.GOOS,
		"os.architecture": runtime.GOARCH,

		"ci.job.url":         jobURL,
		"ci.pipeline.id":     githubRunID,
		"ci.pipeline.name":   os.Getenv("GITHUB_WORKFLOW"),
		"ci.pipeline.number": os.Getenv("GITHUB_RUN_NUMBER"),
		"ci.pipeline.url":    pipelineURL,
		"ci.provider.name":   "github",
		"ci.workspace_path":  os.Getenv("GITHUB_WORKSPACE"),

		"git.branch":         os.Getenv("GITHUB_HEAD_REF"),
		"git.commit.sha":     githubSHA,
		"git.repository_url": fmt.Sprintf("%s/%s.git", githubServerURL, githubRepository),

		"git.commit.message":         string(commitMessage),
		"git.commit.author.name":     commitParts[0],
		"git.commit.author.email":    commitParts[1],
		"git.commit.author.date":     commitParts[2],
		"git.commit.committer.name":  commitParts[3],
		"git.commit.committer.email": commitParts[4],
		"git.commit.committer.date":  commitParts[5],
	}

	xmlFilePath := filepath.Clean(os.Args[1])
	xmlFileData, err := os.ReadFile(xmlFilePath)
	if err != nil {
		log.Fatalf("Read %q: %s", xmlFilePath, err)
	}
	// https://github.com/DataDog/datadog-ci/blob/78d0da28e1c1af44333deabf1c9486e2ad66b8af/src/commands/junit/api.ts#L53
	var xmlCompressedBuffer bytes.Buffer
	xmlGzipWriter := gzip.NewWriter(&xmlCompressedBuffer)
	_, err = xmlGzipWriter.Write(xmlFileData)
	if err != nil {
		log.Fatalf("Write xml: %s", err)
	}
	err = xmlGzipWriter.Close()
	if err != nil {
		log.Fatalf("Close xml gzip writer: %s", err)
	}

	// Represents FormData. See:
	// https://github.com/DataDog/datadog-ci/blob/78d0da28e1c1af44333deabf1c9486e2ad66b8af/src/commands/junit/api.ts#L27
	var multipartBuffer bytes.Buffer
	multipartWriter := multipart.NewWriter(&multipartBuffer)

	// Adds the event data. See:
	// https://github.com/DataDog/datadog-ci/blob/78d0da28e1c1af44333deabf1c9486e2ad66b8af/src/commands/junit/api.ts#L42
	eventMimeHeader := make(textproto.MIMEHeader)
	eventMimeHeader.Set("Content-Disposition", `form-data; name="event"; filename="event.json"`)
	eventMimeHeader.Set("Content-Type", "application/json")
	eventMultipartWriter, err := multipartWriter.CreatePart(eventMimeHeader)
	if err != nil {
		log.Fatalf("Create event multipart: %s", err)
	}
	eventJSON, err := json.Marshal(tags)
	if err != nil {
		log.Fatalf("Marshal tags: %s", err)
	}
	_, err = eventMultipartWriter.Write(eventJSON)
	if err != nil {
		log.Fatalf("Write event JSON: %s", err)
	}

	// This seems really strange, but better to follow the implementation. See:
	// https://github.com/DataDog/datadog-ci/blob/78d0da28e1c1af44333deabf1c9486e2ad66b8af/src/commands/junit/api.ts#L44-L55
	xmlFilename := fmt.Sprintf("%s-coder-%s-%s-%s", filepath.Base(xmlFilePath), githubSHA, pipelineURL, jobURL)
	xmlFilename = regexp.MustCompile("[^a-z0-9]").ReplaceAllString(xmlFilename, "_")

	xmlMimeHeader := make(textproto.MIMEHeader)
	xmlMimeHeader.Set("Content-Disposition", fmt.Sprintf(`form-data; name="junit_xml_report_file"; filename="%s.xml.gz"`, xmlFilename))
	xmlMimeHeader.Set("Content-Type", "application/octet-stream")
	inputWriter, err := multipartWriter.CreatePart(xmlMimeHeader)
	if err != nil {
		log.Fatalf("Create xml.gz multipart: %s", err)
	}
	_, err = inputWriter.Write(xmlCompressedBuffer.Bytes())
	if err != nil {
		log.Fatalf("Write xml.gz: %s", err)
	}
	err = multipartWriter.Close()
	if err != nil {
		log.Fatalf("Close: %s", err)
	}

	ctx := context.Background()
	req, err := http.NewRequestWithContext(ctx, "POST", "https://cireport-intake.datadoghq.com/api/v2/cireport", &multipartBuffer)
	if err != nil {
		log.Fatalf("Create request: %s", err)
	}
	req.Header.Set("Content-Type", multipartWriter.FormDataContentType())
	req.Header.Set("DD-API-KEY", apiKey)

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatalf("Do request: %s", err)
	}
	defer res.Body.Close()
	var msg json.RawMessage
	err = json.NewDecoder(res.Body).Decode(&msg)
	if err != nil {
		log.Fatalf("Decode response: %s", err)
	}
	msg, err = json.MarshalIndent(msg, "", "\t")
	if err != nil {
		log.Fatalf("Pretty print: %s", err)
	}
	_, _ = fmt.Println(string(msg))
	msg, err = json.MarshalIndent(tags, "", "\t")
	if err != nil {
		log.Fatalf("Marshal tags: %s", err)
	}
	_, _ = fmt.Println(string(msg))
	_, _ = fmt.Printf("Status: %d\n", res.StatusCode)
}
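The workflow invokes this as "go run scripts/datadog-cireport/main.go gotests.xml": the JUnit XML path arrives as os.Args[1], DATADOG_API_KEY and DD_DATABASE come from the environment, and everything else is derived from the standard GITHUB_* variables and git itself. Per the header comment, installing and running the Node-based datadog-ci CLI took roughly three minutes on the Windows runner and one minute elsewhere, which accounts for the ~6 minutes of CI time the commit message says this saves across runners.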