Mirror of https://github.com/Infisical/infisical.git, synced 2025-03-22 07:12:17 +00:00

Compare commits: feat/permi ... sso (1 commit)

Author | SHA1 | Date
---|---|---
 | 5ed3965c79 |
Repository file tree (top level):

.dockerignore, .env.example, .env.migration.example, .env.test.example, .eslintignore, .gitignore, .goreleaser.yaml, cypress.config.js, docker-compose.dev-read-replica.yml, docker-compose.dev.yml, docker-compose.prod.yml, docker-compose.yml
.github/ (ISSUE_TEMPLATE, images, resources, workflows, pull_request_template.md)
.husky/
.infisicalignore, CONTRIBUTING.md, Dockerfile.fips.standalone-infisical, Dockerfile.standalone-infisical, Makefile, README.md, SECURITY.md
backend/ (Dockerfile, Dockerfile.dev, e2e-test, img, scripts, src: @types, config, controllers, db, ee, events, helpers, integrations, keystore, lib, middleware, models, queue, routes, server, services, templates, types/express, utils, variables)
cli/ (.gitignore, .infisicalignore, agent-config.yaml, config, detect, docker, go.mod, go.sum, main.go, packages: api, cmd, config, crypto, models, telemetry, util, visualize; scripts, secret-render-template, test)
cloudformation/ec2-deployment/
company/ (documentation/getting-started, favicon.png, handbook, logo, mint.json, style.css)
docker-swarm/
docs/ (api-reference, changelog, cli, contributing, documentation, getting-started, images, integrations, internals, mint.json, sdks, security, self-hosting, style.css)
frontend/ (.eslintrc, .eslintrc.js, .gitignore, .prettierrc, next.config.js, package-lock.json, package.json, .storybook, Dockerfile, README.md, components, cypress, pages, public, scripts, styles, src: components, config, const.ts, context, ee, helpers, hoc, hooks, layouts, lib, pages, reactQuery.tsx, services, styles, views)
SecretApprovalPage.tsxindex.tsx
components
AccessApprovalRequest
ApprovalPolicyList
SecretApprovalRequest
SecretMainPage
SecretMainPage.store.tsxSecretMainPage.tsxSecretMainPage.types.tsindex.tsx
components
ActionBar
ActionBar.tsx
CreateDynamicSecretForm
AwsElastiCacheInputForm.tsxAwsIamInputForm.tsxAzureEntraIdInputForm.tsxCassandraInputForm.tsxCreateDynamicSecretForm.tsxElasticSearchInputForm.tsxLdapInputForm.tsxMongoAtlasInputForm.tsxMongoDBInputForm.tsxRabbitMqInputForm.tsxRedisInputForm.tsxSapHanaInputForm.tsxSnowflakeInputForm.tsxSqlDatabaseInputForm.tsxindex.tsx
CreateSecretImportForm.tsxFolderForm.tsxMoveSecretsModal.tsxindex.tsxCreateSecretForm
DynamicSecretListView
CreateDynamicSecretLease.tsxDynamicSecretLease.tsxDynamicSecretListView.tsx
EditDynamicSecretForm
EditDynamicSecretAwsElastiCacheProviderForm.tsxEditDynamicSecretAwsIamForm.tsxEditDynamicSecretAzureEntraIdForm.tsxEditDynamicSecretCassandraForm.tsxEditDynamicSecretElasticSearchForm.tsxEditDynamicSecretForm.tsxEditDynamicSecretLdapForm.tsxEditDynamicSecretMongoAtlasForm.tsxEditDynamicSecretMongoDBForm.tsxEditDynamicSecretRabbitMqForm.tsxEditDynamicSecretRedisProviderForm.tsxEditDynamicSecretSapHanaForm.tsxEditDynamicSecretSnowflakeForm.tsxEditDynamicSecretSqlProviderForm.tsxindex.tsx
RenewDynamicSecretLease.tsxindex.tsxFolderListView
PitDrawer
SecretDropzone
SecretImportListView
SecretListView
CreateReminderForm.tsxGenRandomNumber.tsxSecretDetailSidebar.tsxSecretItem.tsxSecretListView.tsxSecretListView.utils.tsSecretNoAccessListView.tsxindex.tsx
SecretReferenceDetails
SnapshotView
SecretOverviewPage
SecretOverviewPage.tsxindex.tsx
components
CreateSecretForm
FolderBreadCrumbs
SecretOverviewDynamicSecretRow
SecretOverviewFolderRow
SecretOverviewTableRow
SecretEditRow.tsxSecretNoAccessOverviewTableRow.tsxSecretOverviewTableRow.tsxSecretRenameRow.tsxindex.tsx
SecretSearchInput
SecretSearchInput.tsx
components
QuickSearchDynamicSecretItem.tsxQuickSearchFolderItem.tsxQuickSearchModal.tsxQuickSearchSecretItem.tsxindex.tsx
index.tsxSecretTableResourceCount
SecretV2MigrationSection
SelectionPanel
SecretRotationPage
SecretScanning/components
Settings
BillingSettingsPage
BillingSettingsPage.tsxindex.tsx
components
BillingCloudTab
BillingCloudTab.tsxCurrentPlanSection.tsxManagePlansModal.tsxManagePlansTable.tsxPreviewSection.tsxindex.tsx
BillingDetailsTab
BillingDetailsTab.tsxCompanyNameSection.tsxInvoiceEmailSection.tsxPmtMethodsSection.tsxPmtMethodsTable.tsxTaxIDModal.tsxTaxIDSection.tsxTaxIDTable.tsxindex.tsx
BillingReceiptsTab
BillingSelfHostedTab
BillingTabGroup
index.tsxOrgSettingsPage
OrgSettingsPage.tsxindex.tsx
components
AuditLogStreamTab
ImportTab
OrgAuthTab
ExternalGroupOrgRoleMappings.tsxLDAPGroupMapModal.tsxLDAPModal.tsxOIDCModal.tsxOrgAuthTab.tsxOrgGeneralAuthSection.tsxOrgGenericAuthSection.tsxOrgLDAPSection.tsxOrgOIDCSection.tsxOrgSCIMSection.tsxOrgSSOSection.tsxSSOModal.tsxScimTokenModal.tsxindex.tsx
OrgDeleteSection
OrgEncryptionTab
OrgGeneralTab
OrgIncidentContactsSection
OrgNameChangeSection
OrgTabGroup
OrgWorkflowIntegrationTab
AddWorkflowIntegrationForm.tsxIntegrationFormDetails.tsxOrgWorkflowIntegrationTab.tsxSlackIntegrationForm.tsx
ProjectTemplatesTab
ProjectTemplatesTab.tsx
index.tsxcomponents
DeleteProjectTemplateModal.tsx
index.tsxEditProjectTemplateSection
EditProjectTemplateSection.tsx
ProjectTemplateDetailsModal.tsxProjectTemplatesSection.tsxProjectTemplatesTable.tsxindex.tsxcomponents
EditProjectTemplate.tsxProjectTemplateEditRoleForm.tsxProjectTemplateEnvironmentsForm.tsxProjectTemplateRolesSection.tsxindex.tsx
index.tsxPersonalSettingsPage
APIKeySection
AuthMethodSection
ChangeLanguageSection
ChangePasswordSection
DeleteAccountSection
EmergencyKitSection
PersonalAPIKeyTab
PersonalAuthTab
PersonalGeneralTab
PersonalSettingsPage.tsxPersonalTabGroup
SecuritySection
SessionsSection
UserNameSection
index.tsxProjectSettingsPage
ProjectSettingsPage.tsxindex.tsx
components
AuditLogsRetentionSection
AutoCapitalizationSection
BackfillSecretReferenceSection
DeleteProjectSection
EncryptionTab
EnvironmentSection
AddEnvironmentModal.tsxEnvironmentSection.tsxEnvironmentTable.tsxUpdateEnvironmentModal.tsxindex.tsx
PointInTimeVersionLimitSection
ProjectGeneralTab
ProjectNameChangeSection
RebuildSecretIndicesSection
SecretTagsSection
WebhooksTab
WorkflowIntegrationSection
index.tsxShareSecretPage
ShareSecretPublicPage
Signup
ViewSecretPublicPage
admin
styles
tailwind.config.jstsconfig.jsontsconfig.tsbuildinfohelm-charts
README.md
infisical-standalone-postgres
infisical
secrets-operator
Chart.yamlREADME.md
upload-to-cloudsmith.shtemplates
_helpers.tpldeployment.yamlinfisicalsecret-crd.yamlmanager-rbac.yamlmetrics-service.yamlserviceaccount.yaml
values.yamlimg
k8-operator
DockerfileMakefileREADME.md
api/v1alpha1
config
crd/bases
default
rbac
samples
controllers
auto_redeployment.goconditions.goinfisicalsecret_auth.goinfisicalsecret_controller.goinfisicalsecret_helper.go
go.modgo.sumkubectl-install
packages
migration
nginx
npm
package-lock.jsonpackage.jsonrender.yamlsink
standalone-entrypoint.sh
@ -1,10 +0,0 @@
backend/node_modules
frontend/node_modules
backend/frontend-build
**/node_modules
**/.next
.dockerignore
.git
README.md
.dockerignore
**/Dockerfile
90
.env.example
@ -1,33 +1,48 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required keys for platform encryption/decryption ops
PRIVATE_KEY=replace_with_nacl_sk
PUBLIC_KEY=replace_with_nacl_pk
ENCRYPTION_KEY=replace_with_lengthy_secure_hex

# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
JWT_SIGNUP_SECRET=replace_with_lengthy_secure_hex
JWT_REFRESH_SECRET=replace_with_lengthy_secure_hex
JWT_AUTH_SECRET=replace_with_lengthy_secure_hex

# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# JWT lifetime
# Optional lifetimes for JWT tokens expressed in seconds or a string
# describing a time span (e.g. 60, "2 days", "10h", "7d")
JWT_AUTH_LIFETIME=
JWT_REFRESH_LIFETIME=
JWT_SERVICE_SECRET=
JWT_SIGNUP_LIFETIME=

# Optional lifetimes for OTP expressed in seconds
EMAIL_TOKEN_LIFETIME=

# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin

# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

# Website URL
# Required

SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_USERNAME=
# Mail/SMTP
# Required to send emails
# By default, SMTP_HOST is set to smtp.gmail.com
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_NAME=Team
SMTP_USERNAME=team@infisical.com
SMTP_PASSWORD=

# Integration
@ -35,22 +50,9 @@ SMTP_PASSWORD=
CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=

# Sentry (optional) for monitoring errors
SENTRY_DSN=
@ -59,24 +61,10 @@ SENTRY_DSN=
# Ignore - Not applicable for self-hosted version
POSTHOG_HOST=
POSTHOG_PROJECT_API_KEY=

# SSO-specific variables
CLIENT_ID_GOOGLE_LOGIN=
CLIENT_SECRET_GOOGLE_LOGIN=

CLIENT_ID_GITHUB_LOGIN=
CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
STRIPE_SECRET_KEY=
STRIPE_PUBLISHABLE_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_PRODUCT_CARD_AUTH=
STRIPE_PRODUCT_PRO=
STRIPE_PRODUCT_STARTER=
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=
@ -1,2 +0,0 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=
@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218
@ -1,4 +1,3 @@
node_modules
built
healthcheck.js
tailwind.config.js
2
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@ -8,7 +8,7 @@ assignees: ''
---

### Feature description
A clear and concise description of what the feature should be.
A clear and concise description of what the the feature should be.

### Why would it be useful?
Why would this feature be useful for Infisical users?
BIN
.github/images/Deploy to AWS.png
vendored
Binary file not shown. Before: 2.3 KiB
BIN
.github/images/deploy-aws-button.png
vendored
Binary file not shown. Before: 19 KiB
BIN
.github/images/deploy-to-aws.png
vendored
Binary file not shown. Before: 2.8 KiB
BIN
.github/images/do-k8-install-btn.png
vendored
Binary file not shown. Before: 28 KiB
25
.github/pull_request_template.md
vendored
@ -1,25 +0,0 @@
# Description 📣

<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Here's how we expect a pull request to be : https://infisical.com/docs/contributing/getting-started/pull-requests -->

## Type ✨

- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation

# Tests 🛠️

<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. You may want to add screenshots when relevant and possible -->

```sh
# Here's some code block to paste some code snippets
```

---

- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/getting-started/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/getting-started/code-of-conduct). 📝

<!-- If you have any questions regarding contribution, here's the FAQ : https://infisical.com/docs/contributing/getting-started/faq -->
190
.github/resources/changelog-generator.py
vendored
@ -1,190 +0,0 @@
|
||||
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
|
||||
import os
|
||||
import requests
|
||||
import re
|
||||
from openai import OpenAI
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
import uuid
|
||||
|
||||
# Constants
|
||||
REPO_OWNER = "infisical"
|
||||
REPO_NAME = "infisical"
|
||||
TOKEN = os.environ["GITHUB_TOKEN"]
|
||||
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
|
||||
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
|
||||
SLACK_MSG_COLOR = "#36a64f"
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {TOKEN}",
|
||||
"Accept": "application/vnd.github+json",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
}
|
||||
|
||||
|
||||
def set_multiline_output(name, value):
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
delimiter = uuid.uuid1()
|
||||
print(f'{name}<<{delimiter}', file=fh)
|
||||
print(value, file=fh)
|
||||
print(delimiter, file=fh)
|
||||
|
||||
def post_changelog_to_slack(changelog, tag):
|
||||
slack_payload = {
|
||||
"text": "Hey team, it's changelog time! :wave:",
|
||||
"attachments": [
|
||||
{
|
||||
"color": SLACK_MSG_COLOR,
|
||||
"title": f"🗓️Infisical Changelog - {tag}",
|
||||
"text": changelog,
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Failed to post changelog to Slack.")
|
||||
|
||||
def find_previous_release_tag(release_tag:str):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
|
||||
while not(previous_tag.startswith("infisical/")):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
|
||||
return previous_tag
|
||||
|
||||
def get_tag_creation_date(tag_name):
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
|
||||
response = requests.get(url, headers=headers)
|
||||
response.raise_for_status()
|
||||
commit_sha = response.json()['object']['sha']
|
||||
|
||||
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
|
||||
commit_response = requests.get(commit_url, headers=headers)
|
||||
commit_response.raise_for_status()
|
||||
creation_date = commit_response.json()['commit']['author']['date']
|
||||
|
||||
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
|
||||
|
||||
|
||||
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
|
||||
# Use GitHub API to fetch PRs merged between the commits
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
|
||||
response = requests.get(url, headers=headers)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Error fetching PRs from GitHub API!")
|
||||
|
||||
prs = []
|
||||
for pr in response.json():
|
||||
# the idea is as tags happen recently we get last 100 closed PRs and then filter by tag creation date
|
||||
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
|
||||
prs.append(pr)
|
||||
|
||||
return prs
|
||||
|
||||
|
||||
def extract_commit_details_from_prs(prs):
|
||||
commit_details = []
|
||||
for pr in prs:
|
||||
commit_message = pr["title"]
|
||||
commit_url = pr["html_url"]
|
||||
pr_number = pr["number"]
|
||||
branch_name = pr["head"]["ref"]
|
||||
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
|
||||
|
||||
# If no issue numbers are found, add the PR details without issue numbers and URLs
|
||||
if not issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": None,
|
||||
"issue_url": None,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
for issue in issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": issue,
|
||||
}
|
||||
)
|
||||
|
||||
return commit_details
|
||||
|
||||
# Function to generate changelog using OpenAI
|
||||
def generate_changelog_with_openai(commit_details):
|
||||
commit_messages = []
|
||||
for details in commit_details:
|
||||
base_message = f"{details['pr_url']} - {details['message']}"
|
||||
# Add the issue URL if available
|
||||
# if details["issue_url"]:
|
||||
# base_message += f" (Linear Issue: {details['issue_url']})"
|
||||
commit_messages.append(base_message)
|
||||
|
||||
commit_list = "\n".join(commit_messages)
|
||||
prompt = """
|
||||
Generate a changelog for Infisical, opensource secretops
|
||||
The changelog should:
|
||||
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
|
||||
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
|
||||
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
|
||||
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
|
||||
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
|
||||
6. Linear Links: note that the Linear link is optional, include it only if provided.
|
||||
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
|
||||
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
|
||||
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
|
||||
And here's an example of the full changelog:
|
||||
|
||||
*Features*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Fixes & Improvements*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Technical Updates*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
|
||||
Stay tuned for more exciting updates coming soon!
|
||||
And here are the commits:
|
||||
{}
|
||||
""".format(
|
||||
commit_list
|
||||
)
|
||||
|
||||
client = OpenAI(api_key=OPENAI_API_KEY)
|
||||
messages = [{"role": "user", "content": prompt}]
|
||||
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
|
||||
|
||||
if "error" in response.choices[0].message:
|
||||
raise Exception("Error generating changelog with OpenAI!")
|
||||
|
||||
return response.choices[0].message.content.strip()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
# Get the latest and previous release tags
|
||||
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
|
||||
previous_tag = find_previous_release_tag(latest_tag)
|
||||
|
||||
latest_tag_date = get_tag_creation_date(latest_tag)
|
||||
previous_tag_date = get_tag_creation_date(previous_tag)
|
||||
|
||||
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
|
||||
pr_details = extract_commit_details_from_prs(prs)
|
||||
|
||||
# Generate changelog
|
||||
changelog = generate_changelog_with_openai(pr_details)
|
||||
|
||||
post_changelog_to_slack(changelog,latest_tag)
|
||||
# Print or post changelog to Slack
|
||||
# set_multiline_output("changelog", changelog)
|
||||
|
||||
except Exception as e:
|
||||
print(str(e))
|
3
.github/resources/docker-compose.be-test.yml
vendored
@ -6,14 +6,13 @@ services:
restart: unless-stopped
depends_on:
- mongo
image: infisical/infisical:test
image: infisical/backend:test
command: npm run start
environment:
- NODE_ENV=production
- MONGO_URL=mongodb://test:example@mongo:27017/?authSource=admin
- MONGO_USERNAME=test
- MONGO_PASSWORD=example
- ENCRYPTION_KEY=a984ecdf82ec779e55dbcc21303a900f
networks:
- infisical-test
26
.github/resources/rename_migration_files.py
vendored
@ -1,26 +0,0 @@
import os
from datetime import datetime, timedelta

def rename_migrations():
migration_folder = "./backend/src/db/migrations"
with open("added_files.txt", "r") as file:
changed_files = file.readlines()

# Find the latest file among the changed files
latest_timestamp = datetime.now() # utc time
for file_path in changed_files:
file_path = file_path.strip()
# each new file bump by 1s
latest_timestamp = latest_timestamp + timedelta(seconds=1)

new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
old_filename = os.path.join(migration_folder, file_path)
os.rename(old_filename, new_filename)
print(f"Renamed {old_filename} to {new_filename}")

if len(changed_files) == 0:
print("No new files added to migration folder")

if __name__ == "__main__":
rename_migrations()
57
.github/values.yaml
vendored
@ -1,57 +0,0 @@
|
||||
## @section Common parameters
|
||||
##
|
||||
|
||||
## @param nameOverride Override release name
|
||||
##
|
||||
nameOverride: ""
|
||||
## @param fullnameOverride Override release fullname
|
||||
##
|
||||
fullnameOverride: ""
|
||||
|
||||
## @section Infisical backend parameters
|
||||
## Documentation : https://infisical.com/docs/self-hosting/deployments/kubernetes
|
||||
##
|
||||
|
||||
infisical:
|
||||
autoDatabaseSchemaMigration: false
|
||||
|
||||
enabled: false
|
||||
|
||||
name: infisical
|
||||
replicaCount: 3
|
||||
image:
|
||||
repository: infisical/staging_infisical
|
||||
tag: "latest"
|
||||
pullPolicy: Always
|
||||
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
|
||||
kubeSecretRef: "managed-secret"
|
||||
|
||||
ingress:
|
||||
## @param ingress.enabled Enable ingress
|
||||
##
|
||||
enabled: true
|
||||
## @param ingress.ingressClassName Ingress class name
|
||||
##
|
||||
ingressClassName: nginx
|
||||
## @param ingress.nginx.enabled Ingress controller
|
||||
##
|
||||
# nginx:
|
||||
# enabled: true
|
||||
## @param ingress.annotations Ingress annotations
|
||||
##
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: "letsencrypt-prod"
|
||||
hostName: "gamma.infisical.com"
|
||||
tls:
|
||||
- secretName: letsencrypt-prod
|
||||
hosts:
|
||||
- gamma.infisical.com
|
||||
|
||||
postgresql:
|
||||
enabled: false
|
||||
|
||||
redis:
|
||||
enabled: false
|
41
.github/workflows/be-test-report.yml
vendored
@ -1,41 +0,0 @@
|
||||
name: "Backend Test Report"
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["Check Backend Pull Request"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
be-report:
|
||||
name: Backend test report
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: 📁 Download test results
|
||||
id: download-artifact
|
||||
uses: dawidd6/action-download-artifact@v2
|
||||
with:
|
||||
name: be-test-results
|
||||
path: backend
|
||||
workflow: check-be-pull-request.yml
|
||||
workflow_conclusion: success
|
||||
- name: 📋 Publish test results
|
||||
uses: dorny/test-reporter@v1
|
||||
with:
|
||||
name: Test Results
|
||||
path: reports/jest-*.xml
|
||||
reporter: jest-junit
|
||||
working-directory: backend
|
||||
- name: 📋 Publish coverage
|
||||
uses: ArtiomTr/jest-coverage-report-action@v2
|
||||
id: coverage
|
||||
with:
|
||||
output: comment, report-markdown
|
||||
coverage-file: coverage/report.json
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
working-directory: backend
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
message: ${{ steps.coverage.outputs.report }}
|
189
.github/workflows/build-binaries.yml
vendored
@ -1,189 +0,0 @@
|
||||
name: Build Binaries and Deploy
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: "Version number"
|
||||
required: true
|
||||
type: string
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./backend
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
strategy:
|
||||
matrix:
|
||||
arch: [x64, arm64]
|
||||
os: [linux, win]
|
||||
include:
|
||||
- os: linux
|
||||
target: node20-linux
|
||||
- os: win
|
||||
target: node20-win
|
||||
runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install pkg
|
||||
run: npm install -g @yao-pkg/pkg
|
||||
|
||||
- name: Install dependencies (backend)
|
||||
run: npm install
|
||||
|
||||
- name: Install dependencies (frontend)
|
||||
run: npm install --prefix ../frontend
|
||||
|
||||
- name: Prerequisites for pkg
|
||||
run: npm run binary:build
|
||||
|
||||
- name: Package into node binary
|
||||
run: |
|
||||
if [ "${{ matrix.os }}" != "linux" ]; then
|
||||
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
|
||||
else
|
||||
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
|
||||
fi
|
||||
|
||||
# Set up .deb package structure (Debian/Ubuntu only)
|
||||
- name: Set up .deb package structure
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
mkdir -p infisical-core/DEBIAN
|
||||
mkdir -p infisical-core/usr/local/bin
|
||||
cp ./binary/infisical-core infisical-core/usr/local/bin/
|
||||
chmod +x infisical-core/usr/local/bin/infisical-core
|
||||
|
||||
- name: Create control file
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
cat <<EOF > infisical-core/DEBIAN/control
|
||||
Package: infisical-core
|
||||
Version: ${{ github.event.inputs.version }}
|
||||
Section: base
|
||||
Priority: optional
|
||||
Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
|
||||
Maintainer: Infisical <daniel@infisical.com>
|
||||
Description: Infisical Core standalone executable (app.infisical.com)
|
||||
EOF
|
||||
|
||||
# Build .deb file (Debian/Ubunutu only)
|
||||
- name: Build .deb package
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
dpkg-deb --build infisical-core
|
||||
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
|
||||
|
||||
### RPM
|
||||
|
||||
# Set up .rpm package structure
|
||||
- name: Set up .rpm package structure
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
mkdir -p infisical-core-rpm/usr/local/bin
|
||||
cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
|
||||
chmod +x infisical-core-rpm/usr/local/bin/infisical-core
|
||||
|
||||
# Install RPM build tools
|
||||
- name: Install RPM build tools
|
||||
if: matrix.os == 'linux'
|
||||
run: sudo apt-get update && sudo apt-get install -y rpm
|
||||
|
||||
# Create .spec file for RPM
|
||||
- name: Create .spec file for RPM
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
cat <<EOF > infisical-core.spec
|
||||
|
||||
%global _enable_debug_package 0
|
||||
%global debug_package %{nil}
|
||||
%global __os_install_post /usr/lib/rpm/brp-compress %{nil}
|
||||
|
||||
Name: infisical-core
|
||||
Version: ${{ github.event.inputs.version }}
|
||||
Release: 1%{?dist}
|
||||
Summary: Infisical Core standalone executable
|
||||
License: Proprietary
|
||||
URL: https://app.infisical.com
|
||||
|
||||
%description
|
||||
Infisical Core standalone executable (app.infisical.com)
|
||||
|
||||
%install
|
||||
mkdir -p %{buildroot}/usr/local/bin
|
||||
cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
|
||||
|
||||
%files
|
||||
/usr/local/bin/infisical-core
|
||||
|
||||
%pre
|
||||
|
||||
%post
|
||||
|
||||
%preun
|
||||
|
||||
%postun
|
||||
EOF
|
||||
|
||||
# Build .rpm file
|
||||
- name: Build .rpm package
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
# Create necessary directories
|
||||
mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
|
||||
|
||||
# Copy the binary directly to SOURCES
|
||||
cp ./binary/infisical-core rpmbuild/SOURCES/
|
||||
|
||||
# Run rpmbuild with verbose output
|
||||
rpmbuild -vv -bb \
|
||||
--define "_topdir $(pwd)/rpmbuild" \
|
||||
--define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
|
||||
--define "_rpmdir $(pwd)/rpmbuild/RPMS" \
|
||||
--target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
|
||||
infisical-core.spec
|
||||
|
||||
# Try to find the RPM file
|
||||
find rpmbuild -name "*.rpm"
|
||||
|
||||
# Move the RPM file if found
|
||||
if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
|
||||
mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
|
||||
else
|
||||
echo "RPM file not found!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x" # Specify the Python version you need
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install --upgrade cloudsmith-cli
|
||||
|
||||
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
|
||||
- name: Publish to Cloudsmith (Debian/Ubuntu)
|
||||
if: matrix.os == 'linux'
|
||||
working-directory: ./backend
|
||||
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
|
||||
|
||||
# Publish .rpm file to Cloudsmith (Red Hat-based systems only)
|
||||
- name: Publish .rpm to Cloudsmith
|
||||
if: matrix.os == 'linux'
|
||||
working-directory: ./backend
|
||||
run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm
|
||||
|
||||
# Publish .exe file to Cloudsmith (Windows only)
|
||||
- name: Publish to Cloudsmith (Windows)
|
||||
if: matrix.os == 'win'
|
||||
working-directory: ./backend
|
||||
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
|
123
.github/workflows/build-docker-image-to-prod.yml
vendored
@ -1,123 +0,0 @@
|
||||
name: Release production images (frontend, backend)
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*"
|
||||
- "!infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
backend-image:
|
||||
name: Build backend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
# - name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build backend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
load: true
|
||||
context: backend
|
||||
tags: infisical/infisical:test
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- name: ⏻ Spawn backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
|
||||
- name: 🧪 Test backend image
|
||||
run: |
|
||||
./.github/resources/healthcheck.sh infisical-backend-test
|
||||
- name: ⏻ Shut down backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml down
|
||||
- name: 🏗️ Build backend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: backend
|
||||
tags: |
|
||||
infisical/backend:${{ steps.commit.outputs.short }}
|
||||
infisical/backend:latest
|
||||
infisical/backend:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
frontend-image:
|
||||
name: Build frontend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build frontend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
load: true
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
project: 64mmf0n610
|
||||
context: frontend
|
||||
tags: infisical/frontend:test
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
||||
- name: ⏻ Spawn frontend container
|
||||
run: |
|
||||
docker run -d --rm --name infisical-frontend-test infisical/frontend:test
|
||||
- name: 🧪 Test frontend image
|
||||
run: |
|
||||
./.github/resources/healthcheck.sh infisical-frontend-test
|
||||
- name: ⏻ Shut down frontend container
|
||||
run: |
|
||||
docker stop infisical-frontend-test
|
||||
- name: 🏗️ Build frontend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
push: true
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: frontend
|
||||
tags: |
|
||||
infisical/frontend:${{ steps.commit.outputs.short }}
|
||||
infisical/frontend:latest
|
||||
infisical/frontend:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
38
.github/workflows/build-patroni-docker-img.yml
vendored
@ -1,38 +0,0 @@
|
||||
name: Build patroni
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
patroni-image:
|
||||
name: Build patroni
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: 'zalando/patroni'
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile
|
||||
tags: |
|
||||
infisical/patroni:${{ steps.commit.outputs.short }}
|
||||
infisical/patroni:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
|
@ -1,77 +0,0 @@
|
||||
name: "Check API For Breaking Changes"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/src/server/routes/**"
|
||||
- "backend/src/ee/routes/**"
|
||||
|
||||
jobs:
|
||||
check-be-api-changes:
|
||||
name: Check API Changes
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: Checkout source
|
||||
uses: actions/checkout@v3
|
||||
# - name: Setup Node 20
|
||||
# uses: actions/setup-node@v3
|
||||
# with:
|
||||
# node-version: "20"
|
||||
# uncomment this when testing locally using nektos/act
|
||||
- uses: KengoTODA/actions-setup-docker-compose@v1
|
||||
if: ${{ env.ACT }}
|
||||
name: Install `docker compose` for local simulations
|
||||
with:
|
||||
version: "2.14.2"
|
||||
- name: 📦Build the latest image
|
||||
run: docker build --tag infisical-api .
|
||||
working-directory: backend
|
||||
- name: Start postgres and redis
|
||||
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
|
||||
- name: Start the server
|
||||
run: |
|
||||
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
|
||||
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
|
||||
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
|
||||
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
|
||||
env:
|
||||
REDIS_URL: redis://172.17.0.1:6379
|
||||
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
|
||||
JWT_AUTH_SECRET: something-random
|
||||
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.21.5'
|
||||
- name: Wait for container to be stable and check logs
|
||||
run: |
|
||||
SECONDS=0
|
||||
HEALTHY=0
|
||||
while [ $SECONDS -lt 60 ]; do
|
||||
if docker ps | grep infisical-api | grep -q healthy; then
|
||||
echo "Container is healthy."
|
||||
HEALTHY=1
|
||||
break
|
||||
fi
|
||||
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
|
||||
|
||||
docker logs infisical-api
|
||||
|
||||
sleep 2
|
||||
SECONDS=$((SECONDS+2))
|
||||
done
|
||||
|
||||
if [ $HEALTHY -ne 1 ]; then
|
||||
echo "Container did not become healthy in time"
|
||||
exit 1
|
||||
fi
|
||||
- name: Install openapi-diff
|
||||
run: go install github.com/tufin/oasdiff@latest
|
||||
- name: Running OpenAPI Spec diff action
|
||||
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
|
||||
- name: cleanup
|
||||
run: |
|
||||
docker compose -f "docker-compose.dev.yml" down
|
||||
docker stop infisical-api
|
||||
docker remove infisical-api
|
41
.github/workflows/check-be-pull-request.yml
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
name: Check Backend Pull Request
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [ opened, synchronize ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- '!backend/README.md'
|
||||
- '!backend/.*'
|
||||
- 'backend/.eslintrc.js'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
check-be-pr:
|
||||
name: Check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
-
|
||||
name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: 🔧 Setup Node 16
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
-
|
||||
name: 📦 Install dependencies
|
||||
run: npm ci --only-production --ignore-scripts
|
||||
working-directory: backend
|
||||
# -
|
||||
# name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: backend
|
||||
-
|
||||
name: 🏗️ Run build
|
||||
run: npm run build
|
||||
working-directory: backend
|
35
.github/workflows/check-be-ts-and-lint.yml
vendored
@ -1,35 +0,0 @@
|
||||
name: "Check Backend PR types and lint"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "!backend/README.md"
|
||||
- "!backend/.*"
|
||||
- "backend/.eslintrc.js"
|
||||
|
||||
jobs:
|
||||
check-be-pr:
|
||||
name: Check TS and Lint
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 🔧 Setup Node 20
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
working-directory: backend
|
||||
- name: Run type check
|
||||
run: npm run type:check
|
||||
working-directory: backend
|
||||
- name: Run lint check
|
||||
run: npm run lint
|
||||
working-directory: backend
|
41
.github/workflows/check-fe-pull-request.yml
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
name: Check Frontend Pull Request
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [ opened, synchronize ]
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- '!frontend/README.md'
|
||||
- '!frontend/.*'
|
||||
- 'frontend/.eslintrc.js'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
check-fe-pr:
|
||||
name: Check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
-
|
||||
name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: 🔧 Setup Node 16
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
-
|
||||
name: 📦 Install dependencies
|
||||
run: npm ci --only-production --ignore-scripts
|
||||
working-directory: frontend
|
||||
# -
|
||||
# name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: frontend
|
||||
-
|
||||
name: 🏗️ Run build
|
||||
run: npm run build
|
||||
working-directory: frontend
|
35
.github/workflows/check-fe-ts-and-lint.yml
vendored
@ -1,35 +0,0 @@
|
||||
name: Check Frontend Type and Lint check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "frontend/**"
|
||||
- "!frontend/README.md"
|
||||
- "!frontend/.*"
|
||||
- "frontend/.eslintrc.js"
|
||||
|
||||
jobs:
|
||||
check-fe-ts-lint:
|
||||
name: Check Frontend Type and Lint check
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 🔧 Setup Node 16
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "16"
|
||||
cache: "npm"
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
- name: 📦 Install dependencies
|
||||
run: npm install
|
||||
working-directory: frontend
|
||||
- name: 🏗️ Run Type check
|
||||
run: npm run type:check
|
||||
working-directory: frontend
|
||||
- name: 🏗️ Run Link check
|
||||
run: npm run lint:fix
|
||||
working-directory: frontend
|
@ -1,25 +0,0 @@
name: Check migration file edited

on:
pull_request:
types: [opened, synchronize]
paths:
- 'backend/src/db/migrations/**'

jobs:
rename:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Check any migration files are modified, renamed or duplicated.
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
if [ -s edited_files.txt ]; then
echo "Exiting migration files cannot be modified."
cat edited_files.txt
exit 1
fi
22
.github/workflows/close_inactive_issues.yml
vendored
Normal file
@ -0,0 +1,22 @@
name: Close inactive issues
on:
schedule:
- cron: "30 1 * * *"

jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v4
with:
days-before-issue-stale: 30
days-before-issue-close: 14
stale-issue-label: "stale"
stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
days-before-pr-stale: -1
days-before-pr-close: -1
repo-token: ${{ secrets.GITHUB_TOKEN }}
212
.github/workflows/deployment-pipeline.yml
vendored
@ -1,212 +0,0 @@
|
||||
name: Deployment pipeline
|
||||
on: [workflow_dispatch]
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
infisical-tests:
|
||||
name: Integration tests
|
||||
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
|
||||
uses: ./.github/workflows/run-backend-tests.yml
|
||||
|
||||
infisical-image:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-tests]
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile.standalone-infisical
|
||||
tags: |
|
||||
infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
infisical/staging_infisical:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
|
||||
|
||||
gamma-deployment:
|
||||
name: Deploy to gamma
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-image]
|
||||
environment:
|
||||
name: Gamma
|
||||
steps:
|
||||
- uses: twingate/github-action@v1
|
||||
with:
|
||||
# The Twingate Service Key used to connect Twingate to the proper service
|
||||
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
|
||||
#
|
||||
# Required
|
||||
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
|
||||
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-gamma-stage
          cluster: infisical-gamma-stage
          wait-for-service-stability: true

  production-us:
    name: US production deploy
    runs-on: ubuntu-latest
    needs: [gamma-deployment]
    environment:
      name: Production
    steps:
      - uses: twingate/github-action@v1
        with:
          # The Twingate Service Key used to connect Twingate to the proper service
          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
          #
          # Required
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
          AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true

  production-eu:
    name: EU production deploy
    runs-on: ubuntu-latest
    needs: [production-us]
    environment:
      name: production-eu
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
.github/workflows/docker-image.yml (vendored, new file, 86 lines)
@@ -0,0 +1,86 @@
name: Push to Docker Hub

on: [workflow_dispatch]

jobs:
  backend-image:
    name: Build backend image
    runs-on: ubuntu-latest

    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 📦 Build backend and export to Docker
        uses: docker/build-push-action@v3
        with:
          load: true
          context: backend
          tags: infisical/backend:test
      - name: ⏻ Spawn backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
      - name: 🧪 Test backend image
        run: |
          ./.github/resources/healthcheck.sh infisical-backend-test
      - name: ⏻ Shut down backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml down
      - name: 🏗️ Build backend and push
        uses: docker/build-push-action@v3
        with:
          push: true
          context: backend
          tags: infisical/backend:latest
          platforms: linux/amd64,linux/arm64

  frontend-image:
    name: Build frontend image
    runs-on: ubuntu-latest

    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 📦 Build frontend and export to Docker
        uses: docker/build-push-action@v3
        with:
          load: true
          context: frontend
          tags: infisical/frontend:test
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
      - name: ⏻ Spawn frontend container
        run: |
          docker run -d --rm --name infisical-frontend-test infisical/frontend:test
      - name: 🧪 Test frontend image
        run: |
          ./.github/resources/healthcheck.sh infisical-frontend-test
      - name: ⏻ Shut down frontend container
        run: |
          docker stop infisical-frontend-test
      - name: 🏗️ Build frontend and push
        uses: docker/build-push-action@v3
        with:
          push: true
          context: frontend
          tags: infisical/frontend:latest
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
.github/workflows/generate-release-changelog.yml (vendored, 34 lines)
@@ -1,34 +0,0 @@
name: Generate Changelog
permissions:
  contents: write

on:
  workflow_dispatch:
  push:
    tags:
      - "infisical/v*.*.*-postgres"

jobs:
  generate_changelog:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-tags: true
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12.0"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests openai
      - name: Generate Changelog and Post to Slack
        id: gen-changelog
        run: python .github/resources/changelog-generator.py
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -1,115 +0,0 @@
name: Release standalone docker image
on:
  push:
    tags:
      - "infisical/v*.*.*-postgres"

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

  infisical-fips-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical-fips:latest-postgres
            infisical/infisical-fips:${{ steps.commit.outputs.short }}
            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.fips.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
.github/workflows/release_build.yml (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
name: Go releaser

on:
  push:
    # run only against tags
    tags:
      - 'v*'

permissions:
  contents: write
  # packages: write
  # issues: write

jobs:
  goreleaser:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: git fetch --force --tags
      - uses: actions/setup-go@v3
        with:
          go-version: '>=1.19.3'
          cache: true
          cache-dependency-path: cli/go.sum
      - uses: goreleaser/goreleaser-action@v2
        with:
          distribution: goreleaser
          version: latest
          args: release --rm-dist
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/release_build_infisical_cli.yml (vendored, 131 lines)
@@ -1,131 +0,0 @@
name: Build and release CLI

on:
  workflow_dispatch:

  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"

permissions:
  contents: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
      CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-20.04
    env:
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Print version
        run: echo ${{ env.CLI_VERSION }}

      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-20.04
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: libssl1.1 => libssl1.0-dev for OSXCross
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt update && apt-cache policy libssl1.0-dev
          sudo apt-get install libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: v1.26.2-pro
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,16 +1,10 @@
name: Release Docker image for K8 operator
on:
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"
name: Release Docker image for K8 operator
on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
      - uses: actions/checkout@v2

      - name: 🔧 Set up QEMU

@@ -32,6 +26,4 @@ jobs:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
          tags: infisical/kubernetes-operator:latest
.github/workflows/run-backend-tests.yml (vendored, 47 lines)
@@ -1,47 +0,0 @@
name: "Run backend tests"

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "backend/**"
      - "!backend/README.md"
      - "!backend/.*"
      - "backend/.eslintrc.js"
  workflow_call:

jobs:
  check-be-pr:
    name: Run integration test
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - uses: KengoTODA/actions-setup-docker-compose@v1
        if: ${{ env.ACT }}
        name: Install `docker compose` for local simulations
        with:
          version: "2.14.2"
      - name: 🔧 Setup Node 20
        uses: actions/setup-node@v3
        with:
          node-version: "20"
          cache: "npm"
          cache-dependency-path: backend/package-lock.json
      - name: Install dependencies
        run: npm install
        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Start integration test
        run: npm run test:e2e
        working-directory: backend
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
          AUTH_SECRET: something-random
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - name: cleanup
        run: |
          docker compose -f "docker-compose.dev.yml" down
.github/workflows/run-cli-tests.yml (vendored, 55 lines)
@@ -1,55 +0,0 @@
name: Go CLI Tests

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "cli/**"

  workflow_dispatch:

  workflow_call:
    secrets:
      CLI_TESTS_UA_CLIENT_ID:
        required: true
      CLI_TESTS_UA_CLIENT_SECRET:
        required: true
      CLI_TESTS_SERVICE_TOKEN:
        required: true
      CLI_TESTS_PROJECT_ID:
        required: true
      CLI_TESTS_ENV_SLUG:
        required: true
      CLI_TESTS_USER_EMAIL:
        required: true
      CLI_TESTS_USER_PASSWORD:
        required: true
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
        required: true
jobs:
  test:
    defaults:
      run:
        working-directory: ./cli
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.x"
      - name: Install dependencies
        run: go get .
      - name: Test with the Go CLI
        env:
          CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
          CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
          CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
          CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
          CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
          CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
          CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
          # INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

        run: go test -v -count=1 ./test
.gitignore (vendored, 27 lines)
@@ -1,12 +1,10 @@
# backend
node_modules
.env
.env.test
.env.dev
.env.gamma
.env.prod
.env.infisical
.env.migration

*~
*.swp
*.swo
@@ -14,8 +12,6 @@ node_modules
.DS_Store

/dist
/completions/
/manpages/

# frontend

@@ -29,12 +25,10 @@ node_modules
.env

# testing
coverage
reports
junit.xml
/coverage

# next.js
.next/
/.next/
/out/

# production
@@ -58,18 +52,3 @@ yarn-error.log*

# Infisical init
.infisical.json

.infisicalignore

# Editor specific
.vscode/*
.idea/*

frontend-build

*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

/npm/bin
.goreleaser.yaml (210 lines)
@@ -6,83 +6,49 @@
# - cd cli && go mod tidy
# # you may remove this if you don't need go generate
# - cd cli && go generate ./...
before:
  hooks:
    - ./cli/scripts/completions.sh
    - ./cli/scripts/manpages.sh

monorepo:
  tag_prefix: infisical-cli/
  dir: cli

builds:
  - id: darwin-build
    binary: infisical
    ldflags:
      - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
      - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
    flags:
      - -trimpath
    env:
      - CGO_ENABLED=1
      - CC=/home/runner/work/osxcross/target/bin/o64-clang
      - CXX=/home/runner/work/osxcross/target/bin/o64-clang++
    goos:
      - darwin
    ignore:
      - goos: darwin
        goarch: "386"
    dir: ./cli

  - id: all-other-builds
    env:
    env:
      - CGO_ENABLED=0
    binary: infisical
    ldflags:
      - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
      - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
    flags:
      - -trimpath
    id: infisical
    goos:
      - darwin
      - freebsd
      - linux
      - netbsd
      - openbsd
      - windows
    goarch:
      - "386"
      - 386
      - amd64
      - arm
      - arm64
    goarm:
      - "6"
      - "7"
      - 6
      - 7
    ignore:
      - goos: darwin
        goarch: "386"
      - goos: windows
        goarch: "386"
      - goos: freebsd
        goarch: "386"
    dir: ./cli

archives:
  - format_overrides:
      - goos: windows
        format: zip
    files:
      - ../README*
      - ../LICENSE*
      - ../manpages/*
      - ../completions/*

release:
  replace_existing_draft: true
  mode: "replace"
  mode: 'replace'

checksum:
  name_template: "checksums.txt"

  name_template: 'checksums.txt'
snapshot:
  name_template: "{{ .Version }}-devel"
  name_template: "{{ incpatch .Version }}"
changelog:
  sort: asc
  filters:
    exclude:
      - '^docs:'
      - '^test:'

# publishers:
#   - name: fury.io
@@ -90,7 +56,6 @@ snapshot:
#       - infisical
#     dir: "{{ dir .ArtifactPath }}"
#     cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/

brews:
  - name: infisical
    tap:
@@ -102,55 +67,22 @@ brews:
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"
  - name: "infisical@{{.Version}}"
    tap:
      owner: Infisical
      name: homebrew-get-cli
    commit_author:
      name: "Infisical"
      email: ai@infisical.com
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"

nfpms:
  - id: infisical
    package_name: infisical
    builds:
      - all-other-builds
    vendor: Infisical, Inc
    homepage: https://infisical.com/
    maintainer: Infisical, Inc
    description: The official Infisical CLI
    license: MIT
    formats:
      - rpm
      - deb
      - apk
      - archlinux
    bindir: /usr/bin
    contents:
      - src: ./completions/infisical.bash
        dst: /etc/bash_completion.d/infisical
      - src: ./completions/infisical.fish
        dst: /usr/share/fish/vendor_completions.d/infisical.fish
      - src: ./completions/infisical.zsh
        dst: /usr/share/zsh/site-functions/_infisical
      - src: ./manpages/infisical.1.gz
        dst: /usr/share/man/man1/infisical.1.gz

  - id: infisical
    package_name: infisical
    builds:
      - infisical
    vendor: Infisical, Inc
    homepage: https://infisical.com/
    maintainer: Infisical, Inc
    description: The official Infisical CLI
    license: Apache 2.0
    formats:
      - rpm
      - deb
      - apk
      - archlinux
    bindir: /usr/bin
scoop:
  bucket:
    owner: Infisical
@@ -160,64 +92,20 @@ scoop:
    email: ai@infisical.com
  homepage: "https://infisical.com"
  description: "The official Infisical CLI"
  license: MIT

aurs:
  - name: infisical-bin
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    maintainers:
      - Infisical, Inc <support@infisical.com>
    license: MIT
    private_key: "{{ .Env.AUR_KEY }}"
    git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
    package: |-
      # bin
      install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
      # license
      install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
      # completions
      mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
      mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
      mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
      install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
      install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
      install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
      # man pages
      install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"

dockers:
  - dockerfile: docker/alpine
    goos: linux
    goarch: amd64
    use: buildx
    ids:
      - all-other-builds
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
      - "infisical/cli:latest-amd64"
    build_flag_templates:
      - "--pull"
      - "--platform=linux/amd64"
  - dockerfile: docker/alpine
    goos: linux
    goarch: amd64
    use: buildx
    ids:
      - all-other-builds
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
      - "infisical/cli:latest-arm64"
    build_flag_templates:
      - "--pull"
      - "--platform=linux/arm64"

docker_manifests:
  - name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
  - name_template: "infisical/cli:latest"
    image_templates:
      - "infisical/cli:latest-amd64"
      - "infisical/cli:latest-arm64"
  license: Apache-2.0
# dockers:
#   - dockerfile: goreleaser.dockerfile
#     goos: linux
#     goarch: amd64
#     ids:
#       - infisical
#     image_templates:
#       - "infisical/cli:{{ .Version }}"
#       - "infisical/cli:{{ .Major }}.{{ .Minor }}"
#       - "infisical/cli:{{ .Major }}"
#       - "infisical/cli:latest"
#     build_flag_templates:
#       - "--label=org.label-schema.schema-version=1.0"
#       - "--label=org.label-schema.version={{.Version}}"
#       - "--label=org.label-schema.name={{.ProjectName}}"
#       - "--platform=linux/amd64"
@@ -1,6 +1,5 @@

#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

npx lint-staged

infisical scan git-changes --staged -v

@@ -1,9 +0,0 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134

@@ -2,6 +2,6 @@

Thanks for taking the time to contribute! 😃 🚀

Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.

We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀
@@ -1,167 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base

FROM base AS frontend-dependencies
WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static

USER non-root-user

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##
FROM base AS backend-build

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    && rm -rf /var/lib/apt/lists/*

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    && rm -rf /var/lib/apt/lists/*

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production

# Install necessary packages
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    && rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
@@ -1,152 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-alpine AS base

FROM base AS frontend-dependencies

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user

RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static

USER non-root-user

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

WORKDIR /app

# Required for pkcs11js
RUN apk add --no-cache python3 make g++

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

WORKDIR /app

# Required for pkcs11js
RUN apk add --no-cache python3 make g++

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.31.1 && apk add --no-cache git

RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
Makefile (22 lines)
@@ -5,26 +5,10 @@ push:
	docker-compose -f docker-compose.yml push

up-dev:
	docker compose -f docker-compose.dev.yml up --build

up-dev-ldap:
	docker compose -f docker-compose.dev.yml --profile ldap up --build
	docker-compose -f docker-compose.dev.yml up --build

up-prod:
	docker-compose -f docker-compose.prod.yml up --build
	docker-compose -f docker-compose.yml up --build

down:
	docker compose -f docker-compose.dev.yml down

reviewable-ui:
	cd frontend && \
	npm run lint:fix && \
	npm run type:check

reviewable-api:
	cd backend && \
	npm run lint:fix && \
	npm run type:check

reviewable: reviewable-ui reviewable-api

	docker-compose down
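For quick reference, the Makefile targets above are invoked with plain `make` (assuming Docker and Docker Compose are installed and the compose files referenced above are present); a minimal example:

```console
# bring up the local development stack defined in docker-compose.dev.yml
make up-dev

# tear the stack down again when finished
make down
```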
397
README.md
397
README.md
@ -1,42 +1,30 @@
|
||||
<h1 align="center">
|
||||
<img width="300" src="/img/logoname-black.svg#gh-light-mode-only" alt="infisical">
|
||||
<img width="300" src="/img/logoname-white.svg#gh-dark-mode-only" alt="infisical">
|
||||
</h1>
|
||||
<p align="center">
|
||||
<p align="center"><b>The open-source secret management platform</b>: Sync secrets/configs across your team/infrastructure and prevent secret leaks.</p>
|
||||
<p align="center">Open-source, E2EE, simple tool to manage and sync environment variables across your team and infrastructure.</p>
|
||||
</p>
|
||||
|
||||
<h4 align="center">
|
||||
<a href="https://infisical.com/slack">Slack</a> |
|
||||
<a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">Slack</a> |
|
||||
<a href="https://infisical.com/">Infisical Cloud</a> |
|
||||
<a href="https://infisical.com/docs/self-hosting/overview">Self-Hosting</a> |
|
||||
<a href="https://infisical.com/docs/documentation/getting-started/introduction">Docs</a> |
|
||||
<a href="https://www.infisical.com">Website</a> |
|
||||
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
|
||||
<a href="https://infisical.com/docs/getting-started/introduction">Docs</a> |
|
||||
<a href="https://www.infisical.com">Website</a>
|
||||
</h4>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
|
||||
<img src=".github/images/deploy-to-aws.png" width="137" />
|
||||
</a>
|
||||
<a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
|
||||
<img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<h4 align="center">
|
||||
<a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
|
||||
<img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
|
||||
<a href="https://github.com/medusajs/medusa/blob/master/LICENSE">
|
||||
<img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Medusa is released under the MIT license." />
|
||||
</a>
|
||||
<a href="https://github.com/infisical/infisical/blob/main/CONTRIBUTING.md">
|
||||
<img src="https://img.shields.io/badge/PRs-Welcome-brightgreen" alt="PRs welcome!" />
|
||||
</a>
|
||||
<a href="https://github.com/Infisical/infisical/issues">
|
||||
<a href="">
|
||||
<img src="https://img.shields.io/github/commit-activity/m/infisical/infisical" alt="git commit activity" />
|
||||
</a>
|
||||
<a href="https://cloudsmith.io/~infisical/repos/">
|
||||
<img src="https://img.shields.io/badge/Downloads-6.95M-orange" alt="Cloudsmith downloads" />
|
||||
</a>
|
||||
<a href="https://infisical.com/slack">
|
||||
<a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">
|
||||
<img src="https://img.shields.io/badge/chat-on%20Slack-blueviolet" alt="Slack community channel" />
|
||||
</a>
|
||||
<a href="https://twitter.com/infisical">
|
||||
@ -44,120 +32,289 @@
|
||||
</a>
|
||||
</h4>
|
||||
|
||||
<img src="/img/infisical_github_repo2.png" width="100%" alt="Dashboard" />
|
||||
<img src="/img/infisical_github_repo.png" width="100%" alt="Dashboard" />
|
||||
|
||||
## Introduction
|
||||
**[Infisical](https://infisical.com)** is an open source, E2EE tool to help teams manage and sync environment variables across their development workflow and infrastructure. It's designed to be simple and take minutes to get going.
|
||||
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
|
||||
- **[User-Friendly Dashboard](https://infisical.com/docs/getting-started/dashboard/project)** to manage your team's environment variables within projects
|
||||
- **[Language-Agnostic CLI](https://infisical.com/docs/cli/overview)** that pulls and injects environment variables into your local workflow
|
||||
- **[Complete control over your data](https://infisical.com/docs/self-hosting/overview)** - host it yourself on any infrastructure
|
||||
- **Navigate Multiple Environments** per project (e.g. development, staging, production, etc.)
|
||||
- **Personal/Shared** scoping for environment variables
|
||||
- **[Integrations](https://infisical.com/docs/integrations/overview)** with CI/CD and production infrastructure (Heroku available, more coming soon)
|
||||
- 🔜 **1-Click Deploy** to Digital Ocean and Heroku
|
||||
- 🔜 **Authentication/Authorization** for projects (read/write controls soon)
|
||||
- 🔜 **Automatic Secret Rotation**
|
||||
- 🔜 **2FA**
|
||||
- 🔜 **Access Logs**
|
||||
- 🔜 **Slack Integration & MS Teams** integrations
|
||||
|
||||
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
|
||||
And more.
|
||||
|
||||
## Features
|
||||
## 🚀 Get started
|
||||
|
||||
### Secrets Management:
|
||||
To quickly get started, visit our [get started guide](https://infisical.com/docs/getting-started/introduction).
|
||||
|
||||
- **[Dashboard](https://infisical.com/docs/documentation/platform/project)**: Manage secrets across projects and environments (e.g. development, production, etc.) through a user-friendly interface.
|
||||
- **[Native Integrations](https://infisical.com/docs/integrations/overview)**: Sync secrets to platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and use tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
|
||||
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)**: Keep track of every secret and project state; roll back when needed.
|
||||
- **[Secret Rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview)**: Rotate secrets at regular intervals for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/secret-rotation/postgres), [MySQL](https://infisical.com/docs/documentation/platform/secret-rotation/mysql), [AWS IAM](https://infisical.com/docs/documentation/platform/secret-rotation/aws-iam), and more.
|
||||
- **[Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview)**: Generate ephemeral secrets on-demand for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/postgresql), [MySQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/mysql), [RabbitMQ](https://infisical.com/docs/documentation/platform/dynamic-secrets/rabbit-mq), and more.
|
||||
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)**: Prevent secrets from leaking to git.
|
||||
- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
|
||||
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.
|
||||
<p>
|
||||
<a href="https://infisical.com/docs/self-hosting/overview" target="_blank"><img src="https://user-images.githubusercontent.com/78047717/206356882-2b773eed-b0da-4725-ae2f-83e3cd7f2713.png" height=120 /> </a>
|
||||
<a href="https://www.youtube.com/watch?v=JS3OKYU2078" target="_blank"><img src="https://user-images.githubusercontent.com/78047717/206356600-8833b128-6cae-408c-a703-07b2fc6aff4b.png" height=120 /> </a>
|
||||
<a href="https://app.infisical.com/signup" target="_blank"><img src="https://user-images.githubusercontent.com/78047717/206355970-f4c09062-b88f-452a-94e0-9c61a0651170.png" height=120></a>
|
||||
</p>
|
||||
|
||||
### Internal PKI:
|
||||
## 🔥 What's cool about this?
|
||||
|
||||
- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
|
||||
- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.
|
||||
- **[Alerting](https://infisical.com/docs/documentation/platform/pki/alerting)**: Configure alerting for expiring CA and end-entity certificates.
|
||||
- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
|
||||
- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.
|
||||
Infisical makes secret management simple and end-to-end encrypted by default. We're on a mission to make it more accessible to all developers, <i>not just security teams</i>.
|
||||
|
||||
### Key Management (KMS):
|
||||
According to a [report](https://www.ekransystem.com/en/blog/secrets-management) in 2019, only 10% of organizations use secret management solutions despite all using digital secrets to some extent.
|
||||
|
||||
- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
|
||||
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
|
||||
If you care about efficiency and security, then Infisical is right for you.
|
||||
|
||||
### General Platform:
|
||||
- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
|
||||
- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
|
||||
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.
|
||||
- **[Self-hosting](https://infisical.com/docs/self-hosting/overview)**: Deploy Infisical on-prem or cloud with ease; keep data on your own infrastructure.
|
||||
- **[Infisical SDK](https://infisical.com/docs/sdks/overview)**: Interact with Infisical via client SDKs ([Node](https://infisical.com/docs/sdks/languages/node), [Python](https://github.com/Infisical/python-sdk-official?tab=readme-ov-file#infisical-python-sdk), [Go](https://infisical.com/docs/sdks/languages/go), [Ruby](https://infisical.com/docs/sdks/languages/ruby), [Java](https://infisical.com/docs/sdks/languages/java), [.NET](https://infisical.com/docs/sdks/languages/csharp))
|
||||
- **[Infisical CLI](https://infisical.com/docs/cli/overview)**: Interact with Infisical via CLI; useful for injecting secrets into local development and CI/CD pipelines.
|
||||
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)**: Interact with Infisical via API.
|
||||
We are currently working hard to make Infisical more extensive. Need any integrations or want a new feature? Feel free to [create an issue](https://github.com/Infisical/infisical/issues) or [contribute](https://infisical.com/docs/contributing/overview) directly to the repository.
|
||||
|
||||
## Getting started
|
||||
## 🌱 Contributing
|
||||
|
||||
Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)
|
||||
|
||||
| Use Infisical Cloud | Deploy Infisical on premise |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
|
||||
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
|
||||
|
||||
### Run Infisical locally

To set up and run Infisical locally, make sure you have Git and Docker installed on your system. Then run the command for your system:

Linux/macOS:

```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
```

Windows Command Prompt:

```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
```

Create an account at `http://localhost:80`
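
Once the containers are up, you can sanity-check the deployment before creating an account. These are generic Docker and curl commands; the port assumes the default mapping in `docker-compose.prod.yml`.

```console
# List the Infisical services and confirm they are running/healthy
docker compose -f docker-compose.prod.yml ps

# The web UI should respond on port 80
curl -I http://localhost:80
```
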
### Scan and prevent secret leaks

On top of managing secrets with Infisical, you can also [scan for 140+ secret types](https://infisical.com/docs/cli/scanning-overview) in your files, directories, and git repositories.

To scan your full git history, run:

```console
infisical scan --verbose
```

Install a pre-commit hook to scan each commit before you push to your repository:

```console
infisical scan install --pre-commit-hook
```
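
You can also scan only the changes in your working tree before they ever reach a commit. The `git-changes` subcommand below is assumed from the CLI's scanning documentation; verify the exact name and flags for your version with `infisical scan --help`.

```console
# Scan uncommitted changes in the current git repository (subcommand assumed; check `infisical scan --help`)
infisical scan git-changes --verbose
```
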
Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview).

## Open-source vs. paid

This repo is available under the [MIT expat license](https://github.com/Infisical/infisical/blob/main/LICENSE), with the exception of the `ee` directory, which will contain premium enterprise features requiring an Infisical license.

If you are interested in the managed Infisical Cloud or the self-hosted Enterprise Offering, take a look at [our website](https://infisical.com/) or [book a meeting with us](https://infisical.cal.com/vlad/infisical-demo).

## Security

Please do not file GitHub issues or post on our public forum for security vulnerabilities, as they are public!

Infisical takes security issues very seriously. If you have any concerns about Infisical or believe you have uncovered a vulnerability, please get in touch via the e-mail address security@infisical.com. In the message, try to provide a description of the issue and ideally a way of reproducing it. The security team will get back to you as soon as possible.

Note that this security address should be used only for undisclosed vulnerabilities. Please report any security problems to us before disclosing them publicly.

## Contributing

Whether it's big or small, we love contributions ❤️ Check out our guide to see how to [get started](https://infisical.com/docs/contributing/overview).

Not sure where to get started? You can:

- Join our <a href="https://infisical.com/slack">Slack</a>, and ask us any questions there.
- [Book a free, non-pressure pairing session with one of our teammates](mailto:tony@infisical.com?subject=Pairing%20session&body=I'd%20like%20to%20do%20a%20pairing%20session!)!

## Resources

- [Docs](https://infisical.com/docs/documentation/getting-started/introduction) for comprehensive documentation and guides
- [Slack](https://infisical.com/slack) for live discussion with the community and the Infisical team
- [GitHub](https://github.com/Infisical/infisical) for code, issues, and pull requests
- [GitHub Discussions](https://github.com/Infisical/infisical/discussions) for help with building and deeper conversations about features
- [GitHub Issues](https://github.com/Infisical/infisical-cli/issues) for any bugs and errors you encounter using Infisical
- [Twitter](https://twitter.com/infisical) for fast news
- [YouTube](https://www.youtube.com/@infisical_os) for videos on secret management
- [Blog](https://infisical.com/blog) for secret management insights, articles, tutorials, and updates

## 🐥 Status

- [x] Public Alpha: Anyone can sign up over at [infisical.com](https://infisical.com), but go easy on us — there are kinks and we're just getting started.
- [ ] Public Beta: Stable enough for most non-enterprise use-cases.
- [ ] Public: Production-ready.

We're currently in Public Alpha.

## 🔌 Integrations

We're currently setting the foundation and building [integrations](https://infisical.com/docs/integrations/overview) so secrets can be synced everywhere. Any help is welcome! :)

<table>
<tr>
<th>Platforms </th>
<th>Frameworks</th>
</tr>
<tr>
<td>

<table>
<tbody>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/platforms/docker?ref=github.com">✔️ Docker</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/platforms/docker-compose?ref=github.com">✔️ Docker Compose</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/cloud/heroku?ref=github.com">✔️ Heroku</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/cloud/vercel?ref=github.com">✔️ Vercel</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/platforms/kubernetes?ref=github.com">✔️ Kubernetes</a></td>
<td align="left" valign="middle">🔜 Fly.io</td>
</tr>
<tr>
<td align="left" valign="middle">🔜 AWS</td>
<td align="left" valign="middle">🔜 GitHub Actions (https://github.com/Infisical/infisical/issues/54)</td>
<td align="left" valign="middle">🔜 Railway</td>
</tr>
<tr>
<td align="left" valign="middle">🔜 GCP</td>
<td align="left" valign="middle">🔜 GitLab CI/CD</td>
<td align="left" valign="middle">🔜 CircleCI</td>
</tr>
<tr>
<td align="left" valign="middle">🔜 Jenkins</td>
<td align="left" valign="middle">🔜 Digital Ocean</td>
<td align="left" valign="middle">🔜 Azure</td>
</tr>
<tr>
<td align="left" valign="middle">🔜 TravisCI</td>
<td align="left" valign="middle">🔜 Netlify (https://github.com/Infisical/infisical/issues/55)</td>
<td align="left" valign="middle">🔜 Railway</td>
</tr>
<tr>
<td align="left" valign="middle">🔜 Bitbucket</td>
<td align="left" valign="middle">🔜 Supabase</td>
<td align="left" valign="middle">🔜 Serverless</td>
</tr>
</tbody>
</table>

</td>
<td>

<table>
<tbody>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/react?ref=github.com">✔️ React</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/express?ref=github.com">✔️ Express</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/gatsby?ref=github.com">✔️ Gatsby</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/flask?ref=github.com">✔️ Flask</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/django?ref=github.com">✔️ Django</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/laravel?ref=github.com">✔️ Laravel</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/nestjs?ref=github.com">✔️ NestJS</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/remix?ref=github.com">✔️ Remix</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/nextjs?ref=github.com">✔️ Next.js</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/vite?ref=github.com">✔️ Vite</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/rails?ref=github.com">✔️ Ruby on Rails</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/vue?ref=github.com">✔️ Vue</a></td>
</tr>
<tr>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/fiber?ref=github.com">✔️ Fiber</a></td>
<td align="left" valign="middle"><a href="https://infisical.com/docs/integrations/frameworks/nuxt?ref=github.com">✔️ Nuxt</a></td>
</tr>
</tbody>
</table>

</td>
</tr>
</table>

## 🏘 Open-source vs. paid

This repo is entirely MIT licensed, with the exception of the `ee` directory, which will contain premium enterprise features requiring an Infisical license in the future. We're currently focused on developing non-enterprise offerings first that should suit most use-cases.

## 🛡 Security

Looking to report a security vulnerability? Please don't post about it in a GitHub issue. Instead, refer to our [SECURITY.md](./SECURITY.md) file.

## 🚨 Stay Up-to-Date

Infisical officially launched as v.1.0 on November 21st, 2022. However, a lot of new features are coming very quickly. Watch **releases** of this repository to be notified about future updates:

![infisical-star-github](https://github.com/Infisical/infisical/blob/main/.github/images/star-infisical.gif?raw=true)

## 🦸 Contributors

[//]: contributor-faces

<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->

<a href="https://github.com/dangtony98"><img src="https://avatars.githubusercontent.com/u/25857006?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/mv-turtle"><img src="https://avatars.githubusercontent.com/u/78047717?s=96&v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/maidul98"><img src="https://avatars.githubusercontent.com/u/9300960?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/gangjun06"><img src="https://avatars.githubusercontent.com/u/50910815?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/reginaldbondoc"><img src="https://avatars.githubusercontent.com/u/7693108?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/SH5H"><img src="https://avatars.githubusercontent.com/u/25437192?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/asharonbaltazar"><img src="https://avatars.githubusercontent.com/u/58940073?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/edgarrmondragon"><img src="https://avatars.githubusercontent.com/u/16805946?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/arjunyel"><img src="https://avatars.githubusercontent.com/u/11153289?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/LemmyMwaura"><img src="https://avatars.githubusercontent.com/u/20738858?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/adrianmarinwork"><img src="https://avatars.githubusercontent.com/u/118568289?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/hanywang2"><img src="https://avatars.githubusercontent.com/u/44352119?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/tobias-mintlify"><img src="https://avatars.githubusercontent.com/u/110702161?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/0xflotus"><img src="https://avatars.githubusercontent.com/u/26602940?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/wanjohiryan"><img src="https://avatars.githubusercontent.com/u/71614375?v=4" width="50" height="50" alt=""/></a>
|
||||
|
10
SECURITY.md
10
SECURITY.md
@ -1,13 +1,9 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported versions
|
||||
## Supported Versions
|
||||
|
||||
We always recommend using the latest version of Infisical to ensure you get all security updates.
|
||||
|
||||
## Reporting vulnerabilities
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please do not file GitHub issues or post on our public forum for security vulnerabilities, as they are public!
|
||||
|
||||
Infisical takes security issues very seriously. If you have any concerns about Infisical or believe you have uncovered a vulnerability, please get in touch via the e-mail address security@infisical.com. In the message, try to provide a description of the issue and ideally a way of reproducing it. The security team will get back to you as soon as possible.
|
||||
|
||||
Note that this security address should be used only for undisclosed vulnerabilities. Please report any security problems to us before disclosing it publicly.
|
||||
Please report security vulnerabilities or concerns to team@infisical.com.
|
||||
|
@ -1,3 +1,2 @@
|
||||
vitest-environment-infisical.ts
|
||||
vitest.config.ts
|
||||
vitest.e2e.config.ts
|
||||
node_modules
|
||||
built
|
12
backend/.eslintrc
Normal file
12
backend/.eslintrc
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended"
|
||||
],
|
||||
"rules": {
|
||||
"no-console": 2
|
||||
}
|
||||
}
|
@ -1,74 +0,0 @@
|
||||
/* eslint-env node */
|
||||
module.exports = {
|
||||
env: {
|
||||
es6: true,
|
||||
node: true
|
||||
},
|
||||
extends: [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:@typescript-eslint/recommended-type-checked",
|
||||
"airbnb-base",
|
||||
"airbnb-typescript/base",
|
||||
"plugin:prettier/recommended",
|
||||
"prettier"
|
||||
],
|
||||
plugins: ["@typescript-eslint", "simple-import-sort", "import"],
|
||||
parser: "@typescript-eslint/parser",
|
||||
parserOptions: {
|
||||
project: true,
|
||||
sourceType: "module",
|
||||
tsconfigRootDir: __dirname
|
||||
},
|
||||
root: true,
|
||||
overrides: [
|
||||
{
|
||||
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
|
||||
rules: {
|
||||
"@typescript-eslint/no-unsafe-member-access": "off",
|
||||
"@typescript-eslint/no-unsafe-assignment": "off",
|
||||
"@typescript-eslint/no-unsafe-argument": "off",
|
||||
"@typescript-eslint/no-unsafe-return": "off",
|
||||
"@typescript-eslint/no-unsafe-call": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
rules: {
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"@typescript-eslint/no-unsafe-enum-comparison": "off",
|
||||
"no-void": "off",
|
||||
"consistent-return": "off", // my style
|
||||
"import/order": "off", // for simple-import-order
|
||||
"import/prefer-default-export": "off", // why
|
||||
"no-restricted-syntax": "off",
|
||||
// importing rules
|
||||
"simple-import-sort/exports": "error",
|
||||
"import/first": "error",
|
||||
"import/newline-after-import": "error",
|
||||
"import/no-duplicates": "error",
|
||||
"simple-import-sort/imports": [
|
||||
"warn",
|
||||
{
|
||||
groups: [
|
||||
// Side effect imports.
|
||||
["^\\u0000"],
|
||||
// Node.js builtins prefixed with `node:`.
|
||||
["^node:"],
|
||||
// Packages.
|
||||
// Things that start with a letter (or digit or underscore), or `@` followed by a letter.
|
||||
["^@?\\w"],
|
||||
["^@app"],
|
||||
["@lib"],
|
||||
["@server"],
|
||||
// Absolute imports and other imports such as Vue-style `@/foo`.
|
||||
// Anything not matched in another group.
|
||||
["^"],
|
||||
// Relative imports.
|
||||
// Anything that starts with a dot.
|
||||
["^\\."]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
1
backend/.gitignore
vendored
1
backend/.gitignore
vendored
@ -1 +0,0 @@
|
||||
dist
|
@ -1,7 +0,0 @@
|
||||
{
|
||||
"singleQuote": false,
|
||||
"printWidth": 120,
|
||||
"trailingComma": "none",
|
||||
"tabWidth": 2,
|
||||
"semi": true
|
||||
}
|
@ -1,46 +1,15 @@
|
||||
# Build stage
|
||||
FROM node:20-alpine AS build
|
||||
FROM node:16-bullseye-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Required for pkcs11js
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
COPY package.json package-lock.json ./
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only-production
|
||||
RUN npm ci --only-production --ignore-scripts
|
||||
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Production stage
|
||||
FROM node:20-alpine
|
||||
WORKDIR /app
|
||||
|
||||
ENV npm_config_cache /home/node/.npm
|
||||
|
||||
COPY package*.json ./
|
||||
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
|
||||
RUN npm ci --only-production && npm cache clean --force
|
||||
|
||||
COPY --from=build /app .
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||
CMD node healthcheck.js
|
||||
|
||||
ENV HOST=0.0.0.0
|
||||
|
||||
EXPOSE 4000
|
||||
|
||||
CMD ["npm", "start"]
|
||||
CMD ["npm", "run", "start"]
|
||||
|
@ -1,57 +0,0 @@
|
||||
FROM node:20-alpine
|
||||
|
||||
# ? Setup a test SoftHSM module. In production a real HSM is used.
|
||||
|
||||
ARG SOFTHSM2_VERSION=2.5.0
|
||||
|
||||
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
|
||||
SOFTHSM2_SOURCES=/tmp/softhsm2
|
||||
|
||||
# install build dependencies including python3
|
||||
RUN apk --update add \
|
||||
alpine-sdk \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
openssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
|
||||
# build and install SoftHSM2
|
||||
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
|
||||
WORKDIR ${SOFTHSM2_SOURCES}
|
||||
|
||||
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
|
||||
&& sh autogen.sh \
|
||||
&& ./configure --prefix=/usr/local --disable-gost \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
WORKDIR /root
|
||||
RUN rm -fr ${SOFTHSM2_SOURCES}
|
||||
|
||||
# install pkcs11-tool
|
||||
RUN apk --update add opensc
|
||||
|
||||
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
|
||||
# ? App setup
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json package.json
|
||||
COPY package-lock.json package-lock.json
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY . .
|
||||
|
||||
ENV HOST=0.0.0.0
|
||||
|
||||
CMD ["npm", "run", "dev:docker"]
|
@ -1,4 +0,0 @@
|
||||
{
|
||||
"presets": ["@babel/preset-env", "@babel/preset-react"],
|
||||
"plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
|
||||
}
|
@ -1,37 +0,0 @@
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { Lock } from "@app/lib/red-lock";
|
||||
|
||||
export const mockKeyStore = (): TKeyStoreFactory => {
|
||||
const store: Record<string, string | number | Buffer> = {};
|
||||
|
||||
return {
|
||||
setItem: async (key, value) => {
|
||||
store[key] = value;
|
||||
return "OK";
|
||||
},
|
||||
setItemWithExpiry: async (key, value) => {
|
||||
store[key] = value;
|
||||
return "OK";
|
||||
},
|
||||
deleteItem: async (key) => {
|
||||
delete store[key];
|
||||
return 1;
|
||||
},
|
||||
getItem: async (key) => {
|
||||
const value = store[key];
|
||||
if (typeof value === "string") {
|
||||
return value;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
incrementBy: async () => {
|
||||
return 1;
|
||||
},
|
||||
acquireLock: () => {
|
||||
return Promise.resolve({
|
||||
release: () => {}
|
||||
}) as Promise<Lock>;
|
||||
},
|
||||
waitTillReady: async () => {}
|
||||
};
|
||||
};
|
@ -1,26 +0,0 @@
|
||||
import { TQueueServiceFactory } from "@app/queue";
|
||||
|
||||
export const mockQueue = (): TQueueServiceFactory => {
|
||||
const queues: Record<string, unknown> = {};
|
||||
const workers: Record<string, unknown> = {};
|
||||
const job: Record<string, unknown> = {};
|
||||
const events: Record<string, unknown> = {};
|
||||
|
||||
return {
|
||||
queue: async (name, jobData) => {
|
||||
job[name] = jobData;
|
||||
},
|
||||
shutdown: async () => undefined,
|
||||
stopRepeatableJob: async () => true,
|
||||
start: (name, jobFn) => {
|
||||
queues[name] = jobFn;
|
||||
workers[name] = jobFn;
|
||||
},
|
||||
listen: (name, event) => {
|
||||
events[name] = event;
|
||||
},
|
||||
clearQueue: async () => {},
|
||||
stopJobById: async () => {},
|
||||
stopRepeatableJobByJobId: async () => true
|
||||
};
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
import { TSmtpSendMail, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
|
||||
export const mockSmtpServer = (): TSmtpService => {
|
||||
const storage: TSmtpSendMail[] = [];
|
||||
return {
|
||||
sendMail: async (data) => {
|
||||
storage.push(data);
|
||||
}
|
||||
};
|
||||
};
|
@ -1,71 +0,0 @@
|
||||
import { OrgMembershipRole } from "@app/db/schemas";
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
export const createIdentity = async (name: string, role: string) => {
|
||||
const createIdentityRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v1/identities",
|
||||
body: {
|
||||
name,
|
||||
role,
|
||||
organizationId: seedData1.organization.id
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(createIdentityRes.statusCode).toBe(200);
|
||||
return createIdentityRes.json().identity;
|
||||
};
|
||||
|
||||
export const deleteIdentity = async (id: string) => {
|
||||
const deleteIdentityRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/identities/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(deleteIdentityRes.statusCode).toBe(200);
|
||||
return deleteIdentityRes.json().identity;
|
||||
};
|
||||
|
||||
describe("Identity v1", async () => {
|
||||
test("Create identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethods).toEqual([]);
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
|
||||
test("Update identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethods).toEqual([]);
|
||||
|
||||
const updatedIdentity = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/identities/${newIdentity.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: "updated-mac-1",
|
||||
role: OrgMembershipRole.Member
|
||||
}
|
||||
});
|
||||
|
||||
expect(updatedIdentity.statusCode).toBe(200);
|
||||
expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
|
||||
test("Delete Identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
|
||||
const deletedIdentity = await deleteIdentity(newIdentity.id);
|
||||
expect(deletedIdentity.name).toBe("mac1");
|
||||
});
|
||||
});
|
@ -1,44 +0,0 @@
|
||||
import jsrp from "jsrp";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Login V1 Router", async () => {
|
||||
// eslint-disable-next-line
|
||||
const client = new jsrp.client();
|
||||
await new Promise((resolve) => {
|
||||
client.init({ username: seedData1.email, password: seedData1.password }, () => resolve(null));
|
||||
});
|
||||
let clientProof: string;
|
||||
|
||||
test("Login first phase", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v3/auth/login1",
|
||||
body: {
|
||||
email: "test@localhost.local",
|
||||
clientPublicKey: client.getPublicKey()
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("serverPublicKey");
|
||||
expect(payload).toHaveProperty("salt");
|
||||
client.setSalt(payload.salt);
|
||||
client.setServerPublicKey(payload.serverPublicKey);
|
||||
clientProof = client.getProof(); // called M1
|
||||
});
|
||||
|
||||
test("Login second phase", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v3/auth/login2",
|
||||
body: {
|
||||
email: seedData1.email,
|
||||
clientProof
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("token");
|
||||
});
|
||||
});
|
@ -1,19 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Org V1 Router", async () => {
|
||||
test("GET Org list", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v1/organization",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("organizations");
|
||||
expect(payload).toEqual({
|
||||
organizations: [expect.objectContaining({ name: seedData1.organization.name })]
|
||||
});
|
||||
});
|
||||
});
|
@ -1,132 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
|
||||
|
||||
const createProjectEnvironment = async (name: string, slug: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name,
|
||||
slug
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("environment");
|
||||
return payload.environment;
|
||||
};
|
||||
|
||||
const deleteProjectEnvironment = async (envId: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("environment");
|
||||
return payload.environment;
|
||||
};
|
||||
|
||||
describe("Project Environment Router", async () => {
|
||||
test("Get default environments", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("workspace");
|
||||
// check for default environments
|
||||
expect(payload).toEqual({
|
||||
workspace: expect.objectContaining({
|
||||
name: seedData1.project.name,
|
||||
id: seedData1.project.id,
|
||||
slug: seedData1.project.slug,
|
||||
environments: expect.arrayContaining([
|
||||
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
|
||||
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
|
||||
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
|
||||
])
|
||||
})
|
||||
});
|
||||
// ensure only the three default environments exist
|
||||
expect(payload.workspace.environments.length).toBe(3);
|
||||
});
|
||||
|
||||
const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
|
||||
test("Create environment", async () => {
|
||||
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
|
||||
expect(newEnvironment).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
name: mockProjectEnv.name,
|
||||
slug: mockProjectEnv.slug,
|
||||
projectId: seedData1.project.id,
|
||||
position: DEFAULT_PROJECT_ENVS.length + 1,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String)
|
||||
})
|
||||
);
|
||||
await deleteProjectEnvironment(newEnvironment.id);
|
||||
});
|
||||
|
||||
test("Update environment", async () => {
|
||||
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
|
||||
const updatedName = { name: "temp#2", slug: "temp2" };
|
||||
const res = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: updatedName.name,
|
||||
slug: updatedName.slug,
|
||||
position: 1
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("environment");
|
||||
expect(payload.environment).toEqual(
|
||||
expect.objectContaining({
|
||||
id: newEnvironment.id,
|
||||
name: updatedName.name,
|
||||
slug: updatedName.slug,
|
||||
projectId: seedData1.project.id,
|
||||
position: 1,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String)
|
||||
})
|
||||
);
|
||||
await deleteProjectEnvironment(newEnvironment.id);
|
||||
});
|
||||
|
||||
test("Delete environment", async () => {
|
||||
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
|
||||
const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
|
||||
expect(deletedProjectEnvironment).toEqual(
|
||||
expect.objectContaining({
|
||||
id: deletedProjectEnvironment.id,
|
||||
name: mockProjectEnv.name,
|
||||
slug: mockProjectEnv.slug,
|
||||
position: 5,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String)
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
@ -1,36 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
||||
|
||||
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-approvals`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: dto.name,
|
||||
secretPath: dto.secretPath,
|
||||
approvers: dto.approvers,
|
||||
approvals: dto.approvals
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().approval;
|
||||
};
|
||||
|
||||
describe("Secret approval policy router", async () => {
|
||||
test("Create policy", async () => {
|
||||
const policy = await createPolicy({
|
||||
secretPath: "/",
|
||||
approvals: 1,
|
||||
approvers: [{id:seedData1.id, type: ApproverType.User}],
|
||||
name: "test-policy"
|
||||
});
|
||||
|
||||
expect(policy.name).toBe("test-policy");
|
||||
});
|
||||
});
|
@ -1,165 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
const createFolder = async (dto: { path: string; name: string }) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: dto.name,
|
||||
path: dto.path
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folder;
|
||||
};
|
||||
|
||||
const deleteFolder = async (dto: { path: string; id: string }) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${dto.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: dto.path
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folder;
|
||||
};
|
||||
|
||||
describe("Secret Folder Router", async () => {
|
||||
test.each([
|
||||
{ name: "folder1", path: "/" }, // one in root
|
||||
{ name: "folder1", path: "/level1/level2" }, // then create a deep one creating intermediate ones
|
||||
{ name: "folder2", path: "/" },
|
||||
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
|
||||
])("Create folder $name in $path", async ({ name, path }) => {
|
||||
const createdFolder = await createFolder({ path, name });
|
||||
// check for default environments
|
||||
expect(createdFolder).toEqual(
|
||||
expect.objectContaining({
|
||||
name,
|
||||
id: expect.any(String)
|
||||
})
|
||||
);
|
||||
await deleteFolder({ path, id: createdFolder.id });
|
||||
});
|
||||
|
||||
test.each([
|
||||
{
|
||||
path: "/",
|
||||
expected: {
|
||||
folders: [{ name: "folder1" }, { name: "level1" }, { name: "folder2" }],
|
||||
length: 3
|
||||
}
|
||||
},
|
||||
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
|
||||
])("Get folders $path", async ({ path, expected }) => {
|
||||
const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));
|
||||
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("folders");
|
||||
expect(payload.folders.length >= expected.folders.length).toBeTruthy();
|
||||
expect(payload).toEqual({
|
||||
folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
|
||||
});
|
||||
|
||||
await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
|
||||
});
|
||||
|
||||
test("Update a deep folder", async () => {
|
||||
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
|
||||
expect(newFolder).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
name: "folder-updated"
|
||||
})
|
||||
);
|
||||
|
||||
const resUpdatedFolders = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/level1/level2"
|
||||
}
|
||||
});
|
||||
|
||||
expect(resUpdatedFolders.statusCode).toBe(200);
|
||||
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
|
||||
expect(updatedFolderList).toHaveProperty("folders");
|
||||
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");
|
||||
|
||||
await deleteFolder({ path: "/level1/level2", id: newFolder.id });
|
||||
});
|
||||
|
||||
test("Delete a deep folder", async () => {
|
||||
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${newFolder.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/level1/level2"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("folder");
|
||||
expect(payload.folder).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
name: "folder-updated"
|
||||
})
|
||||
);
|
||||
|
||||
const resUpdatedFolders = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/level1/level2"
|
||||
}
|
||||
});
|
||||
|
||||
expect(resUpdatedFolders.statusCode).toBe(200);
|
||||
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
|
||||
expect(updatedFolderList).toHaveProperty("folders");
|
||||
expect(updatedFolderList.folders.length).toEqual(0);
|
||||
});
|
||||
});
|
@ -1,808 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Secret Import Router", async () => {
|
||||
test.each([
|
||||
{ importEnv: "prod", importPath: "/" }, // one in root
|
||||
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
|
||||
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
|
||||
// check for default environments
|
||||
const payload = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath,
|
||||
importEnv
|
||||
});
|
||||
expect(payload).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath,
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: importEnv,
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: payload.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Get secret imports", async () => {
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImports");
|
||||
expect(payload.secretImports.length).toBe(2);
|
||||
expect(payload.secretImports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "staging",
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Update secret import position", async () => {
|
||||
const prodImportDetails = { path: "/", envSlug: "prod" };
|
||||
const stagingImportDetails = { path: "/", envSlug: "staging" };
|
||||
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: prodImportDetails.path,
|
||||
importEnv: prodImportDetails.envSlug
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: stagingImportDetails.path,
|
||||
importEnv: stagingImportDetails.envSlug
|
||||
});
|
||||
|
||||
const updateImportRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/secret-imports/${createdImport1.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/",
|
||||
import: {
|
||||
position: 2
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(updateImportRes.statusCode).toBe(200);
|
||||
const payload = JSON.parse(updateImportRes.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
// check for default environments
|
||||
expect(payload.secretImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
position: 2,
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.stringMatching(prodImportDetails.envSlug),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
const secretImportsListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(secretImportsListRes.statusCode).toBe(200);
|
||||
const secretImportList = JSON.parse(secretImportsListRes.payload);
|
||||
expect(secretImportList).toHaveProperty("secretImports");
|
||||
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
|
||||
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Delete secret import position", async () => {
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const deletedImport = await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
// check for default environments
|
||||
expect(deletedImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
const secretImportsListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(secretImportsListRes.statusCode).toBe(200);
|
||||
const secretImportList = JSON.parse(secretImportsListRes.payload);
|
||||
expect(secretImportList).toHaveProperty("secretImports");
|
||||
expect(secretImportList.secretImports.length).toEqual(1);
|
||||
expect(secretImportList.secretImports[0].position).toEqual(1);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import waterfall pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import multiple destination to one source pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev -> stage, prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import one source to multiple destination pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const stageImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
const prodImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: prodImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const stagingSecret = await getSecretByNameV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
|
||||
expect(stagingSecret.secretValue).toBe("stage-value");
|
||||
|
||||
const prodSecret = await getSecretByNameV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(prodSecret.secretKey).toBe("PROD_KEY");
|
||||
expect(prodSecret.secretValue).toBe("prod-value");
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
@@ -1,406 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
"Secret replication waterfall pattern testing - %secretPath",
|
||||
({ secretPath: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
// wait 10 seconds for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 10000); // give the db time to catch up
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait 10 seconds for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 10000); // give the db time to catch up
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret replication 1-N pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait 10 seconds for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 10000); // give the db time to catch up
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
@@ -1,9 +0,0 @@
|
||||
describe("Status V1 Router", async () => {
|
||||
test("Simple check", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/status"
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
});
|
||||
});
|
@@ -1,579 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
|
||||
import { SecretType, TSecrets } from "@app/db/schemas";
|
||||
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
|
||||
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
|
||||
const createServiceToken = async (
|
||||
scopes: { environment: string; secretPath: string }[],
|
||||
permissions: ("read" | "write")[]
|
||||
) => {
|
||||
const projectKeyRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
const projectKeyEnc = JSON.parse(projectKeyRes.payload);
|
||||
|
||||
const userInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/users/me",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
const { user: userInfo } = JSON.parse(userInfoRes.payload);
|
||||
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
|
||||
const projectKey = decryptAsymmetric({
|
||||
ciphertext: projectKeyEnc.encryptedKey,
|
||||
nonce: projectKeyEnc.nonce,
|
||||
publicKey: projectKeyEnc.sender.publicKey,
|
||||
privateKey
|
||||
});
|
||||
|
||||
const randomBytes = crypto.randomBytes(16).toString("hex");
|
||||
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
|
||||
const serviceTokenRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: "test-token",
|
||||
workspaceId: seedData1.project.id,
|
||||
scopes,
|
||||
encryptedKey: ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
permissions,
|
||||
expiresIn: null
|
||||
}
|
||||
});
|
||||
expect(serviceTokenRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenRes.json();
|
||||
expect(serviceTokenInfo).toHaveProperty("serviceToken");
|
||||
expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
|
||||
return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
|
||||
};
|
||||
|
||||
const deleteServiceToken = async () => {
|
||||
const serviceTokenListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenListRes.statusCode).toBe(200);
|
||||
const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
|
||||
expect(serviceTokens.length).toBeGreaterThan(0);
|
||||
const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
|
||||
expect(serviceTokenInfo).toBeDefined();
|
||||
|
||||
const deleteTokenRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(deleteTokenRes.statusCode).toBe(200);
|
||||
};
|
||||
|
||||
const createSecret = async (dto: {
|
||||
projectKey: string;
|
||||
path: string;
|
||||
key: string;
|
||||
value: string;
|
||||
comment: string;
|
||||
type?: SecretType;
|
||||
token: string;
|
||||
}) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.path,
|
||||
...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.token}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret;
|
||||
};
|
||||
|
||||
const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.token}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: dto.path
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret;
|
||||
};
|
||||
|
||||
describe("Service token secret ops", async () => {
|
||||
let serviceToken = "";
|
||||
let projectKey = "";
|
||||
let folderId = "";
|
||||
beforeAll(async () => {
|
||||
serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/**", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
|
||||
// this ensures CLI service token decryption works correctly
|
||||
const serviceTokenInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenInfoRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenInfoRes.json();
|
||||
const serviceTokenParts = serviceToken.split(".");
|
||||
projectKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
key: serviceTokenParts[3],
|
||||
tag: serviceTokenInfo.tag,
|
||||
ciphertext: serviceTokenInfo.encryptedKey,
|
||||
iv: serviceTokenInfo.iv
|
||||
});
|
||||
|
||||
// create a deep folder
|
||||
const folderCreate = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: "folder",
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(folderCreate.statusCode).toBe(200);
|
||||
folderId = folderCreate.json().folder.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await deleteServiceToken();
|
||||
|
||||
// delete the deep folder
|
||||
const deleteFolder = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${folderId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(deleteFolder.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
const testSecrets = [
|
||||
{
|
||||
path: "/",
|
||||
secret: {
|
||||
key: "ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "NESTED-ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
const getSecrets = async (environment: string, secretPath = "/") => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath,
|
||||
environment,
|
||||
workspaceId: seedData1.project.id
|
||||
}
|
||||
});
|
||||
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
|
||||
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
|
||||
};
|
||||
|
||||
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
|
||||
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, createdSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
expect(decryptedSecret.version).toEqual(1);
|
||||
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
|
||||
const getSecByNameRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath: path,
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug
|
||||
}
|
||||
});
|
||||
expect(getSecByNameRes.statusCode).toBe(200);
|
||||
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
|
||||
expect(getSecretByNamePayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const updateSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: path,
|
||||
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
|
||||
};
|
||||
const updateSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: updateSecretReqBody
|
||||
});
|
||||
expect(updateSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual("new-value");
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
// listed secrets should have the updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: "new-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
|
||||
// deleting a shared secret should also delete personal ones
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(200);
|
||||
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
|
||||
expect(createSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
|
||||
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(400);
|
||||
|
||||
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
value: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const deletedSharedSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
expect(deletedSharedSecRes.statusCode).toBe(200);
|
||||
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
|
||||
expect(deletedSecretPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should no longer exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Service token fail cases", async () => {
|
||||
test("Unauthorized secret path access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/nested/deep"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(403);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized secret environment access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: "prod",
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(403);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized write operation", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read"]
|
||||
);
|
||||
const writeSecrets = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/NEW`,
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: "/",
|
||||
// the project key doesn't matter because the request fails earlier due to read-only access
|
||||
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(writeSecrets.statusCode).toBe(403);
|
||||
expect(writeSecrets.json().error).toBe("PermissionDenied");
|
||||
|
||||
// but read access should still work fine
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(200);
|
||||
await deleteServiceToken();
|
||||
});
|
||||
});
|
@@ -1,344 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Secret expansion", () => {
|
||||
const projectId = seedData1.projectV3.id;
|
||||
|
||||
beforeAll(async () => {
|
||||
const prodRootFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
name: "nested"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodRootFolder.id,
|
||||
workspaceId: projectId,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
test("Local secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "HELLO",
|
||||
value: "world"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${HELLO}"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello world");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "TEST",
|
||||
secretValue: "hello world"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
test(
|
||||
"Replicated secret import secret expansion on local reference and nested reference",
|
||||
async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
// wait 5 seconds for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // give the db time to catch up
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
|
||||
environment: seedData1.environment.slug,
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
|
@@ -1,577 +0,0 @@
|
||||
import { SecretType } from "@app/db/schemas";
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
type TRawSecret = {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
version: number;
|
||||
};
|
||||
|
||||
const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.path,
|
||||
secretKey: dto.key,
|
||||
secretValue: dto.value,
|
||||
secretComment: dto.comment
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
const deleteSecret = async (dto: { path: string; key: string }) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: dto.path
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])(
|
||||
"Secret V2 Architecture - $auth mode",
|
||||
async ({ auth }) => {
|
||||
let folderId = "";
|
||||
let authToken = "";
|
||||
const secretTestCases = [
|
||||
{
|
||||
path: "/",
|
||||
secret: {
|
||||
key: "SEC1",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "NESTED-SEC1",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/",
|
||||
secret: {
|
||||
key: "secret-key-2",
|
||||
value: `-----BEGIN PRIVATE KEY-----
|
||||
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
|
||||
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
|
||||
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
|
||||
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
|
||||
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
|
||||
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
|
||||
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
|
||||
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
|
||||
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
|
||||
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
|
||||
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
|
||||
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
|
||||
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
|
||||
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
|
||||
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
|
||||
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
|
||||
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
|
||||
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
|
||||
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
|
||||
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
|
||||
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
|
||||
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
|
||||
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
|
||||
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
|
||||
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
|
||||
omQDpP86RX/hIIQ+JyLSaWYa
|
||||
-----END PRIVATE KEY-----`,
|
||||
comment:
|
||||
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "secret-key-3",
|
||||
value: `-----BEGIN PRIVATE KEY-----
|
||||
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
|
||||
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
|
||||
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
|
||||
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
|
||||
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
|
||||
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
|
||||
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
|
||||
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
|
||||
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
|
||||
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
|
||||
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
|
||||
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
|
||||
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
|
||||
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
|
||||
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
|
||||
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
|
||||
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
|
||||
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
|
||||
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
|
||||
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
|
||||
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
|
||||
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
|
||||
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
|
||||
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
|
||||
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
|
||||
omQDpP86RX/hIIQ+JyLSaWYa
|
||||
-----END PRIVATE KEY-----`,
|
||||
comment:
|
||||
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "secret-key-3",
|
||||
value:
|
||||
"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGRvIGVpdXNtb2QgdGVtcG9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxdWlzIG5vc3RydWQgZXhlcmNpdGF0aW9uCg==",
|
||||
comment: ""
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
beforeAll(async () => {
|
||||
if (auth === AuthMode.JWT) {
|
||||
authToken = jwtAuthToken;
|
||||
} else if (auth === AuthMode.IDENTITY_ACCESS_TOKEN) {
|
||||
const identityLogin = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v1/auth/universal-auth/login",
|
||||
body: {
|
||||
clientSecret: seedData1.machineIdentity.clientCredentials.secret,
|
||||
clientId: seedData1.machineIdentity.clientCredentials.id
|
||||
}
|
||||
});
|
||||
expect(identityLogin.statusCode).toBe(200);
|
||||
authToken = identityLogin.json().accessToken;
|
||||
}
|
||||
// create a deep folder
|
||||
const folderCreate = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: "folder",
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(folderCreate.statusCode).toBe(200);
|
||||
folderId = folderCreate.json().folder.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
const deleteFolder = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${folderId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(deleteFolder.statusCode).toBe(200);
|
||||
});

const getSecrets = async (environment: string, secretPath = "/") => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${authToken}`
    },
    query: {
      secretPath,
      environment,
      workspaceId: seedData1.projectV3.id
    }
  });
  const secrets: TRawSecret[] = JSON.parse(res.payload).secrets || [];
  return secrets;
};
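
For reference, this is the secret shape the helper above assumes when parsing the /api/v3/secrets/raw payload. It mirrors the TRawSecret helper type defined in the shared e2e test utilities further down in this diff; only the optionality of secretComment is inferred.

// Shape assumed by getSecrets when parsing the raw-secrets response.
// Shown here for readability; the spec itself gets TRawSecret from the shared test utilities.
type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};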
|
||||
|
||||
test.each(secretTestCases)("Create secret in path $path", async ({ secret, path }) => {
|
||||
const createdSecret = await createSecret({ path, ...secret });
|
||||
expect(createdSecret.secretKey).toEqual(secret.key);
|
||||
expect(createdSecret.secretValue).toEqual(secret.value);
|
||||
expect(createdSecret.secretComment || "").toEqual(secret.comment);
|
||||
expect(createdSecret.version).toEqual(1);
|
||||
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
secretValue: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Get secret by name in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
|
||||
const getSecByNameRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath: path,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug
|
||||
}
|
||||
});
|
||||
expect(getSecByNameRes.statusCode).toBe(200);
|
||||
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
|
||||
expect(getSecretByNamePayload).toHaveProperty("secret");
|
||||
const decryptedSecret = getSecretByNamePayload.secret as TRawSecret;
|
||||
expect(decryptedSecret.secretKey).toEqual(secret.key);
|
||||
expect(decryptedSecret.secretValue).toEqual(secret.value);
|
||||
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
|
||||
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
if (auth === AuthMode.JWT) {
|
||||
test.each(secretTestCases)(
|
||||
"Creating personal secret without shared throw error in path $path",
|
||||
async ({ secret }) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Personal,
|
||||
secretKey: secret.key,
|
||||
secretValue: secret.value,
|
||||
secretComment: secret.comment
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/raw/SEC2`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
const payload = JSON.parse(createSecRes.payload);
|
||||
expect(createSecRes.statusCode).toBe(400);
|
||||
expect(payload.error).toEqual("BadRequest");
|
||||
}
|
||||
);
|
||||
|
||||
test.each(secretTestCases)("Creating personal secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
|
||||
const createSecretReqBody = {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Personal,
|
||||
secretPath: path,
|
||||
secretKey: secret.key,
|
||||
secretValue: "personal-value",
|
||||
secretComment: secret.comment
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/raw/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
|
||||
// list secrets should contain personal one and shared one
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
secretValue: secret.value,
|
||||
type: SecretType.Shared
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
secretValue: "personal-value",
|
||||
type: SecretType.Personal
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
test.each(secretTestCases)(
|
||||
"Deleting personal one should not delete shared secret in path $path",
|
||||
async ({ secret, path }) => {
|
||||
await createSecret({ path, ...secret }); // shared one
|
||||
await createSecret({ path, ...secret, type: SecretType.Personal });
|
||||
|
||||
// the shared secret should still be present in the listing after the personal one is created
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
type: SecretType.Shared
|
||||
}),
|
||||
expect.not.objectContaining({
|
||||
secretKey: secret.key,
|
||||
type: SecretType.Personal
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
test.each(secretTestCases)("Update secret in path $path", async ({ path, secret }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
const updateSecretReqBody = {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: path,
|
||||
secretKey: secret.key,
|
||||
secretValue: "new-value",
|
||||
secretComment: secret.comment
|
||||
};
|
||||
const updateSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/raw/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: updateSecretReqBody
|
||||
});
|
||||
expect(updateSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
const decryptedSecret = updatedSecretPayload.secret;
|
||||
expect(decryptedSecret.secretKey).toEqual(secret.key);
|
||||
expect(decryptedSecret.secretValue).toEqual("new-value");
|
||||
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
|
||||
|
||||
// listed secrets should have the updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
secretValue: "new-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Delete secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
const deletedSecret = await deleteSecret({ path, key: secret.key });
|
||||
expect(deletedSecret.secretKey).toEqual(secret.key);
|
||||
|
||||
// shared secret deletion should delete personal ones also
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
type: SecretType.Shared
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
type: SecretType.Personal
|
||||
})
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Bulk create secrets in path $path", async ({ secret, path }) => {
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: secret.value,
|
||||
secretComment: secret.comment
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(200);
|
||||
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
|
||||
expect(createSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
|
||||
);
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ ...secret, key: `BULK-${secret.key}-1`, path });
|
||||
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: secret.value,
|
||||
secretComment: secret.comment
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(400);
|
||||
|
||||
await deleteSecret({ path, key: `BULK-${secret.key}-1` });
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Bulk update secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
|
||||
);
|
||||
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: "update-value",
|
||||
secretComment: secret.comment
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist with the updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
|
||||
);
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
|
||||
);
|
||||
|
||||
const deletedSharedSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/batch/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
expect(deletedSharedSecRes.statusCode).toBe(200);
|
||||
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
|
||||
expect(deletedSecretPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should no longer exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
);
|
File diff suppressed because it is too large
@@ -1,73 +0,0 @@
|
||||
type TFolder = {
|
||||
id: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
export const createFolder = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
name: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
name: dto.name,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folder as TFolder;
|
||||
};
|
||||
|
||||
export const deleteFolder = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
id: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${dto.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folder as TFolder;
|
||||
};
|
||||
|
||||
export const listFolders = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folders as TFolder[];
|
||||
};
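
To make the intended lifecycle of these folder helpers concrete, here is a minimal usage sketch. The seedData1 values and jwtAuthToken come from the shared e2e setup elsewhere in this diff; the folder name is illustrative.

// Hypothetical spec fragment driving the helpers defined above.
let createdFolder: TFolder;

beforeAll(async () => {
  createdFolder = await createFolder({
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    name: "example-folder", // illustrative name, not from the diff
    authToken: jwtAuthToken
  });
});

afterAll(async () => {
  await deleteFolder({
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    id: createdFolder.id,
    authToken: jwtAuthToken
  });
});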
|
@ -1,93 +0,0 @@
|
||||
type TSecretImport = {
|
||||
id: string;
|
||||
importEnv: {
|
||||
name: string;
|
||||
slug: string;
|
||||
id: string;
|
||||
};
|
||||
importPath: string;
|
||||
};
|
||||
|
||||
export const createSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
isReplication?: boolean;
|
||||
secretPath: string;
|
||||
importPath: string;
|
||||
importEnv: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
isReplication: dto.isReplication,
|
||||
path: dto.secretPath,
|
||||
import: {
|
||||
environment: dto.importEnv,
|
||||
path: dto.importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport as TSecretImport;
|
||||
};
|
||||
|
||||
export const deleteSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
id: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${dto.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport as TSecretImport;
|
||||
};
|
||||
|
||||
export const listSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImports");
|
||||
return payload.secretImports as TSecretImport[];
|
||||
};
|
@ -1,128 +0,0 @@
|
||||
import { SecretType } from "@app/db/schemas";
|
||||
|
||||
type TRawSecret = {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
version: number;
|
||||
};
|
||||
|
||||
export const createSecretV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
value: string;
|
||||
comment?: string;
|
||||
authToken: string;
|
||||
type?: SecretType;
|
||||
}) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.secretPath,
|
||||
secretKey: dto.key,
|
||||
secretValue: dto.value,
|
||||
secretComment: dto.comment
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const deleteSecretV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const getSecretByNameV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const response = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath,
|
||||
expandSecretReferences: "true",
|
||||
include_imports: "true"
|
||||
}
|
||||
});
|
||||
expect(response.statusCode).toBe(200);
|
||||
const payload = JSON.parse(response.payload);
|
||||
expect(payload).toHaveProperty("secret");
|
||||
return payload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const getSecretsV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const getSecretsResponse = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath,
|
||||
expandSecretReferences: "true",
|
||||
include_imports: "true"
|
||||
}
|
||||
});
|
||||
expect(getSecretsResponse.statusCode).toBe(200);
|
||||
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
|
||||
expect(getSecretsPayload).toHaveProperty("secrets");
|
||||
expect(getSecretsPayload).toHaveProperty("imports");
|
||||
return getSecretsPayload as {
|
||||
secrets: TRawSecret[];
|
||||
imports: {
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
folderId: string;
|
||||
secrets: TRawSecret[];
|
||||
}[];
|
||||
};
|
||||
};
|
@@ -1,111 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";

import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import path from "path";

import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockSmtpServer } from "./mocks/smtp";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
  name: "knex-env",
  transformMode: "ssr",
  async setup() {
    const logger = await initLogger();
    const cfg = initEnvConfig(logger);
    const db = initDbConnection({
      dbConnectionUri: cfg.DB_CONNECTION_URI,
      dbRootCert: cfg.DB_ROOT_CERT
    });

    const redis = new Redis(cfg.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );
      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      const smtp = mockSmtpServer();
      const queue = queueServiceFactory(cfg.REDIS_URL);
      const keyStore = keyStoreFactory(cfg.REDIS_URL);

      const hsmModule = initializeHsmModule();
      hsmModule.initialize();

      const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });

      // @ts-expect-error type
      globalThis.testServer = server;
      // @ts-expect-error type
      globalThis.jwtAuthToken = jwt.sign(
        {
          authTokenType: AuthTokenType.ACCESS_TOKEN,
          userId: seedData1.id,
          tokenVersionId: seedData1.token.id,
          authMethod: AuthMethod.EMAIL,
          organizationId: seedData1.organization.id,
          accessVersion: 1
        },
        cfg.AUTH_SECRET,
        { expiresIn: cfg.JWT_AUTH_LIFETIME }
      );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
        // @ts-expect-error type
        await globalThis.testServer.close();
        // @ts-expect-error type
        delete globalThis.testServer;
        // @ts-expect-error type
        delete globalThis.jwtAuthToken;
        // called after all tests with this env have been run
        await db.migrate.rollback(
          {
            directory: path.join(__dirname, "../src/db/migrations"),
            extension: "ts",
            tableName: "infisical_migrations"
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.destroy();
      }
    };
  }
};
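
For orientation, a minimal sketch of how a custom environment like the one above is typically wired into the e2e run (the package.json later in this diff invokes vitest with vitest.e2e.config.ts). The file path, plugin, and options here are assumptions; the repository's actual config may differ.

// vitest.e2e.config.ts (sketch, not the repository's actual file)
import { defineConfig } from "vitest/config";
import tsconfigPaths from "vite-tsconfig-paths"; // assumed plugin for resolving the "@app/*" path aliases

export default defineConfig({
  plugins: [tsconfigPaths()],
  test: {
    // point vitest at the custom knex environment defined above (path assumed)
    environment: "./e2e-test/vitest-environment-knex.ts",
    include: ["./e2e-test/**/*.spec.ts"],
    // migrations, seeds, and the shared server assume a single worker
    poolOptions: { threads: { singleThread: true } }
  }
});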
backend/environment.d.ts (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
export {};

declare global {
  namespace NodeJS {
    interface ProcessEnv {
      EMAIL_TOKEN_LIFETIME: string;
      ENCRYPTION_KEY: string;
      JWT_AUTH_LIFETIME: string;
      JWT_AUTH_SECRET: string;
      JWT_REFRESH_LIFETIME: string;
      JWT_REFRESH_SECRET: string;
      JWT_SERVICE_SECRET: string;
      JWT_SIGNUP_LIFETIME: string;
      JWT_SIGNUP_SECRET: string;
      MONGO_URL: string;
      NODE_ENV: 'development' | 'staging' | 'testing' | 'production';
      CLIENT_ID_HEROKU: string;
      CLIENT_ID_VERCEL: string;
      CLIENT_ID_NETLIFY: string;
      CLIENT_SECRET_HEROKU: string;
      CLIENT_SECRET_VERCEL: string;
      CLIENT_SECRET_NETLIFY: string;
      POSTHOG_HOST: string;
      POSTHOG_PROJECT_API_KEY: string;
      PRIVATE_KEY: string;
      PUBLIC_KEY: string;
      SENTRY_DSN: string;
      SITE_URL: string;
      SMTP_HOST: string;
      SMTP_NAME: string;
      SMTP_PASSWORD: string;
      SMTP_USERNAME: string;
      STRIPE_PRODUCT_CARD_AUTH: string;
      STRIPE_PRODUCT_PRO: string;
      STRIPE_PRODUCT_STARTER: string;
      STRIPE_PUBLISHABLE_KEY: string;
      STRIPE_SECRET_KEY: string;
      STRIPE_WEBHOOK_SECRET: string;
    }
  }
}
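
A small sketch of what the ambient declaration above provides to consuming code; the variable names are the ones declared, the snippet itself is illustrative.

// With the ProcessEnv augmentation above, these reads are typed as string
// and NODE_ENV is narrowed to the declared union.
const smtpHost: string = process.env.SMTP_HOST;
const smtpUser: string = process.env.SMTP_USERNAME;
const isProduction = process.env.NODE_ENV === "production";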
backend/healthcheck.js (new file, 24 lines)
@@ -0,0 +1,24 @@
const http = require('http');
const PORT = process.env.PORT || 4000;
const options = {
  host: 'localhost',
  port: PORT,
  timeout: 2000,
  path: '/healthcheck'
};

const healthCheck = http.request(options, (res) => {
  console.log(`HEALTHCHECK STATUS: ${res.statusCode}`);
  if (res.statusCode == 200) {
    process.exit(0);
  } else {
    process.exit(1);
  }
});

healthCheck.on('error', function (err) {
  console.error(`HEALTH CHECK ERROR: ${err}`);
  process.exit(1);
});

healthCheck.end();
backend/img/dashboard.png (new binary file, 493 KiB, not shown)
@@ -1,6 +1,6 @@
{
  "watch": ["src"],
  "ext": ".ts,.js",
  "ignore": [],
  "exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
{
  "watch": ["src"],
  "ext": ".ts,.js",
  "ignore": [],
  "exec": "ts-node ./src/index.ts"
}
backend/package-lock.json (generated, 33088 lines): file diff suppressed because it is too large
@ -1,209 +1,72 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "./dist/main.mjs",
|
||||
"bin": "dist/main.js",
|
||||
"pkg": {
|
||||
"scripts": [
|
||||
"dist/**/*.js",
|
||||
"../frontend/node_modules/next/**/*.js",
|
||||
"../frontend/.next/*/**/*.js",
|
||||
"../frontend/node_modules/next/dist/server/**/*.js",
|
||||
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
|
||||
],
|
||||
"assets": [
|
||||
"dist/**",
|
||||
"!dist/**/*.js",
|
||||
"node_modules/**",
|
||||
"../frontend/node_modules/**",
|
||||
"../frontend/.next/**",
|
||||
"!../frontend/node_modules/next/dist/server/**/*.js",
|
||||
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
|
||||
"../frontend/public/**"
|
||||
],
|
||||
"outputPath": "binary"
|
||||
},
|
||||
"scripts": {
|
||||
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
|
||||
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
|
||||
"binary:babel-backend": " babel ./dist -d ./dist",
|
||||
"binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
|
||||
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
|
||||
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
|
||||
"dev:docker": "nodemon",
|
||||
"build": "tsup --sourcemap",
|
||||
"build:frontend": "npm run build --prefix ../frontend",
|
||||
"start": "node --enable-source-maps dist/main.mjs",
|
||||
"type:check": "tsc --noEmit",
|
||||
"lint:fix": "eslint --fix --ext js,ts ./src",
|
||||
"lint": "eslint 'src/**/*.ts'",
|
||||
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
|
||||
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
|
||||
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
|
||||
"generate:component": "tsx ./scripts/create-backend-file.ts",
|
||||
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
|
||||
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
|
||||
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
|
||||
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
|
||||
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
|
||||
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
|
||||
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
|
||||
"migration:new": "tsx ./scripts/create-migration.ts",
|
||||
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
|
||||
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
|
||||
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"migrate:org": "tsx ./scripts/migrate-organization.ts",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.18.10",
|
||||
"@babel/core": "^7.18.10",
|
||||
"@babel/plugin-syntax-import-attributes": "^7.24.7",
|
||||
"@babel/preset-env": "^7.18.10",
|
||||
"@babel/preset-react": "^7.24.7",
|
||||
"@types/bcrypt": "^5.0.2",
|
||||
"@types/jmespath": "^0.15.2",
|
||||
"@types/jsonwebtoken": "^9.0.5",
|
||||
"@types/jsrp": "^0.2.6",
|
||||
"@types/libsodium-wrappers": "^0.7.13",
|
||||
"@types/lodash.isequal": "^4.5.8",
|
||||
"@types/node": "^20.9.5",
|
||||
"@types/nodemailer": "^6.4.14",
|
||||
"@types/passport-github": "^1.1.12",
|
||||
"@types/passport-google-oauth20": "^2.0.14",
|
||||
"@types/pg": "^8.10.9",
|
||||
"@types/picomatch": "^2.3.3",
|
||||
"@types/pkcs11js": "^1.0.4",
|
||||
"@types/prompt-sync": "^4.2.3",
|
||||
"@types/resolve": "^1.20.6",
|
||||
"@types/safe-regex": "^1.1.6",
|
||||
"@types/sjcl": "^1.0.34",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.20.0",
|
||||
"@typescript-eslint/parser": "^6.20.0",
|
||||
"@yao-pkg/pkg": "^5.12.0",
|
||||
"babel-plugin-transform-import-meta": "^2.2.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-airbnb-base": "^15.0.0",
|
||||
"eslint-config-airbnb-typescript": "^17.1.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-import-resolver-typescript": "^3.6.1",
|
||||
"eslint-plugin-import": "^2.29.1",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-simple-import-sort": "^10.0.0",
|
||||
"nodemon": "^3.0.2",
|
||||
"pino-pretty": "^10.2.3",
|
||||
"prompt-sync": "^4.2.0",
|
||||
"rimraf": "^5.0.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsc-alias": "^1.8.8",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"tsup": "^8.0.1",
|
||||
"tsx": "^4.4.0",
|
||||
"typescript": "^5.3.2",
|
||||
"vitest": "^1.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-elasticache": "^3.637.0",
|
||||
"@aws-sdk/client-iam": "^3.525.0",
|
||||
"@aws-sdk/client-kms": "^3.609.0",
|
||||
"@aws-sdk/client-secrets-manager": "^3.504.0",
|
||||
"@aws-sdk/client-sts": "^3.600.0",
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@elastic/elasticsearch": "^8.15.0",
|
||||
"@fastify/cookie": "^9.3.1",
|
||||
"@fastify/cors": "^8.5.0",
|
||||
"@fastify/etag": "^5.1.0",
|
||||
"@fastify/formbody": "^7.4.0",
|
||||
"@fastify/helmet": "^11.1.1",
|
||||
"@fastify/multipart": "8.3.0",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/session": "^10.7.0",
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@peculiar/asn1-schema": "^2.3.8",
|
||||
"@peculiar/x509": "^1.12.1",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
"aws-sdk": "^2.1553.0",
|
||||
"axios": "^1.6.7",
|
||||
"axios-retry": "^4.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.4.2",
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"connect-redis": "^7.1.1",
|
||||
"cron": "^3.1.7",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.28.1",
|
||||
"fastify-plugin": "^4.5.1",
|
||||
"google-auth-library": "^9.9.0",
|
||||
"googleapis": "^137.1.0",
|
||||
"handlebars": "^4.7.8",
|
||||
"hdb": "^0.19.10",
|
||||
"ioredis": "^5.3.2",
|
||||
"jmespath": "^0.16.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@sentry/node": "^7.14.0",
|
||||
"@sentry/tracing": "^7.19.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"axios": "^1.1.3",
|
||||
"bigint-conversion": "^2.2.2",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-js": "^4.1.1",
|
||||
"dotenv": "^16.0.1",
|
||||
"express": "^4.18.1",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"express-validator": "^6.14.2",
|
||||
"handlebars": "^4.7.7",
|
||||
"helmet": "^5.1.1",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"jsrp": "^0.2.4",
|
||||
"jwks-rsa": "^3.1.0",
|
||||
"knex": "^3.0.1",
|
||||
"ldapjs": "^3.0.7",
|
||||
"ldif": "0.5.1",
|
||||
"libsodium-wrappers": "^0.7.13",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"mongodb": "^6.8.1",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
"nodemailer": "^6.9.9",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
"oracledb": "^6.4.0",
|
||||
"passport-github": "^1.1.0",
|
||||
"passport-gitlab2": "^5.0.0",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"pg-query-stream": "^4.5.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
"pkcs11js": "^2.1.6",
|
||||
"pkijs": "^3.2.4",
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.3.8",
|
||||
"safe-regex": "^2.1.1",
|
||||
"scim-patch": "^0.8.3",
|
||||
"scim2-parse-filter": "^0.2.10",
|
||||
"sjcl": "^1.0.8",
|
||||
"smee-client": "^2.0.0",
|
||||
"snowflake-sdk": "^1.14.0",
|
||||
"tedious": "^18.2.1",
|
||||
"mongoose": "^6.7.2",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.1.0",
|
||||
"query-string": "^7.1.3",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"uuid": "^9.0.1",
|
||||
"zod": "^3.22.4",
|
||||
"zod-to-json-schema": "^3.22.4"
|
||||
"typescript": "^4.9.3"
|
||||
},
|
||||
"name": "infisical-api",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
"scripts": {
|
||||
"prepare": "cd .. && npm install",
|
||||
"start": "npm run build && node build/index.js",
|
||||
"dev": "nodemon",
|
||||
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build",
|
||||
"lint": "eslint . --ext .ts",
|
||||
"lint-and-fix": "eslint . --ext .ts --fix",
|
||||
"lint-staged": "lint-staged"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/Infisical/infisical-api.git"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/Infisical/infisical-api/issues"
|
||||
},
|
||||
"homepage": "https://github.com/Infisical/infisical-api#readme",
|
||||
"description": "",
|
||||
"devDependencies": {
|
||||
"@posthog/plugin-scaffold": "^1.3.4",
|
||||
"@types/cookie-parser": "^1.4.3",
|
||||
"@types/cors": "^2.8.12",
|
||||
"@types/express": "^4.17.14",
|
||||
"@types/jsonwebtoken": "^8.5.9",
|
||||
"@types/node": "^18.11.3",
|
||||
"@types/nodemailer": "^6.4.6",
|
||||
"@types/swagger-jsdoc": "^6.0.1",
|
||||
"@types/swagger-ui-express": "^4.1.3",
|
||||
"@typescript-eslint/eslint-plugin": "^5.40.1",
|
||||
"@typescript-eslint/parser": "^5.40.1",
|
||||
"eslint": "^8.26.0",
|
||||
"install": "^0.13.0",
|
||||
"jest": "^29.3.1",
|
||||
"nodemon": "^2.0.19",
|
||||
"npm": "^8.19.3",
|
||||
"ts-node": "^10.9.1"
|
||||
}
|
||||
}
|
||||
|
@ -1,146 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { mkdirSync, writeFileSync } from "fs";
|
||||
import path from "path";
|
||||
import promptSync from "prompt-sync";
|
||||
|
||||
const prompt = promptSync({
|
||||
sigint: true
|
||||
});
|
||||
|
||||
type ComponentType = 1 | 2 | 3;
|
||||
|
||||
console.log(`
|
||||
Component List
|
||||
--------------
|
||||
0. Exit
|
||||
1. Service component
|
||||
2. DAL component
|
||||
3. Router component
|
||||
`);
|
||||
|
||||
function getComponentType(): ComponentType {
|
||||
while (true) {
|
||||
const input = prompt("Select a component (0-3): ");
|
||||
const componentType = parseInt(input, 10);
|
||||
|
||||
if (componentType === 0) {
|
||||
console.log("Exiting the program. Goodbye!");
|
||||
process.exit(0);
|
||||
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
|
||||
return componentType;
|
||||
} else {
|
||||
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
|
||||
}
|
||||
}
|
||||
}
|
||||
const componentType = getComponentType();
|
||||
|
||||
if (componentType === 1) {
|
||||
const componentName = prompt("Enter service name: ");
|
||||
const dir = path.join(__dirname, `../src/services/${componentName}`);
|
||||
const pascalCase = componentName
|
||||
.split("-")
|
||||
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
|
||||
.join("");
|
||||
const camelCase = componentName
|
||||
.split("-")
|
||||
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
|
||||
.join("");
|
||||
const dalTypeName = `T${pascalCase}DALFactory`;
|
||||
const dalName = `${camelCase}DALFactory`;
|
||||
const serviceTypeName = `T${pascalCase}ServiceFactory`;
|
||||
const serviceName = `${camelCase}ServiceFactory`;
|
||||
|
||||
mkdirSync(dir);
|
||||
|
||||
writeFileSync(
|
||||
path.join(dir, `${componentName}-dal.ts`),
|
||||
`import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
|
||||
|
||||
export const ${dalName} = (db: TDbClient) => {
|
||||
|
||||
return { };
|
||||
};
|
||||
`
|
||||
);
|
||||
|
||||
writeFileSync(
|
||||
path.join(dir, `${componentName}-service.ts`),
|
||||
`import { ${dalTypeName} } from "./${componentName}-dal";
|
||||
|
||||
type ${serviceTypeName}Dep = {
|
||||
${camelCase}DAL: ${dalTypeName};
|
||||
};
|
||||
|
||||
export type ${serviceTypeName} = ReturnType<typeof ${serviceName}>;
|
||||
|
||||
export const ${serviceName} = ({ ${camelCase}DAL }: ${serviceTypeName}Dep) => {
|
||||
return {};
|
||||
};
|
||||
`
|
||||
);
|
||||
writeFileSync(path.join(dir, `${componentName}-types.ts`), "");
|
||||
} else if (componentType === 2) {
|
||||
const componentName = prompt("Enter service name: ");
|
||||
const componentPath = prompt("Path wrt service folder: ");
|
||||
const pascalCase = componentName
|
||||
.split("-")
|
||||
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
|
||||
.join("");
|
||||
const camelCase = componentName
|
||||
.split("-")
|
||||
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
|
||||
.join("");
|
||||
const dalTypeName = `T${pascalCase}DALFactory`;
|
||||
const dalName = `${camelCase}DALFactory`;
|
||||
|
||||
writeFileSync(
|
||||
path.join(__dirname, "../src/services", componentPath, `${componentName}-dal.ts`),
|
||||
`import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
|
||||
|
||||
export const ${dalName} = (db: TDbClient) => {
|
||||
|
||||
return { };
|
||||
};
|
||||
`
|
||||
);
|
||||
} else if (componentType === 3) {
|
||||
const name = prompt("Enter router name: ");
|
||||
const version = prompt("Version number: ");
|
||||
const pascalCase = name
|
||||
.split("-")
|
||||
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
|
||||
.join("");
|
||||
writeFileSync(
|
||||
path.join(__dirname, `../src/server/routes/v${Number(version)}/${name}-router.ts`),
|
||||
`import { z } from "zod";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
|
||||
export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({}),
|
||||
response: {
|
||||
200: z.object({})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {}
|
||||
});
|
||||
};
|
||||
`
|
||||
);
|
||||
}
|
@@ -1,17 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify";

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for migration: ");

// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = slugify(migrationName);

execSync(
  `npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
  { stdio: "inherit" }
);
@@ -1,16 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import { readdirSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
execSync(
  `npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
    fileCounter + 1
  }-${migrationName}`,
  { stdio: "inherit" }
);
@ -1,164 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import dotenv from "dotenv";
|
||||
import path from "path";
|
||||
import knex from "knex";
|
||||
import { writeFileSync } from "fs";
|
||||
|
||||
dotenv.config({
|
||||
path: path.join(__dirname, "../../.env.migration")
|
||||
});
|
||||
|
||||
const db = knex({
|
||||
client: "pg",
|
||||
connection: process.env.DB_CONNECTION_URI
|
||||
});
|
||||
|
||||
const getZodPrimitiveType = (type: string) => {
|
||||
switch (type) {
|
||||
case "uuid":
|
||||
return "z.string().uuid()";
|
||||
case "character varying":
|
||||
return "z.string()";
|
||||
case "ARRAY":
|
||||
return "z.string().array()";
|
||||
case "boolean":
|
||||
return "z.boolean()";
|
||||
case "jsonb":
|
||||
return "z.unknown()";
|
||||
case "json":
|
||||
return "z.unknown()";
|
||||
case "timestamp with time zone":
|
||||
return "z.date()";
|
||||
case "integer":
|
||||
return "z.number()";
|
||||
case "bigint":
|
||||
return "z.coerce.number()";
|
||||
case "text":
|
||||
return "z.string()";
|
||||
case "bytea":
|
||||
return "zodBuffer";
|
||||
default:
|
||||
throw new Error(`Invalid type: ${type}`);
|
||||
}
|
||||
};
|
||||
|
||||
const getZodDefaultValue = (type: unknown, value: string | number | boolean | Object) => {
|
||||
if (!value || value === "null") return;
|
||||
switch (type) {
|
||||
case "uuid":
|
||||
return `.default("00000000-0000-0000-0000-000000000000")`;
|
||||
case "character varying": {
|
||||
if (value === "gen_random_uuid()") return;
|
||||
if (typeof value === "string" && value.includes("::")) {
|
||||
return `.default(${value.split("::")[0]})`;
|
||||
}
|
||||
return `.default(${value})`;
|
||||
}
|
||||
case "ARRAY":
|
||||
return `.default(${value})`;
|
||||
case "boolean":
|
||||
return `.default(${value})`;
|
||||
case "jsonb":
|
||||
return "z.string()";
|
||||
case "json":
|
||||
return "z.string()";
|
||||
case "timestamp with time zone": {
|
||||
if (value === "CURRENT_TIMESTAMP") return;
|
||||
return "z.string().datetime()";
|
||||
}
|
||||
case "integer": {
|
||||
if ((value as string).includes("nextval")) return;
|
||||
return `.default(${value})`;
|
||||
}
|
||||
case "bigint": {
|
||||
if ((value as string).includes("nextval")) return;
|
||||
return `.default(${parseInt((value as string).split("::")[0].slice(1, -1), 10)})`;
|
||||
}
|
||||
case "text":
|
||||
if (typeof value === "string" && value.includes("::")) {
|
||||
return `.default(${value.split("::")[0]})`;
|
||||
}
|
||||
return `.default(${value})`;
|
||||
default:
|
||||
throw new Error(`Invalid type: ${type}`);
|
||||
}
|
||||
};
|
||||
|
||||
const main = async () => {
|
||||
const tables = (
|
||||
await db("information_schema.tables")
|
||||
.whereRaw("table_schema = current_schema()")
|
||||
.select<{ tableName: string }[]>("table_name as tableName")
|
||||
.orderBy("table_name")
|
||||
).filter(
|
||||
(el) =>
|
||||
!el.tableName.includes("_migrations") &&
|
||||
!el.tableName.includes("audit_logs_") &&
|
||||
el.tableName !== "intermediate_audit_logs"
|
||||
);
|
||||
|
||||
for (let i = 0; i < tables.length; i += 1) {
|
||||
const { tableName } = tables[i];
|
||||
const columns = await db(tableName).columnInfo();
|
||||
const columnNames = Object.keys(columns);
|
||||
|
||||
let schema = "";
|
||||
const zodImportSet = new Set<string>();
|
||||
for (let colNum = 0; colNum < columnNames.length; colNum++) {
|
||||
const columnName = columnNames[colNum];
|
||||
const colInfo = columns[columnName];
|
||||
let ztype = getZodPrimitiveType(colInfo.type);
|
||||
if (["zodBuffer"].includes(ztype)) {
|
||||
zodImportSet.add(ztype);
|
||||
}
|
||||
|
||||
// don't put optional on id
|
||||
if (colInfo.defaultValue && columnName !== "id") {
|
||||
const { defaultValue } = colInfo;
|
||||
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
|
||||
if (zSchema) {
|
||||
ztype = ztype.concat(zSchema);
|
||||
}
|
||||
}
|
||||
if (colInfo.nullable) {
|
||||
ztype = ztype.concat(".nullable().optional()");
|
||||
}
|
||||
schema = schema.concat(
|
||||
`${!schema ? "\n" : ""} ${columnName}: ${ztype}${colNum === columnNames.length - 1 ? "" : ","}\n`
|
||||
);
|
||||
}
|
||||
|
||||
const dashcase = tableName.split("_").join("-");
|
||||
const pascalCase = tableName
|
||||
.split("_")
|
||||
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
|
||||
|
||||
const zodImports = Array.from(zodImportSet);
|
||||
|
||||
// the insert and update are changed to zod input type to use default cases
|
||||
writeFileSync(
|
||||
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
|
||||
`// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ${pascalCase}Schema = z.object({${schema}});
|
||||
|
||||
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
|
||||
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
|
||||
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
|
||||
`
|
||||
);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
main();
|
@ -1,84 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import promptSync from "prompt-sync";
|
||||
import { execSync } from "child_process";
|
||||
import path from "path";
|
||||
import { existsSync } from "fs";
|
||||
|
||||
const prompt = promptSync({
|
||||
sigint: true
|
||||
});
|
||||
|
||||
const exportDb = () => {
|
||||
const exportHost = prompt("Enter your Postgres Host to migrate from: ");
|
||||
const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
|
||||
const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
|
||||
const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
|
||||
const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
|
||||
|
||||
// we do not include the audit_log and secret_sharing entries
|
||||
execSync(
|
||||
`PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
|
||||
__dirname,
|
||||
"../src/db/dump.sql"
|
||||
)}`,
|
||||
{ stdio: "inherit" }
|
||||
);
|
||||
};
|
||||
|
||||
const importDbForOrg = () => {
|
||||
const importHost = prompt("Enter your Postgres Host to migrate to: ");
|
||||
const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
|
||||
const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
|
||||
const importPassword = prompt("Enter your Postgres Password to migrate to: ");
|
||||
const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
|
||||
const orgId = prompt("Enter the organization ID to migrate: ");
|
||||
|
||||
if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
|
||||
console.log("File not found, please export the database first.");
|
||||
return;
|
||||
}
|
||||
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
|
||||
__dirname,
|
||||
"../src/db/dump.sql"
|
||||
)}`
|
||||
);
|
||||
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
|
||||
);
|
||||
|
||||
// delete global/instance-level resources not relevant to the organization to migrate
|
||||
// users
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
|
||||
);
|
||||
|
||||
// identities
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
|
||||
);
|
||||
|
||||
// reset slack configuration in superAdmin
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
|
||||
);
|
||||
|
||||
console.log("Organization migrated successfully.");
|
||||
};
|
||||
|
||||
const main = () => {
|
||||
const action = prompt(
|
||||
"Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
|
||||
);
|
||||
if (action === "1") {
|
||||
exportDb();
|
||||
} else if (action === "2") {
|
||||
importDbForOrg();
|
||||
} else {
|
||||
console.log("Invalid action");
|
||||
}
|
||||
};
|
||||
|
||||
main();
|
@@ -1,27 +0,0 @@
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";

function replaceMjsOccurrences(directory: string) {
  fs.readdir(directory, (err, files) => {
    if (err) throw err;
    files.forEach((file) => {
      const filePath = path.join(directory, file);
      if (fs.statSync(filePath).isDirectory()) {
        replaceMjsOccurrences(filePath);
      } else {
        fs.readFile(filePath, "utf8", (err, data) => {
          if (err) throw err;
          const result = data.replace(/\.mjs/g, ".js");
          fs.writeFile(filePath, result, "utf8", (err) => {
            if (err) throw err;
            // eslint-disable-next-line no-console
            console.log(`Updated: ${filePath}`);
          });
        });
      }
    });
  });
}

replaceMjsOccurrences("dist");
backend/src/@types/fastify-zod.d.ts (vendored, 18 lines)
@@ -1,18 +0,0 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";

import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";

declare global {
  type FastifyZodProvider = FastifyInstance<
    RawServerDefault,
    RawRequestDefaultExpression<RawServerDefault>,
    RawReplyDefaultExpression<RawServerDefault>,
    Readonly<Logger>,
    ZodTypeProvider
  >;

  // used only for testing
  const testServer: FastifyZodProvider;
  const jwtAuthToken: string;
}
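
Since testServer and jwtAuthToken are declared as test-only globals here, specs can use them directly. A minimal illustrative fragment follows; the route is an example and not taken from this diff.

// Hypothetical spec relying on the globals declared above.
test("authenticated request via the shared test server", async () => {
  const res = await testServer.inject({
    method: "GET",
    url: "/api/status", // illustrative route
    headers: { authorization: `Bearer ${jwtAuthToken}` }
  });
  expect(res.statusCode).toBe(200);
});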
backend/src/@types/fastify.d.ts (vendored, 203 lines)
@@ -1,203 +0,0 @@
|
||||
import "fastify";
|
||||
|
||||
import { TUsers } from "@app/db/schemas";
|
||||
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
||||
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
||||
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
|
||||
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
|
||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
|
||||
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
|
||||
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
||||
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
||||
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
|
||||
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
|
||||
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
|
||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
|
||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
||||
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
|
||||
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
||||
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
|
||||
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
||||
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
||||
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
||||
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
||||
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
||||
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
||||
import { TProjectKeyServiceFactory } from "@app/services/project-key/project-key-service";
|
||||
import { TProjectMembershipServiceFactory } from "@app/services/project-membership/project-membership-service";
|
||||
import { TProjectRoleServiceFactory } from "@app/services/project-role/project-role-service";
|
||||
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
|
||||
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
|
||||
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
|
||||
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
|
||||
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
|
||||
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
||||
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
||||
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
||||
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
|
||||
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
||||
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { TUserServiceFactory } from "@app/services/user/user-service";
|
||||
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyRequest {
|
||||
realIp: string;
|
||||
// used for mfa session authentication
|
||||
mfa: {
|
||||
userId: string;
|
||||
orgId?: string;
|
||||
user: TUsers;
|
||||
};
|
||||
// identity injection. Depending on which kind of token is used, the information is filled in auth
|
||||
auth: TAuthMode;
|
||||
permission: {
|
||||
authMethod: ActorAuthMethod;
|
||||
type: ActorType;
|
||||
id: string;
|
||||
orgId: string;
|
||||
};
|
||||
rateLimits: RateLimitConfiguration;
|
||||
// passport data
|
||||
passportUser: {
|
||||
isUserCompleted: string;
|
||||
providerAuthToken: string;
|
||||
};
|
||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||
}
|
||||
|
||||
interface FastifyInstance {
|
||||
services: {
|
||||
login: TAuthLoginFactory;
|
||||
password: TAuthPasswordFactory;
|
||||
signup: TAuthSignupFactory;
|
||||
authToken: TAuthTokenServiceFactory;
|
||||
permission: TPermissionServiceFactory;
|
||||
org: TOrgServiceFactory;
|
||||
orgRole: TOrgRoleServiceFactory;
|
||||
oidc: TOidcConfigServiceFactory;
|
||||
superAdmin: TSuperAdminServiceFactory;
|
||||
user: TUserServiceFactory;
|
||||
group: TGroupServiceFactory;
|
||||
groupProject: TGroupProjectServiceFactory;
|
||||
apiKey: TApiKeyServiceFactory;
|
||||
pkiAlert: TPkiAlertServiceFactory;
|
||||
project: TProjectServiceFactory;
|
||||
projectMembership: TProjectMembershipServiceFactory;
|
||||
projectEnv: TProjectEnvServiceFactory;
|
||||
projectKey: TProjectKeyServiceFactory;
|
||||
projectRole: TProjectRoleServiceFactory;
|
||||
secret: TSecretServiceFactory;
|
||||
secretReplication: TSecretReplicationServiceFactory;
|
||||
secretTag: TSecretTagServiceFactory;
|
||||
secretImport: TSecretImportServiceFactory;
|
||||
projectBot: TProjectBotServiceFactory;
|
||||
folder: TSecretFolderServiceFactory;
|
||||
integration: TIntegrationServiceFactory;
|
||||
integrationAuth: TIntegrationAuthServiceFactory;
|
||||
webhook: TWebhookServiceFactory;
|
||||
serviceToken: TServiceTokenServiceFactory;
|
||||
identity: TIdentityServiceFactory;
|
||||
identityAccessToken: TIdentityAccessTokenServiceFactory;
|
||||
identityProject: TIdentityProjectServiceFactory;
|
||||
identityTokenAuth: TIdentityTokenAuthServiceFactory;
|
||||
identityUa: TIdentityUaServiceFactory;
|
||||
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
|
||||
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
||||
identityOidcAuth: TIdentityOidcAuthServiceFactory;
|
||||
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
|
||||
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
|
||||
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
|
||||
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
|
||||
secretRotation: TSecretRotationServiceFactory;
|
||||
snapshot: TSecretSnapshotServiceFactory;
|
||||
saml: TSamlConfigServiceFactory;
|
||||
scim: TScimServiceFactory;
|
||||
ldap: TLdapConfigServiceFactory;
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
auditLogStream: TAuditLogStreamServiceFactory;
|
||||
certificate: TCertificateServiceFactory;
|
||||
certificateTemplate: TCertificateTemplateServiceFactory;
|
||||
certificateAuthority: TCertificateAuthorityServiceFactory;
|
||||
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
|
||||
certificateEst: TCertificateEstServiceFactory;
|
||||
pkiCollection: TPkiCollectionServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
trustedIp: TTrustedIpServiceFactory;
|
||||
secretBlindIndex: TSecretBlindIndexServiceFactory;
|
||||
telemetry: TTelemetryServiceFactory;
|
||||
dynamicSecret: TDynamicSecretServiceFactory;
|
||||
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
|
||||
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
|
||||
secretSharing: TSecretSharingServiceFactory;
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
userEngagement: TUserEngagementServiceFactory;
|
||||
externalKms: TExternalKmsServiceFactory;
|
||||
hsm: THsmServiceFactory;
|
||||
orgAdmin: TOrgAdminServiceFactory;
|
||||
slack: TSlackServiceFactory;
|
||||
workflowIntegration: TWorkflowIntegrationServiceFactory;
|
||||
cmek: TCmekServiceFactory;
|
||||
migration: TExternalMigrationServiceFactory;
|
||||
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
|
||||
projectTemplate: TProjectTemplateServiceFactory;
|
||||
};
|
||||
// this is exclusively used by middlewares in which we need to inject data
// everywhere else, access it through the service layer
|
||||
store: {
|
||||
user: Pick<TUserDALFactory, "findById">;
|
||||
};
|
||||
}
|
||||
}
|
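The module augmentation above is what lets route handlers read the injected identity and services without casts. A hypothetical handler that only touches fields declared in the augmentation (the URL and the surrounding `server` instance are assumptions):

// Hypothetical route: echo back the actor resolved by the auth plugin.
server.route({
  method: "GET",
  url: "/whoami",
  handler: async (req) => ({
    actorType: req.permission.type,
    actorId: req.permission.id,
    orgId: req.permission.orgId,
    authMethod: req.permission.authMethod
  })
});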
4 backend/src/@types/hdb.d.ts vendored
@@ -1,4 +0,0 @@
declare module "hdb" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function createClient(options): any;
}
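Because the `hdb` module is declared untyped, callers get no type help from the compiler. A hypothetical usage sketch of the SAP HANA client (host, credentials, and query are placeholders; the callback-style connect/exec/end calls are assumed from the hdb package's documentation):

import { createClient } from "hdb";

// Hypothetical: open a connection and run a probe query; everything is `any` here.
const client = createClient({ host: "hana.example.com", port: 30015, user: "USER", password: "PASSWORD" });
client.connect((connErr: unknown) => {
  if (connErr) throw connErr;
  client.exec("SELECT * FROM DUMMY", (execErr: unknown, rows: unknown) => {
    if (execErr) throw execErr;
    console.log(rows);
    client.end();
  });
});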
830 backend/src/@types/knex.d.ts vendored
@@ -1,830 +0,0 @@
|
||||
import { Knex as KnexOriginal } from "knex";
|
||||
|
||||
import {
|
||||
TableName,
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate,
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate,
|
||||
TAccessApprovalRequestsUpdate,
|
||||
TApiKeys,
|
||||
TApiKeysInsert,
|
||||
TApiKeysUpdate,
|
||||
TAuditLogs,
|
||||
TAuditLogsInsert,
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate,
|
||||
TAuditLogsUpdate,
|
||||
TAuthTokens,
|
||||
TAuthTokenSessions,
|
||||
TAuthTokenSessionsInsert,
|
||||
TAuthTokenSessionsUpdate,
|
||||
TAuthTokensInsert,
|
||||
TAuthTokensUpdate,
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate,
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate,
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate,
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate,
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate,
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate,
|
||||
TCertificates,
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate,
|
||||
TCertificatesInsert,
|
||||
TCertificatesUpdate,
|
||||
TCertificateTemplateEstConfigs,
|
||||
TCertificateTemplateEstConfigsInsert,
|
||||
TCertificateTemplateEstConfigsUpdate,
|
||||
TCertificateTemplates,
|
||||
TCertificateTemplatesInsert,
|
||||
TCertificateTemplatesUpdate,
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate,
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate,
|
||||
TExternalKms,
|
||||
TExternalKmsInsert,
|
||||
TExternalKmsUpdate,
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate,
|
||||
TGitAppOrg,
|
||||
TGitAppOrgInsert,
|
||||
TGitAppOrgUpdate,
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate,
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate,
|
||||
TGroups,
|
||||
TGroupsInsert,
|
||||
TGroupsUpdate,
|
||||
TIdentities,
|
||||
TIdentitiesInsert,
|
||||
TIdentitiesUpdate,
|
||||
TIdentityAccessTokens,
|
||||
TIdentityAccessTokensInsert,
|
||||
TIdentityAccessTokensUpdate,
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate,
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate,
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate,
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate,
|
||||
TIdentityMetadata,
|
||||
TIdentityMetadataInsert,
|
||||
TIdentityMetadataUpdate,
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate,
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate,
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate,
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate,
|
||||
TIdentityProjectMemberships,
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate,
|
||||
TIdentityTokenAuths,
|
||||
TIdentityTokenAuthsInsert,
|
||||
TIdentityTokenAuthsUpdate,
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
TIdentityUaClientSecretsUpdate,
|
||||
TIdentityUniversalAuths,
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate,
|
||||
TIncidentContacts,
|
||||
TIncidentContactsInsert,
|
||||
TIncidentContactsUpdate,
|
||||
TIntegrationAuths,
|
||||
TIntegrationAuthsInsert,
|
||||
TIntegrationAuthsUpdate,
|
||||
TIntegrations,
|
||||
TIntegrationsInsert,
|
||||
TIntegrationsUpdate,
|
||||
TInternalKms,
|
||||
TInternalKmsInsert,
|
||||
TInternalKmsUpdate,
|
||||
TKmsKeys,
|
||||
TKmsKeysInsert,
|
||||
TKmsKeysUpdate,
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate,
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate,
|
||||
TLdapConfigs,
|
||||
TLdapConfigsInsert,
|
||||
TLdapConfigsUpdate,
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate,
|
||||
TOidcConfigs,
|
||||
TOidcConfigsInsert,
|
||||
TOidcConfigsUpdate,
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate,
|
||||
TOrgBots,
|
||||
TOrgBotsInsert,
|
||||
TOrgBotsUpdate,
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate,
|
||||
TOrgRoles,
|
||||
TOrgRolesInsert,
|
||||
TOrgRolesUpdate,
|
||||
TPkiAlerts,
|
||||
TPkiAlertsInsert,
|
||||
TPkiAlertsUpdate,
|
||||
TPkiCollectionItems,
|
||||
TPkiCollectionItemsInsert,
|
||||
TPkiCollectionItemsUpdate,
|
||||
TPkiCollections,
|
||||
TPkiCollectionsInsert,
|
||||
TPkiCollectionsUpdate,
|
||||
TProjectBots,
|
||||
TProjectBotsInsert,
|
||||
TProjectBotsUpdate,
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate,
|
||||
TProjectKeys,
|
||||
TProjectKeysInsert,
|
||||
TProjectKeysUpdate,
|
||||
TProjectMemberships,
|
||||
TProjectMembershipsInsert,
|
||||
TProjectMembershipsUpdate,
|
||||
TProjectRoles,
|
||||
TProjectRolesInsert,
|
||||
TProjectRolesUpdate,
|
||||
TProjects,
|
||||
TProjectsInsert,
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate,
|
||||
TProjectsUpdate,
|
||||
TProjectTemplates,
|
||||
TProjectTemplatesInsert,
|
||||
TProjectTemplatesUpdate,
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate,
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate,
|
||||
TRateLimit,
|
||||
TRateLimitInsert,
|
||||
TRateLimitUpdate,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsInsert,
|
||||
TSamlConfigsUpdate,
|
||||
TScimTokens,
|
||||
TScimTokensInsert,
|
||||
TScimTokensUpdate,
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesApprovers,
|
||||
TSecretApprovalPoliciesApproversInsert,
|
||||
TSecretApprovalPoliciesApproversUpdate,
|
||||
TSecretApprovalPoliciesInsert,
|
||||
TSecretApprovalPoliciesUpdate,
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestSecretTags,
|
||||
TSecretApprovalRequestSecretTagsInsert,
|
||||
TSecretApprovalRequestSecretTagsUpdate,
|
||||
TSecretApprovalRequestSecretTagsV2,
|
||||
TSecretApprovalRequestSecretTagsV2Insert,
|
||||
TSecretApprovalRequestSecretTagsV2Update,
|
||||
TSecretApprovalRequestsInsert,
|
||||
TSecretApprovalRequestsReviewers,
|
||||
TSecretApprovalRequestsReviewersInsert,
|
||||
TSecretApprovalRequestsReviewersUpdate,
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsUpdate,
|
||||
TSecretApprovalRequestsSecretsV2,
|
||||
TSecretApprovalRequestsSecretsV2Insert,
|
||||
TSecretApprovalRequestsSecretsV2Update,
|
||||
TSecretApprovalRequestsUpdate,
|
||||
TSecretBlindIndexes,
|
||||
TSecretBlindIndexesInsert,
|
||||
TSecretBlindIndexesUpdate,
|
||||
TSecretFolders,
|
||||
TSecretFoldersInsert,
|
||||
TSecretFoldersUpdate,
|
||||
TSecretFolderVersions,
|
||||
TSecretFolderVersionsInsert,
|
||||
TSecretFolderVersionsUpdate,
|
||||
TSecretImports,
|
||||
TSecretImportsInsert,
|
||||
TSecretImportsUpdate,
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate,
|
||||
TSecretReferencesV2,
|
||||
TSecretReferencesV2Insert,
|
||||
TSecretReferencesV2Update,
|
||||
TSecretRotationOutputs,
|
||||
TSecretRotationOutputsInsert,
|
||||
TSecretRotationOutputsUpdate,
|
||||
TSecretRotationOutputV2,
|
||||
TSecretRotationOutputV2Insert,
|
||||
TSecretRotationOutputV2Update,
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate,
|
||||
TSecrets,
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate,
|
||||
TSecretSharing,
|
||||
TSecretSharingInsert,
|
||||
TSecretSharingUpdate,
|
||||
TSecretsInsert,
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate,
|
||||
TSecretSnapshots,
|
||||
TSecretSnapshotSecrets,
|
||||
TSecretSnapshotSecretsInsert,
|
||||
TSecretSnapshotSecretsUpdate,
|
||||
TSecretSnapshotSecretsV2,
|
||||
TSecretSnapshotSecretsV2Insert,
|
||||
TSecretSnapshotSecretsV2Update,
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate,
|
||||
TSecretsUpdate,
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate,
|
||||
TSecretTags,
|
||||
TSecretTagsInsert,
|
||||
TSecretTagsUpdate,
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate,
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate,
|
||||
TSecretVersionV2TagJunction,
|
||||
TSecretVersionV2TagJunctionInsert,
|
||||
TSecretVersionV2TagJunctionUpdate,
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate,
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate,
|
||||
TSuperAdmin,
|
||||
TSuperAdminInsert,
|
||||
TSuperAdminUpdate,
|
||||
TTrustedIps,
|
||||
TTrustedIpsInsert,
|
||||
TTrustedIpsUpdate,
|
||||
TUserActions,
|
||||
TUserActionsInsert,
|
||||
TUserActionsUpdate,
|
||||
TUserAliases,
|
||||
TUserAliasesInsert,
|
||||
TUserAliasesUpdate,
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate,
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate,
|
||||
TUsers,
|
||||
TUsersInsert,
|
||||
TUsersUpdate,
|
||||
TWebhooks,
|
||||
TWebhooksInsert,
|
||||
TWebhooksUpdate,
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
} from "@app/db/schemas";
|
||||
import {
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
} from "@app/db/schemas/external-group-org-role-mappings";
|
||||
import {
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate
|
||||
} from "@app/db/schemas/secret-v2-tag-junction";
|
||||
import {
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update
|
||||
} from "@app/db/schemas/secret-versions-v2";
|
||||
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
|
||||
|
||||
declare module "knex" {
|
||||
namespace Knex {
|
||||
interface QueryInterface {
|
||||
primaryNode(): KnexOriginal;
|
||||
replicaNode(): KnexOriginal;
|
||||
}
|
||||
}
|
||||
}
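The primaryNode/replicaNode augmentation above appears to support read-replica routing. A hypothetical data-access helper showing the intent (the helper and column choice are not from this file and are assumptions):

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Hypothetical: send reads to a replica, keep writes on the primary.
const findUserById = async (db: Knex, id: string) =>
  db.replicaNode()(TableName.Users).where({ id }).first();

const touchUser = async (db: Knex, id: string) =>
  db.primaryNode()(TableName.Users).where({ id }).update({ updatedAt: new Date() });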
|
||||
|
||||
declare module "knex/types/tables" {
|
||||
interface Tables {
|
||||
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
|
||||
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
|
||||
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate
|
||||
>;
|
||||
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
|
||||
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
|
||||
TCertificateTemplates,
|
||||
TCertificateTemplatesInsert,
|
||||
TCertificateTemplatesUpdate
|
||||
>;
|
||||
[TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
|
||||
TCertificateTemplateEstConfigs,
|
||||
TCertificateTemplateEstConfigsInsert,
|
||||
TCertificateTemplateEstConfigsUpdate
|
||||
>;
|
||||
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate
|
||||
>;
|
||||
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate
|
||||
>;
|
||||
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
|
||||
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
|
||||
TPkiCollections,
|
||||
TPkiCollectionsInsert,
|
||||
TPkiCollectionsUpdate
|
||||
>;
|
||||
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
|
||||
TPkiCollectionItems,
|
||||
TPkiCollectionItemsInsert,
|
||||
TPkiCollectionItemsUpdate
|
||||
>;
|
||||
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
|
||||
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate
|
||||
>;
|
||||
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
|
||||
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
|
||||
TAuthTokenSessions,
|
||||
TAuthTokenSessionsInsert,
|
||||
TAuthTokenSessionsUpdate
|
||||
>;
|
||||
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate
|
||||
>;
|
||||
[TableName.Organization]: KnexOriginal.CompositeTableType<
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate
|
||||
>;
|
||||
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate
|
||||
>;
|
||||
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
|
||||
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
|
||||
TIncidentContacts,
|
||||
TIncidentContactsInsert,
|
||||
TIncidentContactsUpdate
|
||||
>;
|
||||
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
|
||||
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
|
||||
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
|
||||
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
|
||||
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TProjectMemberships,
|
||||
TProjectMembershipsInsert,
|
||||
TProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.Environment]: KnexOriginal.CompositeTableType<
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate
|
||||
>;
|
||||
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
|
||||
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
|
||||
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate
|
||||
>;
|
||||
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
|
||||
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
|
||||
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate
|
||||
>;
|
||||
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
|
||||
TSecretBlindIndexes,
|
||||
TSecretBlindIndexesInsert,
|
||||
TSecretBlindIndexesUpdate
|
||||
>;
|
||||
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
|
||||
TSecretFolders,
|
||||
TSecretFoldersInsert,
|
||||
TSecretFoldersUpdate
|
||||
>;
|
||||
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
|
||||
TSecretFolderVersions,
|
||||
TSecretFolderVersionsInsert,
|
||||
TSecretFolderVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
|
||||
TSecretSharing,
|
||||
TSecretSharingInsert,
|
||||
TSecretSharingUpdate
|
||||
>;
|
||||
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
|
||||
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
|
||||
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
|
||||
TSecretImports,
|
||||
TSecretImportsInsert,
|
||||
TSecretImportsUpdate
|
||||
>;
|
||||
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
|
||||
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
|
||||
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate
|
||||
>;
|
||||
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
|
||||
TIntegrationAuths,
|
||||
TIntegrationAuthsInsert,
|
||||
TIntegrationAuthsUpdate
|
||||
>;
|
||||
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
|
||||
[TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityTokenAuths,
|
||||
TIdentityTokenAuthsInsert,
|
||||
TIdentityTokenAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUniversalAuths,
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
|
||||
TIdentityMetadata,
|
||||
TIdentityMetadataInsert,
|
||||
TIdentityMetadataUpdate
|
||||
>;
|
||||
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
TIdentityUaClientSecretsUpdate
|
||||
>;
|
||||
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAccessTokens,
|
||||
TIdentityAccessTokensInsert,
|
||||
TIdentityAccessTokensUpdate
|
||||
>;
|
||||
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectMemberships,
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
|
||||
[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
|
||||
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesInsert,
|
||||
TSecretApprovalPoliciesUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalPoliciesApprovers,
|
||||
TSecretApprovalPoliciesApproversInsert,
|
||||
TSecretApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestsInsert,
|
||||
TSecretApprovalRequestsUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsReviewers,
|
||||
TSecretApprovalRequestsReviewersInsert,
|
||||
TSecretApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestSecretTags,
|
||||
TSecretApprovalRequestSecretTagsInsert,
|
||||
TSecretApprovalRequestSecretTagsUpdate
|
||||
>;
|
||||
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate
|
||||
>;
|
||||
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationOutputs,
|
||||
TSecretRotationOutputsInsert,
|
||||
TSecretRotationOutputsUpdate
|
||||
>;
|
||||
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshots,
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate
|
||||
>;
|
||||
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotSecrets,
|
||||
TSecretSnapshotSecretsInsert,
|
||||
TSecretSnapshotSecretsUpdate
|
||||
>;
|
||||
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate
|
||||
>;
|
||||
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate
|
||||
>;
|
||||
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate
|
||||
>;
|
||||
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
|
||||
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate
|
||||
>;
|
||||
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate
|
||||
>;
|
||||
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate
|
||||
>;
|
||||
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate
|
||||
>;
|
||||
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
[TableName.SecretV2]: KnexOriginal.CompositeTableType<TSecretsV2, TSecretsV2Insert, TSecretsV2Update>;
|
||||
[TableName.SecretVersionV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update
|
||||
>;
|
||||
[TableName.SecretReferenceV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretReferencesV2,
|
||||
TSecretReferencesV2Insert,
|
||||
TSecretReferencesV2Update
|
||||
>;
|
||||
// Junction tables
|
||||
[TableName.SecretV2JnTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate
|
||||
>;
|
||||
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionV2Tag]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionV2TagJunction,
|
||||
TSecretVersionV2TagJunctionInsert,
|
||||
TSecretVersionV2TagJunctionUpdate
|
||||
>;
|
||||
[TableName.SnapshotSecretV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotSecretsV2,
|
||||
TSecretSnapshotSecretsV2Insert,
|
||||
TSecretSnapshotSecretsV2Update
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsSecretsV2,
|
||||
TSecretApprovalRequestsSecretsV2Insert,
|
||||
TSecretApprovalRequestsSecretsV2Update
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretTagV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestSecretTagsV2,
|
||||
TSecretApprovalRequestSecretTagsV2Insert,
|
||||
TSecretApprovalRequestSecretTagsV2Update
|
||||
>;
|
||||
[TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationOutputV2,
|
||||
TSecretRotationOutputV2Insert,
|
||||
TSecretRotationOutputV2Update
|
||||
>;
|
||||
// KMS service
|
||||
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate
|
||||
>;
|
||||
[TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>;
|
||||
[TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>;
|
||||
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate
|
||||
>;
|
||||
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
>;
|
||||
[TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
|
||||
TProjectTemplates,
|
||||
TProjectTemplatesInsert,
|
||||
TProjectTemplatesUpdate
|
||||
>;
|
||||
}
|
||||
}
|
4 backend/src/@types/ldif.d.ts vendored
@@ -1,4 +0,0 @@
declare module "ldif" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function parse(input: string, ...args: any[]): any;
}
1 backend/src/@types/passport-gitlab2.d.ts vendored
@@ -1 +0,0 @@
declare module "passport-gitlab2";
79 backend/src/config/index.ts Normal file
@@ -0,0 +1,79 @@
const PORT = process.env.PORT || 4000;
const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME! || '86400';
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
const JWT_AUTH_SECRET = process.env.JWT_AUTH_SECRET!;
const JWT_REFRESH_LIFETIME = process.env.JWT_REFRESH_LIFETIME! || '90d';
const JWT_REFRESH_SECRET = process.env.JWT_REFRESH_SECRET!;
const JWT_SERVICE_SECRET = process.env.JWT_SERVICE_SECRET!;
const JWT_SIGNUP_LIFETIME = process.env.JWT_SIGNUP_LIFETIME! || '15m';
const JWT_SIGNUP_SECRET = process.env.JWT_SIGNUP_SECRET!;
const MONGO_URL = process.env.MONGO_URL!;
const NODE_ENV = process.env.NODE_ENV! || 'production';
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_ID_HEROKU = process.env.CLIENT_ID_HEROKU!;
const CLIENT_ID_VERCEL = process.env.CLIENT_ID_VERCEL!;
const CLIENT_ID_NETLIFY = process.env.CLIENT_ID_NETLIFY!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY =
  process.env.POSTHOG_PROJECT_API_KEY! ||
  'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
const PRIVATE_KEY = process.env.PRIVATE_KEY!;
const PUBLIC_KEY = process.env.PUBLIC_KEY!;
const SENTRY_DSN = process.env.SENTRY_DSN!;
const SITE_URL = process.env.SITE_URL!;
const SMTP_HOST = process.env.SMTP_HOST! || 'smtp.gmail.com';
const SMTP_PORT = process.env.SMTP_PORT! || 587;
const SMTP_NAME = process.env.SMTP_NAME!;
const SMTP_USERNAME = process.env.SMTP_USERNAME!;
const SMTP_PASSWORD = process.env.SMTP_PASSWORD!;
const STRIPE_PRODUCT_CARD_AUTH = process.env.STRIPE_PRODUCT_CARD_AUTH!;
const STRIPE_PRODUCT_PRO = process.env.STRIPE_PRODUCT_PRO!;
const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PUBLISHABLE_KEY = process.env.STRIPE_PUBLISHABLE_KEY!;
const STRIPE_SECRET_KEY = process.env.STRIPE_SECRET_KEY!;
const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET!;
const TELEMETRY_ENABLED = process.env.TELEMETRY_ENABLED! !== 'false' && true;

export {
  PORT,
  EMAIL_TOKEN_LIFETIME,
  ENCRYPTION_KEY,
  JWT_AUTH_LIFETIME,
  JWT_AUTH_SECRET,
  JWT_REFRESH_LIFETIME,
  JWT_REFRESH_SECRET,
  JWT_SERVICE_SECRET,
  JWT_SIGNUP_LIFETIME,
  JWT_SIGNUP_SECRET,
  MONGO_URL,
  NODE_ENV,
  CLIENT_ID_HEROKU,
  CLIENT_ID_VERCEL,
  CLIENT_ID_NETLIFY,
  CLIENT_SECRET_HEROKU,
  CLIENT_SECRET_VERCEL,
  CLIENT_SECRET_NETLIFY,
  CLIENT_SLUG_VERCEL,
  POSTHOG_HOST,
  POSTHOG_PROJECT_API_KEY,
  PRIVATE_KEY,
  PUBLIC_KEY,
  SENTRY_DSN,
  SITE_URL,
  SMTP_HOST,
  SMTP_PORT,
  SMTP_NAME,
  SMTP_USERNAME,
  SMTP_PASSWORD,
  STRIPE_PRODUCT_CARD_AUTH,
  STRIPE_PRODUCT_PRO,
  STRIPE_PRODUCT_STARTER,
  STRIPE_PUBLISHABLE_KEY,
  STRIPE_SECRET_KEY,
  STRIPE_WEBHOOK_SECRET,
  TELEMETRY_ENABLED
};
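A hypothetical consumer of this config module; values are read from the environment once at import time (the bootstrap wiring shown is an assumption, not code from this file):

import * as Sentry from '@sentry/node';

import { NODE_ENV, PORT, SENTRY_DSN } from './config';

// Hypothetical entry-point usage of the exported config values.
Sentry.init({ dsn: SENTRY_DSN, environment: NODE_ENV });
console.log(`Starting in ${NODE_ENV} mode on port ${PORT}`);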
222 backend/src/controllers/authController.ts Normal file
@@ -0,0 +1,222 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
import { Request, Response } from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
const jsrp = require('jsrp');
|
||||
import { User } from '../models';
|
||||
import { createToken, issueTokens, clearTokens } from '../helpers/auth';
|
||||
import {
|
||||
NODE_ENV,
|
||||
JWT_AUTH_LIFETIME,
|
||||
JWT_AUTH_SECRET,
|
||||
JWT_REFRESH_SECRET
|
||||
} from '../config';
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
}
|
||||
|
||||
const clientPublicKeys: any = {};
|
||||
|
||||
/**
|
||||
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const login1 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
clientPublicKey
|
||||
}: { email: string; clientPublicKey: string } = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier
|
||||
},
|
||||
() => {
|
||||
// generate server-side public key
|
||||
const serverPublicKey = server.getPublicKey();
|
||||
clientPublicKeys[email] = {
|
||||
clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt)
|
||||
};
|
||||
|
||||
return res.status(200).send({
|
||||
serverPublicKey,
|
||||
salt: user.salt
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to start authentication process'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
 * Log in user step 2: complete step 2 of SRP protocol and return token and the user's (encrypted)
 * private key
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const login2 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email, clientProof } = req.body;
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +publicKey +encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier,
|
||||
b: clientPublicKeys[email].serverBInt
|
||||
},
|
||||
async () => {
|
||||
server.setClientPublicKey(clientPublicKeys[email].clientPublicKey);
|
||||
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
// return (access) token in response
|
||||
return res.status(200).send({
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Log out user
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const logout = async (req: Request, res: Response) => {
|
||||
try {
|
||||
await clearTokens({
|
||||
userId: req.user._id.toString()
|
||||
});
|
||||
|
||||
// clear httpOnly cookie
|
||||
res.cookie('jid', '', {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to logout'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully logged out.'
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
 * Return whether the user is authenticated
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const checkAuth = async (req: Request, res: Response) =>
|
||||
res.status(200).send({
|
||||
message: 'Authenticated'
|
||||
});
|
||||
|
||||
/**
|
||||
* Return new token by redeeming refresh token
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getNewToken = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const refreshToken = req.cookies.jid;
|
||||
|
||||
if (!refreshToken) {
|
||||
throw new Error('Failed to find token in request cookies');
|
||||
}
|
||||
|
||||
const decodedToken = <jwt.UserIDJwtPayload>(
|
||||
jwt.verify(refreshToken, JWT_REFRESH_SECRET)
|
||||
);
|
||||
|
||||
const user = await User.findOne({
|
||||
_id: decodedToken.userId
|
||||
}).select('+publicKey');
|
||||
|
||||
if (!user) throw new Error('Failed to authenticate unfound user');
|
||||
if (!user?.publicKey)
|
||||
throw new Error('Failed to authenticate not fully set up account');
|
||||
|
||||
const token = createToken({
|
||||
payload: {
|
||||
userId: decodedToken.userId
|
||||
},
|
||||
expiresIn: JWT_AUTH_LIFETIME,
|
||||
secret: JWT_AUTH_SECRET
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
token
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Invalid request'
|
||||
});
|
||||
}
|
||||
};
|
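The controller above keeps the refresh token in an httpOnly `jid` cookie and only returns the short-lived access token in the response body. A hypothetical client-side sketch of redeeming the cookie for a new access token (the endpoint path is an assumption):

// Hypothetical: the browser sends the httpOnly `jid` cookie automatically;
// only the returned access token is handled in JavaScript.
const refreshAccessToken = async (): Promise<string> => {
  const res = await fetch('/api/token', {   // endpoint path is an assumption
    method: 'POST',
    credentials: 'include'                  // include the `jid` refresh cookie
  });
  if (!res.ok) throw new Error('Failed to refresh access token');
  const { token } = await res.json();
  return token;
};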
107 backend/src/controllers/botController.ts Normal file
@@ -0,0 +1,107 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Bot, BotKey } from '../models';
import { createBot } from '../helpers/bot';

interface BotKey {
  encryptedKey: string;
  nonce: string;
}

/**
 * Return bot for workspace with id [workspaceId]. If a workspace bot doesn't exist,
 * then create and return a new bot.
 * @param req
 * @param res
 * @returns
 */
export const getBotByWorkspaceId = async (req: Request, res: Response) => {
  let bot;
  try {
    const { workspaceId } = req.params;

    bot = await Bot.findOne({
      workspace: workspaceId
    });

    if (!bot) {
      // case: bot doesn't exist for workspace with id [workspaceId]
      // -> create a new bot and return it
      bot = await createBot({
        name: 'Infisical Bot',
        workspaceId
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get bot for workspace'
    });
  }

  return res.status(200).send({
    bot
  });
};

/**
 * Return bot with id [req.bot._id] with active state set to [isActive].
 * @param req
 * @param res
 * @returns
 */
export const setBotActiveState = async (req: Request, res: Response) => {
  let bot;
  try {
    const { isActive, botKey }: { isActive: boolean, botKey: BotKey } = req.body;

    if (isActive) {
      // bot state set to active -> share workspace key with bot
      if (!botKey?.encryptedKey || !botKey?.nonce) {
        return res.status(400).send({
          message: 'Failed to set bot state to active - missing bot key'
        });
      }

      await BotKey.findOneAndUpdate({
        workspace: req.bot.workspace
      }, {
        encryptedKey: botKey.encryptedKey,
        nonce: botKey.nonce,
        sender: req.user._id,
        bot: req.bot._id,
        workspace: req.bot.workspace
      }, {
        upsert: true,
        new: true
      });
    } else {
      // case: bot state set to inactive -> delete bot's workspace key
      await BotKey.deleteOne({
        bot: req.bot._id
      });
    }

    bot = await Bot.findOneAndUpdate({
      _id: req.bot._id
    }, {
      isActive
    }, {
      new: true
    });

    if (!bot) throw new Error('Failed to update bot active state');

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to update bot active state'
    });
  }

  return res.status(200).send({
    bot
  });
};
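As a rough illustration of what setBotActiveState expects in botKey: the workspace key is encrypted to the bot's public key on the client before being sent, so the server only ever stores ciphertext plus a nonce. The sketch below uses tweetnacl-style public-key box encryption; the library choice, key material, and variable names are assumptions for illustration, not part of this commit.

import nacl from 'tweetnacl';
import { decodeUTF8, encodeBase64 } from 'tweetnacl-util';

// Hypothetical key material; in practice the bot's public key comes from the
// bot returned by getBotByWorkspaceId and the sender's private key stays client-side.
const bot = nacl.box.keyPair();
const sender = nacl.box.keyPair();
const workspaceKey = 'plaintext-workspace-key';

// Encrypt the workspace key to the bot's public key (public-key box).
const nonce = nacl.randomBytes(nacl.box.nonceLength);
const ciphertext = nacl.box(decodeUTF8(workspaceKey), nonce, bot.publicKey, sender.secretKey);

// Shape of the request body accepted by setBotActiveState.
const body = {
  isActive: true,
  botKey: {
    encryptedKey: encodeBase64(ciphertext),
    nonce: encodeBase64(nonce)
  }
};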
35  backend/src/controllers/index.ts  Normal file
@@ -0,0 +1,35 @@
import * as authController from './authController';
import * as botController from './botController';
import * as integrationAuthController from './integrationAuthController';
import * as integrationController from './integrationController';
import * as keyController from './keyController';
import * as membershipController from './membershipController';
import * as membershipOrgController from './membershipOrgController';
import * as organizationController from './organizationController';
import * as passwordController from './passwordController';
import * as secretController from './secretController';
import * as serviceTokenController from './serviceTokenController';
import * as signupController from './signupController';
import * as stripeController from './stripeController';
import * as userActionController from './userActionController';
import * as userController from './userController';
import * as workspaceController from './workspaceController';

export {
  authController,
  botController,
  integrationAuthController,
  integrationController,
  keyController,
  membershipController,
  membershipOrgController,
  organizationController,
  passwordController,
  secretController,
  serviceTokenController,
  signupController,
  stripeController,
  userActionController,
  userController,
  workspaceController
};
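The barrel file above lets route modules pull every controller from a single path. A minimal sketch of how an Express router might wire a few of them up follows; the route paths, file location, and absence of auth middleware are assumptions for illustration, not part of this commit.

import express from 'express';
import { authController, botController, keyController } from '../controllers';

const router = express.Router();

// Hypothetical route wiring; the real paths and middleware live in the route files.
router.post('/token', authController.getNewToken);
router.get('/bot/:workspaceId', botController.getBotByWorkspaceId);
router.get('/key/:workspaceId/latest', keyController.getLatestKey);

export default router;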
104  backend/src/controllers/integrationAuthController.ts  Normal file
@@ -0,0 +1,104 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import axios from 'axios';
import { readFileSync } from 'fs';
import { IntegrationAuth, Integration } from '../models';
import { INTEGRATION_SET, INTEGRATION_OPTIONS, ENV_DEV } from '../variables';
import { IntegrationService } from '../services';
import { getApps, revokeAccess } from '../integrations';

export const getIntegrationOptions = async (
  req: Request,
  res: Response
) => {
  return res.status(200).send({
    integrationOptions: INTEGRATION_OPTIONS
  });
}

/**
 * Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const oAuthExchange = async (
  req: Request,
  res: Response
) => {
  try {
    const { workspaceId, code, integration } = req.body;

    if (!INTEGRATION_SET.has(integration))
      throw new Error('Failed to validate integration');

    await IntegrationService.handleOAuthExchange({
      workspaceId,
      integration,
      code
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get OAuth2 code-token exchange'
    });
  }

  return res.status(200).send({
    message: 'Successfully enabled integration authorization'
  });
};

/**
 * Return list of applications allowed for integration with integration authorization id [integrationAuthId]
 * @param req
 * @param res
 * @returns
 */
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
  let apps;
  try {
    apps = await getApps({
      integrationAuth: req.integrationAuth,
      accessToken: req.accessToken
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get integration authorization applications'
    });
  }

  return res.status(200).send({
    apps
  });
};

/**
 * Delete integration authorization with id [integrationAuthId]
 * @param req
 * @param res
 * @returns
 */
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
  try {
    const { integrationAuthId } = req.params;

    await revokeAccess({
      integrationAuth: req.integrationAuth,
      accessToken: req.accessToken
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete integration authorization'
    });
  }

  return res.status(200).send({
    message: 'Successfully deleted integration authorization'
  });
}
134  backend/src/controllers/integrationController.ts  Normal file
@@ -0,0 +1,134 @@
import { Request, Response } from 'express';
import { readFileSync } from 'fs';
import * as Sentry from '@sentry/node';
import { Integration, Bot, BotKey } from '../models';
import { EventService } from '../services';
import { eventPushSecrets } from '../events';

interface Key {
  encryptedKey: string;
  nonce: string;
}

interface PushSecret {
  ciphertextKey: string;
  ivKey: string;
  tagKey: string;
  hashKey: string;
  ciphertextValue: string;
  ivValue: string;
  tagValue: string;
  hashValue: string;
  type: 'shared' | 'personal';
}

/**
 * Change environment or name of integration with id [integrationId]
 * @param req
 * @param res
 * @returns
 */
export const updateIntegration = async (req: Request, res: Response) => {
  let integration;

  // TODO: add integration-specific validation to ensure that each
  // integration has the correct fields populated in [Integration]

  try {
    const {
      app,
      environment,
      isActive,
      target, // vercel-specific integration param
      context, // netlify-specific integration param
      siteId // netlify-specific integration param
    } = req.body;

    integration = await Integration.findOneAndUpdate(
      {
        _id: req.integration._id
      },
      {
        environment,
        isActive,
        app,
        target,
        context,
        siteId
      },
      {
        new: true
      }
    );

    if (integration) {
      // trigger event - push secrets
      EventService.handleEvent({
        event: eventPushSecrets({
          workspaceId: integration.workspace.toString()
        })
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to update integration'
    });
  }

  return res.status(200).send({
    integration
  });
};

/**
 * Delete integration with id [integrationId] and deactivate bot if there are
 * no integrations left
 * @param req
 * @param res
 * @returns
 */
export const deleteIntegration = async (req: Request, res: Response) => {
  let deletedIntegration;
  try {
    const { integrationId } = req.params;

    deletedIntegration = await Integration.findOneAndDelete({
      _id: integrationId
    });

    if (!deletedIntegration) throw new Error('Failed to find integration');

    const integrations = await Integration.find({
      workspace: deletedIntegration.workspace
    });

    if (integrations.length === 0) {
      // case: no integrations left, deactivate bot
      const bot = await Bot.findOneAndUpdate({
        workspace: deletedIntegration.workspace
      }, {
        isActive: false
      }, {
        new: true
      });

      if (bot) {
        await BotKey.deleteOne({
          bot: bot._id
        });
      }
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete integration'
    });
  }

  return res.status(200).send({
    deletedIntegration
  });
};
99  backend/src/controllers/keyController.ts  Normal file
@@ -0,0 +1,99 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Key } from '../models';
import { findMembership } from '../helpers/membership';
import { PUBLIC_KEY } from '../config';
import { GRANTED } from '../variables';

/**
 * Add (encrypted) copy of workspace key for workspace with id [workspaceId] for user with
 * id [key.userId]
 * @param req
 * @param res
 * @returns
 */
export const uploadKey = async (req: Request, res: Response) => {
  try {
    const { workspaceId } = req.params;
    const { key } = req.body;

    // validate membership of receiver
    const receiverMembership = await findMembership({
      user: key.userId,
      workspace: workspaceId
    });

    if (!receiverMembership) {
      throw new Error('Failed receiver membership validation for workspace');
    }

    receiverMembership.status = GRANTED;
    await receiverMembership.save();

    await new Key({
      encryptedKey: key.encryptedKey,
      nonce: key.nonce,
      sender: req.user._id,
      receiver: key.userId,
      workspace: workspaceId
    }).save();
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to upload key to workspace'
    });
  }

  return res.status(200).send({
    message: 'Successfully uploaded key to workspace'
  });
};

/**
 * Return latest (encrypted) copy of workspace key for user
 * @param req
 * @param res
 * @returns
 */
export const getLatestKey = async (req: Request, res: Response) => {
  let latestKey;
  try {
    const { workspaceId } = req.params;

    // get latest key
    latestKey = await Key.find({
      workspace: workspaceId,
      receiver: req.user._id
    })
      .sort({ createdAt: -1 })
      .limit(1)
      .populate('sender', '+publicKey');
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get latest key'
    });
  }

  const resObj: any = {};

  if (latestKey.length > 0) {
    resObj['latestKey'] = latestKey[0];
  }

  return res.status(200).send(resObj);
};

/**
 * Return public key of Infisical
 * @param req
 * @param res
 * @returns
 */
export const getPublicKeyInfisical = async (req: Request, res: Response) => {
  return res.status(200).send({
    publicKey: PUBLIC_KEY
  });
};
235  backend/src/controllers/membershipController.ts  Normal file
@@ -0,0 +1,235 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Membership, MembershipOrg, User, Key } from '../models';
import {
  findMembership,
  deleteMembership as deleteMember
} from '../helpers/membership';
import { sendMail } from '../helpers/nodemailer';
import { SITE_URL } from '../config';
import { ADMIN, MEMBER, GRANTED, ACCEPTED } from '../variables';

/**
 * Check that user is a member of workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const validateMembership = async (req: Request, res: Response) => {
  try {
    const { workspaceId } = req.params;

    // validate membership
    const membership = await findMembership({
      user: req.user._id,
      workspace: workspaceId
    });

    if (!membership) {
      throw new Error('Failed to validate membership');
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed workspace connection check'
    });
  }

  return res.status(200).send({
    message: 'Workspace membership confirmed'
  });
};

/**
 * Delete membership with id [membershipId]
 * @param req
 * @param res
 * @returns
 */
export const deleteMembership = async (req: Request, res: Response) => {
  let deletedMembership;
  try {
    const { membershipId } = req.params;

    // check if membership to delete exists
    const membershipToDelete = await Membership.findOne({
      _id: membershipId
    }).populate('user');

    if (!membershipToDelete) {
      throw new Error(
        "Failed to delete workspace membership that doesn't exist"
      );
    }

    // check if user is a member and admin of the workspace
    // whose membership we wish to delete
    const membership = await Membership.findOne({
      user: req.user._id,
      workspace: membershipToDelete.workspace
    });

    if (!membership) {
      throw new Error('Failed to validate workspace membership');
    }

    if (membership.role !== ADMIN) {
      // user is not an admin member of the workspace
      throw new Error('Insufficient role for deleting workspace membership');
    }

    // delete workspace membership
    deletedMembership = await deleteMember({
      membershipId: membershipToDelete._id.toString()
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete membership'
    });
  }

  return res.status(200).send({
    deletedMembership
  });
};

/**
 * Change and return workspace membership role
 * @param req
 * @param res
 * @returns
 */
export const changeMembershipRole = async (req: Request, res: Response) => {
  let membershipToChangeRole;
  try {
    const { membershipId } = req.params;
    const { role } = req.body;

    if (![ADMIN, MEMBER].includes(role)) {
      throw new Error('Failed to validate role');
    }

    // validate target membership
    membershipToChangeRole = await findMembership({
      _id: membershipId
    });

    if (!membershipToChangeRole) {
      throw new Error('Failed to find membership to change role');
    }

    // check if user is a member and admin of target membership's
    // workspace
    const membership = await findMembership({
      user: req.user._id,
      workspace: membershipToChangeRole.workspace
    });

    if (!membership) {
      throw new Error('Failed to validate membership');
    }

    if (membership.role !== ADMIN) {
      // user is not an admin member of the workspace
      throw new Error('Insufficient role for changing member roles');
    }

    membershipToChangeRole.role = role;
    await membershipToChangeRole.save();
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to change membership role'
    });
  }

  return res.status(200).send({
    membership: membershipToChangeRole
  });
};

/**
 * Add user with email [email] to workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const inviteUserToWorkspace = async (req: Request, res: Response) => {
  let invitee, latestKey;
  try {
    const { workspaceId } = req.params;
    const { email }: { email: string } = req.body;

    invitee = await User.findOne({
      email
    }).select('+publicKey');

    if (!invitee || !invitee?.publicKey)
      throw new Error('Failed to validate invitee');

    // validate invitee's workspace membership - ensure member isn't
    // already a member of the workspace
    const inviteeMembership = await Membership.findOne({
      user: invitee._id,
      workspace: workspaceId,
      status: GRANTED
    });

    if (inviteeMembership)
      throw new Error('Failed to add existing member of workspace');

    // validate invitee's organization membership - ensure that only
    // (accepted) organization members can be added to the workspace
    const membershipOrg = await MembershipOrg.findOne({
      user: invitee._id,
      organization: req.membership.workspace.organization,
      status: ACCEPTED
    });

    if (!membershipOrg)
      throw new Error("Failed to validate invitee's organization membership");

    // get latest key
    latestKey = await Key.findOne({
      workspace: workspaceId,
      receiver: req.user._id
    })
      .sort({ createdAt: -1 })
      .populate('sender', '+publicKey');

    // create new workspace membership
    const m = await new Membership({
      user: invitee._id,
      workspace: workspaceId,
      role: MEMBER,
      status: GRANTED
    }).save();

    await sendMail({
      template: 'workspaceInvitation.handlebars',
      subjectLine: 'Infisical workspace invitation',
      recipients: [invitee.email],
      substitutions: {
        inviterFirstName: req.user.firstName,
        inviterEmail: req.user.email,
        workspaceName: req.membership.workspace.name,
        callback_url: SITE_URL + '/login'
      }
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to invite user to workspace'
    });
  }

  return res.status(200).send({
    invitee,
    latestKey
  });
};
269  backend/src/controllers/membershipOrgController.ts  Normal file
@@ -0,0 +1,269 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import crypto from 'crypto';
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../config';
import { MembershipOrg, Organization, User, Token } from '../models';
import { deleteMembershipOrg as deleteMemberFromOrg } from '../helpers/membershipOrg';
import { checkEmailVerification } from '../helpers/signup';
import { createToken } from '../helpers/auth';
import { updateSubscriptionOrgQuantity } from '../helpers/organization';
import { sendMail } from '../helpers/nodemailer';
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED } from '../variables';

/**
 * Delete organization membership with id [membershipOrgId] from organization
 * @param req
 * @param res
 * @returns
 */
export const deleteMembershipOrg = async (req: Request, res: Response) => {
  let membershipOrgToDelete;
  try {
    const { membershipOrgId } = req.params;

    // check if organization membership to delete exists
    membershipOrgToDelete = await MembershipOrg.findOne({
      _id: membershipOrgId
    }).populate('user');

    if (!membershipOrgToDelete) {
      throw new Error(
        "Failed to delete organization membership that doesn't exist"
      );
    }

    // check if user is a member and admin of the organization
    // whose membership we wish to delete
    const membershipOrg = await MembershipOrg.findOne({
      user: req.user._id,
      organization: membershipOrgToDelete.organization
    });

    if (!membershipOrg) {
      throw new Error('Failed to validate organization membership');
    }

    if (membershipOrg.role !== OWNER && membershipOrg.role !== ADMIN) {
      // user is not an admin member of the organization
      throw new Error('Insufficient role for deleting organization membership');
    }

    // delete organization membership
    const deletedMembershipOrg = await deleteMemberFromOrg({
      membershipOrgId: membershipOrgToDelete._id.toString()
    });

    await updateSubscriptionOrgQuantity({
      organizationId: membershipOrg.organization.toString()
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete organization membership'
    });
  }

  return membershipOrgToDelete;
};

/**
 * Change and return organization membership role
 * @param req
 * @param res
 * @returns
 */
export const changeMembershipOrgRole = async (req: Request, res: Response) => {
  // change role for (target) organization membership with id
  // [membershipOrgId]

  // TODO

  let membershipToChangeRole;
  try {
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to change organization membership role'
    });
  }

  return res.status(200).send({
    membershipOrg: membershipToChangeRole
  });
};

/**
 * Organization invitation step 1: Send email invitation to user with email [email]
 * for organization with id [organizationId] containing magic link
 * @param req
 * @param res
 * @returns
 */
export const inviteUserToOrganization = async (req: Request, res: Response) => {
  let invitee, inviteeMembershipOrg;
  try {
    const { organizationId, inviteeEmail } = req.body;

    // validate membership
    const membershipOrg = await MembershipOrg.findOne({
      user: req.user._id,
      organization: organizationId
    });

    if (!membershipOrg) {
      throw new Error('Failed to validate organization membership');
    }

    invitee = await User.findOne({
      email: inviteeEmail
    });

    if (invitee) {
      // case: invitee is an existing user
      inviteeMembershipOrg = await MembershipOrg.findOne({
        user: invitee._id,
        organization: organizationId
      });

      if (inviteeMembershipOrg && inviteeMembershipOrg.status === ACCEPTED) {
        throw new Error(
          'Failed to invite an existing member of the organization'
        );
      }

      if (!inviteeMembershipOrg) {
        await new MembershipOrg({
          user: invitee,
          inviteEmail: inviteeEmail,
          organization: organizationId,
          role: MEMBER,
          status: invitee?.publicKey ? ACCEPTED : INVITED
        }).save();
      }
    } else {
      // check if invitee has been invited before
      inviteeMembershipOrg = await MembershipOrg.findOne({
        inviteEmail: inviteeEmail,
        organization: organizationId
      });

      if (!inviteeMembershipOrg) {
        // case: invitee has never been invited before

        await new MembershipOrg({
          inviteEmail: inviteeEmail,
          organization: organizationId,
          role: MEMBER,
          status: INVITED
        }).save();
      }
    }

    const organization = await Organization.findOne({ _id: organizationId });

    if (organization) {
      const token = crypto.randomBytes(16).toString('hex');

      await Token.findOneAndUpdate(
        { email: inviteeEmail },
        {
          email: inviteeEmail,
          token,
          createdAt: new Date()
        },
        { upsert: true, new: true }
      );

      await sendMail({
        template: 'organizationInvitation.handlebars',
        subjectLine: 'Infisical organization invitation',
        recipients: [inviteeEmail],
        substitutions: {
          inviterFirstName: req.user.firstName,
          inviterEmail: req.user.email,
          organizationName: organization.name,
          email: inviteeEmail,
          token,
          callback_url: SITE_URL + '/signupinvite'
        }
      });
    }

    await updateSubscriptionOrgQuantity({ organizationId });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to send organization invite'
    });
  }

  return res.status(200).send({
    message: `Sent an invite link to ${req.body.inviteeEmail}`
  });
};

/**
 * Organization invitation step 2: Verify that code [code] was sent to email [email] as part of
 * magic link and issue a temporary signup token for user to complete setting up their account
 * @param req
 * @param res
 * @returns
 */
export const verifyUserToOrganization = async (req: Request, res: Response) => {
  let user, token;
  try {
    const { email, code } = req.body;

    user = await User.findOne({ email }).select('+publicKey');
    if (user && user?.publicKey) {
      // case: user has already completed account
      return res.status(403).send({
        error: 'Failed email magic link verification for complete account'
      });
    }

    const membershipOrg = await MembershipOrg.findOne({
      inviteEmail: email,
      status: INVITED
    });

    if (!membershipOrg)
      throw new Error('Failed to find any invitations for email');

    await checkEmailVerification({
      email,
      code
    });

    if (!user) {
      // initialize user account
      user = await new User({
        email
      }).save();
    }

    // generate temporary signup token
    token = createToken({
      payload: {
        userId: user._id.toString()
      },
      expiresIn: JWT_SIGNUP_LIFETIME,
      secret: JWT_SIGNUP_SECRET
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed email magic link verification for organization invitation'
    });
  }

  return res.status(200).send({
    message: 'Successfully verified email',
    user,
    token
  });
};
Some files were not shown because too many files have changed in this diff.