mirror of https://github.com/Infisical/infisical.git synced 2025-03-21 09:39:30 +00:00

Compare commits

..

62 Commits

Author SHA1 Message Date
4f5e9f7be1 misc: removed minimum requirements for kms description 2024-07-25 00:15:47 +08:00
2d1a553f14 misc: addressed minor kms issues 2024-07-24 23:45:38 +08:00
b4ae9b7299 feat: resolved bug reported on search and integration failing due to typo in integration field 2024-07-24 19:42:29 +05:30
e83a56735a feat: added test case for secret v2 with raw endpoints 2024-07-24 15:38:45 +05:30
5564461294 feat: resolved failing testcases 2024-07-24 13:31:38 +05:30
857de5c4b5 feat: lint fix 2024-07-24 01:07:41 +05:30
e170062969 feat: updated kms service to return only kms details and some more minor changes 2024-07-24 00:56:23 +05:30
20cfee087b feat: fixed secret approval for architecture v2 2024-07-23 22:35:06 +05:30
6b37a353e6 feat: secret v2 architecture for secret rotation 2024-07-23 22:35:06 +05:30
e9389d9b05 feat: testing v2 architecture changes and corrections as needed 2024-07-23 22:35:06 +05:30
2c1ee52dea feat: ui removed all private key except secret rotation to raw endpoints version 2024-07-23 22:35:05 +05:30
8255d71711 feat: resolved concurrent bug with kms management 2024-07-23 22:35:05 +05:30
77e7ab9013 feat: resolved all ts issues on router schema and other functions 2024-07-23 22:34:07 +05:30
86daf2be00 feat: all the services are now working with secrets v2 architecture 2024-07-23 22:34:07 +05:30
a616f16bad checkpoint 2024-07-23 22:34:07 +05:30
1b933f4e70 feat: added bridge logic in secret replication, snapshot and approval for raw endpoint 2024-07-23 22:34:07 +05:30
66156129e7 feat: migration updated for secret v2 snapshot and secret approval 2024-07-23 22:33:17 +05:30
fca21a0b97 feat: added kms encryption and decryption secret bridge 2024-07-23 22:33:17 +05:30
98e155498b feat: created base for secret v2 bridge and plugged it to secret-router 2024-07-23 22:33:17 +05:30
cd089649d7 feat: added new secret v2 data structures 2024-07-23 22:33:17 +05:30
8b4a100709 Merge pull request from Infisical/feat/integrate-external-kms
feat: external kms integration
2024-07-23 22:31:12 +05:30
da8483cff6 misc: made kms description optional 2024-07-22 18:39:27 +08:00
d047be499b doc: added aws permission setup doc for kms 2024-07-22 18:34:09 +08:00
3d30271ab3 Merge pull request from Infisical/doc/external-kms
doc: initial docs for kms
2024-07-22 17:27:14 +08:00
9f73c77624 doc: initial docs for kms 2024-07-22 17:25:50 +08:00
5b7afea3f5 misc: made kms hook generic 2024-07-22 14:17:07 +08:00
fe9318cf8d misc: renamed project method 2024-07-20 02:56:04 +08:00
c5a9f36a0c misc: removed kms from service 2024-07-20 02:52:34 +08:00
5bd6a193f4 misc: created abstraction for get kms by id 2024-07-20 02:33:16 +08:00
9ac17718b3 misc: modified design of advanced settings 2024-07-20 02:07:35 +08:00
b9f35d16a5 misc: finalized project backup prompts 2024-07-20 01:25:00 +08:00
5e9929a9d5 misc: added empty metadata 2024-07-20 01:23:54 +08:00
c2870dffcd misc: added ability for users to select KMS during project creation 2024-07-20 00:47:48 +08:00
e9ee38fb54 misc: modified modal text 2024-07-19 19:45:17 +08:00
9d88caf66b misc: addressed type issue with audit log 2024-07-19 19:43:07 +08:00
7ef4b68503 feat: load project kms backup 2024-07-19 19:37:25 +08:00
d2456b5bd8 misc: added UI for load backup 2024-07-19 17:09:14 +08:00
1b64cdf09c misc: added audit logs for kms backup and other minor edits 2024-07-19 02:05:53 +08:00
73a00df439 misc: developed create kms backup feature 2024-07-19 01:31:13 +08:00
9f87689a8f misc: made project key and data key creation concurrency safe 2024-07-18 22:44:32 +08:00
5d6bbdfd24 misc: made org key and data key concurrency safe 2024-07-18 22:06:07 +08:00
f1b5e6104c misc: finalized switching of project KMS 2024-07-18 20:50:31 +08:00
73a552d60c Merge pull request from akhilmhdh/feat/ui-secret-raw
Migrated secret endpoints to raw endpoints in ui
2024-07-18 03:24:36 +08:00
0f7e055981 misc: partial project kms switch 2024-07-18 03:00:43 +08:00
2045305127 Merge branch 'secret-engine-v2-bridge' into feat/integrate-external-kms 2024-07-18 00:22:18 +08:00
6b6f8f5523 Merge pull request from Infisical/feat/add-project-data-key
feat: added project data key
2024-07-18 00:20:36 +08:00
9860d15d33 Merge branch 'feat/add-project-data-key' into feat/integrate-external-kms 2024-07-17 23:42:06 +08:00
166de417f1 feat: added project data key 2024-07-17 23:19:32 +08:00
65f416378a misc: changed order of aws validate connection and creation 2024-07-17 15:11:13 +08:00
de0b179b0c misc: added audit logs for external kms 2024-07-17 13:39:47 +08:00
8b0c62fbdb misc: added license checks for external kms management 2024-07-17 13:04:26 +08:00
0d512f041f misc: migrated to dedicated org permissions for kms management 2024-07-17 12:43:55 +08:00
eb03fa4d4e misc: minor UI updates 2024-07-17 00:40:54 +08:00
0a7a9b6c37 feat: finalized kms settings in org-level 2024-07-16 21:29:20 +08:00
a1bfbdf32e misc: modified encryption/decryption of external kms config 2024-07-16 15:52:29 +08:00
df0c11471b fix: resolving undefined secret key 2024-07-16 12:53:08 +05:30
a07983ddc8 Merge remote-tracking branch 'akhilmhdh/feat/aws-kms-sm' into feat/integrate-external-kms 2024-07-16 14:19:51 +08:00
b9d5330db6 Merge remote-tracking branch 'akhilmhdh/feat/aws-kms-sm' into feat/integrate-external-kms 2024-07-16 14:19:18 +08:00
538ca972e6 misc: connected aws add kms 2024-07-16 14:17:54 +08:00
9cce604ca8 feat: added initial aws form 2024-07-16 02:21:14 +08:00
2bfecdb095 refactor(ui): migrated secret endpoints of e2ee to raw 2024-07-15 23:15:37 +05:30
09dfdd93d1 feat: made raw secret endpoints and normal e2ee ones to be same functionality 2024-07-15 23:13:03 +05:30
3500 changed files with 79070 additions and 223749 deletions
.env.example
.env.migration.example
.github
.gitignore
.husky
.infisicalignore
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
README.md
backend
Dockerfile
Dockerfile.dev
e2e-test
package-lock.json
package.json
scripts
src
@types
auto-start-migrations.ts
db
auditlog-knexfile.ts
index.ts
instance.ts
knexfile.ts
manual-migrations
migrations
20231222172455_integration.ts
20240702131735_secret-approval-groups.ts
20240716063111_add-org-kms-data-key.ts
20240716105646_secret-v2.ts
20240717114407_add-project-data-key.ts
20240717184929_add-enforcement-level-secrets-policies.ts
20240717194958_add-enforcement-level-access-policies.ts
20240718170955_add-access-secret-sharing.ts
20240719182539_add-bypass-reason-secret-approval-requets.ts
20240724101056_access-request-groups.ts
20240728010334_secret-sharing-name.ts
20240802181855_ca-cert-version.ts
20240806083221_secret-sharing-password.ts
20240806113425_remove-creation-limit-rate-limit.ts
20240806185442_drop-tag-name.ts
20240818024923_cert-alerting.ts
20240818184238_add-certificate-template.ts
20240819092916_certificate-template-est-configuration.ts
20240821212643_crl-ca-secret-binding.ts
20240830142938_native-slack-integration.ts
20240909145938_cert-template-enforcement.ts
20240910070128_add-pki-key-usages.ts
20240918005344_add-group-approvals.ts
20240924100329_identity-metadata.ts
20240925100349_managed-secret-sharing.ts
20240930072738_add-oidc-auth-enforced-to-org.ts
20241003220151_kms-key-cmek-alterations.ts
20241005170802_kms-keys-temp-slug-col.ts
20241007052025_make-audit-log-independent.ts
20241007202149_default-org-membership-roles.ts
20241008172622_project-permission-split.ts
20241014084900_identity-multiple-auth-methods.ts
20241015084434_increase-identity-metadata-col-length.ts
20241015145450_external-group-org-role-mapping.ts
20241016183616_add-org-enforce-mfa.ts
20241021114650_add-missing-org-cascade-references.ts
20241101174939_project-templates.ts
20241107112632_skip-bootstrap-cert-validation-est.ts
20241110032223_add-missing-oidc-org-cascade-reference.ts
20241111175154_kms-root-cfg-hsm.ts
20241112082701_add-totp-support.ts
20241119143026_add-project-descripton.ts
20241121131344_make-identity-metadata-not-nullable-again.ts
20241203165840_allow-disabling-approval-workflows.ts
20241209144123_add-identity-jwt-auth.ts
20241213122320_add-index-for-secret-version-v2-folder.ts
20241213122350_project-split-to-products.ts
20241216013357_ssh-mgmt.ts
20241218165837_resource-metadata.ts
20241218181018_app-connection.ts
20250115222458_groups-unique-name.ts
20250116092245_add-enforce-capitalization-project-flag.ts
20250122055102_secret-sync.ts
20250129214629_oidc-configs-manage-group-memberships-col.ts
20250203141127_add-kmip.ts
20250204025010_app-connections-and-secret-syncs-unique-constraint.ts
20250205045509_increase-gcp-auth-limit.ts
20250205220952_kms-keys-drop-slug-col.ts
20250207002643_secret-syncs-increase-message-length.ts
20250210101840_webhook-to-kms.ts
20250210101841_dynamic-secret-root-to-kms.ts
20250210101841_secret-rotation-to-kms.ts
20250210101842_identity-k8-auth-to-kms.ts
20250210101842_identity-oidc-auth-to-kms.ts
20250210101845_directory-config-to-kms.ts
20250212191958_create-gateway.ts
20250226021631_secret-requests.ts
20250226082254_add-gov-banner-and-consent-fields.ts
20250228022604_increase-secret-reminder-note-max-length.ts
20250303213350_add-folder-description.ts
20250305080145_add-secret-review-comment.ts
20250305131152_add-actor-id-to-secret-versions-v2.ts
utils
rename-migrations-to-mjs.ts
schemas
seeds
utils.ts
ee
routes
services
access-approval-policy
access-approval-request
audit-log-stream
audit-log
certificate-authority-crl
certificate-est
dynamic-secret-lease
dynamic-secret
external-kms
gateway
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
kmip
ldap-config
license
oidc
permission
project-template
project-user-additional-privilege
rate-limit
saml-config
scim
secret-approval-policy
secret-approval-request
secret-replication
secret-rotation
secret-scanning
secret-snapshot
ssh-certificate-template
ssh-certificate
ssh
trusted-ip
keystore
lib
main.ts
queue
server
app.ts
boot-strap-check.ts
config
lib
plugins
routes
index.ts
sanitizedSchema
sanitizedSchemas.ts
v1
v2
v3
services
access-token-queue
api-key
app-connection
auth-token
auth
certificate-authority
certificate-template
certificate
cmek
external-group-org-role-mapping
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
kms
org-admin
org-membership
org
pki-alert
pki-collection
project-bot
project-env
project-key
project-membership
project-role
project
resource-cleanup
resource-metadata
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-sync
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
telemetry
totp
user-engagement
user
webhook
workflow-integration
tsconfig.json
vitest.e2e.config.ts
cli
company
docker-compose.dev.yml
docker-compose.prod.yml
docker-swarm
docs
api-reference
endpoints
app-connections
audit-logs
certificate-authorities
certificate-templates
certificates
dynamic-secrets
groups
jwt-auth
kms/keys
pki-alerts
pki-collections
project-groups
project-roles
project-templates
secret-syncs
secrets
ssh
overview
changelog
cli
contributing/platform
documentation
guides
platform
setup
images
app-connections
guides/import-envkey
integrations
aws
azure-app-configuration
azure-devops
azure-key-vault
bitbucket
circleci
cloudflare
databricks
github
octopus-deploy
mfa-authenticator.png
mfa-email.png
platform
access-controls
admin-panels
dynamic-secrets
add-dynamic-secret-button.png
advanced-option-atlas.png
dynamic-secret-ad-add-assignments.png
dynamic-secret-ad-add-client-secret.png
dynamic-secret-ad-add-permission.png
dynamic-secret-ad-admin-consent.png
dynamic-secret-ad-copy-app-id.png
dynamic-secret-ad-lease.png
dynamic-secret-ad-modal.png
dynamic-secret-ad-new-registration.png
dynamic-secret-ad-select-graph.png
dynamic-secret-ad-select-perms.png
dynamic-secret-ad-show-more.png
dynamic-secret-ad-tenant-id.png
dynamic-secret-ad-user-admin.png
dynamic-secret-atlas-modal.png
dynamic-secret-generate-redis.png
dynamic-secret-input-modal-elastic-search.png
dynamic-secret-input-modal-rabbit-mq.png
dynamic-secret-ldap-lease.png
dynamic-secret-ldap-select.png
dynamic-secret-lease-empty-redis.png
dynamic-secret-lease-renew.png
dynamic-secret-modal-atlas.png
dynamic-secret-modal-aws-elasti-cache.png
dynamic-secret-modal-elastic-search.png
dynamic-secret-modal-mongodb.png
dynamic-secret-modal-redis.png
dynamic-secret-modal-sap-hana.png
dynamic-secret-modal-totp.png
dynamic-secret-mongodb.png
dynamic-secret-rabbit-mq-modal.png
dynamic-secret-setup-modal-sap-hana.png
dynamic-secret-setup-modal-totp-manual.png
dynamic-secret-setup-modal-totp-url.png
lease-data.png
lease-values.png
modify-elastic-search-statement.png
modify-elasticache-statement.png
modify-redis-statement.png
modify-sap-hana-sql-statements.png
provision-lease.png
sap-ase
snowflake
totp-lease-value.png
gateways
identities
kms
mfa/entra
organization
pki
pr-workflows
project-templates
scim
secret-sharing
ssh
workflow-integrations/slack-integration
secret-syncs
self-hosting/deployment-options/native
sso
integrations
internals
mint.json
sdks
self-hosting
frontend
.dockerignore
.eslintrc.js
.gitignore
.prettierrc
.storybook
Dockerfile
Dockerfile.dev
README.md
cypress.config.js
cypress
eslint.config.js
index.html
next-env.d.ts
next.config.js
package-lock.json
package.json
postcss.config.js
public
scripts
src
components
AddTagPopoverContent
RouteGuard.tsx
analytics
auth
basic
dashboard
features
integrations
mfa
navigation
notifications
page-frames
permissions
projects
secret-syncs
CreateSecretSyncModal.tsx
DeleteSecretSyncModal.tsx
EditSecretSyncModal.tsx
SecretSyncImportSecretsModal.tsx
SecretSyncImportStatusBadge.tsx
SecretSyncLabel.tsx
SecretSyncModalHeader.tsx
SecretSyncRemoveSecretsModal.tsx
SecretSyncRemoveStatusBadge.tsx
SecretSyncSelect.tsx
SecretSyncStatusBadge.tsx
forms
github
index.ts
types
secrets/SecretReferenceDetails
signup
tags/CreateTagModal
utilities
v2
config
const.ts
const
context
ee
global.d.ts
helpers
hoc
withPermission
withProjectPermission
hooks
api
accessApproval
admin
apiKeys
appConnections
auditLogStreams
auditLogs
auth
bots
ca
certificateTemplates
certificates
cmeks
dashboard
dynamicSecret
dynamicSecretLease
externalGroupOrgRoleMappings
gateways
generic
groups
identities
identityProjectAdditionalPrivilege
incidentContacts
index.tsx
integrationAuth
integrations
keys
kmip
kms
ldapConfig
migration
oidcConfig
orgAdmin
organization
pkiAlerts
pkiCollections
policies
projectTemplates
projectUserAdditionalPrivilege
rateLimit
reactQuery.tsx
roles
scim
secretApproval
secretApprovalRequest
secretFolders
secretImports
secretRotation
secretScanning
secretSharing
secretSnapshots
secretSyncs
secrets
serviceTokens
sshCa
sshCertificateTemplates
ssoConfig
subscriptions
tags
trustedIps
types.ts
userEngagement
users
webhooks
workflowIntegrations
workspace
index.ts
useDebounce.tsx
useLeaveConfirm.tsx
usePagination.tsx
usePersistentState.ts
usePopUp.tsx
useResetPageHelper.ts
useTimedReset.tsx
useToggle.tsx
utils
i18n.ts
layouts
lib
main.tsx
pages
404.tsx
_app.tsx
admin
api
auth
CliRedirectPage
EmailNotVerifiedPage
LoginLdapPage
LoginPage
LoginSsoPage
PasswordResetPage
PasswordSetupPage
ProviderErrorPage
ProviderSuccessPage
RequestNewInvitePage
SelectOrgPage
SignUpInvitePage
SignUpPage
SignUpSsoPage
VerifyEmailPage
cert-manager
cli-redirect.tsx
dashboard.tsx
email-not-verified.tsx
index.tsx
integrations
kms
login
middlewares
org
[id]
billing
identities/[identityId]
members
overview
secret-scanning
secret-sharing
settings
none
organization
AccessManagementPage
AdminPage
AppConnections
AuditLogsPage
BillingPage
BillingPage.tsx
components
BillingDetailsTab
BillingTabGroup
route.tsx
CertManagerOverviewPage
Gateways/GatewayListPage
GroupDetailsByIDPage
IdentityDetailsByIDPage
KmsOverviewPage
NoOrgPage
RoleByIDPage
SecretManagerOverviewPage
SecretScanningPage
SecretSharingPage
SettingsPage
SshOverviewPage
UserDetailsByIDPage
layout.tsx
password-reset.tsx
personal-settings.tsx
project
AccessControlPage
IdentityDetailsByIDPage
MemberDetailsByIDPage
RoleDetailsBySlugPage
[id]
allowlist
approval
audit-logs
certificates
members
secret-rotation
secrets
settings
public
requestnewinvite.tsx
root.tsx
secret-manager
IPAllowlistPage
IntegrationsDetailsByIDPage
IntegrationsListPage
OverviewPage
SecretApprovalsPage
SecretDashboardPage
SecretRotationPage
SecretSyncDetailsByIDPage
SettingsPage
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
route-azure-app-configurations-oauth-redirect.tsx
route-azure-key-vault-oauth-redirect.tsx
route-bitbucket-oauth-redirect.tsx
route-gcp-oauth-redirect.tsx
route-github-oauth-redirect.tsx
route-gitlab-oauth-redirect.tsx
route-heroku-oauth-redirect.tsx
route-netlify-oauth-redirect.tsx
route-vercel-oauth-redirect.tsx
layout.tsx
secret-scanning.tsx
share-secret
shared/secret/[id]
signup
signupinvite.tsx
ssh
user
PersonalSettingsPage
layout.tsx
verify-email.tsx
reactQuery.tsx
routeTree.gen.ts
routes.ts
services
styles
types
views
IntegrationsPage
Login
Org
Project
AuditLogsPage
CertificatesPage
IPAllowListPage
MembersPage
SecretApprovalPage
SecretMainPage
SecretOverviewPage
SecretRotationPage
SecretScanning/components
Settings
BillingSettingsPage
OrgSettingsPage
PersonalSettingsPage
ProjectSettingsPage
ShareSecretPage
ShareSecretPublicPage
Signup
admin
vite-env.d.ts
tsconfig.app.json
tsconfig.json
tsconfig.node.json
tsconfig.tsbuildinfo
tsr.config.json
vite.config.ts
helm-charts
k8-operator
nginx
npm
otel-collector-config.yaml
package-lock.json
package.json
prometheus.dev.yml
sink
standalone-entrypoint.sh

@@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@@ -37,22 +36,16 @@ CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=
CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=
# Sentry (optional) for monitoring errors
SENTRY_DSN=
@@ -75,48 +68,5 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
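
These variables are parsed by the backend at startup. As a rough sketch of how a few of the new entries could be validated (assuming a zod-style schema in TypeScript; the variable names mirror the entries above, while the defaults and optionality are guesses rather than the backend's actual rules):

import { z } from "zod";

// Hypothetical subset of an env schema; variable names are copied from the
// .env.example entries above, everything else is illustrative.
const envSchema = z.object({
  OTEL_TELEMETRY_COLLECTION_ENABLED: z
    .enum(["true", "false"])
    .default("false")
    .transform((v) => v === "true"),
  OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).default("prometheus"),
  INF_APP_CONNECTION_AWS_ACCESS_KEY_ID: z.string().optional(),
  DATADOG_SERVICE: z.string().default("infisical-core") // default is a guess
});

export const env = envSchema.parse(process.env);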

@@ -1,2 +1 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=

@@ -6,7 +6,6 @@
- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation

@@ -7,12 +7,14 @@ on:
description: "Version number"
required: true
type: string
defaults:
run:
working-directory: ./backend
jobs:
build-and-deploy:
runs-on: ubuntu-20.04
strategy:
matrix:
arch: [x64, arm64]
@@ -22,7 +24,6 @@ jobs:
target: node20-linux
- os: win
target: node20-win
runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}
steps:
- name: Checkout code
@@ -48,9 +49,9 @@ jobs:
- name: Package into node binary
run: |
if [ "${{ matrix.os }}" != "linux" ]; then
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
else
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
fi
# Set up .deb package structure (Debian/Ubuntu only)
@@ -82,93 +83,8 @@ jobs:
dpkg-deb --build infisical-core
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
### RPM
# Set up .rpm package structure
- name: Set up .rpm package structure
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core-rpm/usr/local/bin
cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
chmod +x infisical-core-rpm/usr/local/bin/infisical-core
# Install RPM build tools
- name: Install RPM build tools
if: matrix.os == 'linux'
run: sudo apt-get update && sudo apt-get install -y rpm
# Create .spec file for RPM
- name: Create .spec file for RPM
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core.spec
%global _enable_debug_package 0
%global debug_package %{nil}
%global __os_install_post /usr/lib/rpm/brp-compress %{nil}
Name: infisical-core
Version: ${{ github.event.inputs.version }}
Release: 1%{?dist}
Summary: Infisical Core standalone executable
License: Proprietary
URL: https://app.infisical.com
%description
Infisical Core standalone executable (app.infisical.com)
%install
mkdir -p %{buildroot}/usr/local/bin
cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
%files
/usr/local/bin/infisical-core
%pre
%post
%preun
%postun
EOF
# Build .rpm file
- name: Build .rpm package
if: matrix.os == 'linux'
run: |
# Create necessary directories
mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
# Copy the binary directly to SOURCES
cp ./binary/infisical-core rpmbuild/SOURCES/
# Run rpmbuild with verbose output
rpmbuild -vv -bb \
--define "_topdir $(pwd)/rpmbuild" \
--define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
--define "_rpmdir $(pwd)/rpmbuild/RPMS" \
--target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
infisical-core.spec
# Try to find the RPM file
find rpmbuild -name "*.rpm"
# Move the RPM file if found
if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
else
echo "RPM file not found!"
exit 1
fi
- uses: actions/setup-python@v4
with:
python-version: "3.x" # Specify the Python version you need
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade cloudsmith-cli
- run: pip install --upgrade cloudsmith-cli
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
- name: Publish to Cloudsmith (Debian/Ubuntu)
@@ -176,12 +92,6 @@ jobs:
working-directory: ./backend
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
# Publish .rpm file to Cloudsmith (Red Hat-based systems only)
- name: Publish .rpm to Cloudsmith
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm
# Publish .exe file to Cloudsmith (Windows only)
- name: Publish to Cloudsmith (Windows)
if: matrix.os == 'win'

@@ -0,0 +1,154 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
jobs:
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true
production-postgres-deployment:
name: Deploy to production
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true

@@ -22,33 +22,20 @@ jobs:
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 📦Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -56,39 +43,27 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: "1.21.5"
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
sleep 5
SECONDS=$((SECONDS+5))
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
@@ -96,8 +71,7 @@ jobs:
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api || true
docker rm infisical-api || true
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api

@@ -18,18 +18,18 @@ jobs:
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "20"
node-version: "16"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Lint check
run: npm run lint:fix
run: npm run lint:fix
working-directory: frontend

@@ -1,4 +1,4 @@
name: Release Infisical Core Helm chart
name: Release Helm Charts
on: [workflow_dispatch]
@@ -17,6 +17,6 @@ jobs:
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
run: cd helm-charts && sh upload-to-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -1,115 +1,62 @@
name: Release standalone docker image
on:
push:
tags:
- "infisical/v*.*.*-postgres"
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-fips-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical-fips:latest-postgres
infisical/infisical-fips:${{ steps.commit.outputs.short }}
infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.fips.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

@@ -10,7 +10,8 @@ on:
permissions:
contents: write
# packages: write
# issues: write
jobs:
cli-integration-tests:
name: Run tests before deployment
@@ -25,65 +26,8 @@ jobs:
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
@@ -103,12 +47,11 @@ jobs:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross

@@ -1,4 +1,4 @@
name: Release image + Helm chart K8s Operator
name: Release Docker image for K8 operator
on:
push:
tags:
@@ -35,18 +35,3 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -20,7 +20,7 @@ jobs:
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
@@ -33,7 +33,7 @@ jobs:
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
@@ -44,4 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker-compose -f "docker-compose.dev.yml" down

@@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

.gitignore vendored

@@ -63,7 +63,6 @@ yarn-error.log*
# Editor specific
.vscode/*
.idea/*
frontend-build
@@ -71,5 +70,3 @@ frontend-build
cli/infisical-merge
cli/test/infisical-merge
/backend/binary
/npm/bin

@@ -1,12 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
exit 1
fi
npx lint-staged
infisical scan git-changes --staged -v

@@ -6,5 +6,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104

@@ -1,184 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .
ENV NODE_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
# Production image
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
USER non-root-user
##
## BACKEND
##
FROM base AS backend-build
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build
# Production stage
FROM base AS backend-runner
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY --from=backend-build /app .
RUN mkdir frontend-build
# Production stage
FROM base AS production
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
git \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /backend
ENV TELEMETRY_ENABLED true
EXPOSE 8080
EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
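
The FreeTDS entry written to /etc/odbcinst.ini above is the driver that the SAP ASE dynamic-secret functionality reaches through ODBC. A hypothetical connectivity check using the node `odbc` package (the driver name must match the odbcinst.ini section; server, port, and credentials are placeholders, not values from this repo):

import odbc from "odbc";

// "Driver=FreeTDS" refers to the section name written to /etc/odbcinst.ini.
const connectionString =
  "Driver=FreeTDS;Server=sap-ase.internal;Port=5000;UID=app;PWD=example;";

async function ping() {
  const connection = await odbc.connect(connectionString);
  const rows = await connection.query("SELECT 1 AS ok");
  console.log(rows); // e.g. [ { ok: 1 } ]
  await connection.close();
}

ping().catch(console.error);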

@@ -3,13 +3,16 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM node:20-alpine AS base
FROM base AS frontend-dependencies
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@@ -24,16 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -42,35 +46,32 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
WORKDIR /app
# Install all required dependencies for build
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -84,21 +85,6 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -108,37 +94,9 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev \
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user
RUN apk add --upgrade --no-cache ca-certificates
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -150,17 +108,21 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
@@ -168,7 +130,6 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend
ENV TELEMETRY_ENABLED true

@@ -10,26 +10,8 @@ up-dev:
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-dev-metrics:
docker compose -f docker-compose.dev.yml --profile metrics up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
down:
docker compose -f docker-compose.dev.yml down
reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check
reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check
reviewable: reviewable-ui reviewable-api
up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build

README.md

File diff suppressed because one or more lines are too long

@@ -1,23 +1,8 @@
# Build stage
FROM node:20-slim AS build
FROM node:20-alpine AS build
WORKDIR /app
# Required for pkcs11js
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev
COPY package*.json ./
RUN npm ci --only-production
@@ -25,36 +10,20 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-slim
FROM node:20-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apt-get update && apt-get install -y \
python3 \
make \
g++
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.8.1 git
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
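
The HEALTHCHECK above shells out to healthcheck.js, which is not part of this diff. A minimal sketch of such a probe, assuming the server answers on the container's HTTP port with a status route (the port and the /api/status path are assumptions):

import http from "node:http";

// Hypothetical probe: exit 0 on HTTP 200 so Docker marks the container healthy,
// non-zero on anything else (error, timeout, bad status).
const req = http.get({ host: "127.0.0.1", port: 8080, path: "/api/status", timeout: 2000 }, (res) => {
  res.resume(); // drain the body; only the status code matters
  process.exit(res.statusCode === 200 ? 0 : 1);
});
req.on("timeout", () => req.destroy(new Error("health probe timed out")));
req.on("error", () => process.exit(1));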

@@ -1,62 +1,8 @@
FROM node:20-slim
FROM node:20-alpine
# ? Setup a test SoftHSM module. In production a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# Install build dependencies including python3 (required for pkcs11js and parts of the TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
WORKDIR /app
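
With the token initialised above (label "auth-app", user PIN 1234), a quick way to confirm the SoftHSM module loads is to open a session through pkcs11js, the library these build dependencies exist for. A sketch; the module path assumes the --prefix=/usr/local passed to configure:

import * as pkcs11js from "pkcs11js";

const pkcs11 = new pkcs11js.PKCS11();
pkcs11.load("/usr/local/lib/softhsm/libsofthsm2.so"); // path assumed from --prefix=/usr/local

pkcs11.C_Initialize();
try {
  const [slot] = pkcs11.C_GetSlotList(true); // only slots with a token present
  const session = pkcs11.C_OpenSession(slot, pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION);
  pkcs11.C_Login(session, pkcs11js.CKU_USER, "1234"); // user PIN set by softhsm2-util above
  console.log("SoftHSM token is reachable");
  pkcs11.C_Logout(session);
  pkcs11.C_CloseSession(session);
} finally {
  pkcs11.C_Finalize();
}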

@@ -10,22 +10,17 @@ export const mockQueue = (): TQueueServiceFactory => {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
stopRepeatableJobByJobId: async () => true
};
};
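
Because the factory only records handlers and payloads in local maps, tests can drive queued work synchronously instead of waiting on Redis. A short usage sketch (the job name and payload are illustrative):

const queueService = mockQueue();

// start() stores the handler in the in-memory maps rather than spawning a worker...
queueService.start("secret-replication", async (jobData) => {
  // handler body that a test can invoke directly
});

// ...and queue() merely records the payload; nothing is scheduled.
await queueService.queue("secret-replication", { secretId: "abc" });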

@@ -5,9 +5,6 @@ export const mockSmtpServer = (): TSmtpService => {
return {
sendMail: async (data) => {
storage.push(data);
},
verify: async () => {
return true;
}
};
};
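
Since sendMail only appends to the in-memory storage array, suites can exercise mail-sending paths without any SMTP traffic leaving the process. A sketch (the payload fields are illustrative):

const smtp = mockSmtpServer();

// Captured in `storage`; no network connection is made.
await smtp.sendMail({ to: "user@example.com", subjectLine: "Welcome", substitutions: {} });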

@@ -34,7 +34,7 @@ describe("Identity v1", async () => {
test("Create identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethods).toEqual([]);
expect(newIdentity.authMethod).toBeNull();
await deleteIdentity(newIdentity.id);
});
@@ -42,7 +42,7 @@ describe("Identity v1", async () => {
test("Update identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethods).toEqual([]);
expect(newIdentity.authMethod).toBeNull();
const updatedIdentity = await testServer.inject({
method: "PATCH",

@@ -39,6 +39,8 @@ describe("Login V1 Router", async () => {
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("mfaEnabled");
expect(payload).toHaveProperty("token");
expect(payload.mfaEnabled).toBeFalsy();
});
});

@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
id: deletedProjectEnvironment.id,
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 5,
position: 4,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})

@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
secretPath: dto.secretPath,
approvers: dto.approvers,
approvals: dto.approvals
}
});
expect(res.statusCode).toBe(200);
return res.json().approval;
};
describe("Secret approval policy router", async () => {
test("Create policy", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [{id:seedData1.id, type: ApproverType.User}],
name: "test-policy"
});
expect(policy.name).toBe("test-policy");
});
});

@@ -1,61 +1,73 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
const createSecretImport = async (importPath: string, importEnv: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
const payload = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath,
importEnv
});
const payload = await createSecretImport(importPath, importEnv);
expect(payload).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath,
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: importEnv,
slug: expect.any(String),
id: expect.any(String)
})
})
);
await deleteSecretImport({
id: payload.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(payload.id);
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
id: expect.any(String)
})
}),
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "staging",
slug: expect.any(String),
id: expect.any(String)
})
})
])
);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: prodImportDetails.path,
importEnv: prodImportDetails.envSlug
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: stagingImportDetails.path,
importEnv: stagingImportDetails.envSlug
});
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
method: "PATCH",
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const deletedImport = await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
// check for default environments
expect(deletedImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
slug: expect.any(String),
id: expect.any(String)
})
})
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport2.id);
});
});
// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import waterfall pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import multiple destination to one source pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import one source to multiple destination pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const stageImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
const prodImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
return async () => {
await deleteSecretImport({
id: prodImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: stageImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const stagingSecret = await getSecretByNameV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
expect(stagingSecret.secretValue).toBe("stage-value");
const prodSecret = await getSecretByNameV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(prodSecret.secretKey).toBe("PROD_KEY");
expect(prodSecret.secretValue).toBe("prod-value");
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
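
All three suites above share one fixture lifecycle through the e2e-test helpers: create the import (and any folders), write secrets into the source environment, assert they surface in the destination, then tear everything down. Condensed:

const importFixture = await createSecretImport({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  importPath: "/",
  importEnv: "staging"
});

// ...create secrets in staging, assert they are visible from dev...

await deleteSecretImport({
  id: importFixture.id,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  authToken: jwtAuthToken
});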

@@ -1,406 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
"Secret replication waterfall pattern testing - %secretPath",
({ secretPath: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
// wait 10 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait 10 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret replication 1-N pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait 10 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);
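
Both replication suites assert only after a fixed 10-second sleep, since replication runs asynchronously. A hypothetical poll-until helper (not part of this codebase) would bound the same wait without the fixed delay:

// Hypothetical helper: retry a probe until it stops throwing or the deadline passes.
const waitFor = async <T>(probe: () => Promise<T>, timeoutMs = 10000, intervalMs = 500): Promise<T> => {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    try {
      return await probe();
    } catch (err) {
      if (Date.now() >= deadline) throw err;
      await new Promise((resolve) => {
        setTimeout(resolve, intervalMs);
      });
    }
  }
};

// Usage: poll until the replicated secret becomes visible downstream.
const secret = await waitFor(() =>
  getSecretByNameV2({
    environmentSlug: seedData1.environment.slug,
    workspaceId: seedData1.projectV3.id,
    secretPath: "/",
    authToken: jwtAuthToken,
    key: "STAGING_KEY"
  })
);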

@@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
@@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
@@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(writeSecrets.statusCode).toBe(403);
expect(writeSecrets.statusCode).toBe(401);
expect(writeSecrets.json().error).toBe("PermissionDenied");
// but read access should still work fine

@@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

@@ -1,344 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret expansion", () => {
const projectId = seedData1.projectV3.id;
beforeAll(async () => {
const prodRootFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/",
name: "deep"
});
await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
name: "nested"
});
return async () => {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodRootFolder.id,
workspaceId: projectId,
environmentSlug: "prod"
});
};
});
test("Local secret reference", async () => {
const secrets = [
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "HELLO",
value: "world"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST",
// eslint-disable-next-line
value: "hello ${HELLO}"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST"
});
expect(expandedSecret.secretValue).toBe("hello world");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "TEST",
secretValue: "hello world"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Cross environment secret reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY"
});
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "KEY",
secretValue: "hello testing secret reference"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested"
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: "/deep/nested",
environment: "prod",
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
});
test(
"Replicated secret import secret expansion on local reference and nested reference",
async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested",
isReplication: true
});
// wait 5 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
environment: seedData1.environment.slug,
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
},
{ timeout: 10000 }
);
});
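
The suite above exercises two reference forms: ${KEY} resolves against the same environment and path, while ${env.folder.KEY} walks into another environment, treating every middle segment as a folder. A minimal sketch of that resolution rule against a flat lookup table (the real service resolves recursively against the database and follows imports):

// Hypothetical resolver; `table` keys are "env:/path:KEY" strings.
const expandReferences = (value: string, table: Map<string, string>, env: string, path: string): string =>
  value.replace(/\$\{([^}]+)\}/g, (_match, ref: string) => {
    const parts = ref.split(".");
    if (parts.length === 1) {
      // ${HELLO} -> same environment and same path
      return table.get(`${env}:${path}:${ref}`) ?? "";
    }
    // ${prod.deep.nested.KEY} -> first segment is the environment slug,
    // last is the key, everything between forms the folder path.
    const [refEnv, ...rest] = parts;
    const key = rest.pop() as string;
    return table.get(`${refEnv}:/${rest.join("/")}:${key}`) ?? "";
  });

// expandReferences("hello ${prod.deep.DEEP_KEY_1}", table, "dev", "/") === "hello testing"
// when table has "prod:/deep:DEEP_KEY_1" -> "testing", matching the test above.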

@@ -8,7 +8,6 @@ type TRawSecret = {
secretComment?: string;
version: number;
};
const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
@@ -535,107 +534,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
);
});
test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
mode: "upsert",
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test("Bulk upsert secrets in path multiple paths", async () => {
const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[0].path
}));
const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[1].path
}));
const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
mode: "upsert",
secrets: testSecrets
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
expect(firstBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
firstBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
expect(secondBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
secondBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
});
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))

@@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(404);
expect(createSecRes.statusCode).toBe(400);
});
test("Update secret raw", async () => {
@@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(404);
expect(updateSecRes.statusCode).toBe(400);
});
test("Delete secret raw", async () => {
@@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: deletedSecretReqBody
});
expect(deletedSecRes.statusCode).toBe(404);
expect(deletedSecRes.statusCode).toBe(400);
});
});

@@ -1,73 +0,0 @@
type TFolder = {
id: string;
name: string;
};
export const createFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
name: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
name: dto.name,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const deleteFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
id: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const listFolders = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folders as TFolder[];
};
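
These helpers wrap the folder endpoints so suites can build nested paths in beforeAll and remove them in the returned teardown. Typical usage, as the suites above do:

const folder = await createFolder({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: "prod",
  secretPath: "/",
  name: "deep"
});

// ...exercise secrets under /deep...

await deleteFolder({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: "prod",
  secretPath: "/",
  id: folder.id
});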

@@ -1,93 +0,0 @@
type TSecretImport = {
id: string;
importEnv: {
name: string;
slug: string;
id: string;
};
importPath: string;
};
export const createSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
isReplication?: boolean;
secretPath: string;
importPath: string;
importEnv: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
isReplication: dto.isReplication,
path: dto.secretPath,
import: {
environment: dto.importEnv,
path: dto.importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const deleteSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
id: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const listSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
return payload.secretImports as TSecretImport[];
};
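
listSecretImport is the read-side counterpart the ordering tests lean on: imports come back sorted by position, so position assertions reduce to index checks. A sketch (the expected slugs are illustrative):

const imports = await listSecretImport({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/"
});

expect(imports.map((el) => el.importEnv.slug)).toEqual(["staging", "prod"]);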

@@ -1,130 +0,0 @@
import { SecretType } from "@app/db/schemas";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
export const createSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
value: string;
comment?: string;
authToken: string;
type?: SecretType;
}) => {
const createSecretReqBody = {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
type: dto.type || SecretType.Shared,
secretPath: dto.secretPath,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
export const deleteSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
export const getSecretByNameV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const response = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(response.statusCode).toBe(200);
const payload = JSON.parse(response.payload);
expect(payload).toHaveProperty("secret");
return payload.secret as TRawSecret;
};
export const getSecretsV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true",
recursive: String(dto.recursive || false)
}
});
expect(getSecretsResponse.statusCode).toBe(200);
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
expect(getSecretsPayload).toHaveProperty("secrets");
expect(getSecretsPayload).toHaveProperty("imports");
return getSecretsPayload as {
secrets: TRawSecret[];
imports: {
secretPath: string;
environment: string;
folderId: string;
secrets: TRawSecret[];
}[];
};
};
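
A full round trip through these helpers, the shape every suite above builds on (the key and value here are illustrative):

await createSecretV2({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: "prod",
  secretPath: "/",
  key: "API_KEY",
  value: "super-secret"
});

const secret = await getSecretByNameV2({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: "prod",
  secretPath: "/",
  key: "API_KEY"
});
expect(secret.secretValue).toBe("super-secret");

await deleteSecretV2({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: "prod",
  secretPath: "/",
  key: "API_KEY"
});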

@@ -11,67 +11,37 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(envConfig.REDIS_URL);
await redis.flushdb("SYNC");
try {
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
redis,
envConfig
});
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
@@ -84,16 +54,14 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}
// custom setup
return {
async teardown() {
@@ -112,12 +80,8 @@ export default {
},
true
);
await redis.flushdb("ASYNC");
redis.disconnect();
await db.destroy();
}
};
}
};
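The setup above signs a JWT for the seeded user and stashes the server on `globalThis`. A hedged sketch of how a spec could verify that token, assuming `jsonwebtoken` (already a backend dependency), the same `AUTH_SECRET`, and that the setup also assigns `globalThis.jwtAuthToken`; the payload fields beyond `organizationId` and `accessVersion` are assumptions:

import jwt from "jsonwebtoken";

// Illustrative: decode the seeded auth token signed during setup.
// @ts-expect-error jwtAuthToken is injected on globalThis by the setup file (assumed)
const token = globalThis.jwtAuthToken as string;
const decoded = jwt.verify(token, process.env.AUTH_SECRET as string) as {
  organizationId: string;
  accessVersion: number;
  userId?: string; // assumed to be present alongside the fields shown above
};
console.log(decoded.organizationId); // should match seedData1.organization.id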

backend/package-lock.json (generated, 11336 lines changed): diff suppressed because it is too large.

@@ -34,42 +34,25 @@
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup --sourcemap",
"build": "tsup",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=<value>",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@@ -93,11 +76,8 @@
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/pkcs11js": "^1.0.4",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -121,51 +101,33 @@
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@elastic/elasticsearch": "^8.15.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -177,54 +139,39 @@
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dd-trace": "^5.40.0",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
"google-auth-library": "^9.9.0",
"googleapis": "^137.1.0",
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"isomorphic-dompurify": "^2.22.0",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"jwks-rsa": "^3.1.0",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"ldif": "0.5.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"otplib": "^12.0.1",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"pkcs11js": "^2.1.6",
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",
"safe-regex": "^2.1.1",
"scim-patch": "^0.8.3",
"scim2-parse-filter": "^0.2.10",
"sjcl": "^1.0.8",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",

@@ -7,33 +7,14 @@ const prompt = promptSync({
sigint: true
});
type ComponentType = 1 | 2 | 3;
console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);
if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();
const componentType = parseInt(prompt("Select a component: "), 10);
if (componentType === 1) {
const componentName = prompt("Enter service name: ");

@@ -90,12 +90,7 @@ const main = async () => {
.whereRaw("table_schema = current_schema()")
.select<{ tableName: string }[]>("table_name as tableName")
.orderBy("table_name")
).filter(
(el) =>
!el.tableName.includes("_migrations") &&
!el.tableName.includes("audit_logs_") &&
el.tableName !== "intermediate_audit_logs"
);
).filter((el) => !el.tableName.includes("_migrations"));
for (let i = 0; i < tables.length; i += 1) {
const { tableName } = tables[i];

@@ -1,103 +0,0 @@
/* eslint-disable */
import promptSync from "prompt-sync";
import { execSync } from "child_process";
import path from "path";
import { existsSync } from "fs";
const prompt = promptSync({
sigint: true
});
const sanitizeInputParam = (value: string) => {
// Escape double quotes and wrap the entire value in double quotes
if (value) {
return `"${value.replace(/"/g, '\\"')}"`;
}
return '""';
};
const exportDb = () => {
const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
const exportPort = sanitizeInputParam(
prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
);
const exportUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
);
const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
const exportDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
);
// we do not include the audit_log and secret_sharing entries
execSync(
`PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ stdio: "inherit" }
);
};
const importDbForOrg = () => {
const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
const importUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
);
const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
const importDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
);
const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
console.log("File not found, please export the database first.");
return;
}
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ maxBuffer: 1024 * 1024 * 4096 }
);
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
);
// delete global/instance-level resources not relevant to the organization being migrated
// users
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
);
// identities
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
);
// reset slack configuration in superAdmin
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
);
console.log("Organization migrated successfully.");
};
const main = () => {
const action = prompt(
"Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
);
if (action === "1") {
exportDb();
} else if (action === "2") {
importDbForOrg();
} else {
console.log("Invalid action");
}
};
main();

@ -1,7 +0,0 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}

@@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
declare global {
@@ -8,7 +8,7 @@ declare global {
RawServerDefault,
RawRequestDefaultExpression<RawServerDefault>,
RawReplyDefaultExpression<RawServerDefault>,
Readonly<CustomLogger>,
Readonly<Logger>,
ZodTypeProvider
>;

@@ -1,7 +1,5 @@
import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@@ -9,25 +7,17 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -35,12 +25,9 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@@ -48,18 +35,12 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@@ -69,9 +50,6 @@ import { TIntegrationServiceFactory } from "@app/services/integration/integratio
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -84,30 +62,16 @@ import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
declare module "fastify" {
interface Session {
callbackPort: string;
}
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
@@ -124,24 +88,17 @@ declare module "fastify" {
id: string;
orgId: string;
};
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: string;
providerAuthToken: string;
};
kmipUser: {
projectId: string;
clientId: string;
name: string;
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
redis: Redis;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
@@ -156,7 +113,6 @@ declare module "fastify" {
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
pkiAlert: TPkiAlertServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
projectEnv: TProjectEnvServiceFactory;
@@ -182,7 +138,6 @@ declare module "fastify" {
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@@ -195,13 +150,8 @@ declare module "fastify" {
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@@ -211,31 +161,15 @@ declare module "fastify" {
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
hsm: THsmServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
cmek: TCmekServiceFactory;
migration: TExternalMigrationServiceFactory;
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
secretSync: TSecretSyncServiceFactory;
kmip: TKmipServiceFactory;
kmipOperation: TKmipOperationServiceFactory;
gateway: TGatewayServiceFactory;
};
// this is exclusively for middlewares that need to inject data
// everywhere else, access data through the service layer
store: {
user: Pick<TUserDALFactory, "findById">;
kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
};
}
}

@@ -1,4 +0,0 @@
declare module "hdb" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function createClient(options): any;
}

@@ -53,12 +53,6 @@ import {
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate,
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
@@ -68,9 +62,6 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
@@ -101,15 +92,9 @@ import {
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate,
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate,
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
@@ -146,18 +131,6 @@ import {
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate,
TKmipClients,
TKmipClientsInsert,
TKmipClientsUpdate,
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate,
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
@@ -182,33 +155,18 @@ import {
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
TOrgRoles,
TOrgRolesInsert,
TOrgRolesUpdate,
TPkiAlerts,
TPkiAlertsInsert,
TPkiAlertsUpdate,
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate,
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
@@ -220,16 +178,7 @@ import {
TProjectRolesUpdate,
TProjects,
TProjectsInsert,
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
TProjectTemplatesUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate,
@@ -239,9 +188,6 @@ import {
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -338,30 +284,9 @@ import {
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate,
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate,
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate,
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate,
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate,
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
TTotpConfigs,
TTotpConfigsInsert,
TTotpConfigsUpdate,
TTrustedIps,
TTrustedIpsInsert,
TTrustedIpsUpdate,
@@ -382,18 +307,8 @@ import {
TUsersUpdate,
TWebhooks,
TWebhooksInsert,
TWebhooksUpdate,
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
TWebhooksUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
@@ -419,31 +334,6 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate
>;
[TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate
>;
[TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate
>;
[TableName.SshCertificate]: KnexOriginal.CompositeTableType<
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate
>;
[TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@@ -465,16 +355,6 @@ declare module "knex/types/tables" {
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate
>;
[TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate
>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
@@ -485,17 +365,6 @@ declare module "knex/types/tables" {
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate
>;
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
@@ -632,11 +501,6 @@ declare module "knex/types/tables" {
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate
>;
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
@@ -662,11 +526,6 @@ declare module "knex/types/tables" {
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
@@ -881,74 +740,5 @@ declare module "knex/types/tables" {
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate
>;
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate
>;
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
>;
[TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
>;
[TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
TProjectTemplates,
TProjectTemplatesInsert,
TProjectTemplatesUpdate
>;
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
[TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
[TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate
>;
[TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate
>;
[TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate
>;
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate
>;
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate
>;
}
}

@@ -1,4 +0,0 @@
declare module "ldif" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function parse(input: string, ...args: any[]): any;
}

@@ -1,105 +0,0 @@
import path from "node:path";
import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";
import { PgSqlLock } from "./keystore/keystore";
dotenv.config();
type TArgs = {
auditLogDb?: Knex;
applicationDb: Knex;
logger: Logger;
};
const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
directory: path.join(__dirname, "./db/migrations"),
loadExtensions: [".mjs", ".ts"],
tableName: "infisical_migrations"
};
const migrationStatusCheckErrorHandler = (err: Error) => {
// happens on first run, when the migration table itself has not been created yet
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
if (err?.message?.includes("does not exist")) {
return true;
}
throw err;
};
export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
try {
// akhilmhdh(Feb 10 2025): 2 years from now remove this
if (isProduction) {
const migrationTable = migrationConfig.tableName;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
}
const shouldRunMigration = Boolean(
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
); // db.length - code.length
if (!shouldRunMigration) {
logger.info("No migrations pending: Skipping migration process.");
return;
}
if (auditLogDb) {
await auditLogDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running audit log migrations.");
const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await auditLogDb.migrate.latest(migrationConfig);
logger.info("Finished audit log migrations.");
});
}
await applicationDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running application migrations.");
const didPreviousInstanceRunMigration = !(await applicationDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await applicationDb.migrate.latest(migrationConfig);
logger.info("Finished application migrations.");
});
} catch (err) {
logger.error(err, "Boot up migration failed");
process.exit(1);
}
};
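For orientation, a hedged sketch of wiring `runMigrations` at boot; the import path is hypothetical, and the env var names mirror the knexfiles above:

import knex from "knex";
import pino from "pino";

import { runMigrations } from "./migration"; // hypothetical path to the module above

const logger = pino();
const applicationDb = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
// the audit log DB is optional; runMigrations only migrates it when provided
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
  ? knex({ client: "pg", connection: process.env.AUDIT_LOGS_DB_CONNECTION_URI })
  : undefined;

void runMigrations({ applicationDb, auditLogDb, logger });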

@@ -1,75 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
console.info("Dedicated audit log database not found. No further migrations necessary");
process.exit(0);
}
console.info("Executing migration on audit log database...");
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
host: process.env.AUDIT_LOGS_DB_HOST,
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
pool: {
min: 2,
max: 10
},
seeds: {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
}
},
production: {
client: "postgres",
connection: {
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
host: process.env.AUDIT_LOGS_DB_HOST,
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: "infisical_migrations"
}
}
} as Knex.Config;

@@ -1,2 +1,2 @@
export type { TDbClient } from "./instance";
export { initAuditLogDbConnection, initDbConnection } from "./instance";
export { initDbConnection } from "./instance";

@@ -49,9 +49,6 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
@@ -67,57 +64,9 @@ export const initDbConnection = ({
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
});
return db;
};
export const initAuditLogDbConnection = ({
dbConnectionUri,
dbRootCert
}: {
dbConnectionUri: string;
dbRootCert?: string;
}) => {
// akhilmhdh: the default Knex type is knex.Knex<any, any[]>, but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>.
// This was causing issues in files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are typed explicitly here.
// eslint-disable-next-line
const db: Knex<any, unknown[]> = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
host: process.env.AUDIT_LOGS_DB_HOST,
// @ts-expect-error I have no clue why only for the port there is a type error
// eslint-disable-next-line
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: dbRootCert
? {
rejectUnauthorized: true,
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
// we add these overrides so that auditLogDb and the primary DB are interchangeable
db.primaryNode = () => {
return db;
};
db.replicaNode = () => {
return db;
};
return db;
};
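The `primaryNode`/`replicaNode` overrides above are what let audit-log code reuse data-access helpers written for the replica-aware primary connection. A minimal sketch of a consumer that is agnostic to which handle it receives, assuming `TDbClient` (exported above) carries the `replicaNode` augmentation and using an illustrative table name:

import { TDbClient } from "@app/db";

// Works with handles from both initDbConnection and initAuditLogDbConnection,
// since the latter's replicaNode() simply returns the same instance.
const countAuditLogs = async (db: TDbClient) => {
  const reader = db.replicaNode();
  const [{ count }] = await reader("audit_logs").count("* as count");
  return Number(count);
};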

@@ -38,8 +38,7 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
tableName: "infisical_migrations"
}
},
production: {
@@ -63,8 +62,7 @@ export default {
max: 10
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
tableName: "infisical_migrations"
}
}
} as Knex.Config;

@@ -1,161 +0,0 @@
import kx, { Knex } from "knex";
import { TableName } from "../schemas";
const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs";
const formatPartitionDate = (date: Date) => {
const year = date.getFullYear();
const month = String(date.getMonth() + 1).padStart(2, "0");
const day = String(date.getDate()).padStart(2, "0");
return `${year}-${month}-${day}`;
};
const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
);
};
const up = async (knex: Knex): Promise<void> => {
console.info("Dropping primary key of audit log table...");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
// remove existing keys
t.dropPrimary();
});
// Get all indices of the audit log table and drop them
const indexNames: { rows: { indexname: string }[] } = await knex.raw(
`
SELECT indexname
FROM pg_indexes
WHERE tablename = '${TableName.AuditLog}'
`
);
console.log(
"Deleting existing audit log indices:",
indexNames.rows.map((e) => e.indexname)
);
for await (const row of indexNames.rows) {
await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`);
}
// renaming audit log to intermediate table
console.log("Renaming audit log table to the intermediate name");
await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE);
if (!(await knex.schema.hasTable(TableName.AuditLog))) {
const createTableSql = knex.schema
.createTable(TableName.AuditLog, (t) => {
t.uuid("id").defaultTo(knex.fn.uuid());
t.string("actor").notNullable();
t.jsonb("actorMetadata").notNullable();
t.string("ipAddress");
t.string("eventType").notNullable();
t.jsonb("eventMetadata");
t.string("userAgent");
t.string("userAgentType");
t.datetime("expiresAt");
t.timestamps(true, true, true);
t.uuid("orgId");
t.string("projectId");
t.string("projectName");
t.primary(["id", "createdAt"]);
})
.toString();
console.info("Creating partition table...");
await knex.schema.raw(`
${createTableSql} PARTITION BY RANGE ("createdAt");
`);
console.log("Adding indices...");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
t.index(["projectId", "createdAt"]);
t.index(["orgId", "createdAt"]);
t.index("expiresAt");
t.index("orgId");
t.index("projectId");
});
console.log("Adding GIN indices...");
await knex.raw(
`CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)`
);
console.log("GIN index for actorMetadata done");
await knex.raw(
`CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)`
);
console.log("GIN index for eventMetadata done");
// create default partition
console.log("Creating default partition...");
await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`);
const nextDate = new Date();
nextDate.setDate(nextDate.getDate() + 1);
const nextDateStr = formatPartitionDate(nextDate);
console.log("Attaching existing audit log table as a partition...");
await knex.schema.raw(`
ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old
CHECK ( "createdAt" < DATE '${nextDateStr}' );
ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE}
FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
`);
// create partition from now until end of month
console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
// create partitions 4 years ahead
const partitionMonths = 4 * 12;
const partitionPromises: Promise<void>[] = [];
for (let x = 1; x <= partitionMonths; x += 1) {
partitionPromises.push(
createAuditLogPartition(
knex,
new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
)
);
}
await Promise.all(partitionPromises);
console.log("Partition migration complete");
}
};
export const executeMigration = async (url: string) => {
console.log("Executing migration...");
const knex = kx({
client: "pg",
connection: url
});
await knex.transaction(async (tx) => {
await up(tx);
});
};
const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI;
if (!dbUrl) {
console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env");
process.exit(1);
}
void executeMigration(dbUrl).then(() => {
console.log("Migration: partition-audit-logs DONE");
process.exit(0);
});
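Since partitions are created per month, their names follow mechanically from `formatPartitionDate`; a small illustrative computation, assuming `TableName.AuditLog` resolves to `audit_logs`:

// Illustrative: the name createAuditLogPartition derives for March 2025.
const start = new Date(2025, 2, 1); // JS months are 0-indexed
const end = new Date(2025, 3, 1);
const partitionName = `audit_logs_${formatPartitionDate(start).replace(/-/g, "")}_${formatPartitionDate(end).replace(/-/g, "")}`;
// => "audit_logs_20250301_20250401"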

@@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("integration").notNullable();
t.string("teamId"); // vercel-specific
t.string("url"); // for self-hosted
t.string("url"); // for self hosted
t.string("namespace"); // hashicorp specific
t.string("accountId"); // netlify
t.text("refreshCiphertext");
@@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
await knex.schema.createTable(TableName.Integration, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.boolean("isActive").notNullable();
t.string("url"); // self-hosted
t.string("url"); // self hosted
t.string("app"); // name of app in provider
t.string("appId");
t.string("targetEnvironment");

@@ -115,14 +115,7 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@@ -154,27 +147,13 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerId: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangeBy: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@@ -198,20 +177,8 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalRequest,
`${TableName.SecretApprovalRequest}.id`,
`${TableName.SecretApprovalRequestReviewer}.requestId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
tb.uuid("member").notNullable().alter();

@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/ban-ts-comment */
import { Knex } from "knex";
import { SecretType, TableName } from "../schemas";
@@ -23,7 +22,6 @@ export async function up(knex: Knex): Promise<void> {
t.uuid("folderId").notNullable();
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index(["folderId", "userId"]);
});
}
await createOnUpdateTrigger(knex, TableName.SecretV2);
@@ -107,12 +105,12 @@ export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SnapshotSecretV2))) {
await knex.schema.createTable(TableName.SnapshotSecretV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("envId").index().notNullable();
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
// not a relation; kept this way so the value survives a rollback
t.uuid("secretVersionId").index().notNullable();
t.uuid("secretVersionId").notNullable();
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("snapshotId").index().notNullable();
t.uuid("snapshotId").notNullable();
t.foreign("snapshotId").references("id").inTable(TableName.Snapshot).onDelete("CASCADE");
t.timestamps(true, true, true);
});

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { EnforcementLevel } from "@app/lib/types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "enforcementLevel");
if (!hasColumn) {
await knex.schema.table(TableName.SecretApprovalPolicy, (table) => {
table.string("enforcementLevel", 10).notNullable().defaultTo(EnforcementLevel.Hard);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "enforcementLevel");
if (hasColumn) {
await knex.schema.table(TableName.SecretApprovalPolicy, (table) => {
table.dropColumn("enforcementLevel");
});
}
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { EnforcementLevel } from "@app/lib/types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "enforcementLevel");
if (!hasColumn) {
await knex.schema.table(TableName.AccessApprovalPolicy, (table) => {
table.string("enforcementLevel", 10).notNullable().defaultTo(EnforcementLevel.Hard);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "enforcementLevel");
if (hasColumn) {
await knex.schema.table(TableName.AccessApprovalPolicy, (table) => {
table.dropColumn("enforcementLevel");
});
}
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { SecretSharingAccessType } from "@app/lib/types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SecretSharing, "accessType");
if (!hasColumn) {
await knex.schema.table(TableName.SecretSharing, (table) => {
table.string("accessType").notNullable().defaultTo(SecretSharingAccessType.Anyone);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SecretSharing, "accessType");
if (hasColumn) {
await knex.schema.table(TableName.SecretSharing, (table) => {
table.dropColumn("accessType");
});
}
}

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "bypassReason");
if (!hasColumn) {
await knex.schema.table(TableName.SecretApprovalRequest, (table) => {
table.string("bypassReason").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "bypassReason");
if (hasColumn) {
await knex.schema.table(TableName.SecretApprovalRequest, (table) => {
table.dropColumn("bypassReason");
});
}
}

@ -1,294 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
if (!hasApproverUserId) {
// add the new fields
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
// if (hasApproverId) tb.setNullable("approverId");
tb.uuid("approverUserId");
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
});
// convert project membership id => user id
await knex(TableName.AccessApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverId`]))
});
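// knex.raw("??", [ident]) binds an identifier (not a value), so the .where() above
// compares against the outer row's column, making this a correlated-subquery update.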
// drop the old field
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
if (hasApproverId) tb.dropColumn("approverId");
tb.uuid("approverUserId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST ------------
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
if (hasAccessApprovalRequestTable) {
// new fields
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (!hasRequestedByUserId) {
tb.uuid("requestedByUserId");
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
// copy the assigned project membership => user id to new fields
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedByUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedBy`]))
});
// drop old fields
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (hasRequestedBy) {
// DROP AT A LATER TIME
// tb.dropColumn("requestedBy");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("requestedBy").nullable().alter();
}
tb.uuid("requestedByUserId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
if (!hasReviewerUserId) {
// new fields
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.setNullable("member");
tb.uuid("reviewerUserId");
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
});
// copy project membership => user id to new fields
await knex(TableName.AccessApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
reviewerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.member`]))
});
// drop old field
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
if (hasMemberId) {
// DROP AT A LATER TIME
// tb.dropColumn("member");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("member").nullable().alter();
}
tb.uuid("reviewerUserId").notNullable().alter();
});
}
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
const projectUserAdditionalPrivilegeHasProjectMembershipId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"projectMembershipId"
);
const projectUserAdditionalPrivilegeHasUserId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"userId"
);
if (!projectUserAdditionalPrivilegeHasUserId) {
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.string("projectId");
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
await knex(TableName.ProjectUserAdditionalPrivilege)
.update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
userId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectId: knex(TableName.ProjectMembership)
.select("projectId")
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`]))
})
.whereNotNull("projectMembershipId");
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.uuid("userId").notNullable().alter();
tb.string("projectId").notNullable().alter();
});
}
if (projectUserAdditionalPrivilegeHasProjectMembershipId) {
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
// DROP AT A LATER TIME
// tb.dropColumn("projectMembershipId");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("projectMembershipId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
// We remove project user additional privileges first, because this may delete records where the project membership is not found.
// The project membership won't be found on records created by group members. In those cases we just delete the record and continue.
// When the additional privilege record is deleted, it will cascade-delete the access request created by the group member.
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
const hasUserId = await knex.schema.hasColumn(TableName.ProjectUserAdditionalPrivilege, "userId");
const hasProjectMembershipId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"projectMembershipId"
);
// If it doesn't have the userId field, then the up migration has not run
if (!hasUserId) {
return;
}
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
if (!hasProjectMembershipId) {
tb.uuid("projectMembershipId");
tb.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasProjectMembershipId) {
// First, update records where a matching project membership exists
await knex(TableName.ProjectUserAdditionalPrivilege).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectMembershipId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.userId`]))
});
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedBy: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`]))
});
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.dropColumn("userId");
tb.dropColumn("projectId");
tb.uuid("projectMembershipId").notNullable().alter();
});
}
// Then, delete records where no matching project membership was found
await knex(TableName.ProjectUserAdditionalPrivilege).whereNull("projectMembershipId").delete();
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
if (hasApproverUserId) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
if (!hasApproverId) {
tb.uuid("approverId");
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasApproverId) {
await knex(TableName.AccessApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
tb.dropColumn("approverUserId");
tb.uuid("approverId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST ------------
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
if (hasAccessApprovalRequestTable) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (!hasRequestedBy) {
tb.uuid("requestedBy");
tb.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
// Try to find a project membership based on AccessApprovalRequest.requestedByUserId and the request's
// policyId (reference to AccessApprovalPolicy) -> envId (reference to Environment) -> projectId (reference to Project).
// If a project membership is found, set AccessApprovalRequest.requestedBy to the project membership id.
// If no project membership is found, the AccessApprovalRequest record is removed below.
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedBy: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`]))
});
// Then, delete records where no matching project membership was found
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (hasRequestedByUserId) {
tb.dropColumn("requestedByUserId");
}
if (hasRequestedBy) tb.uuid("requestedBy").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
if (hasReviewerUserId) {
if (!hasMemberId) {
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
tb.uuid("member");
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
});
}
await knex(TableName.AccessApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`]))
});
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
tb.dropColumn("reviewerUserId");
tb.uuid("member").notNullable().alter();
});
}
}
}

@ -1,39 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesNameExist = await knex.schema.hasColumn(TableName.SecretSharing, "name");
if (!doesNameExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("name").nullable();
});
}
const doesLastViewedAtExist = await knex.schema.hasColumn(TableName.SecretSharing, "lastViewedAt");
if (!doesLastViewedAtExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.timestamp("lastViewedAt").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesNameExist = await knex.schema.hasColumn(TableName.SecretSharing, "name");
if (doesNameExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("name");
});
}
const doesLastViewedAtExist = await knex.schema.hasColumn(TableName.SecretSharing, "lastViewedAt");
if (doesLastViewedAtExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("lastViewedAt");
});
}
}
}

@ -1,119 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId");
if (!hasActiveCaCertIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.uuid("activeCaCertId").nullable();
t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthority}" ca
SET "activeCaCertId" = cac.id
FROM "${TableName.CertificateAuthorityCert}" cac
WHERE ca.id = cac."caId"
`);
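// "UPDATE ... FROM" is Postgres-specific join-update syntax; interpolating TableName
// into the raw SQL is safe here because the values are static enum strings, not user input.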
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version");
if (!hasVersionColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").nullable();
t.dropUnique(["caId"]);
});
await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version");
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").notNullable().alter();
});
}
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCert}" cert
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) {
await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => {
t.dropUnique(["caId"]);
});
}
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
if (!hasCaCertIdColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "caCertId" = (
SELECT caCert.id
FROM "${TableName.CertificateAuthorityCert}" caCert
WHERE caCert."caId" = cert."caId"
)`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("activeCaCertId");
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("version");
});
}
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("caSecretId");
});
}
}
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("caCertId");
});
}
}
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
if (!doesPasswordExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("password").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
if (doesPasswordExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("password");
});
}
}
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (hasCreationLimitCol) {
t.dropColumn("creationLimit");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (!hasCreationLimitCol) {
t.integer("creationLimit").defaultTo(30).notNullable();
}
});
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.dropColumn("name");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (!hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.string("name");
});
}
}

@ -1,62 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PkiCollection))) {
await knex.schema.createTable(TableName.PkiCollection, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("name").notNullable();
t.string("description").notNullable();
});
}
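// createOnUpdateTrigger (from ../utils) presumably attaches the Postgres trigger that
// keeps "updatedAt" current on every UPDATE; down() pairs it with dropOnUpdateTrigger.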
await createOnUpdateTrigger(knex, TableName.PkiCollection);
if (!(await knex.schema.hasTable(TableName.PkiCollectionItem))) {
await knex.schema.createTable(TableName.PkiCollectionItem, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("pkiCollectionId").notNullable();
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.uuid("certId").nullable();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.PkiCollectionItem);
if (!(await knex.schema.hasTable(TableName.PkiAlert))) {
await knex.schema.createTable(TableName.PkiAlert, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("pkiCollectionId").notNullable();
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
t.string("name").notNullable();
t.integer("alertBeforeDays").notNullable();
t.string("recipientEmails").notNullable();
t.unique(["name", "projectId"]);
});
}
await createOnUpdateTrigger(knex, TableName.PkiAlert);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.PkiAlert);
await dropOnUpdateTrigger(knex, TableName.PkiAlert);
await knex.schema.dropTableIfExists(TableName.PkiCollectionItem);
await dropOnUpdateTrigger(knex, TableName.PkiCollectionItem);
await knex.schema.dropTableIfExists(TableName.PkiCollection);
await dropOnUpdateTrigger(knex, TableName.PkiCollection);
}

@ -1,55 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
if (!hasCertificateTemplateTable) {
await knex.schema.createTable(TableName.CertificateTemplate, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.uuid("caId").notNullable();
tb.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
tb.uuid("pkiCollectionId");
tb.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("SET NULL");
tb.string("name").notNullable();
tb.string("commonName").notNullable();
tb.string("subjectAlternativeName").notNullable();
tb.string("ttl").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.CertificateTemplate);
}
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
TableName.Certificate,
"certificateTemplateId"
);
if (!doesCertificateTableHaveTemplateId) {
await knex.schema.alterTable(TableName.Certificate, (tb) => {
tb.uuid("certificateTemplateId");
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
TableName.Certificate,
"certificateTemplateId"
);
if (doesCertificateTableHaveTemplateId) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("certificateTemplateId");
});
}
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
if (hasCertificateTemplateTable) {
await knex.schema.dropTable(TableName.CertificateTemplate);
await dropOnUpdateTrigger(knex, TableName.CertificateTemplate);
}
}

@ -1,26 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasEstConfigTable = await knex.schema.hasTable(TableName.CertificateTemplateEstConfig);
if (!hasEstConfigTable) {
await knex.schema.createTable(TableName.CertificateTemplateEstConfig, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.uuid("certificateTemplateId").notNullable().unique();
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("CASCADE");
tb.binary("encryptedCaChain").notNullable();
tb.string("hashedPassphrase").notNullable();
tb.boolean("isEnabled").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.CertificateTemplateEstConfig);
await dropOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
}

@ -1,36 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCrl, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCrl}" crl
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = crl."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.dropColumn("caSecretId");
});
}
}

@ -1,96 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("integration").notNullable();
tb.string("slug").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.string("description");
tb.unique(["orgId", "slug"]);
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
}
if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).notNullable();
tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
tb.string("teamId").notNullable();
tb.string("teamName").notNullable();
tb.string("slackUserId").notNullable();
tb.string("slackAppId").notNullable();
tb.binary("encryptedBotAccessToken").notNullable();
tb.string("slackBotId").notNullable();
tb.string("slackBotUserId").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
}
if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("projectId").notNullable().unique();
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
tb.uuid("slackIntegrationId").notNullable();
tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("accessRequestChannels").notNullable().defaultTo("");
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("secretRequestChannels").notNullable().defaultTo("");
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
}
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (!doesSuperAdminHaveSlackClientId) {
tb.binary("encryptedSlackClientId");
}
if (!doesSuperAdminHaveSlackClientSecret) {
tb.binary("encryptedSlackClientSecret");
}
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (doesSuperAdminHaveSlackClientId) {
tb.dropColumn("encryptedSlackClientId");
}
if (doesSuperAdminHaveSlackClientSecret) {
tb.dropColumn("encryptedSlackClientSecret");
}
});
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
TableName.CertificateAuthority,
"requireTemplateForIssuance"
);
if (!hasRequireTemplateForIssuanceColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("requireTemplateForIssuance");
});
}
}

@ -1,85 +0,0 @@
import { Knex } from "knex";
import { CertKeyUsage } from "@app/services/certificate/certificate-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// Certificate template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
if (!hasKeyUsagesCol) {
tb.specificType("keyUsages", "text[]");
}
if (!hasExtendedKeyUsagesCol) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
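// Backfill defaults only when the columns were just created, so a re-run cannot
// overwrite values that rows may have accumulated since.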
if (!hasKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!hasExtendedKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
extendedKeyUsages: []
});
}
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (tb) => {
if (!doesCertTableHaveKeyUsages) {
tb.specificType("keyUsages", "text[]");
}
if (!doesCertTableHaveExtendedKeyUsages) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!doesCertTableHaveKeyUsages) {
await knex(TableName.Certificate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!doesCertTableHaveExtendedKeyUsages) {
await knex(TableName.Certificate).update({
extendedKeyUsages: []
});
}
}
export async function down(knex: Knex): Promise<void> {
// Certificate Template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
if (hasKeyUsagesCol) {
t.dropColumn("keyUsages");
}
if (hasExtendedKeyUsagesCol) {
t.dropColumn("extendedKeyUsages");
}
});
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (t) => {
if (doesCertTableHaveKeyUsages) {
t.dropColumn("keyUsages");
}
if (doesCertTableHaveExtendedKeyUsages) {
t.dropColumn("extendedKeyUsages");
}
});
}

@ -1,76 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApproverGroupId = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approverGroupId"
);
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasSecretApproverGroupId = await knex.schema.hasColumn(
TableName.SecretApprovalPolicyApprover,
"approverGroupId"
);
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
// add column approverGroupId to AccessApprovalPolicyApprover
if (!hasAccessApproverGroupId) {
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
}
// make approverUserId nullable
if (hasAccessApproverUserId) {
table.uuid("approverUserId").nullable().alter();
}
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
// add column approverGroupId to SecretApprovalPolicyApprover
if (!hasSecretApproverGroupId) {
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
}
// make approverUserId nullable
if (hasSecretApproverUserId) {
table.uuid("approverUserId").nullable().alter();
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApproverGroupId = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approverGroupId"
);
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasSecretApproverGroupId = await knex.schema.hasColumn(
TableName.SecretApprovalPolicyApprover,
"approverGroupId"
);
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
if (hasAccessApproverGroupId) {
table.dropColumn("approverGroupId");
}
// make approverUserId not nullable
if (hasAccessApproverUserId) {
table.uuid("approverUserId").notNullable().alter();
}
});
// revert SecretApprovalPolicyApprover changes
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
if (hasSecretApproverGroupId) {
table.dropColumn("approverGroupId");
}
// make approverUserId not nullable
if (hasSecretApproverUserId) {
table.uuid("approverUserId").notNullable().alter();
}
});
}
}

@ -1,24 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("key").notNullable();
tb.string("value").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.uuid("identityId");
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
tb.timestamps(true, true, true);
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
}

@ -1,43 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("iv").nullable().alter();
t.string("tag").nullable().alter();
t.string("encryptedValue").nullable().alter();
if (!hasEncryptedSecret) {
t.binary("encryptedSecret").nullable();
}
t.string("hashedHex").nullable().alter();
if (!hasIdentifier) {
t.string("identifier", 64).nullable();
t.unique("identifier");
t.index("identifier");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasEncryptedSecret) {
t.dropColumn("encryptedSecret");
}
if (hasIdentifier) {
t.dropColumn("identifier");
}
});
}
}

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) {
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
tb.datetime("lastUsed");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) {
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
tb.dropColumn("lastUsed");
});
}
}

@ -1,52 +0,0 @@
import { Knex } from "knex";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");
// drop constraint if exists (won't exist if rolled back, see below)
await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);
// projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
if (!hasProjectId) {
table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
}
if (hasOrgId && hasSlug) {
table.unique(["orgId", "projectId", "slug"]);
}
if (hasSlug) {
table.renameColumn("slug", "name");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");
// remove projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
if (hasName) {
table.renameColumn("name", "slug");
}
if (hasOrgId) {
table.dropUnique(["orgId", "projectId", "slug"]);
}
if (hasProjectId) {
table.dropColumn("projectId");
}
});
}
}

@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlug) {
// add slug back temporarily and set value equal to name
await knex.schema
.alterTable(TableName.KmsKey, (table) => {
table.string("slug", 32);
})
.then(() => knex(TableName.KmsKey).update("slug", knex.ref("name")));
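// knex.ref("name") references the column rather than binding the string literal,
// so every row gets slug = name in a single UPDATE.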
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlug) {
await knex.schema.alterTable(TableName.KmsKey, (table) => {
table.dropColumn("slug");
});
}
}
}

@ -1,48 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AuditLog)) {
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesOrgIdExist) {
t.dropForeign("orgId");
}
if (doesProjectIdExist) {
t.dropForeign("projectId");
}
// add normalized field
if (!doesProjectNameExist) {
t.string("projectName");
}
});
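// Dropping these foreign keys decouples audit logs from their org/project rows,
// presumably so log history can outlive deletion of the parent records.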
}
}
export async function down(knex: Knex): Promise<void> {
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesOrgIdExist) {
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
}
if (doesProjectIdExist) {
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
}
// remove normalized field
if (doesProjectNameExist) {
t.dropColumn("projectName");
}
});
}
}

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
// org default role
if (await knex.schema.hasTable(TableName.Organization)) {
const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");
if (!hasDefaultRoleCol) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.string("defaultMembershipRole").notNullable().defaultTo("member");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
// org default role
if (await knex.schema.hasTable(TableName.Organization)) {
const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");
if (hasDefaultRoleCol) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.dropColumn("defaultMembershipRole");
});
}
}
}

@ -1,101 +0,0 @@
/* eslint-disable no-await-in-loop */
import { packRules, unpackRules } from "@casl/ability/extra";
import { Knex } from "knex";
import {
backfillPermissionV1SchemaToV2Schema,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { TableName } from "../schemas";
const CHUNK_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
if (!hasVersion) {
await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
t.integer("version").defaultTo(1).notNullable();
});
const docs = await knex(TableName.ProjectRoles).select("*");
const updatedDocs = docs
.filter((i) => {
const permissionString = JSON.stringify(i.permissions || []);
return (
!permissionString.includes(ProjectPermissionSub.SecretImports) &&
!permissionString.includes(ProjectPermissionSub.DynamicSecrets)
);
})
.map((el) => ({
...el,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore this is valid ts
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions), true)))
}));
if (updatedDocs.length) {
for (let i = 0; i < updatedDocs.length; i += CHUNK_SIZE) {
const chunk = updatedDocs.slice(i, i + CHUNK_SIZE);
await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge();
}
}
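// insert().onConflict("id").merge() is a Postgres upsert, so each existing row is
// updated in place; chunking keeps the bind-parameter count of each statement bounded.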
// the secret permission is split into multiple subjects (secrets, folders, imports and dynamic-secrets),
// so we find all privileges that still use the old shape and remap them as needed
const identityPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select("*");
const updatedIdentityPrivilegesDocs = identityPrivileges
.filter((i) => {
const permissionString = JSON.stringify(i.permissions || []);
return (
!permissionString.includes(ProjectPermissionSub.SecretImports) &&
!permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
!permissionString.includes(ProjectPermissionSub.SecretFolders)
);
})
.map((el) => ({
...el,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore this is valid ts
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
}));
if (updatedIdentityPrivilegesDocs.length) {
for (let i = 0; i < updatedIdentityPrivilegesDocs.length; i += CHUNK_SIZE) {
const chunk = updatedIdentityPrivilegesDocs.slice(i, i + CHUNK_SIZE);
await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge();
}
}
const userPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select("*");
const updatedUserPrivilegeDocs = userPrivileges
.filter((i) => {
const permissionString = JSON.stringify(i.permissions || []);
return (
!permissionString.includes(ProjectPermissionSub.SecretImports) &&
!permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
!permissionString.includes(ProjectPermissionSub.SecretFolders)
);
})
.map((el) => ({
...el,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore this is valid ts
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
}));
if (updatedUserPrivilegeDocs.length) {
for (let i = 0; i < updatedUserPrivilegeDocs.length; i += CHUNK_SIZE) {
const chunk = updatedUserPrivilegeDocs.slice(i, i + CHUNK_SIZE);
await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge();
}
}
}
}
export async function down(knex: Knex): Promise<void> {
const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
if (hasVersion) {
await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
t.dropColumn("version");
});
// permission change can be ignored
}
}

@ -1,78 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 10_000;
export async function up(knex: Knex): Promise<void> {
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
if (!hasAuthMethodColumnAccessToken) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.string("authMethod").nullable();
});
// first we remove access tokens whose identity has no auth method configured (i.e. unused identities)
// ! We delete all access tokens where the identity has no auth method set!
// ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
await knex(TableName.IdentityAccessToken)
.leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.whereNull(`${TableName.Identity}.authMethod`)
.delete();
let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
.whereNull("authMethod")
.limit(BATCH_SIZE)
.select("id");
let totalUpdated = 0;
do {
const batchIds = nullableAccessTokens.map((token) => token.id);
// ! Update the auth method column in batches for the current batch
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityAccessToken)
.whereIn("id", batchIds)
.update({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore because generate schema happens after this
authMethod: knex(TableName.Identity)
.select("authMethod")
.whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
.whereNotNull("authMethod")
.first()
});
// eslint-disable-next-line no-await-in-loop
nullableAccessTokens = await knex(TableName.IdentityAccessToken)
.whereNull("authMethod")
.limit(BATCH_SIZE)
.select("id");
totalUpdated += batchIds.length;
console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
} while (nullableAccessTokens.length > 0);
// Finally we set authMethod to notNullable after populating the column.
// If any rows were left unpopulated this alter will fail, which acts as a safety check.
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.string("authMethod").notNullable().alter();
});
}
// ! We aren't dropping the authMethod column from the Identity table itself, because we want to be able to roll back easily for the time being.
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function down(knex: Knex): Promise<void> {
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
if (hasAuthMethodColumnAccessToken) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.dropColumn("authMethod");
});
}
}
const config = { transaction: false };
export { config };
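// Exporting config with transaction: false tells knex not to wrap this migration in a
// single transaction, letting each batched UPDATE above commit independently.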

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 255).alter();
});
}
}

@ -1,32 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
// add external group to org role mapping table
if (!(await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping))) {
await knex.schema.createTable(TableName.ExternalGroupOrgRoleMapping, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("groupName").notNullable();
t.index("groupName");
t.string("role").notNullable();
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["orgId", "groupName"]);
});
await createOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping)) {
await dropOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
await knex.schema.dropTable(TableName.ExternalGroupOrgRoleMapping);
}
}

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "enforceMfa"))) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.boolean("enforceMfa").defaultTo(false).notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "enforceMfa")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("enforceMfa");
});
}
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
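// Postgres has no ALTER for a foreign key's ON DELETE action, so the constraint is
// dropped and recreated with CASCADE semantics.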
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
}

@ -1,28 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ProjectTemplates))) {
await knex.schema.createTable(TableName.ProjectTemplates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description").nullable();
t.jsonb("roles").notNullable();
t.jsonb("environments").notNullable();
t.uuid("orgId").notNullable().references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectTemplates);
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.ProjectTemplates)) {
await dropOnUpdateTrigger(knex, TableName.ProjectTemplates);
await knex.schema.dropTable(TableName.ProjectTemplates);
}
}

@ -1,35 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
TableName.CertificateTemplateEstConfig,
"disableBootstrapCertValidation"
);
const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain");
await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
if (!hasDisableBootstrapCertValidationCol) {
t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable();
}
if (hasCaChainCol) {
t.binary("encryptedCaChain").nullable().alter();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
TableName.CertificateTemplateEstConfig,
"disableBootstrapCertValidation"
);
await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
if (hasDisableBootstrapCertValidationCol) {
t.dropColumn("disableBootstrapCertValidation");
}
});
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
}

@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
if (!hasTimestampsCol) t.timestamps(true, true, true);
});
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
if (hasTimestampsCol) t.dropTimestamps(true);
});
}

@ -1,54 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
await knex.schema.createTable(TableName.TotpConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.boolean("isVerified").defaultTo(false).notNullable();
t.binary("encryptedRecoveryCodes").notNullable();
t.binary("encryptedSecret").notNullable();
t.timestamps(true, true, true);
t.unique("userId");
});
await createOnUpdateTrigger(knex, TableName.TotpConfig);
}
const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (!doesOrgMfaMethodColExist) {
t.string("selectedMfaMethod");
}
});
const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Users, (t) => {
if (!doesUserSelectedMfaMethodColExist) {
t.string("selectedMfaMethod");
}
});
}

export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.TotpConfig);
await knex.schema.dropTableIfExists(TableName.TotpConfig);
const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (doesOrgMfaMethodColExist) {
t.dropColumn("selectedMfaMethod");
}
});
const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Users, (t) => {
if (doesUserSelectedMfaMethodColExist) {
t.dropColumn("selectedMfaMethod");
}
});
}
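createOnUpdateTrigger and dropOnUpdateTrigger are project utilities from ../utils whose implementation is not shown in this diff. On Postgres, such a helper typically installs a BEFORE UPDATE trigger that refreshes updatedAt — a sketch under that assumption, where on_update_timestamp() is presumed to be a function created by an earlier migration:

// Assumption: what an on-update trigger helper might look like; the real
// createOnUpdateTrigger in ../utils may differ.
export async function createOnUpdateTrigger(knex: Knex, tableName: string): Promise<void> {
  await knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW
    EXECUTE PROCEDURE on_update_timestamp();
  `);
}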

@@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("description");
});
}
}

export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("description");
});
}
}
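The hasColumn guard keeps the migration idempotent: if a deploy fails partway and up() runs again, the column is not added twice. The guard pattern in its smallest form (sketch; table and column names are illustrative):

// Sketch: only add the column when it does not already exist.
if (!(await knex.schema.hasColumn("project", "description"))) {
  await knex.schema.alterTable("project", (t) => {
    t.string("description");
  });
}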

@@ -1,20 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
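// Destructive step: rows with a NULL value cannot satisfy the NOT NULL constraint below, so they are removed first.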
await knex(TableName.IdentityMetadata).whereNull("value").delete();
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).notNullable().alter();
});
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).alter();
});
}
}
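Two things worth noting: up() is destructive (rows whose value is NULL are deleted and cannot be restored by down()), and down() relaxes the constraint simply by calling .alter() without .notNullable(), which redefines the column as nullable. If deleting rows were unacceptable, a backfill could be used instead — a sketch of that alternative, not what this migration does:

// Alternative sketch: backfill NULLs with an empty string, then tighten the constraint.
await knex(TableName.IdentityMetadata).whereNull("value").update({ value: "" });
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
  t.string("value", 1020).notNullable().alter();
});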

@@ -1,59 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (!hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
if (!hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
});
}

export async function down(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
if (hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
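// Restore the original constraint with ON DELETE CASCADE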
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
});
}
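Note that ON DELETE SET NULL only works if privilegeId is a nullable column, which this migration assumes. The new deletedAt columns implement soft deletion: policies are marked deleted rather than removed, so reads must filter them out. A typical query under that scheme (sketch):

// Sketch: exclude soft-deleted policies when reading.
const activePolicies = await knex(TableName.AccessApprovalPolicy)
  .whereNull("deletedAt")
  .select("*");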

Some files were not shown because too many files have changed in this diff.