Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-22 13:29:55 +00:00)

Compare commits: 760 commits (misc/updat ... help-fix-f)
Author | SHA1 | Date
---|---|---
(Commit list: only abbreviated SHA1 values, from 26bed22b94 through 5641d334cd, were captured in this view; the author, date, and message columns are empty.)
@@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=

#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

53  .github/workflows/check-non-re2-regex.yml  vendored  Normal file

@@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex
on:
  pull_request:
    types: [opened, synchronize]

jobs:
  check-non-re2-regex:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get diff of backend/*
        run: |
          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt

      - name: Scan backend diff for non-RE2 regex
        run: |
          # Extract only added lines (excluding file headers)
          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt

          if [ ! -s added_lines.txt ]; then
            echo "✅ No added lines in backend/ to check for regex usage."
            exit 0
          fi

          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'

          # Find all added lines that contain regex patterns
          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
              echo ""
              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
              echo ""
              echo "Offending lines:"
              cat actual_violations.txt
              exit 1
            else
              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
            fi
          else
            echo "✅ No regex patterns found in added/modified backend lines."
          fi

      - name: Cleanup temporary files
        if: always()
        run: |
          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
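
Editor's note: the replacement this workflow asks for looks roughly like the sketch below. This is a minimal illustration assuming the `re2` npm package, which exposes an RE2 class with a RegExp-compatible API; the exact import style used in the Infisical backend may differ.

import RE2 from "re2";

// Flagged by the workflow: a raw regex literal or `new RegExp(...)`.
// const emailPattern = /^[\w.+-]+@[\w-]+\.[\w.]+$/;

// Accepted form: the same pattern compiled through RE2.
const emailPattern = new RE2("^[\\w.+-]+@[\\w-]+\\.[\\w.]+$");

console.log(emailPattern.test("user@example.com")); // true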
@@ -28,3 +28,16 @@ frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow
docs/cli/commands/user.mdx:generic-api-key:51
frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:76
docs/integrations/app-connections/hashicorp-vault.mdx:generic-api-key:188
cli/detect/config/gitleaks.toml:gcp-api-key:567
cli/detect/config/gitleaks.toml:gcp-api-key:569
cli/detect/config/gitleaks.toml:gcp-api-key:570
cli/detect/config/gitleaks.toml:gcp-api-key:572
cli/detect/config/gitleaks.toml:gcp-api-key:574
cli/detect/config/gitleaks.toml:gcp-api-key:575
cli/detect/config/gitleaks.toml:gcp-api-key:576
cli/detect/config/gitleaks.toml:gcp-api-key:577
cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582
backend/src/services/smtp/smtp-service.ts:generic-api-key:79

@@ -15,8 +15,8 @@ import { mockSmtpServer } from "./mocks/smtp";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { buildRedisFromConfig } from "@app/lib/config/redis";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -30,7 +30,7 @@ export default {
      dbRootCert: envConfig.DB_ROOT_CERT
    });

    const redis = new Redis(envConfig.REDIS_URL);
    const redis = buildRedisFromConfig(envConfig);
    await redis.flushdb("SYNC");

    try {
@@ -55,8 +55,8 @@ export default {
    });

    const smtp = mockSmtpServer();
    const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
    const keyStore = keyStoreFactory(envConfig.REDIS_URL);
    const queue = queueServiceFactory(envConfig, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
    const keyStore = keyStoreFactory(envConfig);

    const hsmModule = initializeHsmModule(envConfig);
    hsmModule.initialize();

3818  backend/package-lock.json  generated
File diff suppressed because it is too large
@@ -38,8 +38,8 @@
    "build:frontend": "npm run build --prefix ../frontend",
    "start": "node --enable-source-maps dist/main.mjs",
    "type:check": "tsc --noEmit",
    "lint:fix": "eslint --fix --ext js,ts ./src",
    "lint": "eslint 'src/**/*.ts'",
    "lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
    "lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
    "test:unit": "vitest run -c vitest.unit.config.ts",
    "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
@@ -131,6 +131,7 @@
    "@aws-sdk/client-elasticache": "^3.637.0",
    "@aws-sdk/client-iam": "^3.525.0",
    "@aws-sdk/client-kms": "^3.609.0",
    "@aws-sdk/client-route-53": "^3.810.0",
    "@aws-sdk/client-secrets-manager": "^3.504.0",
    "@aws-sdk/client-sts": "^3.600.0",
    "@casl/ability": "^6.5.0",
@@ -174,6 +175,7 @@
    "@slack/oauth": "^3.0.2",
    "@slack/web-api": "^7.8.0",
    "@ucast/mongo2js": "^1.3.4",
    "acme-client": "^5.4.0",
    "ajv": "^8.12.0",
    "argon2": "^0.31.2",
    "aws-sdk": "^2.1553.0",
@@ -209,6 +211,7 @@
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.8",
    "nodemailer": "^6.9.9",
    "oci-sdk": "^2.108.0",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
    "ora": "^7.0.1",

@@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
  }
};

const bigIntegerColumns: Record<string, string[]> = {
  "folder_commits": ["commitId"]
};


const main = async () => {
  const tables = (
    await db("information_schema.tables")
@@ -108,6 +113,9 @@ const main = async () => {
      const columnName = columnNames[colNum];
      const colInfo = columns[columnName];
      let ztype = getZodPrimitiveType(colInfo.type);
      if (bigIntegerColumns[tableName]?.includes(columnName)) {
        ztype = "z.coerce.bigint()";
      }
      if (["zodBuffer"].includes(ztype)) {
        zodImportSet.add(ztype);
      }

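
Editor's note on the `z.coerce.bigint()` mapping above: the node Postgres driver typically returns bigint columns (such as the `commitId` created with `bigIncrements` in the folder-commit migration later in this diff) as strings, so the generated schema coerces them. A minimal zod sketch of that behavior, with made-up values for illustration:

import { z } from "zod";

// Coerces the string form a bigint column comes back as into a JavaScript bigint.
const commitIdSchema = z.coerce.bigint();

console.log(commitIdSchema.parse("9007199254740993")); // 9007199254740993n
console.log(commitIdSchema.parse(42)); // 42n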
19  backend/src/@types/fastify.d.ts  vendored

@@ -26,6 +26,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@@ -37,6 +38,7 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
@@ -53,10 +55,12 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TInternalCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/internal/internal-certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
@@ -68,6 +72,7 @@ import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { TIdentityOciAuthServiceFactory } from "@app/services/identity-oci-auth/identity-oci-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
@@ -80,6 +85,8 @@ import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { TPkiTemplatesServiceFactory } from "@app/services/pki-templates/pki-templates-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -108,11 +115,16 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
    orgId?: string;
    identityAuthInfo?: {
      identityId: string;
      oidc?: {
        claims: Record<string, string>;
      };
      kubernetes?: {
        namespace: string;
        name: string;
      };
    };
    identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
    assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -208,6 +220,7 @@ declare module "fastify" {
    identityGcpAuth: TIdentityGcpAuthServiceFactory;
    identityAwsAuth: TIdentityAwsAuthServiceFactory;
    identityAzureAuth: TIdentityAzureAuthServiceFactory;
    identityOciAuth: TIdentityOciAuthServiceFactory;
    identityOidcAuth: TIdentityOidcAuthServiceFactory;
    identityJwtAuth: TIdentityJwtAuthServiceFactory;
    identityLdapAuth: TIdentityLdapAuthServiceFactory;
@@ -232,6 +245,7 @@ declare module "fastify" {
    certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
    certificateEst: TCertificateEstServiceFactory;
    pkiCollection: TPkiCollectionServiceFactory;
    pkiSubscriber: TPkiSubscriberServiceFactory;
    secretScanning: TSecretScanningServiceFactory;
    license: TLicenseServiceFactory;
    trustedIp: TTrustedIpServiceFactory;
@@ -264,6 +278,11 @@ declare module "fastify" {
    microsoftTeams: TMicrosoftTeamsServiceFactory;
    assumePrivileges: TAssumePrivilegeServiceFactory;
    githubOrgSync: TGithubOrgSyncServiceFactory;
    folderCommit: TFolderCommitServiceFactory;
    pit: TPitServiceFactory;
    secretScanningV2: TSecretScanningV2ServiceFactory;
    internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
    pkiTemplate: TPkiTemplatesServiceFactory;
  };
  // this is exclusive use for middlewares in which we need to inject data
  // everywhere else access using service layer

137  backend/src/@types/knex.d.ts  vendored

@@ -6,6 +6,9 @@ import {
  TAccessApprovalPoliciesApprovers,
  TAccessApprovalPoliciesApproversInsert,
  TAccessApprovalPoliciesApproversUpdate,
  TAccessApprovalPoliciesBypassers,
  TAccessApprovalPoliciesBypassersInsert,
  TAccessApprovalPoliciesBypassersUpdate,
  TAccessApprovalPoliciesInsert,
  TAccessApprovalPoliciesUpdate,
  TAccessApprovalRequests,
@@ -68,12 +71,33 @@ import {
  TDynamicSecrets,
  TDynamicSecretsInsert,
  TDynamicSecretsUpdate,
  TExternalCertificateAuthorities,
  TExternalCertificateAuthoritiesInsert,
  TExternalCertificateAuthoritiesUpdate,
  TExternalGroupOrgRoleMappings,
  TExternalGroupOrgRoleMappingsInsert,
  TExternalGroupOrgRoleMappingsUpdate,
  TExternalKms,
  TExternalKmsInsert,
  TExternalKmsUpdate,
  TFolderCheckpointResources,
  TFolderCheckpointResourcesInsert,
  TFolderCheckpointResourcesUpdate,
  TFolderCheckpoints,
  TFolderCheckpointsInsert,
  TFolderCheckpointsUpdate,
  TFolderCommitChanges,
  TFolderCommitChangesInsert,
  TFolderCommitChangesUpdate,
  TFolderCommits,
  TFolderCommitsInsert,
  TFolderCommitsUpdate,
  TFolderTreeCheckpointResources,
  TFolderTreeCheckpointResourcesInsert,
  TFolderTreeCheckpointResourcesUpdate,
  TFolderTreeCheckpoints,
  TFolderTreeCheckpointsInsert,
  TFolderTreeCheckpointsUpdate,
  TGateways,
  TGatewaysInsert,
  TGatewaysUpdate,
@@ -119,6 +143,9 @@ import {
  TIdentityMetadata,
  TIdentityMetadataInsert,
  TIdentityMetadataUpdate,
  TIdentityOciAuths,
  TIdentityOciAuthsInsert,
  TIdentityOciAuthsUpdate,
  TIdentityOidcAuths,
  TIdentityOidcAuthsInsert,
  TIdentityOidcAuthsUpdate,
@@ -152,6 +179,9 @@ import {
  TIntegrations,
  TIntegrationsInsert,
  TIntegrationsUpdate,
  TInternalCertificateAuthorities,
  TInternalCertificateAuthoritiesInsert,
  TInternalCertificateAuthoritiesUpdate,
  TInternalKms,
  TInternalKmsInsert,
  TInternalKmsUpdate,
@@ -209,6 +239,9 @@ import {
  TPkiCollections,
  TPkiCollectionsInsert,
  TPkiCollectionsUpdate,
  TPkiSubscribers,
  TPkiSubscribersInsert,
  TPkiSubscribersUpdate,
  TProjectBots,
  TProjectBotsInsert,
  TProjectBotsUpdate,
@@ -264,6 +297,9 @@ import {
  TSecretApprovalPoliciesApprovers,
  TSecretApprovalPoliciesApproversInsert,
  TSecretApprovalPoliciesApproversUpdate,
  TSecretApprovalPoliciesBypassers,
  TSecretApprovalPoliciesBypassersInsert,
  TSecretApprovalPoliciesBypassersUpdate,
  TSecretApprovalPoliciesInsert,
  TSecretApprovalPoliciesUpdate,
  TSecretApprovalRequests,
@@ -318,9 +354,24 @@ import {
  TSecretRotationV2SecretMappingsInsert,
  TSecretRotationV2SecretMappingsUpdate,
  TSecrets,
  TSecretScanningConfigs,
  TSecretScanningConfigsInsert,
  TSecretScanningConfigsUpdate,
  TSecretScanningDataSources,
  TSecretScanningDataSourcesInsert,
  TSecretScanningDataSourcesUpdate,
  TSecretScanningFindings,
  TSecretScanningFindingsInsert,
  TSecretScanningFindingsUpdate,
  TSecretScanningGitRisks,
  TSecretScanningGitRisksInsert,
  TSecretScanningGitRisksUpdate,
  TSecretScanningResources,
  TSecretScanningResourcesInsert,
  TSecretScanningResourcesUpdate,
  TSecretScanningScans,
  TSecretScanningScansInsert,
  TSecretScanningScansUpdate,
  TSecretSharing,
  TSecretSharingInsert,
  TSecretSharingUpdate,
@@ -532,6 +583,16 @@ declare module "knex/types/tables" {
      TCertificateAuthorityCrlInsert,
      TCertificateAuthorityCrlUpdate
    >;
    [TableName.InternalCertificateAuthority]: KnexOriginal.CompositeTableType<
      TInternalCertificateAuthorities,
      TInternalCertificateAuthoritiesInsert,
      TInternalCertificateAuthoritiesUpdate
    >;
    [TableName.ExternalCertificateAuthority]: KnexOriginal.CompositeTableType<
      TExternalCertificateAuthorities,
      TExternalCertificateAuthoritiesInsert,
      TExternalCertificateAuthoritiesUpdate
    >;
    [TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
    [TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
      TCertificateTemplates,
@@ -564,6 +625,11 @@ declare module "knex/types/tables" {
      TPkiCollectionItemsInsert,
      TPkiCollectionItemsUpdate
    >;
    [TableName.PkiSubscriber]: KnexOriginal.CompositeTableType<
      TPkiSubscribers,
      TPkiSubscribersInsert,
      TPkiSubscribersUpdate
    >;
    [TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
      TUserGroupMembership,
      TUserGroupMembershipInsert,
@@ -730,6 +796,11 @@ declare module "knex/types/tables" {
      TIdentityAzureAuthsInsert,
      TIdentityAzureAuthsUpdate
    >;
    [TableName.IdentityOciAuth]: KnexOriginal.CompositeTableType<
      TIdentityOciAuths,
      TIdentityOciAuthsInsert,
      TIdentityOciAuthsUpdate
    >;
    [TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
      TIdentityOidcAuths,
      TIdentityOidcAuthsInsert,
@@ -788,6 +859,12 @@ declare module "knex/types/tables" {
      TAccessApprovalPoliciesApproversUpdate
    >;

    [TableName.AccessApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
      TAccessApprovalPoliciesBypassers,
      TAccessApprovalPoliciesBypassersInsert,
      TAccessApprovalPoliciesBypassersUpdate
    >;

    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
      TAccessApprovalRequests,
      TAccessApprovalRequestsInsert,
@@ -811,6 +888,11 @@ declare module "knex/types/tables" {
      TSecretApprovalPoliciesApproversInsert,
      TSecretApprovalPoliciesApproversUpdate
    >;
    [TableName.SecretApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
      TSecretApprovalPoliciesBypassers,
      TSecretApprovalPoliciesBypassersInsert,
      TSecretApprovalPoliciesBypassersUpdate
    >;
    [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
      TSecretApprovalRequests,
      TSecretApprovalRequestsInsert,
@@ -1058,5 +1140,60 @@ declare module "knex/types/tables" {
      TGithubOrgSyncConfigsInsert,
      TGithubOrgSyncConfigsUpdate
    >;
    [TableName.FolderCommit]: KnexOriginal.CompositeTableType<
      TFolderCommits,
      TFolderCommitsInsert,
      TFolderCommitsUpdate
    >;
    [TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
      TFolderCommitChanges,
      TFolderCommitChangesInsert,
      TFolderCommitChangesUpdate
    >;
    [TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderCheckpoints,
      TFolderCheckpointsInsert,
      TFolderCheckpointsUpdate
    >;
    [TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderCheckpointResources,
      TFolderCheckpointResourcesInsert,
      TFolderCheckpointResourcesUpdate
    >;
    [TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpoints,
      TFolderTreeCheckpointsInsert,
      TFolderTreeCheckpointsUpdate
    >;
    [TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpointResources,
      TFolderTreeCheckpointResourcesInsert,
      TFolderTreeCheckpointResourcesUpdate
    >;
    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
      TSecretScanningDataSources,
      TSecretScanningDataSourcesInsert,
      TSecretScanningDataSourcesUpdate
    >;
    [TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
      TSecretScanningResources,
      TSecretScanningResourcesInsert,
      TSecretScanningResourcesUpdate
    >;
    [TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
      TSecretScanningScans,
      TSecretScanningScansInsert,
      TSecretScanningScansUpdate
    >;
    [TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
      TSecretScanningFindings,
      TSecretScanningFindingsInsert,
      TSecretScanningFindingsUpdate
    >;
    [TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
      TSecretScanningConfigs,
      TSecretScanningConfigsInsert,
      TSecretScanningConfigsUpdate
    >;
  }
}

@@ -0,0 +1,44 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Certificate)) {
    const hasProjectIdColumn = await knex.schema.hasColumn(TableName.Certificate, "projectId");
    if (!hasProjectIdColumn) {
      await knex.schema.alterTable(TableName.Certificate, (t) => {
        t.string("projectId", 36).nullable();
        t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      });

      await knex.raw(`
        UPDATE "${TableName.Certificate}" cert
        SET "projectId" = ca."projectId"
        FROM "${TableName.CertificateAuthority}" ca
        WHERE cert."caId" = ca.id
      `);

      await knex.schema.alterTable(TableName.Certificate, (t) => {
        t.string("projectId").notNullable().alter();
      });
    }

    await knex.schema.alterTable(TableName.Certificate, (t) => {
      t.uuid("caId").nullable().alter();
      t.uuid("caCertId").nullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Certificate)) {
    if (await knex.schema.hasColumn(TableName.Certificate, "projectId")) {
      await knex.schema.alterTable(TableName.Certificate, (t) => {
        t.dropForeign("projectId");
        t.dropColumn("projectId");
      });
    }
  }

  // Altering back to notNullable for caId and caCertId will fail
}

@@ -0,0 +1,47 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEmail = await knex.schema.hasColumn(TableName.Users, "email");
  const hasUsername = await knex.schema.hasColumn(TableName.Users, "username");
  if (hasEmail) {
    await knex(TableName.Users)
      .where({ isGhost: false })
      .update({
        // @ts-expect-error email assume string this is expected
        email: knex.raw("lower(email)")
      });
  }
  if (hasUsername) {
    await knex.schema.raw(`
      CREATE INDEX IF NOT EXISTS ${TableName.Users}_lower_username_idx
      ON ${TableName.Users} (LOWER(username))
    `);

    const duplicatesSubquery = knex(TableName.Users)
      .select(knex.raw("lower(username) as lowercase_username"))
      .groupBy("lowercase_username")
      .having(knex.raw("count(*)"), ">", 1);

    // Update usernames to lowercase where they won't create duplicates
    await knex(TableName.Users)
      .where({ isGhost: false })
      .whereRaw("username <> lower(username)") // Only update if not already lowercase
      // @ts-expect-error username assume string this is expected
      .whereNotIn(knex.raw("lower(username)"), duplicatesSubquery)
      .update({
        // @ts-expect-error username assume string this is expected
        username: knex.raw("lower(username)")
      });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasUsername = await knex.schema.hasColumn(TableName.Users, "username");
  if (hasUsername) {
    await knex.schema.raw(`
      DROP INDEX IF EXISTS ${TableName.Users}_lower_username_idx
    `);
  }
}

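
Editor's note: the LOWER(username) expression index created above is what keeps case-insensitive username lookups indexed after this normalization pass. A minimal knex sketch of such a lookup follows; the table name string, connection config, and helper function are hypothetical and only for illustration.

import knex from "knex";

// Hypothetical connection; real code would reuse the application's existing db instance.
const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// Phrased on LOWER(username) so Postgres can satisfy it from the expression index.
async function findUserByUsername(username: string) {
  return db("users").whereRaw("LOWER(username) = ?", [username.toLowerCase()]).first();
}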
@@ -0,0 +1,166 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (!hasFolderCommitTable) {
    await knex.schema.createTable(TableName.FolderCommit, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.bigIncrements("commitId");
      t.jsonb("actorMetadata").notNullable();
      t.string("actorType").notNullable();
      t.string("message");
      t.uuid("folderId").notNullable();
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderId");
      t.index("envId");
    });
  }

  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  if (!hasFolderCommitChangesTable) {
    await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.string("changeType").notNullable();
      t.boolean("isUpdate").notNullable().defaultTo(false);
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
  if (!hasFolderCheckpointTable) {
    await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  if (!hasFolderCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCheckpointId").notNullable();
      t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCheckpointId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  if (!hasFolderTreeCheckpointTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  if (!hasFolderTreeCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderTreeCheckpointId").notNullable();
      t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
      t.uuid("folderId").notNullable();
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderTreeCheckpointId");
      t.index("folderId");
      t.index("folderCommitId");
    });
  }

  if (!hasFolderCommitTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommit);
  }

  if (!hasFolderCommitChangesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
  }

  if (!hasFolderCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
  }

  if (!hasFolderCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
  }

  if (!hasFolderTreeCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
  }

  if (!hasFolderTreeCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);

  if (hasFolderTreeCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
  }

  if (hasFolderCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
  }

  if (hasFolderTreeCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
  }

  if (hasFolderCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
  }

  if (hasFolderCommitChangesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
    await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
  }

  if (hasFolderCommitTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommit);
    await knex.schema.dropTableIfExists(TableName.FolderCommit);
  }
}

46  backend/src/db/migrations/20250508160957_pki-subscriber.ts  Normal file

@@ -0,0 +1,46 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.PkiSubscriber))) {
    await knex.schema.createTable(TableName.PkiSubscriber, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("caId").nullable();
      t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("SET NULL");
      t.string("name").notNullable();
      t.string("commonName").notNullable();
      t.specificType("subjectAlternativeNames", "text[]").notNullable();
      t.string("ttl").notNullable();
      t.specificType("keyUsages", "text[]").notNullable();
      t.specificType("extendedKeyUsages", "text[]").notNullable();
      t.string("status").notNullable(); // active / disabled
      t.unique(["projectId", "name"]);
    });
    await createOnUpdateTrigger(knex, TableName.PkiSubscriber);
  }

  const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
  if (!hasSubscriberCol) {
    await knex.schema.alterTable(TableName.Certificate, (t) => {
      t.uuid("pkiSubscriberId").nullable();
      t.foreign("pkiSubscriberId").references("id").inTable(TableName.PkiSubscriber).onDelete("SET NULL");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
  if (hasSubscriberCol) {
    await knex.schema.alterTable(TableName.Certificate, (t) => {
      t.dropColumn("pkiSubscriberId");
    });
  }

  await knex.schema.dropTableIfExists(TableName.PkiSubscriber);
  await dropOnUpdateTrigger(knex, TableName.PkiSubscriber);
}

@@ -0,0 +1,30 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityOciAuth))) {
    await knex.schema.createTable(TableName.IdentityOciAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("type").notNullable();

      t.string("tenancyOcid").notNullable();
      t.string("allowedUsernames").nullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityOciAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityOciAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityOciAuth);
}

@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasGatewayIdColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayId");

  if (!hasGatewayIdColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.uuid("gatewayId").nullable();
      table.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("SET NULL");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasGatewayIdColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayId");

  if (hasGatewayIdColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.dropForeign("gatewayId");
      table.dropColumn("gatewayId");
    });
  }
}

@@ -0,0 +1,110 @@
|
||||
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";

// Note(daniel): We aren't dropping tables or columns in this migration so we can easily roll back if needed.
// In the future we need to drop the projectGatewayId on the dynamic secrets table, and drop the project_gateways table entirely.

const BATCH_SIZE = 500;

export async function up(knex: Knex): Promise<void> {
  // eslint-disable-next-line no-param-reassign
  knex.replicaNode = () => {
    return knex;
  };

  if (!(await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId"))) {
    await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
      table.uuid("gatewayId").nullable();
      table.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("SET NULL");

      table.index("gatewayId");
    });

    const existingDynamicSecretsWithProjectGatewayId = await knex(TableName.DynamicSecret)
      .select(selectAllTableCols(TableName.DynamicSecret))
      .whereNotNull(`${TableName.DynamicSecret}.projectGatewayId`)
      .join(TableName.ProjectGateway, `${TableName.ProjectGateway}.id`, `${TableName.DynamicSecret}.projectGatewayId`)
      .whereNotNull(`${TableName.ProjectGateway}.gatewayId`)
      .select(
        knex.ref("projectId").withSchema(TableName.ProjectGateway).as("projectId"),
        knex.ref("gatewayId").withSchema(TableName.ProjectGateway).as("projectGatewayGatewayId")
      );

    initLogger();
    const envConfig = getMigrationEnvConfig();
    const keyStore = inMemoryKeyStore();
    const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

    const updatedDynamicSecrets = await Promise.all(
      existingDynamicSecretsWithProjectGatewayId.map(async (existingDynamicSecret) => {
        if (!existingDynamicSecret.projectGatewayGatewayId) {
          const result = {
            ...existingDynamicSecret,
            gatewayId: null
          };

          const { projectId, projectGatewayGatewayId, ...rest } = result;
          return rest;
        }

        const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: existingDynamicSecret.projectId
        });
        const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: existingDynamicSecret.projectId
        });

        let decryptedStoredInput = JSON.parse(
          secretManagerDecryptor({ cipherTextBlob: Buffer.from(existingDynamicSecret.encryptedInput) }).toString()
        ) as object;

        // We're not removing the existing projectGatewayId from the input so we can easily roll back without having to re-encrypt the input
        decryptedStoredInput = {
          ...decryptedStoredInput,
          gatewayId: existingDynamicSecret.projectGatewayGatewayId
        };

        const encryptedInput = secretManagerEncryptor({
          plainText: Buffer.from(JSON.stringify(decryptedStoredInput))
        }).cipherTextBlob;

        const result = {
          ...existingDynamicSecret,
          encryptedInput,
          gatewayId: existingDynamicSecret.projectGatewayGatewayId
        };

        const { projectId, projectGatewayGatewayId, ...rest } = result;
        return rest;
      })
    );

    for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.DynamicSecret)
        .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
        .onConflict("id")
        .merge();
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  // no re-encryption needed as we keep the old projectGatewayId in the input
  if (await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId")) {
    await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
      table.dropForeign("gatewayId");
      table.dropColumn("gatewayId");
    });
  }
}
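
The note at the top of this migration defers the destructive cleanup to a later changeset. As a rough sketch only (assuming the same TableName identifiers and that the gatewayId backfill has been verified in production), that follow-up migration could look like the code below; it is not part of this diff.

import { Knex } from "knex";

import { TableName } from "../schemas";

// Hypothetical follow-up cleanup sketched from the note above; not part of this changeset.
export async function up(knex: Knex): Promise<void> {
  // Drop the legacy column first so the project_gateways table no longer has dependents.
  if (await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId")) {
    await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
      table.dropColumn("projectGatewayId");
    });
  }
  await knex.schema.dropTableIfExists(TableName.ProjectGateway);
}

export async function down(): Promise<void> {
  // Intentionally empty: restoring the dropped column and table would require a full backfill.
}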

@@ -0,0 +1,53 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const columns = await knex.table(TableName.Organization).columnInfo();

  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!columns.secretsProductEnabled) {
      t.boolean("secretsProductEnabled").defaultTo(true);
    }
    if (!columns.pkiProductEnabled) {
      t.boolean("pkiProductEnabled").defaultTo(true);
    }
    if (!columns.kmsProductEnabled) {
      t.boolean("kmsProductEnabled").defaultTo(true);
    }
    if (!columns.sshProductEnabled) {
      t.boolean("sshProductEnabled").defaultTo(true);
    }
    if (!columns.scannerProductEnabled) {
      t.boolean("scannerProductEnabled").defaultTo(true);
    }
    if (!columns.shareSecretsProductEnabled) {
      t.boolean("shareSecretsProductEnabled").defaultTo(true);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const columns = await knex.table(TableName.Organization).columnInfo();

  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (columns.secretsProductEnabled) {
      t.dropColumn("secretsProductEnabled");
    }
    if (columns.pkiProductEnabled) {
      t.dropColumn("pkiProductEnabled");
    }
    if (columns.kmsProductEnabled) {
      t.dropColumn("kmsProductEnabled");
    }
    if (columns.sshProductEnabled) {
      t.dropColumn("sshProductEnabled");
    }
    if (columns.scannerProductEnabled) {
      t.dropColumn("scannerProductEnabled");
    }
    if (columns.shareSecretsProductEnabled) {
      t.dropColumn("shareSecretsProductEnabled");
    }
  });
}

@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasSecretSharingColumn = await knex.schema.hasColumn(TableName.Project, "secretSharing");
  if (!hasSecretSharingColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("secretSharing").notNullable().defaultTo(true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasSecretSharingColumn = await knex.schema.hasColumn(TableName.Project, "secretSharing");
  if (hasSecretSharingColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("secretSharing");
    });
  }
}

@@ -0,0 +1,35 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasLifetimeColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretLifetime");
  const hasViewLimitColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretViewLimit");

  if (!hasLifetimeColumn || !hasViewLimitColumn) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      if (!hasLifetimeColumn) {
        t.integer("maxSharedSecretLifetime").nullable().defaultTo(2592000); // 30 days in seconds
      }
      if (!hasViewLimitColumn) {
        t.integer("maxSharedSecretViewLimit").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasLifetimeColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretLifetime");
  const hasViewLimitColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretViewLimit");

  if (hasLifetimeColumn || hasViewLimitColumn) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      if (hasLifetimeColumn) {
        t.dropColumn("maxSharedSecretLifetime");
      }
      if (hasViewLimitColumn) {
        t.dropColumn("maxSharedSecretViewLimit");
      }
    });
  }
}

@@ -0,0 +1,43 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");
    const hasAuthorizedEmails = await knex.schema.hasColumn(TableName.SecretSharing, "authorizedEmails");

    if (!hasEncryptedSalt || !hasAuthorizedEmails) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        // These two columns are only needed when secrets are shared with a specific list of emails

        if (!hasEncryptedSalt) {
          t.binary("encryptedSalt").nullable();
        }

        if (!hasAuthorizedEmails) {
          t.json("authorizedEmails").nullable();
        }
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");
    const hasAuthorizedEmails = await knex.schema.hasColumn(TableName.SecretSharing, "authorizedEmails");

    if (hasEncryptedSalt || hasAuthorizedEmails) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        if (hasEncryptedSalt) {
          t.dropColumn("encryptedSalt");
        }

        if (hasAuthorizedEmails) {
          t.dropColumn("authorizedEmails");
        }
      });
    }
  }
}

backend/src/db/migrations/20250517002225_secret-scanning-v2.ts
@@ -0,0 +1,107 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
    await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
      t.string("name", 48).notNullable();
      t.string("description");
      t.string("type").notNullable();
      t.jsonb("config").notNullable();
      t.binary("encryptedCredentials"); // webhook credentials, etc.
      t.uuid("connectionId");
      t.boolean("isAutoScanEnabled").defaultTo(true);
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.boolean("isDisconnected").notNullable().defaultTo(false);
      t.unique(["projectId", "name"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
    await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").notNullable();
      t.string("name").notNullable();
      t.string("type").notNullable();
      t.uuid("dataSourceId").notNullable();
      t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.unique(["dataSourceId", "externalId"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
    await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
      t.string("statusMessage", 1024);
      t.string("type").notNullable();
      t.uuid("resourceId").notNullable();
      t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
      t.timestamp("createdAt").defaultTo(knex.fn.now());
    });
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
    await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("dataSourceName").notNullable();
      t.string("dataSourceType").notNullable();
      t.string("resourceName").notNullable();
      t.string("resourceType").notNullable();
      t.string("rule").notNullable();
      t.string("severity").notNullable();
      t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
      t.string("remarks");
      t.string("fingerprint").notNullable();
      t.jsonb("details").notNullable();
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("scanId");
      t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
      t.timestamps(true, true, true);
      t.unique(["projectId", "fingerprint"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
    await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("projectId").notNullable().unique();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("content", 5000);
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);

  await knex.schema.dropTableIfExists(TableName.SecretScanningScan);

  await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}

@@ -0,0 +1,22 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.string("name", 64).notNullable().alter();
  });
  await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
    t.string("name", 64).notNullable().alter();
  });
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.string("name", 64).notNullable().alter();
  });
  await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
    t.string("name", 64).notNullable().alter();
  });
}

export async function down(): Promise<void> {
  // No down migration or it will error
}

backend/src/db/migrations/20250521110635_add-external-ca-pki.ts
@@ -0,0 +1,205 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasCATable = await knex.schema.hasTable(TableName.CertificateAuthority);
  const hasExternalCATable = await knex.schema.hasTable(TableName.ExternalCertificateAuthority);
  const hasInternalCATable = await knex.schema.hasTable(TableName.InternalCertificateAuthority);

  if (hasCATable && !hasInternalCATable) {
    await knex.schema.createTableLike(TableName.InternalCertificateAuthority, TableName.CertificateAuthority, (t) => {
      t.uuid("caId").nullable();
    });

    // @ts-expect-error intentional: migration
    await knex(TableName.InternalCertificateAuthority).insert(knex(TableName.CertificateAuthority).select("*"));
    await knex(TableName.InternalCertificateAuthority).update("caId", knex.ref("id"));

    await knex.schema.alterTable(TableName.InternalCertificateAuthority, (t) => {
      t.dropColumn("projectId");
      t.dropColumn("requireTemplateForIssuance");
      t.dropColumn("createdAt");
      t.dropColumn("updatedAt");
      t.dropColumn("status");
      t.uuid("parentCaId")
        .nullable()
        .references("id")
        .inTable(TableName.CertificateAuthority)
        .onDelete("CASCADE")
        .alter();
      t.uuid("activeCaCertId").nullable().references("id").inTable(TableName.CertificateAuthorityCert).alter();
      t.uuid("caId").notNullable().references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE").alter();
    });

    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.renameColumn("requireTemplateForIssuance", "enableDirectIssuance");
      t.string("name").nullable();
    });

    // prefill name for existing internal CAs and flip enableDirectIssuance
    const cas = await knex(TableName.CertificateAuthority).select("id", "friendlyName", "enableDirectIssuance");
    await Promise.all(
      cas.map((ca) => {
        const slugifiedName = ca.friendlyName
          ? slugify(`${ca.friendlyName.slice(0, 16)}-${alphaNumericNanoId(8)}`)
          : slugify(alphaNumericNanoId(12));

        return knex(TableName.CertificateAuthority)
          .where({ id: ca.id })
          .update({ name: slugifiedName, enableDirectIssuance: !ca.enableDirectIssuance });
      })
    );

    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.dropColumn("parentCaId");
      t.dropColumn("type");
      t.dropColumn("friendlyName");
      t.dropColumn("organization");
      t.dropColumn("ou");
      t.dropColumn("country");
      t.dropColumn("province");
      t.dropColumn("locality");
      t.dropColumn("commonName");
      t.dropColumn("dn");
      t.dropColumn("serialNumber");
      t.dropColumn("maxPathLength");
      t.dropColumn("keyAlgorithm");
      t.dropColumn("notBefore");
      t.dropColumn("notAfter");
      t.dropColumn("activeCaCertId");
      t.boolean("enableDirectIssuance").notNullable().defaultTo(true).alter();
      t.string("name").notNullable().alter();
      t.unique(["name", "projectId"]);
    });
  }

  if (!hasExternalCATable) {
    await knex.schema.createTable(TableName.ExternalCertificateAuthority, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("type").notNullable();
      t.uuid("appConnectionId").nullable();
      t.foreign("appConnectionId").references("id").inTable(TableName.AppConnection);
      t.uuid("dnsAppConnectionId").nullable();
      t.foreign("dnsAppConnectionId").references("id").inTable(TableName.AppConnection);
      t.uuid("caId").notNullable().references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.binary("credentials");
      t.json("configuration");
    });
  }

  if (await knex.schema.hasTable(TableName.PkiSubscriber)) {
    await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
      t.string("ttl").nullable().alter();

      t.boolean("enableAutoRenewal").notNullable().defaultTo(false);
      t.integer("autoRenewalPeriodInDays");
      t.datetime("lastAutoRenewAt");

      t.string("lastOperationStatus");
      t.text("lastOperationMessage");
      t.dateTime("lastOperationAt");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasCATable = await knex.schema.hasTable(TableName.CertificateAuthority);
  const hasExternalCATable = await knex.schema.hasTable(TableName.ExternalCertificateAuthority);
  const hasInternalCATable = await knex.schema.hasTable(TableName.InternalCertificateAuthority);

  if (hasCATable && hasInternalCATable) {
    // First add all columns as nullable
    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.uuid("parentCaId").nullable().references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.string("type").nullable();
      t.string("friendlyName").nullable();
      t.string("organization").nullable();
      t.string("ou").nullable();
      t.string("country").nullable();
      t.string("province").nullable();
      t.string("locality").nullable();
      t.string("commonName").nullable();
      t.string("dn").nullable();
      t.string("serialNumber").nullable().unique();
      t.integer("maxPathLength").nullable();
      t.string("keyAlgorithm").nullable();
      t.timestamp("notBefore").nullable();
      t.timestamp("notAfter").nullable();
      t.uuid("activeCaCertId").nullable().references("id").inTable(TableName.CertificateAuthorityCert);
      t.renameColumn("enableDirectIssuance", "requireTemplateForIssuance");
      t.dropColumn("name");
    });

    // flip requireTemplateForIssuance for existing internal CAs
    const cas = await knex(TableName.CertificateAuthority).select("id", "requireTemplateForIssuance");
    await Promise.all(
      cas.map((ca) => {
        return (
          knex(TableName.CertificateAuthority)
            .where({ id: ca.id })
            // @ts-expect-error intentional: migration
            .update({ requireTemplateForIssuance: !ca.requireTemplateForIssuance })
        );
      })
    );

    await knex.raw(`
      UPDATE ${TableName.CertificateAuthority} ca
      SET
        type = ica.type,
        "friendlyName" = ica."friendlyName",
        organization = ica.organization,
        ou = ica.ou,
        country = ica.country,
        province = ica.province,
        locality = ica.locality,
        "commonName" = ica."commonName",
        dn = ica.dn,
        "parentCaId" = ica."parentCaId",
        "serialNumber" = ica."serialNumber",
        "maxPathLength" = ica."maxPathLength",
        "keyAlgorithm" = ica."keyAlgorithm",
        "notBefore" = ica."notBefore",
        "notAfter" = ica."notAfter",
        "activeCaCertId" = ica."activeCaCertId"
      FROM ${TableName.InternalCertificateAuthority} ica
      WHERE ca.id = ica."caId"
    `);

    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.string("type").notNullable().alter();
      t.string("friendlyName").notNullable().alter();
      t.string("organization").notNullable().alter();
      t.string("ou").notNullable().alter();
      t.string("country").notNullable().alter();
      t.string("province").notNullable().alter();
      t.string("locality").notNullable().alter();
      t.string("commonName").notNullable().alter();
      t.string("dn").notNullable().alter();
      t.string("keyAlgorithm").notNullable().alter();
      t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false).alter();
    });

    await knex.schema.dropTable(TableName.InternalCertificateAuthority);
  }

  if (hasExternalCATable) {
    await knex.schema.dropTable(TableName.ExternalCertificateAuthority);
  }

  if (await knex.schema.hasTable(TableName.PkiSubscriber)) {
    await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
      t.dropColumn("enableAutoRenewal");
      t.dropColumn("autoRenewalPeriodInDays");
      t.dropColumn("lastAutoRenewAt");

      t.dropColumn("lastOperationStatus");
      t.dropColumn("lastOperationMessage");
      t.dropColumn("lastOperationAt");
    });
  }
}

backend/src/db/migrations/20250527030702_policy-bypassers.ts
@@ -0,0 +1,48 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyBypasser))) {
    await knex.schema.createTable(TableName.AccessApprovalPolicyBypasser, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("bypasserGroupId").nullable();
      t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");

      t.uuid("bypasserUserId").nullable();
      t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");

      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
  }

  if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyBypasser))) {
    await knex.schema.createTable(TableName.SecretApprovalPolicyBypasser, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("bypasserGroupId").nullable();
      t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");

      t.uuid("bypasserUserId").nullable();
      t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");

      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyBypasser);
  await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyBypasser);

  await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
  await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
}

@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
  if (!hasColumn) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      t.string("usernameTemplate").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      t.dropColumn("usernameTemplate");
    });
  }
}

@@ -0,0 +1,139 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.IdentityAccessToken, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityAwsAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityOidcAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityAzureAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityAzureAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityGcpAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityJwtAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityJwtAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityLdapAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityLdapAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityOciAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityOciAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }

  if (!(await knex.schema.hasColumn(TableName.IdentityTokenAuth, "accessTokenPeriod"))) {
    await knex.schema.alterTable(TableName.IdentityTokenAuth, (t) => {
      t.bigInteger("accessTokenPeriod").defaultTo(0).notNullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityAccessToken, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityAwsAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityOidcAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityAzureAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityAzureAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityGcpAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityJwtAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityJwtAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityLdapAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityLdapAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityOciAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityOciAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }

  if (await knex.schema.hasColumn(TableName.IdentityTokenAuth, "accessTokenPeriod")) {
    await knex.schema.alterTable(TableName.IdentityTokenAuth, (t) => {
      t.dropColumn("accessTokenPeriod");
    });
  }
}

@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.string("description").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.dropColumn("description");
    });
  }
}

@@ -0,0 +1,24 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasNameCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "name");
  if (hasNameCol) {
    const templates = await knex(TableName.CertificateTemplate).select("id", "name");
    await Promise.all(
      templates.map((el) => {
        const slugifiedName = el.name
          ? slugify(`${el.name.slice(0, 16)}-${alphaNumericNanoId(8)}`)
          : slugify(alphaNumericNanoId(12));

        return knex(TableName.CertificateTemplate).where({ id: el.id }).update({ name: slugifiedName });
      })
    );
  }
}

export async function down(): Promise<void> {}
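
For a concrete sense of the renaming pattern above, here is an illustrative snippet only: the 8-character suffix actually comes from alphaNumericNanoId and is random, so the literal suffix below is a stand-in.

import slugify from "@sindresorhus/slugify";

// Hypothetical example of the generated template name; "a1b2c3d4" stands in for a random suffix.
const existingName = "My HTTPS Cert Template";
const exampleName = slugify(`${existingName.slice(0, 16)}-a1b2c3d4`);
// => "my-https-cert-te-a1b2c3d4"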

@@ -0,0 +1,27 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");

    if (hasEncryptedSalt) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.dropColumn("encryptedSalt");
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");

    if (!hasEncryptedSalt) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.binary("encryptedSalt").nullable();
      });
    }
  }
}

@@ -0,0 +1,63 @@
import { Knex } from "knex";

import { ApprovalStatus } from "@app/ee/services/secret-approval-request/secret-approval-request-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalRequest,
    "privilegeDeletedAt"
  );
  const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");

  if (!hasPrivilegeDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.timestamp("privilegeDeletedAt").nullable();
    });
  }

  if (!hasStatusColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.string("status").defaultTo(ApprovalStatus.PENDING).notNullable();
    });

    // Update existing rows based on business logic
    // If privilegeId is not null, set status to "approved"
    await knex(TableName.AccessApprovalRequest).whereNotNull("privilegeId").update({ status: ApprovalStatus.APPROVED });

    // If privilegeId is null and there's a rejected reviewer, set to "rejected"
    const rejectedRequestIds = await knex(TableName.AccessApprovalRequestReviewer)
      .select("requestId")
      .where("status", "rejected")
      .distinct()
      .pluck("requestId");

    if (rejectedRequestIds.length > 0) {
      await knex(TableName.AccessApprovalRequest)
        .whereNull("privilegeId")
        .whereIn("id", rejectedRequestIds)
        .update({ status: ApprovalStatus.REJECTED });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalRequest,
    "privilegeDeletedAt"
  );
  const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");

  if (hasPrivilegeDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.dropColumn("privilegeDeletedAt");
    });
  }

  if (hasStatusColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.dropColumn("status");
    });
  }
}

backend/src/db/migrations/20250602155451_fix-secret-versions.ts
@@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";

import { SecretType, TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  logger.info("Starting secret version fix migration");

  // Get all shared secret IDs first to optimize versions query
  const secretIds = await knex(TableName.SecretV2)
    .where("type", SecretType.Shared)
    .select("id")
    .then((rows) => rows.map((row) => row.id));

  logger.info(`Found ${secretIds.length} shared secrets to process`);

  if (secretIds.length === 0) {
    logger.info("No shared secrets found");
    return;
  }

  const secretIdChunks = chunkArray(secretIds, 5000);

  for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
    const currentSecretIds = secretIdChunks[chunkIndex];
    logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);

    // Get secrets and versions for current chunk
    const [sharedSecrets, allVersions] = await Promise.all([
      knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
      knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
    ]);

    const versionsBySecretId = new Map<string, number[]>();

    allVersions.forEach((v) => {
      const versions = versionsBySecretId.get(v.secretId);
      if (versions) {
        versions.push(v.version);
      } else {
        versionsBySecretId.set(v.secretId, [v.version]);
      }
    });

    const versionsToAdd = [];
    const secretsToUpdate = [];

    // Process each shared secret
    for (const secret of sharedSecrets) {
      const existingVersions = versionsBySecretId.get(secret.id) || [];

      if (existingVersions.length === 0) {
        // No versions exist - add current version
        versionsToAdd.push({
          secretId: secret.id,
          version: secret.version,
          key: secret.key,
          encryptedValue: secret.encryptedValue,
          encryptedComment: secret.encryptedComment,
          reminderNote: secret.reminderNote,
          reminderRepeatDays: secret.reminderRepeatDays,
          skipMultilineEncoding: secret.skipMultilineEncoding,
          metadata: secret.metadata,
          folderId: secret.folderId,
          actorType: "platform"
        });
      } else {
        const latestVersion = Math.max(...existingVersions);

        if (latestVersion !== secret.version) {
          // Latest version doesn't match - create new version and update secret
          const nextVersion = latestVersion + 1;

          versionsToAdd.push({
            secretId: secret.id,
            version: nextVersion,
            key: secret.key,
            encryptedValue: secret.encryptedValue,
            encryptedComment: secret.encryptedComment,
            reminderNote: secret.reminderNote,
            reminderRepeatDays: secret.reminderRepeatDays,
            skipMultilineEncoding: secret.skipMultilineEncoding,
            metadata: secret.metadata,
            folderId: secret.folderId,
            actorType: "platform"
          });

          secretsToUpdate.push({
            id: secret.id,
            newVersion: nextVersion
          });
        }
      }
    }

    logger.info(
      `Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
    );

    // Batch insert new versions
    if (versionsToAdd.length > 0) {
      const insertBatches = chunkArray(versionsToAdd, 9000);
      for (let i = 0; i < insertBatches.length; i += 1) {
        await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
      }
    }

    if (secretsToUpdate.length > 0) {
      const updateBatches = chunkArray(secretsToUpdate, 1000);

      for (const updateBatch of updateBatches) {
        const ids = updateBatch.map((u) => u.id);
        const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");

        await knex.raw(
          `
          UPDATE ${TableName.SecretV2}
          SET version = CASE id ${versionCases} END,
              "updatedAt" = NOW()
          WHERE id IN (${ids.map(() => "?").join(",")})
          `,
          ids
        );
      }
    }
  }

  logger.info("Secret version fix migration completed");
}

export async function down(): Promise<void> {
  logger.info("Rollback not implemented for secret version fix migration");
  // Note: Rolling back this migration would be complex and potentially destructive
  // as it would require tracking which version entries were added
}
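
To make the batched version bump above concrete, this is roughly the SQL that the versionCases string expands to for a two-row batch. It is illustrative only: the ids are made-up placeholders, the real table name is whatever TableName.SecretV2 resolves to (shown here as secrets_v2), and the two bound parameters are filled from the ids array passed to knex.raw.

UPDATE secrets_v2
SET version = CASE id WHEN 'id-aaa' THEN 4 WHEN 'id-bbb' THEN 9 END,
    "updatedAt" = NOW()
WHERE id IN (?,?)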

@@ -0,0 +1,345 @@
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";

import {
  ProjectType,
  SecretType,
  TableName,
  TFolderCheckpoints,
  TFolderCommits,
  TFolderTreeCheckpoints,
  TSecretFolders
} from "../schemas";

const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
  // Create a map for quick lookup of children by parent ID
  const childrenMap = new Map<string, TSecretFolders[]>();

  // Set of all folder IDs
  const allFolderIds = new Set<string>();

  // Build the set of all folder IDs
  folders.forEach((folder) => {
    if (folder.id) {
      allFolderIds.add(folder.id);
    }
  });

  // Group folders by their parentId
  folders.forEach((folder) => {
    if (folder.parentId) {
      const children = childrenMap.get(folder.parentId) || [];
      children.push(folder);
      childrenMap.set(folder.parentId, children);
    }
  });

  // Find root folders - those with no parentId or with a parentId that doesn't exist
  const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));

  // Process each level of the hierarchy
  const result = [];
  let currentLevel = rootFolders;

  while (currentLevel.length > 0) {
    result.push(...currentLevel);

    const nextLevel = [];
    for (const folder of currentLevel) {
      if (folder.id) {
        const children = childrenMap.get(folder.id) || [];
        nextLevel.push(...children);
      }
    }

    currentLevel = nextLevel;
  }

  return result.reverse();
};

const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
  const secrets = await knex(TableName.SecretV2)
    .whereIn(`${TableName.SecretV2}.folderId`, folderIds)
    .where(`${TableName.SecretV2}.type`, SecretType.Shared)
    .join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
        .andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
    })
    .select(selectAllTableCols(TableName.SecretV2))
    .select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));

  const secretsMap: Record<string, string[]> = {};

  secrets.forEach((secret) => {
    if (!secretsMap[secret.folderId]) {
      secretsMap[secret.folderId] = [];
    }
    secretsMap[secret.folderId].push(secret.secretVersionId);
  });

  return secretsMap;
};

const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
  const folders = await knex(TableName.SecretFolder)
    .whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
    .where(`${TableName.SecretFolder}.isReserved`, false)
    .join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
        .andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
    })
    .select(selectAllTableCols(TableName.SecretFolder))
    .select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));

  const foldersMap: Record<string, string[]> = {};

  folders.forEach((folder) => {
    if (!folder.parentId) {
      return;
    }
    if (!foldersMap[folder.parentId]) {
      foldersMap[folder.parentId] = [];
    }
    foldersMap[folder.parentId].push(folder.folderVersionId);
  });

  return foldersMap;
};

export async function up(knex: Knex): Promise<void> {
  logger.info("Initializing folder commits");
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // Get Projects to Initialize
    const projects = await knex(TableName.Project)
      .where(`${TableName.Project}.version`, 3)
      .where(`${TableName.Project}.type`, ProjectType.SecretManager)
      .select(selectAllTableCols(TableName.Project));
    logger.info(`Found ${projects.length} projects to initialize`);

    // Process Projects in batches of 100
    const batches = chunkArray(projects, 100);
    let i = 0;
    for (const batch of batches) {
      i += 1;
      logger.info(`Processing project batch ${i} of ${batches.length}`);
      let foldersCommitsList = [];

      const rootFoldersMap: Record<string, string> = {};
      const envRootFoldersMap: Record<string, string> = {};

      // Get All Folders for the Project
      // eslint-disable-next-line no-await-in-loop
      const folders = await knex(TableName.SecretFolder)
        .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
        .whereIn(
          `${TableName.Environment}.projectId`,
          batch.map((project) => project.id)
        )
        .where(`${TableName.SecretFolder}.isReserved`, false)
        .select(selectAllTableCols(TableName.SecretFolder));
      logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);

      // Sort Folders by Hierarchy (parents before nested folders)
      const sortedFolders = sortFoldersByHierarchy(folders);

      // eslint-disable-next-line no-await-in-loop
      const folderSecretsMap = await getSecretsByFolderIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );
      // eslint-disable-next-line no-await-in-loop
      const folderFoldersMap = await getFoldersByParentIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );

      // Get folder commit changes
      for (const folder of sortedFolders) {
        const subFolderVersionIds = folderFoldersMap[folder.id];
        const secretVersionIds = folderSecretsMap[folder.id];
        const changes = [];
        if (subFolderVersionIds) {
          changes.push(
            ...subFolderVersionIds.map((folderVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId: undefined,
              folderVersionId,
              isUpdate: false
            }))
          );
        }
        if (secretVersionIds) {
          changes.push(
            ...secretVersionIds.map((secretVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId,
              folderVersionId: undefined,
              isUpdate: false
            }))
          );
        }
        if (changes.length > 0) {
          const folderCommit = {
            commit: {
              actorMetadata: {},
              actorType: ActorType.PLATFORM,
              message: "Initialized folder",
              folderId: folder.id,
              envId: folder.envId
            },
            changes
          };
          foldersCommitsList.push(folderCommit);
          if (!folder.parentId) {
            rootFoldersMap[folder.id] = folder.envId;
            envRootFoldersMap[folder.envId] = folder.id;
          }
        }
      }
      logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);

      const filteredBrokenProjectFolders: string[] = [];

      foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
        if (!envRootFoldersMap[folderCommit.commit.envId]) {
          filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
          return false;
        }
        return true;
      });

      logger.info(
        `Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
      );

      // Insert New Commits in batches of 9000
      const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
      const commitBatches = chunkArray(newCommits, 9000);

      let j = 0;
      for (const commitBatch of commitBatches) {
        j += 1;
        logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
        // Create folder commit
        // eslint-disable-next-line no-await-in-loop
        const newCommitsInserted = (await knex
          .batchInsert(TableName.FolderCommit, commitBatch)
          .returning("*")) as TFolderCommits[];

        logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);

        const newCommitsMap: Record<string, string> = {};
        const newCommitsMapInverted: Record<string, string> = {};
        const newCheckpointsMap: Record<string, string> = {};
        newCommitsInserted.forEach((commit) => {
          newCommitsMap[commit.folderId] = commit.id;
          newCommitsMapInverted[commit.id] = commit.folderId;
        });

        // Create folder checkpoints
        // eslint-disable-next-line no-await-in-loop
        const newCheckpoints = (await knex
          .batchInsert(
            TableName.FolderCheckpoint,
            Object.values(newCommitsMap).map((commitId) => ({
              folderCommitId: commitId
            }))
          )
          .returning("*")) as TFolderCheckpoints[];

        logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);

        newCheckpoints.forEach((checkpoint) => {
          newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
        });

        // Create folder commit changes
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCommitChanges,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCommitId: newCommitsMap[change.folderId],
              changeType: change.changeType,
              secretVersionId: change.secretVersionId,
              folderVersionId: change.folderVersionId,
              isUpdate: false
            }))
        );

        logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);

        // Create folder checkpoint resources
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCheckpointResources,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCheckpointId: newCheckpointsMap[change.folderId],
              folderVersionId: change.folderVersionId,
              secretVersionId: change.secretVersionId
            }))
        );

        logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);

        // Create Folder Tree Checkpoint
        // eslint-disable-next-line no-await-in-loop
        const newTreeCheckpoints = (await knex
          .batchInsert(
            TableName.FolderTreeCheckpoint,
            Object.keys(rootFoldersMap).map((folderId) => ({
              folderCommitId: newCommitsMap[folderId]
            }))
          )
          .returning("*")) as TFolderTreeCheckpoints[];

        logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);

        const newTreeCheckpointsMap: Record<string, string> = {};
        newTreeCheckpoints.forEach((checkpoint) => {
          newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
        });

        // Create Folder Tree Checkpoint Resources
        // eslint-disable-next-line no-await-in-loop
        await knex
          .batchInsert(
            TableName.FolderTreeCheckpointResources,
            newCommitsInserted.map((folderCommit) => ({
              folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
              folderId: folderCommit.folderId,
              folderCommitId: folderCommit.id
            }))
          )
          .returning("*");

        logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
      }
    }
  }
  logger.info("Folder commits initialized");
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // delete all existing entries
    await knex(TableName.FolderCommit).del();
  }
}

@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");

  if (!hasTokenReviewModeColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("tokenReviewMode").notNullable().defaultTo("api");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");

  if (hasTokenReviewModeColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.dropColumn("tokenReviewMode");
    });
  }
}

@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (!hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("showSnapshotsLegacy");
    });
  }
}
|
@@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";

import { TMigrationEnvConfig } from "./env-config";

@@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }

  return { kmsService };
};

export const getMigrationPITServices = async ({
  db,
  keyStore,
  envConfig
}: {
  db: Knex;
  keyStore: TKeyStoreFactory;
  envConfig: TMigrationEnvConfig;
}) => {
  const projectDAL = projectDALFactory(db);
  const folderCommitDAL = folderCommitDALFactory(db);
  const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
  const folderCheckpointDAL = folderCheckpointDALFactory(db);
  const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
  const userDAL = userDALFactory(db);
  const identityDAL = identityDALFactory(db);
  const folderDAL = secretFolderDALFactory(db);
  const folderVersionDAL = secretFolderVersionDALFactory(db);
  const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
  const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
  const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
  const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
  const secretTagDAL = secretTagDALFactory(db);

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const resourceMetadataDAL = resourceMetadataDALFactory(db);

  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  const folderCommitService = folderCommitServiceFactory({
    folderCommitDAL,
    folderCommitChangesDAL,
    folderCheckpointDAL,
    folderTreeCheckpointDAL,
    userDAL,
    identityDAL,
    folderDAL,
    folderVersionDAL,
    secretVersionV2BridgeDAL,
    projectDAL,
    folderCheckpointResourcesDAL,
    secretV2BridgeDAL,
    folderTreeCheckpointResourcesDAL,
    kmsService,
    secretTagDAL,
    resourceMetadataDAL
  });

  return { folderCommitService };
};
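A minimal sketch of how a data migration might consume `getMigrationPITServices` to obtain the folder commit service against the migration's own Knex handle. The `getMigrationEnvConfig` and `inMemoryKeyStore` helpers named below are assumptions for illustration; the actual wiring used by the backfill migration is not shown in this diff.

```ts
import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  const envConfig = getMigrationEnvConfig();  // assumed helper returning TMigrationEnvConfig
  const keyStore = inMemoryKeyStore();        // assumed TKeyStoreFactory implementation

  const { folderCommitService } = await getMigrationPITServices({ db: knex, keyStore, envConfig });

  // folderCommitService can then be used to back-fill commits and checkpoints
  // for existing folders as part of the point-in-time (PIT) rollout.
  void folderCommitService;
}
```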
26  backend/src/db/schemas/access-approval-policies-bypassers.ts  Normal file
@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AccessApprovalPoliciesBypassersSchema = z.object({
  id: z.string().uuid(),
  bypasserGroupId: z.string().uuid().nullable().optional(),
  bypasserUserId: z.string().uuid().nullable().optional(),
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAccessApprovalPoliciesBypassers = z.infer<typeof AccessApprovalPoliciesBypassersSchema>;
export type TAccessApprovalPoliciesBypassersInsert = Omit<
  z.input<typeof AccessApprovalPoliciesBypassersSchema>,
  TImmutableDBKeys
>;
export type TAccessApprovalPoliciesBypassersUpdate = Partial<
  Omit<z.input<typeof AccessApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;
@@ -18,7 +18,9 @@ export const AccessApprovalRequestsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  requestedByUserId: z.string().uuid(),
  note: z.string().nullable().optional()
  note: z.string().nullable().optional(),
  privilegeDeletedAt: z.date().nullable().optional(),
  status: z.string().default("pending")
});

export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
@@ -11,25 +11,10 @@ export const CertificateAuthoritiesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  parentCaId: z.string().uuid().nullable().optional(),
  projectId: z.string(),
  type: z.string(),
  enableDirectIssuance: z.boolean().default(true),
  status: z.string(),
  friendlyName: z.string(),
  organization: z.string(),
  ou: z.string(),
  country: z.string(),
  province: z.string(),
  locality: z.string(),
  commonName: z.string(),
  dn: z.string(),
  serialNumber: z.string().nullable().optional(),
  maxPathLength: z.number().nullable().optional(),
  keyAlgorithm: z.string(),
  notBefore: z.date().nullable().optional(),
  notAfter: z.date().nullable().optional(),
  activeCaCertId: z.string().uuid().nullable().optional(),
  requireTemplateForIssuance: z.boolean().default(false)
  name: z.string()
});

export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
@@ -11,7 +11,7 @@ export const CertificatesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  caId: z.string().uuid(),
  caId: z.string().uuid().nullable().optional(),
  status: z.string(),
  serialNumber: z.string(),
  friendlyName: z.string(),
@@ -21,10 +21,12 @@ export const CertificatesSchema = z.object({
  revokedAt: z.date().nullable().optional(),
  revocationReason: z.number().nullable().optional(),
  altNames: z.string().nullable().optional(),
  caCertId: z.string().uuid(),
  caCertId: z.string().uuid().nullable().optional(),
  certificateTemplateId: z.string().uuid().nullable().optional(),
  keyUsages: z.string().array().nullable().optional(),
  extendedKeyUsages: z.string().array().nullable().optional()
  extendedKeyUsages: z.string().array().nullable().optional(),
  pkiSubscriberId: z.string().uuid().nullable().optional(),
  projectId: z.string()
});

export type TCertificates = z.infer<typeof CertificatesSchema>;
@@ -27,7 +27,9 @@ export const DynamicSecretsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedInput: zodBuffer,
  projectGatewayId: z.string().uuid().nullable().optional()
  projectGatewayId: z.string().uuid().nullable().optional(),
  gatewayId: z.string().uuid().nullable().optional(),
  usernameTemplate: z.string().nullable().optional()
});

export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
29  backend/src/db/schemas/external-certificate-authorities.ts  Normal file
@@ -0,0 +1,29 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const ExternalCertificateAuthoritiesSchema = z.object({
  id: z.string().uuid(),
  type: z.string(),
  appConnectionId: z.string().uuid().nullable().optional(),
  dnsAppConnectionId: z.string().uuid().nullable().optional(),
  caId: z.string().uuid(),
  credentials: zodBuffer.nullable().optional(),
  configuration: z.unknown().nullable().optional()
});

export type TExternalCertificateAuthorities = z.infer<typeof ExternalCertificateAuthoritiesSchema>;
export type TExternalCertificateAuthoritiesInsert = Omit<
  z.input<typeof ExternalCertificateAuthoritiesSchema>,
  TImmutableDBKeys
>;
export type TExternalCertificateAuthoritiesUpdate = Partial<
  Omit<z.input<typeof ExternalCertificateAuthoritiesSchema>, TImmutableDBKeys>
>;
23  backend/src/db/schemas/folder-checkpoint-resources.ts  Normal file
@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderCheckpointId: z.string().uuid(),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;
19  backend/src/db/schemas/folder-checkpoints.ts  Normal file
@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;
23  backend/src/db/schemas/folder-commit-changes.ts  Normal file
@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCommitChangesSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  changeType: z.string(),
  isUpdate: z.boolean().default(false),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;
24  backend/src/db/schemas/folder-commits.ts  Normal file
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCommitsSchema = z.object({
  id: z.string().uuid(),
  commitId: z.coerce.bigint(),
  actorMetadata: z.unknown(),
  actorType: z.string(),
  message: z.string().nullable().optional(),
  folderId: z.string().uuid(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;
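The `commitId` field uses `z.coerce.bigint()`, which matters because node-postgres typically returns Postgres `bigint` columns as strings. A small sketch of validating a raw row with the generated schema — the import path and all IDs below are placeholders:

```ts
import { FolderCommitsSchema } from "@app/db/schemas"; // path assumed from the barrel exports above

// A raw row as the driver might return it; values are made-up placeholders.
const row = {
  id: "d9b4c0e2-0000-4000-8000-000000000000",
  commitId: "42", // bigints often arrive as strings from node-postgres
  actorMetadata: { name: "example" },
  actorType: "user",
  message: null,
  folderId: "c1a2b3d4-0000-4000-8000-000000000000",
  envId: "e5f6a7b8-0000-4000-8000-000000000000",
  createdAt: new Date(),
  updatedAt: new Date()
};

const commit = FolderCommitsSchema.parse(row);
// z.coerce.bigint() turns the "42" string into the bigint 42n, so arithmetic is safe.
console.log(commit.commitId + 1n); // 43n
```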
26  backend/src/db/schemas/folder-tree-checkpoint-resources.ts  Normal file
@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderTreeCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderTreeCheckpointId: z.string().uuid(),
  folderId: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
  z.input<typeof FolderTreeCheckpointResourcesSchema>,
  TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;
19  backend/src/db/schemas/folder-tree-checkpoints.ts  Normal file
@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderTreeCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;
@@ -21,7 +21,8 @@ export const IdentityAccessTokensSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  name: z.string().nullable().optional(),
  authMethod: z.string()
  authMethod: z.string(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;
@@ -19,7 +19,8 @@ export const IdentityAwsAuthsSchema = z.object({
  type: z.string(),
  stsEndpoint: z.string(),
  allowedPrincipalArns: z.string(),
  allowedAccountIds: z.string()
  allowedAccountIds: z.string(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityAwsAuths = z.infer<typeof IdentityAwsAuthsSchema>;
@@ -18,7 +18,8 @@ export const IdentityAzureAuthsSchema = z.object({
  identityId: z.string().uuid(),
  tenantId: z.string(),
  resource: z.string(),
  allowedServicePrincipalIds: z.string()
  allowedServicePrincipalIds: z.string(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityAzureAuths = z.infer<typeof IdentityAzureAuthsSchema>;
@@ -19,7 +19,8 @@ export const IdentityGcpAuthsSchema = z.object({
  type: z.string(),
  allowedServiceAccounts: z.string().nullable().optional(),
  allowedProjects: z.string().nullable().optional(),
  allowedZones: z.string().nullable().optional()
  allowedZones: z.string().nullable().optional(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;
@@ -25,7 +25,8 @@ export const IdentityJwtAuthsSchema = z.object({
  boundClaims: z.unknown(),
  boundSubject: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityJwtAuths = z.infer<typeof IdentityJwtAuthsSchema>;
@@ -29,7 +29,10 @@ export const IdentityKubernetesAuthsSchema = z.object({
  allowedNames: z.string(),
  allowedAudience: z.string(),
  encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
  gatewayId: z.string().uuid().nullable().optional(),
  accessTokenPeriod: z.coerce.number().default(0),
  tokenReviewMode: z.string().default("api")
});

export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
@@ -24,7 +24,8 @@ export const IdentityLdapAuthsSchema = z.object({
  searchFilter: z.string(),
  allowedFields: z.unknown().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityLdapAuths = z.infer<typeof IdentityLdapAuthsSchema>;
27  backend/src/db/schemas/identity-oci-auths.ts  Normal file
@@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityOciAuthsSchema = z.object({
  id: z.string().uuid(),
  accessTokenTTL: z.coerce.number().default(7200),
  accessTokenMaxTTL: z.coerce.number().default(7200),
  accessTokenNumUsesLimit: z.coerce.number().default(0),
  accessTokenTrustedIps: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  type: z.string(),
  tenancyOcid: z.string(),
  allowedUsernames: z.string().nullable().optional(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityOciAuths = z.infer<typeof IdentityOciAuthsSchema>;
export type TIdentityOciAuthsInsert = Omit<z.input<typeof IdentityOciAuthsSchema>, TImmutableDBKeys>;
export type TIdentityOciAuthsUpdate = Partial<Omit<z.input<typeof IdentityOciAuthsSchema>, TImmutableDBKeys>>;
@@ -27,7 +27,8 @@ export const IdentityOidcAuthsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedCaCertificate: zodBuffer.nullable().optional(),
  claimMetadataMapping: z.unknown().nullable().optional()
  claimMetadataMapping: z.unknown().nullable().optional(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
@@ -15,7 +15,8 @@ export const IdentityTokenAuthsSchema = z.object({
  accessTokenTrustedIps: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid()
  identityId: z.string().uuid(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityTokenAuths = z.infer<typeof IdentityTokenAuthsSchema>;
@@ -17,7 +17,8 @@ export const IdentityUniversalAuthsSchema = z.object({
  accessTokenTrustedIps: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid()
  identityId: z.string().uuid(),
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
@@ -1,5 +1,6 @@
export * from "./access-approval-policies";
export * from "./access-approval-policies-approvers";
export * from "./access-approval-policies-bypassers";
export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers";
export * from "./api-keys";
@@ -20,8 +21,15 @@ export * from "./certificate-templates";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@@ -37,6 +45,7 @@ export * from "./identity-gcp-auths";
export * from "./identity-jwt-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oci-auths";
export * from "./identity-oidc-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
@@ -48,6 +57,7 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./internal-certificate-authorities";
export * from "./internal-kms";
export * from "./kmip-client-certificates";
export * from "./kmip-clients";
@@ -69,6 +79,7 @@ export * from "./organizations";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./pki-subscribers";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";
@@ -88,6 +99,7 @@ export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-policies-bypassers";
export * from "./secret-approval-request-secret-tags";
export * from "./secret-approval-request-secret-tags-v2";
export * from "./secret-approval-requests";
@@ -105,7 +117,12 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";
38  backend/src/db/schemas/internal-certificate-authorities.ts  Normal file
@@ -0,0 +1,38 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const InternalCertificateAuthoritiesSchema = z.object({
  id: z.string().uuid(),
  parentCaId: z.string().uuid().nullable().optional(),
  type: z.string(),
  friendlyName: z.string(),
  organization: z.string(),
  ou: z.string(),
  country: z.string(),
  province: z.string(),
  locality: z.string(),
  commonName: z.string(),
  dn: z.string(),
  serialNumber: z.string().nullable().optional(),
  maxPathLength: z.number().nullable().optional(),
  keyAlgorithm: z.string(),
  notBefore: z.date().nullable().optional(),
  notAfter: z.date().nullable().optional(),
  activeCaCertId: z.string().uuid().nullable().optional(),
  caId: z.string().uuid()
});

export type TInternalCertificateAuthorities = z.infer<typeof InternalCertificateAuthoritiesSchema>;
export type TInternalCertificateAuthoritiesInsert = Omit<
  z.input<typeof InternalCertificateAuthoritiesSchema>,
  TImmutableDBKeys
>;
export type TInternalCertificateAuthoritiesUpdate = Partial<
  Omit<z.input<typeof InternalCertificateAuthoritiesSchema>, TImmutableDBKeys>
>;
@@ -13,6 +13,8 @@ export enum TableName {
  SshCertificate = "ssh_certificates",
  SshCertificateBody = "ssh_certificate_bodies",
  CertificateAuthority = "certificate_authorities",
  ExternalCertificateAuthority = "external_certificate_authorities",
  InternalCertificateAuthority = "internal_certificate_authorities",
  CertificateTemplateEstConfig = "certificate_template_est_configs",
  CertificateAuthorityCert = "certificate_authority_certs",
  CertificateAuthoritySecret = "certificate_authority_secret",
@@ -21,6 +23,7 @@ export enum TableName {
  CertificateBody = "certificate_bodies",
  CertificateSecret = "certificate_secrets",
  CertificateTemplate = "certificate_templates",
  PkiSubscriber = "pki_subscribers",
  PkiAlert = "pki_alerts",
  PkiCollection = "pki_collections",
  PkiCollectionItem = "pki_collection_items",
@@ -78,6 +81,7 @@ export enum TableName {
  IdentityAzureAuth = "identity_azure_auths",
  IdentityUaClientSecret = "identity_ua_client_secrets",
  IdentityAwsAuth = "identity_aws_auths",
  IdentityOciAuth = "identity_oci_auths",
  IdentityOidcAuth = "identity_oidc_auths",
  IdentityJwtAuth = "identity_jwt_auths",
  IdentityLdapAuth = "identity_ldap_auths",
@@ -91,10 +95,12 @@ export enum TableName {
  ScimToken = "scim_tokens",
  AccessApprovalPolicy = "access_approval_policies",
  AccessApprovalPolicyApprover = "access_approval_policies_approvers",
  AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
  AccessApprovalRequest = "access_approval_requests",
  AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
  SecretApprovalPolicy = "secret_approval_policies",
  SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
  SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
  SecretApprovalRequest = "secret_approval_requests",
  SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
  SecretApprovalRequestSecret = "secret_approval_requests_secrets",
@@ -153,10 +159,21 @@ export enum TableName {
  MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
  ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
  SecretReminderRecipients = "secret_reminder_recipients",
  GithubOrgSyncConfig = "github_org_sync_configs"
  GithubOrgSyncConfig = "github_org_sync_configs",
  FolderCommit = "folder_commits",
  FolderCommitChanges = "folder_commit_changes",
  FolderCheckpoint = "folder_checkpoints",
  FolderCheckpointResources = "folder_checkpoint_resources",
  FolderTreeCheckpoint = "folder_tree_checkpoints",
  FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
  SecretScanningDataSource = "secret_scanning_data_sources",
  SecretScanningResource = "secret_scanning_resources",
  SecretScanningScan = "secret_scanning_scans",
  SecretScanningFinding = "secret_scanning_findings",
  SecretScanningConfig = "secret_scanning_configs"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";

export const UserDeviceSchema = z
  .object({
@@ -232,6 +249,7 @@ export enum IdentityAuthMethod {
  GCP_AUTH = "gcp-auth",
  AWS_AUTH = "aws-auth",
  AZURE_AUTH = "azure-auth",
  OCI_AUTH = "oci-auth",
  OIDC_AUTH = "oidc-auth",
  JWT_AUTH = "jwt-auth",
  LDAP_AUTH = "ldap-auth"
@@ -241,7 +259,8 @@ export enum ProjectType {
  SecretManager = "secret-manager",
  CertificateManager = "cert-manager",
  KMS = "kms",
  SSH = "ssh"
  SSH = "ssh",
  SecretScanning = "secret-scanning"
}

export enum ActionProjectType {
@@ -249,6 +268,7 @@ export enum ActionProjectType {
  CertificateManager = ProjectType.CertificateManager,
  KMS = ProjectType.KMS,
  SSH = ProjectType.SSH,
  SecretScanning = ProjectType.SecretScanning,
  // project operations that happen on all types
  Any = "any"
}
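The `TImmutableDBKeys` change is worth pausing on: because the generated `*Insert` and `*Update` types are built as `Omit<z.input<...>, TImmutableDBKeys>`, adding `"commitId"` strips that column from every generated insert/update type (in practice only `folder_commits` has one), so application code cannot set it and it is presumably assigned by the database. A hedged type-level sketch, with placeholder IDs and an assumed import path:

```ts
import type { TFolderCommitsInsert } from "@app/db/schemas"; // path assumed

// Compiles: id, createdAt, updatedAt and commitId are excluded from the insert type,
// so the caller only supplies the remaining columns.
const insert: TFolderCommitsInsert = {
  actorMetadata: { kind: "user" },
  actorType: "user",
  folderId: "c1a2b3d4-0000-4000-8000-000000000000",
  envId: "e5f6a7b8-0000-4000-8000-000000000000"
};

// Would NOT compile: commitId is no longer part of TFolderCommitsInsert.
// const bad: TFolderCommitsInsert = { ...insert, commitId: 1n };
```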
@@ -28,7 +28,15 @@ export const OrganizationsSchema = z.object({
  privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
  privilegeUpgradeInitiatedAt: z.date().nullable().optional(),
  bypassOrgAuthEnabled: z.boolean().default(false),
  userTokenExpiration: z.string().nullable().optional()
  userTokenExpiration: z.string().nullable().optional(),
  secretsProductEnabled: z.boolean().default(true).nullable().optional(),
  pkiProductEnabled: z.boolean().default(true).nullable().optional(),
  kmsProductEnabled: z.boolean().default(true).nullable().optional(),
  sshProductEnabled: z.boolean().default(true).nullable().optional(),
  scannerProductEnabled: z.boolean().default(true).nullable().optional(),
  shareSecretsProductEnabled: z.boolean().default(true).nullable().optional(),
  maxSharedSecretLifetime: z.number().default(2592000).nullable().optional(),
  maxSharedSecretViewLimit: z.number().nullable().optional()
});

export type TOrganizations = z.infer<typeof OrganizationsSchema>;
33  backend/src/db/schemas/pki-subscribers.ts  Normal file
@@ -0,0 +1,33 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const PkiSubscribersSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  caId: z.string().uuid().nullable().optional(),
  name: z.string(),
  commonName: z.string(),
  subjectAlternativeNames: z.string().array(),
  ttl: z.string().nullable().optional(),
  keyUsages: z.string().array(),
  extendedKeyUsages: z.string().array(),
  status: z.string(),
  enableAutoRenewal: z.boolean().default(false),
  autoRenewalPeriodInDays: z.number().nullable().optional(),
  lastAutoRenewAt: z.date().nullable().optional(),
  lastOperationStatus: z.string().nullable().optional(),
  lastOperationMessage: z.string().nullable().optional(),
  lastOperationAt: z.date().nullable().optional()
});

export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;
export type TPkiSubscribersInsert = Omit<z.input<typeof PkiSubscribersSchema>, TImmutableDBKeys>;
export type TPkiSubscribersUpdate = Partial<Omit<z.input<typeof PkiSubscribersSchema>, TImmutableDBKeys>>;
@@ -27,7 +27,9 @@ export const ProjectsSchema = z.object({
  description: z.string().nullable().optional(),
  type: z.string(),
  enforceCapitalization: z.boolean().default(false),
  hasDeleteProtection: z.boolean().default(false).nullable().optional()
  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
  secretSharing: z.boolean().default(true),
  showSnapshotsLegacy: z.boolean().default(false)
});

export type TProjects = z.infer<typeof ProjectsSchema>;
26  backend/src/db/schemas/secret-approval-policies-bypassers.ts  Normal file
@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretApprovalPoliciesBypassersSchema = z.object({
  id: z.string().uuid(),
  bypasserGroupId: z.string().uuid().nullable().optional(),
  bypasserUserId: z.string().uuid().nullable().optional(),
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretApprovalPoliciesBypassers = z.infer<typeof SecretApprovalPoliciesBypassersSchema>;
export type TSecretApprovalPoliciesBypassersInsert = Omit<
  z.input<typeof SecretApprovalPoliciesBypassersSchema>,
  TImmutableDBKeys
>;
export type TSecretApprovalPoliciesBypassersUpdate = Partial<
  Omit<z.input<typeof SecretApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;
@@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
  folderId: z.string().uuid()
  folderId: z.string().uuid(),
  description: z.string().nullable().optional()
});

export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;
20  backend/src/db/schemas/secret-scanning-configs.ts  Normal file
@@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningConfigsSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  content: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;
32  backend/src/db/schemas/secret-scanning-data-sources.ts  Normal file
@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningDataSourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string().nullable().optional(),
  name: z.string(),
  description: z.string().nullable().optional(),
  type: z.string(),
  config: z.unknown(),
  encryptedCredentials: zodBuffer.nullable().optional(),
  connectionId: z.string().uuid().nullable().optional(),
  isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
  projectId: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isDisconnected: z.boolean().default(false)
});

export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;
32  backend/src/db/schemas/secret-scanning-findings.ts  Normal file
@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningFindingsSchema = z.object({
  id: z.string().uuid(),
  dataSourceName: z.string(),
  dataSourceType: z.string(),
  resourceName: z.string(),
  resourceType: z.string(),
  rule: z.string(),
  severity: z.string(),
  status: z.string().default("unresolved"),
  remarks: z.string().nullable().optional(),
  fingerprint: z.string(),
  details: z.unknown(),
  projectId: z.string(),
  scanId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
  Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;
24  backend/src/db/schemas/secret-scanning-resources.ts  Normal file
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningResourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string(),
  name: z.string(),
  type: z.string(),
  dataSourceId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;
21  backend/src/db/schemas/secret-scanning-scans.ts  Normal file
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningScansSchema = z.object({
  id: z.string().uuid(),
  status: z.string().default("queued"),
  statusMessage: z.string().nullable().optional(),
  type: z.string(),
  resourceId: z.string().uuid(),
  createdAt: z.date().nullable().optional()
});

export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;
@@ -27,7 +27,8 @@ export const SecretSharingSchema = z.object({
  password: z.string().nullable().optional(),
  encryptedSecret: zodBuffer.nullable().optional(),
  identifier: z.string().nullable().optional(),
  type: z.string().default("share")
  type: z.string().default("share"),
  authorizedEmails: z.unknown().nullable().optional()
});

export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
@@ -1,7 +1,7 @@
import { nanoid } from "nanoid";
import { z } from "zod";

import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -24,10 +24,19 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
        approvers: z
          .discriminatedUnion("type", [
            z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
            z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
            z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
          ])
          .array()
          .max(100, "Cannot have more than 100 approvers")
          .min(1, { message: "At least one approver should be provided" }),
        bypassers: z
          .discriminatedUnion("type", [
            z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
            z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
          ])
          .array()
          .max(100, "Cannot have more than 100 bypassers")
          .optional(),
        approvals: z.number().min(1).default(1),
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
        allowedSelfApprovals: z.boolean().default(true)
@@ -72,7 +81,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
              .object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() })
              .array()
              .nullable()
              .optional()
              .optional(),
            bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array()
          })
          .array()
          .nullable()
@@ -143,10 +153,19 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
        approvers: z
          .discriminatedUnion("type", [
            z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
            z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
            z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
          ])
          .array()
          .min(1, { message: "At least one approver should be provided" }),
          .min(1, { message: "At least one approver should be provided" })
          .max(100, "Cannot have more than 100 approvers"),
        bypassers: z
          .discriminatedUnion("type", [
            z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
            z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
          ])
          .array()
          .max(100, "Cannot have more than 100 bypassers")
          .optional(),
        approvals: z.number().min(1).optional(),
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
        allowedSelfApprovals: z.boolean().default(true)
@@ -220,6 +239,15 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
              })
              .array()
              .nullable()
              .optional(),
            bypassers: z
              .object({
                type: z.nativeEnum(BypasserType),
                id: z.string().nullable().optional(),
                name: z.string().nullable().optional()
              })
              .array()
              .nullable()
              .optional()
          })
        })
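For orientation, a hedged sketch of a request body that would satisfy the updated approver/bypasser schema above. Only the fields shown in these hunks are included, the IDs are placeholders, and the string values assume `ApproverType`/`BypasserType` serialize to `"user"`/`"group"` and `EnforcementLevel.Hard` to `"hard"`, which this diff does not confirm:

```ts
// Partial create-policy body; other required fields of the route are omitted here.
const createPolicyBody = {
  approvers: [
    { type: "user", username: "jane.doe" },                         // username replaces the old `name` key
    { type: "group", id: "7d8e9f00-0000-4000-8000-000000000000" }
  ],
  bypassers: [{ type: "user", username: "oncall.bot" }],            // optional, capped at 100 entries
  approvals: 2,
  enforcementLevel: "hard",
  allowedSelfApprovals: false
};
```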
@@ -113,6 +113,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
            name: z.string(),
            approvals: z.number(),
            approvers: z.string().array(),
            bypassers: z.string().array(),
            secretPath: z.string().nullish(),
            envId: z.string(),
            enforcementLevel: z.string(),
@@ -154,7 +155,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
        requestId: z.string().trim()
      }),
      body: z.object({
        status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
        status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
        bypassReason: z.string().min(10).max(1000).optional()
      }),
      response: {
        200: z.object({
@@ -170,7 +172,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        requestId: req.params.requestId,
        status: req.body.status
        status: req.body.status,
        bypassReason: req.body.bypassReason
      });

      return { review };
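A hedged example of the review body under the updated schema. The endpoint path is not shown in this hunk, so only the body is sketched; the `"approved"` string assumes that is what `ApprovalStatus.APPROVED` serializes to:

```ts
// bypassReason must be 10–1000 characters; it is optional and only meaningful
// when a bypasser is overriding the policy rather than going through approvers.
const reviewBody = {
  status: "approved",
  bypassReason: "Emergency rotation during incident INC-1234; approved by on-call lead."
};
```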
@@ -0,0 +1,123 @@
import z from "zod";

import {
  CreateOCIConnectionSchema,
  SanitizedOCIConnectionSchema,
  UpdateOCIConnectionSchema
} from "@app/ee/services/app-connections/oci";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "../../../../server/routes/v1/app-connection-routers/app-connection-endpoints";

export const registerOCIConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.OCI,
    server,
    sanitizedResponseSchema: SanitizedOCIConnectionSchema,
    createSchema: CreateOCIConnectionSchema,
    updateSchema: UpdateOCIConnectionSchema
  });

  // The following endpoints are for internal Infisical App use only and not part of the public API
  server.route({
    method: "GET",
    url: `/:connectionId/compartments`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z
          .object({
            id: z.string(),
            name: z.string()
          })
          .array()
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;

      const compartments = await server.services.appConnection.oci.listCompartments(connectionId, req.permission);
      return compartments;
    }
  });

  server.route({
    method: "GET",
    url: `/:connectionId/vaults`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      querystring: z.object({
        compartmentOcid: z.string().min(1, "Compartment OCID required")
      }),
      response: {
        200: z
          .object({
            id: z.string(),
            displayName: z.string()
          })
          .array()
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;
      const { compartmentOcid } = req.query;

      const vaults = await server.services.appConnection.oci.listVaults(
        { connectionId, compartmentOcid },
        req.permission
      );
      return vaults;
    }
  });

  server.route({
    method: "GET",
    url: `/:connectionId/vault-keys`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      querystring: z.object({
        compartmentOcid: z.string().min(1, "Compartment OCID required"),
        vaultOcid: z.string().min(1, "Vault OCID required")
      }),
      response: {
        200: z
          .object({
            id: z.string(),
            displayName: z.string()
          })
          .array()
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;
      const { compartmentOcid, vaultOcid } = req.query;

      const keys = await server.services.appConnection.oci.listVaultKeys(
        { connectionId, compartmentOcid, vaultOcid },
        req.permission
      );
      return keys;
    }
  });
};
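A minimal sketch of how the Infisical frontend might call one of these internal listing endpoints. The `/api/v1/app-connections/oci` prefix, the bearer token, and the OCIDs below are assumptions for illustration; only the path suffix, query parameter, and response shape come from the router above:

```ts
const jwtToken = "<user jwt>"; // placeholder; the route requires JWT auth per verifyAuth([AuthMode.JWT])
const connectionId = "3f2b1c4d-0000-4000-8000-000000000000"; // placeholder connection ID

const params = new URLSearchParams({ compartmentOcid: "ocid1.compartment.oc1..exampleuniqueID" });
const res = await fetch(`/api/v1/app-connections/oci/${connectionId}/vaults?${params}`, {
  headers: { Authorization: `Bearer ${jwtToken}` }
});

// Matches the 200 response schema: an array of { id, displayName }.
const vaults: { id: string; displayName: string }[] = await res.json();
```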
@@ -6,6 +6,8 @@ import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isValidHandleBarTemplate } from "@app/lib/template/validate-handlebars";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -13,6 +15,31 @@ import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchema
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";

const validateUsernameTemplateCharacters = characterValidator([
  CharacterType.AlphaNumeric,
  CharacterType.Underscore,
  CharacterType.Hyphen,
  CharacterType.OpenBrace,
  CharacterType.CloseBrace,
  CharacterType.CloseBracket,
  CharacterType.OpenBracket,
  CharacterType.Fullstop,
  CharacterType.SingleQuote,
  CharacterType.Spaces,
  CharacterType.Pipe
]);

const userTemplateSchema = z
  .string()
  .trim()
  .max(255)
  .refine((el) => validateUsernameTemplateCharacters(el))
  .refine((el) =>
    isValidHandleBarTemplate(el, {
      allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
    })
  );

export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
@@ -52,7 +79,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
        path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
        environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
        name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name),
        metadata: ResourceMetadataSchema.optional()
        metadata: ResourceMetadataSchema.optional(),
        usernameTemplate: userTemplateSchema.optional()
      }),
      response: {
        200: z.object({
@@ -73,39 +101,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
    }
  });

  server.route({
    method: "POST",
    url: "/entra-id/users",
    config: {
      rateLimit: readLimit
    },
    schema: {
      body: z.object({
        tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
        applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
        clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
      }),
      response: {
        200: z
          .object({
            name: z.string().min(1).describe("The name of the user"),
            id: z.string().min(1).describe("The ID of the user"),
            email: z.string().min(1).describe("The email of the user")
          })
          .array()
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
        tenantId: req.body.tenantId,
        applicationId: req.body.applicationId,
        clientSecret: req.body.clientSecret
      });
      return data;
    }
  });

  server.route({
    method: "PATCH",
    url: "/:name",
@@ -150,7 +145,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
          })
          .nullable(),
        newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional(),
        metadata: ResourceMetadataSchema.optional()
        metadata: ResourceMetadataSchema.optional(),
        usernameTemplate: userTemplateSchema.nullable().optional()
      })
      }),
      response: {
@@ -328,4 +324,37 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
      return { leases };
    }
  });

  server.route({
    method: "POST",
    url: "/entra-id/users",
    config: {
      rateLimit: readLimit
    },
    schema: {
      body: z.object({
        tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
        applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
        clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
      }),
      response: {
        200: z
          .object({
            name: z.string().min(1).describe("The name of the user"),
            id: z.string().min(1).describe("The ID of the user"),
            email: z.string().min(1).describe("The email of the user")
          })
          .array()
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
        tenantId: req.body.tenantId,
        applicationId: req.body.applicationId,
        clientSecret: req.body.clientSecret
      });
      return data;
    }
  });
};
@@ -121,14 +121,7 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
}),
|
||||
projects: z
|
||||
.object({
|
||||
name: z.string(),
|
||||
id: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
@@ -158,17 +151,15 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
}),
|
||||
projectGatewayId: z.string()
|
||||
})
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateways = await server.services.gateway.getProjectGateways({
|
||||
projectId: req.params.projectId,
|
||||
projectPermission: req.permission
|
||||
const gateways = await server.services.gateway.listGateways({
|
||||
orgPermission: req.permission
|
||||
});
|
||||
return { gateways };
|
||||
}
|
||||
@@ -216,8 +207,7 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
|
||||
id: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
name: slugSchema({ field: "name" }).optional(),
|
||||
projectIds: z.string().array().optional()
|
||||
name: slugSchema({ field: "name" }).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -230,8 +220,7 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
|
||||
const gateway = await server.services.gateway.updateGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id,
|
||||
name: req.body.name,
|
||||
projectIds: req.body.projectIds
|
||||
name: req.body.name
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
|
@@ -18,6 +18,7 @@ import { registerLdapRouter } from "./ldap-router";
|
||||
import { registerLicenseRouter } from "./license-router";
|
||||
import { registerOidcRouter } from "./oidc-router";
|
||||
import { registerOrgRoleRouter } from "./org-role-router";
|
||||
import { registerPITRouter } from "./pit-router";
|
||||
import { registerProjectRoleRouter } from "./project-role-router";
|
||||
import { registerProjectRouter } from "./project-router";
|
||||
import { registerRateLimitRouter } from "./rate-limit-router";
|
||||
@@ -53,6 +54,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
{ prefix: "/workspace" }
|
||||
);
|
||||
await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
|
||||
await server.register(registerPITRouter, { prefix: "/pit" });
|
||||
await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
|
||||
await server.register(registerSecretApprovalRequestRouter, {
|
||||
prefix: "/secret-approval-requests"
|
||||
|
@@ -47,7 +47,7 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({ plan: z.any() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const plan = await server.services.license.getOrgPlan({
|
||||
actorId: req.permission.id,
|
||||
|
backend/src/ee/routes/v1/pit-router.ts (new file, 416 lines)
@@ -0,0 +1,416 @@
|
||||
/* eslint-disable @typescript-eslint/no-base-to-string */
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
|
||||
|
||||
const commitHistoryItemSchema = z.object({
|
||||
id: z.string(),
|
||||
folderId: z.string(),
|
||||
actorType: z.string(),
|
||||
actorMetadata: z.unknown().optional(),
|
||||
message: z.string().optional().nullable(),
|
||||
commitId: z.string(),
|
||||
createdAt: z.string().or(z.date()),
|
||||
envId: z.string()
|
||||
});
|
||||
|
||||
const folderStateSchema = z.array(
|
||||
z.object({
|
||||
type: z.string(),
|
||||
id: z.string(),
|
||||
versionId: z.string(),
|
||||
secretKey: z.string().optional(),
|
||||
secretVersion: z.number().optional(),
|
||||
folderName: z.string().optional(),
|
||||
folderVersion: z.number().optional()
|
||||
})
|
||||
);
|
||||
|
||||
export const registerPITRouter = async (server: FastifyZodProvider) => {
|
||||
// Get commits count for a folder
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/commits/count",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
environment: z.string().trim(),
|
||||
path: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
count: z.number(),
|
||||
folderId: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.getCommitsCount({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.query.projectId,
|
||||
environment: req.query.environment,
|
||||
path: req.query.path
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
|
||||
metadata: {
|
||||
environment: req.query.environment,
|
||||
path: req.query.path,
|
||||
commitCount: result.count.toString()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
// Get all commits for a folder
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/commits",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
environment: z.string().trim(),
|
||||
path: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
projectId: z.string().trim(),
|
||||
offset: z.coerce.number().min(0).default(0),
|
||||
limit: z.coerce.number().min(1).max(100).default(20),
|
||||
search: z.string().trim().optional(),
|
||||
sort: z.enum(["asc", "desc"]).default("desc")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
commits: commitHistoryItemSchema.array(),
|
||||
total: z.number(),
|
||||
hasMore: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.getCommitsForFolder({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.query.projectId,
|
||||
environment: req.query.environment,
|
||||
path: req.query.path,
|
||||
offset: req.query.offset,
|
||||
limit: req.query.limit,
|
||||
search: req.query.search,
|
||||
sort: req.query.sort
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_PROJECT_PIT_COMMITS,
|
||||
metadata: {
|
||||
environment: req.query.environment,
|
||||
path: req.query.path,
|
||||
commitCount: result.commits.length.toString(),
|
||||
offset: req.query.offset.toString(),
|
||||
limit: req.query.limit.toString(),
|
||||
search: req.query.search,
|
||||
sort: req.query.sort
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
// Get commit changes for a specific commit
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/commits/:commitId/changes",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
commitId: z.string().trim()
|
||||
}),
|
||||
querystring: z.object({
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: commitChangesResponseSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.getCommitChanges({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.query.projectId,
|
||||
commitId: req.params.commitId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
|
||||
metadata: {
|
||||
commitId: req.params.commitId,
|
||||
changesCount: (result.changes.changes?.length || 0).toString()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
// Retrieve rollback changes for a commit
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/commits/:commitId/compare",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
commitId: z.string().trim()
|
||||
}),
|
||||
querystring: z.object({
|
||||
folderId: z.string().trim(),
|
||||
environment: z.string().trim(),
|
||||
deepRollback: booleanSchema.default(false),
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.array(
|
||||
z.object({
|
||||
folderId: z.string(),
|
||||
folderName: z.string(),
|
||||
folderPath: z.string().optional(),
|
||||
changes: z.array(resourceChangeSchema)
|
||||
})
|
||||
)
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.compareCommitChanges({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.query.projectId,
|
||||
commitId: req.params.commitId,
|
||||
folderId: req.query.folderId,
|
||||
environment: req.query.environment,
|
||||
deepRollback: req.query.deepRollback,
|
||||
secretPath: req.query.secretPath
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.PIT_COMPARE_FOLDER_STATES,
|
||||
metadata: {
|
||||
targetCommitId: req.params.commitId,
|
||||
folderId: req.query.folderId,
|
||||
deepRollback: req.query.deepRollback,
|
||||
diffsCount: result.length.toString(),
|
||||
environment: req.query.environment,
|
||||
folderPath: req.query.secretPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
// Rollback to a previous commit
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/commits/:commitId/rollback",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
commitId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
folderId: z.string().trim(),
|
||||
deepRollback: z.boolean().default(false),
|
||||
message: z.string().max(256).trim().optional(),
|
||||
environment: z.string().trim(),
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
success: z.boolean(),
|
||||
secretChangesCount: z.number().optional(),
|
||||
folderChangesCount: z.number().optional(),
|
||||
totalChanges: z.number().optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.rollbackToCommit({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.body.projectId,
|
||||
commitId: req.params.commitId,
|
||||
folderId: req.body.folderId,
|
||||
deepRollback: req.body.deepRollback,
|
||||
message: req.body.message,
|
||||
environment: req.body.environment
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.body.projectId,
|
||||
event: {
|
||||
type: EventType.PIT_ROLLBACK_COMMIT,
|
||||
metadata: {
|
||||
targetCommitId: req.params.commitId,
|
||||
environment: req.body.environment,
|
||||
folderId: req.body.folderId,
|
||||
deepRollback: req.body.deepRollback,
|
||||
message: req.body.message || "Rollback to previous commit",
|
||||
totalChanges: result.totalChanges?.toString() || "0"
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
// Revert commit
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/commits/:commitId/revert",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
commitId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
success: z.boolean(),
|
||||
message: z.string(),
|
||||
originalCommitId: z.string(),
|
||||
revertCommitId: z.string().optional(),
|
||||
changesReverted: z.number().optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.revertCommit({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.body.projectId,
|
||||
commitId: req.params.commitId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.body.projectId,
|
||||
event: {
|
||||
type: EventType.PIT_REVERT_COMMIT,
|
||||
metadata: {
|
||||
commitId: req.params.commitId,
|
||||
revertCommitId: result.revertCommitId,
|
||||
changesReverted: result.changesReverted?.toString()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
// Folder state at commit
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/commits/:commitId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
commitId: z.string().trim()
|
||||
}),
|
||||
querystring: z.object({
|
||||
folderId: z.string().trim(),
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: folderStateSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.getFolderStateAtCommit({
|
||||
actor: req.permission?.type,
|
||||
actorId: req.permission?.id,
|
||||
actorOrgId: req.permission?.orgId,
|
||||
actorAuthMethod: req.permission?.authMethod,
|
||||
projectId: req.query.projectId,
|
||||
commitId: req.params.commitId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.PIT_GET_FOLDER_STATE,
|
||||
metadata: {
|
||||
commitId: req.params.commitId,
|
||||
folderId: req.query.folderId,
|
||||
resourceCount: result.length.toString()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
};
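As a quick orientation to the PIT routes above, here is a hedged sketch of invoking the rollback endpoint over HTTP. The /pit prefix comes from the router registration shown earlier in this diff; the /api/v1 segment, host, token, and all field values are placeholder assumptions.
// Sketch only: exercises POST /commits/:commitId/rollback as defined above.
// The full URL prefix, host, token, and values are assumptions for illustration.
const commitId = "example-commit-id";
const accessToken = process.env.INFISICAL_TOKEN ?? "";
const response = await fetch(`https://app.infisical.com/api/v1/pit/commits/${commitId}/rollback`, {
  method: "POST",
  headers: { Authorization: `Bearer ${accessToken}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    projectId: "example-project-id",
    environment: "dev",
    folderId: "example-folder-id",
    deepRollback: false,
    message: "Roll back to last known good state"
  })
});
const { success, totalChanges } = (await response.json()) as { success: boolean; totalChanges?: number };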
|
@@ -145,7 +145,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
|
||||
externalId: profile.nameID,
|
||||
email,
|
||||
email: email.toLowerCase(),
|
||||
firstName,
|
||||
lastName: lastName as string,
|
||||
relayState: (req.body as { RelayState?: string }).RelayState,
|
||||
|
@@ -1,7 +1,7 @@
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
||||
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { EnforcementLevel } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@@ -30,10 +30,19 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
.min(1, { message: "At least one approver should be provided" })
|
||||
.max(100, "Cannot have more than 100 approvers"),
|
||||
bypassers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 bypassers")
|
||||
.optional(),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
@@ -75,10 +84,19 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
.min(1, { message: "At least one approver should be provided" })
|
||||
.max(100, "Cannot have more than 100 approvers"),
|
||||
bypassers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 bypassers")
|
||||
.optional(),
|
||||
approvals: z.number().min(1).default(1),
|
||||
secretPath: z
|
||||
.string()
|
||||
@@ -157,6 +175,12 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
id: z.string().nullable().optional(),
|
||||
type: z.nativeEnum(ApproverType)
|
||||
})
|
||||
.array(),
|
||||
bypassers: z
|
||||
.object({
|
||||
id: z.string().nullable().optional(),
|
||||
type: z.nativeEnum(BypasserType)
|
||||
})
|
||||
.array()
|
||||
})
|
||||
.array()
|
||||
@@ -193,7 +217,14 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.object({
|
||||
id: z.string().nullable().optional(),
|
||||
type: z.nativeEnum(ApproverType),
|
||||
name: z.string().nullable().optional()
|
||||
username: z.string().nullable().optional()
|
||||
})
|
||||
.array(),
|
||||
bypassers: z
|
||||
.object({
|
||||
id: z.string().nullable().optional(),
|
||||
type: z.nativeEnum(BypasserType),
|
||||
username: z.string().nullable().optional()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
|
@@ -47,6 +47,11 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
userId: z.string().nullable().optional()
|
||||
})
|
||||
.array(),
|
||||
bypassers: z
|
||||
.object({
|
||||
userId: z.string().nullable().optional()
|
||||
})
|
||||
.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish(),
|
||||
@@ -266,6 +271,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
name: z.string(),
|
||||
approvals: z.number(),
|
||||
approvers: approvalRequestUser.array(),
|
||||
bypassers: approvalRequestUser.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish(),
|
||||
|
@@ -0,0 +1,16 @@
import {
  CreateOCIVaultSyncSchema,
  OCIVaultSyncSchema,
  UpdateOCIVaultSyncSchema
} from "@app/ee/services/secret-sync/oci-vault";
import { registerSyncSecretsEndpoints } from "@app/server/routes/v1/secret-sync-routers/secret-sync-endpoints";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";

export const registerOCIVaultSyncRouter = async (server: FastifyZodProvider) =>
  registerSyncSecretsEndpoints({
    destination: SecretSync.OCIVault,
    server,
    responseSchema: OCIVaultSyncSchema,
    createSchema: CreateOCIVaultSyncSchema,
    updateSchema: UpdateOCIVaultSyncSchema
  });
|
@@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
hide: true,
|
||||
deprecated: true,
|
||||
tags: [ApiDocsTags.Projects],
|
||||
description: "Roll back project secrets to those captured in a secret snapshot version.",
|
||||
description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
@@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
throw new Error(
|
||||
"This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
|
||||
);
|
||||
|
||||
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
|
@@ -97,7 +97,7 @@ export const registerSshCertificateTemplateRouter = async (server: FastifyZodPro
|
||||
allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
|
||||
})
|
||||
.refine((data) => ms(data.maxTTL) >= ms(data.ttl), {
|
||||
message: "Max TLL must be greater than or equal to TTL",
|
||||
message: "Max TTL must be greater than or equal to TTL",
|
||||
path: ["maxTTL"]
|
||||
}),
|
||||
response: {
|
||||
|
@@ -73,7 +73,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const host = await server.services.sshHost.getSshHost({
|
||||
const host = await server.services.sshHost.getSshHostById({
|
||||
sshHostId: req.params.sshHostId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
|
@@ -2,6 +2,10 @@ import {
|
||||
registerSecretRotationV2Router,
|
||||
SECRET_ROTATION_REGISTER_ROUTER_MAP
|
||||
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
|
||||
import {
|
||||
registerSecretScanningV2Router,
|
||||
SECRET_SCANNING_REGISTER_ROUTER_MAP
|
||||
} from "@app/ee/routes/v2/secret-scanning-v2-routers";
|
||||
|
||||
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
|
||||
import { registerProjectRoleRouter } from "./project-role-router";
|
||||
@@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
{ prefix: "/secret-rotations" }
|
||||
);
|
||||
|
||||
await server.register(
|
||||
async (secretScanningV2Router) => {
|
||||
// register generic secret scanning endpoints
|
||||
await secretScanningV2Router.register(registerSecretScanningV2Router);
|
||||
|
||||
// register service-specific secret scanning endpoints (gitlab/github, etc.)
|
||||
for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
|
||||
await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
|
||||
}
|
||||
},
|
||||
{ prefix: "/secret-scanning" }
|
||||
);
|
||||
};
|
||||
|
@@ -5,6 +5,7 @@ import { registerAwsIamUserSecretRotationRouter } from "./aws-iam-user-secret-ro
|
||||
import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-rotation-router";
|
||||
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
|
||||
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
|
||||
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
|
||||
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
|
||||
|
||||
export * from "./secret-rotation-v2-router";
|
||||
@@ -15,6 +16,7 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
|
||||
> = {
|
||||
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
|
||||
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter,
|
||||
[SecretRotation.MySqlCredentials]: registerMySqlCredentialsRotationRouter,
|
||||
[SecretRotation.Auth0ClientSecret]: registerAuth0ClientSecretRotationRouter,
|
||||
[SecretRotation.AzureClientSecret]: registerAzureClientSecretRotationRouter,
|
||||
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,
|
||||
|
@@ -0,0 +1,19 @@
import {
  CreateMySqlCredentialsRotationSchema,
  MySqlCredentialsRotationSchema,
  UpdateMySqlCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";

import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";

export const registerMySqlCredentialsRotationRouter = async (server: FastifyZodProvider) =>
  registerSecretRotationEndpoints({
    type: SecretRotation.MySqlCredentials,
    server,
    responseSchema: MySqlCredentialsRotationSchema,
    createSchema: CreateMySqlCredentialsRotationSchema,
    updateSchema: UpdateMySqlCredentialsRotationSchema,
    generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
  });
|
@@ -6,6 +6,7 @@ import { AwsIamUserSecretRotationListItemSchema } from "@app/ee/services/secret-
|
||||
import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/azure-client-secret";
|
||||
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
|
||||
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
|
||||
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
|
||||
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
|
||||
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
|
||||
import { ApiDocsTags, SecretRotations } from "@app/lib/api-docs";
|
||||
@@ -16,6 +17,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
|
||||
const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
|
||||
PostgresCredentialsRotationListItemSchema,
|
||||
MsSqlCredentialsRotationListItemSchema,
|
||||
MySqlCredentialsRotationListItemSchema,
|
||||
Auth0ClientSecretRotationListItemSchema,
|
||||
AzureClientSecretRotationListItemSchema,
|
||||
AwsIamUserSecretRotationListItemSchema,
|
||||
|
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  CreateGitHubDataSourceSchema,
  GitHubDataSourceSchema,
  UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitHub,
    server,
    responseSchema: GitHubDataSourceSchema,
    createSchema: CreateGitHubDataSourceSchema,
    updateSchema: UpdateGitHubDataSourceSchema
  });
backend/src/ee/routes/v2/secret-scanning-v2-routers/index.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";

export * from "./secret-scanning-v2-router";

export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  SecretScanningDataSource,
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};
|
@@ -0,0 +1,593 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningScanStatus
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
|
||||
import {
|
||||
TSecretScanningDataSource,
|
||||
TSecretScanningDataSourceInput
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
|
||||
import { startsWithVowel } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerSecretScanningEndpoints = <
|
||||
T extends TSecretScanningDataSource,
|
||||
I extends TSecretScanningDataSourceInput
|
||||
>({
|
||||
server,
|
||||
type,
|
||||
createSchema,
|
||||
updateSchema,
|
||||
responseSchema
|
||||
}: {
|
||||
type: SecretScanningDataSource;
|
||||
server: FastifyZodProvider;
|
||||
createSchema: z.ZodType<{
|
||||
name: string;
|
||||
projectId: string;
|
||||
connectionId?: string;
|
||||
config: Partial<I["config"]>;
|
||||
description?: string | null;
|
||||
isAutoScanEnabled?: boolean;
|
||||
}>;
|
||||
updateSchema: z.ZodType<{
|
||||
name?: string;
|
||||
config?: Partial<I["config"]>;
|
||||
description?: string | null;
|
||||
isAutoScanEnabled?: boolean;
|
||||
}>;
|
||||
responseSchema: z.ZodTypeAny;
|
||||
}) => {
|
||||
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `List the ${sourceType} Data Sources for the specified project.`,
|
||||
querystring: z.object({
|
||||
projectId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Project ID required")
|
||||
.describe(SecretScanningDataSources.LIST(type).projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSources: responseSchema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const {
|
||||
query: { projectId }
|
||||
} = req;
|
||||
|
||||
const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
|
||||
{ projectId, type },
|
||||
req.permission
|
||||
)) as T[];
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
|
||||
metadata: {
|
||||
type,
|
||||
count: dataSources.length,
|
||||
dataSourceIds: dataSources.map((source) => source.id)
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSources };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:dataSourceId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Get the specified ${sourceType} Data Source by ID.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
|
||||
{ dataSourceId, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: dataSource.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
|
||||
metadata: {
|
||||
dataSourceId,
|
||||
type
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/data-source-name/:dataSourceName`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Get the specified ${sourceType} Data Source by name and project ID.`,
|
||||
params: z.object({
|
||||
sourceName: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Data Source name required")
|
||||
.describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
|
||||
}),
|
||||
querystring: z.object({
|
||||
projectId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Project ID required")
|
||||
.describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { sourceName } = req.params;
|
||||
const { projectId } = req.query;
|
||||
|
||||
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
|
||||
{ sourceName, projectId, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
|
||||
metadata: {
|
||||
dataSourceId: dataSource.id,
|
||||
type
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Create ${
|
||||
startsWithVowel(sourceType) ? "an" : "a"
|
||||
} ${sourceType} Data Source for the specified project.`,
|
||||
body: createSchema,
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
|
||||
{ ...req.body, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: dataSource.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
|
||||
metadata: {
|
||||
dataSourceId: dataSource.id,
|
||||
type,
|
||||
...req.body
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:dataSourceId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Update the specified ${sourceType} Data Source.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
|
||||
}),
|
||||
body: updateSchema,
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
|
||||
{ ...req.body, dataSourceId, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: dataSource.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
|
||||
metadata: {
|
||||
dataSourceId,
|
||||
type,
|
||||
...req.body
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: `/:dataSourceId`,
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Delete the specified ${sourceType} Data Source.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
|
||||
{ type, dataSourceId },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: dataSource.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
|
||||
metadata: {
|
||||
type,
|
||||
dataSourceId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: `/:dataSourceId/scan`,
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Trigger a scan for the specified ${sourceType} Data Source.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
|
||||
{ type, dataSourceId },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: dataSource.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
|
||||
metadata: {
|
||||
type,
|
||||
dataSourceId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: `/:dataSourceId/resources/:resourceId/scan`,
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
|
||||
resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSource: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId, resourceId } = req.params;
|
||||
|
||||
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
|
||||
{ type, dataSourceId, resourceId },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: dataSource.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
|
||||
metadata: {
|
||||
type,
|
||||
dataSourceId,
|
||||
resourceId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSource };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:dataSourceId/resources",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ resources: SecretScanningResourcesSchema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
|
||||
{ dataSourceId, type },
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
|
||||
metadata: {
|
||||
dataSourceId,
|
||||
type,
|
||||
resourceIds: resources.map((resource) => resource.id),
|
||||
count: resources.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { resources };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:dataSourceId/scans",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ scans: SecretScanningScansSchema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
|
||||
{ dataSourceId, type },
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_SCAN_LIST,
|
||||
metadata: {
|
||||
dataSourceId,
|
||||
type,
|
||||
count: scans.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { scans };
|
||||
}
|
||||
});
|
||||
|
||||
// not exposed, for UI only
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:dataSourceId/resources-dashboard",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
resources: SecretScanningResourcesSchema.extend({
|
||||
lastScannedAt: z.date().nullish(),
|
||||
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
|
||||
lastScanStatusMessage: z.string().nullish(),
|
||||
unresolvedFindings: z.number()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const { resources, projectId } =
|
||||
await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
|
||||
{ dataSourceId, type },
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
|
||||
metadata: {
|
||||
dataSourceId,
|
||||
type,
|
||||
resourceIds: resources.map((resource) => resource.id),
|
||||
count: resources.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { resources };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:dataSourceId/scans-dashboard",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
params: z.object({
|
||||
dataSourceId: z.string().uuid()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
scans: SecretScanningScansSchema.extend({
|
||||
unresolvedFindings: z.number(),
|
||||
resolvedFindings: z.number(),
|
||||
resourceName: z.string()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { dataSourceId } = req.params;
|
||||
|
||||
const { scans, projectId } =
|
||||
await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
|
||||
{ dataSourceId, type },
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_SCAN_LIST,
|
||||
metadata: {
|
||||
dataSourceId,
|
||||
type,
|
||||
count: scans.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { scans };
|
||||
}
|
||||
});
|
||||
};
|
@@ -0,0 +1,366 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretScanningConfigsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
|
||||
import {
|
||||
SecretScanningFindingStatus,
|
||||
SecretScanningScanStatus
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
SecretScanningDataSourceSchema,
|
||||
SecretScanningFindingSchema
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
|
||||
import {
|
||||
ApiDocsTags,
|
||||
SecretScanningConfigs,
|
||||
SecretScanningDataSources,
|
||||
SecretScanningFindings
|
||||
} from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
|
||||
|
||||
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/data-sources/options",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: "List the available Secret Scanning Data Source Options.",
|
||||
response: {
|
||||
200: z.object({
|
||||
dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: () => {
|
||||
const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
|
||||
return { dataSourceOptions };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/data-sources",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: "List all the Secret Scanning Data Sources for the specified project.",
|
||||
querystring: z.object({
|
||||
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const {
|
||||
query: { projectId },
|
||||
permission
|
||||
} = req;
|
||||
|
||||
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
|
||||
{ projectId },
|
||||
permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
|
||||
metadata: {
|
||||
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
|
||||
count: dataSources.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { dataSources };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/findings",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: "List all the Secret Scanning Findings for the specified project.",
|
||||
querystring: z.object({
|
||||
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ findings: SecretScanningFindingSchema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const {
|
||||
query: { projectId },
|
||||
permission
|
||||
} = req;
|
||||
|
||||
const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
|
||||
projectId,
|
||||
permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_FINDING_LIST,
|
||||
metadata: {
|
||||
findingIds: findings.map((finding) => finding.id),
|
||||
count: findings.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { findings };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/findings/:findingId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.SecretScanning],
|
||||
description: "Update the specified Secret Scanning Finding.",
|
||||
params: z.object({
|
||||
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
|
||||
}),
|
||||
body: z.object({
|
||||
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
|
||||
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ finding: SecretScanningFindingSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
      const {
        params: { findingId },
        body,
        permission
      } = req;

      const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
        { findingId, ...body },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_FINDING_UPDATE,
          metadata: {
            findingId,
            ...body
          }
        }
      });

      return { finding };
    }
  });

  server.route({
    method: "GET",
    url: "/configs",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Get the Secret Scanning Config for the specified project.",
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
      }),
      response: {
        200: z.object({
          config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_CONFIG_GET
        }
      });

      return { config };
    }
  });

  server.route({
    method: "PATCH",
    url: "/configs",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Update the specified Secret Scanning Configuration.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
      }),
      body: z.object({
        content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
      }),
      response: {
        200: z.object({ config: SecretScanningConfigsSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        body,
        permission
      } = req;

      const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
        { projectId, ...body },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
          metadata: body
        }
      });

      return { config };
    }
  });

  // not exposed, for UI only
  server.route({
    method: "GET",
    url: "/data-sources-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required")
      }),
      response: {
        200: z.object({
          dataSources: z
            .intersection(
              SecretScanningDataSourceSchema,
              z.object({
                lastScannedAt: z.date().nullish(),
                lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
                lastScanStatusMessage: z.string().nullish(),
                unresolvedFindings: z.number().nullish()
              })
            )
            .array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
        { projectId },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            dataSourceIds: dataSources.map((dataSource) => dataSource.id),
            count: dataSources.length
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/unresolved-findings-count",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
      }),
      response: {
        200: z.object({ unresolvedFindings: z.number() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const unresolvedFindings =
        await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
          projectId,
          permission
        );

      return { unresolvedFindings };
    }
  });
};
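For orientation, here is a minimal client-side sketch of how the GET /configs route defined above might be called. It is illustrative only: the base URL (including whatever prefix this router is mounted under), the bearer token, and the project ID are assumptions rather than values from this diff, and the response shape simply mirrors the zod schema above.

// Illustrative sketch only, not part of the diff. Assumes a fetch-capable runtime
// and that `baseUrl` already includes the prefix this router is mounted under.
type SecretScanningConfig = {
  content?: string | null;
  projectId: string;
  updatedAt?: string | null; // dates arrive serialized on the wire
};

async function fetchSecretScanningConfig(
  baseUrl: string, // assumption: the Infisical instance URL plus this router's prefix
  accessToken: string, // JWT or identity access token, per verifyAuth above
  projectId: string
): Promise<SecretScanningConfig> {
  const res = await fetch(`${baseUrl}/configs?projectId=${encodeURIComponent(projectId)}`, {
    headers: { Authorization: `Bearer ${accessToken}` }
  });
  if (!res.ok) throw new Error(`Failed to fetch secret scanning config: ${res.status}`);
  const data = (await res.json()) as { config: SecretScanningConfig };
  return data.config;
}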
@@ -8,3 +8,10 @@ export const accessApprovalPolicyApproverDALFactory = (db: TDbClient) => {
  const accessApprovalPolicyApproverOrm = ormify(db, TableName.AccessApprovalPolicyApprover);
  return { ...accessApprovalPolicyApproverOrm };
};

export type TAccessApprovalPolicyBypasserDALFactory = ReturnType<typeof accessApprovalPolicyBypasserDALFactory>;

export const accessApprovalPolicyBypasserDALFactory = (db: TDbClient) => {
  const accessApprovalPolicyBypasserOrm = ormify(db, TableName.AccessApprovalPolicyBypasser);
  return { ...accessApprovalPolicyBypasserOrm };
};

@@ -1,11 +1,11 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from "@app/db/schemas";
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies, TUsers } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";

import { ApproverType } from "./access-approval-policy-types";
import { ApproverType, BypasserType } from "./access-approval-policy-types";

export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;

@@ -34,9 +34,22 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
        `${TableName.AccessApprovalPolicyApprover}.policyId`
      )
      .leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`)
      .leftJoin(
        TableName.AccessApprovalPolicyBypasser,
        `${TableName.AccessApprovalPolicy}.id`,
        `${TableName.AccessApprovalPolicyBypasser}.policyId`
      )
      .leftJoin<TUsers>(
        db(TableName.Users).as("bypasserUsers"),
        `${TableName.AccessApprovalPolicyBypasser}.bypasserUserId`,
        `bypasserUsers.id`
      )
      .select(tx.ref("username").withSchema(TableName.Users).as("approverUsername"))
      .select(tx.ref("username").withSchema("bypasserUsers").as("bypasserUsername"))
      .select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
      .select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover))
      .select(tx.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser))
      .select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
      .select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
      .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
      .select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
@@ -129,6 +142,23 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
            id,
            type: ApproverType.Group
          })
        },
        {
          key: "bypasserUserId",
          label: "bypassers" as const,
          mapper: ({ bypasserUserId: id, bypasserUsername }) => ({
            id,
            type: BypasserType.User,
            name: bypasserUsername
          })
        },
        {
          key: "bypasserGroupId",
          label: "bypassers" as const,
          mapper: ({ bypasserGroupId: id }) => ({
            id,
            type: BypasserType.Group
          })
        }
      ]
    });
@@ -144,5 +174,28 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
    return softDeletedPolicy;
  };

  return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
  const findLastValidPolicy = async ({ envId, secretPath }: { envId: string; secretPath: string }, tx?: Knex) => {
    try {
      const result = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
        .where(
          // eslint-disable-next-line @typescript-eslint/no-misused-promises
          buildFindFilter(
            {
              envId,
              secretPath
            },
            TableName.AccessApprovalPolicy
          )
        )
        .orderBy("deletedAt", "desc")
        .orderByRaw(`"deletedAt" IS NULL`)
        .first();

      return result;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLastValidPolicy" });
    }
  };

  return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
};
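As context for the new findLastValidPolicy helper above, a hedged usage sketch follows. The wiring is assumed for illustration (where the db client comes from, the relative import path, and the envId/secretPath values); only the helper itself comes from the diff.

// Illustrative sketch only, not part of the diff.
import { TDbClient } from "@app/db";

import { accessApprovalPolicyDALFactory } from "./access-approval-policy-dal"; // path assumed

export const getPolicyForSecretPath = async (db: TDbClient, envId: string, secretPath: string) => {
  const accessApprovalPolicyDAL = accessApprovalPolicyDALFactory(db);

  // findLastValidPolicy filters by envId + secretPath and, per the ordering above,
  // is intended to surface a live (non-soft-deleted) policy when one exists.
  const policy = await accessApprovalPolicyDAL.findLastValidPolicy({ envId, secretPath });

  return policy && !policy.deletedAt ? policy : undefined;
};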
@@ -4,6 +4,7 @@ import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -14,10 +15,14 @@ import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-req
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
import { TGroupDALFactory } from "../group/group-dal";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import {
  TAccessApprovalPolicyApproverDALFactory,
  TAccessApprovalPolicyBypasserDALFactory
} from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import {
  ApproverType,
  BypasserType,
  TCreateAccessApprovalPolicy,
  TDeleteAccessApprovalPolicy,
  TGetAccessApprovalPolicyByIdDTO,
@@ -32,12 +37,14 @@ type TAccessApprovalPolicyServiceFactoryDep = {
  accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
  projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findOne">;
  accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory;
  accessApprovalPolicyBypasserDAL: TAccessApprovalPolicyBypasserDALFactory;
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
  groupDAL: TGroupDALFactory;
  userDAL: Pick<TUserDALFactory, "find">;
  accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
  additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
  accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
};

export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
@@ -45,6 +52,7 @@ export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprov
export const accessApprovalPolicyServiceFactory = ({
  accessApprovalPolicyDAL,
  accessApprovalPolicyApproverDAL,
  accessApprovalPolicyBypasserDAL,
  groupDAL,
  permissionService,
  projectEnvDAL,
@@ -52,7 +60,8 @@ export const accessApprovalPolicyServiceFactory = ({
  userDAL,
  accessApprovalRequestDAL,
  additionalPrivilegeDAL,
  accessApprovalRequestReviewerDAL
  accessApprovalRequestReviewerDAL,
  orgMembershipDAL
}: TAccessApprovalPolicyServiceFactoryDep) => {
  const createAccessApprovalPolicy = async ({
    name,
@@ -63,6 +72,7 @@ export const accessApprovalPolicyServiceFactory = ({
    actorAuthMethod,
    approvals,
    approvers,
    bypassers,
    projectSlug,
    environment,
    enforcementLevel,
@@ -82,7 +92,7 @@ export const accessApprovalPolicyServiceFactory = ({
      .filter(Boolean) as string[];

    const userApproverNames = approvers
      .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
      .map((approver) => (approver.type === ApproverType.User ? approver.username : undefined))
      .filter(Boolean) as string[];

    if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
@@ -147,6 +157,44 @@ export const accessApprovalPolicyServiceFactory = ({
      .map((user) => user.id);
    verifyAllApprovers.push(...verifyGroupApprovers);

    let groupBypassers: string[] = [];
    let bypasserUserIds: string[] = [];

    if (bypassers && bypassers.length) {
      groupBypassers = bypassers
        .filter((bypasser) => bypasser.type === BypasserType.Group)
        .map((bypasser) => bypasser.id) as string[];

      const userBypassers = bypassers
        .filter((bypasser) => bypasser.type === BypasserType.User)
        .map((bypasser) => bypasser.id)
        .filter(Boolean) as string[];

      const userBypasserNames = bypassers
        .map((bypasser) => (bypasser.type === BypasserType.User ? bypasser.username : undefined))
        .filter(Boolean) as string[];

      bypasserUserIds = userBypassers;
      if (userBypasserNames.length) {
        const bypasserUsers = await userDAL.find({
          $in: {
            username: userBypasserNames
          }
        });

        const bypasserNamesFromDb = bypasserUsers.map((user) => user.username);
        const invalidUsernames = userBypasserNames.filter((username) => !bypasserNamesFromDb.includes(username));

        if (invalidUsernames.length) {
          throw new BadRequestError({
            message: `Invalid bypasser user: ${invalidUsernames.join(", ")}`
          });
        }

        bypasserUserIds = bypasserUserIds.concat(bypasserUsers.map((user) => user.id));
      }
    }

    const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
      const doc = await accessApprovalPolicyDAL.create(
        {
@@ -159,6 +207,7 @@ export const accessApprovalPolicyServiceFactory = ({
        },
        tx
      );

      if (approverUserIds.length) {
        await accessApprovalPolicyApproverDAL.insertMany(
          approverUserIds.map((userId) => ({
@@ -179,8 +228,29 @@ export const accessApprovalPolicyServiceFactory = ({
        );
      }

      if (bypasserUserIds.length) {
        await accessApprovalPolicyBypasserDAL.insertMany(
          bypasserUserIds.map((userId) => ({
            bypasserUserId: userId,
            policyId: doc.id
          })),
          tx
        );
      }

      if (groupBypassers.length) {
        await accessApprovalPolicyBypasserDAL.insertMany(
          groupBypassers.map((groupId) => ({
            bypasserGroupId: groupId,
            policyId: doc.id
          })),
          tx
        );
      }

      return doc;
    });

    return { ...accessApproval, environment: env, projectId: project.id };
  };

@@ -211,6 +281,7 @@ export const accessApprovalPolicyServiceFactory = ({
  const updateAccessApprovalPolicy = async ({
    policyId,
    approvers,
    bypassers,
    secretPath,
    name,
    actorId,
@@ -231,15 +302,15 @@ export const accessApprovalPolicyServiceFactory = ({
      .filter(Boolean) as string[];

    const userApproverNames = approvers
      .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
      .map((approver) => (approver.type === ApproverType.User ? approver.username : undefined))
      .filter(Boolean) as string[];

    const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
    const currentAppovals = approvals || accessApprovalPolicy.approvals;
    const currentApprovals = approvals || accessApprovalPolicy.approvals;
    if (
      groupApprovers?.length === 0 &&
      userApprovers &&
      currentAppovals > userApprovers.length + userApproverNames.length
      currentApprovals > userApprovers.length + userApproverNames.length
    ) {
      throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
    }
@@ -258,6 +329,78 @@ export const accessApprovalPolicyServiceFactory = ({

    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);

    let groupBypassers: string[] = [];
    let bypasserUserIds: string[] = [];

    if (bypassers && bypassers.length) {
      groupBypassers = bypassers
        .filter((bypasser) => bypasser.type === BypasserType.Group)
        .map((bypasser) => bypasser.id) as string[];

      groupBypassers = [...new Set(groupBypassers)];

      const userBypassers = bypassers
        .filter((bypasser) => bypasser.type === BypasserType.User)
        .map((bypasser) => bypasser.id)
        .filter(Boolean) as string[];

      const userBypasserNames = bypassers
        .map((bypasser) => (bypasser.type === BypasserType.User ? bypasser.username : undefined))
        .filter(Boolean) as string[];

      bypasserUserIds = userBypassers;
      if (userBypasserNames.length) {
        const bypasserUsers = await userDAL.find({
          $in: {
            username: userBypasserNames
          }
        });

        const bypasserNamesFromDb = bypasserUsers.map((user) => user.username);
        const invalidUsernames = userBypasserNames.filter((username) => !bypasserNamesFromDb.includes(username));

        if (invalidUsernames.length) {
          throw new BadRequestError({
            message: `Invalid bypasser user: ${invalidUsernames.join(", ")}`
          });
        }

        bypasserUserIds = [...new Set(bypasserUserIds.concat(bypasserUsers.map((user) => user.id)))];
      }

      // Validate user bypassers
      if (bypasserUserIds.length > 0) {
        const orgMemberships = await orgMembershipDAL.find({
          $in: { userId: bypasserUserIds },
          orgId: actorOrgId
        });

        if (orgMemberships.length !== bypasserUserIds.length) {
          const foundUserIdsInOrg = new Set(orgMemberships.map((mem) => mem.userId));
          const missingUserIds = bypasserUserIds.filter((id) => !foundUserIdsInOrg.has(id));
          throw new BadRequestError({
            message: `One or more specified bypasser users are not part of the organization or do not exist. Invalid or non-member user IDs: ${missingUserIds.join(", ")}`
          });
        }
      }

      // Validate group bypassers
      if (groupBypassers.length > 0) {
        const orgGroups = await groupDAL.find({
          $in: { id: groupBypassers },
          orgId: actorOrgId
        });

        if (orgGroups.length !== groupBypassers.length) {
          const foundGroupIdsInOrg = new Set(orgGroups.map((group) => group.id));
          const missingGroupIds = groupBypassers.filter((id) => !foundGroupIdsInOrg.has(id));
          throw new BadRequestError({
            message: `One or more specified bypasser groups are not part of the organization or do not exist. Invalid or non-member group IDs: ${missingGroupIds.join(", ")}`
          });
        }
      }
    }

    const updatedPolicy = await accessApprovalPolicyDAL.transaction(async (tx) => {
      const doc = await accessApprovalPolicyDAL.updateById(
        accessApprovalPolicy.id,
@@ -313,6 +456,28 @@ export const accessApprovalPolicyServiceFactory = ({
        );
      }

      await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);

      if (bypasserUserIds.length) {
        await accessApprovalPolicyBypasserDAL.insertMany(
          bypasserUserIds.map((userId) => ({
            bypasserUserId: userId,
            policyId: doc.id
          })),
          tx
        );
      }

      if (groupBypassers.length) {
        await accessApprovalPolicyBypasserDAL.insertMany(
          groupBypassers.map((groupId) => ({
            bypasserGroupId: groupId,
            policyId: doc.id
          })),
          tx
        );
      }

      return doc;
    });
    return {

@@ -18,11 +18,20 @@ export enum ApproverType {
  User = "user"
}

export enum BypasserType {
  Group = "group",
  User = "user"
}

export type TCreateAccessApprovalPolicy = {
  approvals: number;
  secretPath: string;
  environment: string;
  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
  bypassers?: (
    | { type: BypasserType.Group; id: string }
    | { type: BypasserType.User; id?: string; username?: string }
  )[];
  projectSlug: string;
  name: string;
  enforcementLevel: EnforcementLevel;
@@ -32,7 +41,11 @@ export type TCreateAccessApprovalPolicy = {
export type TUpdateAccessApprovalPolicy = {
  policyId: string;
  approvals?: number;
  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
  bypassers?: (
    | { type: BypasserType.Group; id: string }
    | { type: BypasserType.User; id?: string; username?: string }
  )[];
  secretPath?: string;
  name?: string;
  enforcementLevel?: EnforcementLevel;
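To make the new type shapes concrete, here is an illustrative payload matching the updated TCreateAccessApprovalPolicy above: approvers may now be referenced by username instead of name, and the optional bypassers array accepts both users and groups. All ids, usernames, and slugs below are hypothetical, and the import path assumes the example sits alongside the types file.

// Illustrative only; every concrete value here is made up.
import { ApproverType, BypasserType, TCreateAccessApprovalPolicy } from "./access-approval-policy-types";

const examplePolicyInput: Pick<
  TCreateAccessApprovalPolicy,
  "approvals" | "secretPath" | "environment" | "approvers" | "bypassers" | "projectSlug" | "name"
> = {
  approvals: 2,
  secretPath: "/",
  environment: "production",
  projectSlug: "example-project",
  name: "Production access policy",
  approvers: [
    { type: ApproverType.User, username: "alice@example.com" },
    { type: ApproverType.Group, id: "11111111-1111-1111-1111-111111111111" }
  ],
  bypassers: [
    { type: BypasserType.User, username: "oncall@example.com" },
    { type: BypasserType.Group, id: "22222222-2222-2222-2222-222222222222" }
  ]
};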
@@ -1,7 +1,13 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests, TUsers } from "@app/db/schemas";
import {
  AccessApprovalRequestsSchema,
  TableName,
  TAccessApprovalRequests,
  TUserGroupMembership,
  TUsers
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";

@@ -28,12 +34,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
        `${TableName.AccessApprovalRequest}.policyId`,
        `${TableName.AccessApprovalPolicy}.id`
      )

      .leftJoin(
        TableName.AccessApprovalRequestReviewer,
        `${TableName.AccessApprovalRequest}.id`,
        `${TableName.AccessApprovalRequestReviewer}.requestId`
      )

      .leftJoin(
        TableName.AccessApprovalPolicyApprover,
        `${TableName.AccessApprovalPolicy}.id`,
@@ -46,6 +52,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
      )
      .leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)

      .leftJoin(
        TableName.AccessApprovalPolicyBypasser,
        `${TableName.AccessApprovalPolicy}.id`,
        `${TableName.AccessApprovalPolicyBypasser}.policyId`
      )
      .leftJoin<TUserGroupMembership>(
        db(TableName.UserGroupMembership).as("bypasserUserGroupMembership"),
        `${TableName.AccessApprovalPolicyBypasser}.bypasserGroupId`,
        `bypasserUserGroupMembership.groupId`
      )

      .join<TUsers>(
        db(TableName.Users).as("requestedByUser"),
        `${TableName.AccessApprovalRequest}.requestedByUserId`,
@@ -69,6 +86,9 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
      .select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
      .select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))

      .select(db.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser))
      .select(db.ref("userId").withSchema("bypasserUserGroupMembership").as("bypasserGroupUserId"))

      .select(
        db.ref("projectId").withSchema(TableName.Environment),
        db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
@@ -145,7 +165,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
            }
          : null,

        isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
        isApproved: !!doc.policyDeletedAt || !!doc.privilegeId || doc.status !== ApprovalStatus.PENDING
      }),
      childrenMapper: [
        {
@@ -158,6 +178,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
          key: "approverGroupUserId",
          label: "approvers" as const,
          mapper: ({ approverGroupUserId }) => approverGroupUserId
        },
        { key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },
        {
          key: "bypasserGroupUserId",
          label: "bypassers" as const,
          mapper: ({ bypasserGroupUserId }) => bypasserGroupUserId
        }
      ]
    });
@@ -166,7 +192,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {

      return formattedDocs.map((doc) => ({
        ...doc,
        policy: { ...doc.policy, approvers: doc.approvers }
        policy: { ...doc.policy, approvers: doc.approvers, bypassers: doc.bypassers }
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "FindRequestsWithPrivilege" });
@@ -193,7 +219,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
        `${TableName.AccessApprovalPolicy}.id`,
        `${TableName.AccessApprovalPolicyApprover}.policyId`
      )

      .leftJoin<TUsers>(
        db(TableName.Users).as("accessApprovalPolicyApproverUser"),
        `${TableName.AccessApprovalPolicyApprover}.approverUserId`,
@@ -204,13 +229,33 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
        `${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
        `${TableName.UserGroupMembership}.groupId`
      )

      .leftJoin<TUsers>(
        db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"),
        `${TableName.UserGroupMembership}.userId`,
        "accessApprovalPolicyGroupApproverUser.id"
      )

      .leftJoin(
        TableName.AccessApprovalPolicyBypasser,
        `${TableName.AccessApprovalPolicy}.id`,
        `${TableName.AccessApprovalPolicyBypasser}.policyId`
      )
      .leftJoin<TUsers>(
        db(TableName.Users).as("accessApprovalPolicyBypasserUser"),
        `${TableName.AccessApprovalPolicyBypasser}.bypasserUserId`,
        "accessApprovalPolicyBypasserUser.id"
      )
      .leftJoin<TUserGroupMembership>(
        db(TableName.UserGroupMembership).as("bypasserUserGroupMembership"),
        `${TableName.AccessApprovalPolicyBypasser}.bypasserGroupId`,
        `bypasserUserGroupMembership.groupId`
      )
      .leftJoin<TUsers>(
        db(TableName.Users).as("accessApprovalPolicyGroupBypasserUser"),
        `bypasserUserGroupMembership.userId`,
        "accessApprovalPolicyGroupBypasserUser.id"
      )

      .leftJoin(
        TableName.AccessApprovalRequestReviewer,
        `${TableName.AccessApprovalRequest}.id`,
@@ -241,6 +286,18 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
        tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
        tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),

        // Bypassers
        tx.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser),
        tx.ref("userId").withSchema("bypasserUserGroupMembership").as("bypasserGroupUserId"),
        tx.ref("email").withSchema("accessApprovalPolicyBypasserUser").as("bypasserEmail"),
        tx.ref("email").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupEmail"),
        tx.ref("username").withSchema("accessApprovalPolicyBypasserUser").as("bypasserUsername"),
        tx.ref("username").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupUsername"),
        tx.ref("firstName").withSchema("accessApprovalPolicyBypasserUser").as("bypasserFirstName"),
        tx.ref("firstName").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupFirstName"),
        tx.ref("lastName").withSchema("accessApprovalPolicyBypasserUser").as("bypasserLastName"),
        tx.ref("lastName").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupLastName"),

        tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer),

        tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"),
@@ -265,7 +322,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
    try {
      const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
      const docs = await sql;
      const formatedDoc = sqlNestRelationships({
      const formattedDoc = sqlNestRelationships({
        data: docs,
        key: "id",
        parentMapper: (el) => ({
@@ -335,13 +392,51 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
            lastName,
            username
          })
        },
        {
          key: "bypasserUserId",
          label: "bypassers" as const,
          mapper: ({
            bypasserUserId,
            bypasserEmail: email,
            bypasserUsername: username,
            bypasserLastName: lastName,
            bypasserFirstName: firstName
          }) => ({
            userId: bypasserUserId,
            email,
            firstName,
            lastName,
            username
          })
        },
        {
          key: "bypasserGroupUserId",
          label: "bypassers" as const,
          mapper: ({
            userId,
            bypasserGroupEmail: email,
            bypasserGroupUsername: username,
            bypasserGroupLastName: lastName,
            bypasserFirstName: firstName
          }) => ({
            userId,
            email,
            firstName,
            lastName,
            username
          })
        }
      ]
    });
      if (!formatedDoc?.[0]) return;
      if (!formattedDoc?.[0]) return;
      return {
        ...formatedDoc[0],
        policy: { ...formatedDoc[0].policy, approvers: formatedDoc[0].approvers }
        ...formattedDoc[0],
        policy: {
          ...formattedDoc[0].policy,
          approvers: formattedDoc[0].approvers,
          bypassers: formattedDoc[0].bypassers
        }
      };
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByIdAccessApprovalRequest" });
@@ -392,14 +487,20 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
      ]
    });

    // an approval is pending if there is no reviewer rejections and no privilege ID is set
    // an approval is pending if there is no reviewer rejections, no privilege ID is set and the status is pending
    const pendingApprovals = formattedRequests.filter(
      (req) => !req.privilegeId && !req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED)
      (req) =>
        !req.privilegeId &&
        !req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) &&
        req.status === ApprovalStatus.PENDING
    );

    // an approval is finalized if there are any rejections or a privilege ID is set
    // an approval is finalized if there are any rejections, a privilege ID is set or the number of approvals is equal to the number of approvals required
    const finalizedApprovals = formattedRequests.filter(
      (req) => req.privilegeId || req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED)
      (req) =>
        req.privilegeId ||
        req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) ||
        req.status !== ApprovalStatus.PENDING
    );

    return { pendingCount: pendingApprovals.length, finalizedCount: finalizedApprovals.length };