Compare commits
29 Commits
project-te...misc/updat
9c611daada
71edb08942
89d8261a43
a2b2b07185
76864ababa
d17d40ebd9
07df6803a5
a09d0e8948
ee598560ec
c629705c9c
be10f6e52a
40c5ff0ad6
ab6a2b7dbb
81bfc04e7c
a757fceaed
859b643e43
91f71e0ef6
4e9e31eeb7
f6bc99b964
aea44088db
e584c9ea95
1921763fa8
5408859a18
a6b3be72a9
bf85df7e36
b9070a8fa3
3ea450e94a
7d0574087c
36916704be
@@ -133,8 +133,8 @@ RUN apt-get update && apt-get install -y \
 RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
 
 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+    && apt-get update && apt-get install -y infisical=0.41.2 \
     && rm -rf /var/lib/apt/lists/*
 
 RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@@ -127,8 +127,8 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+    && apt-get update && apt-get install -y infisical=0.41.2 \
     && rm -rf /var/lib/apt/lists/*
 
 WORKDIR /
@@ -54,8 +54,8 @@ COPY --from=build /app .
 
 # Install Infisical CLI
 RUN apt-get install -y curl bash && \
-    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.8.1 git
+    curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
+    apt-get update && apt-get install -y infisical=0.41.2 git
 
 HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
     CMD node healthcheck.js
@@ -55,9 +55,9 @@ RUN mkdir -p /etc/softhsm2/tokens && \
 # ? App setup
 
 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.8.1
+    apt-get install -y infisical=0.41.2
 
 WORKDIR /app
 
@@ -64,9 +64,9 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
 # ? App setup
 
 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.8.1
+    apt-get install -y infisical=0.41.2
 
 WORKDIR /app
 
backend/package-lock.json (generated, 11 changes)
@@ -122,7 +122,7 @@
         "tweetnacl-util": "^0.15.1",
         "uuid": "^9.0.1",
         "zod": "^3.22.4",
-        "zod-to-json-schema": "^3.22.4"
+        "zod-to-json-schema": "^3.24.5"
       },
       "bin": {
         "backend": "dist/main.js"
@@ -27442,11 +27442,12 @@
       }
     },
     "node_modules/zod-to-json-schema": {
-      "version": "3.22.4",
-      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.22.4.tgz",
-      "integrity": "sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ==",
+      "version": "3.24.5",
+      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz",
+      "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==",
+      "license": "ISC",
       "peerDependencies": {
-        "zod": "^3.22.4"
+        "zod": "^3.24.1"
       }
     }
   }
@@ -241,6 +241,6 @@
     "tweetnacl-util": "^0.15.1",
     "uuid": "^9.0.1",
     "zod": "^3.22.4",
-    "zod-to-json-schema": "^3.22.4"
+    "zod-to-json-schema": "^3.24.5"
   }
 }
@@ -0,0 +1,22 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId"))) {
+    await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
+      t.uuid("groupId").nullable();
+      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
+      t.unique(["sshHostLoginUserId", "groupId"]);
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId")) {
+    await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
+      t.dropUnique(["sshHostLoginUserId", "groupId"]);
+      t.dropColumn("groupId");
+    });
+  }
+}
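As a usage sketch (not part of the diff): once this migration runs, a mapping row may reference a group instead of a user. The table-name string and `db` handle below are hypothetical stand-ins for the schema constants used above.

```ts
import knex, { Knex } from "knex";

// Hypothetical Knex handle; connection details are placeholders.
const db: Knex = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// groupId is nullable, FK'd to Groups with ON DELETE CASCADE, and the unique
// index on (sshHostLoginUserId, groupId) blocks duplicate group mappings.
async function addGroupMapping(sshHostLoginUserId: string, groupId: string): Promise<void> {
  await db("ssh_host_login_user_mappings").insert({ sshHostLoginUserId, groupId });
}
```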
@@ -12,7 +12,8 @@ export const SshHostLoginUserMappingsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   sshHostLoginUserId: z.string().uuid(),
-  userId: z.string().uuid().nullable().optional()
+  userId: z.string().uuid().nullable().optional(),
+  groupId: z.string().uuid().nullable().optional()
 });
 
 export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;
@@ -157,10 +157,23 @@ export const groupDALFactory = (db: TDbClient) => {
     }
   };
 
+  const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
+    try {
+      const docs = await (tx || db.replicaNode())(TableName.Groups)
+        .join(TableName.GroupProjectMembership, `${TableName.Groups}.id`, `${TableName.GroupProjectMembership}.groupId`)
+        .where(`${TableName.GroupProjectMembership}.projectId`, projectId)
+        .select(selectAllTableCols(TableName.Groups));
+      return docs;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find groups by project id" });
+    }
+  };
+
   return {
     findGroups,
     findByOrgId,
     findAllGroupPossibleMembers,
+    findGroupsByProjectId,
     ...groupOrm
   };
 };
@@ -176,7 +176,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
         db.ref("name").withSchema(TableName.Groups).as("groupName"),
         db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"),
         db.ref("firstName").withSchema(TableName.Users).as("firstName"),
-        db.ref("lastName").withSchema(TableName.Users).as("lastName")
+        db.ref("lastName").withSchema(TableName.Users).as("lastName"),
+        db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
       );
 
     return docs;
@@ -132,7 +132,7 @@ export const permissionDALFactory = (db: TDbClient) => {
     }
   };
 
-  const getProjectGroupPermissions = async (projectId: string) => {
+  const getProjectGroupPermissions = async (projectId: string, filterGroupId?: string) => {
     try {
       const docs = await db
         .replicaNode()(TableName.GroupProjectMembership)
@@ -148,6 +148,11 @@ export const permissionDALFactory = (db: TDbClient) => {
           `groupCustomRoles.id`
         )
         .where(`${TableName.GroupProjectMembership}.projectId`, "=", projectId)
+        .where((bd) => {
+          if (filterGroupId) {
+            void bd.where(`${TableName.GroupProjectMembership}.groupId`, "=", filterGroupId);
+          }
+        })
        .select(
          db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
          db.ref("id").withSchema(TableName.Groups).as("groupId"),
@@ -630,6 +630,34 @@ export const permissionServiceFactory = ({
     return { permission };
   };
 
+  const checkGroupProjectPermission = async ({
+    groupId,
+    projectId,
+    checkPermissions
+  }: {
+    groupId: string;
+    projectId: string;
+    checkPermissions: ProjectPermissionSet;
+  }) => {
+    const rawGroupProjectPermissions = await permissionDAL.getProjectGroupPermissions(projectId, groupId);
+    const groupPermissions = rawGroupProjectPermissions.map((groupProjectPermission) => {
+      const rolePermissions =
+        groupProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
+      const rules = buildProjectPermissionRules(rolePermissions);
+      const permission = createMongoAbility<ProjectPermissionSet>(rules, {
+        conditionsMatcher
+      });
+
+      return {
+        permission,
+        id: groupProjectPermission.groupId,
+        name: groupProjectPermission.username,
+        membershipId: groupProjectPermission.id
+      };
+    });
+    return groupPermissions.some((groupPermission) => groupPermission.permission.can(...checkPermissions));
+  };
+
   return {
     getUserOrgPermission,
     getOrgPermission,
@@ -639,6 +667,7 @@ export const permissionServiceFactory = ({
     getOrgPermissionByRole,
     getProjectPermissionByRole,
     buildOrgPermission,
-    buildProjectPermissionRules
+    buildProjectPermissionRules,
+    checkGroupProjectPermission
   };
 };
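A hypothetical call site for the new `checkGroupProjectPermission` helper (the concrete action/subject enums appear later in this diff; plain strings stand in for them here). It builds a CASL ability per group role and returns whether any role grants the requested permission.

```ts
// Minimal structural type for the slice of the service used here.
type GroupPermissionChecker = {
  checkGroupProjectPermission: (arg: {
    groupId: string;
    projectId: string;
    checkPermissions: [string, string]; // [action, subject] — spread into ability.can(...)
  }) => Promise<boolean>;
};

async function assertGroupCanReadSshHosts(svc: GroupPermissionChecker, groupId: string, projectId: string) {
  const ok = await svc.checkGroupProjectPermission({
    groupId,
    projectId,
    // Placeholders for ProjectPermissionSshHostActions.Read / ProjectPermissionSub.SshHosts.
    checkPermissions: ["read", "ssh-hosts"]
  });
  if (!ok) throw new Error("Group lacks read access to SSH hosts in this project");
}
```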
@@ -334,7 +334,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
           db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecret).as("commitSecretId"),
           db.ref("id").withSchema(TableName.SecretApprovalRequestSecret).as("commitId"),
           db.raw(
-            `DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+            `DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
           ),
           db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
           db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
@@ -483,7 +483,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
           db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitSecretId"),
           db.ref("id").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitId"),
           db.raw(
-            `DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+            `DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
           ),
           db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
           db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
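Why this change likely matters: the request `id` columns here are UUIDs, which carry no chronological order, so ranking by `id DESC` does not reliably surface the newest request per project; `createdAt DESC` does. A self-contained sketch of the same window-function pattern (table and column names are illustrative, not the real schema):

```ts
import knex from "knex";

// Hypothetical connection; table/column names are illustrative only.
const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// Rank approval requests per project, newest first, then keep rank 1.
async function latestRequestIdPerProject(): Promise<string[]> {
  const rows: { id: string; rank: string }[] = await db("secret_approval_requests as sar")
    .join("environments as env", "env.id", "sar.envId")
    .select(
      "sar.id",
      db.raw(`DENSE_RANK() OVER (PARTITION BY env."projectId" ORDER BY sar."createdAt" DESC) as rank`)
    );
  return rows.filter((r) => Number(r.rank) === 1).map((r) => r.id);
}
```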
@@ -28,6 +28,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+        .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
         .where(`${TableName.SshHostGroup}.projectId`, projectId)
         .select(
           db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -35,7 +36,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
           db.ref("name").withSchema(TableName.SshHostGroup),
           db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
           db.ref("username").withSchema(TableName.Users),
-          db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
+          db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+          db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
         )
         .orderBy(`${TableName.SshHostGroup}.updatedAt`, "desc");
 
@@ -69,7 +71,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
       const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
         loginUser,
         allowedPrincipals: {
-          usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+          usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+          groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
         }
       }));
       return {
@@ -99,6 +102,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+        .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
         .where(`${TableName.SshHostGroup}.id`, sshHostGroupId)
         .select(
           db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -106,7 +110,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
           db.ref("name").withSchema(TableName.SshHostGroup),
           db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
           db.ref("username").withSchema(TableName.Users),
-          db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
+          db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+          db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
         );
 
     if (rows.length === 0) return null;
@@ -121,7 +126,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
       const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
         loginUser,
         allowedPrincipals: {
-          usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+          usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+          groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
         }
       }));
 
@@ -12,6 +12,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 
+import { TGroupDALFactory } from "../group/group-dal";
 import { TLicenseServiceFactory } from "../license/license-service";
 import { createSshLoginMappings } from "../ssh-host/ssh-host-fns";
 import {
@@ -43,8 +44,12 @@ type TSshHostGroupServiceFactoryDep = {
   sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction" | "delete">;
   sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
   userDAL: Pick<TUserDALFactory, "find">;
-  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
+  permissionService: Pick<
+    TPermissionServiceFactory,
+    "getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
+  >;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
 };
 
 export type TSshHostGroupServiceFactory = ReturnType<typeof sshHostGroupServiceFactory>;
@@ -58,7 +63,8 @@ export const sshHostGroupServiceFactory = ({
   sshHostLoginUserMappingDAL,
   userDAL,
   permissionService,
-  licenseService
+  licenseService,
+  groupDAL
 }: TSshHostGroupServiceFactoryDep) => {
   const createSshHostGroup = async ({
     projectId,
@@ -127,6 +133,7 @@ export const sshHostGroupServiceFactory = ({
         loginMappings,
         sshHostLoginUserDAL,
         sshHostLoginUserMappingDAL,
+        groupDAL,
         userDAL,
         permissionService,
         projectId,
@@ -194,6 +201,7 @@ export const sshHostGroupServiceFactory = ({
         loginMappings,
         sshHostLoginUserDAL,
         sshHostLoginUserMappingDAL,
+        groupDAL,
         userDAL,
         permissionService,
         projectId: sshHostGroup.projectId,
@@ -9,12 +9,7 @@ export type TCreateSshHostGroupDTO = {
 export type TUpdateSshHostGroupDTO = {
   sshHostGroupId: string;
   name?: string;
-  loginMappings?: {
-    loginUser: string;
-    allowedPrincipals: {
-      usernames: string[];
-    };
-  }[];
+  loginMappings?: TLoginMapping[];
 } & Omit<TProjectPermission, "projectId">;
 
 export type TGetSshHostGroupDTO = {
@@ -31,8 +31,18 @@ export const sshHostDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUser}.id`,
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
+        .leftJoin(
+          TableName.UserGroupMembership,
+          `${TableName.UserGroupMembership}.groupId`,
+          `${TableName.SshHostLoginUserMapping}.groupId`
+        )
         .whereIn(`${TableName.SshHost}.projectId`, projectIds)
-        .andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+        .andWhere((bd) => {
+          void bd
+            .where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+            .orWhere(`${TableName.UserGroupMembership}.userId`, userId);
+        })
         .select(
           db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
           db.ref("projectId").withSchema(TableName.SshHost),
@@ -58,8 +68,17 @@ export const sshHostDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .join(TableName.SshHost, `${TableName.SshHostGroupMembership}.sshHostId`, `${TableName.SshHost}.id`)
+        .leftJoin(
+          TableName.UserGroupMembership,
+          `${TableName.UserGroupMembership}.groupId`,
+          `${TableName.SshHostLoginUserMapping}.groupId`
+        )
         .whereIn(`${TableName.SshHost}.projectId`, projectIds)
-        .andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+        .andWhere((bd) => {
+          void bd
+            .where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+            .orWhere(`${TableName.UserGroupMembership}.userId`, userId);
+        })
         .select(
           db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
           db.ref("projectId").withSchema(TableName.SshHost),
@@ -133,6 +152,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+        .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
         .where(`${TableName.SshHost}.projectId`, projectId)
         .select(
           db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -144,6 +164,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
           db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
           db.ref("username").withSchema(TableName.Users),
           db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+          db.ref("slug").withSchema(TableName.Groups).as("groupSlug"),
           db.ref("userSshCaId").withSchema(TableName.SshHost),
           db.ref("hostSshCaId").withSchema(TableName.SshHost)
         )
@@ -163,10 +184,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+        .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
         .select(
           db.ref("sshHostId").withSchema(TableName.SshHostGroupMembership),
           db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
-          db.ref("username").withSchema(TableName.Users)
+          db.ref("username").withSchema(TableName.Users),
+          db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
         )
         .whereIn(`${TableName.SshHostGroupMembership}.sshHostId`, hostIds);
 
@@ -185,7 +208,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
     const directMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
       loginUser,
       allowedPrincipals: {
-        usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+        usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+        groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
       },
       source: LoginMappingSource.HOST
     }));
@@ -197,7 +221,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
     const groupMappings = Object.entries(inheritedGrouped).map(([loginUser, entries]) => ({
       loginUser,
       allowedPrincipals: {
-        usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+        usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+        groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
      },
      source: LoginMappingSource.HOST_GROUP
    }));
@@ -229,6 +254,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+        .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
         .where(`${TableName.SshHost}.id`, sshHostId)
         .select(
           db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -241,7 +267,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
           db.ref("username").withSchema(TableName.Users),
           db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
           db.ref("userSshCaId").withSchema(TableName.SshHost),
-          db.ref("hostSshCaId").withSchema(TableName.SshHost)
+          db.ref("hostSshCaId").withSchema(TableName.SshHost),
+          db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
         );
 
     if (rows.length === 0) return null;
@@ -257,7 +284,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
     const directMappings = Object.entries(directGrouped).map(([loginUser, entries]) => ({
       loginUser,
       allowedPrincipals: {
-        usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+        usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+        groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
       },
       source: LoginMappingSource.HOST
     }));
@@ -275,10 +303,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
           `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
         )
         .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+        .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
         .where(`${TableName.SshHostGroupMembership}.sshHostId`, sshHostId)
         .select(
           db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
-          db.ref("username").withSchema(TableName.Users)
+          db.ref("username").withSchema(TableName.Users),
+          db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
         );
 
     const groupGrouped = groupBy(
@@ -289,7 +319,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
     const groupMappings = Object.entries(groupGrouped).map(([loginUser, entries]) => ({
       loginUser,
       allowedPrincipals: {
-        usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+        usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+        groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
       },
       source: LoginMappingSource.HOST_GROUP
     }));
@@ -3,6 +3,7 @@ import { Knex } from "knex";
 import { ActionProjectType } from "@app/db/schemas";
+import { BadRequestError } from "@app/lib/errors";
 
 import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "../permission/project-permission";
 import { TCreateSshLoginMappingsDTO } from "./ssh-host-types";
 
 /**
@@ -15,6 +16,7 @@ export const createSshLoginMappings = async ({
   loginMappings,
   sshHostLoginUserDAL,
   sshHostLoginUserMappingDAL,
+  groupDAL,
   userDAL,
   permissionService,
   projectId,
@@ -35,7 +37,7 @@ export const createSshLoginMappings = async ({
         tx
       );
 
-      if (allowedPrincipals.usernames.length > 0) {
+      if (allowedPrincipals.usernames && allowedPrincipals.usernames.length > 0) {
         const users = await userDAL.find(
           {
             $in: {
@@ -74,6 +76,41 @@ export const createSshLoginMappings = async ({
           tx
         );
       }
+
+      if (allowedPrincipals.groups && allowedPrincipals.groups.length > 0) {
+        const projectGroups = await groupDAL.findGroupsByProjectId(projectId);
+        const groups = projectGroups.filter((g) => allowedPrincipals.groups?.includes(g.slug));
+
+        if (groups.length !== allowedPrincipals.groups?.length) {
+          throw new BadRequestError({
+            message: `Invalid group slugs: ${allowedPrincipals.groups
+              .filter((g) => !projectGroups.some((pg) => pg.slug === g))
+              .join(", ")}`
+          });
+        }
+
+        for await (const group of groups) {
+          // check that each group has access to the SSH project and has read access to hosts
+          const hasPermission = await permissionService.checkGroupProjectPermission({
+            groupId: group.id,
+            projectId,
+            checkPermissions: [ProjectPermissionSshHostActions.Read, ProjectPermissionSub.SshHosts]
+          });
+          if (!hasPermission) {
+            throw new BadRequestError({
+              message: `Group ${group.slug} does not have access to the SSH project`
+            });
+          }
+        }
+
+        await sshHostLoginUserMappingDAL.insertMany(
+          groups.map((group) => ({
+            sshHostLoginUserId: sshHostLoginUser.id,
+            groupId: group.id
+          })),
+          tx
+        );
+      }
     }
   };
 
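A sketch of input that exercises the new group branch of `createSshLoginMappings` (the slugs and usernames are placeholders): unknown slugs fail fast, every named group must hold SshHosts read permission in the project, and only then are group mapping rows inserted.

```ts
// Hypothetical payload mixing direct usernames and group slugs.
const loginMappings = [
  {
    loginUser: "deploy",
    allowedPrincipals: {
      usernames: ["alice"],       // must resolve to users in the project
      groups: ["platform-team"]   // must be slugs of groups in the project
    }
  }
];
// A slug outside the project (e.g. "ghost-team") triggers
// BadRequestError: 'Invalid group slugs: ghost-team'; a known group without
// read access to SSH hosts is rejected with a "does not have access" error.
```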
@@ -15,7 +15,24 @@ export const sanitizedSshHost = SshHostsSchema.pick({
 
 export const loginMappingSchema = z.object({
   loginUser: z.string().trim(),
-  allowedPrincipals: z.object({
-    usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
-  })
+  allowedPrincipals: z
+    .object({
+      usernames: z
+        .array(z.string().trim())
+        .transform((usernames) => Array.from(new Set(usernames)))
+        .optional(),
+      groups: z
+        .array(z.string().trim())
+        .transform((groups) => Array.from(new Set(groups)))
+        .optional()
+    })
+    .refine(
+      (data) => {
+        return (data.usernames && data.usernames.length > 0) || (data.groups && data.groups.length > 0);
+      },
+      {
+        message: "At least one username or group must be provided",
+        path: ["allowedPrincipals"]
+      }
+    )
 });
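Behavior of the refined schema, condensed into a runnable sketch: usernames-only, groups-only, or both pass; an empty `allowedPrincipals` fails; and the transforms deduplicate entries.

```ts
import { z } from "zod";

// Condensed restatement of allowedPrincipals from the schema above.
const allowedPrincipals = z
  .object({
    usernames: z.array(z.string().trim()).transform((u) => Array.from(new Set(u))).optional(),
    groups: z.array(z.string().trim()).transform((g) => Array.from(new Set(g))).optional()
  })
  .refine((d) => (d.usernames?.length ?? 0) > 0 || (d.groups?.length ?? 0) > 0, {
    message: "At least one username or group must be provided"
  });

console.log(allowedPrincipals.safeParse({ groups: ["platform-team", "platform-team"] }).success); // true (deduplicated)
console.log(allowedPrincipals.safeParse({}).success); // false
```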
@@ -1,6 +1,7 @@
 import { ForbiddenError, subject } from "@casl/ability";
 
 import { ActionProjectType, ProjectType } from "@app/db/schemas";
+import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
 import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@@ -19,6 +20,7 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 
+import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
 import {
   convertActorToPrincipals,
   createSshCert,
@@ -39,12 +41,14 @@ import {
 
 type TSshHostServiceFactoryDep = {
   userDAL: Pick<TUserDALFactory, "findById" | "find">;
+  groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
   projectDAL: Pick<TProjectDALFactory, "find">;
   projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
   sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
   sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
   sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
   sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
+  userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembershipsByUserIdInOrg">;
   sshHostDAL: Pick<
     TSshHostDALFactory,
     | "transaction"
@@ -58,7 +62,10 @@ type TSshHostServiceFactoryDep = {
   >;
   sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
   sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
-  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
+  permissionService: Pick<
+    TPermissionServiceFactory,
+    "getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
+  >;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };
 
@@ -66,6 +73,8 @@ export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
 
 export const sshHostServiceFactory = ({
   userDAL,
+  userGroupMembershipDAL,
+  groupDAL,
   projectDAL,
   projectSshConfigDAL,
   sshCertificateAuthorityDAL,
@@ -208,6 +217,7 @@ export const sshHostServiceFactory = ({
         loginMappings,
         sshHostLoginUserDAL,
         sshHostLoginUserMappingDAL,
+        groupDAL,
         userDAL,
         permissionService,
         projectId,
@@ -278,6 +288,7 @@ export const sshHostServiceFactory = ({
         loginMappings,
         sshHostLoginUserDAL,
         sshHostLoginUserMappingDAL,
+        groupDAL,
         userDAL,
         permissionService,
         projectId: host.projectId,
@@ -387,10 +398,14 @@ export const sshHostServiceFactory = ({
       userDAL
     });
 
+    const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(actorId, actorOrgId);
+    const userGroupSlugs = userGroups.map((g) => g.groupSlug);
+
     const mapping = host.loginMappings.find(
       (m) =>
         m.loginUser === loginUser &&
-        m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
+        (m.allowedPrincipals.usernames?.some((allowed) => internalPrincipals.includes(allowed)) ||
+          m.allowedPrincipals.groups?.some((allowed) => userGroupSlugs.includes(allowed)))
     );
 
     if (!mapping) {
@@ -7,12 +7,15 @@ import { TProjectPermission } from "@app/lib/types";
 import { ActorAuthMethod } from "@app/services/auth/auth-type";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 
+import { TGroupDALFactory } from "../group/group-dal";
+
 export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
 
 export type TLoginMapping = {
   loginUser: string;
   allowedPrincipals: {
-    usernames: string[];
+    usernames?: string[];
+    groups?: string[];
   };
 };
 
@@ -63,7 +66,8 @@ type BaseCreateSshLoginMappingsDTO = {
   sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction">;
   sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
   userDAL: Pick<TUserDALFactory, "find">;
-  permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission">;
+  permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission" | "checkGroupProjectPermission">;
+  groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
   projectId: string;
   actorAuthMethod: ActorAuthMethod;
   actorOrgId: string;
@@ -1478,7 +1478,7 @@ export const SSH_HOSTS = {
     loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
     allowedPrincipals: "A list of allowed principals that can log in as the login user.",
     loginMappings:
-      "A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
+      "A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or group slugs in the Infisical SSH project.",
     userSshCaId:
       "The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
     hostSshCaId:
@@ -1493,7 +1493,7 @@ export const SSH_HOSTS = {
     loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
     allowedPrincipals: "A list of allowed principals that can log in as the login user.",
     loginMappings:
-      "A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
+      "A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or group slugs in the Infisical SSH project."
   },
   DELETE: {
     sshHostId: "The ID of the SSH host to delete."
@@ -5,7 +5,7 @@
 import type { FastifySchema, FastifySchemaCompiler, FastifyTypeProvider } from "fastify";
 import type { FastifySerializerCompiler } from "fastify/types/schema";
 import type { z, ZodAny, ZodTypeAny } from "zod";
-import { zodToJsonSchema } from "zod-to-json-schema";
+import { PostProcessCallback, zodToJsonSchema } from "zod-to-json-schema";
 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 type FreeformRecord = Record<string, any>;
@@ -28,9 +28,25 @@ interface Schema extends FastifySchema {
   hide?: boolean;
 }
 
+// Credit: https://github.com/StefanTerdell/zod-to-json-schema
+const jsonDescription: PostProcessCallback = (jsonSchema, def) => {
+  if (def.description) {
+    try {
+      return {
+        ...jsonSchema,
+        description: undefined,
+        ...JSON.parse(def.description)
+      };
+    } catch {}
+  }
+
+  return jsonSchema;
+};
+
 const zodToJsonSchemaOptions = {
   target: "openApi3",
-  $refStrategy: "none"
+  $refStrategy: "none",
+  postProcess: jsonDescription
 } as const;
 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
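An end-to-end sketch of what the new `postProcess` hook enables: a Zod `.describe()` payload that parses as JSON is spread into the emitted OpenAPI schema, while plain-string descriptions pass through untouched. The callback body mirrors `jsonDescription` above; the schema itself is illustrative.

```ts
import { z } from "zod";
import { PostProcessCallback, zodToJsonSchema } from "zod-to-json-schema";

const jsonDescription: PostProcessCallback = (jsonSchema, def) => {
  if (def.description) {
    try {
      // JSON-encoded description → spread its keys into the schema.
      return { ...jsonSchema, description: undefined, ...JSON.parse(def.description) };
    } catch {
      // Not JSON: keep the plain description as-is.
    }
  }
  return jsonSchema;
};

const projectId = z
  .string()
  .describe(JSON.stringify({ description: "Project identifier", example: "proj_123" }));

const out = zodToJsonSchema(projectId, { target: "openApi3", $refStrategy: "none", postProcess: jsonDescription });
// out ≈ { type: "string", description: "Project identifier", example: "proj_123" }
```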
@@ -870,6 +870,8 @@ export const registerRoutes = async (
 
   const sshHostService = sshHostServiceFactory({
     userDAL,
+    groupDAL,
+    userGroupMembershipDAL,
     projectDAL,
     projectSshConfigDAL,
     sshCertificateAuthorityDAL,
@@ -892,7 +894,8 @@ export const registerRoutes = async (
     sshHostLoginUserMappingDAL,
     userDAL,
     permissionService,
-    licenseService
+    licenseService,
+    groupDAL
   });
 
   const certificateAuthorityService = certificateAuthorityServiceFactory({
@@ -1784,6 +1787,10 @@ export const registerRoutes = async (
     if (licenseSyncJob) {
       cronJobs.push(licenseSyncJob);
     }
+    const microsoftTeamsSyncJob = await microsoftTeamsService.initializeBackgroundSync();
+    if (microsoftTeamsSyncJob) {
+      cronJobs.push(microsoftTeamsSyncJob);
+    }
   }
 
   server.decorate<FastifyZodProvider["store"]>("store", {
@@ -6,6 +6,7 @@ import {
   Request,
   Response
 } from "botbuilder";
+import { CronJob } from "cron";
 import { FastifyReply, FastifyRequest } from "fastify";
 
 import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
@@ -86,8 +87,17 @@ export const microsoftTeamsServiceFactory = ({
 }: TMicrosoftTeamsServiceFactoryDep) => {
   let teamsBot: TeamsBot | null = null;
   let adapter: CloudAdapter | null = null;
+  let lastKnownUpdatedAt = new Date();
 
-  const initializeTeamsBot = async ({ botAppId, botAppPassword }: { botAppId: string; botAppPassword: string }) => {
+  const initializeTeamsBot = async ({
+    botAppId,
+    botAppPassword,
+    lastUpdatedAt
+  }: {
+    botAppId: string;
+    botAppPassword: string;
+    lastUpdatedAt?: Date;
+  }) => {
     logger.info("Initializing Microsoft Teams bot");
     teamsBot = new TeamsBot({
       botAppId,
@@ -106,6 +116,57 @@ export const microsoftTeamsServiceFactory = ({
         })
       )
     );
+
+    if (lastUpdatedAt) {
+      lastKnownUpdatedAt = lastUpdatedAt;
+    }
   };
 
+  const $syncMicrosoftTeamsIntegrationConfiguration = async () => {
+    try {
+      const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
+      if (!serverCfg) {
+        throw new BadRequestError({
+          message: "Failed to get server configuration."
+        });
+      }
+
+      if (lastKnownUpdatedAt.getTime() === serverCfg.updatedAt.getTime()) {
+        logger.info("No changes to Microsoft Teams integration configuration, skipping sync");
+        return;
+      }
+
+      lastKnownUpdatedAt = serverCfg.updatedAt;
+
+      if (
+        serverCfg.encryptedMicrosoftTeamsAppId &&
+        serverCfg.encryptedMicrosoftTeamsClientSecret &&
+        serverCfg.encryptedMicrosoftTeamsBotId
+      ) {
+        const decryptWithRoot = kmsService.decryptWithRootKey();
+        const decryptedAppId = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsAppId);
+        const decryptedAppPassword = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsClientSecret);
+
+        await initializeTeamsBot({
+          botAppId: decryptedAppId.toString(),
+          botAppPassword: decryptedAppPassword.toString()
+        });
+      }
+    } catch (err) {
+      logger.error(err, "Error syncing Microsoft Teams integration configuration");
+    }
+  };
+
+  const initializeBackgroundSync = async () => {
+    logger.info("Setting up background sync process for Microsoft Teams workflow integration configuration");
+    // initial sync upon startup
+    await $syncMicrosoftTeamsIntegrationConfiguration();
+
+    // sync the Microsoft Teams integration configuration every 5 minutes
+    const job = new CronJob("*/5 * * * *", $syncMicrosoftTeamsIntegrationConfiguration);
+    job.start();
+
+    return job;
+  };
+
   const start = async () => {
@@ -703,6 +764,7 @@ export const microsoftTeamsServiceFactory = ({
     getTeams,
     handleMessageEndpoint,
     start,
+    initializeBackgroundSync,
     sendNotification,
     checkInstallationStatus,
     getClientId
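The background sync is a change-detection loop: every five minutes it re-reads the admin config and re-initializes the bot only when `updatedAt` has moved. A stripped-down sketch of that guard, with async stubs standing in for `serverCfgDAL.findById` and `initializeTeamsBot`:

```ts
import { CronJob } from "cron";

// Hypothetical stand-ins for the DAL read and the bot re-initialization.
const loadConfigUpdatedAt = async (): Promise<Date> => new Date(0);
const reinitializeBot = async (): Promise<void> => {};

let lastKnownUpdatedAt = new Date();

async function syncIfChanged(): Promise<void> {
  const updatedAt = await loadConfigUpdatedAt();
  if (lastKnownUpdatedAt.getTime() === updatedAt.getTime()) return; // unchanged → skip
  lastKnownUpdatedAt = updatedAt;
  await reinitializeBot();
}

// Same "*/5 * * * *" cadence as the service's CronJob.
const job = new CronJob("*/5 * * * *", () => void syncIfChanged());
job.start();
```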
@@ -246,7 +246,8 @@ export const superAdminServiceFactory = ({
 
       await microsoftTeamsService.initializeTeamsBot({
         botAppId: decryptedAppId.toString(),
-        botAppPassword: decryptedAppPassword.toString()
+        botAppPassword: decryptedAppPassword.toString(),
+        lastUpdatedAt: updatedServerCfg.updatedAt
       });
     }
 
@@ -1,115 +0,0 @@
-// MIT License
-
-// Copyright (c) 2019 Zachary Rice
-
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-
-// The above copyright notice and this permission notice shall be included in all
-// copies or substantial portions of the Software.
-
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-// SOFTWARE.
-
-package config
-
-import (
-    "regexp"
-    "testing"
-
-    "github.com/stretchr/testify/assert"
-)
-
-func TestCommitAllowed(t *testing.T) {
-    tests := []struct {
-        allowlist     Allowlist
-        commit        string
-        commitAllowed bool
-    }{
-        {
-            allowlist: Allowlist{
-                Commits: []string{"commitA"},
-            },
-            commit:        "commitA",
-            commitAllowed: true,
-        },
-        {
-            allowlist: Allowlist{
-                Commits: []string{"commitB"},
-            },
-            commit:        "commitA",
-            commitAllowed: false,
-        },
-        {
-            allowlist: Allowlist{
-                Commits: []string{"commitB"},
-            },
-            commit:        "",
-            commitAllowed: false,
-        },
-    }
-    for _, tt := range tests {
-        assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
-    }
-}
-
-func TestRegexAllowed(t *testing.T) {
-    tests := []struct {
-        allowlist    Allowlist
-        secret       string
-        regexAllowed bool
-    }{
-        {
-            allowlist: Allowlist{
-                Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
-            },
-            secret:       "a secret: matchthis, done",
-            regexAllowed: true,
-        },
-        {
-            allowlist: Allowlist{
-                Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
-            },
-            secret:       "a secret",
-            regexAllowed: false,
-        },
-    }
-    for _, tt := range tests {
-        assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
-    }
-}
-
-func TestPathAllowed(t *testing.T) {
-    tests := []struct {
-        allowlist   Allowlist
-        path        string
-        pathAllowed bool
-    }{
-        {
-            allowlist: Allowlist{
-                Paths: []*regexp.Regexp{regexp.MustCompile("path")},
-            },
-            path:        "a path",
-            pathAllowed: true,
-        },
-        {
-            allowlist: Allowlist{
-                Paths: []*regexp.Regexp{regexp.MustCompile("path")},
-            },
-            path:        "a ???",
-            pathAllowed: false,
-        },
-    }
-    for _, tt := range tests {
-        assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
-    }
-}
@@ -1,279 +0,0 @@
-// MIT License
-
-// Copyright (c) 2019 Zachary Rice
-
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-
-// The above copyright notice and this permission notice shall be included in all
-// copies or substantial portions of the Software.
-
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-// SOFTWARE.
-
-package config
-
-import (
-    _ "embed"
-    "fmt"
-    "regexp"
-    "strings"
-
-    "github.com/rs/zerolog/log"
-    "github.com/spf13/viper"
-)
-
-//go:embed infisical-scan.toml
-var DefaultConfig string
-
-// use to keep track of how many configs we can extend
-// yea I know, globals bad
-var extendDepth int
-
-const maxExtendDepth = 2
-
-const DefaultScanConfigFileName = ".infisical-scan.toml"
-const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
-const DefaultInfisicalIgnoreFineName = ".infisicalignore"
-
-// ViperConfig is the config struct used by the Viper config package
-// to parse the config file. This struct does not include regular expressions.
-// It is used as an intermediary to convert the Viper config to the Config struct.
-type ViperConfig struct {
-    Description string
-    Extend      Extend
-    Rules       []struct {
-        ID          string
-        Description string
-        Entropy     float64
-        SecretGroup int
-        Regex       string
-        Keywords    []string
-        Path        string
-        Tags        []string
-
-        Allowlist struct {
-            RegexTarget string
-            Regexes     []string
-            Paths       []string
-            Commits     []string
-            StopWords   []string
-        }
-    }
-    Allowlist struct {
-        RegexTarget string
-        Regexes     []string
-        Paths       []string
-        Commits     []string
-        StopWords   []string
-    }
-}
-
-// Config is a configuration struct that contains rules and an allowlist if present.
-type Config struct {
-    Extend      Extend
-    Path        string
-    Description string
-    Rules       map[string]Rule
-    Allowlist   Allowlist
-    Keywords    []string
-
-    // used to keep sarif results consistent
-    orderedRules []string
-}
-
-// Extend is a struct that allows users to define how they want their
-// configuration extended by other configuration files.
-type Extend struct {
-    Path       string
-    URL        string
-    UseDefault bool
-}
-
-func (vc *ViperConfig) Translate() (Config, error) {
-    var (
-        keywords     []string
-        orderedRules []string
-    )
-    rulesMap := make(map[string]Rule)
-
-    for _, r := range vc.Rules {
-        var allowlistRegexes []*regexp.Regexp
-        for _, a := range r.Allowlist.Regexes {
-            allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
-        }
-        var allowlistPaths []*regexp.Regexp
-        for _, a := range r.Allowlist.Paths {
-            allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
-        }
-
-        if r.Keywords == nil {
-            r.Keywords = []string{}
-        } else {
-            for _, k := range r.Keywords {
-                keywords = append(keywords, strings.ToLower(k))
-            }
-        }
-
-        if r.Tags == nil {
-            r.Tags = []string{}
-        }
-
-        var configRegex *regexp.Regexp
-        var configPathRegex *regexp.Regexp
-        if r.Regex == "" {
-            configRegex = nil
-        } else {
-            configRegex = regexp.MustCompile(r.Regex)
-        }
-        if r.Path == "" {
-            configPathRegex = nil
-        } else {
-            configPathRegex = regexp.MustCompile(r.Path)
-        }
-        r := Rule{
-            Description: r.Description,
-            RuleID:      r.ID,
-            Regex:       configRegex,
-            Path:        configPathRegex,
-            SecretGroup: r.SecretGroup,
-            Entropy:     r.Entropy,
-            Tags:        r.Tags,
-            Keywords:    r.Keywords,
-            Allowlist: Allowlist{
-                RegexTarget: r.Allowlist.RegexTarget,
-                Regexes:     allowlistRegexes,
-                Paths:       allowlistPaths,
-                Commits:     r.Allowlist.Commits,
-                StopWords:   r.Allowlist.StopWords,
-            },
-        }
-        orderedRules = append(orderedRules, r.RuleID)
-
-        if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
-            return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
-        }
-        rulesMap[r.RuleID] = r
-    }
-    var allowlistRegexes []*regexp.Regexp
-    for _, a := range vc.Allowlist.Regexes {
-        allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
-    }
-    var allowlistPaths []*regexp.Regexp
-    for _, a := range vc.Allowlist.Paths {
-        allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
-    }
-    c := Config{
-        Description: vc.Description,
-        Extend:      vc.Extend,
-        Rules:       rulesMap,
-        Allowlist: Allowlist{
-            RegexTarget: vc.Allowlist.RegexTarget,
-            Regexes:     allowlistRegexes,
-            Paths:       allowlistPaths,
-            Commits:     vc.Allowlist.Commits,
-            StopWords:   vc.Allowlist.StopWords,
-        },
-        Keywords:     keywords,
-        orderedRules: orderedRules,
-    }
-
-    if maxExtendDepth != extendDepth {
-        // disallow both usedefault and path from being set
-        if c.Extend.Path != "" && c.Extend.UseDefault {
-            log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
-        }
-        if c.Extend.UseDefault {
-            c.extendDefault()
-        } else if c.Extend.Path != "" {
-            c.extendPath()
-        }
-
-    }
-
-    return c, nil
-}
-
-func (c *Config) OrderedRules() []Rule {
-    var orderedRules []Rule
-    for _, id := range c.orderedRules {
-        if _, ok := c.Rules[id]; ok {
-            orderedRules = append(orderedRules, c.Rules[id])
-        }
-    }
-    return orderedRules
-}
-
-func (c *Config) extendDefault() {
-    extendDepth++
-    viper.SetConfigType("toml")
-    if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
-        log.Fatal().Msgf("failed to load extended config, err: %s", err)
-        return
-    }
-    defaultViperConfig := ViperConfig{}
-    if err := viper.Unmarshal(&defaultViperConfig); err != nil {
-        log.Fatal().Msgf("failed to load extended config, err: %s", err)
-        return
-    }
-    cfg, err := defaultViperConfig.Translate()
-    if err != nil {
-        log.Fatal().Msgf("failed to load extended config, err: %s", err)
-        return
-    }
-    log.Debug().Msg("extending config with default config")
-    c.extend(cfg)
-
-}
-
-func (c *Config) extendPath() {
-    extendDepth++
-    viper.SetConfigFile(c.Extend.Path)
-    if err := viper.ReadInConfig(); err != nil {
-        log.Fatal().Msgf("failed to load extended config, err: %s", err)
-        return
-    }
-    extensionViperConfig := ViperConfig{}
-    if err := viper.Unmarshal(&extensionViperConfig); err != nil {
-        log.Fatal().Msgf("failed to load extended config, err: %s", err)
-        return
-    }
-    cfg, err := extensionViperConfig.Translate()
-    if err != nil {
-        log.Fatal().Msgf("failed to load extended config, err: %s", err)
-        return
-    }
-    log.Debug().Msgf("extending config with %s", c.Extend.Path)
-    c.extend(cfg)
-}
-
-func (c *Config) extendURL() {
-    // TODO
-}
-
-func (c *Config) extend(extensionConfig Config) {
-    for ruleID, rule := range extensionConfig.Rules {
-        if _, ok := c.Rules[ruleID]; !ok {
-            log.Trace().Msgf("adding %s to base config", ruleID)
-            c.Rules[ruleID] = rule
-            c.Keywords = append(c.Keywords, rule.Keywords...)
-        }
-    }
-
-    // append allowlists, not attempting to merge
-    c.Allowlist.Commits = append(c.Allowlist.Commits,
-        extensionConfig.Allowlist.Commits...)
-    c.Allowlist.Paths = append(c.Allowlist.Paths,
-        extensionConfig.Allowlist.Paths...)
-    c.Allowlist.Regexes = append(c.Allowlist.Regexes,
-        extensionConfig.Allowlist.Regexes...)
-}
@@ -1,170 +0,0 @@
-// MIT License
-
-// Copyright (c) 2019 Zachary Rice
-
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-
-// The above copyright notice and this permission notice shall be included in all
-// copies or substantial portions of the Software.
-
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-// SOFTWARE.
-
-package config
-
-import (
-    "fmt"
-    "regexp"
-    "testing"
-
-    "github.com/spf13/viper"
-    "github.com/stretchr/testify/assert"
-)
-
-const configPath = "../testdata/config/"
-
-func TestTranslate(t *testing.T) {
-    tests := []struct {
-        cfgName   string
-        cfg       Config
-        wantError error
-    }{
-        {
-            cfgName: "allow_aws_re",
-            cfg: Config{
-                Rules: map[string]Rule{"aws-access-key": {
-                    Description: "AWS Access Key",
-                    Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
-                    Tags:        []string{"key", "AWS"},
-                    Keywords:    []string{},
-                    RuleID:      "aws-access-key",
-                    Allowlist: Allowlist{
-                        Regexes: []*regexp.Regexp{
-                            regexp.MustCompile("AKIALALEMEL33243OLIA"),
-                        },
-                    },
-                }},
-            },
-        },
-        {
-            cfgName: "allow_commit",
-            cfg: Config{
-                Rules: map[string]Rule{"aws-access-key": {
-                    Description: "AWS Access Key",
-                    Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
-                    Tags:        []string{"key", "AWS"},
-                    Keywords:    []string{},
-                    RuleID:      "aws-access-key",
-                    Allowlist: Allowlist{
-                        Commits: []string{"allowthiscommit"},
-                    },
-                }},
-            },
-        },
-        {
-            cfgName: "allow_path",
-            cfg: Config{
-                Rules: map[string]Rule{"aws-access-key": {
-                    Description: "AWS Access Key",
-                    Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
-                    Tags:        []string{"key", "AWS"},
-                    Keywords:    []string{},
-                    RuleID:      "aws-access-key",
-                    Allowlist: Allowlist{
-                        Paths: []*regexp.Regexp{
-                            regexp.MustCompile(".go"),
-                        },
-                    },
-                }},
-            },
-        },
-        {
-            cfgName: "entropy_group",
-            cfg: Config{
-                Rules: map[string]Rule{"discord-api-key": {
-                    Description: "Discord API key",
-                    Regex:       regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
-                    RuleID:      "discord-api-key",
-                    Allowlist:   Allowlist{},
-                    Entropy:     3.5,
-                    SecretGroup: 3,
-                    Tags:        []string{},
-                    Keywords:    []string{},
-                }},
-            },
-        },
-        {
-            cfgName:   "bad_entropy_group",
-            cfg:       Config{},
-            wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
-        },
-        {
-            cfgName: "base",
-            cfg: Config{
-                Rules: map[string]Rule{
-                    "aws-access-key": {
-                        Description: "AWS Access Key",
-                        Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
-                        Tags:        []string{"key", "AWS"},
-                        Keywords:    []string{},
-                        RuleID:      "aws-access-key",
-                    },
-                    "aws-secret-key": {
-                        Description: "AWS Secret Key",
-                        Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
-                        Tags:        []string{"key", "AWS"},
-                        Keywords:    []string{},
-                        RuleID:      "aws-secret-key",
-                    },
-                    "aws-secret-key-again": {
-                        Description: "AWS Secret Key",
-                        Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
-                        Tags:        []string{"key", "AWS"},
-                        Keywords:    []string{},
-                        RuleID:      "aws-secret-key-again",
-                    },
-                },
-            },
-        },
-    }
-
-    for _, tt := range tests {
-        viper.Reset()
-        viper.AddConfigPath(configPath)
-        viper.SetConfigName(tt.cfgName)
-        viper.SetConfigType("toml")
-        err := viper.ReadInConfig()
-        if err != nil {
-            t.Error(err)
-        }
-
-        var vc ViperConfig
-        err = viper.Unmarshal(&vc)
-        if err != nil {
-            t.Error(err)
-        }
-        cfg, err := vc.Translate()
-        if tt.wantError != nil {
-            if err == nil {
-                t.Errorf("expected error")
-            }
-            assert.Equal(t, tt.wantError, err)
-        }
-
-        assert.Equal(t, cfg.Rules, tt.cfg.Rules)
-    }
-}
@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: example-auth-secret
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path
@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path
@ -1,43 +0,0 @@
package config

import (
	"regexp"
)

// Rules contain information that define details on how to detect secrets
type Rule struct {
	// Description is the description of the rule.
	Description string

	// RuleID is a unique identifier for this rule
	RuleID string

	// Entropy is a float representing the minimum shannon
	// entropy a regex group must have to be considered a secret.
	Entropy float64

	// SecretGroup is an int used to extract secret from regex
	// match and used as the group that will have its entropy
	// checked if `entropy` is set.
	SecretGroup int

	// Regex is a golang regular expression used to detect secrets.
	Regex *regexp.Regexp

	// Path is a golang regular expression used to
	// filter secrets by path
	Path *regexp.Regexp

	// Tags is an array of strings used for metadata
	// and reporting purposes.
	Tags []string

	// Keywords are used for pre-regex check filtering. Rules that contain
	// keywords will perform a quick string compare check to make sure the
	// keyword(s) are in the content being scanned.
	Keywords []string

	// Allowlist allows a rule to be ignored for specific
	// regexes, paths, and/or commits
	Allowlist Allowlist
}
@ -1,24 +0,0 @@
package config

import (
	"regexp"
)

func anyRegexMatch(f string, res []*regexp.Regexp) bool {
	for _, re := range res {
		if regexMatched(f, re) {
			return true
		}
	}
	return false
}

func regexMatched(f string, re *regexp.Regexp) bool {
	if re == nil {
		return false
	}
	if re.FindString(f) != "" {
		return true
	}
	return false
}
@ -25,35 +25,31 @@ package detect
import (
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"

	"github.com/rs/zerolog/log"

	"github.com/Infisical/infisical-merge/report"
	"github.com/Infisical/infisical-merge/detect/report"
)

func IsNew(finding report.Finding, baseline []report.Finding) bool {
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
	// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
	// the code requires maintanance if/when the Finding struct changes
	// the code requires maintenance if/when the Finding struct changes
	for _, b := range baseline {

		if finding.Author == b.Author &&
			finding.Commit == b.Commit &&
			finding.Date == b.Date &&
		if finding.RuleID == b.RuleID &&
			finding.Description == b.Description &&
			finding.Email == b.Email &&
			finding.EndColumn == b.EndColumn &&
			finding.StartLine == b.StartLine &&
			finding.EndLine == b.EndLine &&
			finding.Entropy == b.Entropy &&
			finding.File == b.File &&
			// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
			finding.Match == b.Match &&
			finding.Message == b.Message &&
			finding.RuleID == b.RuleID &&
			finding.Secret == b.Secret &&
			finding.StartColumn == b.StartColumn &&
			finding.StartLine == b.StartLine {
			finding.EndColumn == b.EndColumn &&
			(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
			finding.File == b.File &&
			finding.Commit == b.Commit &&
			finding.Author == b.Author &&
			finding.Email == b.Email &&
			finding.Date == b.Date &&
			finding.Message == b.Message &&
			// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
			finding.Entropy == b.Entropy {
			return false
		}
	}
@ -61,23 +57,12 @@ func IsNew(finding report.Finding, baseline []report.Finding) bool {
}

func LoadBaseline(baselinePath string) ([]report.Finding, error) {
	var previousFindings []report.Finding
	jsonFile, err := os.Open(baselinePath)
	bytes, err := os.ReadFile(baselinePath)
	if err != nil {
		return nil, fmt.Errorf("could not open %s", baselinePath)
	}

	defer func() {
		if cerr := jsonFile.Close(); cerr != nil {
			log.Warn().Err(cerr).Msg("problem closing jsonFile handle")
		}
	}()

	bytes, err := io.ReadAll(jsonFile)
	if err != nil {
		return nil, fmt.Errorf("could not read data from the file %s", baselinePath)
	}

	var previousFindings []report.Finding
	err = json.Unmarshal(bytes, &previousFindings)
	if err != nil {
		return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
@ -85,3 +70,34 @@ func LoadBaseline(baselinePath string) ([]report.Finding, error) {

	return previousFindings, nil
}

func (d *Detector) AddBaseline(baselinePath string, source string) error {
	if baselinePath != "" {
		absoluteSource, err := filepath.Abs(source)
		if err != nil {
			return err
		}

		absoluteBaseline, err := filepath.Abs(baselinePath)
		if err != nil {
			return err
		}

		relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
		if err != nil {
			return err
		}

		baseline, err := LoadBaseline(baselinePath)
		if err != nil {
			return err
		}

		d.baseline = baseline
		baselinePath = relativeBaseline

	}

	d.baselinePath = baselinePath
	return nil
}
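Worth noting about the reworked IsNew above: the new redact parameter makes the baseline comparison skip the Match and Secret fields when redaction is on, since a redacted baseline can never match the live values. A minimal sketch of that behaviour, assuming the detect and report packages are importable under github.com/Infisical/infisical-merge; the finding values are hypothetical:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	// Baseline entry whose secret was redacted when the report was written.
	base := report.Finding{RuleID: "aws-access-key", File: "main.go", Secret: "REDACTED"}
	// Fresh finding carrying the live (hypothetical) secret value.
	cur := report.Finding{RuleID: "aws-access-key", File: "main.go", Secret: "AKIAEXAMPLE"}

	// redact > 0: Match/Secret are excluded from the comparison, so the
	// finding is treated as already present in the baseline.
	fmt.Println(detect.IsNew(cur, 1, []report.Finding{base})) // false
	// redact == 0: the differing secrets make it a new finding.
	fmt.Println(detect.IsNew(cur, 0, []report.Finding{base})) // true
}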
@ -1,160 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/Infisical/infisical-merge/report"
)

func TestIsNew(t *testing.T) {
	tests := []struct {
		findings report.Finding
		baseline []report.Finding
		expect   bool
	}{
		{
			findings: report.Finding{
				Author: "a",
				Commit: "0000",
			},
			baseline: []report.Finding{
				{
					Author: "a",
					Commit: "0000",
				},
			},
			expect: false,
		},
		{
			findings: report.Finding{
				Author: "a",
				Commit: "0000",
			},
			baseline: []report.Finding{
				{
					Author: "a",
					Commit: "0002",
				},
			},
			expect: true,
		},
		{
			findings: report.Finding{
				Author: "a",
				Commit: "0000",
				Tags:   []string{"a", "b"},
			},
			baseline: []report.Finding{
				{
					Author: "a",
					Commit: "0000",
					Tags:   []string{"a", "c"},
				},
			},
			expect: false, // Updated tags doesn't make it a new finding
		},
	}
	for _, test := range tests {
		assert.Equal(t, test.expect, IsNew(test.findings, test.baseline))
	}
}

func TestFileLoadBaseline(t *testing.T) {
	tests := []struct {
		Filename      string
		ExpectedError error
	}{
		{
			Filename:      "../testdata/baseline/baseline.csv",
			ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.csv is not supported"),
		},
		{
			Filename:      "../testdata/baseline/baseline.sarif",
			ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.sarif is not supported"),
		},
		{
			Filename:      "../testdata/baseline/notfound.json",
			ExpectedError: errors.New("could not open ../testdata/baseline/notfound.json"),
		},
	}

	for _, test := range tests {
		_, err := LoadBaseline(test.Filename)
		assert.Equal(t, test.ExpectedError.Error(), err.Error())
	}
}

func TestIgnoreIssuesInBaseline(t *testing.T) {
	tests := []struct {
		findings    []report.Finding
		baseline    []report.Finding
		expectCount int
	}{
		{
			findings: []report.Finding{
				{
					Author: "a",
					Commit: "5",
				},
			},
			baseline: []report.Finding{
				{
					Author: "a",
					Commit: "5",
				},
			},
			expectCount: 0,
		},
		{
			findings: []report.Finding{
				{
					Author:      "a",
					Commit:      "5",
					Fingerprint: "a",
				},
			},
			baseline: []report.Finding{
				{
					Author:      "a",
					Commit:      "5",
					Fingerprint: "b",
				},
			},
			expectCount: 0,
		},
	}

	for _, test := range tests {
		d, _ := NewDetectorDefaultConfig()
		d.baseline = test.baseline
		for _, finding := range test.findings {
			d.addFinding(finding)
		}
		assert.Equal(t, test.expectCount, len(d.findings))
	}
}
66
cli/detect/cmd/scm/scm.go
Normal file
@ -0,0 +1,66 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package scm

import (
	"fmt"
	"strings"
)

type Platform int

const (
	UnknownPlatform Platform = iota
	NoPlatform      // Explicitly disable the feature
	GitHubPlatform
	GitLabPlatform
	AzureDevOpsPlatform
	// TODO: Add others.
)

func (p Platform) String() string {
	return [...]string{
		"unknown",
		"none",
		"github",
		"gitlab",
		"azuredevops",
	}[p]
}

func PlatformFromString(s string) (Platform, error) {
	switch strings.ToLower(s) {
	case "", "unknown":
		return UnknownPlatform, nil
	case "none":
		return NoPlatform, nil
	case "github":
		return GitHubPlatform, nil
	case "gitlab":
		return GitLabPlatform, nil
	case "azuredevops":
		return AzureDevOpsPlatform, nil
	default:
		return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
	}
}
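A short usage sketch for the new scm package above (not part of the diff; the import path is inferred from the file location cli/detect/cmd/scm/scm.go under the github.com/Infisical/infisical-merge module):

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/cmd/scm"
)

func main() {
	// Parse a --platform style flag value and round-trip it back to a string.
	p, err := scm.PlatformFromString("github")
	if err != nil {
		panic(err)
	}
	fmt.Println(p == scm.GitHubPlatform) // true
	fmt.Println(p.String())              // github

	// Unrecognized values return UnknownPlatform plus an error.
	if _, err := scm.PlatformFromString("bitbucket"); err != nil {
		fmt.Println(err) // invalid scm platform value: bitbucket
	}
}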
@ -23,63 +23,137 @@
package config

import (
	"regexp"
	"fmt"
	"strings"

	"golang.org/x/exp/maps"

	"github.com/Infisical/infisical-merge/detect/regexp"
)

type AllowlistMatchCondition int

const (
	AllowlistMatchOr AllowlistMatchCondition = iota
	AllowlistMatchAnd
)

func (a AllowlistMatchCondition) String() string {
	return [...]string{
		"OR",
		"AND",
	}[a]
}

// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
	// Short human readable description of the allowlist.
	Description string

	// Regexes is slice of content regular expressions that are allowed to be ignored.
	Regexes []*regexp.Regexp
	// MatchCondition determines whether all criteria must match.
	// Defaults to "OR".
	MatchCondition AllowlistMatchCondition

	// RegexTarget
	RegexTarget string
	// Commits is a slice of commit SHAs that are allowed to be ignored.
	Commits []string

	// Paths is a slice of path regular expressions that are allowed to be ignored.
	Paths []*regexp.Regexp

	// Commits is a slice of commit SHAs that are allowed to be ignored.
	Commits []string
	// Can be `match` or `line`.
	//
	// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
	//
	// If `line` the _Regexes_ will be tested against the entire line.
	//
	// If RegexTarget is empty, it will be tested against the found secret.
	RegexTarget string

	// Regexes is slice of content regular expressions that are allowed to be ignored.
	Regexes []*regexp.Regexp

	// StopWords is a slice of stop words that are allowed to be ignored.
	// This targets the _secret_, not the content of the regex match like the
	// Regexes slice.
	StopWords []string

	// validated is an internal flag to track whether `Validate()` has been called.
	validated bool
}

func (a *Allowlist) Validate() error {
	if a.validated {
		return nil
	}

	// Disallow empty allowlists.
	if len(a.Commits) == 0 &&
		len(a.Paths) == 0 &&
		len(a.Regexes) == 0 &&
		len(a.StopWords) == 0 {
		return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
	}

	// Deduplicate commits and stopwords.
	if len(a.Commits) > 0 {
		uniqueCommits := make(map[string]struct{})
		for _, commit := range a.Commits {
			uniqueCommits[commit] = struct{}{}
		}
		a.Commits = maps.Keys(uniqueCommits)
	}
	if len(a.StopWords) > 0 {
		uniqueStopwords := make(map[string]struct{})
		for _, stopWord := range a.StopWords {
			uniqueStopwords[stopWord] = struct{}{}
		}
		a.StopWords = maps.Keys(uniqueStopwords)
	}

	a.validated = true
	return nil
}

// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) bool {
	if c == "" {
		return false
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
	if a == nil || c == "" {
		return false, ""
	}

	for _, commit := range a.Commits {
		if commit == c {
			return true
			return true, c
		}
	}
	return false
	return false, ""
}

// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
	if a == nil || path == "" {
		return false
	}
	return anyRegexMatch(path, a.Paths)
}

// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(s string) bool {
	return anyRegexMatch(s, a.Regexes)
func (a *Allowlist) RegexAllowed(secret string) bool {
	if a == nil || secret == "" {
		return false
	}
	return anyRegexMatch(secret, a.Regexes)
}

func (a *Allowlist) ContainsStopWord(s string) bool {
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
	if a == nil || s == "" {
		return false, ""
	}

	s = strings.ToLower(s)
	for _, stopWord := range a.StopWords {
		if strings.Contains(s, strings.ToLower(stopWord)) {
			return true
			return true, stopWord
		}
	}
	return false
	return false, ""
}
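The reworked helpers above now return what matched alongside the boolean, and they tolerate a nil receiver. A minimal sketch, assuming the package is importable as github.com/Infisical/infisical-merge/detect/config (the values are hypothetical):

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
)

func main() {
	a := &config.Allowlist{
		Commits:   []string{"abc123"},
		StopWords: []string{"example"},
	}
	if err := a.Validate(); err != nil {
		panic(err)
	}

	// The second return value reports which commit / stop word matched.
	fmt.Println(a.CommitAllowed("abc123"))            // true abc123
	fmt.Println(a.ContainsStopWord("my-EXAMPLE-key")) // true example

	// A nil *Allowlist is now safe to query; every helper short-circuits.
	var none *config.Allowlist
	fmt.Println(none.CommitAllowed("abc123")) // false
}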
426
cli/detect/config/config.go
Normal file
@ -0,0 +1,426 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
	_ "embed"
	"errors"
	"fmt"
	"sort"
	"strings"

	"github.com/spf13/viper"

	"github.com/Infisical/infisical-merge/detect/logging"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"

var (
	//go:embed gitleaks.toml
	DefaultConfig string

	// use to keep track of how many configs we can extend
	// yea I know, globals bad
	extendDepth int
)

const maxExtendDepth = 2

// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
	Title       string
	Description string
	Extend      Extend
	Rules       []struct {
		ID          string
		Description string
		Path        string
		Regex       string
		SecretGroup int
		Entropy     float64
		Keywords    []string
		Tags        []string

		// Deprecated: this is a shim for backwards-compatibility.
		// TODO: Remove this in 9.x.
		AllowList  *viperRuleAllowlist
		Allowlists []*viperRuleAllowlist
	}
	// Deprecated: this is a shim for backwards-compatibility.
	// TODO: Remove this in 9.x.
	AllowList  *viperGlobalAllowlist
	Allowlists []*viperGlobalAllowlist
}

type viperRuleAllowlist struct {
	Description string
	Condition   string
	Commits     []string
	Paths       []string
	RegexTarget string
	Regexes     []string
	StopWords   []string
}

type viperGlobalAllowlist struct {
	TargetRules        []string
	viperRuleAllowlist `mapstructure:",squash"`
}

// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
	Title       string
	Extend      Extend
	Path        string
	Description string
	Rules       map[string]Rule
	Keywords    map[string]struct{}
	// used to keep sarif results consistent
	OrderedRules []string
	Allowlists   []*Allowlist
}

// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
	Path          string
	URL           string
	UseDefault    bool
	DisabledRules []string
}

func (vc *ViperConfig) Translate() (Config, error) {
	var (
		keywords       = make(map[string]struct{})
		orderedRules   []string
		rulesMap       = make(map[string]Rule)
		ruleAllowlists = make(map[string][]*Allowlist)
	)

	// Validate individual rules.
	for _, vr := range vc.Rules {
		var (
			pathPat  *regexp.Regexp
			regexPat *regexp.Regexp
		)
		if vr.Path != "" {
			pathPat = regexp.MustCompile(vr.Path)
		}
		if vr.Regex != "" {
			regexPat = regexp.MustCompile(vr.Regex)
		}
		if vr.Keywords == nil {
			vr.Keywords = []string{}
		} else {
			for i, k := range vr.Keywords {
				keyword := strings.ToLower(k)
				keywords[keyword] = struct{}{}
				vr.Keywords[i] = keyword
			}
		}
		if vr.Tags == nil {
			vr.Tags = []string{}
		}
		cr := Rule{
			RuleID:      vr.ID,
			Description: vr.Description,
			Regex:       regexPat,
			SecretGroup: vr.SecretGroup,
			Entropy:     vr.Entropy,
			Path:        pathPat,
			Keywords:    vr.Keywords,
			Tags:        vr.Tags,
		}

		// Parse the rule allowlists, including the older format for backwards compatibility.
		if vr.AllowList != nil {
			// TODO: Remove this in v9.
			if len(vr.Allowlists) > 0 {
				return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
			}
			vr.Allowlists = append(vr.Allowlists, vr.AllowList)
		}
		for _, a := range vr.Allowlists {
			allowlist, err := parseAllowlist(a)
			if err != nil {
				return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
			}
			cr.Allowlists = append(cr.Allowlists, allowlist)
		}
		orderedRules = append(orderedRules, cr.RuleID)
		rulesMap[cr.RuleID] = cr
	}

	// Assemble the config.
	c := Config{
		Title:        vc.Title,
		Description:  vc.Description,
		Extend:       vc.Extend,
		Rules:        rulesMap,
		Keywords:     keywords,
		OrderedRules: orderedRules,
	}
	// Parse the config allowlists, including the older format for backwards compatibility.
	if vc.AllowList != nil {
		// TODO: Remove this in v9.
		if len(vc.Allowlists) > 0 {
			return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
		}
		vc.Allowlists = append(vc.Allowlists, vc.AllowList)
	}
	for _, a := range vc.Allowlists {
		allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
		if err != nil {
			return Config{}, fmt.Errorf("[[allowlists]] %w", err)
		}
		// Allowlists with |targetRules| aren't added to the global list.
		if len(a.TargetRules) > 0 {
			for _, ruleID := range a.TargetRules {
				// It's not possible to validate |ruleID| until after extend.
				ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
			}
		} else {
			c.Allowlists = append(c.Allowlists, allowlist)
		}
	}

	if maxExtendDepth != extendDepth {
		// disallow both usedefault and path from being set
		if c.Extend.Path != "" && c.Extend.UseDefault {
			return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
		}
		if c.Extend.UseDefault {
			if err := c.extendDefault(); err != nil {
				return Config{}, err
			}
		} else if c.Extend.Path != "" {
			if err := c.extendPath(); err != nil {
				return Config{}, err
			}
		}
	}

	// Validate the rules after everything has been assembled (including extended configs).
	if extendDepth == 0 {
		for _, rule := range c.Rules {
			if err := rule.Validate(); err != nil {
				return Config{}, err
			}
		}

		// Populate targeted configs.
		for ruleID, allowlists := range ruleAllowlists {
			rule, ok := c.Rules[ruleID]
			if !ok {
				return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
			}
			rule.Allowlists = append(rule.Allowlists, allowlists...)
			c.Rules[ruleID] = rule
		}
	}

	return c, nil
}

func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
	var matchCondition AllowlistMatchCondition
	switch strings.ToUpper(a.Condition) {
	case "AND", "&&":
		matchCondition = AllowlistMatchAnd
	case "", "OR", "||":
		matchCondition = AllowlistMatchOr
	default:
		return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
	}

	// Validate the target.
	regexTarget := a.RegexTarget
	if regexTarget != "" {
		switch regexTarget {
		case "secret":
			regexTarget = ""
		case "match", "line":
			// do nothing
		default:
			return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
		}
	}
	var allowlistRegexes []*regexp.Regexp
	for _, a := range a.Regexes {
		allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
	}
	var allowlistPaths []*regexp.Regexp
	for _, a := range a.Paths {
		allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
	}

	allowlist := &Allowlist{
		Description:    a.Description,
		MatchCondition: matchCondition,
		Commits:        a.Commits,
		Paths:          allowlistPaths,
		RegexTarget:    regexTarget,
		Regexes:        allowlistRegexes,
		StopWords:      a.StopWords,
	}
	if err := allowlist.Validate(); err != nil {
		return nil, err
	}
	return allowlist, nil
}

func (c *Config) GetOrderedRules() []Rule {
	var orderedRules []Rule
	for _, id := range c.OrderedRules {
		if _, ok := c.Rules[id]; ok {
			orderedRules = append(orderedRules, c.Rules[id])
		}
	}
	return orderedRules
}

func (c *Config) extendDefault() error {
	extendDepth++
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
		return fmt.Errorf("failed to load extended default config, err: %w", err)
	}
	defaultViperConfig := ViperConfig{}
	if err := viper.Unmarshal(&defaultViperConfig); err != nil {
		return fmt.Errorf("failed to load extended default config, err: %w", err)
	}
	cfg, err := defaultViperConfig.Translate()
	if err != nil {
		return fmt.Errorf("failed to load extended default config, err: %w", err)
	}
	logging.Debug().Msg("extending config with default config")
	c.extend(cfg)
	return nil
}

func (c *Config) extendPath() error {
	extendDepth++
	viper.SetConfigFile(c.Extend.Path)
	if err := viper.ReadInConfig(); err != nil {
		return fmt.Errorf("failed to load extended config, err: %w", err)
	}
	extensionViperConfig := ViperConfig{}
	if err := viper.Unmarshal(&extensionViperConfig); err != nil {
		return fmt.Errorf("failed to load extended config, err: %w", err)
	}
	cfg, err := extensionViperConfig.Translate()
	if err != nil {
		return fmt.Errorf("failed to load extended config, err: %w", err)
	}
	logging.Debug().Msgf("extending config with %s", c.Extend.Path)
	c.extend(cfg)
	return nil
}

func (c *Config) extendURL() {
	// TODO
}

func (c *Config) extend(extensionConfig Config) {
	// Get config name for helpful log messages.
	var configName string
	if c.Extend.Path != "" {
		configName = c.Extend.Path
	} else {
		configName = "default"
	}
	// Convert |Config.DisabledRules| into a map for ease of access.
	disabledRuleIDs := map[string]struct{}{}
	for _, id := range c.Extend.DisabledRules {
		if _, ok := extensionConfig.Rules[id]; !ok {
			logging.Warn().
				Str("rule-id", id).
				Str("config", configName).
				Msg("Disabled rule doesn't exist in extended config.")
		}
		disabledRuleIDs[id] = struct{}{}
	}

	for ruleID, baseRule := range extensionConfig.Rules {
		// Skip the rule.
		if _, ok := disabledRuleIDs[ruleID]; ok {
			logging.Debug().
				Str("rule-id", ruleID).
				Str("config", configName).
				Msg("Ignoring rule from extended config.")
			continue
		}

		currentRule, ok := c.Rules[ruleID]
		if !ok {
			// Rule doesn't exist, add it to the config.
			c.Rules[ruleID] = baseRule
			for _, k := range baseRule.Keywords {
				c.Keywords[k] = struct{}{}
			}
			c.OrderedRules = append(c.OrderedRules, ruleID)
		} else {
			// Rule exists, merge our changes into the base.
			if currentRule.Description != "" {
				baseRule.Description = currentRule.Description
			}
			if currentRule.Entropy != 0 {
				baseRule.Entropy = currentRule.Entropy
			}
			if currentRule.SecretGroup != 0 {
				baseRule.SecretGroup = currentRule.SecretGroup
			}
			if currentRule.Regex != nil {
				baseRule.Regex = currentRule.Regex
			}
			if currentRule.Path != nil {
				baseRule.Path = currentRule.Path
			}
			baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
			baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
			for _, a := range currentRule.Allowlists {
				baseRule.Allowlists = append(baseRule.Allowlists, a)
			}
			// The keywords from the base rule and the extended rule must be merged into the global keywords list
			for _, k := range baseRule.Keywords {
				c.Keywords[k] = struct{}{}
			}
			c.Rules[ruleID] = baseRule
		}
	}

	// append allowlists, not attempting to merge
	for _, a := range extensionConfig.Allowlists {
		c.Allowlists = append(c.Allowlists, a)
	}

	// sort to keep extended rules in order
	sort.Strings(c.OrderedRules)
}
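To see how Translate routes a targeted [[allowlists]] block onto its rule rather than the global list, here is a minimal sketch; the TOML content and the import paths are illustrative assumptions, not part of the diff:

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"

	"github.com/Infisical/infisical-merge/detect/config"
)

// Hypothetical scan config: one rule plus an allowlist targeting it.
const cfgTOML = `
[[rules]]
id = "example-key"
regex = '''example-[a-z0-9]{16}'''

[[allowlists]]
targetRules = ["example-key"]
condition = "AND"
paths = ['''_test\.go$''']
stopwords = ["example"]
`

func main() {
	v := viper.New()
	v.SetConfigType("toml")
	if err := v.ReadConfig(strings.NewReader(cfgTOML)); err != nil {
		panic(err)
	}
	var vc config.ViperConfig
	if err := v.Unmarshal(&vc); err != nil {
		panic(err)
	}
	cfg, err := vc.Translate()
	if err != nil {
		panic(err)
	}
	// The targeted allowlist is attached to the rule, not the global list.
	fmt.Println(len(cfg.Rules["example-key"].Allowlists)) // 1
	fmt.Println(len(cfg.Allowlists))                      // 0
}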
3130
cli/detect/config/gitleaks.toml
Normal file
114
cli/detect/config/rule.go
Normal file
@ -0,0 +1,114 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
	"fmt"
	"strings"

	"github.com/Infisical/infisical-merge/detect/regexp"
)

// Rules contain information that define details on how to detect secrets
type Rule struct {
	// RuleID is a unique identifier for this rule
	RuleID string

	// Description is the description of the rule.
	Description string

	// Entropy is a float representing the minimum shannon
	// entropy a regex group must have to be considered a secret.
	Entropy float64

	// SecretGroup is an int used to extract secret from regex
	// match and used as the group that will have its entropy
	// checked if `entropy` is set.
	SecretGroup int

	// Regex is a golang regular expression used to detect secrets.
	Regex *regexp.Regexp

	// Path is a golang regular expression used to
	// filter secrets by path
	Path *regexp.Regexp

	// Tags is an array of strings used for metadata
	// and reporting purposes.
	Tags []string

	// Keywords are used for pre-regex check filtering. Rules that contain
	// keywords will perform a quick string compare check to make sure the
	// keyword(s) are in the content being scanned.
	Keywords []string

	// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
	Allowlists []*Allowlist

	// validated is an internal flag to track whether `Validate()` has been called.
	validated bool
}

// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
	if r.validated {
		return nil
	}

	// Ensure |id| is present.
	if strings.TrimSpace(r.RuleID) == "" {
		// Try to provide helpful context, since |id| is empty.
		var context string
		if r.Regex != nil {
			context = ", regex: " + r.Regex.String()
		} else if r.Path != nil {
			context = ", path: " + r.Path.String()
		} else if r.Description != "" {
			context = ", description: " + r.Description
		}
		return fmt.Errorf("rule |id| is missing or empty" + context)
	}

	// Ensure the rule actually matches something.
	if r.Regex == nil && r.Path == nil {
		return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
	}

	// Ensure |secretGroup| works.
	if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
		return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
	}

	for _, allowlist := range r.Allowlists {
		// This will probably never happen.
		if allowlist == nil {
			continue
		}
		if err := allowlist.Validate(); err != nil {
			return fmt.Errorf("%s: %w", r.RuleID, err)
		}
	}

	r.validated = true
	return nil
}
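A minimal sketch of Validate catching the secret-group misconfiguration described above (the import paths are inferred from this diff and the rule values are hypothetical):

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	r := config.Rule{
		RuleID:      "discord-api-key",
		Regex:       regexp.MustCompile(`discord.{0,5}['"]([a-h0-9]{64})['"]`),
		SecretGroup: 5, // only one capture group exists in the regex
	}
	fmt.Println(r.Validate())
	// discord-api-key: invalid regex secret group 5, max regex secret group 1
}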
@ -20,35 +20,27 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report
package config

import (
	"os"
	"strings"

	"github.com/Infisical/infisical-merge/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

const (
	// https://cwe.mitre.org/data/definitions/798.html
	CWE             = "CWE-798"
	CWE_DESCRIPTION = "Use of Hard-coded Credentials"
)

func Write(findings []Finding, cfg config.Config, ext string, reportPath string) error {
	file, err := os.Create(reportPath)
	if err != nil {
		return err
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
	for _, re := range res {
		if regexMatched(f, re) {
			return true
		}
	}
	ext = strings.ToLower(ext)
	switch ext {
	case ".json", "json":
		err = writeJson(findings, file)
	case ".csv", "csv":
		err = writeCsv(findings, file)
	case ".sarif", "sarif":
		err = writeSarif(cfg, findings, file)
	}

	return err
	return false
}

func regexMatched(f string, re *regexp.Regexp) bool {
	if re == nil {
		return false
	}
	if re.FindString(f) != "" {
		return true
	}
	return false
}
328
cli/detect/decoder.go
Normal file
@ -0,0 +1,328 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
	"bytes"
	"encoding/base64"
	"fmt"
	"regexp"
	"unicode"

	"github.com/Infisical/infisical-merge/detect/logging"
)

var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
	base64.StdEncoding.DecodeString,
	base64.RawURLEncoding.DecodeString,
}

func init() {
	// Basically look for anything that isn't just letters
	for _, c := range `0123456789+/-_` {
		b64LikelyChars[c] = 1
	}
}

// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
	// The parent segment in a segment tree. If nil, it is a root segment
	parent *EncodedSegment

	// Relative start/end are the bounds of the encoded value in the current pass.
	relativeStart int
	relativeEnd   int

	// Absolute start/end refer to the bounds of the root segment in this segment
	// tree
	absoluteStart int
	absoluteEnd   int

	// Decoded start/end refer to the bounds of the decoded value in the current
	// pass. These can differ from relative values because decoding can shrink
	// or grow the size of the segment.
	decodedStart int
	decodedEnd   int

	// This is the actual decoded content in the segment
	decodedValue string

	// This is the type of encoding
	encoding string
}

// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
	return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}

// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
	return start <= s.decodedEnd && end >= s.decodedStart
}

// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
	// The match is within the bounds of the segment so we just return
	// the absolute start and end of the root segment.
	if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
		return []int{
			s.absoluteStart,
			s.absoluteEnd,
		}
	}

	// Since it overlaps one side and/or the other, we're going to have to adjust
	// and climb parents until we're either at the root or we've determined
	// we're fully inside one of the parent segments.
	adjustedMatchIndex := make([]int, 2)

	if matchIndex[0] < s.decodedStart {
		// It starts before the encoded segment so adjust the start to match
		// the location before it was decoded
		matchStartDelta := s.decodedStart - matchIndex[0]
		adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
	} else {
		// It starts within the encoded segment so set the bound to the
		// relative start
		adjustedMatchIndex[0] = s.relativeStart
	}

	if matchIndex[1] > s.decodedEnd {
		// It ends after the encoded segment so adjust the end to match
		// the location before it was decoded
		matchEndDelta := matchIndex[1] - s.decodedEnd
		adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
	} else {
		// It ends within the encoded segment so set the bound to the relative end
		adjustedMatchIndex[1] = s.relativeEnd
	}

	// We're still not at a root segment so we'll need to keep on adjusting
	if s.parent != nil {
		return s.parent.adjustMatchIndex(adjustedMatchIndex)
	}

	return adjustedMatchIndex
}

// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
	depth := 1

	// Climb the tree and increment the depth
	for current := &s; current.parent != nil; current = current.parent {
		depth++
	}

	return depth
}

// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
	return []string{
		fmt.Sprintf("decoded:%s", s.encoding),
		fmt.Sprintf("decode-depth:%d", s.depth()),
	}
}

// Decoder decodes various types of data in place
type Decoder struct {
	decodedMap map[string]string
}

// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
	return &Decoder{
		decodedMap: make(map[string]string),
	}
}

// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
	segments := d.findEncodedSegments(data, parentSegments)

	if len(segments) > 0 {
		result := bytes.NewBuffer(make([]byte, 0, len(data)))

		relativeStart := 0
		for _, segment := range segments {
			result.WriteString(data[relativeStart:segment.relativeStart])
			result.WriteString(segment.decodedValue)
			relativeStart = segment.relativeEnd
		}
		result.WriteString(data[relativeStart:])

		return result.String(), segments
	}

	return data, segments
}

// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
	if len(data) == 0 {
		return []EncodedSegment{}
	}

	matchIndices := b64Regexp.FindAllStringIndex(data, -1)
	if matchIndices == nil {
		return []EncodedSegment{}
	}

	segments := make([]EncodedSegment, 0, len(matchIndices))

	// Keeps up with offsets from the text changing size as things are decoded
	decodedShift := 0

	for _, matchIndex := range matchIndices {
		encodedValue := data[matchIndex[0]:matchIndex[1]]

		if !isLikelyB64(encodedValue) {
			d.decodedMap[encodedValue] = ""
			continue
		}

		decodedValue, alreadyDecoded := d.decodedMap[encodedValue]

		// We haven't decoded this yet, so go ahead and decode it
		if !alreadyDecoded {
			decodedValue = decodeValue(encodedValue)
			d.decodedMap[encodedValue] = decodedValue
		}

		// Skip this segment because there was nothing to check
		if len(decodedValue) == 0 {
			continue
		}

		// Create a segment for the encoded data
		segment := EncodedSegment{
			relativeStart: matchIndex[0],
			relativeEnd:   matchIndex[1],
			absoluteStart: matchIndex[0],
			absoluteEnd:   matchIndex[1],
			decodedStart:  matchIndex[0] + decodedShift,
			decodedEnd:    matchIndex[0] + decodedShift + len(decodedValue),
			decodedValue:  decodedValue,
			encoding:      "base64",
		}

		// Shift decoded start and ends based on size changes
		decodedShift += len(decodedValue) - len(encodedValue)

		// Adjust the absolute position of segments contained in parent segments
		for _, parentSegment := range parentSegments {
			if segment.isChildOf(parentSegment) {
				segment.absoluteStart = parentSegment.absoluteStart
				segment.absoluteEnd = parentSegment.absoluteEnd
				segment.parent = &parentSegment
				break
			}
		}

		logging.Debug().Msgf("segment found: %#v", segment)
		segments = append(segments, segment)
	}

	return segments
}

// decoders tries a list of decoders and returns the first successful one
func decodeValue(encodedValue string) string {
	for _, decoder := range decoders {
		decodedValue, err := decoder(encodedValue)

		if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
			return string(decodedValue)
		}
	}

	return ""
}

func isASCII(b []byte) bool {
	for i := 0; i < len(b); i++ {
		if b[i] > unicode.MaxASCII || b[i] < '\t' {
			return false
		}
	}

	return true
}

// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
	for _, c := range s {
		if b64LikelyChars[c] != 0 {
			return true
		}
	}

	return false
}

// Find a segment where the decoded bounds overlaps a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
	for _, segment := range encodedSegments {
		if segment.decodedOverlaps(start, end) {
			return &segment
		}
	}

	return nil
}

func (s EncodedSegment) currentLine(currentRaw string) string {
	start := 0
	end := len(currentRaw)

	// Find the start of the range
	for i := s.decodedStart; i > -1; i-- {
		c := currentRaw[i]
		if c == '\n' {
			start = i
			break
		}
	}

	// Find the end of the range
	for i := s.decodedEnd; i < end; i++ {
		c := currentRaw[i]
		if c == '\n' {
			end = i
			break
		}
	}

	return currentRaw[start:end]
}
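Since decode and findEncodedSegments are unexported, they can only be exercised from inside package detect (for example from a test file). A minimal in-package sketch of one decode pass, using a hypothetical payload ("secret=hunter2hunter2" base64-encoded):

package detect

import "fmt"

func ExampleDecoderPass() {
	d := NewDecoder()

	// Hypothetical fragment containing a base64-encoded value.
	data := "token: c2VjcmV0PWh1bnRlcjJodW50ZXIy"

	// One pass: the encoded run is replaced in place and tracked as a segment.
	decoded, segments := d.decode(data, nil)
	fmt.Println(decoded)            // token: secret=hunter2hunter2
	fmt.Println(len(segments))      // 1
	fmt.Println(segments[0].tags()) // [decoded:base64 decode-depth:1]
}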
@ -1,754 +0,0 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
)
|
||||
|
||||
const configPath = "../testdata/config/"
|
||||
const repoBasePath = "../testdata/repos/"
|
||||
|
||||
func TestDetect(t *testing.T) {
|
||||
tests := []struct {
|
||||
cfgName string
|
||||
baselinePath string
|
||||
fragment Fragment
|
||||
// NOTE: for expected findings, all line numbers will be 0
|
||||
// because line deltas are added _after_ the finding is created.
|
||||
// I.e, if the finding is from a --no-git file, the line number will be
|
||||
// increase by 1 in DetectFromFiles(). If the finding is from git,
|
||||
// the line number will be increased by the patch delta.
|
||||
        expectedFindings []report.Finding
        wantError        error
    }{
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw:      `awsToken := \"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw: `awsToken := \

\"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"

`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw: `awsToken := \"AKIALALEMEL33243OKIA\"

// infisical-scan:ignore"

`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    Secret:      "AKIALALEMEL33243OKIA",
                    Match:       "AKIALALEMEL33243OKIA",
                    File:        "tmp.go",
                    Line:        `awsToken := \"AKIALALEMEL33243OKIA\"`,
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 15,
                    EndColumn:   34,
                    Entropy:     3.1464393,
                },
            },
        },
        {
            cfgName: "escaped_character_group",
            fragment: Fragment{
                Raw:      `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "PyPI upload token",
                    Secret:      "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
                    Match:       "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
                    Line:        `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
                    File:        "tmp.go",
                    RuleID:      "pypi-upload-token",
                    Tags:        []string{"key", "pypi"},
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 1,
                    EndColumn:   86,
                    Entropy:     1.9606875,
                },
            },
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    Secret:      "AKIALALEMEL33243OLIA",
                    Match:       "AKIALALEMEL33243OLIA",
                    Line:        `awsToken := \"AKIALALEMEL33243OLIA\"`,
                    File:        "tmp.go",
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 15,
                    EndColumn:   34,
                    Entropy:     3.0841837,
                },
            },
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw:      `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
                FilePath: "tmp.sh",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "Sidekiq Secret",
                    Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
                    Secret:      "cafebabe:deadbeef",
                    Line:        `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
                    File:        "tmp.sh",
                    RuleID:      "sidekiq-secret",
                    Tags:        []string{},
                    Entropy:     2.6098502,
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 8,
                    EndColumn:   60,
                },
            },
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw:      `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
                FilePath: "tmp.sh",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "Sidekiq Secret",
                    Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
                    Secret:      "cafebabe:deadbeef",
                    File:        "tmp.sh",
                    Line:        `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
                    RuleID:      "sidekiq-secret",
                    Tags:        []string{},
                    Entropy:     2.6098502,
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 21,
                    EndColumn:   74,
                },
            },
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw:      `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
                FilePath: "tmp.sh",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "Sidekiq Sensitive URL",
                    Match:       "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
                    Secret:      "cafeb4b3:d3adb33f",
                    File:        "tmp.sh",
                    Line:        `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
                    RuleID:      "sidekiq-sensitive-url",
                    Tags:        []string{},
                    Entropy:     2.984234,
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 8,
                    EndColumn:   58,
                },
            },
        },
        {
            cfgName: "allow_aws_re",
            fragment: Fragment{
                Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "allow_path",
            fragment: Fragment{
                Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "allow_commit",
            fragment: Fragment{
                Raw:       `awsToken := \"AKIALALEMEL33243OLIA\"`,
                FilePath:  "tmp.go",
                CommitSHA: "allowthiscommit",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "entropy_group",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "Discord API key",
                    Match:       "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
                    Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
                    Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                    File:        "tmp.go",
                    RuleID:      "discord-api-key",
                    Tags:        []string{},
                    Entropy:     3.7906237,
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 7,
                    EndColumn:   93,
                },
            },
        },
        {
            cfgName: "generic_with_py_path",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "generic_with_py_path",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: "tmp.py",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "Generic API Key",
                    Match:       "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
                    Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
                    Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                    File:        "tmp.py",
                    RuleID:      "generic-api-key",
                    Tags:        []string{},
                    Entropy:     3.7906237,
                    StartLine:   0,
                    EndLine:     0,
                    StartColumn: 22,
                    EndColumn:   93,
                },
            },
        },
        {
            cfgName: "path_only",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: "tmp.py",
            },
            expectedFindings: []report.Finding{
                {
                    Description: "Python Files",
                    Match:       "file detected: tmp.py",
                    File:        "tmp.py",
                    RuleID:      "python-files-only",
                    Tags:        []string{},
                },
            },
        },
        {
            cfgName: "bad_entropy_group",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
            wantError:        fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
        },
        {
            cfgName: "simple",
            fragment: Fragment{
                Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
                FilePath: filepath.Join(configPath, "simple.toml"),
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "allow_global_aws_re",
            fragment: Fragment{
                Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
                FilePath: "tmp.go",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName: "generic_with_py_path",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: "tmp.py",
            },
            expectedFindings: []report.Finding{},
        },
        {
            cfgName:      "path_only",
            baselinePath: ".baseline.json",
            fragment: Fragment{
                Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
                FilePath: ".baseline.json",
            },
            expectedFindings: []report.Finding{},
        },
    }

    for _, tt := range tests {
        viper.Reset()
        viper.AddConfigPath(configPath)
        viper.SetConfigName(tt.cfgName)
        viper.SetConfigType("toml")
        err := viper.ReadInConfig()
        if err != nil {
            t.Error(err)
        }

        var vc config.ViperConfig
        err = viper.Unmarshal(&vc)
        if err != nil {
            t.Error(err)
        }
        cfg, err := vc.Translate()
        cfg.Path = filepath.Join(configPath, tt.cfgName+".toml")
        if tt.wantError != nil {
            if err == nil {
                t.Errorf("expected error")
            }
            assert.Equal(t, tt.wantError, err)
        }
        d := NewDetector(cfg)
        d.baselinePath = tt.baselinePath

        findings := d.Detect(tt.fragment)
        assert.ElementsMatch(t, tt.expectedFindings, findings)
    }
}

// TestFromGit tests the DetectGit function
func TestFromGit(t *testing.T) {
    tests := []struct {
        cfgName          string
        source           string
        logOpts          string
        expectedFindings []report.Finding
    }{
        {
            source:  filepath.Join(repoBasePath, "small"),
            cfgName: "simple",
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    StartLine:   20,
                    EndLine:     20,
                    StartColumn: 19,
                    EndColumn:   38,
                    Line:        "\n awsToken := \"AKIALALEMEL33243OLIA\"",
                    Secret:      "AKIALALEMEL33243OLIA",
                    Match:       "AKIALALEMEL33243OLIA",
                    File:        "main.go",
                    Date:        "2021-11-02T23:37:53Z",
                    Commit:      "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587",
                    Author:      "Zachary Rice",
                    Email:       "zricer@protonmail.com",
                    Message:     "Accidentally add a secret",
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    Entropy:     3.0841837,
                    Fingerprint: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587:main.go:aws-access-key:20",
                },
                {
                    Description: "AWS Access Key",
                    StartLine:   9,
                    EndLine:     9,
                    StartColumn: 17,
                    EndColumn:   36,
                    Secret:      "AKIALALEMEL33243OLIA",
                    Match:       "AKIALALEMEL33243OLIA",
                    Line:        "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
                    File:        "foo/foo.go",
                    Date:        "2021-11-02T23:48:06Z",
                    Commit:      "491504d5a31946ce75e22554cc34203d8e5ff3ca",
                    Author:      "Zach Rice",
                    Email:       "zricer@protonmail.com",
                    Message:     "adding foo package with secret",
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    Entropy:     3.0841837,
                    Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
                },
            },
        },
        {
            source:  filepath.Join(repoBasePath, "small"),
            logOpts: "--all foo...",
            cfgName: "simple",
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    StartLine:   9,
                    EndLine:     9,
                    StartColumn: 17,
                    EndColumn:   36,
                    Secret:      "AKIALALEMEL33243OLIA",
                    Line:        "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
                    Match:       "AKIALALEMEL33243OLIA",
                    Date:        "2021-11-02T23:48:06Z",
                    File:        "foo/foo.go",
                    Commit:      "491504d5a31946ce75e22554cc34203d8e5ff3ca",
                    Author:      "Zach Rice",
                    Email:       "zricer@protonmail.com",
                    Message:     "adding foo package with secret",
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    Entropy:     3.0841837,
                    Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
                },
            },
        },
    }

    err := moveDotGit("dotGit", ".git")
    if err != nil {
        t.Fatal(err)
    }
    defer func() {
        if err := moveDotGit(".git", "dotGit"); err != nil {
            t.Error(err)
        }
    }()

    for _, tt := range tests {

        viper.AddConfigPath(configPath)
        viper.SetConfigName("simple")
        viper.SetConfigType("toml")
        err = viper.ReadInConfig()
        if err != nil {
            t.Error(err)
        }

        var vc config.ViperConfig
        err = viper.Unmarshal(&vc)
        if err != nil {
            t.Error(err)
        }
        cfg, err := vc.Translate()
        if err != nil {
            t.Error(err)
        }
        detector := NewDetector(cfg)
        findings, err := detector.DetectGit(tt.source, tt.logOpts, DetectType)
        if err != nil {
            t.Error(err)
        }

        for _, f := range findings {
            f.Match = "" // remove lines because copying and pasting them has some wacky formatting
        }
        assert.ElementsMatch(t, tt.expectedFindings, findings)
    }
}

func TestFromGitStaged(t *testing.T) {
    tests := []struct {
        cfgName          string
        source           string
        logOpts          string
        expectedFindings []report.Finding
    }{
        {
            source:  filepath.Join(repoBasePath, "staged"),
            cfgName: "simple",
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    StartLine:   7,
                    EndLine:     7,
                    StartColumn: 18,
                    EndColumn:   37,
                    Line:        "\n\taws_token2 := \"AKIALALEMEL33243OLIA\" // this one is not",
                    Match:       "AKIALALEMEL33243OLIA",
                    Secret:      "AKIALALEMEL33243OLIA",
                    File:        "api/api.go",
                    SymlinkFile: "",
                    Commit:      "",
                    Entropy:     3.0841837,
                    Author:      "",
                    Email:       "",
                    Date:        "0001-01-01T00:00:00Z",
                    Message:     "",
                    Tags: []string{
                        "key",
                        "AWS",
                    },
                    RuleID:      "aws-access-key",
                    Fingerprint: "api/api.go:aws-access-key:7",
                },
            },
        },
    }

    err := moveDotGit("dotGit", ".git")
    if err != nil {
        t.Fatal(err)
    }
    defer func() {
        if err := moveDotGit(".git", "dotGit"); err != nil {
            t.Error(err)
        }
    }()

    for _, tt := range tests {

        viper.AddConfigPath(configPath)
        viper.SetConfigName("simple")
        viper.SetConfigType("toml")
        err = viper.ReadInConfig()
        if err != nil {
            t.Error(err)
        }

        var vc config.ViperConfig
        err = viper.Unmarshal(&vc)
        if err != nil {
            t.Error(err)
        }
        cfg, err := vc.Translate()
        if err != nil {
            t.Error(err)
        }
        detector := NewDetector(cfg)
        detector.AddGitleaksIgnore(filepath.Join(tt.source, ".gitleaksignore"))
        findings, err := detector.DetectGit(tt.source, tt.logOpts, ProtectStagedType)
        if err != nil {
            t.Error(err)
        }

        for _, f := range findings {
            f.Match = "" // remove lines because copying and pasting them has some wacky formatting
        }
        assert.ElementsMatch(t, tt.expectedFindings, findings)
    }
}

// TestFromFiles tests the DetectFiles function
func TestFromFiles(t *testing.T) {
    tests := []struct {
        cfgName          string
        source           string
        expectedFindings []report.Finding
    }{
        {
            source:  filepath.Join(repoBasePath, "nogit"),
            cfgName: "simple",
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    StartLine:   20,
                    EndLine:     20,
                    StartColumn: 16,
                    EndColumn:   35,
                    Match:       "AKIALALEMEL33243OLIA",
                    Secret:      "AKIALALEMEL33243OLIA",
                    Line:        "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
                    File:        "../testdata/repos/nogit/main.go",
                    SymlinkFile: "",
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    Entropy:     3.0841837,
                    Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
                },
            },
        },
        {
            source:  filepath.Join(repoBasePath, "nogit", "main.go"),
            cfgName: "simple",
            expectedFindings: []report.Finding{
                {
                    Description: "AWS Access Key",
                    StartLine:   20,
                    EndLine:     20,
                    StartColumn: 16,
                    EndColumn:   35,
                    Match:       "AKIALALEMEL33243OLIA",
                    Secret:      "AKIALALEMEL33243OLIA",
                    Line:        "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
                    File:        "../testdata/repos/nogit/main.go",
                    RuleID:      "aws-access-key",
                    Tags:        []string{"key", "AWS"},
                    Entropy:     3.0841837,
                    Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
                },
            },
        },
    }

    for _, tt := range tests {
        viper.AddConfigPath(configPath)
        viper.SetConfigName("simple")
        viper.SetConfigType("toml")
        err := viper.ReadInConfig()
        if err != nil {
            t.Error(err)
        }

        var vc config.ViperConfig
        err = viper.Unmarshal(&vc)
        if err != nil {
            t.Error(err)
        }
        cfg, _ := vc.Translate()
        detector := NewDetector(cfg)
        detector.FollowSymlinks = true
        findings, err := detector.DetectFiles(tt.source)
        if err != nil {
            t.Error(err)
        }

        assert.ElementsMatch(t, tt.expectedFindings, findings)
    }
}

func TestDetectWithSymlinks(t *testing.T) {
    tests := []struct {
        cfgName          string
        source           string
        expectedFindings []report.Finding
    }{
        {
            source:  filepath.Join(repoBasePath, "symlinks/file_symlink"),
            cfgName: "simple",
            expectedFindings: []report.Finding{
                {
                    Description: "Asymmetric Private Key",
                    StartLine:   1,
                    EndLine:     1,
                    StartColumn: 1,
                    EndColumn:   35,
                    Match:       "-----BEGIN OPENSSH PRIVATE KEY-----",
                    Secret:      "-----BEGIN OPENSSH PRIVATE KEY-----",
                    Line:        "-----BEGIN OPENSSH PRIVATE KEY-----",
                    File:        "../testdata/repos/symlinks/source_file/id_ed25519",
                    SymlinkFile: "../testdata/repos/symlinks/file_symlink/symlinked_id_ed25519",
                    RuleID:      "apkey",
                    Tags:        []string{"key", "AsymmetricPrivateKey"},
                    Entropy:     3.587164,
                    Fingerprint: "../testdata/repos/symlinks/source_file/id_ed25519:apkey:1",
                },
            },
        },
    }

    for _, tt := range tests {
        viper.AddConfigPath(configPath)
        viper.SetConfigName("simple")
        viper.SetConfigType("toml")
        err := viper.ReadInConfig()
        if err != nil {
            t.Error(err)
        }

        var vc config.ViperConfig
        err = viper.Unmarshal(&vc)
        if err != nil {
            t.Error(err)
        }
        cfg, _ := vc.Translate()
        detector := NewDetector(cfg)
        detector.FollowSymlinks = true
        findings, err := detector.DetectFiles(tt.source)
        if err != nil {
            t.Error(err)
        }
        assert.ElementsMatch(t, tt.expectedFindings, findings)
    }
}

func moveDotGit(from, to string) error {
    repoDirs, err := os.ReadDir("../testdata/repos")
    if err != nil {
        return err
    }
    for _, dir := range repoDirs {
        if to == ".git" {
            _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
            if os.IsNotExist(err) {
                // don't want to delete the only copy of .git accidentally
                continue
            }
            os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
        }
        if !dir.IsDir() {
            continue
        }
        _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
        if os.IsNotExist(err) {
            continue
        }

        err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
            fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
        if err != nil {
            return err
        }
    }
    return nil
}
225
cli/detect/directory.go
Normal file
@ -0,0 +1,225 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "bufio"
    "bytes"
    "io"
    "os"
    "path/filepath"
    "strings"
    "time"

    "github.com/h2non/filetype"

    "github.com/Infisical/infisical-merge/detect/logging"
    "github.com/Infisical/infisical-merge/detect/report"
    "github.com/Infisical/infisical-merge/detect/sources"
)

const maxPeekSize = 25 * 1_000 // 25kb

func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
    for pa := range paths {
        d.Sema.Go(func() error {
            logger := logging.With().Str("path", pa.Path).Logger()
            logger.Trace().Msg("Scanning path")

            f, err := os.Open(pa.Path)
            if err != nil {
                if os.IsPermission(err) {
                    logger.Warn().Msg("Skipping file: permission denied")
                    return nil
                }
                return err
            }
            defer func() {
                _ = f.Close()
            }()

            // Get file size
            fileInfo, err := f.Stat()
            if err != nil {
                return err
            }
            fileSize := fileInfo.Size()
            if d.MaxTargetMegaBytes > 0 {
                rawLength := fileSize / 1000000
                if rawLength > int64(d.MaxTargetMegaBytes) {
                    logger.Debug().
                        Int64("size", rawLength).
                        Msg("Skipping file: exceeds --max-target-megabytes")
                    return nil
                }
            }

            var (
                // Buffer to hold file chunks
                reader     = bufio.NewReaderSize(f, chunkSize)
                buf        = make([]byte, chunkSize)
                totalLines = 0
            )
            for {
                n, err := reader.Read(buf)

                // "Callers should always process the n > 0 bytes returned before considering the error err."
                // https://pkg.go.dev/io#Reader
                if n > 0 {
                    // Only check the filetype at the start of file.
                    if totalLines == 0 {
                        // TODO: could other optimizations be introduced here?
                        if mimetype, err := filetype.Match(buf[:n]); err != nil {
                            return nil
                        } else if mimetype.MIME.Type == "application" {
                            return nil // skip binary files
                        }
                    }

                    // Try to split chunks across large areas of whitespace, if possible.
                    peekBuf := bytes.NewBuffer(buf[:n])
                    if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
                        return readErr
                    }

                    // Count the number of newlines in this chunk
                    chunk := peekBuf.String()
                    linesInChunk := strings.Count(chunk, "\n")
                    totalLines += linesInChunk
                    fragment := Fragment{
                        Raw:   chunk,
                        Bytes: peekBuf.Bytes(),
                    }
                    if pa.Symlink != "" {
                        fragment.SymlinkFile = pa.Symlink
                    }

                    if isWindows {
                        fragment.FilePath = filepath.ToSlash(pa.Path)
                        fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
                        fragment.WindowsFilePath = pa.Path
                    } else {
                        fragment.FilePath = pa.Path
                    }

                    timer := time.AfterFunc(SlowWarningThreshold, func() {
                        logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
                    })
                    for _, finding := range d.Detect(fragment) {
                        // need to add 1 since line counting starts at 1
                        finding.StartLine += (totalLines - linesInChunk) + 1
                        finding.EndLine += (totalLines - linesInChunk) + 1
                        d.AddFinding(finding)
                    }
                    if timer != nil {
                        timer.Stop()
                        timer = nil
                    }
                }

                if err != nil {
                    if err == io.EOF {
                        return nil
                    }
                    return err
                }
            }
        })
    }

    if err := d.Sema.Wait(); err != nil {
        return d.findings, err
    }

    return d.findings, nil
}

// readUntilSafeBoundary consumes |f| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
    if peekBuf.Len() == 0 {
        return nil
    }

    // Does the buffer end in consecutive newlines?
    var (
        data         = peekBuf.Bytes()
        lastChar     = data[len(data)-1]
        newlineCount = 0 // Tracks consecutive newlines
    )
    if isWhitespace(lastChar) {
        for i := len(data) - 1; i >= 0; i-- {
            lastChar = data[i]
            if lastChar == '\n' {
                newlineCount++

                // Stop if two consecutive newlines are found
                if newlineCount >= 2 {
                    return nil
                }
            } else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
                // The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
                // (Intentionally do nothing.)
            } else {
                break
            }
        }
    }

    // If not, read ahead until we (hopefully) find some.
    newlineCount = 0
    for {
        data = peekBuf.Bytes()
        // Check if the last character is a newline.
        lastChar = data[len(data)-1]
        if lastChar == '\n' {
            newlineCount++

            // Stop if two consecutive newlines are found
            if newlineCount >= 2 {
                break
            }
        } else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
            // The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
            // (Intentionally do nothing.)
        } else {
            newlineCount = 0 // Reset if a non-newline character is found
        }

        // Stop growing the buffer if it reaches maxSize
        if (peekBuf.Len() - n) >= maxPeekSize {
            break
        }

        // Read additional data into a temporary buffer
        b, err := r.ReadByte()
        if err != nil {
            if err == io.EOF {
                break
            }
            return err
        }
        peekBuf.WriteByte(b)
    }
    return nil
}
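The safe-boundary read above is the core trick in this file: after a fixed-size chunk, keep consuming single bytes until a blank line (two consecutive newlines) or a peek limit is hit, so a secret is less likely to be split across chunks. A minimal, standalone sketch of the same idea, not the code from this diff; the function and variable names here are illustrative:

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"io"
	"strings"
)

// extendToBlankLine mirrors the readUntilSafeBoundary idea: keep reading
// single bytes until a blank line is seen or maxPeek extra bytes are consumed.
func extendToBlankLine(r *bufio.Reader, chunk *bytes.Buffer, maxPeek int) error {
	start := chunk.Len()
	newlines := 0
	for chunk.Len()-start < maxPeek {
		b, err := r.ReadByte()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		chunk.WriteByte(b)
		if b == '\n' {
			newlines++
			if newlines >= 2 {
				return nil // safe boundary reached
			}
		} else if b != '\r' && b != ' ' && b != '\t' {
			newlines = 0 // other whitespace does not reset the count
		}
	}
	return nil
}

func main() {
	input := "line1\nAKIA_SECRET_VALUE\n\nline3\n"
	r := bufio.NewReader(strings.NewReader(input))

	buf := make([]byte, 8) // deliberately tiny chunk size for the demo
	n, _ := r.Read(buf)    // first chunk ends mid-token: "line1\nAK"
	chunk := bytes.NewBuffer(buf[:n])
	if err := extendToBlankLine(r, chunk, 64); err != nil {
		panic(err)
	}
	// The chunk now ends on the blank line instead of mid-token.
	fmt.Printf("%q\n", chunk.String())
}
```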
214
cli/detect/git.go
Normal file
@ -0,0 +1,214 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "bytes"
    "errors"
    "fmt"
    "net/url"
    "os/exec"
    "regexp"
    "strings"
    "time"

    "github.com/Infisical/infisical-merge/detect/cmd/scm"
    "github.com/gitleaks/go-gitdiff/gitdiff"

    "github.com/Infisical/infisical-merge/detect/logging"
    "github.com/Infisical/infisical-merge/detect/report"
    "github.com/Infisical/infisical-merge/detect/sources"
)

func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
    defer cmd.Wait()
    var (
        diffFilesCh = cmd.DiffFilesCh()
        errCh       = cmd.ErrCh()
    )

    // loop to range over both DiffFiles (stdout) and ErrCh (stderr)
    for diffFilesCh != nil || errCh != nil {
        select {
        case gitdiffFile, open := <-diffFilesCh:
            if !open {
                diffFilesCh = nil
                break
            }

            // skip binary files
            if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
                continue
            }

            // Check if commit is allowed
            commitSHA := ""
            if gitdiffFile.PatchHeader != nil {
                commitSHA = gitdiffFile.PatchHeader.SHA
                for _, a := range d.Config.Allowlists {
                    if ok, c := a.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
                        logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
                        continue
                    }
                }
            }
            d.addCommit(commitSHA)

            d.Sema.Go(func() error {
                for _, textFragment := range gitdiffFile.TextFragments {
                    if textFragment == nil {
                        return nil
                    }

                    fragment := Fragment{
                        Raw:       textFragment.Raw(gitdiff.OpAdd),
                        CommitSHA: commitSHA,
                        FilePath:  gitdiffFile.NewName,
                    }

                    timer := time.AfterFunc(SlowWarningThreshold, func() {
                        logging.Debug().
                            Str("commit", commitSHA[:7]).
                            Str("path", fragment.FilePath).
                            Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
                    })
                    for _, finding := range d.Detect(fragment) {
                        d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
                    }
                    if timer != nil {
                        timer.Stop()
                        timer = nil
                    }
                }
                return nil
            })
        case err, open := <-errCh:
            if !open {
                errCh = nil
                break
            }

            return d.findings, err
        }
    }

    if err := d.Sema.Wait(); err != nil {
        return d.findings, err
    }
    logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
    logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
    return d.findings, nil
}
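DetectGit drains two channels at once and disables each one by setting it to nil after it closes: receiving from a nil channel blocks forever, so a select with a nil case effectively stops considering it, and the loop exits once both are nil. A minimal, self-contained sketch of that pattern (illustrative only, not part of this diff):

```go
package main

import "fmt"

func main() {
	dataCh := make(chan string, 2)
	errCh := make(chan error, 1)
	dataCh <- "diff-1"
	dataCh <- "diff-2"
	close(dataCh)
	close(errCh)

	// Loop until both channels have been observed closed and set to nil.
	for dataCh != nil || errCh != nil {
		select {
		case v, open := <-dataCh:
			if !open {
				dataCh = nil // this case will never fire again
				break
			}
			fmt.Println("got:", v)
		case err, open := <-errCh:
			if !open {
				errCh = nil
				break
			}
			fmt.Println("error:", err)
		}
	}
}
```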

type RemoteInfo struct {
    Platform scm.Platform
    Url      string
}

func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
    if platform == scm.NoPlatform {
        return &RemoteInfo{Platform: platform}
    }

    remoteUrl, err := getRemoteUrl(source)
    if err != nil {
        if strings.Contains(err.Error(), "No remote configured") {
            logging.Debug().Msg("skipping finding links: repository has no configured remote.")
            platform = scm.NoPlatform
        } else {
            logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
        }
        goto End
    }

    if platform == scm.UnknownPlatform {
        platform = platformFromHost(remoteUrl)
        if platform == scm.UnknownPlatform {
            logging.Info().
                Str("host", remoteUrl.Hostname()).
                Msg("Unknown SCM platform. Use --platform to include links in findings.")
        } else {
            logging.Debug().
                Str("host", remoteUrl.Hostname()).
                Str("platform", platform.String()).
                Msg("SCM platform parsed from host")
        }
    }

End:
    var rUrl string
    if remoteUrl != nil {
        rUrl = remoteUrl.String()
    }
    return &RemoteInfo{
        Platform: platform,
        Url:      rUrl,
    }
}

var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)

func getRemoteUrl(source string) (*url.URL, error) {
    // This will return the first remote, typically "origin".
    cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
    if source != "." {
        cmd.Dir = source
    }

    stdout, err := cmd.Output()
    if err != nil {
        var exitError *exec.ExitError
        if errors.As(err, &exitError) {
            return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
        }
        return nil, err
    }

    remoteUrl := string(bytes.TrimSpace(stdout))
    if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
        remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
    }
    remoteUrl = strings.TrimSuffix(remoteUrl, ".git")

    parsedUrl, err := url.Parse(remoteUrl)
    if err != nil {
        return nil, fmt.Errorf("unable to parse remote URL: %w", err)
    }

    // Remove any user info.
    parsedUrl.User = nil
    return parsedUrl, nil
}

func platformFromHost(u *url.URL) scm.Platform {
    switch strings.ToLower(u.Hostname()) {
    case "github.com":
        return scm.GitHubPlatform
    case "gitlab.com":
        return scm.GitLabPlatform
    case "dev.azure.com", "visualstudio.com":
        return scm.AzureDevOpsPlatform
    default:
        return scm.UnknownPlatform
    }
}
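The sshUrlpat regex above rewrites SSH-style remotes into HTTPS URLs so findings can link back to the hosting platform: group 1 captures the host, group 2 the repository path, and a trailing ".git" is dropped. A small runnable sketch of that conversion, using the same pattern (the example remotes are made up):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as sshUrlpat in the diff above: host in group 1,
// repo path in group 2, optional ".git" excluded from the capture.
var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)

func main() {
	for _, remote := range []string{
		"git@github.com:Infisical/infisical.git",
		"git@gitlab.com:group/subgroup/project",
	} {
		if m := sshUrlpat.FindStringSubmatch(remote); m != nil {
			fmt.Printf("%s -> https://%s/%s\n", remote, m[1], m[2])
		}
	}
}
```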
@ -1,143 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package git

import (
    "bufio"
    "io"
    "os/exec"
    "path/filepath"
    "strings"
    "time"

    "github.com/gitleaks/go-gitdiff/gitdiff"
    "github.com/rs/zerolog/log"
)

var ErrEncountered bool

// GitLog returns a channel of gitdiff.File objects from the
// git log -p command for the given source.
func GitLog(source string, logOpts string) (<-chan *gitdiff.File, error) {
    sourceClean := filepath.Clean(source)
    var cmd *exec.Cmd
    if logOpts != "" {
        args := []string{"-C", sourceClean, "log", "-p", "-U0"}
        args = append(args, strings.Split(logOpts, " ")...)
        cmd = exec.Command("git", args...)
    } else {
        cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
            "--full-history", "--all")
    }

    log.Debug().Msgf("executing: %s", cmd.String())

    stdout, err := cmd.StdoutPipe()
    if err != nil {
        return nil, err
    }
    stderr, err := cmd.StderrPipe()
    if err != nil {
        return nil, err
    }

    go listenForStdErr(stderr)

    if err := cmd.Start(); err != nil {
        return nil, err
    }
    // HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
    time.Sleep(50 * time.Millisecond)

    return gitdiff.Parse(cmd, stdout)
}

// GitDiff returns a channel of gitdiff.File objects from
// the git diff command for the given source.
func GitDiff(source string, staged bool) (<-chan *gitdiff.File, error) {
    sourceClean := filepath.Clean(source)
    var cmd *exec.Cmd
    cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", ".")
    if staged {
        cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0",
            "--staged", ".")
    }
    log.Debug().Msgf("executing: %s", cmd.String())

    stdout, err := cmd.StdoutPipe()
    if err != nil {
        return nil, err
    }
    stderr, err := cmd.StderrPipe()
    if err != nil {
        return nil, err
    }

    go listenForStdErr(stderr)

    if err := cmd.Start(); err != nil {
        return nil, err
    }
    // HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
    time.Sleep(50 * time.Millisecond)

    return gitdiff.Parse(cmd, stdout)
}

// listenForStdErr listens for stderr output from git and prints it to stdout
// then exits with exit code 1
func listenForStdErr(stderr io.ReadCloser) {
    scanner := bufio.NewScanner(stderr)
    for scanner.Scan() {
        // if git throws one of the following errors:
        //
        //  exhaustive rename detection was skipped due to too many files.
        //  you may want to set your diff.renameLimit variable to at least
        //  (some large number) and retry the command.
        //
        //  inexact rename detection was skipped due to too many files.
        //  you may want to set your diff.renameLimit variable to at least
        //  (some large number) and retry the command.
        //
        // we skip exiting the program as git log -p/git diff will continue
        // to send data to stdout and finish executing. This next bit of
        // code prevents gitleaks from stopping mid scan if this error is
        // encountered
        if strings.Contains(scanner.Text(),
            "exhaustive rename detection was skipped") ||
            strings.Contains(scanner.Text(),
                "inexact rename detection was skipped") ||
            strings.Contains(scanner.Text(),
                "you may want to set your diff.renameLimit") {
            log.Warn().Msg(scanner.Text())
        } else {
            log.Error().Msgf("[git] %s", scanner.Text())

            // asynchronously set this error flag to true so that we can
            // capture a log message and exit with a non-zero exit code
            // This value should get set before the `git` command exits so it's
            // safe-ish, although I know I know, bad practice.
            ErrEncountered = true
        }
    }
}
@ -1,158 +0,0 @@
package git_test

// TODO: commenting out this test for now because it's flaky. Alternatives to consider to get this working:
// -- use `git stash` instead of `restore()`

// const repoBasePath = "../../testdata/repos/"

// const expectPath = "../../testdata/expected/"

// func TestGitLog(t *testing.T) {
// 	tests := []struct {
// 		source   string
// 		logOpts  string
// 		expected string
// 	}{
// 		{
// 			source:   filepath.Join(repoBasePath, "small"),
// 			expected: filepath.Join(expectPath, "git", "small.txt"),
// 		},
// 		{
// 			source:   filepath.Join(repoBasePath, "small"),
// 			expected: filepath.Join(expectPath, "git", "small-branch-foo.txt"),
// 			logOpts:  "--all foo...",
// 		},
// 	}

// 	err := moveDotGit("dotGit", ".git")
// 	if err != nil {
// 		t.Fatal(err)
// 	}
// 	defer func() {
// 		if err = moveDotGit(".git", "dotGit"); err != nil {
// 			t.Fatal(err)
// 		}
// 	}()

// 	for _, tt := range tests {
// 		files, err := git.GitLog(tt.source, tt.logOpts)
// 		if err != nil {
// 			t.Error(err)
// 		}

// 		var diffSb strings.Builder
// 		for f := range files {
// 			for _, tf := range f.TextFragments {
// 				diffSb.WriteString(tf.Raw(gitdiff.OpAdd))
// 			}
// 		}

// 		expectedBytes, err := os.ReadFile(tt.expected)
// 		if err != nil {
// 			t.Error(err)
// 		}
// 		expected := string(expectedBytes)
// 		if expected != diffSb.String() {
// 			// write string builder to .got file using os.Create
// 			err = os.WriteFile(strings.Replace(tt.expected, ".txt", ".got.txt", 1), []byte(diffSb.String()), 0644)
// 			if err != nil {
// 				t.Error(err)
// 			}
// 			t.Error("expected: ", expected, "got: ", diffSb.String())
// 		}
// 	}
// }

// func TestGitDiff(t *testing.T) {
// 	tests := []struct {
// 		source    string
// 		expected  string
// 		additions string
// 		target    string
// 	}{
// 		{
// 			source:    filepath.Join(repoBasePath, "small"),
// 			expected:  "this line is added\nand another one",
// 			additions: "this line is added\nand another one",
// 			target:    filepath.Join(repoBasePath, "small", "main.go"),
// 		},
// 	}

// 	err := moveDotGit("dotGit", ".git")
// 	if err != nil {
// 		t.Fatal(err)
// 	}
// 	defer func() {
// 		if err = moveDotGit(".git", "dotGit"); err != nil {
// 			t.Fatal(err)
// 		}
// 	}()

// 	for _, tt := range tests {
// 		noChanges, err := os.ReadFile(tt.target)
// 		if err != nil {
// 			t.Error(err)
// 		}
// 		err = os.WriteFile(tt.target, []byte(tt.additions), 0644)
// 		if err != nil {
// 			restore(tt.target, noChanges, t)
// 			t.Error(err)
// 		}

// 		files, err := git.GitDiff(tt.source, false)
// 		if err != nil {
// 			restore(tt.target, noChanges, t)
// 			t.Error(err)
// 		}

// 		for f := range files {
// 			sb := strings.Builder{}
// 			for _, tf := range f.TextFragments {
// 				sb.WriteString(tf.Raw(gitdiff.OpAdd))
// 			}
// 			if sb.String() != tt.expected {
// 				restore(tt.target, noChanges, t)
// 				t.Error("expected: ", tt.expected, "got: ", sb.String())
// 			}
// 		}
// 		restore(tt.target, noChanges, t)
// 	}
// }

// func restore(path string, data []byte, t *testing.T) {
// 	err := os.WriteFile(path, data, 0644)
// 	if err != nil {
// 		t.Fatal(err)
// 	}
// }

// func moveDotGit(from, to string) error {
// 	repoDirs, err := os.ReadDir("../../testdata/repos")
// 	if err != nil {
// 		return err
// 	}
// 	for _, dir := range repoDirs {
// 		if to == ".git" {
// 			_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
// 			if os.IsNotExist(err) {
// 				// dont want to delete the only copy of .git accidentally
// 				continue
// 			}
// 			os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
// 		}
// 		if !dir.IsDir() {
// 			continue
// 		}
// 		_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
// 		if os.IsNotExist(err) {
// 			continue
// 		}

// 		err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
// 			fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
// 		if err != nil {
// 			return err
// 		}
// 	}
// 	return nil
// }
@ -72,6 +72,7 @@ func location(fragment Fragment, matchIndex []int) Location {
            location.endColumn = (end - prevNewLine)
            location.endLineIndex = newLineByteIndex
        }

        prevNewLine = pair[0]
    }

@ -1,82 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "testing"
)

// TestGetLocation tests the getLocation function.
func TestGetLocation(t *testing.T) {
    tests := []struct {
        linePairs    [][]int
        start        int
        end          int
        wantLocation Location
    }{
        {
            linePairs: [][]int{
                {0, 39},
                {40, 55},
                {56, 57},
            },
            start: 35,
            end:   38,
            wantLocation: Location{
                startLine:      1,
                startColumn:    36,
                endLine:        1,
                endColumn:      38,
                startLineIndex: 0,
                endLineIndex:   40,
            },
        },
        {
            linePairs: [][]int{
                {0, 39},
                {40, 55},
                {56, 57},
            },
            start: 40,
            end:   44,
            wantLocation: Location{
                startLine:      2,
                startColumn:    1,
                endLine:        2,
                endColumn:      4,
                startLineIndex: 40,
                endLineIndex:   56,
            },
        },
    }

    for _, test := range tests {
        loc := location(Fragment{newlineIndices: test.linePairs}, []int{test.start, test.end})
        if loc != test.wantLocation {
            t.Errorf("\nstartLine %d\nstartColumn: %d\nendLine: %d\nendColumn: %d\nstartLineIndex: %d\nendlineIndex %d",
                loc.startLine, loc.startColumn, loc.endLine, loc.endColumn, loc.startLineIndex, loc.endLineIndex)

            t.Error("got", loc, "want", test.wantLocation)
        }
    }
}
72
cli/detect/logging/log.go
Normal file
@ -0,0 +1,72 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package logging

import (
    "os"

    "github.com/rs/zerolog"
)

var Logger zerolog.Logger

func init() {
    // send all logs to stderr
    Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
        Level(zerolog.InfoLevel).
        With().Timestamp().Logger()
}

func With() zerolog.Context {
    return Logger.With()
}

func Trace() *zerolog.Event {
    return Logger.Trace()
}

func Debug() *zerolog.Event {
    return Logger.Debug()
}
func Info() *zerolog.Event {
    return Logger.Info()
}
func Warn() *zerolog.Event {
    return Logger.Warn()
}

func Error() *zerolog.Event {
    return Logger.Error()
}

func Err(err error) *zerolog.Event {
    return Logger.Err(err)
}

func Fatal() *zerolog.Event {
    return Logger.Fatal()
}

func Panic() *zerolog.Event {
    return Logger.Panic()
}
149
cli/detect/reader.go
Normal file
@ -0,0 +1,149 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "bufio"
    "bytes"
    "errors"
    "io"

    "github.com/Infisical/infisical-merge/detect/report"
)

// DetectReader accepts an io.Reader and a buffer size for the reader in KB
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
    reader := bufio.NewReader(r)
    buf := make([]byte, 1000*bufSize)
    findings := []report.Finding{}

    for {
        n, err := reader.Read(buf)

        // "Callers should always process the n > 0 bytes returned before considering the error err."
        // https://pkg.go.dev/io#Reader
        if n > 0 {
            // Try to split chunks across large areas of whitespace, if possible.
            peekBuf := bytes.NewBuffer(buf[:n])
            if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
                return findings, readErr
            }

            fragment := Fragment{
                Raw: peekBuf.String(),
            }
            for _, finding := range d.Detect(fragment) {
                findings = append(findings, finding)
                if d.Verbose {
                    printFinding(finding, d.NoColor)
                }
            }
        }

        if err != nil {
            if err == io.EOF {
                break
            }
            return findings, err
        }
    }

    return findings, nil
}
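DetectReader leans on the io.Reader contract its comment cites: a reader may return n > 0 bytes together with a non-nil error (including io.EOF), so the bytes must be consumed before the error is inspected. A small runnable demonstration of that contract, separate from this package (testing/iotest is used only to force the data-plus-EOF case):

```go
package main

import (
	"fmt"
	"io"
	"strings"
	"testing/iotest"
)

func main() {
	// DataErrReader delivers the final bytes and io.EOF in the same call,
	// which is exactly the case the comment in DetectReader guards against.
	r := iotest.DataErrReader(strings.NewReader("tail-bytes"))
	buf := make([]byte, 16)
	for {
		n, err := r.Read(buf)
		if n > 0 {
			fmt.Printf("consumed %q\n", buf[:n]) // process data before checking err
		}
		if err != nil {
			if err == io.EOF {
				break
			}
			panic(err)
		}
	}
}
```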

// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
//   - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
//   - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
//     once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
//	// Assume detector is an instance of *Detector and myReader implements io.Reader.
//	findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
//	// Process findings as they arrive.
//	for finding := range findingsCh {
//	    fmt.Printf("Found secret: %+v\n", finding)
//	}
//
//	// After the findings channel is closed, check the final error.
//	if err := <-errCh; err != nil {
//	    log.Fatalf("StreamDetectReader encountered an error: %v", err)
//	} else {
//	    fmt.Println("Scanning completed successfully.")
//	}
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
    findingsCh := make(chan report.Finding, 1)
    errCh := make(chan error, 1)

    go func() {
        defer close(findingsCh)
        defer close(errCh)

        reader := bufio.NewReader(r)
        buf := make([]byte, 1000*bufSize)

        for {
            n, err := reader.Read(buf)

            if n > 0 {
                peekBuf := bytes.NewBuffer(buf[:n])
                if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
                    errCh <- readErr
                    return
                }

                fragment := Fragment{Raw: peekBuf.String()}
                for _, finding := range d.Detect(fragment) {
                    findingsCh <- finding
                    if d.Verbose {
                        printFinding(finding, d.NoColor)
                    }
                }
            }

            if err != nil {
                if errors.Is(err, io.EOF) {
                    errCh <- nil
                    return
                }
                errCh <- err
                return
            }
        }
    }()

    return findingsCh, errCh
}
37
cli/detect/regexp/stdlib_regex.go
Normal file
@ -0,0 +1,37 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

//go:build !gore2regex

package regexp

import (
    re "regexp"
)

const Version = "stdlib"

type Regexp = re.Regexp

func MustCompile(str string) *re.Regexp {
    return re.MustCompile(str)
}
37
cli/detect/regexp/wasilibs_regex.go
Normal file
@ -0,0 +1,37 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

//go:build gore2regex

package regexp

import (
    re "github.com/wasilibs/go-re2"
)

const Version = "github.com/wasilibs/go-re2"

type Regexp = re.Regexp

func MustCompile(str string) *re.Regexp {
    return re.MustCompile(str)
}
@ -19,6 +19,7 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

const version = "v8.0.0"
@ -26,16 +26,24 @@ import (
    "encoding/csv"
    "io"
    "strconv"
    "strings"
)

-// writeCsv writes the list of findings to a writeCloser.
-func writeCsv(f []Finding, w io.WriteCloser) error {
-    if len(f) == 0 {
+type CsvReporter struct {
+}
+
+var _ Reporter = (*CsvReporter)(nil)
+
+func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
+    if len(findings) == 0 {
        return nil
    }
    defer w.Close()
-    cw := csv.NewWriter(w)
-    err := cw.Write([]string{"RuleID",
+
+    var (
+        cw  = csv.NewWriter(w)
+        err error
+    )
+    columns := []string{"RuleID",
        "Commit",
        "File",
        "SymlinkFile",
@ -50,12 +58,18 @@ func writeCsv(f []Finding, w io.WriteCloser) error {
        "Date",
        "Email",
        "Fingerprint",
-    })
-    if err != nil {
+        "Tags",
+    }
+    // A miserable attempt at "omitempty" so tests don't yell at me.
+    if findings[0].Link != "" {
+        columns = append(columns, "Link")
+    }
+
+    if err = cw.Write(columns); err != nil {
        return err
    }
-    for _, f := range f {
-        err = cw.Write([]string{f.RuleID,
+    for _, f := range findings {
+        row := []string{f.RuleID,
            f.Commit,
            f.File,
            f.SymlinkFile,
@ -70,8 +84,13 @@ func writeCsv(f []Finding, w io.WriteCloser) error {
            f.Date,
            f.Email,
            f.Fingerprint,
-        })
-        if err != nil {
+            strings.Join(f.Tags, " "),
+        }
+        if findings[0].Link != "" {
+            row = append(row, f.Link)
+        }
+
+        if err = cw.Write(row); err != nil {
            return err
        }
    }
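With these changes the CSV header becomes RuleID, Commit, File, SymlinkFile, ..., Date, Email, Fingerprint, Tags, and a trailing Link column is emitted only when the first finding carries a link — so, as the comment above concedes, the optional column is all-or-nothing for the whole report rather than a true per-row omitempty.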
@ -23,13 +23,17 @@
package report

import (
+    "math"
    "strings"
)

// Finding contains information about strings that
// have been captured by a tree-sitter query.
type Finding struct {
+    // Rule is the name of the rule that was matched
+    RuleID      string
    Description string

    StartLine   int
    EndLine     int
    StartColumn int
@ -47,6 +51,7 @@ type Finding struct {
    File        string
    SymlinkFile string
    Commit      string
+    Link        string `json:",omitempty"`

    // Entropy is the shannon entropy of Value
    Entropy float32
@ -57,16 +62,31 @@ type Finding struct {
    Message string
    Tags    []string

-    // Rule is the name of the rule that was matched
-    RuleID string
-
-    // unique identifer
+    // unique identifier
    Fingerprint string
}

// Redact removes sensitive information from a finding.
-func (f *Finding) Redact() {
-    f.Line = strings.Replace(f.Line, f.Secret, "REDACTED", -1)
-    f.Match = strings.Replace(f.Match, f.Secret, "REDACTED", -1)
-    f.Secret = "REDACTED"
+func (f *Finding) Redact(percent uint) {
+    secret := maskSecret(f.Secret, percent)
+    if percent >= 100 {
+        secret = "REDACTED"
+    }
+    f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
+    f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
+    f.Secret = secret
+}
+
+func maskSecret(secret string, percent uint) string {
+    if percent > 100 {
+        percent = 100
+    }
+    len := float64(len(secret))
+    if len <= 0 {
+        return secret
+    }
+    prc := float64(100 - percent)
+    lth := int64(math.RoundToEven(len * prc / float64(100)))
+
+    return secret[:lth] + "..."
+}
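As a worked example of the new percentage-based redaction: for a 20-character secret, Redact(90) keeps math.RoundToEven(20 * (100-90) / 100) = 2 leading characters and appends "...", while any percent of 100 or more replaces the secret with the fixed "REDACTED" placeholder, matching the old behavior.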
@ -27,10 +27,12 @@ import (
    "io"
)

-func writeJson(findings []Finding, w io.WriteCloser) error {
-    if len(findings) == 0 {
-        findings = []Finding{}
-    }
+type JsonReporter struct {
+}
+
+var _ Reporter = (*JsonReporter)(nil)
+
+func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
    encoder := json.NewEncoder(w)
    encoder.SetIndent("", " ")
    return encoder.Encode(findings)
cli/detect/report/junit.go (new file, 129 lines)
@ -0,0 +1,129 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
    "encoding/json"
    "encoding/xml"
    "fmt"
    "io"
    "strconv"
)

type JunitReporter struct {
}

var _ Reporter = (*JunitReporter)(nil)

func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
    testSuites := TestSuites{
        TestSuites: getTestSuites(findings),
    }

    io.WriteString(w, xml.Header)
    encoder := xml.NewEncoder(w)
    encoder.Indent("", "\t")
    return encoder.Encode(testSuites)
}

func getTestSuites(findings []Finding) []TestSuite {
    return []TestSuite{
        {
            Failures:  strconv.Itoa(len(findings)),
            Name:      "gitleaks",
            Tests:     strconv.Itoa(len(findings)),
            TestCases: getTestCases(findings),
            Time:      "",
        },
    }
}

func getTestCases(findings []Finding) []TestCase {
    testCases := []TestCase{}
    for _, f := range findings {
        testCase := TestCase{
            Classname: f.Description,
            Failure:   getFailure(f),
            File:      f.File,
            Name:      getMessage(f),
            Time:      "",
        }
        testCases = append(testCases, testCase)
    }
    return testCases
}

func getFailure(f Finding) Failure {
    return Failure{
        Data:    getData(f),
        Message: getMessage(f),
        Type:    f.Description,
    }
}

func getData(f Finding) string {
    data, err := json.MarshalIndent(f, "", "\t")
    if err != nil {
        fmt.Println(err)
        return ""
    }
    return string(data)
}

func getMessage(f Finding) string {
    if f.Commit == "" {
        return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
    }

    return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}

type TestSuites struct {
    XMLName    xml.Name `xml:"testsuites"`
    TestSuites []TestSuite
}

type TestSuite struct {
    XMLName   xml.Name   `xml:"testsuite"`
    Failures  string     `xml:"failures,attr"`
    Name      string     `xml:"name,attr"`
    Tests     string     `xml:"tests,attr"`
    TestCases []TestCase `xml:"testcase"`
    Time      string     `xml:"time,attr"`
}

type TestCase struct {
    XMLName   xml.Name `xml:"testcase"`
    Classname string   `xml:"classname,attr"`
    Failure   Failure  `xml:"failure"`
    File      string   `xml:"file,attr"`
    Name      string   `xml:"name,attr"`
    Time      string   `xml:"time,attr"`
}

type Failure struct {
    XMLName xml.Name `xml:"failure"`
    Data    string   `xml:",chardata"`
    Message string   `xml:"message,attr"`
    Type    string   `xml:"type,attr"`
}
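A minimal sketch of driving the JUnit reporter above (the import path and the sample finding are assumptions for illustration); each finding becomes a testcase element whose failure body is the JSON-marshalled finding:

package main

import (
    "os"

    // Assumed import path for the report package in this diff.
    "github.com/Infisical/infisical-merge/detect/report"
)

func main() {
    findings := []report.Finding{
        {RuleID: "generic-api-key", File: "auth.py", StartLine: 12},
    }
    r := &report.JunitReporter{}
    // *os.File satisfies io.WriteCloser, so stdout works here.
    if err := r.Write(os.Stdout, findings); err != nil {
        panic(err)
    }
}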
@ -19,30 +19,20 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

-import "testing"
+import (
+    "io"
+)

-func TestRedact(t *testing.T) {
-    tests := []struct {
-        findings []Finding
-        redact   bool
-    }{
-        {
-            redact: true,
-            findings: []Finding{
-                {
-                    Secret: "line containing secret",
-                    Match:  "secret",
-                },
-            }},
-    }
-    for _, test := range tests {
-        for _, f := range test.findings {
-            f.Redact()
-            if f.Secret != "REDACTED" {
-                t.Error("redact not redacting: ", f.Secret)
-            }
-        }
-    }
-}
+const (
+    // https://cwe.mitre.org/data/definitions/798.html
+    CWE              = "CWE-798"
+    CWE_DESCRIPTION  = "Use of Hard-coded Credentials"
+    StdoutReportPath = "-"
+)
+
+type Reporter interface {
+    Write(w io.WriteCloser, findings []Finding) error
+}
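This small Reporter interface is the seam every format above plugs into; a custom format only needs one method. A hedged sketch of a minimal line-oriented reporter, written as if inside package report (fmt would need to be imported; the type is invented for illustration):

// lineReporter prints one "file:line rule-id" line per finding.
type lineReporter struct{}

var _ Reporter = (*lineReporter)(nil)

func (l *lineReporter) Write(w io.WriteCloser, findings []Finding) error {
    defer w.Close()
    for _, f := range findings {
        if _, err := fmt.Fprintf(w, "%s:%d %s\n", f.File, f.StartLine, f.RuleID); err != nil {
            return err
        }
    }
    return nil
}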
@ -27,14 +27,20 @@ import (
    "fmt"
    "io"

-    "github.com/Infisical/infisical-merge/config"
+    "github.com/Infisical/infisical-merge/detect/config"
)

-func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
+type SarifReporter struct {
+    OrderedRules []config.Rule
+}
+
+var _ Reporter = (*SarifReporter)(nil)
+
+func (r *SarifReporter) Write(w io.WriteCloser, findings []Finding) error {
    sarif := Sarif{
        Schema:  "https://json.schemastore.org/sarif-2.1.0.json",
        Version: "2.1.0",
-        Runs:    getRuns(cfg, findings),
+        Runs:    r.getRuns(findings),
    }

    encoder := json.NewEncoder(w)
@ -42,22 +48,22 @@ func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
    return encoder.Encode(sarif)
}

-func getRuns(cfg config.Config, findings []Finding) []Runs {
+func (r *SarifReporter) getRuns(findings []Finding) []Runs {
    return []Runs{
        {
-            Tool:    getTool(cfg),
+            Tool:    r.getTool(),
            Results: getResults(findings),
        },
    }
}

-func getTool(cfg config.Config) Tool {
+func (r *SarifReporter) getTool() Tool {
    tool := Tool{
        Driver: Driver{
            Name:            driver,
            SemanticVersion: version,
-            InformationUri:  "https://github.com/Infisical/infisical",
-            Rules:           getRules(cfg),
+            InformationUri:  "https://github.com/gitleaks/gitleaks",
+            Rules:           r.getRules(),
        },
    }

@ -73,26 +79,15 @@ func hasEmptyRules(tool Tool) bool {
    return len(tool.Driver.Rules) == 0
}

-func getRules(cfg config.Config) []Rules {
+func (r *SarifReporter) getRules() []Rules {
    // TODO for _, rule := range cfg.Rules {
    var rules []Rules
-    for _, rule := range cfg.OrderedRules() {
-        shortDescription := ShortDescription{
-            Text: rule.Description,
-        }
-        if rule.Regex != nil {
-            shortDescription = ShortDescription{
-                Text: rule.Regex.String(),
-            }
-        } else if rule.Path != nil {
-            shortDescription = ShortDescription{
-                Text: rule.Path.String(),
-            }
-        }
+    for _, rule := range r.OrderedRules {
        rules = append(rules, Rules{
-            ID:          rule.RuleID,
-            Name:        rule.Description,
-            Description: shortDescription,
+            ID: rule.RuleID,
+            Description: ShortDescription{
+                Text: rule.Description,
+            },
        })
    }
    return rules
@ -125,6 +120,9 @@ func getResults(findings []Finding) []Results {
            Date:   f.Date,
            Author: f.Author,
        },
+        Properties: Properties{
+            Tags: f.Tags,
+        },
    }
    results = append(results, r)
}
@ -180,7 +178,6 @@ type FullDescription struct {

type Rules struct {
    ID          string           `json:"id"`
-    Name        string           `json:"name"`
    Description ShortDescription `json:"shortDescription"`
}

@ -224,11 +221,16 @@ type Locations struct {
    PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}

+type Properties struct {
+    Tags []string `json:"tags"`
+}
+
type Results struct {
    Message   Message     `json:"message"`
    RuleId    string      `json:"ruleId"`
    Locations []Locations `json:"locations"`
    PartialFingerPrints `json:"partialFingerprints"`
+    Properties Properties `json:"properties"`
}

type Runs struct {
cli/detect/report/template.go (new file, 68 lines)
@ -0,0 +1,68 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
    "fmt"
    "io"
    "os"
    "text/template"

    "github.com/Masterminds/sprig/v3"
)

type TemplateReporter struct {
    template *template.Template
}

var _ Reporter = (*TemplateReporter)(nil)

func NewTemplateReporter(templatePath string) (*TemplateReporter, error) {
    if templatePath == "" {
        return nil, fmt.Errorf("template path cannot be empty")
    }

    file, err := os.ReadFile(templatePath)
    if err != nil {
        return nil, fmt.Errorf("error reading file: %w", err)
    }
    templateText := string(file)

    // TODO: Add helper functions like escaping for JSON, XML, etc.
    t := template.New("custom")
    t = t.Funcs(sprig.TxtFuncMap())
    t, err = t.Parse(templateText)
    if err != nil {
        return nil, fmt.Errorf("error parsing file: %w", err)
    }
    return &TemplateReporter{template: t}, nil
}

// Write renders the findings using the user-provided template.
// https://www.digitalocean.com/community/tutorials/how-to-use-templates-in-go
func (t *TemplateReporter) Write(w io.WriteCloser, findings []Finding) error {
    if err := t.template.Execute(w, findings); err != nil {
        return err
    }
    return nil
}
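A sketch of the consuming side, with a hypothetical findings.tmpl; because sprig's text functions are registered, helpers such as upper are available inside the template:

package main

import (
    "os"

    // Assumed import path for the report package in this diff.
    "github.com/Infisical/infisical-merge/detect/report"
)

func main() {
    // findings.tmpl (hypothetical) could contain, for example:
    //   {{ range . }}{{ .RuleID | upper }} {{ .File }}:{{ .StartLine }}
    //   {{ end }}
    reporter, err := report.NewTemplateReporter("findings.tmpl")
    if err != nil {
        panic(err)
    }
    findings := []report.Finding{{RuleID: "generic-api-key", File: "auth.py", StartLine: 12}}
    if err := reporter.Write(os.Stdout, findings); err != nil {
        panic(err)
    }
}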
cli/detect/sources/directory.go (new file, 127 lines)
@ -0,0 +1,127 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package sources

import (
    "io/fs"
    "os"
    "path/filepath"
    "runtime"

    "github.com/fatih/semgroup"

    "github.com/Infisical/infisical-merge/detect/config"
    "github.com/Infisical/infisical-merge/detect/logging"
)

type ScanTarget struct {
    Path    string
    Symlink string
}

var isWindows = runtime.GOOS == "windows"

func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
    paths := make(chan ScanTarget)
    s.Go(func() error {
        defer close(paths)
        return filepath.Walk(source,
            func(path string, fInfo os.FileInfo, err error) error {
                logger := logging.With().Str("path", path).Logger()

                if err != nil {
                    if os.IsPermission(err) {
                        // This seems to only fail on directories at this stage.
                        logger.Warn().Msg("Skipping directory: permission denied")
                        return filepath.SkipDir
                    }
                    return err
                }

                // Empty; nothing to do here.
                if fInfo.Size() == 0 {
                    return nil
                }

                // Unwrap symlinks, if |followSymlinks| is set.
                scanTarget := ScanTarget{
                    Path: path,
                }
                if fInfo.Mode().Type() == fs.ModeSymlink {
                    if !followSymlinks {
                        logger.Debug().Msg("Skipping symlink")
                        return nil
                    }

                    realPath, err := filepath.EvalSymlinks(path)
                    if err != nil {
                        return err
                    }

                    realPathFileInfo, _ := os.Stat(realPath)
                    if realPathFileInfo.IsDir() {
                        logger.Warn().Str("target", realPath).Msg("Skipping symlinked directory")
                        return nil
                    }

                    scanTarget.Path = realPath
                    scanTarget.Symlink = path
                }

                // TODO: Also run this check against the resolved symlink?
                var skip bool
                for _, a := range allowlists {
                    skip = a.PathAllowed(path) ||
                        // TODO: Remove this in v9.
                        // This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
                        (isWindows && a.PathAllowed(filepath.ToSlash(path)))
                    if skip {
                        break
                    }
                }

                if fInfo.IsDir() {
                    // Directory
                    if skip {
                        logger.Debug().Msg("Skipping directory due to global allowlist")
                        return filepath.SkipDir
                    }

                    if fInfo.Name() == ".git" {
                        // Don't scan .git directories.
                        // TODO: Add this to the config allowlist, instead of hard-coding it.
                        return filepath.SkipDir
                    }
                } else {
                    // File
                    if skip {
                        logger.Debug().Msg("Skipping file due to global allowlist")
                        return nil
                    }

                    paths <- scanTarget
                }
                return nil
            })
    })
    return paths, nil
}
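Since the walk runs in a semgroup goroutine that feeds an unbuffered channel, a consumer has to drain the channel before calling Wait; a minimal sketch (import paths assumed):

package main

import (
    "context"
    "fmt"

    "github.com/fatih/semgroup"

    // Assumed import path for the sources package in this diff.
    "github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
    g := semgroup.NewGroup(context.Background(), 4)
    targets, err := sources.DirectoryTargets(".", g, false, nil)
    if err != nil {
        panic(err)
    }
    for t := range targets { // drain fully so the walker never blocks
        fmt.Println(t.Path, t.Symlink)
    }
    if err := g.Wait(); err != nil { // surfaces errors from the walk goroutine
        panic(err)
    }
}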
cli/detect/sources/git.go (new file, 211 lines)
@ -0,0 +1,211 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package sources

import (
    "bufio"
    "errors"
    "io"
    "os/exec"
    "path/filepath"
    "regexp"
    "strings"

    "github.com/gitleaks/go-gitdiff/gitdiff"

    "github.com/Infisical/infisical-merge/detect/logging"
)

var quotedOptPattern = regexp.MustCompile(`^(?:"[^"]+"|'[^']+')$`)

// GitCmd helps to work with Git's output.
type GitCmd struct {
    cmd         *exec.Cmd
    diffFilesCh <-chan *gitdiff.File
    errCh       <-chan error
}

// NewGitLogCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from the channels until they are closed, then call
// `(*GitCmd).Wait()` in order to release resources.
func NewGitLogCmd(source string, logOpts string) (*GitCmd, error) {
    sourceClean := filepath.Clean(source)
    var cmd *exec.Cmd
    if logOpts != "" {
        args := []string{"-C", sourceClean, "log", "-p", "-U0"}

        // Ensure that the user-provided |logOpts| aren't wrapped in quotes.
        // https://github.com/gitleaks/gitleaks/issues/1153
        userArgs := strings.Split(logOpts, " ")
        var quotedOpts []string
        for _, element := range userArgs {
            if quotedOptPattern.MatchString(element) {
                quotedOpts = append(quotedOpts, element)
            }
        }
        if len(quotedOpts) > 0 {
            logging.Warn().Msgf("the following `--log-opts` values may not work as expected: %v\n\tsee https://github.com/gitleaks/gitleaks/issues/1153 for more information", quotedOpts)
        }

        args = append(args, userArgs...)
        cmd = exec.Command("git", args...)
    } else {
        cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
            "--full-history", "--all")
    }

    logging.Debug().Msgf("executing: %s", cmd.String())

    stdout, err := cmd.StdoutPipe()
    if err != nil {
        return nil, err
    }
    stderr, err := cmd.StderrPipe()
    if err != nil {
        return nil, err
    }
    if err := cmd.Start(); err != nil {
        return nil, err
    }

    errCh := make(chan error)
    go listenForStdErr(stderr, errCh)

    gitdiffFiles, err := gitdiff.Parse(stdout)
    if err != nil {
        return nil, err
    }

    return &GitCmd{
        cmd:         cmd,
        diffFilesCh: gitdiffFiles,
        errCh:       errCh,
    }, nil
}

// NewGitDiffCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from the channels until they are closed, then call
// `(*GitCmd).Wait()` in order to release resources.
func NewGitDiffCmd(source string, staged bool) (*GitCmd, error) {
    sourceClean := filepath.Clean(source)
    var cmd *exec.Cmd
    cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff", ".")
    if staged {
        cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff",
            "--staged", ".")
    }
    logging.Debug().Msgf("executing: %s", cmd.String())

    stdout, err := cmd.StdoutPipe()
    if err != nil {
        return nil, err
    }
    stderr, err := cmd.StderrPipe()
    if err != nil {
        return nil, err
    }
    if err := cmd.Start(); err != nil {
        return nil, err
    }

    errCh := make(chan error)
    go listenForStdErr(stderr, errCh)

    gitdiffFiles, err := gitdiff.Parse(stdout)
    if err != nil {
        return nil, err
    }

    return &GitCmd{
        cmd:         cmd,
        diffFilesCh: gitdiffFiles,
        errCh:       errCh,
    }, nil
}

// DiffFilesCh returns a channel with *gitdiff.File.
func (c *GitCmd) DiffFilesCh() <-chan *gitdiff.File {
    return c.diffFilesCh
}

// ErrCh returns a channel that could produce an error if there is something in stderr.
func (c *GitCmd) ErrCh() <-chan error {
    return c.errCh
}

// Wait waits for the command to exit and waits for any copying to
// stdin or copying from stdout or stderr to complete.
//
// Wait also closes underlying stdout and stderr.
func (c *GitCmd) Wait() (err error) {
    return c.cmd.Wait()
}

// listenForStdErr listens for stderr output from git, prints it to stdout,
// sends to errCh and closes it.
func listenForStdErr(stderr io.ReadCloser, errCh chan<- error) {
    defer close(errCh)

    var errEncountered bool

    scanner := bufio.NewScanner(stderr)
    for scanner.Scan() {
        // if git throws one of the following errors:
        //
        //  exhaustive rename detection was skipped due to too many files.
        //  you may want to set your diff.renameLimit variable to at least
        //  (some large number) and retry the command.
        //
        //  inexact rename detection was skipped due to too many files.
        //  you may want to set your diff.renameLimit variable to at least
        //  (some large number) and retry the command.
        //
        //  Auto packing the repository in background for optimum performance.
        //  See "git help gc" for manual housekeeping.
        //
        // we skip exiting the program as git log -p/git diff will continue
        // to send data to stdout and finish executing. This next bit of
        // code prevents gitleaks from stopping mid scan if this error is
        // encountered
        if strings.Contains(scanner.Text(),
            "exhaustive rename detection was skipped") ||
            strings.Contains(scanner.Text(),
                "inexact rename detection was skipped") ||
            strings.Contains(scanner.Text(),
                "you may want to set your diff.renameLimit") ||
            strings.Contains(scanner.Text(),
                "See \"git help gc\" for manual housekeeping") ||
            strings.Contains(scanner.Text(),
                "Auto packing the repository in background for optimum performance") {
            logging.Warn().Msg(scanner.Text())
        } else {
            logging.Error().Msgf("[git] %s", scanner.Text())
            errEncountered = true
        }
    }

    if errEncountered {
        errCh <- errors.New("stderr is not empty")
        return
    }
}
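Per the contract spelled out in the comments above — read both channels to closure, then release resources with Wait — a consumer sketch (import paths assumed):

package main

import (
    "fmt"

    // Assumed import path for the sources package in this diff.
    "github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
    gitCmd, err := sources.NewGitLogCmd(".", "")
    if err != nil {
        panic(err)
    }
    for file := range gitCmd.DiffFilesCh() {
        fmt.Println(file.NewName) // one *gitdiff.File per changed file
    }
    if err := <-gitCmd.ErrCh(); err != nil {
        fmt.Println("git wrote to stderr:", err) // receive yields nil once closed
    }
    if err := gitCmd.Wait(); err != nil {
        panic(err)
    }
}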
@ -26,20 +26,21 @@ import (
    // "encoding/json"
    "fmt"
    "math"
    "path/filepath"
    "strings"
    "time"

+    "github.com/Infisical/infisical-merge/detect/cmd/scm"
+    "github.com/Infisical/infisical-merge/detect/logging"
+    "github.com/Infisical/infisical-merge/detect/report"
+
    "github.com/charmbracelet/lipgloss"
-
-    "github.com/Infisical/infisical-merge/report"
-
    "github.com/gitleaks/go-gitdiff/gitdiff"
    "github.com/rs/zerolog/log"
)

// augmentGitFinding updates the start and end line numbers of a finding to include the
// delta from the git diff
-func augmentGitFinding(finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
+func augmentGitFinding(remote *RemoteInfo, finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
    if !strings.HasPrefix(finding.Match, "file detected") {
        finding.StartLine += int(textFragment.NewPosition)
        finding.EndLine += int(textFragment.NewPosition)
@ -47,16 +48,76 @@ func augmentGitFinding(finding report.Finding, textFragment *gitdiff.TextFragmen
    if f.PatchHeader != nil {
        finding.Commit = f.PatchHeader.SHA
-        finding.Message = f.PatchHeader.Message()
        if f.PatchHeader.Author != nil {
            finding.Author = f.PatchHeader.Author.Name
            finding.Email = f.PatchHeader.Author.Email
        }
        finding.Date = f.PatchHeader.AuthorDate.UTC().Format(time.RFC3339)
+        finding.Message = f.PatchHeader.Message()
+        // Results from `git diff` shouldn't have a link.
+        if finding.Commit != "" {
+            finding.Link = createScmLink(remote.Platform, remote.Url, finding)
+        }
    }
    return finding
}

+var linkCleaner = strings.NewReplacer(
+    " ", "%20",
+    "%", "%25",
+)
+
+func createScmLink(scmPlatform scm.Platform, remoteUrl string, finding report.Finding) string {
+    if scmPlatform == scm.UnknownPlatform || scmPlatform == scm.NoPlatform {
+        return ""
+    }
+
+    // Clean the path.
+    var (
+        filePath = linkCleaner.Replace(finding.File)
+        ext      = strings.ToLower(filepath.Ext(filePath))
+    )
+
+    switch scmPlatform {
+    case scm.GitHubPlatform:
+        link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
+        if ext == ".ipynb" || ext == ".md" {
+            link += "?plain=1"
+        }
+        if finding.StartLine != 0 {
+            link += fmt.Sprintf("#L%d", finding.StartLine)
+        }
+        if finding.EndLine != finding.StartLine {
+            link += fmt.Sprintf("-L%d", finding.EndLine)
+        }
+        return link
+    case scm.GitLabPlatform:
+        link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
+        if finding.StartLine != 0 {
+            link += fmt.Sprintf("#L%d", finding.StartLine)
+        }
+        if finding.EndLine != finding.StartLine {
+            link += fmt.Sprintf("-%d", finding.EndLine)
+        }
+        return link
+    case scm.AzureDevOpsPlatform:
+        link := fmt.Sprintf("%s/commit/%s?path=/%s", remoteUrl, finding.Commit, filePath)
+        // Add line information if applicable
+        if finding.StartLine != 0 {
+            link += fmt.Sprintf("&line=%d", finding.StartLine)
+        }
+        if finding.EndLine != finding.StartLine {
+            link += fmt.Sprintf("&lineEnd=%d", finding.EndLine)
+        }
+        // This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
+        link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
+        return link
+    default:
+        // This should never happen.
+        return ""
+    }
+}
+
// shannonEntropy calculates the entropy of data using the formula defined here:
// https://en.wiktionary.org/wiki/Shannon_entropy
// Another way to think about what this is doing is calculating the number of bits
@ -82,7 +143,7 @@ func shannonEntropy(data string) (entropy float64) {
}

// filter will dedupe and redact findings
-func filter(findings []report.Finding, redact bool) []report.Finding {
+func filter(findings []report.Finding, redact uint) []report.Finding {
    var retFindings []report.Finding
    for _, f := range findings {
        include := true
@ -96,15 +157,15 @@ func filter(findings []report.Finding, redact uint) []report.Finding {

                genericMatch := strings.Replace(f.Match, f.Secret, "REDACTED", -1)
                betterMatch := strings.Replace(fPrime.Match, fPrime.Secret, "REDACTED", -1)
-                log.Trace().Msgf("skipping %s finding (%s), %s rule takes precendence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
+                logging.Trace().Msgf("skipping %s finding (%s), %s rule takes precedence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
                include = false
                break
            }
        }
    }

-        if redact {
-            f.Redact()
+        if redact > 0 {
+            f.Redact(redact)
        }
        if include {
            retFindings = append(retFindings, f)
@ -152,7 +213,7 @@ func printFinding(f report.Finding, noColor bool) {

    lineEndIdx := matchInLineIDX + len(f.Match)
    if len(f.Line)-1 <= lineEndIdx {
-        lineEndIdx = len(f.Line) - 1
+        lineEndIdx = len(f.Line)
    }

    lineEnd := f.Line[lineEndIdx:]
@ -184,6 +245,9 @@ func printFinding(f report.Finding, noColor bool) {
        fmt.Println("")
        return
    }
+    if len(f.Tags) > 0 {
+        fmt.Printf("%-12s %s\n", "Tags:", f.Tags)
+    }
    fmt.Printf("%-12s %s\n", "File:", f.File)
    fmt.Printf("%-12s %d\n", "Line:", f.StartLine)
    if f.Commit == "" {
@ -196,16 +260,12 @@ func printFinding(f report.Finding, noColor bool) {
    fmt.Printf("%-12s %s\n", "Email:", f.Email)
    fmt.Printf("%-12s %s\n", "Date:", f.Date)
    fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
+    if f.Link != "" {
+        fmt.Printf("%-12s %s\n", "Link:", f.Link)
+    }
    fmt.Println("")
}

-func containsDigit(s string) bool {
-    for _, c := range s {
-        switch c {
-        case '1', '2', '3', '4', '5', '6', '7', '8', '9':
-            return true
-        }
-    }
-    return false
-}
+func isWhitespace(ch byte) bool {
+    return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
+}
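As a worked example of the links this enables: for the GitHub platform with remote https://github.com/org/repo, a finding at auth.py lines 12-14 of commit abc123 yields https://github.com/org/repo/blob/abc123/auth.py#L12-L14, with ?plain=1 inserted before the fragment for .md and .ipynb files so the line anchor resolves on the raw view.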
cli/go.mod (14 changed lines)
@ -10,7 +10,7 @@ require (
    github.com/creack/pty v1.1.21
    github.com/denisbrodbeck/machineid v1.0.1
    github.com/fatih/semgroup v1.2.0
-    github.com/gitleaks/go-gitdiff v0.8.0
+    github.com/gitleaks/go-gitdiff v0.9.1
    github.com/h2non/filetype v1.1.3
    github.com/infisical/go-sdk v0.5.92
    github.com/infisical/infisical-kmip v0.3.5
@ -42,6 +42,11 @@ require (
    cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
    cloud.google.com/go/compute/metadata v0.4.0 // indirect
    cloud.google.com/go/iam v1.1.11 // indirect
+    dario.cat/mergo v1.0.1 // indirect
+    github.com/BobuSumisu/aho-corasick v1.0.3 // indirect
+    github.com/Masterminds/goutils v1.1.1 // indirect
+    github.com/Masterminds/semver/v3 v3.3.0 // indirect
+    github.com/Masterminds/sprig/v3 v3.3.0 // indirect
    github.com/alessio/shellescape v1.4.1 // indirect
    github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
    github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect
@ -74,17 +79,21 @@ require (
    github.com/golang/protobuf v1.5.4 // indirect
    github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
    github.com/google/s2a-go v0.1.7 // indirect
    github.com/google/uuid v1.6.0 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
    github.com/googleapis/gax-go/v2 v2.12.5 // indirect
    github.com/gosimple/slug v1.15.0 // indirect
    github.com/gosimple/unidecode v1.0.1 // indirect
    github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
    github.com/hashicorp/hcl v1.0.0 // indirect
+    github.com/huandu/xstrings v1.5.0 // indirect
    github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
    github.com/magiconair/properties v1.8.5 // indirect
    github.com/mattn/go-colorable v0.1.13 // indirect
    github.com/mattn/go-runewidth v0.0.15 // indirect
+    github.com/mitchellh/copystructure v1.2.0 // indirect
    github.com/mitchellh/mapstructure v1.4.1 // indirect
+    github.com/mitchellh/reflectwalk v1.0.2 // indirect
    github.com/mtibben/percent v0.2.1 // indirect
    github.com/muesli/mango v0.1.0 // indirect
    github.com/muesli/mango-pflag v0.1.0 // indirect
@ -98,8 +107,9 @@ require (
    github.com/pkg/errors v0.9.1 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/rivo/uniseg v0.2.0 // indirect
+    github.com/shopspring/decimal v1.4.0 // indirect
    github.com/spf13/afero v1.6.0 // indirect
-    github.com/spf13/cast v1.3.1 // indirect
+    github.com/spf13/cast v1.7.0 // indirect
    github.com/spf13/jwalterweatherman v1.1.0 // indirect
    github.com/subosito/gotenv v1.2.0 // indirect
    github.com/wlynxg/anet v0.0.5 // indirect
cli/go.sum (22 changed lines)
@ -44,13 +44,23 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
+dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
+dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+github.com/BobuSumisu/aho-corasick v1.0.3 h1:uuf+JHwU9CHP2Vx+wAy6jcksJThhJS9ehR8a+4nPE9g=
+github.com/BobuSumisu/aho-corasick v1.0.3/go.mod h1:hm4jLcvZKI2vRF2WDU1N4p/jpWtpOzp3nLmi9AzX/XE=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Infisical/go-keyring v1.0.2 h1:dWOkI/pB/7RocfSJgGXbXxLDcVYsdslgjEPmVhb+nl8=
github.com/Infisical/go-keyring v1.0.2/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
github.com/Infisical/turn/v4 v4.0.1 h1:omdelNsnFfzS5cu86W5OBR68by68a8sva4ogR0lQQnw=
github.com/Infisical/turn/v4 v4.0.1/go.mod h1:pMMKP/ieNAG/fN5cZiN4SDuyKsXtNTr0ccN7IToA1zs=
+github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
+github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
+github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
+github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
+github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs=
+github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0=
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@ -142,6 +152,8 @@ github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gitleaks/go-gitdiff v0.8.0 h1:7aExTZm+K/M/EQKOyYcub8rIAdWK6ONxPGuRzxmWW+0=
github.com/gitleaks/go-gitdiff v0.8.0/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF1uXV9ZyNvrA=
+github.com/gitleaks/go-gitdiff v0.9.1 h1:ni6z6/3i9ODT685OLCTf+s/ERlWUNWQF4x1pvoNICw0=
+github.com/gitleaks/go-gitdiff v0.9.1/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF1uXV9ZyNvrA=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@ -273,6 +285,8 @@ github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
+github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
+github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
@ -315,6 +329,8 @@ github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZ
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
+github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
+github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
@ -324,6 +340,8 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
+github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@ -393,6 +411,8 @@ github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
+github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
+github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@ -402,6 +422,8 @@ github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
+github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
+github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
@ -32,10 +32,13 @@ import (
    "strings"
    "time"

-    "github.com/Infisical/infisical-merge/config"
    "github.com/Infisical/infisical-merge/detect"
+    "github.com/Infisical/infisical-merge/detect/cmd/scm"
+    "github.com/Infisical/infisical-merge/detect/config"
+    "github.com/Infisical/infisical-merge/detect/logging"
+    "github.com/Infisical/infisical-merge/detect/report"
+    "github.com/Infisical/infisical-merge/detect/sources"
    "github.com/Infisical/infisical-merge/packages/util"
-    "github.com/Infisical/infisical-merge/report"
    "github.com/manifoldco/promptui"
    "github.com/posthog/posthog-go"
    "github.com/rs/zerolog/log"
@ -240,9 +243,17 @@ var scanCmd = &cobra.Command{
            log.Fatal().Err(err).Msg("")
        }
-        // set redact flag
-        if detector.Redact, err = cmd.Flags().GetBool("redact"); err != nil {
+
+        redactFlag, err := cmd.Flags().GetBool("redact")
+        if err != nil {
            log.Fatal().Err(err).Msg("")
        }
+        if redactFlag {
+            detector.Redact = 100
+        } else {
+            detector.Redact = 0
+        }

        if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
            log.Fatal().Err(err).Msg("")
        }
@ -293,31 +304,49 @@ var scanCmd = &cobra.Command{

        // start the detector scan
        if noGit {
-            findings, err = detector.DetectFiles(source)
-            if err != nil {
+            paths, err := sources.DirectoryTargets(
+                source,
+                detector.Sema,
+                detector.FollowSymlinks,
+                detector.Config.Allowlists,
+            )
+            if err != nil {
+                logging.Fatal().Err(err).Send()
+            }
+
+            if findings, err = detector.DetectFiles(paths); err != nil {
                // don't exit on error, just log it
-                log.Error().Err(err).Msg("")
+                logging.Error().Err(err).Msg("failed scan directory")
            }
        } else if fromPipe {
-            findings, err = detector.DetectReader(os.Stdin, 10)
-            if err != nil {
+            if findings, err = detector.DetectReader(os.Stdin, 10); err != nil {
                // log fatal to exit, no need to continue since a report
                // will not be generated when scanning from a pipe...for now
-                log.Fatal().Err(err).Msg("")
+                logging.Fatal().Err(err).Msg("failed scan input from stdin")
            }
        } else {
+            var (
+                gitCmd      *sources.GitCmd
+                scmPlatform scm.Platform
+                remote      *detect.RemoteInfo
+            )
+
            var logOpts string
            logOpts, err = cmd.Flags().GetString("log-opts")
            if err != nil {
                log.Fatal().Err(err).Msg("")
            }
-            findings, err = detector.DetectGit(source, logOpts, detect.DetectType)
-            if err != nil {
+            if gitCmd, err = sources.NewGitLogCmd(source, logOpts); err != nil {
+                logging.Fatal().Err(err).Msg("could not create Git cmd")
+            }
+            if scmPlatform, err = scm.PlatformFromString("github"); err != nil {
+                logging.Fatal().Err(err).Send()
+            }
+            remote = detect.NewRemoteInfo(scmPlatform, source)
+
+            if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
                // don't exit on error, just log it
-                log.Error().Err(err).Msg("")
+                logging.Error().Err(err).Msg("failed to scan Git repository")
            }
        }

        // log info about the scan
        if err == nil {
            log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
@ -341,9 +370,7 @@ var scanCmd = &cobra.Command{
        reportPath, _ := cmd.Flags().GetString("report-path")
        ext, _ := cmd.Flags().GetString("report-format")
        if reportPath != "" {
-            if err := report.Write(findings, cfg, ext, reportPath); err != nil {
-                log.Fatal().Err(err).Msg("could not write")
-            }
+            reportFindings(findings, reportPath, ext, &cfg)
        }

        if err != nil {
@ -375,7 +402,6 @@ var scanGitChangesCmd = &cobra.Command{
        cfg.Path, _ = cmd.Flags().GetString("config")
        exitCode, _ := cmd.Flags().GetInt("exit-code")
        staged, _ := cmd.Flags().GetBool("staged")
-        start := time.Now()

        // Setup detector
        detector := detect.NewDetector(cfg)
@ -397,9 +423,17 @@ var scanGitChangesCmd = &cobra.Command{
            log.Fatal().Err(err).Msg("")
        }
-        // set redact flag
-        if detector.Redact, err = cmd.Flags().GetBool("redact"); err != nil {
+
+        redactFlag, err := cmd.Flags().GetBool("redact")
+        if err != nil {
            log.Fatal().Err(err).Msg("")
        }
+        if redactFlag {
+            detector.Redact = 100
+        } else {
+            detector.Redact = 0
+        }

        if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
            log.Fatal().Err(err).Msg("")
        }
@ -414,32 +448,22 @@ var scanGitChangesCmd = &cobra.Command{
            }
        }

-        // get log options for git scan
-        logOpts, err := cmd.Flags().GetString("log-opts")
-        if err != nil {
-            log.Fatal().Err(err).Msg("")
-        }
-
        log.Info().Msgf("scanning for exposed secrets...")

-        // start git scan
-        var findings []report.Finding
-        if staged {
-            findings, err = detector.DetectGit(source, logOpts, detect.ProtectStagedType)
-        } else {
-            findings, err = detector.DetectGit(source, logOpts, detect.ProtectType)
-        }
-        if err != nil {
-            // don't exit on error, just log it
-            log.Error().Err(err).Msg("")
-        }
-
-        // log info about the scan
-        log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
-        if len(findings) != 0 {
-            log.Warn().Msgf("leaks found: %d", len(findings))
-        } else {
-            log.Info().Msg("no leaks found")
-        }
+        var (
+            findings []report.Finding
+            gitCmd   *sources.GitCmd
+            remote   *detect.RemoteInfo
+        )
+
+        if gitCmd, err = sources.NewGitDiffCmd(source, staged); err != nil {
+            logging.Fatal().Err(err).Msg("could not create Git diff cmd")
+        }
+        remote = &detect.RemoteInfo{Platform: scm.NoPlatform}
+
+        if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
+            // don't exit on error, just log it
+            logging.Error().Err(err).Msg("failed to scan Git repository")
+        }

        Telemetry.CaptureEvent("cli-command:scan git-changes", posthog.NewProperties().Set("risks", len(findings)).Set("version", util.CLI_VERSION))
@ -447,9 +471,7 @@ var scanGitChangesCmd = &cobra.Command{
        reportPath, _ := cmd.Flags().GetString("report-path")
        ext, _ := cmd.Flags().GetString("report-format")
        if reportPath != "" {
-            if err = report.Write(findings, cfg, ext, reportPath); err != nil {
-                log.Fatal().Err(err).Msg("")
-            }
+            reportFindings(findings, reportPath, ext, &cfg)
        }
        if len(findings) != 0 {
            os.Exit(exitCode)
@ -457,6 +479,36 @@ var scanGitChangesCmd = &cobra.Command{
    },
}

+func reportFindings(findings []report.Finding, reportPath string, ext string, cfg *config.Config) {
+    var reporter report.Reporter
+
+    switch ext {
+    case "csv":
+        reporter = &report.CsvReporter{}
+    case "json":
+        reporter = &report.JsonReporter{}
+    case "junit":
+        reporter = &report.JunitReporter{}
+    case "sarif":
+        reporter = &report.SarifReporter{
+            OrderedRules: cfg.GetOrderedRules(),
+        }
+    default:
+        logging.Fatal().Msgf("unknown report format %s", ext)
+    }
+
+    file, err := os.Create(reportPath)
+    if err != nil {
+        log.Fatal().Err(err).Msg("could not create file")
+    }
+
+    if err := reporter.Write(file, findings); err != nil {
+        log.Fatal().Err(err).Msg("could not write")
+    }
+}
+
func fileExists(fileName string) bool {
    // check for a .infisicalignore file
    info, err := os.Stat(fileName)
@ -1,108 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
    "os"
    "path/filepath"
    "strings"
    "testing"
)

func TestWriteCSV(t *testing.T) {
    tests := []struct {
        findings       []Finding
        testReportName string
        expected       string
        wantEmpty      bool
    }{
        {
            testReportName: "simple",
            expected:       filepath.Join(expectPath, "report", "csv_simple.csv"),
            findings: []Finding{
                {
                    RuleID:      "test-rule",
                    Match:       "line containing secret",
                    Secret:      "a secret",
                    StartLine:   1,
                    EndLine:     2,
                    StartColumn: 1,
                    EndColumn:   2,
                    Message:     "opps",
                    File:        "auth.py",
                    SymlinkFile: "",
                    Commit:      "0000000000000000",
                    Author:      "John Doe",
                    Email:       "johndoe@gmail.com",
                    Date:        "10-19-2003",
                    Fingerprint: "fingerprint",
                },
            }},
        {
            wantEmpty:      true,
            testReportName: "empty",
            expected:       filepath.Join(expectPath, "report", "this_should_not_exist.csv"),
            findings:       []Finding{}},
    }

    for _, test := range tests {
        tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".csv"))
        if err != nil {
            os.Remove(tmpfile.Name())
            t.Error(err)
        }
        err = writeCsv(test.findings, tmpfile)
        if err != nil {
            os.Remove(tmpfile.Name())
            t.Error(err)
        }
        got, err := os.ReadFile(tmpfile.Name())
        if err != nil {
            os.Remove(tmpfile.Name())
            t.Error(err)
        }
        if test.wantEmpty {
            if len(got) > 0 {
                t.Errorf("Expected empty file, got %s", got)
            }
            os.Remove(tmpfile.Name())
            continue
        }
        want, err := os.ReadFile(test.expected)
        if err != nil {
            os.Remove(tmpfile.Name())
            t.Error(err)
        }

        if string(got) != string(want) {
            err = os.WriteFile(strings.Replace(test.expected, ".csv", ".got.csv", 1), got, 0644)
            if err != nil {
                t.Error(err)
            }
            t.Errorf("got %s, want %s", string(got), string(want))
        }

        os.Remove(tmpfile.Name())
    }
}
@ -1,111 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
	"os"
	"path/filepath"
	"strings"
	"testing"
)

func TestWriteJSON(t *testing.T) {
	tests := []struct {
		findings       []Finding
		testReportName string
		expected       string
		wantEmpty      bool
	}{
		{
			testReportName: "simple",
			expected:       filepath.Join(expectPath, "report", "json_simple.json"),
			findings: []Finding{
				{
					Description: "",
					RuleID:      "test-rule",
					Match:       "line containing secret",
					Secret:      "a secret",
					StartLine:   1,
					EndLine:     2,
					StartColumn: 1,
					EndColumn:   2,
					Message:     "opps",
					File:        "auth.py",
					SymlinkFile: "",
					Commit:      "0000000000000000",
					Author:      "John Doe",
					Email:       "johndoe@gmail.com",
					Date:        "10-19-2003",
					Tags:        []string{},
				},
			}},
		{
			testReportName: "empty",
			expected:       filepath.Join(expectPath, "report", "empty.json"),
			findings:       []Finding{}},
	}

	for _, test := range tests {
		// create tmp file using os.TempDir()
		tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".json"))
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		err = writeJson(test.findings, tmpfile)
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		got, err := os.ReadFile(tmpfile.Name())
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		if test.wantEmpty {
			if len(got) > 0 {
				os.Remove(tmpfile.Name())
				t.Errorf("Expected empty file, got %s", got)
			}
			os.Remove(tmpfile.Name())
			continue
		}
		want, err := os.ReadFile(test.expected)
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}

		if string(got) != string(want) {
			err = os.WriteFile(strings.Replace(test.expected, ".json", ".got.json", 1), got, 0644)
			if err != nil {
				t.Error(err)
			}
			t.Errorf("got %s, want %s", string(got), string(want))
		}

		os.Remove(tmpfile.Name())
	}
}
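Likewise, this JSON test compares encoder output byte-for-byte against a fixture. A hedged sketch of a `writeJson` that would satisfy it, assuming `encoding/json` with a one-space indent (the exact indent is an assumption; the fixture fixes the real one):

```go
package report

import (
	"encoding/json"
	"io"
)

// writeJson sketch: encode the findings slice as pretty-printed JSON.
// An empty slice still encodes to "[]", matching the empty.json fixture case.
func writeJson(findings []Finding, w io.WriteCloser) error {
	defer w.Close()
	encoder := json.NewEncoder(w)
	encoder.SetIndent("", " ") // assumed indent
	return encoder.Encode(findings)
}
```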
@ -1,133 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
	"os"
	"path/filepath"
	"strconv"
	"testing"

	"github.com/Infisical/infisical-merge/config"
)

const (
	expectPath = "../testdata/expected/"
	tmpPath    = "../testdata/tmp"
)

func TestReport(t *testing.T) {
	tests := []struct {
		findings  []Finding
		ext       string
		wantEmpty bool
	}{
		{
			ext: "json",
			findings: []Finding{
				{
					RuleID: "test-rule",
				},
			},
		},
		{
			ext: ".json",
			findings: []Finding{
				{
					RuleID: "test-rule",
				},
			},
		},
		{
			ext: ".jsonj",
			findings: []Finding{
				{
					RuleID: "test-rule",
				},
			},
			wantEmpty: true,
		},
		{
			ext: ".csv",
			findings: []Finding{
				{
					RuleID: "test-rule",
				},
			},
		},
		{
			ext: "csv",
			findings: []Finding{
				{
					RuleID: "test-rule",
				},
			},
		},
		{
			ext: "CSV",
			findings: []Finding{
				{
					RuleID: "test-rule",
				},
			},
		},
		// {
		// 	ext: "SARIF",
		// 	findings: []Finding{
		// 		{
		// 			RuleID: "test-rule",
		// 		},
		// 	},
		// },
	}

	for i, test := range tests {
		tmpfile, err := os.Create(filepath.Join(tmpPath, strconv.Itoa(i)+test.ext))
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		err = Write(test.findings, config.Config{}, test.ext, tmpfile.Name())
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		got, err := os.ReadFile(tmpfile.Name())
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		os.Remove(tmpfile.Name())

		if len(got) == 0 && !test.wantEmpty {
			t.Errorf("got empty file with extension " + test.ext)
		}

		if test.wantEmpty {
			if len(got) > 0 {
				t.Errorf("Expected empty file, got %s", got)
			}
			continue
		}
	}
}
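`TestReport` above implies how `Write` dispatches on the report extension: the leading dot is optional, matching is case-insensitive (`csv`, `.csv`, and `CSV` all work), and an unknown extension such as `.jsonj` yields an empty file. A sketch of a dispatcher with that behavior, assuming the signature the test calls; the SARIF branch is left commented to mirror the commented-out test case:

```go
package report

import (
	"os"
	"strings"

	"github.com/Infisical/infisical-merge/config"
)

// Write sketch: normalize the extension, then hand off to a format-specific
// writer. Unknown extensions fall through, leaving the created file empty.
func Write(findings []Finding, cfg config.Config, ext, reportPath string) error {
	file, err := os.Create(reportPath)
	if err != nil {
		return err
	}
	switch strings.ToLower(strings.TrimPrefix(ext, ".")) {
	case "json":
		err = writeJson(findings, file)
	case "csv":
		err = writeCsv(findings, file)
	// case "sarif":
	// 	err = writeSarif(cfg, findings, file)
	default:
		file.Close() // unknown format: leave the file empty
	}
	return err
}
```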
@ -1,122 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

const configPath = "../testdata/config/"

// func TestWriteSarif(t *testing.T) {
// 	tests := []struct {
// 		findings       []Finding
// 		testReportName string
// 		expected       string
// 		wantEmpty      bool
// 		cfgName        string
// 	}{
// 		{
// 			cfgName:        "simple",
// 			testReportName: "simple",
// 			expected:       filepath.Join(expectPath, "report", "sarif_simple.sarif"),
// 			findings: []Finding{
// 				{
// 					Description: "A test rule",
// 					RuleID:      "test-rule",
// 					Match:       "line containing secret",
// 					Secret:      "a secret",
// 					StartLine:   1,
// 					EndLine:     2,
// 					StartColumn: 1,
// 					EndColumn:   2,
// 					Message:     "opps",
// 					File:        "auth.py",
// 					Commit:      "0000000000000000",
// 					Author:      "John Doe",
// 					Email:       "johndoe@gmail.com",
// 					Date:        "10-19-2003",
// 					Tags:        []string{},
// 				},
// 			}},
// 	}

// 	for _, test := range tests {
// 		// create tmp file using os.TempDir()
// 		tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".json"))
// 		if err != nil {
// 			os.Remove(tmpfile.Name())
// 			t.Error(err)
// 		}
// 		viper.Reset()
// 		viper.AddConfigPath(configPath)
// 		viper.SetConfigName(test.cfgName)
// 		viper.SetConfigType("toml")
// 		err = viper.ReadInConfig()
// 		if err != nil {
// 			t.Error(err)
// 		}

// 		var vc config.ViperConfig
// 		err = viper.Unmarshal(&vc)
// 		if err != nil {
// 			t.Error(err)
// 		}

// 		cfg, err := vc.Translate()
// 		if err != nil {
// 			t.Error(err)
// 		}
// 		err = writeSarif(cfg, test.findings, tmpfile)
// 		fmt.Println(cfg)
// 		if err != nil {
// 			os.Remove(tmpfile.Name())
// 			t.Error(err)
// 		}
// 		got, err := os.ReadFile(tmpfile.Name())
// 		if err != nil {
// 			os.Remove(tmpfile.Name())
// 			t.Error(err)
// 		}
// 		if test.wantEmpty {
// 			if len(got) > 0 {
// 				os.Remove(tmpfile.Name())
// 				t.Errorf("Expected empty file, got %s", got)
// 			}
// 			os.Remove(tmpfile.Name())
// 			continue
// 		}
// 		want, err := os.ReadFile(test.expected)
// 		if err != nil {
// 			os.Remove(tmpfile.Name())
// 			t.Error(err)
// 		}

// 		if string(got) != string(want) {
// 			err = os.WriteFile(strings.Replace(test.expected, ".sarif", ".got.sarif", 1), got, 0644)
// 			if err != nil {
// 				t.Error(err)
// 			}
// 			t.Errorf("got %s, want %s", string(got), string(want))
// 		}

// 		os.Remove(tmpfile.Name())
// 	}
// }
@ -13,7 +13,7 @@ To enable and configure GitHub Organization Synchronization, follow these steps:

<Steps>
<Step title="Set up GitHub organization configuration">
1. Navigate to **Organization Settings** and select the **Security Tab**.
1. Navigate to the **Single Sign-On (SSO)** page and select the **Provisioning** tab.

2. Click the **Configure** button and provide the name of your GitHub Organization.

@ -18,7 +18,9 @@ Prerequisites:

<Steps>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Security > LDAP and select **Manage**.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Select **Connect** for **LDAP**.

Next, input your LDAP server settings.

@ -27,7 +27,9 @@ Prerequisites:

</Step>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Security > LDAP and select **Manage**.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Select **Connect** for **LDAP**.

Next, input your JumpCloud LDAP server settings.

@ -15,7 +15,7 @@ Prerequisites:

<Steps>
<Step title="Create a SCIM token in Infisical">
In Infisical, head to your Organization Settings > Security > SCIM Configuration and
In Infisical, head to the **Single Sign-On (SSO)** page and select the **Provisioning** tab. Under SCIM Configuration,
press the **Enable SCIM provisioning** toggle to allow Azure to provision/deprovision users for your organization.

@ -15,7 +15,7 @@ Prerequisites:

<Steps>
<Step title="Create a SCIM token in Infisical">
In Infisical, head to your Organization Settings > Security > SCIM Configuration and
In Infisical, head to the **Single Sign-On (SSO)** page and select the **Provisioning** tab. Under SCIM Configuration,
press the **Enable SCIM provisioning** toggle to allow JumpCloud to provision/deprovision users and user groups for your organization.

@ -15,7 +15,7 @@ Prerequisites:

<Steps>
<Step title="Create a SCIM token in Infisical">
In Infisical, head to your Organization Settings > Security > SCIM Configuration and
In Infisical, head to the **Single Sign-On (SSO)** page and select the **Provisioning** tab. Under SCIM Configuration,
press the **Enable SCIM provisioning** toggle to allow Okta to provision/deprovision users and user groups for your organization.

@ -39,8 +39,8 @@ description: "Learn how to configure Auth0 OIDC for Infisical SSO."

</Step>
<Step title="Finish configuring OIDC in Infisical">
3.1. Back in Infisical, in the Organization settings > Security > OIDC, click **Connect**.
3.1. Back in Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **OIDC**.

3.2. For configuration type, select **Discovery URL**. Then, set **Discovery Document URL**, **JWT Signature Algorithm**, **Client ID**, and **Client Secret** from steps 2.1 and 2.2.

@ -12,7 +12,9 @@ description: "Learn how to configure Auth0 SAML for Infisical SSO."

<Steps>
<Step title="Prepare the SAML SSO configuration in Infisical">
In Infisical, head to Organization Settings > Security and click **Connect** for SAML under the Connect to an Identity Provider section. Select Auth0, then click **Connect** again.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **SAML** under the Connect to an Identity Provider section. Select **Auth0**, then click **Connect** again.

Next, note the **Application Callback URL** and **Audience** to use when configuring the Auth0 SAML application.

@ -12,7 +12,9 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."

<Steps>
<Step title="Prepare the SAML SSO configuration in Infisical">
In Infisical, head to Organization Settings > Security and click **Connect** for SAML under the Connect to an Identity Provider section. Select Azure / Entra, then click **Connect** again.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **SAML** under the Connect to an Identity Provider section. Select **Azure / Entra**, then click **Connect** again.

Next, copy the **Reply URL (Assertion Consumer Service URL)** and **Identifier (Entity ID)** to use when configuring the Azure SAML application.

@ -28,8 +28,8 @@ Prerequisites:
1.4. Access the IdP’s OIDC discovery document (usually located at `https://<idp-domain>/.well-known/openid-configuration`). This document contains important endpoints such as authorization, token, userinfo, and keys.
</Step>
<Step title="Finish configuring OIDC in Infisical">
2.1. Back in Infisical, in the Organization settings > Security > OIDC, click Connect.
2.1. Back in Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Select **Connect** for **OIDC**.

2.2. You can configure OIDC either through the Discovery URL (Recommended) or by inputting custom endpoints.

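Step 1.4 is easy to sanity-check programmatically before filling in custom endpoints. A small, self-contained Go sketch that fetches a discovery document and prints the endpoints the custom-endpoint form asks about; the IdP domain is a placeholder, and the struct covers only a subset of the fields the spec defines:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// discovery holds a few standard fields from an OIDC discovery document.
type discovery struct {
	Issuer                string `json:"issuer"`
	AuthorizationEndpoint string `json:"authorization_endpoint"`
	TokenEndpoint         string `json:"token_endpoint"`
	UserinfoEndpoint      string `json:"userinfo_endpoint"`
	JwksURI               string `json:"jwks_uri"`
}

func main() {
	// Placeholder domain: substitute your IdP's domain here.
	resp, err := http.Get("https://idp.example.com/.well-known/openid-configuration")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var d discovery
	if err := json.NewDecoder(resp.Body).Decode(&d); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("issuer: %s\nauthorization: %s\ntoken: %s\nuserinfo: %s\njwks: %s\n",
		d.Issuer, d.AuthorizationEndpoint, d.TokenEndpoint, d.UserinfoEndpoint, d.JwksURI)
}
```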
@ -12,7 +12,9 @@ description: "Learn how to configure Google SAML for Infisical SSO."

<Steps>
<Step title="Prepare the SAML SSO configuration in Infisical">
In Infisical, head to Organization Settings > Security and click **Connect** for SAML under the Connect to an Identity Provider section. Select Google, then click **Connect** again.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **SAML** under the Connect to an Identity Provider section. Select **Google**, then click **Connect** again.

Next, note the **ACS URL** and **SP Entity ID** to use when configuring the Google SAML application.

@ -12,7 +12,9 @@ description: "Learn how to configure JumpCloud SAML for Infisical SSO."

<Steps>
<Step title="Prepare the SAML SSO configuration in Infisical">
In Infisical, head to Organization Settings > Security and click **Connect** for SAML under the Connect to an Identity Provider section. Select JumpCloud, then click **Connect** again.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **SAML** under the Connect to an Identity Provider section. Select **JumpCloud**, then click **Connect** again.

Next, copy the **ACS URL** and **SP Entity ID** to use when configuring the JumpCloud SAML application.

@ -53,7 +53,7 @@ Infisical groups not present in their groups claim.
2.1. In Infisical, create any groups you would like to sync users to. Make sure the name of the Infisical group is an exact match of the Keycloak group name.

2.2. Next, enable **OIDC Group Membership Mapping** in Organization Settings > Security.
2.2. Next, enable **OIDC Group Membership Mapping** on the **Single Sign-On (SSO)** page under the **General** tab.

2.3. The next time a user logs in, they will be synced to their matching Keycloak groups.

@ -66,8 +66,8 @@ description: "Learn how to configure Keycloak OIDC for Infisical SSO."

</Step>
<Step title="Finish configuring OIDC in Infisical">
3.1. Back in Infisical, in the Organization settings > Security > OIDC, click Connect.
3.1. Back in Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **OIDC**.

3.2. For configuration type, select Discovery URL. Then, set the appropriate values for **Discovery Document URL**, **JWT Signature Algorithm**, **Client ID**, and **Client Secret**.

@ -12,9 +12,9 @@ description: "Learn how to configure Keycloak SAML for Infisical SSO."

<Steps>
<Step title="Prepare the SAML SSO configuration in Infisical">
In Infisical, head to Organization Settings > Security and click **Connect** for SAML under the Connect to an Identity Provider section. Select Keycloak, then click **Connect** again.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **SAML** under the Connect to an Identity Provider section. Select **Keycloak**, then click **Connect** again.

Next, copy the **Valid redirect URI** and **SP Entity ID** to use when configuring the Keycloak SAML application.

@ -12,8 +12,10 @@ description: "Learn how to configure Okta SAML 2.0 for Infisical SSO."

<Steps>
<Step title="Prepare the SAML SSO configuration in Infisical">
In Infisical, head to Organization Settings > Security and click **Connect** for SAML under the Connect to an Identity Provider section. Select Okta, then click **Connect** again.
In Infisical, head to the **Single Sign-On (SSO)** page and select the **General** tab. Click **Connect** for **SAML** under the Connect to an Identity Provider section. Select **Okta**, then click **Connect** again.

Next, copy the **Single sign-on URL** and **Audience URI (SP Entity ID)** to use when configuring the Okta SAML 2.0 application.

</Step>

Before Width: | Height: | Size: 450 KiB After Width: | Height: | Size: 1.3 MiB |
Before Width: | Height: | Size: 485 KiB After Width: | Height: | Size: 766 KiB |
Before Width: | Height: | Size: 452 KiB After Width: | Height: | Size: 1.3 MiB |
Before Width: | Height: | Size: 618 KiB After Width: | Height: | Size: 1.3 MiB |
Before Width: | Height: | Size: 1.0 MiB After Width: | Height: | Size: 1.3 MiB |
Before Width: | Height: | Size: 780 KiB |
BIN
docs/images/sso/connect-ldap.png
Normal file
After Width: | Height: | Size: 1.2 MiB |
BIN
docs/images/sso/connect-oidc.png
Normal file
After Width: | Height: | Size: 1.2 MiB |
BIN
docs/images/sso/connect-saml.png
Normal file
After Width: | Height: | Size: 1.2 MiB |
Before Width: | Height: | Size: 780 KiB |