Skip to content

Commit d53b57e

Browse files
Merge remote-tracking branch 'upstream/main'
2 parents 7ecb03d + b482a9c commit d53b57e

35 files changed

+487
-293
lines changed

.github/workflows/build-binaries.yml

+88-3
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ on:
77
description: "Version number"
88
required: true
99
type: string
10-
1110
defaults:
1211
run:
1312
working-directory: ./backend
@@ -49,9 +48,9 @@ jobs:
4948
- name: Package into node binary
5049
run: |
5150
if [ "${{ matrix.os }}" != "linux" ]; then
52-
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
51+
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
5352
else
54-
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
53+
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
5554
fi
5655
5756
# Set up .deb package structure (Debian/Ubuntu only)
@@ -83,6 +82,86 @@ jobs:
8382
dpkg-deb --build infisical-core
8483
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
8584
85+
### RPM
86+
87+
# Set up .rpm package structure
88+
- name: Set up .rpm package structure
89+
if: matrix.os == 'linux'
90+
run: |
91+
mkdir -p infisical-core-rpm/usr/local/bin
92+
cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
93+
chmod +x infisical-core-rpm/usr/local/bin/infisical-core
94+
95+
# Install RPM build tools
96+
- name: Install RPM build tools
97+
if: matrix.os == 'linux'
98+
run: sudo apt-get update && sudo apt-get install -y rpm
99+
100+
# Create .spec file for RPM
101+
- name: Create .spec file for RPM
102+
if: matrix.os == 'linux'
103+
run: |
104+
cat <<EOF > infisical-core.spec
105+
106+
%global _enable_debug_package 0
107+
%global debug_package %{nil}
108+
%global __os_install_post /usr/lib/rpm/brp-compress %{nil}
109+
110+
Name: infisical-core
111+
Version: ${{ github.event.inputs.version }}
112+
Release: 1%{?dist}
113+
Summary: Infisical Core standalone executable
114+
License: Proprietary
115+
URL: https://app.infisical.com
116+
117+
%description
118+
Infisical Core standalone executable (app.infisical.com)
119+
120+
%install
121+
mkdir -p %{buildroot}/usr/local/bin
122+
cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
123+
124+
%files
125+
/usr/local/bin/infisical-core
126+
127+
%pre
128+
129+
%post
130+
131+
%preun
132+
133+
%postun
134+
EOF
135+
136+
# Build .rpm file
137+
- name: Build .rpm package
138+
if: matrix.os == 'linux'
139+
run: |
140+
# Create necessary directories
141+
mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
142+
143+
# Copy the binary directly to SOURCES
144+
cp ./binary/infisical-core rpmbuild/SOURCES/
145+
146+
# Run rpmbuild with verbose output
147+
rpmbuild -vv -bb \
148+
--define "_topdir $(pwd)/rpmbuild" \
149+
--define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
150+
--define "_rpmdir $(pwd)/rpmbuild/RPMS" \
151+
--target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
152+
infisical-core.spec
153+
154+
# Try to find the RPM file
155+
find rpmbuild -name "*.rpm"
156+
157+
# Move the RPM file if found
158+
if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
159+
mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
160+
else
161+
echo "RPM file not found!"
162+
exit 1
163+
fi
164+
86165
- uses: actions/setup-python@v4
87166
with:
88167
python-version: "3.x" # Specify the Python version you need
@@ -97,6 +176,12 @@ jobs:
97176
working-directory: ./backend
98177
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
99178

179+
# Publish .rpm file to Cloudsmith (Red Hat-based systems only)
180+
- name: Publish .rpm to Cloudsmith
181+
if: matrix.os == 'linux'
182+
working-directory: ./backend
183+
run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm
184+
100185
# Publish .exe file to Cloudsmith (Windows only)
101186
- name: Publish to Cloudsmith (Windows)
102187
if: matrix.os == 'win'

.github/workflows/build-staging-and-deploy-aws.yml

+1
Original file line numberDiff line numberDiff line change
@@ -127,6 +127,7 @@ jobs:
127127
- name: Change directory to backend and install dependencies
128128
env:
129129
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
130+
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
130131
run: |
131132
cd backend
132133
npm install

backend/package-lock.json

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

backend/scripts/generate-schema-types.ts

+6-1
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,12 @@ const main = async () => {
9090
.whereRaw("table_schema = current_schema()")
9191
.select<{ tableName: string }[]>("table_name as tableName")
9292
.orderBy("table_name")
93-
).filter((el) => !el.tableName.includes("_migrations") && !el.tableName.includes("partitioned_audit_logs_"));
93+
).filter(
94+
(el) =>
95+
!el.tableName.includes("_migrations") &&
96+
!el.tableName.includes("audit_logs_") &&
97+
el.tableName !== "intermediate_audit_logs"
98+
);
9499

95100
for (let i = 0; i < tables.length; i += 1) {
96101
const { tableName } = tables[i];

backend/src/@types/knex.d.ts

-8
Original file line numberDiff line numberDiff line change
@@ -170,9 +170,6 @@ import {
170170
TOrgRoles,
171171
TOrgRolesInsert,
172172
TOrgRolesUpdate,
173-
TPartitionedAuditLogs,
174-
TPartitionedAuditLogsInsert,
175-
TPartitionedAuditLogsUpdate,
176173
TPkiAlerts,
177174
TPkiAlertsInsert,
178175
TPkiAlertsUpdate,
@@ -718,11 +715,6 @@ declare module "knex/types/tables" {
718715
TAuditLogStreamsInsert,
719716
TAuditLogStreamsUpdate
720717
>;
721-
[TableName.PartitionedAuditLog]: KnexOriginal.CompositeTableType<
722-
TPartitionedAuditLogs,
723-
TPartitionedAuditLogsInsert,
724-
TPartitionedAuditLogsUpdate
725-
>;
726718
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
727719
TGitAppInstallSessions,
728720
TGitAppInstallSessionsInsert,
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,161 @@
1+
import kx, { Knex } from "knex";
2+
3+
import { TableName } from "../schemas";
4+
5+
// Temporary name the pre-partition audit log table is renamed to before it is
// re-attached as a historical partition of the new partitioned table.
const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs";
6+
7+
const formatPartitionDate = (date: Date) => {
8+
const year = date.getFullYear();
9+
const month = String(date.getMonth() + 1).padStart(2, "0");
10+
const day = String(date.getDate()).padStart(2, "0");
11+
12+
return `${year}-${month}-${day}`;
13+
};
14+
15+
const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
16+
const startDateStr = formatPartitionDate(startDate);
17+
const endDateStr = formatPartitionDate(endDate);
18+
19+
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
20+
21+
await knex.schema.raw(
22+
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
23+
);
24+
};
25+
26+
/**
 * Converts the existing audit log table into a declaratively partitioned
 * table (PARTITION BY RANGE on "createdAt") and re-attaches the old table's
 * data as the historical partition.
 *
 * Step order matters — each step depends on the previous one:
 *  1. Drop the primary key and every index on the current table.
 *  2. Rename it to the intermediate name.
 *  3. Recreate the audit log table as a range-partitioned table with the same
 *     columns and a composite PK on (id, createdAt) — on partitioned tables
 *     the partition key must be part of the primary key.
 *  4. Re-add btree and GIN indices, plus a DEFAULT partition.
 *  5. Attach the renamed table as the partition covering everything before
 *     tomorrow, then pre-create monthly partitions four years ahead.
 *
 * Intended to run inside a transaction (see executeMigration).
 */
const up = async (knex: Knex): Promise<void> => {
  console.info("Dropping primary key of audit log table...");
  await knex.schema.alterTable(TableName.AuditLog, (t) => {
    // remove existing keys
    t.dropPrimary();
  });

  // Get all indices of the audit log table and drop them
  const indexNames: { rows: { indexname: string }[] } = await knex.raw(
    `
    SELECT indexname
    FROM pg_indexes
    WHERE tablename = '${TableName.AuditLog}'
  `
  );

  console.log(
    "Deleting existing audit log indices:",
    indexNames.rows.map((e) => e.indexname)
  );

  // NOTE(review): index names are interpolated unquoted; a mixed-case index
  // name would be case-folded by Postgres and silently skipped by IF EXISTS —
  // confirm all existing index names are lowercase.
  // (for await over a plain array works but a plain for..of would suffice.)
  for await (const row of indexNames.rows) {
    await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`);
  }

  // renaming audit log to intermediate table
  console.log("Renaming audit log table to the intermediate name");
  await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE);

  if (!(await knex.schema.hasTable(TableName.AuditLog))) {
    // Build the CREATE TABLE statement as SQL text (toString) so the
    // PARTITION BY clause can be appended — knex has no native support for it.
    const createTableSql = knex.schema
      .createTable(TableName.AuditLog, (t) => {
        t.uuid("id").defaultTo(knex.fn.uuid());
        t.string("actor").notNullable();
        t.jsonb("actorMetadata").notNullable();
        t.string("ipAddress");
        t.string("eventType").notNullable();
        t.jsonb("eventMetadata");
        t.string("userAgent");
        t.string("userAgentType");
        t.datetime("expiresAt");
        t.timestamps(true, true, true);
        t.uuid("orgId");
        t.string("projectId");
        t.string("projectName");
        // Composite PK: the range key ("createdAt") must be included in the
        // primary key of a partitioned table.
        t.primary(["id", "createdAt"]);
      })
      .toString();

    console.info("Creating partition table...");
    await knex.schema.raw(`
			${createTableSql} PARTITION BY RANGE ("createdAt");
		`);

    console.log("Adding indices...");
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      t.index(["projectId", "createdAt"]);
      t.index(["orgId", "createdAt"]);
      t.index("expiresAt");
      t.index("orgId");
      t.index("projectId");
    });

    console.log("Adding GIN indices...");

    // jsonb_path_ops GIN indices support @> containment queries on metadata.
    await knex.raw(
      `CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)`
    );
    console.log("GIN index for actorMetadata done");

    await knex.raw(
      `CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)`
    );
    console.log("GIN index for eventMetadata done");

    // create default partition — catches any row whose createdAt falls
    // outside every explicit range partition.
    console.log("Creating default partition...");
    await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`);

    // Tomorrow is the boundary between "historical" data (the old table)
    // and the first new partition.
    const nextDate = new Date();
    nextDate.setDate(nextDate.getDate() + 1);
    const nextDateStr = formatPartitionDate(nextDate);

    // The CHECK constraint asserts every existing row is below the bound,
    // which lets ATTACH PARTITION skip a full validation scan.
    console.log("Attaching existing audit log table as a partition...");
    await knex.schema.raw(`
		ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old
		CHECK ( "createdAt" < DATE '${nextDateStr}' );

		ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE}
		FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
		`);

    // create partition from now until end of month
    console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
    await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));

    // create partitions 4 years ahead, one per calendar month
    const partitionMonths = 4 * 12;
    const partitionPromises: Promise<void>[] = [];
    for (let x = 1; x <= partitionMonths; x += 1) {
      partitionPromises.push(
        createAuditLogPartition(
          knex,
          new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
          new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
        )
      );
    }

    // NOTE(review): these CREATE TABLE statements are issued concurrently on
    // the same (transaction) connection; knex queues them on one socket —
    // confirm this is intended rather than sequential awaits.
    await Promise.all(partitionPromises);
    console.log("Partition migration complete");
  }
};
139+
140+
export const executeMigration = async (url: string) => {
141+
console.log("Executing migration...");
142+
const knex = kx({
143+
client: "pg",
144+
connection: url
145+
});
146+
147+
await knex.transaction(async (tx) => {
148+
await up(tx);
149+
});
150+
};
151+
152+
const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI;
153+
if (!dbUrl) {
154+
console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env");
155+
process.exit(1);
156+
}
157+
158+
void executeMigration(dbUrl).then(() => {
159+
console.log("Migration: partition-audit-logs DONE");
160+
process.exit(0);
161+
});

0 commit comments

Comments
 (0)