refactor(cloud): add api key for authentication between servers

Mauricio Siu
2024-10-06 01:56:53 -06:00
parent 3cf27a068a
commit 58c06fba86
7 changed files with 74 additions and 67 deletions

View File

@@ -7,7 +7,6 @@ import { createClient } from "redis";
 import { logger } from "./logger";
 import { type DeployJob, deployJobSchema } from "./schema";
 import { deploy } from "./utils";
-import { validateBearerTokenAPI } from "@dokploy/server";
 const app = new Hono();
 const redisClient = createClient({
@@ -15,17 +14,12 @@ const redisClient = createClient({
 });
 app.use(async (c, next) => {
-	const authHeader = c.req.header("authorization");
+	const authHeader = c.req.header("X-API-Key");
-	if (!authHeader || !authHeader.startsWith("Bearer ")) {
-		return c.json({ message: "Authorization header missing" }, 401);
+	if (process.env.API_KEY !== authHeader) {
+		return c.json({ message: "Invalid API Key" }, 403);
 	}
-	const result = await validateBearerTokenAPI(authHeader);
-	if (!result.user || !result.session) {
-		return c.json({ message: "Invalid session" }, 403);
-	}
 	return next();
 });
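The new middleware compares the presented key against process.env.API_KEY with a plain !==. Below is a minimal hardening sketch, not part of this commit: it assumes Node's built-in crypto module is available to the Hono server, rejects requests outright when the key is unset, and compares in constant time.

```ts
import { timingSafeEqual } from "node:crypto";

// Compare the presented key with the configured one in constant time.
// Returns false when API_KEY is unset so an empty secret never matches.
const isValidApiKey = (presented: string | undefined): boolean => {
	const expected = process.env.API_KEY;
	if (!expected || !presented) return false;
	const a = Buffer.from(presented);
	const b = Buffer.from(expected);
	// timingSafeEqual throws on length mismatch, so guard it explicitly.
	return a.length === b.length && timingSafeEqual(a, b);
};

app.use(async (c, next) => {
	if (!isValidApiKey(c.req.header("X-API-Key"))) {
		return c.json({ message: "Invalid API Key" }, 403);
	}
	return next();
});
```

The same guard would apply to the identical middleware added to the jobs server further down in this diff.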

View File

@@ -254,7 +254,7 @@ export const applicationRouter = createTRPCRouter({
 			if (IS_CLOUD && application.serverId) {
 				jobData.serverId = application.serverId;
-				await deploy(jobData, ctx.session.id);
+				await deploy(jobData);
 				return true;
 			}
 			await myQueue.add(
@@ -482,7 +482,7 @@ export const applicationRouter = createTRPCRouter({
 			};
 			if (IS_CLOUD && application.serverId) {
 				jobData.serverId = application.serverId;
-				await deploy(jobData, ctx.session.id);
+				await deploy(jobData);
 				return true;
 			}
@@ -571,7 +571,7 @@ export const applicationRouter = createTRPCRouter({
 			};
 			if (IS_CLOUD && app.serverId) {
 				jobData.serverId = app.serverId;
-				await deploy(jobData, ctx.session.id);
+				await deploy(jobData);
 				return true;
 			}

View File

@@ -11,9 +11,13 @@ import {
 	createBackup,
 	findBackupById,
 	findMariadbByBackupId,
+	findMariadbById,
 	findMongoByBackupId,
+	findMongoById,
 	findMySqlByBackupId,
+	findMySqlById,
 	findPostgresByBackupId,
+	findPostgresById,
 	removeBackupById,
 	removeScheduleBackup,
 	runMariadbBackup,
@@ -36,14 +40,11 @@ export const backupRouter = createTRPCRouter({
 			const backup = await findBackupById(newBackup.backupId);
 			if (IS_CLOUD && backup.enabled) {
-				await schedule(
-					{
-						cronSchedule: backup.schedule,
-						backupId: backup.backupId,
-						type: "backup",
-					},
-					ctx.session.id,
-				);
+				await schedule({
+					cronSchedule: backup.schedule,
+					backupId: backup.backupId,
+					type: "backup",
+				});
 			} else {
 				if (backup.enabled) {
 					scheduleBackup(backup);
@@ -57,10 +58,13 @@ export const backupRouter = createTRPCRouter({
 			});
 		}
 	}),
-	one: protectedProcedure.input(apiFindOneBackup).query(async ({ input }) => {
-		const backup = await findBackupById(input.backupId);
-		return backup;
-	}),
+	one: protectedProcedure
+		.input(apiFindOneBackup)
+		.query(async ({ input, ctx }) => {
+			const backup = await findBackupById(input.backupId);
+			return backup;
+		}),
 	update: protectedProcedure
 		.input(apiUpdateBackup)
 		.mutation(async ({ input, ctx }) => {
@@ -70,23 +74,17 @@ export const backupRouter = createTRPCRouter({
 			if (IS_CLOUD) {
 				if (backup.enabled) {
-					await updateJob(
-						{
-							cronSchedule: backup.schedule,
-							backupId: backup.backupId,
-							type: "backup",
-						},
-						ctx.session.id,
-					);
+					await updateJob({
+						cronSchedule: backup.schedule,
+						backupId: backup.backupId,
+						type: "backup",
+					});
 				} else {
-					await removeJob(
-						{
-							cronSchedule: backup.schedule,
-							backupId: backup.backupId,
-							type: "backup",
-						},
-						ctx.session.id,
-					);
+					await removeJob({
+						cronSchedule: backup.schedule,
+						backupId: backup.backupId,
+						type: "backup",
+					});
 				}
 			} else {
 				if (backup.enabled) {
@@ -109,14 +107,11 @@ export const backupRouter = createTRPCRouter({
 			try {
 				const value = await removeBackupById(input.backupId);
 				if (IS_CLOUD && value) {
-					removeJob(
-						{
-							backupId: input.backupId,
-							cronSchedule: value.schedule,
-							type: "backup",
-						},
-						ctx.session.id,
-					);
+					removeJob({
+						backupId: input.backupId,
+						cronSchedule: value.schedule,
+						type: "backup",
+					});
 				} else if (!IS_CLOUD) {
 					removeScheduleBackup(input.backupId);
 				}
@@ -196,3 +191,26 @@ export const backupRouter = createTRPCRouter({
 		}
 	}),
 });
+// export const getAdminId = async (backupId: string) => {
+// 	const backup = await findBackupById(backupId);
+// 	if (backup.databaseType === "postgres" && backup.postgresId) {
+// 		const postgres = await findPostgresById(backup.postgresId);
+// 		return postgres.project.adminId;
+// 	}
+// 	if (backup.databaseType === "mariadb" && backup.mariadbId) {
+// 		const mariadb = await findMariadbById(backup.mariadbId);
+// 		return mariadb.project.adminId;
+// 	}
+// 	if (backup.databaseType === "mysql" && backup.mysqlId) {
+// 		const mysql = await findMySqlById(backup.mysqlId);
+// 		return mysql.project.adminId;
+// 	}
+// 	if (backup.databaseType === "mongo" && backup.mongoId) {
+// 		const mongo = await findMongoById(backup.mongoId);
+// 		return mongo.project.adminId;
+// 	}
+// 	return null;
+// };

View File

@@ -256,7 +256,7 @@ export const composeRouter = createTRPCRouter({
 			if (IS_CLOUD && compose.serverId) {
 				jobData.serverId = compose.serverId;
-				await deploy(jobData, ctx.session.id);
+				await deploy(jobData);
 				return true;
 			}
 			await myQueue.add(
@@ -288,7 +288,7 @@ export const composeRouter = createTRPCRouter({
 			};
 			if (IS_CLOUD && compose.serverId) {
 				jobData.serverId = compose.serverId;
-				await deploy(jobData, ctx.session.id);
+				await deploy(jobData);
 				return true;
 			}
 			await myQueue.add(

View File

@@ -9,13 +9,13 @@ type QueueJob =
 			cronSchedule: string;
 			serverId: string;
 	  };
-export const schedule = async (job: QueueJob, authSession: string) => {
+export const schedule = async (job: QueueJob) => {
 	try {
 		const result = await fetch(`${process.env.JOBS_URL}/create-backup`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
-				Authorization: `Bearer ${authSession}`,
+				"X-API-Key": process.env.API_KEY || "NO-DEFINED",
 			},
 			body: JSON.stringify(job),
 		});
@@ -28,13 +28,13 @@ export const schedule = async (job: QueueJob, authSession: string) => {
 	}
 };
-export const removeJob = async (job: QueueJob, authSession: string) => {
+export const removeJob = async (job: QueueJob) => {
 	try {
 		const result = await fetch(`${process.env.JOBS_URL}/remove-job`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
-				Authorization: `Bearer ${authSession}`,
+				"X-API-Key": process.env.API_KEY || "NO-DEFINED",
 			},
 			body: JSON.stringify(job),
 		});
@@ -47,13 +47,13 @@ export const removeJob = async (job: QueueJob, authSession: string) => {
 	}
 };
-export const updateJob = async (job: QueueJob, authSession: string) => {
+export const updateJob = async (job: QueueJob) => {
 	try {
 		const result = await fetch(`${process.env.JOBS_URL}/update-backup`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
-				Authorization: `Bearer ${authSession}`,
+				"X-API-Key": process.env.API_KEY || "NO-DEFINED",
 			},
 			body: JSON.stringify(job),
 		});
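After this change, schedule, removeJob, and updateJob each build the same JSON POST with the same X-API-Key header by hand. A possible follow-up sketch, not part of this commit, using a hypothetical postToJobsServer helper and the same JOBS_URL / API_KEY environment variables the functions above already read:

```ts
// Hypothetical shared helper: centralizes the JSON POST and API-key header
// that schedule/removeJob/updateJob each repeat above.
const postToJobsServer = async (path: string, job: QueueJob) => {
	const result = await fetch(`${process.env.JOBS_URL}${path}`, {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			"X-API-Key": process.env.API_KEY || "NO-DEFINED",
		},
		body: JSON.stringify(job),
	});
	if (!result.ok) {
		throw new Error(`Jobs server responded with ${result.status}`);
	}
	return result;
};

// The three exports then reduce to thin wrappers.
export const schedule = (job: QueueJob) => postToJobsServer("/create-backup", job);
export const removeJob = (job: QueueJob) => postToJobsServer("/remove-job", job);
export const updateJob = (job: QueueJob) => postToJobsServer("/update-backup", job);
```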

View File

@@ -1,12 +1,12 @@
 import type { DeploymentJob } from "../queues/deployments-queue";
-export const deploy = async (jobData: DeploymentJob, sessionId: string) => {
+export const deploy = async (jobData: DeploymentJob) => {
 	try {
 		const result = await fetch(`${process.env.SERVER_URL}/deploy`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
-				Authorization: `Bearer ${sessionId}`,
+				"X-API-Key": process.env.API_KEY || "NO-DEFINED",
 			},
 			body: JSON.stringify(jobData),
 		});

View File

@@ -6,24 +6,18 @@ import { logger } from "./logger";
 import { cleanQueue, getJobRepeatable, removeJob, scheduleJob } from "./queue";
 import { jobQueueSchema } from "./schema";
 import { firstWorker, secondWorker } from "./workers";
-import { validateBearerTokenAPI } from "@dokploy/server";
 const app = new Hono();
 cleanQueue();
 app.use(async (c, next) => {
-	const authHeader = c.req.header("authorization");
+	const authHeader = c.req.header("X-API-Key");
-	if (!authHeader || !authHeader.startsWith("Bearer ")) {
-		return c.json({ message: "Authorization header missing" }, 401);
+	if (process.env.API_KEY !== authHeader) {
+		return c.json({ message: "Invalid API Key" }, 403);
 	}
-	const result = await validateBearerTokenAPI(authHeader);
-	if (!result.user || !result.session) {
-		return c.json({ message: "Invalid session" }, 403);
-	}
 	return next();
 });
@@ -55,6 +49,7 @@ app.post("/update-backup", zValidator("json", jobQueueSchema), async (c) => {
 		logger.info("Job removed", result);
 	}
 	scheduleJob(data);
+	logger.info("Backup updated successfully");
 	return c.json({ message: "Backup updated successfully" });
 });
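One caveat with the shared-secret check as written: if API_KEY is unset on the receiving server and a request arrives without an X-API-Key header, the guard process.env.API_KEY !== authHeader compares undefined with undefined, evaluates to false, and lets the request through, while the senders fall back to the literal "NO-DEFINED". A minimal startup guard, an addition rather than part of this commit, that either server could run before registering the middleware:

```ts
// Fail fast at boot if the shared secret is missing or left at the
// placeholder, instead of silently accepting unauthenticated requests.
const apiKey = process.env.API_KEY;
if (!apiKey || apiKey === "NO-DEFINED") {
	console.error("API_KEY is not configured; refusing to start");
	process.exit(1);
}
```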