Mirror of https://github.com/LukeHagar/dokploy.git (synced 2025-12-06 04:19:37 +00:00)
Commit: Merge branch 'canary' into separate-permission-for-creating-environments-#2593
@@ -57,7 +57,7 @@
"drizzle-orm": "^0.39.3",
"drizzle-zod": "0.5.1",
"hi-base32": "^0.5.1",
"js-yaml": "4.1.0",
"yaml": "2.8.1",
"lodash": "4.17.21",
"micromatch": "4.0.8",
"nanoid": "3.3.11",
@@ -85,7 +85,6 @@
"@types/adm-zip": "^0.5.7",
"@types/bcrypt": "5.0.2",
"@types/dockerode": "3.3.23",
"@types/js-yaml": "4.0.9",
"@types/lodash": "4.17.4",
"@types/micromatch": "4.0.9",
"@types/node": "^18.19.104",
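The dependency swap above drives most of the file churn below: every js-yaml call site moves to the yaml package. A minimal sketch of the mapping, assuming yaml@2.x as pinned above:

// before (js-yaml)
import { dump, load } from "js-yaml";
const text = dump({ a: 1 }); // serialize to YAML
const data = load(text);     // parse from YAML

// after (yaml)
import { parse, stringify } from "yaml";
const text2 = stringify({ a: 1 }); // serialize
const data2 = parse(text2);        // parse
// Options carry over too, e.g. stringify(obj, { lineWidth: 1000 })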
@@ -1,5 +1,6 @@
import { relations } from "drizzle-orm";
import {
bigint,
boolean,
integer,
json,
@@ -20,7 +21,6 @@ import { gitlab } from "./gitlab";
import { mounts } from "./mount";
import { ports } from "./port";
import { previewDeployments } from "./preview-deployments";
import { projects } from "./project";
import { redirects } from "./redirects";
import { registry } from "./registry";
import { security } from "./security";
@@ -164,6 +164,7 @@ export const applications = pgTable("application", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
//
replicas: integer("replicas").default(1).notNull(),
applicationStatus: applicationStatus("applicationStatus")
@@ -312,6 +313,7 @@ const createSchema = createInsertSchema(applications, {
watchPaths: z.array(z.string()).optional(),
previewLabels: z.array(z.string()).optional(),
cleanCache: z.boolean().optional(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateApplication = createSchema.pick({
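The new stopGracePeriodSwarm column uses drizzle-orm's bigint with mode: "bigint", which surfaces the value as a JavaScript BigInt, and the matching zod rule is z.bigint().nullable(). A minimal sketch of what that validator accepts (hypothetical values):

import { z } from "zod";

const stopGrace = z.bigint().nullable();
stopGrace.parse(10_000_000_000n); // ok: BigInt literal (e.g. nanoseconds)
stopGrace.parse(null);            // ok: the column is nullable
// stopGrace.parse(10);           // would throw: a plain number is not a bigint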
@@ -11,7 +11,9 @@ export const bitbucket = pgTable("bitbucket", {
.primaryKey()
.$defaultFn(() => nanoid()),
bitbucketUsername: text("bitbucketUsername"),
bitbucketEmail: text("bitbucketEmail"),
appPassword: text("appPassword"),
apiToken: text("apiToken"),
bitbucketWorkspaceName: text("bitbucketWorkspaceName"),
gitProviderId: text("gitProviderId")
.notNull()
@@ -29,7 +31,9 @@ const createSchema = createInsertSchema(bitbucket);

export const apiCreateBitbucket = createSchema.extend({
bitbucketUsername: z.string().optional(),
bitbucketEmail: z.string().email().optional(),
appPassword: z.string().optional(),
apiToken: z.string().optional(),
bitbucketWorkspaceName: z.string().optional(),
gitProviderId: z.string().optional(),
authId: z.string().min(1),
@@ -46,9 +50,19 @@ export const apiBitbucketTestConnection = createSchema
.extend({
bitbucketId: z.string().min(1),
bitbucketUsername: z.string().optional(),
bitbucketEmail: z.string().email().optional(),
workspaceName: z.string().optional(),
apiToken: z.string().optional(),
appPassword: z.string().optional(),
})
.pick({ bitbucketId: true, bitbucketUsername: true, workspaceName: true });
.pick({
bitbucketId: true,
bitbucketUsername: true,
bitbucketEmail: true,
workspaceName: true,
apiToken: true,
appPassword: true,
});

export const apiFindBitbucketBranches = z.object({
owner: z.string(),
@@ -60,6 +74,9 @@ export const apiUpdateBitbucket = createSchema.extend({
bitbucketId: z.string().min(1),
name: z.string().min(1),
bitbucketUsername: z.string().optional(),
bitbucketEmail: z.string().email().optional(),
appPassword: z.string().optional(),
apiToken: z.string().optional(),
bitbucketWorkspaceName: z.string().optional(),
organizationId: z.string().optional(),
});
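The widened .pick(...) above is the substantive fix: fields added via .extend() are stripped again unless .pick() re-selects them, so the old three-key pick silently dropped the new email/token fields. A minimal sketch of the pattern with a hypothetical base schema:

import { z } from "zod";

const base = z.object({ id: z.string() });
const extended = base.extend({ apiToken: z.string().optional() });

// pick() keeps only the listed keys; anything omitted vanishes from the type
const narrow = extended.pick({ id: true });                // { id }
const full = extended.pick({ id: true, apiToken: true });  // { id, apiToken? }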
@@ -21,6 +21,7 @@ export const deploymentStatus = pgEnum("deploymentStatus", [
"running",
"done",
"error",
"cancelled",
]);

export const deployments = pgTable("deployment", {
@@ -58,4 +58,5 @@ export const apiUpdateGithub = createSchema.extend({
githubId: z.string().min(1),
name: z.string().min(1),
gitProviderId: z.string().min(1),
githubAppName: z.string().min(1),
});
@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -62,6 +62,7 @@ export const mariadb = pgTable("mariadb", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -128,6 +129,7 @@ const createSchema = createInsertSchema(mariadb, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateMariaDB = createSchema
@@ -1,5 +1,12 @@
import { relations } from "drizzle-orm";
import { boolean, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import {
bigint,
boolean,
integer,
json,
pgTable,
text,
} from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -58,6 +65,7 @@ export const mongo = pgTable("mongo", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -118,6 +126,7 @@ const createSchema = createInsertSchema(mongo, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateMongo = createSchema
@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -60,6 +60,7 @@ export const mysql = pgTable("mysql", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -125,6 +126,7 @@ const createSchema = createInsertSchema(mysql, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateMySql = createSchema
@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -60,6 +60,7 @@ export const postgres = pgTable("postgres", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -118,6 +119,7 @@ const createSchema = createInsertSchema(postgres, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreatePostgres = createSchema
@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -60,6 +60,7 @@ export const redis = pgTable("redis", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),

environmentId: text("environmentId")
@@ -108,6 +109,7 @@ const createSchema = createInsertSchema(redis, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateRedis = createSchema
@@ -323,6 +323,11 @@ export const apiUpdateWebServerMonitoring = z.object({
});

export const apiUpdateUser = createSchema.partial().extend({
email: z
.string()
.email("Please enter a valid email address")
.min(1, "Email is required")
.optional(),
password: z.string().optional(),
currentPassword: z.string().optional(),
name: z.string().optional(),
@@ -68,6 +68,7 @@ export * from "./utils/backups/postgres";
export * from "./utils/backups/utils";
export * from "./utils/backups/web-server";
export * from "./utils/builders/compose";
export * from "./utils/startup/cancell-deployments";
export * from "./utils/builders/docker-file";
export * from "./utils/builders/drop";
export * from "./utils/builders/heroku";
@@ -92,31 +92,48 @@ export const suggestVariants = async ({

const { object } = await generateObject({
model,
output: "array",
output: "object",
schema: z.object({
id: z.string(),
name: z.string(),
shortDescription: z.string(),
description: z.string(),
suggestions: z.array(
z.object({
id: z.string(),
name: z.string(),
shortDescription: z.string(),
description: z.string(),
}),
),
}),
prompt: `
Act as advanced DevOps engineer and generate a list of open source projects what can cover users needs(up to 3 items), the suggestion
should include id, name, shortDescription, and description. Use slug of title for id.
Act as advanced DevOps engineer and generate a list of open source projects what can cover users needs(up to 3 items).

Return your response as a JSON object with the following structure:
{
"suggestions": [
{
"id": "project-slug",
"name": "Project Name",
"shortDescription": "Brief one-line description",
"description": "Detailed description"
}
]
}

Important rules for the response:
1. The description field should ONLY contain a plain text description of the project, its features, and use cases
2. Do NOT include any code snippets, configuration examples, or installation instructions in the description
3. The shortDescription should be a single-line summary focusing on the main technologies
1. Use slug format for the id field (lowercase, hyphenated)
2. The description field should ONLY contain a plain text description of the project, its features, and use cases
3. Do NOT include any code snippets, configuration examples, or installation instructions in the description
4. The shortDescription should be a single-line summary focusing on the main technologies
5. All projects should be installable in docker and have docker compose support

User wants to create a new project with the following details, it should be installable in docker and can be docker compose generated for it:
User wants to create a new project with the following details:

${input}
`,
});

if (object?.length) {
if (object?.suggestions?.length) {
const result = [];
for (const suggestion of object) {
for (const suggestion of object.suggestions) {
try {
const { object: docker } = await generateObject({
model,
@@ -136,16 +153,29 @@ export const suggestVariants = async ({
serviceName: z.string(),
}),
),
configFiles: z.array(
z.object({
content: z.string(),
filePath: z.string(),
}),
),
configFiles: z
.array(
z.object({
content: z.string(),
filePath: z.string(),
}),
)
.optional(),
}),
prompt: `
Act as advanced DevOps engineer and generate docker compose with environment variables and domain configurations needed to install the following project.
Return the docker compose as a YAML string and environment variables configuration. Follow these rules:

Return your response as a JSON object with this structure:
{
"dockerCompose": "yaml string here",
"envVariables": [{"name": "VAR_NAME", "value": "example_value"}],
"domains": [{"host": "domain.com", "port": 3000, "serviceName": "service"}],
"configFiles": [{"content": "file content", "filePath": "path/to/file"}]
}

Note: configFiles is optional - only include it if configuration files are absolutely required.

Follow these rules:

Docker Compose Rules:
1. Use placeholder like \${VARIABLE_NAME-default} for generated variables in the docker-compose.yml
@@ -198,6 +228,7 @@ export const suggestVariants = async ({
console.error("Error in docker compose generation:", error);
}
}

return result;
}
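For reference, the shape of the generateObject call this hunk moves to: with output left at its "object" default in the AI SDK, the zod schema describes a single root object, so the list must live under a key such as suggestions rather than being the root value as with output: "array". A minimal sketch, assuming the ai package and a model configured elsewhere:

import { generateObject } from "ai";
import { z } from "zod";

// `model` stands in for any configured AI SDK provider model (assumption)
async function suggest(model: any, input: string) {
  const { object } = await generateObject({
    model,
    // output defaults to "object": one root object, array nested under a key
    schema: z.object({
      suggestions: z.array(z.object({ id: z.string(), name: z.string() })),
    }),
    prompt: `Suggest up to 3 open source projects for: ${input}`,
  });
  return object.suggestions; // typed as { id: string; name: string }[]
}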
@@ -68,10 +68,26 @@ export const updateBitbucket = async (
input: typeof apiUpdateBitbucket._type,
) => {
return await db.transaction(async (tx) => {
// First get the current bitbucket provider to get gitProviderId
const currentProvider = await tx.query.bitbucket.findFirst({
where: eq(bitbucket.bitbucketId, bitbucketId),
});

if (!currentProvider) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Bitbucket provider not found",
});
}

const result = await tx
.update(bitbucket)
.set({
...input,
bitbucketUsername: input.bitbucketUsername,
bitbucketEmail: input.bitbucketEmail,
appPassword: input.appPassword,
apiToken: input.apiToken,
bitbucketWorkspaceName: input.bitbucketWorkspaceName,
})
.where(eq(bitbucket.bitbucketId, bitbucketId))
.returning();
@@ -83,7 +99,7 @@ export const updateBitbucket = async (
name: input.name,
organizationId: input.organizationId,
})
.where(eq(gitProvider.gitProviderId, input.gitProviderId))
.where(eq(gitProvider.gitProviderId, currentProvider.gitProviderId))
.returning();
}
@@ -603,6 +603,21 @@ const BUNNY_CDN_IPS = new Set([
"89.187.184.176",
]);

// Arvancloud IP ranges
// https://www.arvancloud.ir/fa/ips.txt
const ARVANCLOUD_IP_RANGES = [
"185.143.232.0/22",
"188.229.116.16/29",
"94.101.182.0/27",
"2.144.3.128/28",
"89.45.48.64/28",
"37.32.16.0/27",
"37.32.17.0/27",
"37.32.18.0/27",
"37.32.19.0/27",
"185.215.232.0/22",
];

const CDN_PROVIDERS: CDNProvider[] = [
{
name: "cloudflare",
@@ -627,6 +642,14 @@ const CDN_PROVIDERS: CDNProvider[] = [
warningMessage:
"Domain is behind Fastly - actual IP is masked by CDN proxy",
},
{
name: "arvancloud",
displayName: "Arvancloud",
checkIp: (ip: string) =>
ARVANCLOUD_IP_RANGES.some((range) => isIPInCIDR(ip, range)),
warningMessage:
"Domain is behind Arvancloud - actual IP is masked by CDN proxy",
},
];

export const detectCDNProvider = (ip: string): CDNProvider | null => {
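isIPInCIDR is an existing helper in this file that the hunk does not show; assuming conventional semantics (the IPv4 address falls inside the CIDR block), a minimal sketch of such a check:

// Hypothetical re-implementation for illustration only; the repo's actual
// isIPInCIDR may differ in signature or IPv6 handling.
function isIPInCIDRSketch(ip: string, cidr: string): boolean {
  const [range, bitsStr] = cidr.split("/");
  const bits = Number(bitsStr);
  // Pack dotted-quad into an unsigned 32-bit integer
  const toInt = (addr: string) =>
    addr.split(".").reduce((acc, octet) => (acc << 8) + Number(octet), 0) >>> 0;
  const mask = bits === 0 ? 0 : (~0 << (32 - bits)) >>> 0;
  return (toInt(ip) & mask) === (toInt(range) & mask);
}

// isIPInCIDRSketch("185.143.234.7", "185.143.232.0/22") === true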
@@ -9,7 +9,7 @@ import {
import { removeDirectoryIfExistsContent } from "@dokploy/server/utils/filesystem/directory";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { dump } from "js-yaml";
import { stringify } from "yaml";
import type { z } from "zod";
import { encodeBase64 } from "../utils/docker/utils";
import { execAsyncRemote } from "../utils/process/execAsync";
@@ -101,7 +101,7 @@ const createCertificateFiles = async (certificate: Certificate) => {
],
},
};
const yamlConfig = dump(traefikConfig);
const yamlConfig = stringify(traefikConfig);
const configFile = path.join(certDir, "certificate.yml");

if (certificate.serverId) {
@@ -227,7 +227,7 @@ export const deployCompose = async ({

const buildLink = `${await getDokployUrl()}/dashboard/project/${
compose.environment.projectId
}/services/compose/${compose.composeId}?tab=deployments`;
}/environment/${compose.environmentId}/services/compose/${compose.composeId}?tab=deployments`;
const deployment = await createDeploymentCompose({
composeId: composeId,
title: titleLog,
@@ -335,7 +335,7 @@ export const deployRemoteCompose = async ({

const buildLink = `${await getDokployUrl()}/dashboard/project/${
compose.environment.projectId
}/services/compose/${compose.composeId}?tab=deployments`;
}/environment/${compose.environmentId}/services/compose/${compose.composeId}?tab=deployments`;
const deployment = await createDeploymentCompose({
composeId: composeId,
title: titleLog,
@@ -10,6 +10,22 @@ import { IS_CLOUD } from "../constants";

export type Registry = typeof registry.$inferSelect;

function shEscape(s: string | undefined): string {
if (!s) return "''";
return `'${s.replace(/'/g, `'\\''`)}'`;
}

function safeDockerLoginCommand(
registry: string | undefined,
user: string | undefined,
pass: string | undefined,
) {
const escapedRegistry = shEscape(registry);
const escapedUser = shEscape(user);
const escapedPassword = shEscape(pass);
return `printf %s ${escapedPassword} | docker login ${escapedRegistry} -u ${escapedUser} --password-stdin`;
}
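The helper single-quotes each argument and rewrites embedded quotes as '\'' (the standard POSIX idiom), so credentials can no longer inject shell syntax the way the old echo-interpolation could. A quick illustration of the output, with hypothetical credentials:

// A password containing a quote and a shell metacharacter passes through literally:
safeDockerLoginCommand("ghcr.io", "bob", "p'a$s");
// => printf %s 'p'\''a$s' | docker login 'ghcr.io' -u 'bob' --password-stdin
// The old template (`echo ${password} | docker login ...`) would have let $s
// expand and the quote break the command line.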
export const createRegistry = async (
input: typeof apiCreateRegistry._type,
organizationId: string,
@@ -37,7 +53,11 @@ export const createRegistry = async (
message: "Select a server to add the registry",
});
}
const loginCommand = `echo ${input.password} | docker login ${input.registryUrl} --username ${input.username} --password-stdin`;
const loginCommand = safeDockerLoginCommand(
input.registryUrl,
input.username,
input.password,
);
if (input.serverId && input.serverId !== "none") {
await execAsyncRemote(input.serverId, loginCommand);
} else if (newRegistry.registryType === "cloud") {
@@ -91,7 +111,11 @@ export const updateRegistry = async (
.returning()
.then((res) => res[0]);

const loginCommand = `echo ${response?.password} | docker login ${response?.registryUrl} --username ${response?.username} --password-stdin`;
const loginCommand = safeDockerLoginCommand(
response?.registryUrl,
response?.username,
response?.password,
);

if (
IS_CLOUD &&
@@ -336,6 +336,19 @@ export const findMemberById = async (
};

export const updateUser = async (userId: string, userData: Partial<User>) => {
// Validate email if it's being updated
if (userData.email !== undefined) {
if (!userData.email || userData.email.trim() === "") {
throw new Error("Email is required and cannot be empty");
}

// Basic email format validation
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
if (!emailRegex.test(userData.email)) {
throw new Error("Please enter a valid email address");
}
}

const user = await db
.update(users_temp)
.set({
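The regex above is deliberately loose: one non-whitespace run, an @, another run, a dot, and a final run. Roughly what it accepts and rejects:

const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
emailRegex.test("user@example.com"); // true
emailRegex.test("user@localhost");   // false: no dot after the host
emailRegex.test("a b@example.com");  // false: whitespace rejected
// Format-only check; deliverability is not verified.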
@@ -1,7 +1,14 @@
import { chmodSync, existsSync, mkdirSync, writeFileSync } from "node:fs";
import {
chmodSync,
existsSync,
mkdirSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import path from "node:path";
import type { ContainerCreateOptions, CreateServiceOptions } from "dockerode";
import { dump } from "js-yaml";
import { stringify } from "yaml";
import { paths } from "../constants";
import { getRemoteDocker } from "../utils/servers/remote-docker";
import type { FileConfig } from "../utils/traefik/file-types";
@@ -234,7 +241,7 @@ export const createDefaultServerTraefikConfig = () => {
},
};

const yamlStr = dump(config);
const yamlStr = stringify(config);
mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
writeFileSync(
path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`),
@@ -308,7 +315,7 @@ export const getDefaultTraefikConfig = () => {
}),
};

const yamlStr = dump(configObject);
const yamlStr = stringify(configObject);

return yamlStr;
};
@@ -362,7 +369,7 @@ export const getDefaultServerTraefikConfig = () => {
},
};

const yamlStr = dump(configObject);
const yamlStr = stringify(configObject);

return yamlStr;
};
@@ -375,13 +382,26 @@ export const createDefaultTraefikConfig = () => {
if (existsSync(acmeJsonPath)) {
chmodSync(acmeJsonPath, "600");
}
if (existsSync(mainConfig)) {
console.log("Main config already exists");
return;
}
const yamlStr = getDefaultTraefikConfig();

// Create the traefik directory first
mkdirSync(MAIN_TRAEFIK_PATH, { recursive: true });

// Check if traefik.yml exists and handle the case where it might be a directory
if (existsSync(mainConfig)) {
const stats = statSync(mainConfig);
if (stats.isDirectory()) {
// If traefik.yml is a directory, remove it
console.log("Found traefik.yml as directory, removing it...");
rmSync(mainConfig, { recursive: true, force: true });
} else if (stats.isFile()) {
console.log("Main config already exists");
return;
}
}

const yamlStr = getDefaultTraefikConfig();
writeFileSync(mainConfig, yamlStr, "utf8");
console.log("Traefik config created successfully");
};

export const getDefaultMiddlewares = () => {
@@ -397,7 +417,7 @@ export const getDefaultMiddlewares = () => {
},
},
};
const yamlStr = dump(defaultMiddlewares);
const yamlStr = stringify(defaultMiddlewares);
return yamlStr;
};
export const createDefaultMiddlewares = () => {
@@ -89,7 +89,7 @@ export const getMariadbBackupCommand = (
databaseUser: string,
databasePassword: string,
) => {
return `docker exec -i $CONTAINER_ID bash -c "set -o pipefail; mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
return `docker exec -i $CONTAINER_ID bash -c "set -o pipefail; mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --single-transaction --quick --databases ${database} | gzip"`;
};

export const getMysqlBackupCommand = (
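The two new flags are standard mariadb-dump/mysqldump options: --single-transaction takes a consistent snapshot inside one transaction instead of locking tables, and --quick streams rows instead of buffering whole tables in memory. Roughly how the builder is used, with hypothetical arguments:

// Resulting command for a database named "shop":
getMariadbBackupCommand("shop", "backup_user", "secret");
// => docker exec -i $CONTAINER_ID bash -c "set -o pipefail;
//    mariadb-dump --user='backup_user' --password='secret'
//    --single-transaction --quick --databases shop | gzip"
// (one line in practice; wrapped here for readability)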
@@ -142,6 +142,7 @@ export const mechanizeDockerContainer = async (
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(application);

const bindsMount = generateBindMounts(mounts);
@@ -191,6 +192,8 @@ export const mechanizeDockerContainer = async (
})),
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};

try {
@@ -220,8 +223,8 @@ const getImageName = (application: ApplicationNested) => {
if (registry) {
const { registryUrl, imagePrefix, username } = registry;
const registryTag = imagePrefix
? `${registryUrl}/${imagePrefix}/${imageName}`
: `${registryUrl}/${username}/${imageName}`;
? `${registryUrl ? `${registryUrl}/` : ""}${imagePrefix}/${imageName}`
: `${registryUrl ? `${registryUrl}/` : ""}${username}/${imageName}`;
return registryTag;
}
@@ -22,8 +22,8 @@ export const uploadImage = async (
// For ghcr.io: ghcr.io/username/image:tag
// For docker.io: docker.io/username/image:tag
const registryTag = imagePrefix
? `${registryUrl}/${imagePrefix}/${imageName}`
: `${registryUrl}/${username}/${imageName}`;
? `${registryUrl ? `${registryUrl}/` : ""}${imagePrefix}/${imageName}`
: `${registryUrl ? `${registryUrl}/` : ""}${username}/${imageName}`;

try {
writeStream.write(
@@ -45,6 +45,7 @@ export const buildMariadb = async (mariadb: MariadbNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(mariadb);
const resources = calculateResources({
memoryLimit,
@@ -102,6 +103,8 @@ export const buildMariadb = async (mariadb: MariadbNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};
try {
const service = docker.getService(appName);
@@ -91,6 +91,7 @@ ${command ?? "wait $MONGOD_PID"}`;
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(mongo);

const resources = calculateResources({
@@ -155,6 +156,8 @@ ${command ?? "wait $MONGOD_PID"}`;
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};

try {
@@ -51,6 +51,7 @@ export const buildMysql = async (mysql: MysqlNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(mysql);
const resources = calculateResources({
memoryLimit,
@@ -108,6 +109,8 @@ export const buildMysql = async (mysql: MysqlNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};
try {
const service = docker.getService(appName);
@@ -44,6 +44,7 @@ export const buildPostgres = async (postgres: PostgresNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(postgres);
const resources = calculateResources({
memoryLimit,
@@ -101,6 +102,8 @@ export const buildPostgres = async (postgres: PostgresNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};
try {
const service = docker.getService(appName);
@@ -42,6 +42,7 @@ export const buildRedis = async (redis: RedisNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(redis);
const resources = calculateResources({
memoryLimit,
@@ -98,6 +99,8 @@ export const buildRedis = async (redis: RedisNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};

try {
@@ -1,5 +1,5 @@
import { findComposeById } from "@dokploy/server/services/compose";
import { dump } from "js-yaml";
import { stringify } from "yaml";
import { addAppNameToAllServiceNames } from "./collision/root-network";
import { generateRandomHash } from "./compose";
import { addSuffixToAllVolumes } from "./compose/volume";
@@ -59,7 +59,7 @@ export const randomizeIsolatedDeploymentComposeFile = async (
)
: composeData;

return dump(newComposeFile);
return stringify(newComposeFile);
};

export const randomizeDeployableSpecificationFile = (
@@ -1,6 +1,6 @@
import crypto from "node:crypto";
import { findComposeById } from "@dokploy/server/services/compose";
import { dump, load } from "js-yaml";
import { parse, stringify } from "yaml";
import { addSuffixToAllConfigs } from "./compose/configs";
import { addSuffixToAllNetworks } from "./compose/network";
import { addSuffixToAllSecrets } from "./compose/secrets";
@@ -18,13 +18,13 @@ export const randomizeComposeFile = async (
) => {
const compose = await findComposeById(composeId);
const composeFile = compose.composeFile;
const composeData = load(composeFile) as ComposeSpecification;
const composeData = parse(composeFile) as ComposeSpecification;

const randomSuffix = suffix || generateRandomHash();

const newComposeFile = addSuffixToAllProperties(composeData, randomSuffix);

return dump(newComposeFile);
return stringify(newComposeFile);
};

export const randomizeSpecificationFile = (
@@ -4,7 +4,7 @@ import { join } from "node:path";
import { paths } from "@dokploy/server/constants";
import type { Compose } from "@dokploy/server/services/compose";
import type { Domain } from "@dokploy/server/services/domain";
import { dump, load } from "js-yaml";
import { parse, stringify } from "yaml";
import { execAsyncRemote } from "../process/execAsync";
import {
cloneRawBitbucketRepository,
@@ -92,7 +92,7 @@ export const loadDockerCompose = async (

if (existsSync(path)) {
const yamlStr = readFileSync(path, "utf8");
const parsedConfig = load(yamlStr) as ComposeSpecification;
const parsedConfig = parse(yamlStr) as ComposeSpecification;
return parsedConfig;
}
return null;
@@ -115,7 +115,7 @@ export const loadDockerComposeRemote = async (
return null;
}
if (!stdout) return null;
const parsedConfig = load(stdout) as ComposeSpecification;
const parsedConfig = parse(stdout) as ComposeSpecification;
return parsedConfig;
} catch {
return null;
@@ -141,7 +141,7 @@ export const writeDomainsToCompose = async (
const composeConverted = await addDomainToCompose(compose, domains);

const path = getComposePath(compose);
const composeString = dump(composeConverted, { lineWidth: 1000 });
const composeString = stringify(composeConverted, { lineWidth: 1000 });
try {
await writeFile(path, composeString, "utf8");
} catch (error) {
@@ -169,7 +169,7 @@ exit 1;
`;
}
if (compose.serverId) {
const composeString = dump(composeConverted, { lineWidth: 1000 });
const composeString = stringify(composeConverted, { lineWidth: 1000 });
const encodedContent = encodeBase64(composeString);
return `echo "${encodedContent}" | base64 -d > "${path}";`;
}
@@ -251,11 +251,15 @@ export const addDomainToCompose = async (
}
labels.unshift(...httpLabels);
if (!compose.isolatedDeployment) {
if (!labels.includes("traefik.docker.network=dokploy-network")) {
labels.unshift("traefik.docker.network=dokploy-network");
}
if (!labels.includes("traefik.swarm.network=dokploy-network")) {
labels.unshift("traefik.swarm.network=dokploy-network");
if (compose.composeType === "docker-compose") {
if (!labels.includes("traefik.docker.network=dokploy-network")) {
labels.unshift("traefik.docker.network=dokploy-network");
}
} else {
// Stack Case
if (!labels.includes("traefik.swarm.network=dokploy-network")) {
labels.unshift("traefik.swarm.network=dokploy-network");
}
}
}
}
@@ -283,7 +287,7 @@ export const writeComposeFile = async (
const path = getComposePath(compose);

try {
const composeFile = dump(composeSpec, {
const composeFile = stringify(composeSpec, {
lineWidth: 1000,
});
fs.writeFileSync(path, composeFile, "utf8");
@@ -394,8 +394,14 @@ export const generateConfigContainer = (
replicas,
mounts,
networkSwarm,
stopGracePeriodSwarm,
} = application;

const sanitizedStopGracePeriodSwarm =
typeof stopGracePeriodSwarm === "bigint"
? Number(stopGracePeriodSwarm)
: stopGracePeriodSwarm;

const haveMounts = mounts && mounts.length > 0;

return {
@@ -444,6 +450,10 @@ export const generateConfigContainer = (
Order: "start-first",
},
}),
...(sanitizedStopGracePeriodSwarm !== null &&
sanitizedStopGracePeriodSwarm !== undefined && {
StopGracePeriod: sanitizedStopGracePeriodSwarm,
}),
...(networkSwarm
? {
Networks: networkSwarm,
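The Number() conversion matters because the value ends up in dockerode's JSON service spec, and JSON.stringify throws on BigInt. A minimal illustration:

const stopGracePeriodSwarm = 10_000_000_000n; // BigInt straight from the bigint column
// JSON.stringify({ StopGracePeriod: stopGracePeriodSwarm });
// -> TypeError: Do not know how to serialize a BigInt
const sanitized = Number(stopGracePeriodSwarm); // 10000000000
JSON.stringify({ StopGracePeriod: sanitized }); // '{"StopGracePeriod":10000000000}'
// Safe here, since grace periods stay far below Number.MAX_SAFE_INTEGER.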
@@ -33,6 +33,7 @@ export const sendEmailNotification = async (
to: toAddresses.join(", "),
subject,
html: htmlContent,
textEncoding: "base64",
});
} catch (err) {
console.log(err);
@@ -5,7 +5,10 @@ import type {
apiBitbucketTestConnection,
apiFindBitbucketBranches,
} from "@dokploy/server/db/schema";
import { findBitbucketById } from "@dokploy/server/services/bitbucket";
import {
type Bitbucket,
findBitbucketById,
} from "@dokploy/server/services/bitbucket";
import type { Compose } from "@dokploy/server/services/compose";
import type { InferResultType } from "@dokploy/server/types/with";
import { TRPCError } from "@trpc/server";
@@ -23,6 +26,61 @@ export type ComposeWithBitbucket = InferResultType<
{ bitbucket: true }
>;

export const getBitbucketCloneUrl = (
bitbucketProvider: {
apiToken?: string | null;
bitbucketUsername?: string | null;
appPassword?: string | null;
bitbucketEmail?: string | null;
bitbucketWorkspaceName?: string | null;
} | null,
repoClone: string,
) => {
if (!bitbucketProvider) {
throw new Error("Bitbucket provider is required");
}

if (bitbucketProvider.apiToken) {
return `https://x-bitbucket-api-token-auth:${bitbucketProvider.apiToken}@${repoClone}`;
}

// For app passwords, use username:app_password format
if (!bitbucketProvider.bitbucketUsername || !bitbucketProvider.appPassword) {
throw new Error(
"Username and app password are required when not using API token",
);
}
return `https://${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}@${repoClone}`;
};

export const getBitbucketHeaders = (bitbucketProvider: Bitbucket) => {
if (bitbucketProvider.apiToken) {
// According to Bitbucket official docs, for API calls with API tokens:
// "You will need both your Atlassian account email and an API token"
// Use: {atlassian_account_email}:{api_token}

if (!bitbucketProvider.bitbucketEmail) {
throw new Error(
"Atlassian account email is required when using API token for API calls",
);
}

return {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketEmail}:${bitbucketProvider.apiToken}`).toString("base64")}`,
};
}

// For app passwords, use HTTP Basic auth with username and app password
if (!bitbucketProvider.bitbucketUsername || !bitbucketProvider.appPassword) {
throw new Error(
"Username and app password are required when not using API token",
);
}
return {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
};
};
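Both helpers centralize the auth plumbing that was previously inlined at every call site. A quick sketch of the two clone-URL paths, with hypothetical credentials:

// API-token path: fixed username, token as password
getBitbucketCloneUrl({ apiToken: "atl-token" }, "bitbucket.org/acme/app.git");
// => https://x-bitbucket-api-token-auth:atl-token@bitbucket.org/acme/app.git

// App-password path: username + app password
getBitbucketCloneUrl(
  { bitbucketUsername: "bob", appPassword: "app-pass" },
  "bitbucket.org/acme/app.git",
);
// => https://bob:app-pass@bitbucket.org/acme/app.git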
export const cloneBitbucketRepository = async (
entity: ApplicationWithBitbucket | ComposeWithBitbucket,
logPath: string,
@@ -51,7 +109,7 @@ export const cloneBitbucketRepository = async (
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucket?.bitbucketUsername}:${bitbucket?.appPassword}@${repoclone}`;
const cloneUrl = getBitbucketCloneUrl(bitbucket, repoclone);
try {
writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
const cloneArgs = [
@@ -103,7 +161,7 @@ export const cloneRawBitbucketRepository = async (entity: Compose) => {
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
const cloneUrl = getBitbucketCloneUrl(bitbucketProvider, repoclone);

try {
const cloneArgs = [
@@ -153,7 +211,7 @@ export const cloneRawBitbucketRepositoryRemote = async (compose: Compose) => {
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
const cloneUrl = getBitbucketCloneUrl(bitbucketProvider, repoclone);

try {
const cloneCommand = `
@@ -206,7 +264,7 @@ export const getBitbucketCloneCommand = async (
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
const cloneUrl = getBitbucketCloneUrl(bitbucketProvider, repoclone);

const cloneCommand = `
rm -rf ${outputPath};
@@ -241,9 +299,7 @@ export const getBitbucketRepositories = async (bitbucketId?: string) => {
while (url) {
const response = await fetch(url, {
method: "GET",
headers: {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
},
headers: getBitbucketHeaders(bitbucketProvider),
});

if (!response.ok) {
@@ -279,35 +335,43 @@ export const getBitbucketBranches = async (
}
const bitbucketProvider = await findBitbucketById(input.bitbucketId);
const { owner, repo } = input;
const url = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/refs/branches?pagelen=100`;
let url = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/refs/branches?pagelen=1`;
let allBranches: {
name: string;
commit: {
sha: string;
};
}[] = [];

try {
const response = await fetch(url, {
method: "GET",
headers: {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
},
});

if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `HTTP error! status: ${response.status}`,
while (url) {
const response = await fetch(url, {
method: "GET",
headers: getBitbucketHeaders(bitbucketProvider),
});

if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `HTTP error! status: ${response.status}`,
});
}

const data = await response.json();

const mappedData = data.values.map((branch: any) => {
return {
name: branch.name,
commit: {
sha: branch.target.hash,
},
};
});
allBranches = allBranches.concat(mappedData);
url = data.next || null;
}

const data = await response.json();

const mappedData = data.values.map((branch: any) => {
return {
name: branch.name,
commit: {
sha: branch.target.hash,
},
};
});

return mappedData as {
return allBranches as {
name: string;
commit: {
sha: string;
@@ -335,9 +399,7 @@ export const testBitbucketConnection = async (
try {
const response = await fetch(url, {
method: "GET",
headers: {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
},
headers: getBitbucketHeaders(bitbucketProvider),
});

if (!response.ok) {
@@ -99,6 +99,19 @@ export const refreshGiteaToken = async (giteaProviderId: string) => {
}
};

const buildGiteaCloneUrl = (
giteaUrl: string,
accessToken: string,
owner: string,
repository: string,
) => {
const protocol = giteaUrl.startsWith("http://") ? "http" : "https";
const baseUrl = giteaUrl.replace(/^https?:\/\//, "");
const repoClone = `${owner}/${repository}.git`;
const cloneUrl = `${protocol}://oauth2:${accessToken}@${baseUrl}/${repoClone}`;
return cloneUrl;
};
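Unlike the old inline templates, the helper preserves the provider's scheme, so plain-HTTP Gitea instances no longer get a broken https:// clone URL. For example, with a hypothetical instance and token:

buildGiteaCloneUrl("http://gitea.lan:3000", "tok123", "acme", "app");
// => http://oauth2:tok123@gitea.lan:3000/acme/app.git

buildGiteaCloneUrl("https://gitea.example.com", "tok123", "acme", "app");
// => https://oauth2:tok123@gitea.example.com/acme/app.git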
export type ApplicationWithGitea = InferResultType<
"applications",
{ gitea: true }
@@ -148,9 +161,13 @@ export const getGiteaCloneCommand = async (
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");

const baseUrl = gitea?.giteaUrl.replace(/^https?:\/\//, "");
const repoClone = `${giteaOwner}/${giteaRepository}.git`;
const cloneUrl = `https://oauth2:${gitea?.accessToken}@${baseUrl}/${repoClone}`;
const cloneUrl = buildGiteaCloneUrl(
gitea?.giteaUrl!,
gitea?.accessToken!,
giteaOwner!,
giteaRepository!,
);

const cloneCommand = `
rm -rf ${outputPath};
@@ -205,8 +222,12 @@ export const cloneGiteaRepository = async (
await recreateDirectory(outputPath);

const repoClone = `${giteaOwner}/${giteaRepository}.git`;
const baseUrl = giteaProvider.giteaUrl.replace(/^https?:\/\//, "");
const cloneUrl = `https://oauth2:${giteaProvider.accessToken}@${baseUrl}/${repoClone}`;
const cloneUrl = buildGiteaCloneUrl(
giteaProvider.giteaUrl,
giteaProvider.accessToken!,
giteaOwner!,
giteaRepository!,
);

writeStream.write(`\nCloning Repo ${repoClone} to ${outputPath}...\n`);

@@ -269,9 +290,12 @@ export const cloneRawGiteaRepository = async (entity: Compose) => {
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);

const repoClone = `${giteaOwner}/${giteaRepository}.git`;
const baseUrl = giteaProvider.giteaUrl.replace(/^https?:\/\//, "");
const cloneUrl = `https://oauth2:${giteaProvider.accessToken}@${baseUrl}/${repoClone}`;
const cloneUrl = buildGiteaCloneUrl(
giteaProvider.giteaUrl,
giteaProvider.accessToken!,
giteaOwner!,
giteaRepository!,
);

try {
await spawnAsync("git", [
@@ -317,9 +341,13 @@ export const cloneRawGiteaRepositoryRemote = async (compose: Compose) => {
const giteaProvider = await findGiteaById(giteaId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const repoClone = `${giteaOwner}/${giteaRepository}.git`;
const baseUrl = giteaProvider.giteaUrl.replace(/^https?:\/\//, "");
const cloneUrl = `https://oauth2:${giteaProvider.accessToken}@${baseUrl}/${repoClone}`;
const cloneUrl = buildGiteaCloneUrl(
giteaProvider.giteaUrl,
giteaProvider.accessToken!,
giteaOwner!,
giteaRepository!,
);

try {
const command = `
rm -rf ${outputPath};
@@ -362,17 +390,22 @@ export const testGiteaConnection = async (input: { giteaId: string }) => {
}

const baseUrl = provider.giteaUrl.replace(/\/+$/, "");
const limit = 30;
let allRepos = 0;
let nextUrl = `${baseUrl}/api/v1/repos/search?limit=${limit}`;

while (nextUrl) {
const response = await fetch(nextUrl, {
headers: {
Accept: "application/json",
Authorization: `token ${provider.accessToken}`,
// Use /user/repos to get authenticated user's repositories with pagination
let allRepos = 0;
let page = 1;
const limit = 50; // Max per page

while (true) {
const response = await fetch(
`${baseUrl}/api/v1/user/repos?page=${page}&limit=${limit}`,
{
headers: {
Accept: "application/json",
Authorization: `token ${provider.accessToken}`,
},
},
});
);

if (!response.ok) {
throw new Error(
@@ -381,22 +414,18 @@ export const testGiteaConnection = async (input: { giteaId: string }) => {
}

const repos = await response.json();
allRepos += repos.data.length;

const linkHeader = response.headers.get("link");
nextUrl = "";

if (linkHeader) {
const nextLink = linkHeader
.split(",")
.find((link) => link.includes('rel="next"'));
if (nextLink) {
const matches = nextLink.match(/<([^>]+)>/);
if (matches?.[1]) {
nextUrl = matches[1];
}
}
if (!Array.isArray(repos) || repos.length === 0) {
break; // No more repositories
}

allRepos += repos.length;

// Check if there are more pages
if (repos.length < limit) {
break; // Last page (fewer results than limit)
}

page++;
}

await updateGitea(giteaId, {
@@ -418,17 +447,22 @@ export const getGiteaRepositories = async (giteaId?: string) => {
const giteaProvider = await findGiteaById(giteaId);

const baseUrl = giteaProvider.giteaUrl.replace(/\/+$/, "");
const limit = 30;
let allRepositories: any[] = [];
let nextUrl = `${baseUrl}/api/v1/repos/search?limit=${limit}`;

while (nextUrl) {
const response = await fetch(nextUrl, {
headers: {
Accept: "application/json",
Authorization: `token ${giteaProvider.accessToken}`,
// Use /user/repos to get authenticated user's repositories with pagination
let allRepositories: any[] = [];
let page = 1;
const limit = 50; // Max per page

while (true) {
const response = await fetch(
`${baseUrl}/api/v1/user/repos?page=${page}&limit=${limit}`,
{
headers: {
Accept: "application/json",
Authorization: `token ${giteaProvider.accessToken}`,
},
},
});
);

if (!response.ok) {
throw new TRPCError({
@@ -437,23 +471,19 @@ export const getGiteaRepositories = async (giteaId?: string) => {
});
}

const result = await response.json();
allRepositories = [...allRepositories, ...result.data];

const linkHeader = response.headers.get("link");
nextUrl = "";

if (linkHeader) {
const nextLink = linkHeader
.split(",")
.find((link) => link.includes('rel="next"'));
if (nextLink) {
const matches = nextLink.match(/<([^>]+)>/);
if (matches?.[1]) {
nextUrl = matches[1];
}
}
const repos = await response.json();
if (!Array.isArray(repos) || repos.length === 0) {
break; // No more repositories
}

allRepositories = [...allRepositories, ...repos];

// Check if there are more pages
if (repos.length < limit) {
break; // Last page (fewer results than limit)
}

page++;
}

return (
@@ -482,25 +512,43 @@ export const getGiteaBranches = async (input: {
const giteaProvider = await findGiteaById(input.giteaId);

const baseUrl = giteaProvider.giteaUrl.replace(/\/+$/, "");
const url = `${baseUrl}/api/v1/repos/${input.owner}/${input.repo}/branches`;

const response = await fetch(url, {
headers: {
Accept: "application/json",
Authorization: `token ${giteaProvider.accessToken}`,
},
});
// Handle pagination for branches
let allBranches: any[] = [];
let page = 1;
const limit = 50; // Max per page

if (!response.ok) {
throw new Error(`Failed to fetch branches: ${response.statusText}`);
while (true) {
const response = await fetch(
`${baseUrl}/api/v1/repos/${input.owner}/${input.repo}/branches?page=${page}&limit=${limit}`,
{
headers: {
Accept: "application/json",
Authorization: `token ${giteaProvider.accessToken}`,
},
},
);

if (!response.ok) {
throw new Error(`Failed to fetch branches: ${response.statusText}`);
}

const branches = await response.json();
if (!Array.isArray(branches) || branches.length === 0) {
break; // No more branches
}

allBranches = [...allBranches, ...branches];

// Check if there are more pages
if (branches.length < limit) {
break; // Last page (fewer results than limit)
}

page++;
}

const branches = await response.json();

if (!branches) {
return [];
}
return branches?.map((branch: any) => ({
return allBranches?.map((branch: any) => ({
id: branch.name,
name: branch.name,
commit: {
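The same page/limit loop now appears three times in this file. Its generic shape, sketched as a standalone helper (hypothetical, not part of the commit):

// Generic page/limit pagination against a JSON-array endpoint.
// Assumes the API returns fewer than `limit` items on the last page.
async function fetchAllPages<T>(
  buildUrl: (page: number, limit: number) => string,
  headers: Record<string, string>,
  limit = 50,
): Promise<T[]> {
  const all: T[] = [];
  for (let page = 1; ; page++) {
    const response = await fetch(buildUrl(page, limit), { headers });
    if (!response.ok) throw new Error(`HTTP ${response.status}`);
    const items = (await response.json()) as T[];
    if (!Array.isArray(items) || items.length === 0) break;
    all.push(...items);
    if (items.length < limit) break; // short page means last page
  }
  return all;
}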
@@ -171,7 +171,7 @@ export const cloneGithubRepository = async ({
const cloneUrl = `https://oauth2:${token}@${repoclone}`;

try {
writeStream.write(`\nClonning Repo ${repoclone} to ${outputPath}: ✅\n`);
writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
const cloneArgs = [
"clone",
"--branch",
@@ -401,7 +401,7 @@ export const cloneRawGitlabRepositoryRemote = async (compose: Compose) => {
const {
appName,
gitlabPathNamespace,
branch,
gitlabBranch,
gitlabId,
serverId,
enableSubmodules,
@@ -429,7 +429,7 @@ export const cloneRawGitlabRepositoryRemote = async (compose: Compose) => {
try {
const command = `
rm -rf ${outputPath};
git clone --branch ${branch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} ${cloneUrl} ${outputPath}
git clone --branch ${gitlabBranch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} ${cloneUrl} ${outputPath}
`;
await execAsyncRemote(serverId, command);
} catch (error) {
packages/server/src/utils/startup/cancell-deployments.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { deployments } from "@dokploy/server/db/schema";
import { eq } from "drizzle-orm";
import { db } from "../../db/index";

export const initCancelDeployments = async () => {
try {
console.log("Setting up cancel deployments....");

const result = await db
.update(deployments)
.set({
status: "cancelled",
})
.where(eq(deployments.status, "running"))
.returning();

console.log(`Cancelled ${result.length} deployments`);
} catch (error) {
console.error(error);
}
};
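Together with the new "cancelled" enum value and the export added to the package index above, this is a startup sweep: any deployment still marked "running" from before a restart is flipped to "cancelled" so it cannot appear in-flight forever. A sketch of the intended call site (hypothetical wiring; the commit itself only adds the export):

import { initCancelDeployments } from "@dokploy/server";

// Run once during server boot, before new deployments are scheduled,
// so rows orphaned by a crash or restart are marked cancelled.
async function onServerStart() {
  await initCancelDeployments();
}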
@@ -3,7 +3,7 @@ import path from "node:path";
import { createInterface } from "node:readline";
import { paths } from "@dokploy/server/constants";
import type { Domain } from "@dokploy/server/services/domain";
import { dump, load } from "js-yaml";
import { parse, stringify } from "yaml";
import { encodeBase64 } from "../docker/utils";
import { execAsyncRemote } from "../process/execAsync";
import type { FileConfig, HttpLoadBalancerService } from "./file-types";
@@ -40,7 +40,7 @@ export const createTraefikConfig = (appName: string) => {
},
},
};
const yamlStr = dump(config);
const yamlStr = stringify(config);
const { DYNAMIC_TRAEFIK_PATH } = paths();
fs.mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
writeFileSync(
@@ -87,7 +87,7 @@ export const loadOrCreateConfig = (appName: string): FileConfig => {
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
const parsedConfig = (load(yamlStr) as FileConfig) || {
const parsedConfig = (parse(yamlStr) as FileConfig) || {
http: { routers: {}, services: {} },
};
return parsedConfig;
@@ -107,7 +107,7 @@ export const loadOrCreateConfigRemote = async (

if (!stdout) return fileConfig;

const parsedConfig = (load(stdout) as FileConfig) || {
const parsedConfig = (parse(stdout) as FileConfig) || {
http: { routers: {}, services: {} },
};
return parsedConfig;
@@ -248,7 +248,7 @@ export const writeTraefikConfig = (
try {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
const yamlStr = dump(traefikConfig);
const yamlStr = stringify(traefikConfig);
fs.writeFileSync(configPath, yamlStr, "utf8");
} catch (e) {
console.error("Error saving the YAML config file:", e);
@@ -263,7 +263,7 @@ export const writeTraefikConfigRemote = async (
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
const yamlStr = dump(traefikConfig);
const yamlStr = stringify(traefikConfig);
await execAsyncRemote(serverId, `echo '${yamlStr}' > ${configPath}`);
} catch (e) {
console.error("Error saving the YAML config file:", e);
@@ -2,7 +2,7 @@ import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { paths } from "@dokploy/server/constants";
import type { Domain } from "@dokploy/server/services/domain";
import { dump, load } from "js-yaml";
import { parse, stringify } from "yaml";
import type { ApplicationNested } from "../builders";
import { execAsyncRemote } from "../process/execAsync";
import { writeTraefikConfigRemote } from "./application";
@@ -76,7 +76,7 @@ export const loadMiddlewares = <T>() => {
throw new Error(`File not found: ${configPath}`);
}
const yamlStr = readFileSync(configPath, "utf8");
const config = load(yamlStr) as T;
const config = parse(yamlStr) as T;
return config;
};

@@ -94,7 +94,7 @@ export const loadRemoteMiddlewares = async (serverId: string) => {
console.error(`Error: ${stderr}`);
throw new Error(`File not found: ${configPath}`);
}
const config = load(stdout) as FileConfig;
const config = parse(stdout) as FileConfig;
return config;
} catch (_) {
throw new Error(`File not found: ${configPath}`);
@@ -103,7 +103,7 @@ export const loadRemoteMiddlewares = async (serverId: string) => {
export const writeMiddleware = <T>(config: T) => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
const newYamlContent = dump(config);
const newYamlContent = stringify(config);
writeFileSync(configPath, newYamlContent, "utf8");
};
@@ -2,7 +2,7 @@ import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { paths } from "@dokploy/server/constants";
import type { User } from "@dokploy/server/services/user";
import { dump, load } from "js-yaml";
import { parse, stringify } from "yaml";
import {
loadOrCreateConfig,
removeTraefikConfig,
@@ -79,13 +79,13 @@ export const updateLetsEncryptEmail = (newEmail: string | null) => {
const { MAIN_TRAEFIK_PATH } = paths();
const configPath = join(MAIN_TRAEFIK_PATH, "traefik.yml");
const configContent = readFileSync(configPath, "utf8");
const config = load(configContent) as MainTraefikConfig;
const config = parse(configContent) as MainTraefikConfig;
if (config?.certificatesResolvers?.letsencrypt?.acme) {
config.certificatesResolvers.letsencrypt.acme.email = newEmail;
} else {
throw new Error("Invalid Let's Encrypt configuration structure.");
}
const newYamlContent = dump(config);
const newYamlContent = stringify(config);
writeFileSync(configPath, newYamlContent, "utf8");
} catch (error) {
throw error;
@@ -1,15 +1,17 @@
import { findVolumeBackupById } from "@dokploy/server/services/volume-backups";
import { scheduledJobs, scheduleJob } from "node-schedule";
import path from "node:path";
import { paths } from "@dokploy/server/constants";
import {
createDeploymentVolumeBackup,
updateDeploymentStatus,
} from "@dokploy/server/services/deployment";
import { findVolumeBackupById } from "@dokploy/server/services/volume-backups";
import {
execAsync,
execAsyncRemote,
} from "@dokploy/server/utils/process/execAsync";
import { backupVolume } from "./backup";
import { scheduledJobs, scheduleJob } from "node-schedule";
import { getS3Credentials, normalizeS3Path } from "../backups/utils";
import { backupVolume } from "./backup";

export const scheduleVolumeBackup = async (volumeBackupId: string) => {
const volumeBackup = await findVolumeBackupById(volumeBackupId);
@@ -76,7 +78,20 @@ export const runVolumeBackup = async (volumeBackupId: string) => {

await updateDeploymentStatus(deployment.deploymentId, "done");
} catch (error) {
const { VOLUME_BACKUPS_PATH } = paths(!!serverId);
const volumeBackupPath = path.join(
VOLUME_BACKUPS_PATH,
volumeBackup.appName,
);
// delete all the .tar files
const command = `rm -rf ${volumeBackupPath}/*.tar`;
if (serverId) {
await execAsyncRemote(serverId, command);
} else {
await execAsync(command);
}
await updateDeploymentStatus(deployment.deploymentId, "error");

console.error(error);
}
};