mirror of
https://github.com/LukeHagar/dokploy.git
synced 2025-12-06 04:19:37 +00:00
Merge branch 'canary' into separate-permission-for-creating-environments-#2593
This commit is contained in:
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToAllProperties } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile1 = `
|
||||
version: "3.8"
|
||||
@@ -61,7 +61,7 @@ secrets:
|
||||
file: ./db_password.txt
|
||||
`;
|
||||
|
||||
const expectedComposeFile1 = load(`
|
||||
const expectedComposeFile1 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -120,7 +120,7 @@ secrets:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all properties in compose file 1", () => {
|
||||
const composeData = load(composeFile1) as ComposeSpecification;
|
||||
const composeData = parse(composeFile1) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllProperties(composeData, suffix);
|
||||
@@ -185,7 +185,7 @@ secrets:
|
||||
file: ./db_password.txt
|
||||
`;
|
||||
|
||||
const expectedComposeFile2 = load(`
|
||||
const expectedComposeFile2 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -243,7 +243,7 @@ secrets:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all properties in compose file 2", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllProperties(composeData, suffix);
|
||||
@@ -308,7 +308,7 @@ secrets:
|
||||
file: ./service_secret.txt
|
||||
`;
|
||||
|
||||
const expectedComposeFile3 = load(`
|
||||
const expectedComposeFile3 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -366,7 +366,7 @@ secrets:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all properties in compose file 3", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllProperties(composeData, suffix);
|
||||
@@ -420,7 +420,7 @@ volumes:
|
||||
driver: local
|
||||
`;
|
||||
|
||||
const expectedComposeFile = load(`
|
||||
const expectedComposeFile = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -467,7 +467,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all properties in Plausible compose file", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllProperties(composeData, suffix);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToConfigsRoot, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -23,7 +23,7 @@ configs:
|
||||
`;
|
||||
|
||||
test("Add suffix to configs in root property", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -59,7 +59,7 @@ configs:
|
||||
`;
|
||||
|
||||
test("Add suffix to multiple configs in root property", () => {
|
||||
const composeData = load(composeFileMultipleConfigs) as ComposeSpecification;
|
||||
const composeData = parse(composeFileMultipleConfigs) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -92,7 +92,7 @@ configs:
|
||||
`;
|
||||
|
||||
test("Add suffix to configs with different properties in root property", () => {
|
||||
const composeData = load(
|
||||
const composeData = parse(
|
||||
composeFileDifferentProperties,
|
||||
) as ComposeSpecification;
|
||||
|
||||
@@ -137,7 +137,7 @@ configs:
|
||||
`;
|
||||
|
||||
// Expected compose file con el prefijo `testhash`
|
||||
const expectedComposeFileConfigRoot = load(`
|
||||
const expectedComposeFileConfigRoot = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -162,7 +162,7 @@ configs:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to configs in root property", () => {
|
||||
const composeData = load(composeFileConfigRoot) as ComposeSpecification;
|
||||
const composeData = parse(composeFileConfigRoot) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
|
||||
@@ -3,8 +3,8 @@ import {
|
||||
addSuffixToConfigsInServices,
|
||||
generateRandomHash,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile = `
|
||||
version: "3.8"
|
||||
@@ -22,7 +22,7 @@ configs:
|
||||
`;
|
||||
|
||||
test("Add suffix to configs in services", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -54,7 +54,7 @@ configs:
|
||||
`;
|
||||
|
||||
test("Add suffix to configs in services with single config", () => {
|
||||
const composeData = load(
|
||||
const composeData = parse(
|
||||
composeFileSingleServiceConfig,
|
||||
) as ComposeSpecification;
|
||||
|
||||
@@ -108,7 +108,7 @@ configs:
|
||||
`;
|
||||
|
||||
test("Add suffix to configs in services with multiple configs", () => {
|
||||
const composeData = load(
|
||||
const composeData = parse(
|
||||
composeFileMultipleServicesConfigs,
|
||||
) as ComposeSpecification;
|
||||
|
||||
@@ -157,7 +157,7 @@ services:
|
||||
`;
|
||||
|
||||
// Expected compose file con el prefijo `testhash`
|
||||
const expectedComposeFileConfigServices = load(`
|
||||
const expectedComposeFileConfigServices = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -182,7 +182,7 @@ services:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to configs in services", () => {
|
||||
const composeData = load(composeFileConfigServices) as ComposeSpecification;
|
||||
const composeData = parse(composeFileConfigServices) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToAllConfigs, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -43,7 +43,7 @@ configs:
|
||||
file: ./db-config.yml
|
||||
`;
|
||||
|
||||
const expectedComposeFileCombinedConfigs = load(`
|
||||
const expectedComposeFileCombinedConfigs = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -77,7 +77,7 @@ configs:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all configs in root and services", () => {
|
||||
const composeData = load(composeFileCombinedConfigs) as ComposeSpecification;
|
||||
const composeData = parse(composeFileCombinedConfigs) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
@@ -122,7 +122,7 @@ configs:
|
||||
file: ./db-config.yml
|
||||
`;
|
||||
|
||||
const expectedComposeFileWithEnvAndExternal = load(`
|
||||
const expectedComposeFileWithEnvAndExternal = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -159,7 +159,7 @@ configs:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to configs with environment and external", () => {
|
||||
const composeData = load(
|
||||
const composeData = parse(
|
||||
composeFileWithEnvAndExternal,
|
||||
) as ComposeSpecification;
|
||||
|
||||
@@ -200,7 +200,7 @@ configs:
|
||||
file: ./db-config.yml
|
||||
`;
|
||||
|
||||
const expectedComposeFileWithTemplateDriverAndLabels = load(`
|
||||
const expectedComposeFileWithTemplateDriverAndLabels = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -231,7 +231,7 @@ configs:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to configs with template driver and labels", () => {
|
||||
const composeData = load(
|
||||
const composeData = parse(
|
||||
composeFileWithTemplateDriverAndLabels,
|
||||
) as ComposeSpecification;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToNetworksRoot, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile = `
|
||||
version: "3.8"
|
||||
@@ -35,7 +35,7 @@ test("Generate random hash with 8 characters", () => {
|
||||
});
|
||||
|
||||
test("Add suffix to networks root property", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -79,7 +79,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to advanced networks root property (2 TRY)", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -120,7 +120,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks with external properties", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -160,7 +160,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks with IPAM configurations", () => {
|
||||
const composeData = load(composeFile4) as ComposeSpecification;
|
||||
const composeData = parse(composeFile4) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -201,7 +201,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks with custom options", () => {
|
||||
const composeData = load(composeFile5) as ComposeSpecification;
|
||||
const composeData = parse(composeFile5) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -264,7 +264,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks with static suffix", () => {
|
||||
const composeData = load(composeFile6) as ComposeSpecification;
|
||||
const composeData = parse(composeFile6) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
@@ -273,7 +273,7 @@ test("Add suffix to networks with static suffix", () => {
|
||||
}
|
||||
const networks = addSuffixToNetworksRoot(composeData.networks, suffix);
|
||||
|
||||
const expectedComposeData = load(
|
||||
const expectedComposeData = parse(
|
||||
expectedComposeFile6,
|
||||
) as ComposeSpecification;
|
||||
expect(networks).toStrictEqual(expectedComposeData.networks);
|
||||
@@ -293,7 +293,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("It shoudn't add suffix to dokploy-network", () => {
|
||||
const composeData = load(composeFile7) as ComposeSpecification;
|
||||
const composeData = parse(composeFile7) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -3,8 +3,8 @@ import {
|
||||
addSuffixToServiceNetworks,
|
||||
generateRandomHash,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile = `
|
||||
version: "3.8"
|
||||
@@ -23,7 +23,7 @@ services:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks in services", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -67,7 +67,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks in services with aliases", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -107,7 +107,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks in services (Object with simple networks)", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -153,7 +153,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks in services (combined case)", () => {
|
||||
const composeData = load(composeFileCombined) as ComposeSpecification;
|
||||
const composeData = parse(composeFileCombined) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -196,7 +196,7 @@ services:
|
||||
`;
|
||||
|
||||
test("It shoudn't add suffix to dokploy-network in services", () => {
|
||||
const composeData = load(composeFile7) as ComposeSpecification;
|
||||
const composeData = parse(composeFile7) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -245,7 +245,7 @@ services:
|
||||
`;
|
||||
|
||||
test("It shoudn't add suffix to dokploy-network in services multiples cases", () => {
|
||||
const composeData = load(composeFile8) as ComposeSpecification;
|
||||
const composeData = parse(composeFile8) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -5,8 +5,8 @@ import {
|
||||
addSuffixToServiceNetworks,
|
||||
generateRandomHash,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFileCombined = `
|
||||
version: "3.8"
|
||||
@@ -39,7 +39,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to networks in services and root (combined case)", () => {
|
||||
const composeData = load(composeFileCombined) as ComposeSpecification;
|
||||
const composeData = parse(composeFileCombined) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -89,7 +89,7 @@ test("Add suffix to networks in services and root (combined case)", () => {
|
||||
expect(redisNetworks).not.toHaveProperty("backend");
|
||||
});
|
||||
|
||||
const expectedComposeFile = load(`
|
||||
const expectedComposeFile = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -120,7 +120,7 @@ networks:
|
||||
`);
|
||||
|
||||
test("Add suffix to networks in compose file", () => {
|
||||
const composeData = load(composeFileCombined) as ComposeSpecification;
|
||||
const composeData = parse(composeFileCombined) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
if (!composeData?.networks) {
|
||||
@@ -156,7 +156,7 @@ networks:
|
||||
driver: bridge
|
||||
`;
|
||||
|
||||
const expectedComposeFile2 = load(`
|
||||
const expectedComposeFile2 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -182,7 +182,7 @@ networks:
|
||||
`);
|
||||
|
||||
test("Add suffix to networks in compose file with external and internal networks", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
const updatedComposeData = addSuffixToAllNetworks(composeData, suffix);
|
||||
@@ -218,7 +218,7 @@ networks:
|
||||
com.docker.network.bridge.enable_icc: "true"
|
||||
`;
|
||||
|
||||
const expectedComposeFile3 = load(`
|
||||
const expectedComposeFile3 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -247,7 +247,7 @@ networks:
|
||||
`);
|
||||
|
||||
test("Add suffix to networks in compose file with multiple services and complex network configurations", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
const updatedComposeData = addSuffixToAllNetworks(composeData, suffix);
|
||||
@@ -289,7 +289,7 @@ networks:
|
||||
|
||||
`;
|
||||
|
||||
const expectedComposeFile4 = load(`
|
||||
const expectedComposeFile4 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -326,7 +326,7 @@ networks:
|
||||
`);
|
||||
|
||||
test("Expect don't add suffix to dokploy-network in compose file with multiple services and complex network configurations", () => {
|
||||
const composeData = load(composeFile4) as ComposeSpecification;
|
||||
const composeData = parse(composeFile4) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
const updatedComposeData = addSuffixToAllNetworks(composeData, suffix);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToSecretsRoot, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -23,7 +23,7 @@ secrets:
|
||||
`;
|
||||
|
||||
test("Add suffix to secrets in root property", () => {
|
||||
const composeData = load(composeFileSecretsRoot) as ComposeSpecification;
|
||||
const composeData = parse(composeFileSecretsRoot) as ComposeSpecification;
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
if (!composeData?.secrets) {
|
||||
@@ -52,7 +52,7 @@ secrets:
|
||||
`;
|
||||
|
||||
test("Add suffix to secrets in root property (Test 1)", () => {
|
||||
const composeData = load(composeFileSecretsRoot1) as ComposeSpecification;
|
||||
const composeData = parse(composeFileSecretsRoot1) as ComposeSpecification;
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
if (!composeData?.secrets) {
|
||||
@@ -84,7 +84,7 @@ secrets:
|
||||
`;
|
||||
|
||||
test("Add suffix to secrets in root property (Test 2)", () => {
|
||||
const composeData = load(composeFileSecretsRoot2) as ComposeSpecification;
|
||||
const composeData = parse(composeFileSecretsRoot2) as ComposeSpecification;
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
if (!composeData?.secrets) {
|
||||
|
||||
@@ -3,8 +3,8 @@ import {
|
||||
addSuffixToSecretsInServices,
|
||||
generateRandomHash,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFileSecretsServices = `
|
||||
version: "3.8"
|
||||
@@ -21,7 +21,7 @@ secrets:
|
||||
`;
|
||||
|
||||
test("Add suffix to secrets in services", () => {
|
||||
const composeData = load(composeFileSecretsServices) as ComposeSpecification;
|
||||
const composeData = parse(composeFileSecretsServices) as ComposeSpecification;
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
if (!composeData.services) {
|
||||
@@ -54,7 +54,9 @@ secrets:
|
||||
`;
|
||||
|
||||
test("Add suffix to secrets in services (Test 1)", () => {
|
||||
const composeData = load(composeFileSecretsServices1) as ComposeSpecification;
|
||||
const composeData = parse(
|
||||
composeFileSecretsServices1,
|
||||
) as ComposeSpecification;
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
if (!composeData.services) {
|
||||
@@ -93,7 +95,9 @@ secrets:
|
||||
`;
|
||||
|
||||
test("Add suffix to secrets in services (Test 2)", () => {
|
||||
const composeData = load(composeFileSecretsServices2) as ComposeSpecification;
|
||||
const composeData = parse(
|
||||
composeFileSecretsServices2,
|
||||
) as ComposeSpecification;
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
if (!composeData.services) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToAllSecrets } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFileCombinedSecrets = `
|
||||
version: "3.8"
|
||||
@@ -25,7 +25,7 @@ secrets:
|
||||
file: ./app_secret.txt
|
||||
`;
|
||||
|
||||
const expectedComposeFileCombinedSecrets = load(`
|
||||
const expectedComposeFileCombinedSecrets = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -48,7 +48,7 @@ secrets:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all secrets", () => {
|
||||
const composeData = load(composeFileCombinedSecrets) as ComposeSpecification;
|
||||
const composeData = parse(composeFileCombinedSecrets) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllSecrets(composeData, suffix);
|
||||
@@ -77,7 +77,7 @@ secrets:
|
||||
file: ./cache_secret.txt
|
||||
`;
|
||||
|
||||
const expectedComposeFileCombinedSecrets3 = load(`
|
||||
const expectedComposeFileCombinedSecrets3 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -99,7 +99,9 @@ secrets:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all secrets (3rd Case)", () => {
|
||||
const composeData = load(composeFileCombinedSecrets3) as ComposeSpecification;
|
||||
const composeData = parse(
|
||||
composeFileCombinedSecrets3,
|
||||
) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllSecrets(composeData, suffix);
|
||||
@@ -128,7 +130,7 @@ secrets:
|
||||
file: ./db_password.txt
|
||||
`;
|
||||
|
||||
const expectedComposeFileCombinedSecrets4 = load(`
|
||||
const expectedComposeFileCombinedSecrets4 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -150,7 +152,9 @@ secrets:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all secrets (4th Case)", () => {
|
||||
const composeData = load(composeFileCombinedSecrets4) as ComposeSpecification;
|
||||
const composeData = parse(
|
||||
composeFileCombinedSecrets4,
|
||||
) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllSecrets(composeData, suffix);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToServiceNames, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile = `
|
||||
version: "3.8"
|
||||
@@ -27,7 +27,7 @@ test("Generate random hash with 8 characters", () => {
|
||||
});
|
||||
|
||||
test("Add suffix to service names with container_name in compose file", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToServiceNames, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -32,7 +32,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names with depends_on (array) in compose file", () => {
|
||||
const composeData = load(composeFile4) as ComposeSpecification;
|
||||
const composeData = parse(composeFile4) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -102,7 +102,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names with depends_on (object) in compose file", () => {
|
||||
const composeData = load(composeFile5) as ComposeSpecification;
|
||||
const composeData = parse(composeFile5) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToServiceNames, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -30,7 +30,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names with extends (string) in compose file", () => {
|
||||
const composeData = load(composeFile6) as ComposeSpecification;
|
||||
const composeData = parse(composeFile6) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -90,7 +90,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names with extends (object) in compose file", () => {
|
||||
const composeData = load(composeFile7) as ComposeSpecification;
|
||||
const composeData = parse(composeFile7) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToServiceNames, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -31,7 +31,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names with links in compose file", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToServiceNames, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -26,7 +26,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names in compose file", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -3,8 +3,8 @@ import {
|
||||
addSuffixToAllServiceNames,
|
||||
addSuffixToServiceNames,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFileCombinedAllCases = `
|
||||
version: "3.8"
|
||||
@@ -38,7 +38,7 @@ networks:
|
||||
driver: bridge
|
||||
`;
|
||||
|
||||
const expectedComposeFile = load(`
|
||||
const expectedComposeFile = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -71,7 +71,9 @@ networks:
|
||||
`);
|
||||
|
||||
test("Add suffix to all service names in compose file", () => {
|
||||
const composeData = load(composeFileCombinedAllCases) as ComposeSpecification;
|
||||
const composeData = parse(
|
||||
composeFileCombinedAllCases,
|
||||
) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
@@ -131,7 +133,7 @@ networks:
|
||||
driver: bridge
|
||||
`;
|
||||
|
||||
const expectedComposeFile1 = load(`
|
||||
const expectedComposeFile1 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -176,7 +178,7 @@ networks:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all service names in compose file 1", () => {
|
||||
const composeData = load(composeFile1) as ComposeSpecification;
|
||||
const composeData = parse(composeFile1) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllServiceNames(composeData, suffix);
|
||||
@@ -227,7 +229,7 @@ networks:
|
||||
driver: bridge
|
||||
`;
|
||||
|
||||
const expectedComposeFile2 = load(`
|
||||
const expectedComposeFile2 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -271,7 +273,7 @@ networks:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all service names in compose file 2", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllServiceNames(composeData, suffix);
|
||||
@@ -322,7 +324,7 @@ networks:
|
||||
driver: bridge
|
||||
`;
|
||||
|
||||
const expectedComposeFile3 = load(`
|
||||
const expectedComposeFile3 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -366,7 +368,7 @@ networks:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to all service names in compose file 3", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllServiceNames(composeData, suffix);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToServiceNames, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -35,7 +35,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to service names with volumes_from in compose file", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -4,8 +4,8 @@ import {
|
||||
addSuffixToVolumesRoot,
|
||||
generateRandomHash,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile = `
|
||||
services:
|
||||
@@ -70,7 +70,7 @@ volumes:
|
||||
driver: local
|
||||
`;
|
||||
|
||||
const expectedDockerCompose = load(`
|
||||
const expectedDockerCompose = parse(`
|
||||
services:
|
||||
mail:
|
||||
image: bytemark/smtp
|
||||
@@ -143,7 +143,7 @@ test("Generate random hash with 8 characters", () => {
|
||||
// Docker compose needs unique names for services, volumes, networks and containers
|
||||
// So base on a input which is a dockercompose file, it should replace the name with a hash and return a new dockercompose file
|
||||
test("Add suffix to volumes root property", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -165,7 +165,7 @@ test("Add suffix to volumes root property", () => {
|
||||
});
|
||||
|
||||
test("Expect to change the suffix in all the possible places", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
|
||||
@@ -195,7 +195,7 @@ volumes:
|
||||
mongo-data:
|
||||
`;
|
||||
|
||||
const expectedDockerCompose2 = load(`
|
||||
const expectedDockerCompose2 = parse(`
|
||||
version: '3.8'
|
||||
services:
|
||||
app:
|
||||
@@ -218,7 +218,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Expect to change the suffix in all the possible places (2 Try)", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
|
||||
@@ -248,7 +248,7 @@ volumes:
|
||||
mongo-data:
|
||||
`;
|
||||
|
||||
const expectedDockerCompose3 = load(`
|
||||
const expectedDockerCompose3 = parse(`
|
||||
version: '3.8'
|
||||
services:
|
||||
app:
|
||||
@@ -271,7 +271,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Expect to change the suffix in all the possible places (3 Try)", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
|
||||
@@ -645,7 +645,7 @@ volumes:
|
||||
db-config:
|
||||
`;
|
||||
|
||||
const expectedDockerComposeComplex = load(`
|
||||
const expectedDockerComposeComplex = parse(`
|
||||
version: "3.8"
|
||||
services:
|
||||
studio:
|
||||
@@ -1012,7 +1012,7 @@ volumes:
|
||||
`);
|
||||
|
||||
test("Expect to change the suffix in all the possible places (4 Try)", () => {
|
||||
const composeData = load(composeFileComplex) as ComposeSpecification;
|
||||
const composeData = parse(composeFileComplex) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
|
||||
@@ -1065,7 +1065,7 @@ volumes:
|
||||
db-data:
|
||||
`;
|
||||
|
||||
const expectedDockerComposeExample1 = load(`
|
||||
const expectedDockerComposeExample1 = parse(`
|
||||
version: "3.8"
|
||||
services:
|
||||
web:
|
||||
@@ -1111,7 +1111,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Expect to change the suffix in all the possible places (5 Try)", () => {
|
||||
const composeData = load(composeFileExample1) as ComposeSpecification;
|
||||
const composeData = parse(composeFileExample1) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
|
||||
@@ -1143,7 +1143,7 @@ volumes:
|
||||
backrest-cache:
|
||||
`;
|
||||
|
||||
const expectedDockerComposeBackrest = load(`
|
||||
const expectedDockerComposeBackrest = parse(`
|
||||
services:
|
||||
backrest:
|
||||
image: garethgeorge/backrest:v1.7.3
|
||||
@@ -1168,7 +1168,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Should handle volume paths with subdirectories correctly", () => {
|
||||
const composeData = load(composeFileBackrest) as ComposeSpecification;
|
||||
const composeData = parse(composeFileBackrest) as ComposeSpecification;
|
||||
const suffix = "testhash";
|
||||
|
||||
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToVolumesRoot, generateRandomHash } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFile = `
|
||||
version: "3.8"
|
||||
@@ -29,7 +29,7 @@ test("Generate random hash with 8 characters", () => {
|
||||
});
|
||||
|
||||
test("Add suffix to volumes in root property", () => {
|
||||
const composeData = load(composeFile) as ComposeSpecification;
|
||||
const composeData = parse(composeFile) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -67,7 +67,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to volumes in root property (Case 2)", () => {
|
||||
const composeData = load(composeFile2) as ComposeSpecification;
|
||||
const composeData = parse(composeFile2) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -101,7 +101,7 @@ networks:
|
||||
`;
|
||||
|
||||
test("Add suffix to volumes in root property (Case 3)", () => {
|
||||
const composeData = load(composeFile3) as ComposeSpecification;
|
||||
const composeData = parse(composeFile3) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -148,7 +148,7 @@ volumes:
|
||||
`;
|
||||
|
||||
// Expected compose file con el prefijo `testhash`
|
||||
const expectedComposeFile4 = load(`
|
||||
const expectedComposeFile4 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -179,7 +179,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to volumes in root property", () => {
|
||||
const composeData = load(composeFile4) as ComposeSpecification;
|
||||
const composeData = parse(composeFile4) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
|
||||
@@ -3,8 +3,8 @@ import {
|
||||
addSuffixToVolumesInServices,
|
||||
generateRandomHash,
|
||||
} from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
test("Generate random hash with 8 characters", () => {
|
||||
const hash = generateRandomHash();
|
||||
@@ -24,7 +24,7 @@ services:
|
||||
`;
|
||||
|
||||
test("Add suffix to volumes declared directly in services", () => {
|
||||
const composeData = load(composeFile1) as ComposeSpecification;
|
||||
const composeData = parse(composeFile1) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
@@ -59,7 +59,7 @@ volumes:
|
||||
`;
|
||||
|
||||
test("Add suffix to volumes declared directly in services (Case 2)", () => {
|
||||
const composeData = load(composeFileTypeVolume) as ComposeSpecification;
|
||||
const composeData = parse(composeFileTypeVolume) as ComposeSpecification;
|
||||
|
||||
const suffix = generateRandomHash();
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ComposeSpecification } from "@dokploy/server";
|
||||
import { addSuffixToAllVolumes } from "@dokploy/server";
|
||||
import { load } from "js-yaml";
|
||||
import { expect, test } from "vitest";
|
||||
import { parse } from "yaml";
|
||||
|
||||
const composeFileTypeVolume = `
|
||||
version: "3.8"
|
||||
@@ -23,7 +23,7 @@ volumes:
|
||||
driver: local
|
||||
`;
|
||||
|
||||
const expectedComposeFileTypeVolume = load(`
|
||||
const expectedComposeFileTypeVolume = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -44,7 +44,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to volumes with type: volume in services", () => {
|
||||
const composeData = load(composeFileTypeVolume) as ComposeSpecification;
|
||||
const composeData = parse(composeFileTypeVolume) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
@@ -73,7 +73,7 @@ volumes:
|
||||
driver: local
|
||||
`;
|
||||
|
||||
const expectedComposeFileTypeVolume1 = load(`
|
||||
const expectedComposeFileTypeVolume1 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -93,7 +93,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to mixed volumes in services", () => {
|
||||
const composeData = load(composeFileTypeVolume1) as ComposeSpecification;
|
||||
const composeData = parse(composeFileTypeVolume1) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
@@ -128,7 +128,7 @@ volumes:
|
||||
device: /path/to/app/logs
|
||||
`;
|
||||
|
||||
const expectedComposeFileTypeVolume2 = load(`
|
||||
const expectedComposeFileTypeVolume2 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -154,7 +154,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to complex volume configurations in services", () => {
|
||||
const composeData = load(composeFileTypeVolume2) as ComposeSpecification;
|
||||
const composeData = parse(composeFileTypeVolume2) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
@@ -218,7 +218,7 @@ volumes:
|
||||
device: /path/to/shared/logs
|
||||
`;
|
||||
|
||||
const expectedComposeFileTypeVolume3 = load(`
|
||||
const expectedComposeFileTypeVolume3 = parse(`
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
@@ -273,7 +273,7 @@ volumes:
|
||||
`) as ComposeSpecification;
|
||||
|
||||
test("Add suffix to complex nested volumes configuration in services", () => {
|
||||
const composeData = load(composeFileTypeVolume3) as ComposeSpecification;
|
||||
const composeData = parse(composeFileTypeVolume3) as ComposeSpecification;
|
||||
|
||||
const suffix = "testhash";
|
||||
|
||||
|
||||
@@ -133,6 +133,7 @@ const baseApp: ApplicationNested = {
|
||||
username: null,
|
||||
dockerContextPath: null,
|
||||
rollbackActive: false,
|
||||
stopGracePeriodSwarm: null,
|
||||
};
|
||||
|
||||
describe("unzipDrop using real zip files", () => {
|
||||
|
||||
102
apps/dokploy/__test__/server/mechanizeDockerContainer.test.ts
Normal file
102
apps/dokploy/__test__/server/mechanizeDockerContainer.test.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import type { ApplicationNested } from "@dokploy/server/utils/builders";
|
||||
import { mechanizeDockerContainer } from "@dokploy/server/utils/builders";
|
||||
|
||||
type MockCreateServiceOptions = {
|
||||
StopGracePeriod?: number;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
const { inspectMock, getServiceMock, createServiceMock, getRemoteDockerMock } =
|
||||
vi.hoisted(() => {
|
||||
const inspect = vi.fn<[], Promise<never>>();
|
||||
const getService = vi.fn(() => ({ inspect }));
|
||||
const createService = vi.fn<[MockCreateServiceOptions], Promise<void>>(
|
||||
async () => undefined,
|
||||
);
|
||||
const getRemoteDocker = vi.fn(async () => ({
|
||||
getService,
|
||||
createService,
|
||||
}));
|
||||
return {
|
||||
inspectMock: inspect,
|
||||
getServiceMock: getService,
|
||||
createServiceMock: createService,
|
||||
getRemoteDockerMock: getRemoteDocker,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("@dokploy/server/utils/servers/remote-docker", () => ({
|
||||
getRemoteDocker: getRemoteDockerMock,
|
||||
}));
|
||||
|
||||
const createApplication = (
|
||||
overrides: Partial<ApplicationNested> = {},
|
||||
): ApplicationNested =>
|
||||
({
|
||||
appName: "test-app",
|
||||
buildType: "dockerfile",
|
||||
env: null,
|
||||
mounts: [],
|
||||
cpuLimit: null,
|
||||
memoryLimit: null,
|
||||
memoryReservation: null,
|
||||
cpuReservation: null,
|
||||
command: null,
|
||||
ports: [],
|
||||
sourceType: "docker",
|
||||
dockerImage: "example:latest",
|
||||
registry: null,
|
||||
environment: {
|
||||
project: { env: null },
|
||||
env: null,
|
||||
},
|
||||
replicas: 1,
|
||||
stopGracePeriodSwarm: 0n,
|
||||
serverId: "server-id",
|
||||
...overrides,
|
||||
}) as unknown as ApplicationNested;
|
||||
|
||||
describe("mechanizeDockerContainer", () => {
|
||||
beforeEach(() => {
|
||||
inspectMock.mockReset();
|
||||
inspectMock.mockRejectedValue(new Error("service not found"));
|
||||
getServiceMock.mockClear();
|
||||
createServiceMock.mockClear();
|
||||
getRemoteDockerMock.mockClear();
|
||||
getRemoteDockerMock.mockResolvedValue({
|
||||
getService: getServiceMock,
|
||||
createService: createServiceMock,
|
||||
});
|
||||
});
|
||||
|
||||
it("converts bigint stopGracePeriodSwarm to a number and keeps zero values", async () => {
|
||||
const application = createApplication({ stopGracePeriodSwarm: 0n });
|
||||
|
||||
await mechanizeDockerContainer(application);
|
||||
|
||||
expect(createServiceMock).toHaveBeenCalledTimes(1);
|
||||
const call = createServiceMock.mock.calls[0];
|
||||
if (!call) {
|
||||
throw new Error("createServiceMock should have been called once");
|
||||
}
|
||||
const [settings] = call;
|
||||
expect(settings.StopGracePeriod).toBe(0);
|
||||
expect(typeof settings.StopGracePeriod).toBe("number");
|
||||
});
|
||||
|
||||
it("omits StopGracePeriod when stopGracePeriodSwarm is null", async () => {
|
||||
const application = createApplication({ stopGracePeriodSwarm: null });
|
||||
|
||||
await mechanizeDockerContainer(application);
|
||||
|
||||
expect(createServiceMock).toHaveBeenCalledTimes(1);
|
||||
const call = createServiceMock.mock.calls[0];
|
||||
if (!call) {
|
||||
throw new Error("createServiceMock should have been called once");
|
||||
}
|
||||
const [settings] = call;
|
||||
expect(settings).not.toHaveProperty("StopGracePeriod");
|
||||
});
|
||||
});
|
||||
@@ -111,6 +111,7 @@ const baseApp: ApplicationNested = {
|
||||
updateConfigSwarm: null,
|
||||
username: null,
|
||||
dockerContextPath: null,
|
||||
stopGracePeriodSwarm: null,
|
||||
};
|
||||
|
||||
const baseDomain: Domain = {
|
||||
|
||||
@@ -25,6 +25,7 @@ import {
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from "@/components/ui/form";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
@@ -176,10 +177,18 @@ const addSwarmSettings = z.object({
|
||||
modeSwarm: createStringToJSONSchema(ServiceModeSwarmSchema).nullable(),
|
||||
labelsSwarm: createStringToJSONSchema(LabelsSwarmSchema).nullable(),
|
||||
networkSwarm: createStringToJSONSchema(NetworkSwarmSchema).nullable(),
|
||||
stopGracePeriodSwarm: z.bigint().nullable(),
|
||||
});
|
||||
|
||||
type AddSwarmSettings = z.infer<typeof addSwarmSettings>;
|
||||
|
||||
const hasStopGracePeriodSwarm = (
|
||||
value: unknown,
|
||||
): value is { stopGracePeriodSwarm: bigint | number | string | null } =>
|
||||
typeof value === "object" &&
|
||||
value !== null &&
|
||||
"stopGracePeriodSwarm" in value;
|
||||
|
||||
interface Props {
|
||||
id: string;
|
||||
type: "postgres" | "mariadb" | "mongo" | "mysql" | "redis" | "application";
|
||||
@@ -224,12 +233,22 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
modeSwarm: null,
|
||||
labelsSwarm: null,
|
||||
networkSwarm: null,
|
||||
stopGracePeriodSwarm: null,
|
||||
},
|
||||
resolver: zodResolver(addSwarmSettings),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (data) {
|
||||
const stopGracePeriodValue = hasStopGracePeriodSwarm(data)
|
||||
? data.stopGracePeriodSwarm
|
||||
: null;
|
||||
const normalizedStopGracePeriod =
|
||||
stopGracePeriodValue === null || stopGracePeriodValue === undefined
|
||||
? null
|
||||
: typeof stopGracePeriodValue === "bigint"
|
||||
? stopGracePeriodValue
|
||||
: BigInt(stopGracePeriodValue);
|
||||
form.reset({
|
||||
healthCheckSwarm: data.healthCheckSwarm
|
||||
? JSON.stringify(data.healthCheckSwarm, null, 2)
|
||||
@@ -255,6 +274,7 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
networkSwarm: data.networkSwarm
|
||||
? JSON.stringify(data.networkSwarm, null, 2)
|
||||
: null,
|
||||
stopGracePeriodSwarm: normalizedStopGracePeriod,
|
||||
});
|
||||
}
|
||||
}, [form, form.reset, data]);
|
||||
@@ -275,6 +295,7 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
modeSwarm: data.modeSwarm,
|
||||
labelsSwarm: data.labelsSwarm,
|
||||
networkSwarm: data.networkSwarm,
|
||||
stopGracePeriodSwarm: data.stopGracePeriodSwarm ?? null,
|
||||
})
|
||||
.then(async () => {
|
||||
toast.success("Swarm settings updated");
|
||||
@@ -352,9 +373,9 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
language="json"
|
||||
placeholder={`{
|
||||
"Test" : ["CMD-SHELL", "curl -f http://localhost:3000/health"],
|
||||
"Interval" : 10000,
|
||||
"Timeout" : 10000,
|
||||
"StartPeriod" : 10000,
|
||||
"Interval" : 10000000000,
|
||||
"Timeout" : 10000000000,
|
||||
"StartPeriod" : 10000000000,
|
||||
"Retries" : 10
|
||||
}`}
|
||||
className="h-[12rem] font-mono"
|
||||
@@ -407,9 +428,9 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
language="json"
|
||||
placeholder={`{
|
||||
"Condition" : "on-failure",
|
||||
"Delay" : 10000,
|
||||
"Delay" : 10000000000,
|
||||
"MaxAttempts" : 10,
|
||||
"Window" : 10000
|
||||
"Window" : 10000000000
|
||||
} `}
|
||||
className="h-[12rem] font-mono"
|
||||
{...field}
|
||||
@@ -529,9 +550,9 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
language="json"
|
||||
placeholder={`{
|
||||
"Parallelism" : 1,
|
||||
"Delay" : 10000,
|
||||
"Delay" : 10000000000,
|
||||
"FailureAction" : "continue",
|
||||
"Monitor" : 10000,
|
||||
"Monitor" : 10000000000,
|
||||
"MaxFailureRatio" : 10,
|
||||
"Order" : "start-first"
|
||||
}`}
|
||||
@@ -587,9 +608,9 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
language="json"
|
||||
placeholder={`{
|
||||
"Parallelism" : 1,
|
||||
"Delay" : 10000,
|
||||
"Delay" : 10000000000,
|
||||
"FailureAction" : "continue",
|
||||
"Monitor" : 10000,
|
||||
"Monitor" : 10000000000,
|
||||
"MaxFailureRatio" : 10,
|
||||
"Order" : "start-first"
|
||||
}`}
|
||||
@@ -774,7 +795,57 @@ export const AddSwarmSettings = ({ id, type }: Props) => {
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="stopGracePeriodSwarm"
|
||||
render={({ field }) => (
|
||||
<FormItem className="relative max-lg:px-4 lg:pl-6 ">
|
||||
<FormLabel>Stop Grace Period (nanoseconds)</FormLabel>
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<FormDescription className="break-all w-fit flex flex-row gap-1 items-center">
|
||||
Duration in nanoseconds
|
||||
<HelpCircle className="size-4 text-muted-foreground" />
|
||||
</FormDescription>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent
|
||||
className="w-full z-[999]"
|
||||
align="start"
|
||||
side="bottom"
|
||||
>
|
||||
<code>
|
||||
<pre>
|
||||
{`Enter duration in nanoseconds:
|
||||
• 30000000000 - 30 seconds
|
||||
• 120000000000 - 2 minutes
|
||||
• 3600000000000 - 1 hour
|
||||
• 0 - no grace period`}
|
||||
</pre>
|
||||
</code>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
<FormControl>
|
||||
<Input
|
||||
type="number"
|
||||
placeholder="30000000000"
|
||||
className="font-mono"
|
||||
{...field}
|
||||
value={field?.value?.toString() || ""}
|
||||
onChange={(e) =>
|
||||
field.onChange(
|
||||
e.target.value ? BigInt(e.target.value) : null,
|
||||
)
|
||||
}
|
||||
/>
|
||||
</FormControl>
|
||||
<pre>
|
||||
<FormMessage />
|
||||
</pre>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<DialogFooter className="flex w-full flex-row justify-end md:col-span-2 m-0 sticky bottom-0 right-0 bg-muted border">
|
||||
<Button
|
||||
isLoading={isLoading}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import jsyaml from "js-yaml";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
import { parse, stringify, YAMLParseError } from "yaml";
|
||||
import { z } from "zod";
|
||||
import { AlertBlock } from "@/components/shared/alert-block";
|
||||
import { CodeEditor } from "@/components/shared/code-editor";
|
||||
@@ -38,11 +38,11 @@ interface Props {
|
||||
|
||||
export const validateAndFormatYAML = (yamlText: string) => {
|
||||
try {
|
||||
const obj = jsyaml.load(yamlText);
|
||||
const formattedYaml = jsyaml.dump(obj, { indent: 4 });
|
||||
const obj = parse(yamlText);
|
||||
const formattedYaml = stringify(obj, { indent: 4 });
|
||||
return { valid: true, formattedYaml, error: null };
|
||||
} catch (error) {
|
||||
if (error instanceof jsyaml.YAMLException) {
|
||||
if (error instanceof YAMLParseError) {
|
||||
return {
|
||||
valid: false,
|
||||
formattedYaml: yamlText,
|
||||
@@ -89,7 +89,7 @@ export const UpdateTraefikConfig = ({ applicationId }: Props) => {
|
||||
if (!valid) {
|
||||
form.setError("traefikConfig", {
|
||||
type: "manual",
|
||||
message: error || "Invalid YAML",
|
||||
message: (error as string) || "Invalid YAML",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -59,7 +59,7 @@ export const SaveGitProvider = ({ applicationId }: Props) => {
|
||||
const router = useRouter();
|
||||
|
||||
const { mutateAsync, isLoading } =
|
||||
api.application.saveGitProdiver.useMutation();
|
||||
api.application.saveGitProvider.useMutation();
|
||||
|
||||
const form = useForm<GitProvider>({
|
||||
defaultValues: {
|
||||
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
RefreshCw,
|
||||
} from "lucide-react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { type Control, useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
import { z } from "zod";
|
||||
import { AlertBlock } from "@/components/shared/alert-block";
|
||||
@@ -57,6 +57,7 @@ export const commonCronExpressions = [
|
||||
{ label: "Every month on the 1st at midnight", value: "0 0 1 * *" },
|
||||
{ label: "Every 15 minutes", value: "*/15 * * * *" },
|
||||
{ label: "Every weekday at midnight", value: "0 0 * * 1-5" },
|
||||
{ label: "Custom", value: "custom" },
|
||||
];
|
||||
|
||||
const formSchema = z
|
||||
@@ -115,10 +116,91 @@ interface Props {
|
||||
scheduleType?: "application" | "compose" | "server" | "dokploy-server";
|
||||
}
|
||||
|
||||
export const ScheduleFormField = ({
|
||||
name,
|
||||
formControl,
|
||||
}: {
|
||||
name: string;
|
||||
formControl: Control<any>;
|
||||
}) => {
|
||||
const [selectedOption, setSelectedOption] = useState("");
|
||||
|
||||
return (
|
||||
<FormField
|
||||
control={formControl}
|
||||
name={name}
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="flex items-center gap-2">
|
||||
Schedule
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Info className="w-4 h-4 text-muted-foreground cursor-help" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Cron expression format: minute hour day month weekday</p>
|
||||
<p>Example: 0 0 * * * (daily at midnight)</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</FormLabel>
|
||||
<div className="flex flex-col gap-2">
|
||||
<Select
|
||||
value={selectedOption}
|
||||
onValueChange={(value) => {
|
||||
setSelectedOption(value);
|
||||
field.onChange(value === "custom" ? "" : value);
|
||||
}}
|
||||
>
|
||||
<FormControl>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select a predefined schedule" />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{commonCronExpressions.map((expr) => (
|
||||
<SelectItem key={expr.value} value={expr.value}>
|
||||
{expr.label}
|
||||
{expr.value !== "custom" && ` (${expr.value})`}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<div className="relative">
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="Custom cron expression (e.g., 0 0 * * *)"
|
||||
{...field}
|
||||
onChange={(e) => {
|
||||
const value = e.target.value;
|
||||
const commonExpression = commonCronExpressions.find(
|
||||
(expression) => expression.value === value,
|
||||
);
|
||||
if (commonExpression) {
|
||||
setSelectedOption(commonExpression.value);
|
||||
} else {
|
||||
setSelectedOption("custom");
|
||||
}
|
||||
field.onChange(e);
|
||||
}}
|
||||
/>
|
||||
</FormControl>
|
||||
</div>
|
||||
</div>
|
||||
<FormDescription>
|
||||
Choose a predefined schedule or enter a custom cron expression
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export const HandleSchedules = ({ id, scheduleId, scheduleType }: Props) => {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [cacheType, setCacheType] = useState<CacheType>("cache");
|
||||
|
||||
const utils = api.useUtils();
|
||||
const form = useForm<z.infer<typeof formSchema>>({
|
||||
resolver: zodResolver(formSchema),
|
||||
@@ -377,63 +459,9 @@ export const HandleSchedules = ({ id, scheduleId, scheduleType }: Props) => {
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
<ScheduleFormField
|
||||
name="cronExpression"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="flex items-center gap-2">
|
||||
Schedule
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Info className="w-4 h-4 text-muted-foreground cursor-help" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>
|
||||
Cron expression format: minute hour day month
|
||||
weekday
|
||||
</p>
|
||||
<p>Example: 0 0 * * * (daily at midnight)</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</FormLabel>
|
||||
<div className="flex flex-col gap-2">
|
||||
<Select
|
||||
onValueChange={(value) => {
|
||||
field.onChange(value);
|
||||
}}
|
||||
>
|
||||
<FormControl>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select a predefined schedule" />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{commonCronExpressions.map((expr) => (
|
||||
<SelectItem key={expr.value} value={expr.value}>
|
||||
{expr.label} ({expr.value})
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<div className="relative">
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="Custom cron expression (e.g., 0 0 * * *)"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
</div>
|
||||
</div>
|
||||
<FormDescription>
|
||||
Choose a predefined schedule or enter a custom cron
|
||||
expression
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
formControl={form.control}
|
||||
/>
|
||||
|
||||
{(scheduleTypeForm === "application" ||
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import {
|
||||
DatabaseZap,
|
||||
Info,
|
||||
PenBoxIcon,
|
||||
PlusCircle,
|
||||
RefreshCw,
|
||||
} from "lucide-react";
|
||||
import { DatabaseZap, PenBoxIcon, PlusCircle, RefreshCw } from "lucide-react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
@@ -47,7 +41,7 @@ import {
|
||||
import { cn } from "@/lib/utils";
|
||||
import { api } from "@/utils/api";
|
||||
import type { CacheType } from "../domains/handle-domain";
|
||||
import { commonCronExpressions } from "../schedules/handle-schedules";
|
||||
import { ScheduleFormField } from "../schedules/handle-schedules";
|
||||
|
||||
const formSchema = z
|
||||
.object({
|
||||
@@ -306,64 +300,9 @@ export const HandleVolumeBackups = ({
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
<ScheduleFormField
|
||||
name="cronExpression"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="flex items-center gap-2">
|
||||
Schedule
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Info className="w-4 h-4 text-muted-foreground cursor-help" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>
|
||||
Cron expression format: minute hour day month
|
||||
weekday
|
||||
</p>
|
||||
<p>Example: 0 0 * * * (daily at midnight)</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</FormLabel>
|
||||
<div className="flex flex-col gap-2">
|
||||
<Select
|
||||
onValueChange={(value) => {
|
||||
field.onChange(value);
|
||||
}}
|
||||
>
|
||||
<FormControl>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select a predefined schedule" />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{commonCronExpressions.map((expr) => (
|
||||
<SelectItem key={expr.value} value={expr.value}>
|
||||
{expr.label} ({expr.value})
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<div className="relative">
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="Custom cron expression (e.g., 0 0 * * *)"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
</div>
|
||||
</div>
|
||||
<FormDescription>
|
||||
Choose a predefined schedule or enter a custom cron
|
||||
expression
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
formControl={form.control}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { useEffect } from "react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
import { z } from "zod";
|
||||
@@ -35,6 +35,7 @@ export const ComposeFileEditor = ({ composeId }: Props) => {
|
||||
);
|
||||
|
||||
const { mutateAsync, isLoading } = api.compose.update.useMutation();
|
||||
const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
|
||||
|
||||
const form = useForm<AddComposeFile>({
|
||||
defaultValues: {
|
||||
@@ -53,6 +54,12 @@ export const ComposeFileEditor = ({ composeId }: Props) => {
|
||||
}
|
||||
}, [form, form.reset, data]);
|
||||
|
||||
useEffect(() => {
|
||||
if (data?.composeFile !== undefined) {
|
||||
setHasUnsavedChanges(composeFile !== data.composeFile);
|
||||
}
|
||||
}, [composeFile, data?.composeFile]);
|
||||
|
||||
const onSubmit = async (data: AddComposeFile) => {
|
||||
const { valid, error } = validateAndFormatYAML(data.composeFile);
|
||||
if (!valid) {
|
||||
@@ -71,6 +78,7 @@ export const ComposeFileEditor = ({ composeId }: Props) => {
|
||||
})
|
||||
.then(async () => {
|
||||
toast.success("Compose config Updated");
|
||||
setHasUnsavedChanges(false);
|
||||
refetch();
|
||||
await utils.compose.getConvertedCompose.invalidate({
|
||||
composeId,
|
||||
@@ -99,6 +107,19 @@ export const ComposeFileEditor = ({ composeId }: Props) => {
|
||||
return (
|
||||
<>
|
||||
<div className="w-full flex flex-col gap-4 ">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<h3 className="text-lg font-medium">Compose File</h3>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Configure your Docker Compose file for this service.
|
||||
{hasUnsavedChanges && (
|
||||
<span className="text-yellow-500 ml-2">
|
||||
(You have unsaved changes)
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<Form {...form}>
|
||||
<form
|
||||
id="hook-form-save-compose-file"
|
||||
|
||||
@@ -3,7 +3,6 @@ import {
|
||||
CheckIcon,
|
||||
ChevronsUpDown,
|
||||
DatabaseZap,
|
||||
Info,
|
||||
PenBoxIcon,
|
||||
PlusIcon,
|
||||
RefreshCw,
|
||||
@@ -62,7 +61,7 @@ import {
|
||||
} from "@/components/ui/tooltip";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { api } from "@/utils/api";
|
||||
import { commonCronExpressions } from "../../application/schedules/handle-schedules";
|
||||
import { ScheduleFormField } from "../../application/schedules/handle-schedules";
|
||||
|
||||
type CacheType = "cache" | "fetch";
|
||||
|
||||
@@ -579,66 +578,9 @@ export const HandleBackup = ({
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="schedule"
|
||||
render={({ field }) => {
|
||||
return (
|
||||
<FormItem>
|
||||
<FormLabel className="flex items-center gap-2">
|
||||
Schedule
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Info className="w-4 h-4 text-muted-foreground cursor-help" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>
|
||||
Cron expression format: minute hour day month
|
||||
weekday
|
||||
</p>
|
||||
<p>Example: 0 0 * * * (daily at midnight)</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</FormLabel>
|
||||
<div className="flex flex-col gap-2">
|
||||
<Select
|
||||
onValueChange={(value) => {
|
||||
field.onChange(value);
|
||||
}}
|
||||
>
|
||||
<FormControl>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select a predefined schedule" />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{commonCronExpressions.map((expr) => (
|
||||
<SelectItem key={expr.value} value={expr.value}>
|
||||
{expr.label} ({expr.value})
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<div className="relative">
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="Custom cron expression (e.g., 0 0 * * *)"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
</div>
|
||||
</div>
|
||||
<FormDescription>
|
||||
Choose a predefined schedule or enter a custom cron
|
||||
expression
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
);
|
||||
}}
|
||||
/>
|
||||
|
||||
<ScheduleFormField name="schedule" formControl={form.control} />
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="prefix"
|
||||
|
||||
@@ -171,7 +171,7 @@ export const AddTemplate = ({ environmentId, baseUrl }: Props) => {
|
||||
<Input
|
||||
placeholder="Search Template"
|
||||
onChange={(e) => setQuery(e.target.value)}
|
||||
className="w-full sm:w-[200px]"
|
||||
className="w-full"
|
||||
value={query}
|
||||
/>
|
||||
<Input
|
||||
@@ -248,7 +248,7 @@ export const AddTemplate = ({ environmentId, baseUrl }: Props) => {
|
||||
onClick={() =>
|
||||
setViewMode(viewMode === "detailed" ? "icon" : "detailed")
|
||||
}
|
||||
className="h-9 w-9"
|
||||
className="h-9 w-9 flex-shrink-0"
|
||||
>
|
||||
{viewMode === "detailed" ? (
|
||||
<LayoutGrid className="size-4" />
|
||||
|
||||
@@ -88,7 +88,7 @@ export const StepThree = ({ templateInfo }: StepProps) => {
|
||||
<div>
|
||||
<h3 className="text-sm font-semibold">Configuration Files</h3>
|
||||
<ul className="list-disc pl-5">
|
||||
{templateInfo?.details?.configFiles.map((file, index) => (
|
||||
{templateInfo?.details?.configFiles?.map((file, index) => (
|
||||
<li key={index}>
|
||||
<strong className="text-sm font-semibold">
|
||||
{file.filePath}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Bot, Eye, EyeOff, PlusCircle, Trash2 } from "lucide-react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { Bot, PlusCircle, Trash2 } from "lucide-react";
|
||||
import { useEffect } from "react";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
import { toast } from "sonner";
|
||||
import { AlertBlock } from "@/components/shared/alert-block";
|
||||
@@ -27,7 +27,6 @@ export interface StepProps {
|
||||
export const StepTwo = ({ templateInfo, setTemplateInfo }: StepProps) => {
|
||||
const suggestions = templateInfo.suggestions || [];
|
||||
const selectedVariant = templateInfo.details;
|
||||
const [showValues, setShowValues] = useState<Record<string, boolean>>({});
|
||||
|
||||
const { mutateAsync, isLoading, error, isError } =
|
||||
api.ai.suggest.useMutation();
|
||||
@@ -44,7 +43,7 @@ export const StepTwo = ({ templateInfo, setTemplateInfo }: StepProps) => {
|
||||
.then((data) => {
|
||||
setTemplateInfo({
|
||||
...templateInfo,
|
||||
suggestions: data,
|
||||
suggestions: data || [],
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
@@ -54,10 +53,6 @@ export const StepTwo = ({ templateInfo, setTemplateInfo }: StepProps) => {
|
||||
});
|
||||
}, [templateInfo.userInput]);
|
||||
|
||||
const toggleShowValue = (name: string) => {
|
||||
setShowValues((prev) => ({ ...prev, [name]: !prev[name] }));
|
||||
};
|
||||
|
||||
const handleEnvVariableChange = (
|
||||
index: number,
|
||||
field: "name" | "value",
|
||||
@@ -308,11 +303,9 @@ export const StepTwo = ({ templateInfo, setTemplateInfo }: StepProps) => {
|
||||
placeholder="Variable Name"
|
||||
className="flex-1"
|
||||
/>
|
||||
<div className="flex-1 relative">
|
||||
<div className="relative">
|
||||
<Input
|
||||
type={
|
||||
showValues[env.name] ? "text" : "password"
|
||||
}
|
||||
type={"password"}
|
||||
value={env.value}
|
||||
onChange={(e) =>
|
||||
handleEnvVariableChange(
|
||||
@@ -323,19 +316,6 @@ export const StepTwo = ({ templateInfo, setTemplateInfo }: StepProps) => {
|
||||
}
|
||||
placeholder="Variable Value"
|
||||
/>
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="absolute right-2 top-1/2 transform -translate-y-1/2"
|
||||
onClick={() => toggleShowValue(env.name)}
|
||||
>
|
||||
{showValues[env.name] ? (
|
||||
<EyeOff className="h-4 w-4" />
|
||||
) : (
|
||||
<Eye className="h-4 w-4" />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
<Button
|
||||
type="button"
|
||||
@@ -437,13 +417,14 @@ export const StepTwo = ({ templateInfo, setTemplateInfo }: StepProps) => {
|
||||
<AccordionContent>
|
||||
<ScrollArea className="w-full rounded-md border">
|
||||
<div className="p-4 space-y-4">
|
||||
{selectedVariant?.configFiles?.length > 0 ? (
|
||||
{selectedVariant?.configFiles?.length &&
|
||||
selectedVariant?.configFiles?.length > 0 ? (
|
||||
<>
|
||||
<div className="text-sm text-muted-foreground mb-4">
|
||||
This template requires the following
|
||||
configuration files to be mounted:
|
||||
</div>
|
||||
{selectedVariant.configFiles.map(
|
||||
{selectedVariant?.configFiles?.map(
|
||||
(config, index) => (
|
||||
<div
|
||||
key={index}
|
||||
|
||||
@@ -47,7 +47,7 @@ interface Details {
|
||||
envVariables: EnvVariable[];
|
||||
shortDescription: string;
|
||||
domains: Domain[];
|
||||
configFiles: Mount[];
|
||||
configFiles?: Mount[];
|
||||
}
|
||||
|
||||
interface Mount {
|
||||
|
||||
@@ -80,6 +80,29 @@ export const DuplicateProject = ({
|
||||
api.project.duplicate.useMutation({
|
||||
onSuccess: async (newProject) => {
|
||||
await utils.project.all.invalidate();
|
||||
|
||||
// If duplicating to same project+environment, invalidate the environment query
|
||||
// to refresh the services list
|
||||
if (duplicateType === "existing-environment") {
|
||||
await utils.environment.one.invalidate({
|
||||
environmentId: selectedTargetEnvironment,
|
||||
});
|
||||
await utils.environment.byProjectId.invalidate({
|
||||
projectId: selectedTargetProject,
|
||||
});
|
||||
|
||||
// If duplicating to the same environment we're currently viewing,
|
||||
// also invalidate the current environment to refresh the services list
|
||||
if (selectedTargetEnvironment === environmentId) {
|
||||
await utils.environment.one.invalidate({ environmentId });
|
||||
// Also invalidate the project query to refresh the project data
|
||||
const projectId = router.query.projectId as string;
|
||||
if (projectId) {
|
||||
await utils.project.one.invalidate({ projectId });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
toast.success(
|
||||
duplicateType === "new-project"
|
||||
? "Project duplicated successfully"
|
||||
|
||||
@@ -96,8 +96,30 @@ export const ShowProjects = () => {
|
||||
new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime();
|
||||
break;
|
||||
case "services": {
|
||||
const aTotalServices = a.environments.length;
|
||||
const bTotalServices = b.environments.length;
|
||||
const aTotalServices = a.environments.reduce((total, env) => {
|
||||
return (
|
||||
total +
|
||||
(env.applications?.length || 0) +
|
||||
(env.mariadb?.length || 0) +
|
||||
(env.mongo?.length || 0) +
|
||||
(env.mysql?.length || 0) +
|
||||
(env.postgres?.length || 0) +
|
||||
(env.redis?.length || 0) +
|
||||
(env.compose?.length || 0)
|
||||
);
|
||||
}, 0);
|
||||
const bTotalServices = b.environments.reduce((total, env) => {
|
||||
return (
|
||||
total +
|
||||
(env.applications?.length || 0) +
|
||||
(env.mariadb?.length || 0) +
|
||||
(env.mongo?.length || 0) +
|
||||
(env.mysql?.length || 0) +
|
||||
(env.postgres?.length || 0) +
|
||||
(env.redis?.length || 0) +
|
||||
(env.compose?.length || 0)
|
||||
);
|
||||
}, 0);
|
||||
comparison = aTotalServices - bTotalServices;
|
||||
break;
|
||||
}
|
||||
@@ -291,13 +313,15 @@ export const ShowProjects = () => {
|
||||
)}
|
||||
</DropdownMenuGroup>
|
||||
)}
|
||||
{/*
|
||||
{project.compose.length > 0 && (
|
||||
{project.environments.some(
|
||||
(env) => env.compose.length > 0,
|
||||
) && (
|
||||
<DropdownMenuGroup>
|
||||
<DropdownMenuLabel>
|
||||
Compose
|
||||
</DropdownMenuLabel>
|
||||
{project.compose.map((comp) => (
|
||||
{project.environments.map((env) =>
|
||||
env.compose.map((comp) => (
|
||||
<div key={comp.composeId}>
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuGroup>
|
||||
@@ -327,9 +351,10 @@ export const ShowProjects = () => {
|
||||
))}
|
||||
</DropdownMenuGroup>
|
||||
</div>
|
||||
))}
|
||||
)),
|
||||
)}
|
||||
</DropdownMenuGroup>
|
||||
)} */}
|
||||
)}
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
) : null}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { ExternalLink } from "lucide-react";
|
||||
import Link from "next/link";
|
||||
import { useRouter } from "next/router";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
@@ -27,18 +26,12 @@ import {
|
||||
} from "@/components/ui/form";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { api } from "@/utils/api";
|
||||
import { useUrl } from "@/utils/hooks/use-url";
|
||||
|
||||
const Schema = z.object({
|
||||
name: z.string().min(1, {
|
||||
message: "Name is required",
|
||||
}),
|
||||
username: z.string().min(1, {
|
||||
message: "Username is required",
|
||||
}),
|
||||
password: z.string().min(1, {
|
||||
message: "App Password is required",
|
||||
}),
|
||||
name: z.string().min(1, { message: "Name is required" }),
|
||||
username: z.string().min(1, { message: "Username is required" }),
|
||||
email: z.string().email().optional(),
|
||||
apiToken: z.string().min(1, { message: "API Token is required" }),
|
||||
workspaceName: z.string().optional(),
|
||||
});
|
||||
|
||||
@@ -47,14 +40,12 @@ type Schema = z.infer<typeof Schema>;
|
||||
export const AddBitbucketProvider = () => {
|
||||
const utils = api.useUtils();
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const _url = useUrl();
|
||||
const { mutateAsync, error, isError } = api.bitbucket.create.useMutation();
|
||||
const { data: auth } = api.user.get.useQuery();
|
||||
const _router = useRouter();
|
||||
const form = useForm<Schema>({
|
||||
defaultValues: {
|
||||
username: "",
|
||||
password: "",
|
||||
apiToken: "",
|
||||
workspaceName: "",
|
||||
},
|
||||
resolver: zodResolver(Schema),
|
||||
@@ -63,7 +54,8 @@ export const AddBitbucketProvider = () => {
|
||||
useEffect(() => {
|
||||
form.reset({
|
||||
username: "",
|
||||
password: "",
|
||||
email: "",
|
||||
apiToken: "",
|
||||
workspaceName: "",
|
||||
});
|
||||
}, [form, isOpen]);
|
||||
@@ -71,10 +63,11 @@ export const AddBitbucketProvider = () => {
|
||||
const onSubmit = async (data: Schema) => {
|
||||
await mutateAsync({
|
||||
bitbucketUsername: data.username,
|
||||
appPassword: data.password,
|
||||
apiToken: data.apiToken,
|
||||
bitbucketWorkspaceName: data.workspaceName || "",
|
||||
authId: auth?.id || "",
|
||||
name: data.name || "",
|
||||
bitbucketEmail: data.email || "",
|
||||
})
|
||||
.then(async () => {
|
||||
await utils.gitProvider.getAll.invalidate();
|
||||
@@ -113,37 +106,46 @@ export const AddBitbucketProvider = () => {
|
||||
>
|
||||
<CardContent className="p-0">
|
||||
<div className="flex flex-col gap-4">
|
||||
<p className="text-muted-foreground text-sm">
|
||||
To integrate your Bitbucket account, you need to create a new
|
||||
App Password in your Bitbucket settings. Follow these steps:
|
||||
</p>
|
||||
<ol className="list-decimal list-inside text-sm text-muted-foreground">
|
||||
<li className="flex flex-row gap-2 items-center">
|
||||
Create new App Password{" "}
|
||||
<AlertBlock type="warning">
|
||||
Bitbucket App Passwords are deprecated for new providers. Use
|
||||
an API Token instead. Existing providers with App Passwords
|
||||
will continue to work until 9th June 2026.
|
||||
</AlertBlock>
|
||||
|
||||
<div className="mt-1 text-sm">
|
||||
Manage tokens in
|
||||
<Link
|
||||
href="https://bitbucket.org/account/settings/app-passwords/new"
|
||||
href="https://id.atlassian.com/manage-profile/security/api-tokens"
|
||||
target="_blank"
|
||||
className="inline-flex items-center gap-1 ml-1"
|
||||
>
|
||||
<span>Bitbucket settings</span>
|
||||
<ExternalLink className="w-fit text-primary size-4" />
|
||||
</Link>
|
||||
</div>
|
||||
<ul className="list-disc list-inside ml-4 text-sm text-muted-foreground">
|
||||
<li className="text-muted-foreground text-sm">
|
||||
Click on Create API token with scopes
|
||||
</li>
|
||||
<li className="text-muted-foreground text-sm">
|
||||
Select the expiration date (Max 1 year)
|
||||
</li>
|
||||
<li className="text-muted-foreground text-sm">
|
||||
Select Bitbucket product.
|
||||
</li>
|
||||
<li>
|
||||
When creating the App Password, ensure you grant the
|
||||
following permissions:
|
||||
<ul className="list-disc list-inside ml-4">
|
||||
<li>Account: Read</li>
|
||||
<li>Workspace membership: Read</li>
|
||||
<li>Projects: Read</li>
|
||||
<li>Repositories: Read</li>
|
||||
<li>Pull requests: Read</li>
|
||||
<li>Webhooks: Read and write</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
After creating, you'll receive an App Password. Copy it and
|
||||
paste it below along with your Bitbucket username.
|
||||
</li>
|
||||
</ol>
|
||||
<p className="text-muted-foreground text-sm">
|
||||
Select the following scopes:
|
||||
</p>
|
||||
|
||||
<ul className="list-disc list-inside ml-4 text-sm text-muted-foreground">
|
||||
<li>read:repository:bitbucket</li>
|
||||
<li>read:pullrequest:bitbucket</li>
|
||||
<li>read:webhook:bitbucket</li>
|
||||
<li>read:workspace:bitbucket</li>
|
||||
<li>write:webhook:bitbucket</li>
|
||||
</ul>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="name"
|
||||
@@ -152,7 +154,7 @@ export const AddBitbucketProvider = () => {
|
||||
<FormLabel>Name</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="Random Name eg(my-personal-account)"
|
||||
placeholder="Your Bitbucket Provider, eg: my-personal-account"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
@@ -179,14 +181,27 @@ export const AddBitbucketProvider = () => {
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="password"
|
||||
name="email"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>App Password</FormLabel>
|
||||
<FormLabel>Bitbucket Email</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder="Your Bitbucket email" {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="apiToken"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>API Token</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
type="password"
|
||||
placeholder="ATBBPDYUC94nR96Nj7Cqpp4pfwKk03573DD2"
|
||||
placeholder="Paste your Bitbucket API token"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
@@ -200,7 +215,7 @@ export const AddBitbucketProvider = () => {
|
||||
name="workspaceName"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Workspace Name (Optional)</FormLabel>
|
||||
<FormLabel>Workspace Name (optional)</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="For organization accounts"
|
||||
|
||||
@@ -33,7 +33,10 @@ const Schema = z.object({
|
||||
username: z.string().min(1, {
|
||||
message: "Username is required",
|
||||
}),
|
||||
email: z.string().email().optional(),
|
||||
workspaceName: z.string().optional(),
|
||||
apiToken: z.string().optional(),
|
||||
appPassword: z.string().optional(),
|
||||
});
|
||||
|
||||
type Schema = z.infer<typeof Schema>;
|
||||
@@ -60,19 +63,28 @@ export const EditBitbucketProvider = ({ bitbucketId }: Props) => {
|
||||
const form = useForm<Schema>({
|
||||
defaultValues: {
|
||||
username: "",
|
||||
email: "",
|
||||
workspaceName: "",
|
||||
apiToken: "",
|
||||
appPassword: "",
|
||||
},
|
||||
resolver: zodResolver(Schema),
|
||||
});
|
||||
|
||||
const username = form.watch("username");
|
||||
const email = form.watch("email");
|
||||
const workspaceName = form.watch("workspaceName");
|
||||
const apiToken = form.watch("apiToken");
|
||||
const appPassword = form.watch("appPassword");
|
||||
|
||||
useEffect(() => {
|
||||
form.reset({
|
||||
username: bitbucket?.bitbucketUsername || "",
|
||||
email: bitbucket?.bitbucketEmail || "",
|
||||
workspaceName: bitbucket?.bitbucketWorkspaceName || "",
|
||||
name: bitbucket?.gitProvider.name || "",
|
||||
apiToken: bitbucket?.apiToken || "",
|
||||
appPassword: bitbucket?.appPassword || "",
|
||||
});
|
||||
}, [form, isOpen, bitbucket]);
|
||||
|
||||
@@ -81,8 +93,11 @@ export const EditBitbucketProvider = ({ bitbucketId }: Props) => {
|
||||
bitbucketId,
|
||||
gitProviderId: bitbucket?.gitProviderId || "",
|
||||
bitbucketUsername: data.username,
|
||||
bitbucketEmail: data.email || "",
|
||||
bitbucketWorkspaceName: data.workspaceName || "",
|
||||
name: data.name || "",
|
||||
apiToken: data.apiToken || "",
|
||||
appPassword: data.appPassword || "",
|
||||
})
|
||||
.then(async () => {
|
||||
await utils.gitProvider.getAll.invalidate();
|
||||
@@ -121,6 +136,12 @@ export const EditBitbucketProvider = ({ bitbucketId }: Props) => {
|
||||
>
|
||||
<CardContent className="p-0">
|
||||
<div className="flex flex-col gap-4">
|
||||
<p className="text-muted-foreground text-sm">
|
||||
Update your Bitbucket authentication. Use API Token for
|
||||
enhanced security (recommended) or App Password for legacy
|
||||
support.
|
||||
</p>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="name"
|
||||
@@ -154,6 +175,24 @@ export const EditBitbucketProvider = ({ bitbucketId }: Props) => {
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="email"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Email (Required for API Tokens)</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
type="email"
|
||||
placeholder="Your Bitbucket email address"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="workspaceName"
|
||||
@@ -171,6 +210,49 @@ export const EditBitbucketProvider = ({ bitbucketId }: Props) => {
|
||||
)}
|
||||
/>
|
||||
|
||||
<div className="flex flex-col gap-2 border-t pt-4">
|
||||
<h3 className="text-sm font-medium mb-2">
|
||||
Authentication (Update to use API Token)
|
||||
</h3>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="apiToken"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>API Token (Recommended)</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
type="password"
|
||||
placeholder="Enter your Bitbucket API Token"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="appPassword"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>
|
||||
App Password (Legacy - will be deprecated June 2026)
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
type="password"
|
||||
placeholder="Enter your Bitbucket App Password"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex w-full justify-between gap-4 mt-4">
|
||||
<Button
|
||||
type="button"
|
||||
@@ -180,7 +262,10 @@ export const EditBitbucketProvider = ({ bitbucketId }: Props) => {
|
||||
await testConnection({
|
||||
bitbucketId,
|
||||
bitbucketUsername: username,
|
||||
bitbucketEmail: email,
|
||||
workspaceName: workspaceName,
|
||||
apiToken: apiToken,
|
||||
appPassword: appPassword,
|
||||
})
|
||||
.then(async (message) => {
|
||||
toast.info(`Message: ${message}`);
|
||||
|
||||
@@ -30,6 +30,9 @@ const Schema = z.object({
|
||||
name: z.string().min(1, {
|
||||
message: "Name is required",
|
||||
}),
|
||||
appName: z.string().min(1, {
|
||||
message: "App Name is required",
|
||||
}),
|
||||
});
|
||||
|
||||
type Schema = z.infer<typeof Schema>;
|
||||
@@ -55,6 +58,7 @@ export const EditGithubProvider = ({ githubId }: Props) => {
|
||||
const form = useForm<Schema>({
|
||||
defaultValues: {
|
||||
name: "",
|
||||
appName: "",
|
||||
},
|
||||
resolver: zodResolver(Schema),
|
||||
});
|
||||
@@ -62,6 +66,7 @@ export const EditGithubProvider = ({ githubId }: Props) => {
|
||||
useEffect(() => {
|
||||
form.reset({
|
||||
name: github?.gitProvider.name || "",
|
||||
appName: github?.githubAppName || "",
|
||||
});
|
||||
}, [form, isOpen]);
|
||||
|
||||
@@ -70,6 +75,7 @@ export const EditGithubProvider = ({ githubId }: Props) => {
|
||||
githubId,
|
||||
name: data.name || "",
|
||||
gitProviderId: github?.gitProviderId || "",
|
||||
githubAppName: data.appName || "",
|
||||
})
|
||||
.then(async () => {
|
||||
await utils.gitProvider.getAll.invalidate();
|
||||
@@ -124,6 +130,22 @@ export const EditGithubProvider = ({ githubId }: Props) => {
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="appName"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>App Name</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="pp Name eg(my-personal)"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<div className="flex w-full justify-between gap-4 mt-4">
|
||||
<Button
|
||||
|
||||
@@ -157,7 +157,13 @@ export const ShowGitProviders = () => {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-row gap-1">
|
||||
<div className="flex flex-row gap-1 items-center">
|
||||
{isBitbucket &&
|
||||
gitProvider.bitbucket?.appPassword &&
|
||||
!gitProvider.bitbucket?.apiToken ? (
|
||||
<Badge variant="yellow">Deprecated</Badge>
|
||||
) : null}
|
||||
|
||||
{!haveGithubRequirements && isGithub && (
|
||||
<div className="flex flex-row gap-1 items-center">
|
||||
<Badge
|
||||
|
||||
@@ -12,6 +12,8 @@ import { toast } from "sonner";
|
||||
import { z } from "zod";
|
||||
import {
|
||||
DiscordIcon,
|
||||
GotifyIcon,
|
||||
NtfyIcon,
|
||||
SlackIcon,
|
||||
TelegramIcon,
|
||||
} from "@/components/icons/notification-icons";
|
||||
@@ -130,11 +132,11 @@ export const notificationsMap = {
|
||||
label: "Email",
|
||||
},
|
||||
gotify: {
|
||||
icon: <MessageCircleMore size={29} className="text-muted-foreground" />,
|
||||
icon: <GotifyIcon />,
|
||||
label: "Gotify",
|
||||
},
|
||||
ntfy: {
|
||||
icon: <MessageCircleMore size={29} className="text-muted-foreground" />,
|
||||
icon: <NtfyIcon />,
|
||||
label: "ntfy",
|
||||
},
|
||||
};
|
||||
|
||||
@@ -2,6 +2,8 @@ import { Bell, Loader2, Mail, MessageCircleMore, Trash2 } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import {
|
||||
DiscordIcon,
|
||||
GotifyIcon,
|
||||
NtfyIcon,
|
||||
SlackIcon,
|
||||
TelegramIcon,
|
||||
} from "@/components/icons/notification-icons";
|
||||
@@ -85,12 +87,12 @@ export const ShowNotifications = () => {
|
||||
)}
|
||||
{notification.notificationType === "gotify" && (
|
||||
<div className="flex items-center justify-center rounded-lg ">
|
||||
<MessageCircleMore className="size-6 text-muted-foreground" />
|
||||
<GotifyIcon className="size-6" />
|
||||
</div>
|
||||
)}
|
||||
{notification.notificationType === "ntfy" && (
|
||||
<div className="flex items-center justify-center rounded-lg ">
|
||||
<MessageCircleMore className="size-6 text-muted-foreground" />
|
||||
<NtfyIcon className="size-6" />
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
@@ -33,7 +33,10 @@ import { Disable2FA } from "./disable-2fa";
|
||||
import { Enable2FA } from "./enable-2fa";
|
||||
|
||||
const profileSchema = z.object({
|
||||
email: z.string(),
|
||||
email: z
|
||||
.string()
|
||||
.email("Please enter a valid email address")
|
||||
.min(1, "Email is required"),
|
||||
password: z.string().nullable(),
|
||||
currentPassword: z.string().nullable(),
|
||||
image: z.string().optional(),
|
||||
|
||||
@@ -5,6 +5,7 @@ import { useEffect } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
import { z } from "zod";
|
||||
import { AlertBlock } from "@/components/shared/alert-block";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Card,
|
||||
@@ -76,6 +77,9 @@ export const WebDomain = () => {
|
||||
resolver: zodResolver(addServerDomain),
|
||||
});
|
||||
const https = form.watch("https");
|
||||
const domain = form.watch("domain") || "";
|
||||
const host = data?.user?.host || "";
|
||||
const hasChanged = domain !== host;
|
||||
useEffect(() => {
|
||||
if (data) {
|
||||
form.reset({
|
||||
@@ -119,6 +123,19 @@ export const WebDomain = () => {
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-2 py-6 border-t">
|
||||
{/* Warning for GitHub webhook URL changes */}
|
||||
{hasChanged && (
|
||||
<AlertBlock type="warning">
|
||||
<div className="space-y-2">
|
||||
<p className="font-medium">⚠️ Important: URL Change Impact</p>
|
||||
<p>
|
||||
If you change the Dokploy Server URL, make sure to update
your GitHub Apps so that auto-deploy and preview
deployments keep working.
|
||||
</p>
|
||||
</div>
|
||||
</AlertBlock>
|
||||
)}
|
||||
<Form {...form}>
|
||||
<form
|
||||
onSubmit={form.handleSubmit(onSubmit)}
|
||||
|
||||
@@ -88,3 +88,121 @@ export const DiscordIcon = ({ className }: Props) => {
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
|
||||
export const GotifyIcon = ({ className }: Props) => {
|
||||
return (
|
||||
<svg
|
||||
viewBox="0 0 500 500"
|
||||
className={cn("size-8", className)}
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<defs>
|
||||
<style>
|
||||
{`
|
||||
.gotify-st0{fill:#DDCBA2;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-miterlimit:10;}
|
||||
.gotify-st1{fill:#71CAEE;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-miterlimit:10;}
|
||||
.gotify-st2{fill:#FFFFFF;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-miterlimit:10;}
|
||||
.gotify-st3{fill:#888E93;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-miterlimit:10;}
|
||||
.gotify-st4{fill:#F0F0F0;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-miterlimit:10;}
|
||||
.gotify-st5{fill:none;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-miterlimit:10;}
|
||||
.gotify-st8{fill:#FFFFFF;}
|
||||
`}
|
||||
</style>
|
||||
<linearGradient
|
||||
id="gotify-gradient"
|
||||
x1="265"
|
||||
y1="280"
|
||||
x2="275"
|
||||
y2="302"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop offset="0" stopColor="#71CAEE" />
|
||||
<stop offset="0.04" stopColor="#83CAE2" />
|
||||
<stop offset="0.12" stopColor="#9FCACE" />
|
||||
<stop offset="0.21" stopColor="#B6CBBE" />
|
||||
<stop offset="0.31" stopColor="#C7CBB1" />
|
||||
<stop offset="0.44" stopColor="#D4CBA8" />
|
||||
<stop offset="0.61" stopColor="#DBCBA3" />
|
||||
<stop offset="1" stopColor="#DDCBA2" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g transform="matrix(2.33,0,0,2.33,-432,-323)">
|
||||
<g transform="translate(-25,26)">
|
||||
<path
|
||||
className="gotify-st1"
|
||||
d="m258.9,119.7c-3,-0.9-6,-1.8-9,-2.7-4.6,-1.4-9.2,-2.8-14,-2.5-2.8,0.2-6.1,1.3-6.9,4-0.6,2-1.6,7.3-1.3,7.9 1.5,3.4 13.9,6.7 18.3,6.7"
|
||||
/>
|
||||
<path d="m392.6,177.9c-1.4,1.4-2.2,3.5-2.5,5.5-0.2,1.4-0.1,3 0.5,4.3 0.6,1.3 1.8,2.3 3.1,3 1.3,0.6 2.8,0.9 4.3,0.9 1.1,0 2.3,-0.1 3.1,-0.9 0.6,-0.7 0.8,-1.6 0.9,-2.5 0.2,-2.3-0.1,-4.7-0.9,-6.9-0.4,-1.1-0.9,-2.3-1.8,-3.1-1.7,-1.8-4.5,-2.2-6.4,-0.5-0.1,0-0.2,0.1-0.3,0.2z" />
|
||||
<path
|
||||
className="gotify-st2"
|
||||
d="m358.5,164.2c-1,-1 0,-2.7 1,-3.7 5.8,-5.2 15.1,-4.6 21.8,-0.6 10.9,6.6 15.6,19.9 17.2,32.5 0.6,5.2 0.9,10.6-0.5,15.7-1.4,5.1-4.6,9.9-9.3,12.1-1.1,0.5-2.3,0.9-3.4,0.5-1.1,-0.4-1.9,-1.8-1.2,-2.8-9.4,-13.6-19,-26.8-20.9,-43.2-0.5,-4.1-1.8,-7.4-4.7,-10.5z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st1"
|
||||
d="m220.1,133c34.6,-18 79.3,-19.6 112.2,-8.7 23.7,7.9 41.3,26.7 49.5,50 7.1,20.6 7.1,43.6 3,65.7-7.5,40.2-26.2,77.9-49,112.6-12.6,19-24.6,36-44.2,48.5-38.7,24.6-88.9,22.1-129.3,11.5-19.5,-5.1-38.4,-17.3-44.3,-37.3-3.8,-12.8-2.1,-27.6 4.6,-40 13.5,-24.8 46.2,-38.4 50.8,-67.9 1.4,-8.7-0.3,-17.3-1.6,-25.7-3.8,-23.4-5.4,-45.8 6.7,-68.7 9.5,-17.7 24.3,-31 41.7,-40z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st2"
|
||||
d="m264.5,174.9c-0.5,0.5-0.9,1-1.3,1.6-9,11.6-12,27.9-9.3,42.1 1.7,9 5.9,17.9 13.2,23.4 19.3,14.6 51.5,13.5 68.4,-1.5 24.4,-21.7 13,-67.6-14,-78.8-17.6,-7.2-43.7,-1.6-57,13.2z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st2"
|
||||
d="m382.1,237.1c1.4,-0.1 2.9,-0.1 4.3,0.1 0.3,0 0.7,0.1 1,0.4 0.2,0.3 0.4,0.7 0.5,1.1 1,3.9 0.5,8.2 0.1,12.4-0.1,0.9-0.2,1.8-0.6,2.6-1,2.1-3.1,2.7-4.7,2.7-0.1,0-0.2,0-0.3,-0.1-0.3,-0.2-0.3,-0.7-0.2,-1.2 0.3,-5.9-0.1,-11.9-0.1,-18z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st2"
|
||||
d="m378.7,236.8c-1.4,0.4-2.5,2-2.8,4.4-0.5,4.4-0.7,8.9-0.5,13.4 0,0.9 0.1,1.9 0.5,2.4 0.2,0.3 0.5,0.4 0.8,0.4 1.6,0.3 4.1,-0.6 5.6,-1 0,0 0,-5.2-0.1,-8-0.1,-2.8-0.1,-6.1-0.2,-8.9 0,-0.6 0,-1.5 0,-2.2 0.1,-0.7-2.6,-0.7-3.3,-0.5z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st0"
|
||||
d="m358.3,231.8c-0.3,2.2 0.1,4.7 1.7,7.4 2.6,4.4 7,6.1 11.9,5.8 8.9,-0.6 25.3,-5.4 27.5,-15.7 0.6,-3-0.3,-6.1-2.2,-8.5-6.2,-7.8-17.8,-5.7-25.6,-2-5.9,2.7-12.4,7-13.3,13z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st3"
|
||||
d="m386.4,208.6c2.2,1.4 3.7,3.8 4,7 0.3,3.6-1.4,7.5-5,8.8-2.9,1.1-6.2,0.6-9.1,-0.4-2.9,-1-5.8,-2.8-6.8,-5.7-0.7,-2-0.3,-4.3 0.7,-6.1 1.1,-1.8 2.8,-3.2 4.7,-4.1 3.9,-1.8 8.4,-1.6 11.5,0.5z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st0"
|
||||
d="m414.7,262.6c2.4,0.6 4.8,2.1 5.6,4.4 0.8,2.3 0.1,4.9-1.6,6.7-1.7,1.8-4.2,2.5-6.6,2.5-0.8,0-1.7,-0.1-2.4,-0.5-2.5,-1.1-3.5,-4-4.2,-6.6-1.8,-6.8 3.6,-7.8 9.2,-6.5z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st4"
|
||||
d="m267.1,284.7c2.3,-4.5 141.3,-36.2 144.7,-31.6 3.4,4.5 15.8,88.2 9,90.4-6.8,2.3-119.8,37.3-126.6,35-6.8,-2.3-29.4,-89.3-27.1,-93.8z"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st5"
|
||||
d="m294.2,378.5c0,0 54.3,-74.6 59.9,-76.9 5.7,-2.3 67.3,41.3 67.3,41.3"
|
||||
/>
|
||||
<path
|
||||
className="gotify-st4"
|
||||
d="m267,287.7c0,0 86,38.8 91.6,36.6 5.7,-2.3 53.1,-71.2 53.1,-71.2"
|
||||
/>
|
||||
<path
|
||||
fill="url(#gotify-gradient)"
|
||||
d="m261.9,283.5c-0.1,4.2 4.3,7.3 8.4,7.6 4.1,0.3 8.2,-1.3 12.2,-2.6 1.4,-0.4 2.9,-0.8 4.2,-0.2 1.8,0.9 2.7,4.1 1.8,5.9-0.9,1.8-3.4,3.5-5.3,4.4-6.5,3-12.9,3.6-19.9,2-5.3,-1.2-11.3,-4.3-13,-13.5"
|
||||
/>
|
||||
<path d="m318.4,198.4c-2,-0.3-4.1,0.1-5.9,1.3-3.2,2.1-4.7,6.2-4.7,9.9 0,1.9 0.4,3.8 1.4,5.3 1.2,1.7 3.1,2.9 5.2,3.4 3.4,0.8 8.2,0.7 10.5,-2.5 1,-1.5 1.4,-3.3 1.5,-5.1 0.5,-5.7-1.8,-11.4-8,-12.3z" />
|
||||
<path
|
||||
className="gotify-st8"
|
||||
d="m320.4,203.3c0.9,0.3 1.7,0.8 2.1,1.7 0.4,0.8 0.4,1.7 0.3,2.5-0.1,1-0.6,2-1.5,2.7-0.7,0.5-1.7,0.7-2.6,0.5-0.9,-0.2-1.7,-0.8-2.2,-1.6-1.1,-1.6-0.9,-4.4 0.9,-5.5 0.9,-0.4 2,-0.6 3,-0.3z"
|
||||
/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
|
||||
export const NtfyIcon = ({ className }: Props) => {
|
||||
return (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
className={cn("size-8", className)}
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
fill="currentColor"
|
||||
d="M12.597 13.693v2.156h6.205v-2.156ZM5.183 6.549v2.363l3.591 1.901 0.023 0.01 -0.023 0.009 -3.591 1.901v2.35l0.386 -0.211 5.456 -2.969V9.729ZM3.659 2.037C1.915 2.037 0.42 3.41 0.42 5.154v0.002L0.438 18.73 0 21.963l5.956 -1.583h14.806c1.744 0 3.238 -1.374 3.238 -3.118V5.154c0 -1.744 -1.493 -3.116 -3.237 -3.117h-0.001zm0 2.2h17.104c0.613 0.001 1.037 0.447 1.037 0.917v12.108c0 0.47 -0.424 0.916 -1.038 0.916H5.633l-3.026 0.915 0.031 -0.179 -0.017 -13.76c0 -0.47 0.424 -0.917 1.038 -0.917z"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -7,7 +7,14 @@ import {
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
interface Props {
|
||||
status: "running" | "error" | "done" | "idle" | undefined | null;
|
||||
status:
|
||||
| "running"
|
||||
| "error"
|
||||
| "done"
|
||||
| "idle"
|
||||
| "cancelled"
|
||||
| undefined
|
||||
| null;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
@@ -34,6 +41,14 @@ export const StatusTooltip = ({ status, className }: Props) => {
|
||||
className={cn("size-3.5 rounded-full bg-green-500", className)}
|
||||
/>
|
||||
)}
|
||||
{status === "cancelled" && (
|
||||
<div
|
||||
className={cn(
|
||||
"size-3.5 rounded-full bg-muted-foreground",
|
||||
className,
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
{status === "running" && (
|
||||
<div
|
||||
className={cn("size-3.5 rounded-full bg-yellow-500", className)}
|
||||
@@ -46,6 +61,7 @@ export const StatusTooltip = ({ status, className }: Props) => {
|
||||
{status === "error" && "Error"}
|
||||
{status === "done" && "Done"}
|
||||
{status === "running" && "Running"}
|
||||
{status === "cancelled" && "Cancelled"}
|
||||
</span>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
apps/dokploy/drizzle/0111_mushy_wolfsbane.sql (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE "bitbucket" ADD COLUMN "apiToken" text;
apps/dokploy/drizzle/0112_freezing_skrulls.sql (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE "bitbucket" ADD COLUMN "bitbucketEmail" text;
apps/dokploy/drizzle/0113_complete_rafael_vega.sql (new file, 1 line)
@@ -0,0 +1 @@
ALTER TYPE "public"."deploymentStatus" ADD VALUE 'cancelled';
apps/dokploy/drizzle/0114_dry_black_tom.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
ALTER TABLE "application" ADD COLUMN "stopGracePeriodSwarm" bigint;--> statement-breakpoint
ALTER TABLE "mariadb" ADD COLUMN "stopGracePeriodSwarm" bigint;--> statement-breakpoint
ALTER TABLE "mongo" ADD COLUMN "stopGracePeriodSwarm" bigint;--> statement-breakpoint
ALTER TABLE "mysql" ADD COLUMN "stopGracePeriodSwarm" bigint;--> statement-breakpoint
ALTER TABLE "postgres" ADD COLUMN "stopGracePeriodSwarm" bigint;--> statement-breakpoint
ALTER TABLE "redis" ADD COLUMN "stopGracePeriodSwarm" bigint;
apps/dokploy/drizzle/meta/0111_snapshot.json (new file, 6565 lines; diff suppressed because it is too large)
apps/dokploy/drizzle/meta/0112_snapshot.json (new file, 6571 lines; diff suppressed because it is too large)
apps/dokploy/drizzle/meta/0113_snapshot.json (new file, 6572 lines; diff suppressed because it is too large)
apps/dokploy/drizzle/meta/0114_snapshot.json (new file, 6608 lines; diff suppressed because it is too large)
@@ -778,6 +778,34 @@
|
||||
"when": 1757189541734,
|
||||
"tag": "0110_red_psynapse",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 111,
|
||||
"version": "7",
|
||||
"when": 1758445844561,
|
||||
"tag": "0111_mushy_wolfsbane",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 112,
|
||||
"version": "7",
|
||||
"when": 1758483520214,
|
||||
"tag": "0112_freezing_skrulls",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 113,
|
||||
"version": "7",
|
||||
"when": 1758960816504,
|
||||
"tag": "0113_complete_rafael_vega",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 114,
|
||||
"version": "7",
|
||||
"when": 1759643172958,
|
||||
"tag": "0114_dry_black_tom",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "dokploy",
|
||||
"version": "v0.25.1",
|
||||
"version": "v0.25.4",
|
||||
"private": true,
|
||||
"license": "Apache-2.0",
|
||||
"type": "module",
|
||||
@@ -112,7 +112,7 @@
|
||||
"i18next": "^23.16.8",
|
||||
"input-otp": "^1.4.2",
|
||||
"js-cookie": "^3.0.5",
|
||||
"js-yaml": "4.1.0",
|
||||
"yaml": "2.8.1",
|
||||
"lodash": "4.17.21",
|
||||
"lucide-react": "^0.469.0",
|
||||
"micromatch": "4.0.8",
|
||||
@@ -160,7 +160,6 @@
|
||||
"@types/adm-zip": "^0.5.7",
|
||||
"@types/bcrypt": "5.0.2",
|
||||
"@types/js-cookie": "^3.0.6",
|
||||
"@types/js-yaml": "4.0.9",
|
||||
"@types/lodash": "4.17.4",
|
||||
"@types/micromatch": "4.0.9",
|
||||
"@types/node": "^18.19.104",
|
||||
|
||||
@@ -1,4 +1,9 @@
|
||||
import { IS_CLOUD, shouldDeploy } from "@dokploy/server";
|
||||
import {
|
||||
type Bitbucket,
|
||||
getBitbucketHeaders,
|
||||
IS_CLOUD,
|
||||
shouldDeploy,
|
||||
} from "@dokploy/server";
|
||||
import { eq } from "drizzle-orm";
|
||||
import type { NextApiRequest, NextApiResponse } from "next";
|
||||
import { db } from "@/server/db";
|
||||
@@ -146,10 +151,10 @@ export default async function handler(
|
||||
|
||||
const commitedPaths = await extractCommitedPaths(
|
||||
req.body,
|
||||
application.bitbucketOwner,
|
||||
application.bitbucket?.appPassword || "",
|
||||
application.bitbucket,
|
||||
application.bitbucketRepository || "",
|
||||
);
|
||||
|
||||
const shouldDeployPaths = shouldDeploy(
|
||||
application.watchPaths,
|
||||
commitedPaths,
|
||||
@@ -354,9 +359,8 @@ export const getProviderByHeader = (headers: any) => {
|
||||
|
||||
export const extractCommitedPaths = async (
|
||||
body: any,
|
||||
bitbucketUsername: string | null,
|
||||
bitbucketAppPassword: string | null,
|
||||
repository: string | null,
|
||||
bitbucket: Bitbucket | null,
|
||||
repository: string,
|
||||
) => {
|
||||
const changes = body.push?.changes || [];
|
||||
|
||||
@@ -365,18 +369,16 @@ export const extractCommitedPaths = async (
|
||||
.filter(Boolean);
|
||||
const commitedPaths: string[] = [];
|
||||
for (const commit of commitHashes) {
|
||||
const url = `https://api.bitbucket.org/2.0/repositories/${bitbucketUsername}/${repository}/diffstat/${commit}`;
|
||||
const url = `https://api.bitbucket.org/2.0/repositories/${bitbucket?.bitbucketUsername}/${repository}/diffstat/${commit}`;
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
Authorization: `Basic ${Buffer.from(`${bitbucketUsername}:${bitbucketAppPassword}`).toString("base64")}`,
|
||||
},
|
||||
headers: getBitbucketHeaders(bitbucket!),
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
for (const value of data.values) {
|
||||
commitedPaths.push(value.new?.path);
|
||||
if (value?.new?.path) commitedPaths.push(value.new.path);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
|
||||
@@ -99,8 +99,7 @@ export default async function handler(
|
||||
|
||||
const commitedPaths = await extractCommitedPaths(
|
||||
req.body,
|
||||
composeResult.bitbucketOwner,
|
||||
composeResult.bitbucket?.appPassword || "",
|
||||
composeResult.bitbucket,
|
||||
composeResult.bitbucketRepository || "",
|
||||
);
|
||||
|
||||
|
||||
@@ -226,6 +226,7 @@ const Service = (
|
||||
<TabsTrigger value="general">General</TabsTrigger>
|
||||
<TabsTrigger value="environment">Environment</TabsTrigger>
|
||||
<TabsTrigger value="domains">Domains</TabsTrigger>
|
||||
<TabsTrigger value="deployments">Deployments</TabsTrigger>
|
||||
<TabsTrigger value="preview-deployments">
|
||||
Preview Deployments
|
||||
</TabsTrigger>
|
||||
@@ -233,7 +234,6 @@ const Service = (
|
||||
<TabsTrigger value="volume-backups">
|
||||
Volume Backups
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="deployments">Deployments</TabsTrigger>
|
||||
<TabsTrigger value="logs">Logs</TabsTrigger>
|
||||
{((data?.serverId && isCloud) || !data?.server) && (
|
||||
<TabsTrigger value="monitoring">Monitoring</TabsTrigger>
|
||||
|
||||
@@ -524,7 +524,7 @@ export const applicationRouter = createTRPCRouter({
|
||||
|
||||
return true;
|
||||
}),
|
||||
saveGitProdiver: protectedProcedure
|
||||
saveGitProvider: protectedProcedure
|
||||
.input(apiSaveGitProvider)
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const application = await findApplicationById(input.applicationId);
|
||||
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
} from "@dokploy/server";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import { z } from "zod";
|
||||
import { getPublicIpWithFallback } from "@/server/wss/terminal";
|
||||
import { getLocalServerIp } from "@/server/wss/terminal";
|
||||
import { createTRPCRouter, protectedProcedure } from "../trpc";
|
||||
export const clusterRouter = createTRPCRouter({
|
||||
getNodes: protectedProcedure
|
||||
@@ -61,7 +61,7 @@ export const clusterRouter = createTRPCRouter({
|
||||
const result = await docker.swarmInspect();
|
||||
const docker_version = await docker.version();
|
||||
|
||||
let ip = await getPublicIpWithFallback();
|
||||
let ip = await getLocalServerIp();
|
||||
if (input.serverId) {
|
||||
const server = await findServerById(input.serverId);
|
||||
ip = server?.ipAddress;
|
||||
@@ -85,7 +85,7 @@ export const clusterRouter = createTRPCRouter({
|
||||
const result = await docker.swarmInspect();
|
||||
const docker_version = await docker.version();
|
||||
|
||||
let ip = await getPublicIpWithFallback();
|
||||
let ip = await getLocalServerIp();
|
||||
if (input.serverId) {
|
||||
const server = await findServerById(input.serverId);
|
||||
ip = server?.ipAddress;
|
||||
|
||||
@@ -39,10 +39,10 @@ import {
|
||||
import { processTemplate } from "@dokploy/server/templates/processors";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { dump } from "js-yaml";
|
||||
import _ from "lodash";
|
||||
import { nanoid } from "nanoid";
|
||||
import { parse } from "toml";
|
||||
import { stringify } from "yaml";
|
||||
import { z } from "zod";
|
||||
import { slugify } from "@/lib/slug";
|
||||
import { db } from "@/server/db";
|
||||
@@ -364,7 +364,7 @@ export const composeRouter = createTRPCRouter({
|
||||
}
|
||||
const domains = await findDomainsByComposeId(input.composeId);
|
||||
const composeFile = await addDomainToCompose(compose, domains);
|
||||
return dump(composeFile, {
|
||||
return stringify(composeFile, {
|
||||
lineWidth: 1000,
|
||||
});
|
||||
}),
|
||||
|
||||
@@ -3,6 +3,7 @@ import {
|
||||
getGithubBranches,
|
||||
getGithubRepositories,
|
||||
haveGithubRequirements,
|
||||
updateGithub,
|
||||
updateGitProvider,
|
||||
} from "@dokploy/server";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
@@ -134,5 +135,9 @@ export const githubRouter = createTRPCRouter({
|
||||
name: input.name,
|
||||
organizationId: ctx.session.activeOrganizationId,
|
||||
});
|
||||
|
||||
await updateGithub(input.githubId, {
|
||||
...input,
|
||||
});
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -46,8 +46,8 @@ import {
|
||||
import { generateOpenApiDocument } from "@dokploy/trpc-openapi";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { dump, load } from "js-yaml";
|
||||
import { scheduledJobs, scheduleJob } from "node-schedule";
|
||||
import { parse, stringify } from "yaml";
|
||||
import { z } from "zod";
|
||||
import { db } from "@/server/db";
|
||||
import {
|
||||
@@ -657,7 +657,7 @@ export const settingsRouter = createTRPCRouter({
|
||||
const config = readMainConfig();
|
||||
|
||||
if (!config) return false;
|
||||
const parsedConfig = load(config) as {
|
||||
const parsedConfig = parse(config) as {
|
||||
accessLog?: {
|
||||
filePath: string;
|
||||
};
|
||||
@@ -678,7 +678,7 @@ export const settingsRouter = createTRPCRouter({
|
||||
const mainConfig = readMainConfig();
|
||||
if (!mainConfig) return false;
|
||||
|
||||
const currentConfig = load(mainConfig) as {
|
||||
const currentConfig = parse(mainConfig) as {
|
||||
accessLog?: {
|
||||
filePath: string;
|
||||
};
|
||||
@@ -701,7 +701,7 @@ export const settingsRouter = createTRPCRouter({
|
||||
currentConfig.accessLog = undefined;
|
||||
}
|
||||
|
||||
writeMainConfig(dump(currentConfig));
|
||||
writeMainConfig(stringify(currentConfig));
|
||||
|
||||
return true;
|
||||
}),
|
||||
|
||||
@@ -192,7 +192,16 @@ export const userRouter = createTRPCRouter({
|
||||
})
|
||||
.where(eq(account.userId, ctx.user.id));
|
||||
}
|
||||
|
||||
try {
|
||||
return await updateUser(ctx.user.id, input);
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message:
|
||||
error instanceof Error ? error.message : "Failed to update user",
|
||||
});
|
||||
}
|
||||
}),
|
||||
getUserByToken: publicProcedure
|
||||
.input(apiFindOneToken)
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
initializeNetwork,
|
||||
initSchedules,
|
||||
initVolumeBackupsCronJobs,
|
||||
initCancelDeployments,
|
||||
sendDokployRestartNotifications,
|
||||
setupDirectories,
|
||||
} from "@dokploy/server";
|
||||
@@ -52,6 +53,7 @@ void app.prepare().then(async () => {
|
||||
await migration();
|
||||
await initCronJobs();
|
||||
await initSchedules();
|
||||
await initCancelDeployments();
|
||||
await initVolumeBackupsCronJobs();
|
||||
await sendDokployRestartNotifications();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import type http from "node:http";
|
||||
import { findServerById, IS_CLOUD, validateRequest } from "@dokploy/server";
|
||||
import {
|
||||
execAsync,
|
||||
findServerById,
|
||||
IS_CLOUD,
|
||||
validateRequest,
|
||||
} from "@dokploy/server";
|
||||
import { publicIpv4, publicIpv6 } from "public-ip";
|
||||
import { Client, type ConnectConfig } from "ssh2";
|
||||
import { WebSocketServer } from "ws";
|
||||
@@ -44,6 +49,21 @@ export const getPublicIpWithFallback = async () => {
|
||||
return ip;
|
||||
};
|
||||
|
||||
export const getLocalServerIp = async () => {
|
||||
try {
|
||||
const command = `ip addr show | grep -E "inet (192\.168\.|10\.|172\.1[6-9]\.|172\.2[0-9]\.|172\.3[0-1]\.)" | head -n1 | awk '{print $2}' | cut -d/ -f1`;
|
||||
const { stdout } = await execAsync(command);
|
||||
const ip = stdout.trim();
|
||||
return (
|
||||
ip ||
|
||||
"We were unable to obtain the local server IP, please use your private IP address"
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error to obtain local server IP", error);
|
||||
return "We were unable to obtain the local server IP, please use your private IP address";
|
||||
}
|
||||
};
|
||||
|
||||
export const setupTerminalWebSocketServer = (
|
||||
server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>,
|
||||
) => {
|
||||
|
||||
@@ -1,3 +1,4 @@
import { exit } from "node:process";
import { execAsync } from "@dokploy/server";
import { setupDirectories } from "@dokploy/server/setup/config-paths";
import { initializePostgres } from "@dokploy/server/setup/postgres-setup";
@@ -25,6 +26,8 @@ import {
await initializeStandaloneTraefik();
await initializeRedis();
await initializePostgres();
console.log("Dokploy setup completed");
exit(0);
} catch (e) {
console.error("Error in dokploy setup", e);
}

@@ -11,7 +11,7 @@ import {
} from "./queue.js";
import { jobQueueSchema } from "./schema.js";
import { initializeJobs } from "./utils.js";
import { firstWorker, secondWorker } from "./workers.js";
import { firstWorker, secondWorker, thirdWorker } from "./workers.js";

const app = new Hono();

@@ -91,6 +91,7 @@ export const gracefulShutdown = async (signal: string) => {
logger.warn(`Received ${signal}, closing server...`);
await firstWorker.close();
await secondWorker.close();
await thirdWorker.close();
process.exit(0);
};

@@ -7,22 +7,34 @@ import { runJobs } from "./utils.js";
export const firstWorker = new Worker(
"backupQueue",
async (job: Job<QueueJob>) => {
logger.info({ data: job.data }, "Running job");
logger.info({ data: job.data }, "Running job first worker");
await runJobs(job.data);
},
{
concurrency: 50,
concurrency: 100,
connection,
},
);
export const secondWorker = new Worker(
"backupQueue",
async (job: Job<QueueJob>) => {
logger.info({ data: job.data }, "Running job");
logger.info({ data: job.data }, "Running job second worker");
await runJobs(job.data);
},
{
concurrency: 50,
concurrency: 100,
connection,
},
);

export const thirdWorker = new Worker(
"backupQueue",
async (job: Job<QueueJob>) => {
logger.info({ data: job.data }, "Running job third worker");
await runJobs(job.data);
},
{
concurrency: 100,
connection,
},
);

@@ -57,7 +57,7 @@
"drizzle-orm": "^0.39.3",
"drizzle-zod": "0.5.1",
"hi-base32": "^0.5.1",
"js-yaml": "4.1.0",
"yaml": "2.8.1",
"lodash": "4.17.21",
"micromatch": "4.0.8",
"nanoid": "3.3.11",
@@ -85,7 +85,6 @@
"@types/adm-zip": "^0.5.7",
"@types/bcrypt": "5.0.2",
"@types/dockerode": "3.3.23",
"@types/js-yaml": "4.0.9",
"@types/lodash": "4.17.4",
"@types/micromatch": "4.0.9",
"@types/node": "^18.19.104",

@@ -1,5 +1,6 @@
import { relations } from "drizzle-orm";
import {
bigint,
boolean,
integer,
json,
@@ -20,7 +21,6 @@ import { gitlab } from "./gitlab";
import { mounts } from "./mount";
import { ports } from "./port";
import { previewDeployments } from "./preview-deployments";
import { projects } from "./project";
import { redirects } from "./redirects";
import { registry } from "./registry";
import { security } from "./security";
@@ -164,6 +164,7 @@ export const applications = pgTable("application", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
//
replicas: integer("replicas").default(1).notNull(),
applicationStatus: applicationStatus("applicationStatus")
@@ -312,6 +313,7 @@ const createSchema = createInsertSchema(applications, {
watchPaths: z.array(z.string()).optional(),
previewLabels: z.array(z.string()).optional(),
cleanCache: z.boolean().optional(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateApplication = createSchema.pick({

@@ -11,7 +11,9 @@ export const bitbucket = pgTable("bitbucket", {
.primaryKey()
.$defaultFn(() => nanoid()),
bitbucketUsername: text("bitbucketUsername"),
bitbucketEmail: text("bitbucketEmail"),
appPassword: text("appPassword"),
apiToken: text("apiToken"),
bitbucketWorkspaceName: text("bitbucketWorkspaceName"),
gitProviderId: text("gitProviderId")
.notNull()
@@ -29,7 +31,9 @@ const createSchema = createInsertSchema(bitbucket);

export const apiCreateBitbucket = createSchema.extend({
bitbucketUsername: z.string().optional(),
bitbucketEmail: z.string().email().optional(),
appPassword: z.string().optional(),
apiToken: z.string().optional(),
bitbucketWorkspaceName: z.string().optional(),
gitProviderId: z.string().optional(),
authId: z.string().min(1),
@@ -46,9 +50,19 @@ export const apiBitbucketTestConnection = createSchema
.extend({
bitbucketId: z.string().min(1),
bitbucketUsername: z.string().optional(),
bitbucketEmail: z.string().email().optional(),
workspaceName: z.string().optional(),
apiToken: z.string().optional(),
appPassword: z.string().optional(),
})
.pick({ bitbucketId: true, bitbucketUsername: true, workspaceName: true });
.pick({
bitbucketId: true,
bitbucketUsername: true,
bitbucketEmail: true,
workspaceName: true,
apiToken: true,
appPassword: true,
});

export const apiFindBitbucketBranches = z.object({
owner: z.string(),
@@ -60,6 +74,9 @@ export const apiUpdateBitbucket = createSchema.extend({
bitbucketId: z.string().min(1),
name: z.string().min(1),
bitbucketUsername: z.string().optional(),
bitbucketEmail: z.string().email().optional(),
appPassword: z.string().optional(),
apiToken: z.string().optional(),
bitbucketWorkspaceName: z.string().optional(),
organizationId: z.string().optional(),
});

@@ -21,6 +21,7 @@ export const deploymentStatus = pgEnum("deploymentStatus", [
"running",
"done",
"error",
"cancelled",
]);

export const deployments = pgTable("deployment", {

@@ -58,4 +58,5 @@ export const apiUpdateGithub = createSchema.extend({
githubId: z.string().min(1),
name: z.string().min(1),
gitProviderId: z.string().min(1),
githubAppName: z.string().min(1),
});

@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -62,6 +62,7 @@ export const mariadb = pgTable("mariadb", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -128,6 +129,7 @@ const createSchema = createInsertSchema(mariadb, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateMariaDB = createSchema

@@ -1,5 +1,12 @@
import { relations } from "drizzle-orm";
import { boolean, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import {
bigint,
boolean,
integer,
json,
pgTable,
text,
} from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -58,6 +65,7 @@ export const mongo = pgTable("mongo", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -118,6 +126,7 @@ const createSchema = createInsertSchema(mongo, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateMongo = createSchema

@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -60,6 +60,7 @@ export const mysql = pgTable("mysql", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -125,6 +126,7 @@ const createSchema = createInsertSchema(mysql, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateMySql = createSchema

@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -60,6 +60,7 @@ export const postgres = pgTable("postgres", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),
createdAt: text("createdAt")
.notNull()
@@ -118,6 +119,7 @@ const createSchema = createInsertSchema(postgres, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreatePostgres = createSchema

@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { bigint, integer, json, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -60,6 +60,7 @@ export const redis = pgTable("redis", {
modeSwarm: json("modeSwarm").$type<ServiceModeSwarm>(),
labelsSwarm: json("labelsSwarm").$type<LabelsSwarm>(),
networkSwarm: json("networkSwarm").$type<NetworkSwarm[]>(),
stopGracePeriodSwarm: bigint("stopGracePeriodSwarm", { mode: "bigint" }),
replicas: integer("replicas").default(1).notNull(),

environmentId: text("environmentId")
@@ -108,6 +109,7 @@ const createSchema = createInsertSchema(redis, {
modeSwarm: ServiceModeSwarmSchema.nullable(),
labelsSwarm: LabelsSwarmSchema.nullable(),
networkSwarm: NetworkSwarmSchema.nullable(),
stopGracePeriodSwarm: z.bigint().nullable(),
});

export const apiCreateRedis = createSchema

@@ -323,6 +323,11 @@ export const apiUpdateWebServerMonitoring = z.object({
});

export const apiUpdateUser = createSchema.partial().extend({
email: z
.string()
.email("Please enter a valid email address")
.min(1, "Email is required")
.optional(),
password: z.string().optional(),
currentPassword: z.string().optional(),
name: z.string().optional(),

@@ -68,6 +68,7 @@ export * from "./utils/backups/postgres";
export * from "./utils/backups/utils";
export * from "./utils/backups/web-server";
export * from "./utils/builders/compose";
export * from "./utils/startup/cancell-deployments";
export * from "./utils/builders/docker-file";
export * from "./utils/builders/drop";
export * from "./utils/builders/heroku";

@@ -92,31 +92,48 @@ export const suggestVariants = async ({

const { object } = await generateObject({
model,
output: "array",
output: "object",
schema: z.object({
suggestions: z.array(
z.object({
id: z.string(),
name: z.string(),
shortDescription: z.string(),
description: z.string(),
}),
),
}),
prompt: `
Act as advanced DevOps engineer and generate a list of open source projects what can cover users needs(up to 3 items), the suggestion
should include id, name, shortDescription, and description. Use slug of title for id.
Act as advanced DevOps engineer and generate a list of open source projects what can cover users needs(up to 3 items).

Return your response as a JSON object with the following structure:
{
"suggestions": [
{
"id": "project-slug",
"name": "Project Name",
"shortDescription": "Brief one-line description",
"description": "Detailed description"
}
]
}

Important rules for the response:
1. The description field should ONLY contain a plain text description of the project, its features, and use cases
2. Do NOT include any code snippets, configuration examples, or installation instructions in the description
3. The shortDescription should be a single-line summary focusing on the main technologies
1. Use slug format for the id field (lowercase, hyphenated)
2. The description field should ONLY contain a plain text description of the project, its features, and use cases
3. Do NOT include any code snippets, configuration examples, or installation instructions in the description
4. The shortDescription should be a single-line summary focusing on the main technologies
5. All projects should be installable in docker and have docker compose support

User wants to create a new project with the following details, it should be installable in docker and can be docker compose generated for it:
User wants to create a new project with the following details:

${input}
`,
});

if (object?.length) {
if (object?.suggestions?.length) {
const result = [];
for (const suggestion of object) {
for (const suggestion of object.suggestions) {
try {
const { object: docker } = await generateObject({
model,
@@ -136,16 +153,29 @@ export const suggestVariants = async ({
serviceName: z.string(),
}),
),
configFiles: z.array(
configFiles: z
.array(
z.object({
content: z.string(),
filePath: z.string(),
}),
),
)
.optional(),
}),
prompt: `
Act as advanced DevOps engineer and generate docker compose with environment variables and domain configurations needed to install the following project.
Return the docker compose as a YAML string and environment variables configuration. Follow these rules:

Return your response as a JSON object with this structure:
{
"dockerCompose": "yaml string here",
"envVariables": [{"name": "VAR_NAME", "value": "example_value"}],
"domains": [{"host": "domain.com", "port": 3000, "serviceName": "service"}],
"configFiles": [{"content": "file content", "filePath": "path/to/file"}]
}

Note: configFiles is optional - only include it if configuration files are absolutely required.

Follow these rules:

Docker Compose Rules:
1. Use placeholder like \${VARIABLE_NAME-default} for generated variables in the docker-compose.yml
@@ -198,6 +228,7 @@ export const suggestVariants = async ({
console.error("Error in docker compose generation:", error);
}
}

return result;
}

@@ -68,10 +68,26 @@ export const updateBitbucket = async (
input: typeof apiUpdateBitbucket._type,
) => {
return await db.transaction(async (tx) => {
// First get the current bitbucket provider to get gitProviderId
const currentProvider = await tx.query.bitbucket.findFirst({
where: eq(bitbucket.bitbucketId, bitbucketId),
});

if (!currentProvider) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Bitbucket provider not found",
});
}

const result = await tx
.update(bitbucket)
.set({
...input,
bitbucketUsername: input.bitbucketUsername,
bitbucketEmail: input.bitbucketEmail,
appPassword: input.appPassword,
apiToken: input.apiToken,
bitbucketWorkspaceName: input.bitbucketWorkspaceName,
})
.where(eq(bitbucket.bitbucketId, bitbucketId))
.returning();
@@ -83,7 +99,7 @@ export const updateBitbucket = async (
name: input.name,
organizationId: input.organizationId,
})
.where(eq(gitProvider.gitProviderId, input.gitProviderId))
.where(eq(gitProvider.gitProviderId, currentProvider.gitProviderId))
.returning();
}

@@ -603,6 +603,21 @@ const BUNNY_CDN_IPS = new Set([
"89.187.184.176",
]);

// Arvancloud IP ranges
// https://www.arvancloud.ir/fa/ips.txt
const ARVANCLOUD_IP_RANGES = [
"185.143.232.0/22",
"188.229.116.16/29",
"94.101.182.0/27",
"2.144.3.128/28",
"89.45.48.64/28",
"37.32.16.0/27",
"37.32.17.0/27",
"37.32.18.0/27",
"37.32.19.0/27",
"185.215.232.0/22",
];

const CDN_PROVIDERS: CDNProvider[] = [
{
name: "cloudflare",
@@ -627,6 +642,14 @@ const CDN_PROVIDERS: CDNProvider[] = [
warningMessage:
"Domain is behind Fastly - actual IP is masked by CDN proxy",
},
{
name: "arvancloud",
displayName: "Arvancloud",
checkIp: (ip: string) =>
ARVANCLOUD_IP_RANGES.some((range) => isIPInCIDR(ip, range)),
warningMessage:
"Domain is behind Arvancloud - actual IP is masked by CDN proxy",
},
];

export const detectCDNProvider = (ip: string): CDNProvider | null => {

@@ -9,7 +9,7 @@ import {
import { removeDirectoryIfExistsContent } from "@dokploy/server/utils/filesystem/directory";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { dump } from "js-yaml";
import { stringify } from "yaml";
import type { z } from "zod";
import { encodeBase64 } from "../utils/docker/utils";
import { execAsyncRemote } from "../utils/process/execAsync";
@@ -101,7 +101,7 @@ const createCertificateFiles = async (certificate: Certificate) => {
],
},
};
const yamlConfig = dump(traefikConfig);
const yamlConfig = stringify(traefikConfig);
const configFile = path.join(certDir, "certificate.yml");

if (certificate.serverId) {

@@ -227,7 +227,7 @@ export const deployCompose = async ({

const buildLink = `${await getDokployUrl()}/dashboard/project/${
compose.environment.projectId
}/services/compose/${compose.composeId}?tab=deployments`;
}/environment/${compose.environmentId}/services/compose/${compose.composeId}?tab=deployments`;
const deployment = await createDeploymentCompose({
composeId: composeId,
title: titleLog,
@@ -335,7 +335,7 @@ export const deployRemoteCompose = async ({

const buildLink = `${await getDokployUrl()}/dashboard/project/${
compose.environment.projectId
}/services/compose/${compose.composeId}?tab=deployments`;
}/environment/${compose.environmentId}/services/compose/${compose.composeId}?tab=deployments`;
const deployment = await createDeploymentCompose({
composeId: composeId,
title: titleLog,

@@ -10,6 +10,22 @@ import { IS_CLOUD } from "../constants";

export type Registry = typeof registry.$inferSelect;

function shEscape(s: string | undefined): string {
if (!s) return "''";
return `'${s.replace(/'/g, `'\\''`)}'`;
}

function safeDockerLoginCommand(
registry: string | undefined,
user: string | undefined,
pass: string | undefined,
) {
const escapedRegistry = shEscape(registry);
const escapedUser = shEscape(user);
const escapedPassword = shEscape(pass);
return `printf %s ${escapedPassword} | docker login ${escapedRegistry} -u ${escapedUser} --password-stdin`;
}

export const createRegistry = async (
input: typeof apiCreateRegistry._type,
organizationId: string,
@@ -37,7 +53,11 @@ export const createRegistry = async (
message: "Select a server to add the registry",
});
}
const loginCommand = `echo ${input.password} | docker login ${input.registryUrl} --username ${input.username} --password-stdin`;
const loginCommand = safeDockerLoginCommand(
input.registryUrl,
input.username,
input.password,
);
if (input.serverId && input.serverId !== "none") {
await execAsyncRemote(input.serverId, loginCommand);
} else if (newRegistry.registryType === "cloud") {
@@ -91,7 +111,11 @@ export const updateRegistry = async (
.returning()
.then((res) => res[0]);

const loginCommand = `echo ${response?.password} | docker login ${response?.registryUrl} --username ${response?.username} --password-stdin`;
const loginCommand = safeDockerLoginCommand(
response?.registryUrl,
response?.username,
response?.password,
);

if (
IS_CLOUD &&

@@ -336,6 +336,19 @@ export const findMemberById = async (
};

export const updateUser = async (userId: string, userData: Partial<User>) => {
// Validate email if it's being updated
if (userData.email !== undefined) {
if (!userData.email || userData.email.trim() === "") {
throw new Error("Email is required and cannot be empty");
}

// Basic email format validation
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
if (!emailRegex.test(userData.email)) {
throw new Error("Please enter a valid email address");
}
}

const user = await db
.update(users_temp)
.set({

@@ -1,7 +1,14 @@
import { chmodSync, existsSync, mkdirSync, writeFileSync } from "node:fs";
import {
chmodSync,
existsSync,
mkdirSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import path from "node:path";
import type { ContainerCreateOptions, CreateServiceOptions } from "dockerode";
import { dump } from "js-yaml";
import { stringify } from "yaml";
import { paths } from "../constants";
import { getRemoteDocker } from "../utils/servers/remote-docker";
import type { FileConfig } from "../utils/traefik/file-types";
@@ -234,7 +241,7 @@ export const createDefaultServerTraefikConfig = () => {
},
};

const yamlStr = dump(config);
const yamlStr = stringify(config);
mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
writeFileSync(
path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`),
@@ -308,7 +315,7 @@ export const getDefaultTraefikConfig = () => {
}),
};

const yamlStr = dump(configObject);
const yamlStr = stringify(configObject);

return yamlStr;
};
@@ -362,7 +369,7 @@ export const getDefaultServerTraefikConfig = () => {
},
};

const yamlStr = dump(configObject);
const yamlStr = stringify(configObject);

return yamlStr;
};
@@ -375,13 +382,26 @@ export const createDefaultTraefikConfig = () => {
if (existsSync(acmeJsonPath)) {
chmodSync(acmeJsonPath, "600");
}

// Create the traefik directory first
mkdirSync(MAIN_TRAEFIK_PATH, { recursive: true });

// Check if traefik.yml exists and handle the case where it might be a directory
if (existsSync(mainConfig)) {
const stats = statSync(mainConfig);
if (stats.isDirectory()) {
// If traefik.yml is a directory, remove it
console.log("Found traefik.yml as directory, removing it...");
rmSync(mainConfig, { recursive: true, force: true });
} else if (stats.isFile()) {
console.log("Main config already exists");
return;
}
}

const yamlStr = getDefaultTraefikConfig();
mkdirSync(MAIN_TRAEFIK_PATH, { recursive: true });
writeFileSync(mainConfig, yamlStr, "utf8");
console.log("Traefik config created successfully");
};

export const getDefaultMiddlewares = () => {
@@ -397,7 +417,7 @@ export const getDefaultMiddlewares = () => {
},
},
};
const yamlStr = dump(defaultMiddlewares);
const yamlStr = stringify(defaultMiddlewares);
return yamlStr;
};
export const createDefaultMiddlewares = () => {

@@ -89,7 +89,7 @@ export const getMariadbBackupCommand = (
databaseUser: string,
databasePassword: string,
) => {
return `docker exec -i $CONTAINER_ID bash -c "set -o pipefail; mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
return `docker exec -i $CONTAINER_ID bash -c "set -o pipefail; mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --single-transaction --quick --databases ${database} | gzip"`;
};

export const getMysqlBackupCommand = (

@@ -142,6 +142,7 @@ export const mechanizeDockerContainer = async (
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(application);

const bindsMount = generateBindMounts(mounts);
@@ -191,6 +192,8 @@ export const mechanizeDockerContainer = async (
})),
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};

try {
@@ -220,8 +223,8 @@ const getImageName = (application: ApplicationNested) => {
if (registry) {
const { registryUrl, imagePrefix, username } = registry;
const registryTag = imagePrefix
? `${registryUrl}/${imagePrefix}/${imageName}`
: `${registryUrl}/${username}/${imageName}`;
? `${registryUrl ? `${registryUrl}/` : ""}${imagePrefix}/${imageName}`
: `${registryUrl ? `${registryUrl}/` : ""}${username}/${imageName}`;
return registryTag;
}

@@ -22,8 +22,8 @@ export const uploadImage = async (
// For ghcr.io: ghcr.io/username/image:tag
// For docker.io: docker.io/username/image:tag
const registryTag = imagePrefix
? `${registryUrl}/${imagePrefix}/${imageName}`
: `${registryUrl}/${username}/${imageName}`;
? `${registryUrl ? `${registryUrl}/` : ""}${imagePrefix}/${imageName}`
: `${registryUrl ? `${registryUrl}/` : ""}${username}/${imageName}`;

try {
writeStream.write(

@@ -45,6 +45,7 @@ export const buildMariadb = async (mariadb: MariadbNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(mariadb);
const resources = calculateResources({
memoryLimit,
@@ -102,6 +103,8 @@ export const buildMariadb = async (mariadb: MariadbNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};
try {
const service = docker.getService(appName);

@@ -91,6 +91,7 @@ ${command ?? "wait $MONGOD_PID"}`;
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(mongo);

const resources = calculateResources({
@@ -155,6 +156,8 @@ ${command ?? "wait $MONGOD_PID"}`;
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};

try {

@@ -51,6 +51,7 @@ export const buildMysql = async (mysql: MysqlNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(mysql);
const resources = calculateResources({
memoryLimit,
@@ -108,6 +109,8 @@ export const buildMysql = async (mysql: MysqlNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};
try {
const service = docker.getService(appName);

@@ -44,6 +44,7 @@ export const buildPostgres = async (postgres: PostgresNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(postgres);
const resources = calculateResources({
memoryLimit,
@@ -101,6 +102,8 @@ export const buildPostgres = async (postgres: PostgresNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};
try {
const service = docker.getService(appName);

@@ -42,6 +42,7 @@ export const buildRedis = async (redis: RedisNested) => {
RollbackConfig,
UpdateConfig,
Networks,
StopGracePeriod,
} = generateConfigContainer(redis);
const resources = calculateResources({
memoryLimit,
@@ -98,6 +99,8 @@ export const buildRedis = async (redis: RedisNested) => {
: [],
},
UpdateConfig,
...(StopGracePeriod !== undefined &&
StopGracePeriod !== null && { StopGracePeriod }),
};

try {

@@ -1,5 +1,5 @@
import { findComposeById } from "@dokploy/server/services/compose";
import { dump } from "js-yaml";
import { stringify } from "yaml";
import { addAppNameToAllServiceNames } from "./collision/root-network";
import { generateRandomHash } from "./compose";
import { addSuffixToAllVolumes } from "./compose/volume";
@@ -59,7 +59,7 @@ export const randomizeIsolatedDeploymentComposeFile = async (
)
: composeData;

return dump(newComposeFile);
return stringify(newComposeFile);
};

export const randomizeDeployableSpecificationFile = (