diff --git a/.github/workflows/ci-test-custom-script.yml b/.github/workflows/ci-test-custom-script.yml index 0cd2c5065069..228d95ee07fe 100644 --- a/.github/workflows/ci-test-custom-script.yml +++ b/.github/workflows/ci-test-custom-script.yml @@ -453,7 +453,7 @@ jobs: if: failure() with: name: server-logs-${{ matrix.job }} - path: app/server/server-logs.log + path: ${{ github.workspace }}/app/server/server-logs.log overwrite: true - name: Collect docker log as file diff --git a/app/client/packages/design-system/widgets/src/components/Calendar/src/styles.module.css b/app/client/packages/design-system/widgets/src/components/Calendar/src/styles.module.css index 2a51ca6840a9..216afe61510f 100644 --- a/app/client/packages/design-system/widgets/src/components/Calendar/src/styles.module.css +++ b/app/client/packages/design-system/widgets/src/components/Calendar/src/styles.module.css @@ -61,6 +61,10 @@ } .calendar tbody [role="button"][data-focus-visible] { - outline: var(--border-width-2) solid var(--color-bd-accent); - outline-offset: var(--border-width-2); + --box-shadow-offset: 2px; + + box-shadow: + 0 0 0 var(--box-shadow-offset) var(--color-bg), + 0 0 0 calc(var(--box-shadow-offset) + var(--border-width-2)) + var(--color-bd-focus); } diff --git a/app/client/packages/git/src/actions/checkoutBranchActions.ts b/app/client/packages/git/src/actions/checkoutBranchActions.ts new file mode 100644 index 000000000000..65640d5859cd --- /dev/null +++ b/app/client/packages/git/src/actions/checkoutBranchActions.ts @@ -0,0 +1,28 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const checkoutBranchInitAction = createSingleArtifactAction((state) => { + state.apiResponses.checkoutBranch.loading = true; + state.apiResponses.checkoutBranch.error = null; + + return state; +}); + +export const checkoutBranchSuccessAction = createSingleArtifactAction( + (state) => { + 
state.apiResponses.checkoutBranch.loading = false; + + return state; + }, +); + +export const checkoutBranchErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.checkoutBranch.loading = false; + state.apiResponses.checkoutBranch.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/commitActions.ts b/app/client/packages/git/src/actions/commitActions.ts index 700866c8303e..1fbe3f90cdea 100644 --- a/app/client/packages/git/src/actions/commitActions.ts +++ b/app/client/packages/git/src/actions/commitActions.ts @@ -1,25 +1,25 @@ import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; -import type { GitArtifactPayloadAction } from "../types"; +import type { GitArtifactErrorPayloadAction } from "../types"; export const commitInitAction = createSingleArtifactAction((state) => { - state.commit.loading = true; - state.commit.error = null; + state.apiResponses.commit.loading = true; + state.apiResponses.commit.error = null; return state; }); export const commitSuccessAction = createSingleArtifactAction((state) => { - state.commit.loading = false; + state.apiResponses.commit.loading = false; return state; }); export const commitErrorAction = createSingleArtifactAction( - (state, action: GitArtifactPayloadAction<{ error: string }>) => { + (state, action: GitArtifactErrorPayloadAction) => { const { error } = action.payload; - state.commit.loading = false; - state.commit.error = error; + state.apiResponses.commit.loading = false; + state.apiResponses.commit.error = error; return state; }, diff --git a/app/client/packages/git/src/actions/connectActions.ts b/app/client/packages/git/src/actions/connectActions.ts index 6a306cbd034f..725b7483924f 100644 --- a/app/client/packages/git/src/actions/connectActions.ts +++ b/app/client/packages/git/src/actions/connectActions.ts @@ -1,25 +1,25 @@ import { 
createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; -import type { GitArtifactPayloadAction } from "../types"; +import type { GitArtifactErrorPayloadAction } from "../types"; export const connectInitAction = createSingleArtifactAction((state) => { - state.connect.loading = true; - state.connect.error = null; + state.apiResponses.connect.loading = true; + state.apiResponses.connect.error = null; return state; }); export const connectSuccessAction = createSingleArtifactAction((state) => { - state.connect.loading = false; + state.apiResponses.connect.loading = false; return state; }); export const connectErrorAction = createSingleArtifactAction( - (state, action: GitArtifactPayloadAction<{ error: string }>) => { + (state, action: GitArtifactErrorPayloadAction) => { const { error } = action.payload; - state.connect.loading = false; - state.connect.error = error; + state.apiResponses.connect.loading = false; + state.apiResponses.connect.error = error; return state; }, diff --git a/app/client/packages/git/src/actions/createBranchActions.ts b/app/client/packages/git/src/actions/createBranchActions.ts new file mode 100644 index 000000000000..82963f12d949 --- /dev/null +++ b/app/client/packages/git/src/actions/createBranchActions.ts @@ -0,0 +1,26 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const createBranchInitAction = createSingleArtifactAction((state) => { + state.apiResponses.createBranch.loading = true; + state.apiResponses.createBranch.error = null; + + return state; +}); + +export const createBranchSuccessAction = createSingleArtifactAction((state) => { + state.apiResponses.createBranch.loading = false; + + return state; +}); + +export const createBranchErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.createBranch.loading = 
false; + state.apiResponses.createBranch.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/deleteBranchActions.ts b/app/client/packages/git/src/actions/deleteBranchActions.ts new file mode 100644 index 000000000000..5d3ae8293ae4 --- /dev/null +++ b/app/client/packages/git/src/actions/deleteBranchActions.ts @@ -0,0 +1,26 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const deleteBranchInitAction = createSingleArtifactAction((state) => { + state.apiResponses.deleteBranch.loading = true; + state.apiResponses.deleteBranch.error = null; + + return state; +}); + +export const deleteBranchSuccessAction = createSingleArtifactAction((state) => { + state.apiResponses.deleteBranch.loading = false; + + return state; +}); + +export const deleteBranchErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.deleteBranch.loading = false; + state.apiResponses.deleteBranch.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/discardActions.ts b/app/client/packages/git/src/actions/discardActions.ts new file mode 100644 index 000000000000..a0863c79e638 --- /dev/null +++ b/app/client/packages/git/src/actions/discardActions.ts @@ -0,0 +1,26 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const discardInitAction = createSingleArtifactAction((state) => { + state.apiResponses.discard.loading = true; + state.apiResponses.discard.error = null; + + return state; +}); + +export const discardSuccessAction = createSingleArtifactAction((state) => { + state.apiResponses.discard.loading = false; + + return state; +}); + +export const discardErrorAction = createSingleArtifactAction( + (state, action: 
GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.discard.loading = false; + state.apiResponses.discard.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/disconnectActions.ts b/app/client/packages/git/src/actions/disconnectActions.ts new file mode 100644 index 000000000000..f911eefec631 --- /dev/null +++ b/app/client/packages/git/src/actions/disconnectActions.ts @@ -0,0 +1,26 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const disconnectInitAction = createSingleArtifactAction((state) => { + state.apiResponses.disconnect.loading = true; + state.apiResponses.disconnect.error = null; + + return state; +}); + +export const disconnectSuccessAction = createSingleArtifactAction((state) => { + state.apiResponses.disconnect.loading = false; + + return state; +}); + +export const disconnectErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.disconnect.loading = false; + state.apiResponses.disconnect.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchAutocommitProgressActions.ts b/app/client/packages/git/src/actions/fetchAutocommitProgressActions.ts new file mode 100644 index 000000000000..caa054318f5f --- /dev/null +++ b/app/client/packages/git/src/actions/fetchAutocommitProgressActions.ts @@ -0,0 +1,41 @@ +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitAutocommitProgress, +} from "../types"; +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; + +export const fetchAutocommitProgressInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.autocommitProgress.loading = true; + state.apiResponses.autocommitProgress.error = null; + + return state; + }, 
+); + +export const fetchAutocommitProgressSuccessAction = createSingleArtifactAction( + ( + state, + action: GitArtifactPayloadAction<{ + autocommitProgress: GitAutocommitProgress; + }>, + ) => { + state.apiResponses.autocommitProgress.loading = false; + state.apiResponses.autocommitProgress.value = + action.payload.autocommitProgress; + + return state; + }, +); + +export const fetchAutocommitProgressErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.autocommitProgress.loading = false; + state.apiResponses.autocommitProgress.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchBranchesActions.ts b/app/client/packages/git/src/actions/fetchBranchesActions.ts index fc073348d55f..cd85068f6757 100644 --- a/app/client/packages/git/src/actions/fetchBranchesActions.ts +++ b/app/client/packages/git/src/actions/fetchBranchesActions.ts @@ -1,28 +1,32 @@ -import type { GitArtifactPayloadAction, GitBranches } from "../types"; +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitBranches, +} from "../types"; import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; export const fetchBranchesInitAction = createSingleArtifactAction((state) => { - state.branches.loading = true; - state.branches.error = null; + state.apiResponses.branches.loading = true; + state.apiResponses.branches.error = null; return state; }); export const fetchBranchesSuccessAction = createSingleArtifactAction( (state, action: GitArtifactPayloadAction<{ branches: GitBranches }>) => { - state.branches.loading = false; - state.branches.value = action.payload.branches; + state.apiResponses.branches.loading = false; + state.apiResponses.branches.value = action.payload.branches; return state; }, ); export const fetchBranchesErrorAction = createSingleArtifactAction( - (state, action: GitArtifactPayloadAction<{ error: 
string }>) => { + (state, action: GitArtifactErrorPayloadAction) => { const { error } = action.payload; - state.branches.loading = false; - state.branches.error = error; + state.apiResponses.branches.loading = false; + state.apiResponses.branches.error = error; return state; }, diff --git a/app/client/packages/git/src/actions/fetchGlobalConfigActions.ts b/app/client/packages/git/src/actions/fetchGlobalConfigActions.ts new file mode 100644 index 000000000000..e30f80644b92 --- /dev/null +++ b/app/client/packages/git/src/actions/fetchGlobalConfigActions.ts @@ -0,0 +1,38 @@ +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitGlobalConfig, +} from "../types"; +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; + +export const fetchGlobalConfigInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.globalConfig.loading = true; + state.apiResponses.globalConfig.error = null; + + return state; + }, +); + +export const fetchGlobalConfigSuccessAction = createSingleArtifactAction( + ( + state, + action: GitArtifactPayloadAction<{ globalConfig: GitGlobalConfig }>, + ) => { + state.apiResponses.globalConfig.loading = false; + state.apiResponses.globalConfig.value = action.payload.globalConfig; + + return state; + }, +); + +export const fetchGlobalConfigErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.globalConfig.loading = false; + state.apiResponses.globalConfig.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchLocalConfigActions.ts b/app/client/packages/git/src/actions/fetchLocalConfigActions.ts new file mode 100644 index 000000000000..e04dd2135874 --- /dev/null +++ b/app/client/packages/git/src/actions/fetchLocalConfigActions.ts @@ -0,0 +1,38 @@ +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitLocalConfig, +} from 
"../types"; +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; + +export const fetchLocalConfigInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.localConfig.loading = true; + state.apiResponses.localConfig.error = null; + + return state; + }, +); + +export const fetchLocalConfigSuccessAction = createSingleArtifactAction( + ( + state, + action: GitArtifactPayloadAction<{ localConfig: GitLocalConfig }>, + ) => { + state.apiResponses.localConfig.loading = false; + state.apiResponses.localConfig.value = action.payload.localConfig; + + return state; + }, +); + +export const fetchLocalConfigErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.localConfig.loading = false; + state.apiResponses.localConfig.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchMergeStatusActions.ts b/app/client/packages/git/src/actions/fetchMergeStatusActions.ts new file mode 100644 index 000000000000..e81a387fe675 --- /dev/null +++ b/app/client/packages/git/src/actions/fetchMergeStatusActions.ts @@ -0,0 +1,38 @@ +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitMergeStatus, +} from "../types"; +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; + +export const fetchMergeStatusInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.mergeStatus.loading = true; + state.apiResponses.mergeStatus.error = null; + + return state; + }, +); + +export const fetchMergeStatusSuccessAction = createSingleArtifactAction( + ( + state, + action: GitArtifactPayloadAction<{ mergeStatus: GitMergeStatus }>, + ) => { + state.apiResponses.mergeStatus.loading = false; + state.apiResponses.mergeStatus.value = action.payload.mergeStatus; + + return state; + }, +); + +export const fetchMergeStatusErrorAction = 
createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.mergeStatus.loading = false; + state.apiResponses.mergeStatus.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchMetadataActions.ts b/app/client/packages/git/src/actions/fetchMetadataActions.ts index d7b46dd25549..a11914c8200a 100644 --- a/app/client/packages/git/src/actions/fetchMetadataActions.ts +++ b/app/client/packages/git/src/actions/fetchMetadataActions.ts @@ -1,28 +1,32 @@ -import type { GitArtifactPayloadAction, GitMetadata } from "../types"; +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitMetadata, +} from "../types"; import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; export const fetchMetadataInitAction = createSingleArtifactAction((state) => { - state.metadata.loading = true; - state.metadata.error = null; + state.apiResponses.metadata.loading = true; + state.apiResponses.metadata.error = null; return state; }); export const fetchMetadataSuccessAction = createSingleArtifactAction( (state, action: GitArtifactPayloadAction<{ metadata: GitMetadata }>) => { - state.metadata.loading = false; - state.metadata.value = action.payload.metadata; + state.apiResponses.metadata.loading = false; + state.apiResponses.metadata.value = action.payload.metadata; return state; }, ); export const fetchMetadataErrorAction = createSingleArtifactAction( - (state, action: GitArtifactPayloadAction<{ error: string }>) => { + (state, action: GitArtifactErrorPayloadAction) => { const { error } = action.payload; - state.metadata.loading = false; - state.metadata.error = error; + state.apiResponses.metadata.loading = false; + state.apiResponses.metadata.error = error; return state; }, diff --git a/app/client/packages/git/src/actions/fetchProtectedBranchesActions.ts b/app/client/packages/git/src/actions/fetchProtectedBranchesActions.ts 
new file mode 100644 index 000000000000..32026a1ed285 --- /dev/null +++ b/app/client/packages/git/src/actions/fetchProtectedBranchesActions.ts @@ -0,0 +1,41 @@ +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitProtectedBranches, +} from "../types"; +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; + +export const fetchProtectedBranchesInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.protectedBranches.loading = true; + state.apiResponses.protectedBranches.error = null; + + return state; + }, +); + +export const fetchProtectedBranchesSuccessAction = createSingleArtifactAction( + ( + state, + action: GitArtifactPayloadAction<{ + protectedBranches: GitProtectedBranches; + }>, + ) => { + state.apiResponses.protectedBranches.loading = false; + state.apiResponses.protectedBranches.value = + action.payload.protectedBranches; + + return state; + }, +); + +export const fetchProtectedBranchesErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.protectedBranches.loading = false; + state.apiResponses.protectedBranches.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchSSHKeyActions.ts b/app/client/packages/git/src/actions/fetchSSHKeyActions.ts new file mode 100644 index 000000000000..516adf758f44 --- /dev/null +++ b/app/client/packages/git/src/actions/fetchSSHKeyActions.ts @@ -0,0 +1,33 @@ +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitSSHKey, +} from "../types"; +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; + +export const fetchSSHKeyInitAction = createSingleArtifactAction((state) => { + state.apiResponses.sshKey.loading = true; + state.apiResponses.sshKey.error = null; + + return state; +}); + +export const fetchSSHKeySuccessAction = createSingleArtifactAction( + 
(state, action: GitArtifactPayloadAction<{ sshKey: GitSSHKey }>) => { + state.apiResponses.sshKey.loading = false; + state.apiResponses.sshKey.value = action.payload.sshKey; + + return state; + }, +); + +export const fetchSSHKeyErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.sshKey.loading = false; + state.apiResponses.sshKey.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/fetchStatusActions.ts b/app/client/packages/git/src/actions/fetchStatusActions.ts index dcecbd91c008..16b6892c04aa 100644 --- a/app/client/packages/git/src/actions/fetchStatusActions.ts +++ b/app/client/packages/git/src/actions/fetchStatusActions.ts @@ -1,28 +1,32 @@ -import type { GitArtifactPayloadAction, GitStatus } from "../types"; +import type { + GitArtifactPayloadAction, + GitArtifactErrorPayloadAction, + GitStatus, +} from "../types"; import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; export const fetchStatusInitAction = createSingleArtifactAction((state) => { - state.status.loading = true; - state.status.error = null; + state.apiResponses.status.loading = true; + state.apiResponses.status.error = null; return state; }); export const fetchStatusSuccessAction = createSingleArtifactAction( (state, action: GitArtifactPayloadAction<{ status: GitStatus }>) => { - state.status.loading = false; - state.status.value = action.payload.status; + state.apiResponses.status.loading = false; + state.apiResponses.status.value = action.payload.status; return state; }, ); export const fetchStatusErrorAction = createSingleArtifactAction( - (state, action: GitArtifactPayloadAction<{ error: string }>) => { + (state, action: GitArtifactErrorPayloadAction) => { const { error } = action.payload; - state.status.loading = false; - state.status.error = error; + state.apiResponses.status.loading = false; + state.apiResponses.status.error 
= error; return state; }, diff --git a/app/client/packages/git/src/actions/generateSSHKey.ts b/app/client/packages/git/src/actions/generateSSHKey.ts new file mode 100644 index 000000000000..c2a82f94e8f3 --- /dev/null +++ b/app/client/packages/git/src/actions/generateSSHKey.ts @@ -0,0 +1,28 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const generateSSHKeyInitAction = createSingleArtifactAction((state) => { + state.apiResponses.generateSSHKey.loading = true; + state.apiResponses.generateSSHKey.error = null; + + return state; +}); + +export const generateSSHKeySuccessAction = createSingleArtifactAction( + (state) => { + state.apiResponses.generateSSHKey.loading = false; + + return state; + }, +); + +export const generateSSHKeyErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.generateSSHKey.loading = false; + state.apiResponses.generateSSHKey.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/helpers/createSingleArtifactAction.ts b/app/client/packages/git/src/actions/helpers/createSingleArtifactAction.ts index 3ad93734c6fe..2f71dee8ce11 100644 --- a/app/client/packages/git/src/actions/helpers/createSingleArtifactAction.ts +++ b/app/client/packages/git/src/actions/helpers/createSingleArtifactAction.ts @@ -3,42 +3,13 @@ import type { GitArtifactReduxState, GitSingleArtifactReduxState, } from "../../types"; +import { gitSingleArtifactInitialState } from "./singleArtifactInitialState"; type SingleArtifactStateCb = ( singleArtifactState: GitSingleArtifactReduxState, action: GitArtifactPayloadAction, ) => GitSingleArtifactReduxState; -export const gitSingleArtifactInitialState: GitSingleArtifactReduxState = { - metadata: { - value: null, - loading: false, - error: null, - }, - connect: { - loading: false, - error: 
null, - }, - branches: { - value: null, - loading: false, - error: null, - }, - status: { - value: null, - loading: false, - error: null, - }, - commit: { - loading: false, - error: null, - }, - pull: { - loading: false, - error: null, - }, -}; - export const createSingleArtifactAction = ( singleArtifactStateCb: SingleArtifactStateCb, ) => { diff --git a/app/client/packages/git/src/actions/helpers/singleArtifactInitialState.ts b/app/client/packages/git/src/actions/helpers/singleArtifactInitialState.ts new file mode 100644 index 000000000000..0c8e0cb282f8 --- /dev/null +++ b/app/client/packages/git/src/actions/helpers/singleArtifactInitialState.ts @@ -0,0 +1,147 @@ +import { + GitConnectStep, + GitImportStep, + GitOpsTab, + GitSettingsTab, +} from "../../enums"; +import type { + GitSingleArtifactAPIResponsesReduxState, + GitSingleArtifactUIReduxState, + GitSingleArtifactReduxState, +} from "../../types"; + +const gitSingleArtifactInitialUIState: GitSingleArtifactUIReduxState = { + connectModal: { + open: false, + step: GitConnectStep.Provider, + }, + importModal: { + open: false, + step: GitImportStep.Provider, + }, + branchList: { + open: false, + }, + opsModal: { + open: false, + tab: GitOpsTab.Deploy, + }, + settingsModal: { + open: false, + tab: GitSettingsTab.General, + }, +}; + +const gitSingleArtifactInitialAPIResponses: GitSingleArtifactAPIResponsesReduxState = + { + metadata: { + value: null, + loading: false, + error: null, + }, + connect: { + loading: false, + error: null, + }, + status: { + value: null, + loading: false, + error: null, + }, + commit: { + loading: false, + error: null, + }, + pull: { + loading: false, + error: null, + }, + discard: { + loading: false, + error: null, + }, + mergeStatus: { + value: null, + loading: false, + error: null, + }, + merge: { + loading: false, + error: null, + }, + branches: { + value: null, + loading: false, + error: null, + }, + checkoutBranch: { + loading: false, + error: null, + }, + createBranch: { + loading: 
false, + error: null, + }, + deleteBranch: { + loading: false, + error: null, + }, + globalConfig: { + value: null, + loading: false, + error: null, + }, + localConfig: { + value: null, + loading: false, + error: null, + }, + updateGlobalConfig: { + loading: false, + error: null, + }, + updateLocalConfig: { + loading: false, + error: null, + }, + disconnect: { + loading: false, + error: null, + }, + protectedBranches: { + value: null, + loading: false, + error: null, + }, + updateProtectedBranches: { + loading: false, + error: null, + }, + autocommitProgress: { + value: null, + loading: false, + error: null, + }, + toggleAutocommit: { + loading: false, + error: null, + }, + triggerAutocommit: { + loading: false, + error: null, + }, + generateSSHKey: { + loading: false, + error: null, + }, + sshKey: { + value: null, + loading: false, + error: null, + }, + }; + +export const gitSingleArtifactInitialState: GitSingleArtifactReduxState = { + ui: gitSingleArtifactInitialUIState, + apiResponses: gitSingleArtifactInitialAPIResponses, +}; diff --git a/app/client/packages/git/src/actions/mergeActions.ts b/app/client/packages/git/src/actions/mergeActions.ts new file mode 100644 index 000000000000..dab2d21ed4c3 --- /dev/null +++ b/app/client/packages/git/src/actions/mergeActions.ts @@ -0,0 +1,26 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const mergeInitAction = createSingleArtifactAction((state) => { + state.apiResponses.merge.loading = true; + state.apiResponses.merge.error = null; + + return state; +}); + +export const mergeSuccessAction = createSingleArtifactAction((state) => { + state.apiResponses.merge.loading = false; + + return state; +}); + +export const mergeErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.merge.loading = false; + 
state.apiResponses.merge.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/mountActions.ts b/app/client/packages/git/src/actions/mountActions.ts index a4ee3d372d08..556aae44a483 100644 --- a/app/client/packages/git/src/actions/mountActions.ts +++ b/app/client/packages/git/src/actions/mountActions.ts @@ -1,6 +1,6 @@ import type { PayloadAction } from "@reduxjs/toolkit"; import type { GitArtifactBasePayload, GitArtifactReduxState } from "../types"; -import { gitSingleArtifactInitialState } from "./helpers/createSingleArtifactAction"; +import { gitSingleArtifactInitialState } from "./helpers/singleArtifactInitialState"; // ! This might be removed later diff --git a/app/client/packages/git/src/actions/pullActions.ts b/app/client/packages/git/src/actions/pullActions.ts index 44311e384071..04f2dfcd31fe 100644 --- a/app/client/packages/git/src/actions/pullActions.ts +++ b/app/client/packages/git/src/actions/pullActions.ts @@ -1,25 +1,25 @@ import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; -import type { GitArtifactPayloadAction } from "../types"; +import type { GitArtifactErrorPayloadAction } from "../types"; export const pullInitAction = createSingleArtifactAction((state) => { - state.pull.loading = true; - state.pull.error = null; + state.apiResponses.pull.loading = true; + state.apiResponses.pull.error = null; return state; }); export const pullSuccessAction = createSingleArtifactAction((state) => { - state.pull.loading = false; + state.apiResponses.pull.loading = false; return state; }); export const pullErrorAction = createSingleArtifactAction( - (state, action: GitArtifactPayloadAction<{ error: string }>) => { + (state, action: GitArtifactErrorPayloadAction) => { const { error } = action.payload; - state.pull.loading = false; - state.pull.error = error; + state.apiResponses.pull.loading = false; + state.apiResponses.pull.error = error; return state; }, diff --git 
a/app/client/packages/git/src/actions/toggleAutocommitActions.ts b/app/client/packages/git/src/actions/toggleAutocommitActions.ts new file mode 100644 index 000000000000..129011c50143 --- /dev/null +++ b/app/client/packages/git/src/actions/toggleAutocommitActions.ts @@ -0,0 +1,30 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const toggleAutocommitInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.toggleAutocommit.loading = true; + state.apiResponses.toggleAutocommit.error = null; + + return state; + }, +); + +export const toggleAutocommitSuccessAction = createSingleArtifactAction( + (state) => { + state.apiResponses.toggleAutocommit.loading = false; + + return state; + }, +); + +export const toggleAutocommitErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.toggleAutocommit.loading = false; + state.apiResponses.toggleAutocommit.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/triggerAutocommitActions.ts b/app/client/packages/git/src/actions/triggerAutocommitActions.ts new file mode 100644 index 000000000000..1ea785bdaeb7 --- /dev/null +++ b/app/client/packages/git/src/actions/triggerAutocommitActions.ts @@ -0,0 +1,30 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const triggerAutocommitInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.triggerAutocommit.loading = true; + state.apiResponses.triggerAutocommit.error = null; + + return state; + }, +); + +export const triggerAutocommitSuccessAction = createSingleArtifactAction( + (state) => { + state.apiResponses.triggerAutocommit.loading = false; + + return state; + }, +); + +export const 
triggerAutocommitErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.triggerAutocommit.loading = false; + state.apiResponses.triggerAutocommit.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/updateGlobalConfigActions.ts b/app/client/packages/git/src/actions/updateGlobalConfigActions.ts new file mode 100644 index 000000000000..a4990e2996b2 --- /dev/null +++ b/app/client/packages/git/src/actions/updateGlobalConfigActions.ts @@ -0,0 +1,30 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const updateGlobalConfigInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.updateGlobalConfig.loading = true; + state.apiResponses.updateGlobalConfig.error = null; + + return state; + }, +); + +export const updateGlobalConfigSuccessAction = createSingleArtifactAction( + (state) => { + state.apiResponses.updateGlobalConfig.loading = false; + + return state; + }, +); + +export const updateGlobalConfigErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.updateGlobalConfig.loading = false; + state.apiResponses.updateGlobalConfig.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/updateLocalConfigActions.ts b/app/client/packages/git/src/actions/updateLocalConfigActions.ts new file mode 100644 index 000000000000..439d284d20af --- /dev/null +++ b/app/client/packages/git/src/actions/updateLocalConfigActions.ts @@ -0,0 +1,30 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const updateLocalConfigInitAction = createSingleArtifactAction( + (state) => { + 
state.apiResponses.updateLocalConfig.loading = true; + state.apiResponses.updateLocalConfig.error = null; + + return state; + }, +); + +export const updateLocalConfigSuccessAction = createSingleArtifactAction( + (state) => { + state.apiResponses.updateLocalConfig.loading = false; + + return state; + }, +); + +export const updateLocalConfigErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.updateLocalConfig.loading = false; + state.apiResponses.updateLocalConfig.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/actions/updateProtectedBranchesActions.ts b/app/client/packages/git/src/actions/updateProtectedBranchesActions.ts new file mode 100644 index 000000000000..d20fb52cd591 --- /dev/null +++ b/app/client/packages/git/src/actions/updateProtectedBranchesActions.ts @@ -0,0 +1,30 @@ +import { createSingleArtifactAction } from "./helpers/createSingleArtifactAction"; +import type { GitArtifactErrorPayloadAction } from "../types"; + +export const updateProtectedBranchesInitAction = createSingleArtifactAction( + (state) => { + state.apiResponses.updateProtectedBranches.loading = true; + state.apiResponses.updateProtectedBranches.error = null; + + return state; + }, +); + +export const updateProtectedBranchesSuccessAction = createSingleArtifactAction( + (state) => { + state.apiResponses.updateProtectedBranches.loading = false; + + return state; + }, +); + +export const updateProtectedBranchesErrorAction = createSingleArtifactAction( + (state, action: GitArtifactErrorPayloadAction) => { + const { error } = action.payload; + + state.apiResponses.updateProtectedBranches.loading = false; + state.apiResponses.updateProtectedBranches.error = error; + + return state; + }, +); diff --git a/app/client/packages/git/src/enums.ts b/app/client/packages/git/src/enums.ts new file mode 100644 index 000000000000..fe72149e9fe7 --- /dev/null +++ 
b/app/client/packages/git/src/enums.ts @@ -0,0 +1,27 @@ +export enum GitArtifactType { + Application = "Application", + Package = "Package", + Workflow = "Workflow", +} + +export enum GitConnectStep { + Provider = "Provider", + Remote = "Remote", + SSH = "SSH", +} + +export enum GitImportStep { + Provider = "Provider", + remote = "remote", + SSH = "SSH", +} + +export enum GitOpsTab { + Deploy = "Deploy", + Merge = "Merge", +} + +export enum GitSettingsTab { + General = "General", + Branch = "Branch", +} diff --git a/app/client/packages/git/src/types.ts b/app/client/packages/git/src/types.ts index df64af331290..7786dcc1b4a0 100644 --- a/app/client/packages/git/src/types.ts +++ b/app/client/packages/git/src/types.ts @@ -1,4 +1,11 @@ import type { PayloadAction } from "@reduxjs/toolkit"; +import type { + GitArtifactType, + GitConnectStep, + GitImportStep, + GitOpsTab, + GitSettingsTab, +} from "./enums"; // These will be updated when contracts are finalized export type GitMetadata = Record; @@ -7,19 +14,79 @@ export type GitBranches = Record; export type GitStatus = Record; +export type GitMergeStatus = Record; + +export type GitGlobalConfig = Record; + +export type GitLocalConfig = Record; + +export type GitProtectedBranches = Record; + +export type GitAutocommitProgress = Record; + +export type GitSSHKey = Record; + interface AsyncState { value: T | null; loading: boolean; error: string | null; } -export interface GitSingleArtifactReduxState { +interface AsyncStateWithoutValue { + loading: boolean; + error: string | null; +} +export interface GitSingleArtifactAPIResponsesReduxState { metadata: AsyncState; - connect: Omit; - branches: AsyncState; + connect: AsyncStateWithoutValue; status: AsyncState; - commit: Omit; - pull: Omit; + commit: AsyncStateWithoutValue; + pull: AsyncStateWithoutValue; + discard: AsyncStateWithoutValue; + mergeStatus: AsyncState; + merge: AsyncStateWithoutValue; + branches: AsyncState; + checkoutBranch: AsyncStateWithoutValue; + 
createBranch: AsyncStateWithoutValue; + deleteBranch: AsyncStateWithoutValue; + globalConfig: AsyncState; + localConfig: AsyncState; + updateGlobalConfig: AsyncStateWithoutValue; + updateLocalConfig: AsyncStateWithoutValue; + disconnect: AsyncStateWithoutValue; + protectedBranches: AsyncState; + updateProtectedBranches: AsyncStateWithoutValue; + autocommitProgress: AsyncState; + toggleAutocommit: AsyncStateWithoutValue; + triggerAutocommit: AsyncStateWithoutValue; + sshKey: AsyncState; + generateSSHKey: AsyncStateWithoutValue; +} + +export interface GitSingleArtifactUIReduxState { + connectModal: { + open: boolean; + step: keyof typeof GitConnectStep; + }; + importModal: { + open: boolean; + step: keyof typeof GitImportStep; + }; + branchList: { + open: boolean; + }; + opsModal: { + open: boolean; + tab: keyof typeof GitOpsTab; + }; + settingsModal: { + open: boolean; + tab: keyof typeof GitSettingsTab; + }; +} +export interface GitSingleArtifactReduxState { + ui: GitSingleArtifactUIReduxState; + apiResponses: GitSingleArtifactAPIResponsesReduxState; } export interface GitArtifactReduxState { @@ -27,9 +94,13 @@ export interface GitArtifactReduxState { } export interface GitArtifactBasePayload { - artifactType: string; + artifactType: keyof typeof GitArtifactType; baseArtifactId: string; } export type GitArtifactPayloadAction> = PayloadAction; + +export type GitArtifactErrorPayloadAction = GitArtifactPayloadAction<{ + error: string; +}>; diff --git a/app/client/packages/rts/package.json b/app/client/packages/rts/package.json index 07d9ffec6985..d040bd90486b 100644 --- a/app/client/packages/rts/package.json +++ b/app/client/packages/rts/package.json @@ -30,8 +30,7 @@ "loglevel": "^1.8.1", "mongodb": "^5.8.0", "nodemailer": "6.9.9", - "readline-sync": "1.4.10", - "socket.io": "^4.6.2" + "readline-sync": "1.4.10" }, "devDependencies": { "@types/express": "^4.17.14", diff --git a/app/client/packages/rts/src/constants/routes.ts 
b/app/client/packages/rts/src/constants/routes.ts index d6f275c956c2..7996e1750d6e 100644 --- a/app/client/packages/rts/src/constants/routes.ts +++ b/app/client/packages/rts/src/constants/routes.ts @@ -1,6 +1,5 @@ const BASE_API_URL = "http://localhost:8091"; -export const RTS_BASE_PATH = "/rts"; export const RTS_BASE_API_PATH = "/rts-api/v1"; export const RTS_BASE_API_URL = `${BASE_API_URL}${RTS_BASE_API_PATH}`; diff --git a/app/client/packages/rts/src/constants/socket.ts b/app/client/packages/rts/src/constants/socket.ts deleted file mode 100644 index ae0e94195d68..000000000000 --- a/app/client/packages/rts/src/constants/socket.ts +++ /dev/null @@ -1,11 +0,0 @@ -export const APP_ROOM_PREFIX = "app:"; -export const PAGE_ROOM_PREFIX = "page:"; -export const ROOT_NAMESPACE = "/"; -export const PAGE_EDIT_NAMESPACE = "/page/edit"; - -export const EDITORS_EVENT_NAME = "collab:online_editors"; -export const START_EDIT_EVENT_NAME = "collab:start_edit"; -export const LEAVE_EDIT_EVENT_NAME = "collab:leave_edit"; -export const MOUSE_POINTER_EVENT_NAME = "collab:mouse_pointer"; -export const RELEASE_VERSION_EVENT_NAME = "info:release_version"; -export const PAGE_VISIBILITY_EVENT_NAME = "info:page_visibility"; diff --git a/app/client/packages/rts/src/controllers/socket.ts b/app/client/packages/rts/src/controllers/socket.ts deleted file mode 100644 index 07c986ba4a9d..000000000000 --- a/app/client/packages/rts/src/controllers/socket.ts +++ /dev/null @@ -1,136 +0,0 @@ -import type { Server, Socket } from "socket.io"; -import { tryAuth } from "@middlewares/socket-auth"; -import { - START_EDIT_EVENT_NAME, - LEAVE_EDIT_EVENT_NAME, - MOUSE_POINTER_EVENT_NAME, - PAGE_EDIT_NAMESPACE, - PAGE_ROOM_PREFIX, - EDITORS_EVENT_NAME, -} from "@constants/socket"; -import type { Policy, MousePointerEvent } from "@utils/models"; -import { AppUser, CurrentEditorsEvent } from "@utils/models"; - -function subscribeToEditEvents(socket: Socket, appRoomPrefix: string) { - 
socket.on(START_EDIT_EVENT_NAME, (resourceId) => { - if (socket.data.email) { - // user is authenticated, join the room now - joinEditRoom(socket, resourceId, appRoomPrefix); - } else { - // user not authenticated yet, save the resource id and room prefix to join later after auth - socket.data.pendingRoomId = resourceId; - socket.data.pendingRoomPrefix = appRoomPrefix; - } - }); - - socket.on(LEAVE_EDIT_EVENT_NAME, (resourceId) => { - const roomName = appRoomPrefix + resourceId; - - socket.leave(roomName); // remove this socket from room - }); -} - -async function onAppSocketConnected(socket: Socket) { - const isAuthenticated = await tryAuthAndJoinPendingRoom(socket); - - if (isAuthenticated) { - socket.join("email:" + socket.data.email); - } -} - -async function onPageSocketConnected(socket: Socket, socketIo: Server) { - const isAuthenticated = await tryAuthAndJoinPendingRoom(socket); - - if (isAuthenticated) { - socket.on(MOUSE_POINTER_EVENT_NAME, (event: MousePointerEvent) => { - event.user = new AppUser(socket.data.name, socket.data.email); - event.socketId = socket.id; - socketIo - .of(PAGE_EDIT_NAMESPACE) - .to(PAGE_ROOM_PREFIX + event.pageId) - .emit(MOUSE_POINTER_EVENT_NAME, event); - }); - } -} - -async function tryAuthAndJoinPendingRoom(socket: Socket) { - const isAuthenticated = await tryAuth(socket); - - if (socket.data.pendingRoomId) { - // an appId or pageId is pending for this socket, join now - joinEditRoom( - socket, - socket.data.pendingRoomId, - socket.data.pendingRoomPrefix, - ); - } - - return isAuthenticated; -} - -function joinEditRoom(socket: Socket, roomId: string, roomPrefix: string) { - // remove this socket from any other rooms with roomPrefix - if (socket.rooms) { - socket.rooms.forEach((roomName) => { - if (roomName.startsWith(roomPrefix)) { - socket.leave(roomName); - } - }); - } - - // add this socket to room with application id - const roomName = roomPrefix + roomId; - - socket.join(roomName); -} - -function 
findPolicyEmails(policies: Policy[], permission: string): string[] { - const emails: string[] = []; - - for (const policy of policies) { - if (policy.permission === permission) { - for (const email of policy.users) { - emails.push(email); - } - - break; - } - } - - return emails; -} - -function sendCurrentUsers(socketIo, roomName: string, roomPrefix: string) { - if (roomName.startsWith(roomPrefix)) { - socketIo - .in(roomName) - .fetchSockets() - .then((sockets) => { - const onlineUsernames = new Set(); - const onlineUsers = new Array(); - - if (sockets) { - sockets.forEach((s) => { - if (!onlineUsernames.has(s.data.email)) { - onlineUsers.push(new AppUser(s.data.name, s.data.email)); - } - - onlineUsernames.add(s.data.email); - }); - } - - const resourceId = roomName.replace(roomPrefix, ""); // get resourceId from room name by removing the prefix - const response = new CurrentEditorsEvent(resourceId, onlineUsers); - - socketIo.to(roomName).emit(EDITORS_EVENT_NAME, response); - }); - } -} - -export { - subscribeToEditEvents, - onAppSocketConnected, - onPageSocketConnected, - sendCurrentUsers, - findPolicyEmails, -}; diff --git a/app/client/packages/rts/src/ctl/backup/BackupState.ts b/app/client/packages/rts/src/ctl/backup/BackupState.ts index 182756ab298f..9192f654d51e 100644 --- a/app/client/packages/rts/src/ctl/backup/BackupState.ts +++ b/app/client/packages/rts/src/ctl/backup/BackupState.ts @@ -1,25 +1,19 @@ -import { getTimeStampInISO } from "./index"; - export class BackupState { - readonly args: string[]; - readonly initAt: string = getTimeStampInISO(); + readonly args: readonly string[]; + readonly initAt: string = new Date().toISOString().replace(/:/g, "-"); readonly errors: string[] = []; backupRootPath: string = ""; archivePath: string = ""; - encryptionPassword: string = ""; + isEncryptionEnabled: boolean = false; constructor(args: string[]) { - this.args = args; + this.args = Object.freeze([...args]); // We seal `this` so that no link in the chain can 
"add" new properties to the state. This is intentional. If any // link wants to save data in the `BackupState`, which shouldn't even be needed in most cases, it should do so by // explicitly declaring a property in this class. No surprises. Object.seal(this); } - - isEncryptionEnabled() { - return !!this.encryptionPassword; - } } diff --git a/app/client/packages/rts/src/ctl/backup/backup.test.ts b/app/client/packages/rts/src/ctl/backup/backup.test.ts index fd4d12b73091..7fa9ce48d9d3 100644 --- a/app/client/packages/rts/src/ctl/backup/backup.test.ts +++ b/app/client/packages/rts/src/ctl/backup/backup.test.ts @@ -3,6 +3,16 @@ import * as backup from "."; import * as Constants from "../constants"; import * as utils from "../utils"; import readlineSync from "readline-sync"; +import { + checkAvailableBackupSpace, + encryptBackupArchive, + executeCopyCMD, + executeMongoDumpCMD, + getAvailableBackupSpaceInBytes, + getEncryptionPasswordFromUser, + getGitRoot, + removeSensitiveEnvData, +} from "./links"; jest.mock("../utils", () => ({ ...jest.requireActual("../utils"), @@ -10,15 +20,8 @@ jest.mock("../utils", () => ({ })); describe("Backup Tests", () => { - test("Timestamp string in ISO format", () => { - console.log(backup.getTimeStampInISO()); - expect(backup.getTimeStampInISO()).toMatch( - /(\d{4})-(\d{2})-(\d{2})T(\d{2})-(\d{2})-(\d{2})\.(\d{3})Z/, - ); - }); - test("Available Space in /appsmith-stacks volume in Bytes", async () => { - const res = expect(await backup.getAvailableBackupSpaceInBytes("/")); + const res = expect(await getAvailableBackupSpaceInBytes("/")); res.toBeGreaterThan(1024 * 1024); }); @@ -32,14 +35,12 @@ describe("Backup Tests", () => { it("Should throw Error when the available size is below MIN_REQUIRED_DISK_SPACE_IN_BYTES", () => { const size = Constants.MIN_REQUIRED_DISK_SPACE_IN_BYTES - 1; - expect(() => backup.checkAvailableBackupSpace(size)).toThrow(); + expect(() => checkAvailableBackupSpace(size)).toThrow(); }); it("Should not should throw 
Error when the available size is >= MIN_REQUIRED_DISK_SPACE_IN_BYTES", () => { expect(() => { - backup.checkAvailableBackupSpace( - Constants.MIN_REQUIRED_DISK_SPACE_IN_BYTES, - ); + checkAvailableBackupSpace(Constants.MIN_REQUIRED_DISK_SPACE_IN_BYTES); }).not.toThrow( "Not enough space available at /appsmith-stacks. Please ensure availability of at least 5GB to backup successfully.", ); @@ -59,29 +60,29 @@ describe("Backup Tests", () => { const appsmithMongoURI = "mongodb://username:password@host/appsmith"; const cmd = "mongodump --uri=mongodb://username:password@host/appsmith --archive=/dest/mongodb-data.gz --gzip"; - const res = await backup.executeMongoDumpCMD(dest, appsmithMongoURI); + const res = await executeMongoDumpCMD(dest, appsmithMongoURI); expect(res).toBe(cmd); console.log(res); }); test("Test get gitRoot path when APPSMITH_GIT_ROOT is '' ", () => { - expect(backup.getGitRoot("")).toBe("/appsmith-stacks/git-storage"); + expect(getGitRoot("")).toBe("/appsmith-stacks/git-storage"); }); test("Test get gitRoot path when APPSMITH_GIT_ROOT is null ", () => { - expect(backup.getGitRoot()).toBe("/appsmith-stacks/git-storage"); + expect(getGitRoot()).toBe("/appsmith-stacks/git-storage"); }); test("Test get gitRoot path when APPSMITH_GIT_ROOT is defined ", () => { - expect(backup.getGitRoot("/my/git/storage")).toBe("/my/git/storage"); + expect(getGitRoot("/my/git/storage")).toBe("/my/git/storage"); }); test("Test ln command generation", async () => { const gitRoot = "/appsmith-stacks/git-storage"; const dest = "/destdir"; const cmd = "ln -s /appsmith-stacks/git-storage /destdir/git-storage"; - const res = await backup.executeCopyCMD(gitRoot, dest); + const res = await executeCopyCMD(gitRoot, dest); expect(res).toBe(cmd); console.log(res); @@ -102,7 +103,7 @@ describe("Backup Tests", () => { test("If MONGODB and Encryption env values are being removed", () => { expect( - 
backup.removeSensitiveEnvData(`APPSMITH_REDIS_URL=redis://127.0.0.1:6379\nAPPSMITH_DB_URL=mongodb://appsmith:pass@localhost:27017/appsmith\nAPPSMITH_MONGODB_USER=appsmith\nAPPSMITH_MONGODB_PASSWORD=pass\nAPPSMITH_INSTANCE_NAME=Appsmith\n + removeSensitiveEnvData(`APPSMITH_REDIS_URL=redis://127.0.0.1:6379\nAPPSMITH_DB_URL=mongodb://appsmith:pass@localhost:27017/appsmith\nAPPSMITH_MONGODB_USER=appsmith\nAPPSMITH_MONGODB_PASSWORD=pass\nAPPSMITH_INSTANCE_NAME=Appsmith\n `), ).toMatch( `APPSMITH_REDIS_URL=redis://127.0.0.1:6379\nAPPSMITH_INSTANCE_NAME=Appsmith\n`, @@ -111,7 +112,7 @@ describe("Backup Tests", () => { test("If MONGODB and Encryption env values are being removed", () => { expect( - backup.removeSensitiveEnvData(`APPSMITH_REDIS_URL=redis://127.0.0.1:6379\nAPPSMITH_ENCRYPTION_PASSWORD=dummy-pass\nAPPSMITH_ENCRYPTION_SALT=dummy-salt\nAPPSMITH_DB_URL=mongodb://appsmith:pass@localhost:27017/appsmith\nAPPSMITH_MONGODB_USER=appsmith\nAPPSMITH_MONGODB_PASSWORD=pass\nAPPSMITH_INSTANCE_NAME=Appsmith\n + removeSensitiveEnvData(`APPSMITH_REDIS_URL=redis://127.0.0.1:6379\nAPPSMITH_ENCRYPTION_PASSWORD=dummy-pass\nAPPSMITH_ENCRYPTION_SALT=dummy-salt\nAPPSMITH_DB_URL=mongodb://appsmith:pass@localhost:27017/appsmith\nAPPSMITH_MONGODB_USER=appsmith\nAPPSMITH_MONGODB_PASSWORD=pass\nAPPSMITH_INSTANCE_NAME=Appsmith\n `), ).toMatch( `APPSMITH_REDIS_URL=redis://127.0.0.1:6379\nAPPSMITH_INSTANCE_NAME=Appsmith\n`, @@ -199,7 +200,7 @@ describe("Backup Tests", () => { const password = "password#4321"; readlineSync.question = jest.fn().mockImplementation(() => password); - const password_res = backup.getEncryptionPasswordFromUser(); + const password_res = getEncryptionPasswordFromUser(); expect(password_res).toEqual(password); }); @@ -215,13 +216,13 @@ describe("Backup Tests", () => { return password; }); - expect(() => backup.getEncryptionPasswordFromUser()).toThrow(); + expect(() => getEncryptionPasswordFromUser()).toThrow(); }); test("Get encrypted archive path", async () => { 
const archivePath = "/rootDir/appsmith-backup-0000-00-0T00-00-00.00Z"; const encryptionPassword = "password#4321"; - const encArchivePath = await backup.encryptBackupArchive( + const encArchivePath = await encryptBackupArchive( archivePath, encryptionPassword, ); @@ -234,10 +235,7 @@ describe("Backup Tests", () => { test("Test backup encryption function", async () => { const archivePath = "/rootDir/appsmith-backup-0000-00-0T00-00-00.00Z"; const encryptionPassword = "password#123"; - const res = await backup.encryptBackupArchive( - archivePath, - encryptionPassword, - ); + const res = await encryptBackupArchive(archivePath, encryptionPassword); console.log(res); expect(res).toEqual("/rootDir/appsmith-backup-0000-00-0T00-00-00.00Z.enc"); diff --git a/app/client/packages/rts/src/ctl/backup/index.ts b/app/client/packages/rts/src/ctl/backup/index.ts index 8fa2c353583f..0e2d70c30868 100644 --- a/app/client/packages/rts/src/ctl/backup/index.ts +++ b/app/client/packages/rts/src/ctl/backup/index.ts @@ -1,14 +1,10 @@ import fsPromises from "fs/promises"; -import path from "path"; -import os from "os"; import * as utils from "../utils"; import * as Constants from "../constants"; import * as logger from "../logger"; import * as mailer from "../mailer"; -import readlineSync from "readline-sync"; -import { DiskSpaceLink } from "./links/DiskSpaceLink"; import type { Link } from "./links"; -import { EncryptionLink, ManifestLink } from "./links"; +import * as linkClasses from "./links"; import { BackupState } from "./BackupState"; export async function run(args: string[]) { @@ -17,9 +13,16 @@ export async function run(args: string[]) { const state: BackupState = new BackupState(args); const chain: Link[] = [ - new DiskSpaceLink(), - new ManifestLink(state), - new EncryptionLink(state), + new linkClasses.BackupFolderLink(state), + new linkClasses.DiskSpaceLink(), + new linkClasses.ManifestLink(state), + new linkClasses.MongoDumpLink(state), + new linkClasses.GitStorageLink(state), + 
new linkClasses.EnvFileLink(state), + + // Encryption link is best placed last so if any of the above links fail, we don't ask the user for a password and + // then do nothing with it. + new linkClasses.EncryptionLink(state), ]; try { @@ -29,19 +32,6 @@ export async function run(args: string[]) { } // BACKUP - state.backupRootPath = await fsPromises.mkdtemp( - path.join(os.tmpdir(), "appsmithctl-backup-"), - ); - - await exportDatabase(state.backupRootPath); - - await createGitStorageArchive(state.backupRootPath); - - await exportDockerEnvFile( - state.backupRootPath, - state.isEncryptionEnabled(), - ); - for (const link of chain) { await link.doBackup?.(); } @@ -58,23 +48,6 @@ export async function run(args: string[]) { console.log("Post-backup done. Final archive at", state.archivePath); - if (!state.isEncryptionEnabled()) { - console.log( - "********************************************************* IMPORTANT!!! *************************************************************", - ); - console.log( - "*** Please ensure you have saved the APPSMITH_ENCRYPTION_SALT and APPSMITH_ENCRYPTION_PASSWORD variables from the docker.env file **", - ); - console.log( - "*** These values are not included in the backup export. 
**", - ); - console.log( - "************************************************************************************************************************************", - ); - } - - await fsPromises.rm(state.backupRootPath, { recursive: true, force: true }); - await logger.backup_info( "Finished taking a backup at " + state.archivePath, ); @@ -116,118 +89,6 @@ export async function run(args: string[]) { } } -export async function encryptBackupArchive( - archivePath: string, - encryptionPassword: string, -) { - const encryptedArchivePath = archivePath + ".enc"; - - await utils.execCommand([ - "openssl", - "enc", - "-aes-256-cbc", - "-pbkdf2", - "-iter", - "100000", - "-in", - archivePath, - "-out", - encryptedArchivePath, - "-k", - encryptionPassword, - ]); - - return encryptedArchivePath; -} - -export function getEncryptionPasswordFromUser(): string { - for (const attempt of [1, 2, 3]) { - if (attempt > 1) { - console.log("Retry attempt", attempt); - } - - const encryptionPwd1: string = readlineSync.question( - "Enter a password to encrypt the backup archive: ", - { hideEchoBack: true }, - ); - const encryptionPwd2: string = readlineSync.question( - "Enter the above password again: ", - { hideEchoBack: true }, - ); - - if (encryptionPwd1 === encryptionPwd2) { - if (encryptionPwd1) { - return encryptionPwd1; - } - - console.error( - "Invalid input. 
Empty password is not allowed, please try again.", - ); - } else { - console.error("The passwords do not match, please try again."); - } - } - - console.error( - "Aborting backup process, failed to obtain valid encryption password.", - ); - - throw new Error( - "Backup process aborted because a valid encryption password could not be obtained from the user", - ); -} - -async function exportDatabase(destFolder: string) { - console.log("Exporting database"); - await executeMongoDumpCMD(destFolder, utils.getDburl()); - console.log("Exporting database done."); -} - -async function createGitStorageArchive(destFolder: string) { - console.log("Creating git-storage archive"); - - const gitRoot = getGitRoot(process.env.APPSMITH_GIT_ROOT); - - await executeCopyCMD(gitRoot, destFolder); - - console.log("Created git-storage archive"); -} - -async function exportDockerEnvFile( - destFolder: string, - encryptArchive: boolean, -) { - console.log("Exporting docker environment file"); - const content = await fsPromises.readFile( - "/appsmith-stacks/configuration/docker.env", - { encoding: "utf8" }, - ); - let cleaned_content = removeSensitiveEnvData(content); - - if (encryptArchive) { - cleaned_content += - "\nAPPSMITH_ENCRYPTION_SALT=" + - process.env.APPSMITH_ENCRYPTION_SALT + - "\nAPPSMITH_ENCRYPTION_PASSWORD=" + - process.env.APPSMITH_ENCRYPTION_PASSWORD; - } - - await fsPromises.writeFile(destFolder + "/docker.env", cleaned_content); - console.log("Exporting docker environment file done."); -} - -export async function executeMongoDumpCMD( - destFolder: string, - appsmithMongoURI: string, -) { - return await utils.execCommand([ - "mongodump", - `--uri=${appsmithMongoURI}`, - `--archive=${destFolder}/mongodb-data.gz`, - "--gzip", - ]); // generate cmd -} - async function createFinalArchive(destFolder: string, timestamp: string) { console.log("Creating final archive"); @@ -260,23 +121,6 @@ async function postBackupCleanup() { console.log("Cleanup completed."); } -export async 
function executeCopyCMD(srcFolder: string, destFolder: string) { - return await utils.execCommand([ - "ln", - "-s", - srcFolder, - path.join(destFolder, "git-storage"), - ]); -} - -export function getGitRoot(gitRoot?: string | undefined) { - if (gitRoot == null || gitRoot === "") { - gitRoot = "/appsmith-stacks/git-storage"; - } - - return gitRoot; -} - export function getBackupContentsPath( backupRootPath: string, timestamp: string, @@ -284,23 +128,6 @@ export function getBackupContentsPath( return backupRootPath + "/appsmith-backup-" + timestamp; } -export function removeSensitiveEnvData(content: string): string { - // Remove encryption and Mongodb data from docker.env - const output_lines = []; - - content.split(/\r?\n/).forEach((line) => { - if ( - !line.startsWith("APPSMITH_ENCRYPTION") && - !line.startsWith("APPSMITH_MONGODB") && - !line.startsWith("APPSMITH_DB_URL=") - ) { - output_lines.push(line); - } - }); - - return output_lines.join("\n"); -} - export function getBackupArchiveLimit(backupArchivesLimit?: number): number { return backupArchivesLimit || Constants.APPSMITH_DEFAULT_BACKUP_ARCHIVE_LIMIT; } @@ -318,23 +145,3 @@ export async function removeOldBackups( .map(async (file) => fsPromises.rm(file)), ); } - -export function getTimeStampInISO() { - return new Date().toISOString().replace(/:/g, "-"); -} - -export async function getAvailableBackupSpaceInBytes( - path: string, -): Promise { - const stat = await fsPromises.statfs(path); - - return stat.bsize * stat.bfree; -} - -export function checkAvailableBackupSpace(availSpaceInBytes: number) { - if (availSpaceInBytes < Constants.MIN_REQUIRED_DISK_SPACE_IN_BYTES) { - throw new Error( - "Not enough space available at /appsmith-stacks. 
Please ensure availability of at least 2GB to backup successfully.", - ); - } -} diff --git a/app/client/packages/rts/src/ctl/backup/links/BackupFolderLink.ts b/app/client/packages/rts/src/ctl/backup/links/BackupFolderLink.ts new file mode 100644 index 000000000000..95539fe8a141 --- /dev/null +++ b/app/client/packages/rts/src/ctl/backup/links/BackupFolderLink.ts @@ -0,0 +1,26 @@ +import type { Link } from "."; +import type { BackupState } from "../BackupState"; +import fsPromises from "fs/promises"; +import path from "path"; +import os from "os"; + +/** + * Creates the backup folder in pre step, and deletes it in post step. The existence of the backup folder should only + * be assumed in the "doBackup" step, and no other. + */ +export class BackupFolderLink implements Link { + constructor(private readonly state: BackupState) {} + + async preBackup() { + this.state.backupRootPath = await fsPromises.mkdtemp( + path.join(os.tmpdir(), "appsmithctl-backup-"), + ); + } + + async postBackup() { + await fsPromises.rm(this.state.backupRootPath, { + recursive: true, + force: true, + }); + } +} diff --git a/app/client/packages/rts/src/ctl/backup/links/DiskSpaceLink.ts b/app/client/packages/rts/src/ctl/backup/links/DiskSpaceLink.ts index 8ab16efe7eaf..950dcde9d7b3 100644 --- a/app/client/packages/rts/src/ctl/backup/links/DiskSpaceLink.ts +++ b/app/client/packages/rts/src/ctl/backup/links/DiskSpaceLink.ts @@ -1,6 +1,10 @@ -import { checkAvailableBackupSpace, getAvailableBackupSpaceInBytes } from ".."; import type { Link } from "."; +import * as Constants from "../../constants"; +import fsPromises from "fs/promises"; +/** + * Checks if there is enough space available at the backup location. 
+ */ export class DiskSpaceLink implements Link { async preBackup() { const availSpaceInBytes: number = @@ -9,3 +13,19 @@ export class DiskSpaceLink implements Link { checkAvailableBackupSpace(availSpaceInBytes); } } + +export async function getAvailableBackupSpaceInBytes( + path: string, +): Promise { + const stat = await fsPromises.statfs(path); + + return stat.bsize * stat.bfree; +} + +export function checkAvailableBackupSpace(availSpaceInBytes: number) { + if (availSpaceInBytes < Constants.MIN_REQUIRED_DISK_SPACE_IN_BYTES) { + throw new Error( + "Not enough space available at /appsmith-stacks. Please ensure availability of at least 2GB to backup successfully.", + ); + } +} diff --git a/app/client/packages/rts/src/ctl/backup/links/EncryptionLink.ts b/app/client/packages/rts/src/ctl/backup/links/EncryptionLink.ts index 086c7fc0c8e3..6b6f294544c5 100644 --- a/app/client/packages/rts/src/ctl/backup/links/EncryptionLink.ts +++ b/app/client/packages/rts/src/ctl/backup/links/EncryptionLink.ts @@ -1,10 +1,17 @@ -import type { Link } from "./index"; +import type { Link } from "."; import tty from "tty"; import fsPromises from "fs/promises"; -import { encryptBackupArchive, getEncryptionPasswordFromUser } from "../index"; import type { BackupState } from "../BackupState"; +import readlineSync from "readline-sync"; +import * as utils from "../../utils"; +/** + * Asks the user for a password, and then encrypts the backup archive using openssl, with that password. If a TTY is not + * available to ask for a password, then this feature is gracefully disabled, and encryption is not performed. 
+ */ export class EncryptionLink implements Link { + #password: string = ""; + constructor(private readonly state: BackupState) {} async preBackup() { @@ -12,12 +19,14 @@ export class EncryptionLink implements Link { !this.state.args.includes("--non-interactive") && tty.isatty((process.stdout as any).fd) ) { - this.state.encryptionPassword = getEncryptionPasswordFromUser(); + this.#password = getEncryptionPasswordFromUser(); } + + this.state.isEncryptionEnabled = !!this.#password; } async postBackup() { - if (!this.state.isEncryptionEnabled()) { + if (!this.#password) { return; } @@ -25,7 +34,7 @@ export class EncryptionLink implements Link { this.state.archivePath = await encryptBackupArchive( unencryptedArchivePath, - this.state.encryptionPassword, + this.#password, ); await fsPromises.rm(unencryptedArchivePath, { @@ -34,3 +43,64 @@ export class EncryptionLink implements Link { }); } } + +export function getEncryptionPasswordFromUser(): string { + for (const attempt of [1, 2, 3]) { + if (attempt > 1) { + console.log("Retry attempt", attempt); + } + + const encryptionPwd1: string = readlineSync.question( + "Enter a password to encrypt the backup archive: ", + { hideEchoBack: true }, + ); + const encryptionPwd2: string = readlineSync.question( + "Enter the above password again: ", + { hideEchoBack: true }, + ); + + if (encryptionPwd1 === encryptionPwd2) { + if (encryptionPwd1) { + return encryptionPwd1; + } + + console.error( + "Invalid input. 
Empty password is not allowed, please try again.", + ); + } else { + console.error("The passwords do not match, please try again."); + } + } + + console.error( + "Aborting backup process, failed to obtain valid encryption password.", + ); + + throw new Error( + "Backup process aborted because a valid encryption password could not be obtained from the user", + ); +} + +export async function encryptBackupArchive( + archivePath: string, + encryptionPassword: string, +) { + const encryptedArchivePath = archivePath + ".enc"; + + await utils.execCommand([ + "openssl", + "enc", + "-aes-256-cbc", + "-pbkdf2", + "-iter", + "100000", + "-in", + archivePath, + "-out", + encryptedArchivePath, + "-k", + encryptionPassword, + ]); + + return encryptedArchivePath; +} diff --git a/app/client/packages/rts/src/ctl/backup/links/EnvFileLink.ts b/app/client/packages/rts/src/ctl/backup/links/EnvFileLink.ts new file mode 100644 index 000000000000..3c5201aae401 --- /dev/null +++ b/app/client/packages/rts/src/ctl/backup/links/EnvFileLink.ts @@ -0,0 +1,65 @@ +import type { Link } from "."; +import type { BackupState } from "../BackupState"; +import fsPromises from "fs/promises"; + +const SECRETS_WARNING = ` +***************************** IMPORTANT!!! ***************************** +*** Please ensure you have saved the APPSMITH_ENCRYPTION_SALT and *** +*** APPSMITH_ENCRYPTION_PASSWORD variables from the docker.env file. *** +*** These values are not included in the backup export. *** +************************************************************************ +`; + +/** + * Exports the docker environment file to the backup folder. If encryption is not enabled, sensitive information is + * not written to the backup folder. 
+ */ +export class EnvFileLink implements Link { + constructor(private readonly state: BackupState) {} + + async doBackup() { + console.log("Exporting docker environment file"); + const content = await fsPromises.readFile( + "/appsmith-stacks/configuration/docker.env", + { encoding: "utf8" }, + ); + let cleanedContent = removeSensitiveEnvData(content); + + if (this.state.isEncryptionEnabled) { + cleanedContent += + "\nAPPSMITH_ENCRYPTION_SALT=" + + process.env.APPSMITH_ENCRYPTION_SALT + + "\nAPPSMITH_ENCRYPTION_PASSWORD=" + + process.env.APPSMITH_ENCRYPTION_PASSWORD; + } + + await fsPromises.writeFile( + this.state.backupRootPath + "/docker.env", + cleanedContent, + ); + console.log("Exporting docker environment file done."); + } + + async postBackup() { + if (!this.state.isEncryptionEnabled) { + console.log(SECRETS_WARNING); + } + } +} + +export function removeSensitiveEnvData(content: string): string { + // Remove encryption and Mongodb data from docker.env + const output_lines = []; + + content.split(/\r?\n/).forEach((line) => { + if ( + !line.startsWith("APPSMITH_ENCRYPTION") && + !line.startsWith("APPSMITH_MONGODB") && + !line.startsWith("APPSMITH_DB_URL=") + ) { + output_lines.push(line); + } + }); + + return output_lines.join("\n"); +} diff --git a/app/client/packages/rts/src/ctl/backup/links/GitStorageLink.ts b/app/client/packages/rts/src/ctl/backup/links/GitStorageLink.ts new file mode 100644 index 000000000000..21c2c0934fd4 --- /dev/null +++ b/app/client/packages/rts/src/ctl/backup/links/GitStorageLink.ts @@ -0,0 +1,37 @@ +import type { Link } from "."; +import type { BackupState } from "../BackupState"; +import * as utils from "../../utils"; +import path from "path"; + +/** + * Copies the `git-storage` folder to the backup folder. 
+ */ +export class GitStorageLink implements Link { + constructor(private readonly state: BackupState) {} + + async doBackup() { + console.log("Creating git-storage archive"); + + const gitRoot = getGitRoot(process.env.APPSMITH_GIT_ROOT); + + await executeCopyCMD(gitRoot, this.state.backupRootPath); + console.log("Created git-storage archive"); + } +} + +export function getGitRoot(gitRoot?: string | undefined) { + if (gitRoot == null || gitRoot === "") { + gitRoot = "/appsmith-stacks/git-storage"; + } + + return gitRoot; +} + +export async function executeCopyCMD(srcFolder: string, destFolder: string) { + return await utils.execCommand([ + "ln", + "-s", + srcFolder, + path.join(destFolder, "git-storage"), + ]); +} diff --git a/app/client/packages/rts/src/ctl/backup/links/ManifestLink.ts b/app/client/packages/rts/src/ctl/backup/links/ManifestLink.ts index 16998203ac1b..c9958aa52c9c 100644 --- a/app/client/packages/rts/src/ctl/backup/links/ManifestLink.ts +++ b/app/client/packages/rts/src/ctl/backup/links/ManifestLink.ts @@ -1,9 +1,12 @@ -import type { Link } from "./index"; +import type { Link } from "."; import type { BackupState } from "../BackupState"; import * as utils from "../../utils"; import fsPromises from "fs/promises"; import path from "path"; +/** + * Creates a manifest file that contains metadata about the backup. + */ export class ManifestLink implements Link { constructor(private readonly state: BackupState) {} diff --git a/app/client/packages/rts/src/ctl/backup/links/MongoDumpLink.ts b/app/client/packages/rts/src/ctl/backup/links/MongoDumpLink.ts new file mode 100644 index 000000000000..13400928bb11 --- /dev/null +++ b/app/client/packages/rts/src/ctl/backup/links/MongoDumpLink.ts @@ -0,0 +1,25 @@ +import type { Link } from "."; +import type { BackupState } from "../BackupState"; +import * as utils from "../../utils"; + +/** + * Exports the MongoDB database data using mongodump. 
+ */ +export class MongoDumpLink implements Link { + constructor(private readonly state: BackupState) {} + + async doBackup() { + console.log("Exporting database"); + await executeMongoDumpCMD(this.state.backupRootPath, utils.getDburl()); + console.log("Exporting database done."); + } +} + +export async function executeMongoDumpCMD(destFolder: string, dbUrl: string) { + return await utils.execCommand([ + "mongodump", + `--uri=${dbUrl}`, + `--archive=${destFolder}/mongodb-data.gz`, + "--gzip", + ]); +} diff --git a/app/client/packages/rts/src/ctl/backup/links/index.ts b/app/client/packages/rts/src/ctl/backup/links/index.ts index eea6f31bcd55..da48f43c0531 100644 --- a/app/client/packages/rts/src/ctl/backup/links/index.ts +++ b/app/client/packages/rts/src/ctl/backup/links/index.ts @@ -9,5 +9,10 @@ export interface Link { postBackup?(): Promise; } -export { EncryptionLink } from "./EncryptionLink"; -export { ManifestLink } from "./ManifestLink"; +export * from "./BackupFolderLink"; +export * from "./DiskSpaceLink"; +export * from "./EncryptionLink"; +export * from "./EnvFileLink"; +export * from "./GitStorageLink"; +export * from "./ManifestLink"; +export * from "./MongoDumpLink"; diff --git a/app/client/packages/rts/src/middlewares/socket-auth.ts b/app/client/packages/rts/src/middlewares/socket-auth.ts deleted file mode 100644 index 353a908f9c18..000000000000 --- a/app/client/packages/rts/src/middlewares/socket-auth.ts +++ /dev/null @@ -1,74 +0,0 @@ -import type { Socket } from "socket.io"; -import log from "loglevel"; -import axios from "axios"; - -import { BASE_APPSMITH_API_URL } from "@constants/routes"; - -export async function tryAuth(socket: Socket) { - /* ********************************************************* */ - // TODO: This change is not being used at the moment. Instead of using the environment variable API_BASE_URL - // we should be able to derive the API_BASE_URL from the host header. This will make configuration simpler - // for the user. 
The problem with this implementation is that Axios doesn't work for https endpoints currently. - // This needs to be debugged. - /* ********************************************************* */ - - // const host = socket.handshake.headers.host; - const connectionCookie = socket?.handshake?.headers?.cookie; - - if ( - connectionCookie === undefined || - connectionCookie === null || - connectionCookie === "" - ) { - return false; - } - - const matchedCookie = connectionCookie.match(/\bSESSION=\S+/); - - if (!matchedCookie) { - return false; - } - - const sessionCookie = matchedCookie[0]; - let response; - - try { - response = await axios.request({ - method: "GET", - url: BASE_APPSMITH_API_URL + "/users/me", - headers: { - Cookie: sessionCookie, - }, - }); - } catch (error) { - if (error.response?.status === 401) { - // eslint-disable-next-line no-console - console.info( - "401 received when authenticating user with cookie: " + sessionCookie, - ); - } else if (error.response) { - log.error( - "Error response received while authentication: ", - JSON.stringify(error.response.data), // this is so the message shows up in one line. - ); - } else { - log.error("Error authenticating", error.cause?.toString()); - } - - return false; - } - - const email = response?.data?.data?.email; - const name = response?.data?.data?.name ?? 
email; - - // If the session check API succeeds & the email/name is anonymousUser, then the user is not authenticated - // and we should not allow them to join any rooms - if (email == null || email === "anonymousUser" || name === "anonymousUser") { - return false; - } - - socket.data.email = email; - socket.data.name = name; - - return true; -} diff --git a/app/client/packages/rts/src/server.ts b/app/client/packages/rts/src/server.ts index 98c4fdac3418..665fc6ae69c2 100644 --- a/app/client/packages/rts/src/server.ts +++ b/app/client/packages/rts/src/server.ts @@ -1,18 +1,16 @@ import "./instrumentation"; import http from "http"; import express from "express"; -import { Server } from "socket.io"; import type { LogLevelDesc } from "loglevel"; import log from "loglevel"; import { VERSION as buildVersion } from "./version"; // release version of the api -import { initializeSockets } from "./sockets"; // routes import ast_routes from "./routes/ast_routes"; import dsl_routes from "./routes/dsl_routes"; import health_check_routes from "./routes/health_check_routes"; -import { RTS_BASE_PATH, RTS_BASE_API_PATH } from "@constants/routes"; +import { RTS_BASE_API_PATH } from "@constants/routes"; // Setting the logLevel for all log messages const logLevel: LogLevelDesc = (process.env.APPSMITH_LOG_LEVEL || @@ -27,11 +25,6 @@ const app = express(); app.disable("x-powered-by"); const server = new http.Server(app); -const io = new Server(server, { - path: RTS_BASE_PATH, -}); - -initializeSockets(io); // parse incoming json requests app.use(express.json({ limit: "5mb" })); diff --git a/app/client/packages/rts/src/sockets/events.ts b/app/client/packages/rts/src/sockets/events.ts deleted file mode 100644 index f1eb0a961cf0..000000000000 --- a/app/client/packages/rts/src/sockets/events.ts +++ /dev/null @@ -1,82 +0,0 @@ -import type { Server, Socket } from "socket.io"; -import log from "loglevel"; -import { - APP_ROOM_PREFIX, - RELEASE_VERSION_EVENT_NAME, - LEAVE_EDIT_EVENT_NAME, - 
PAGE_EDIT_NAMESPACE, - PAGE_ROOM_PREFIX, - ROOT_NAMESPACE, - PAGE_VISIBILITY_EVENT_NAME, -} from "@constants/socket"; -import { VERSION as buildVersion } from "../version"; -import { - subscribeToEditEvents, - onAppSocketConnected, - onPageSocketConnected, - sendCurrentUsers, -} from "@controllers/socket"; - -export function watchEvents(io: Server) { - io.on("connection", (socket: Socket) => { - socket.emit(RELEASE_VERSION_EVENT_NAME, buildVersion); - subscribeToEditEvents(socket, APP_ROOM_PREFIX); - onAppSocketConnected(socket).catch((error) => - log.error("Error in socket connected handler", error), - ); - }); - /** When we get the page visibility event, it means the page/tab - * is visible on the client after navigating away from it. - * We will respond back with the current version to - * so that the client can confirm if they are - * on the latest version of the client - */ - io.on(PAGE_VISIBILITY_EVENT_NAME, (socket: Socket) => { - socket.emit(RELEASE_VERSION_EVENT_NAME, buildVersion); - }); - - io.of(PAGE_EDIT_NAMESPACE).on("connection", (socket: Socket) => { - subscribeToEditEvents(socket, PAGE_ROOM_PREFIX); - onPageSocketConnected(socket, io).catch((error) => - log.error("Error in socket connected handler", error), - ); - }); - - io.of(ROOT_NAMESPACE).adapter.on("leave-room", (room, id) => { - if (room.startsWith(APP_ROOM_PREFIX)) { - log.debug(`ns:${ROOT_NAMESPACE}# socket ${id} left the room ${room}`); - } - - sendCurrentUsers(io, room, APP_ROOM_PREFIX); - }); - - io.of(ROOT_NAMESPACE).adapter.on("join-room", (room, id) => { - if (room.startsWith(APP_ROOM_PREFIX)) { - log.debug(`ns:${ROOT_NAMESPACE}# socket ${id} joined the room ${room}`); - } - - sendCurrentUsers(io, room, APP_ROOM_PREFIX); - }); - - io.of(PAGE_EDIT_NAMESPACE).adapter.on("leave-room", (room, id) => { - if (room.startsWith(PAGE_ROOM_PREFIX)) { - // someone left the page edit, notify others - log.debug( - `ns:${PAGE_EDIT_NAMESPACE} # socket ${id} left the room ${room}`, - ); - 
io.of(PAGE_EDIT_NAMESPACE).to(room).emit(LEAVE_EDIT_EVENT_NAME, id); - } - - sendCurrentUsers(io.of(PAGE_EDIT_NAMESPACE), room, PAGE_ROOM_PREFIX); - }); - - io.of(PAGE_EDIT_NAMESPACE).adapter.on("join-room", (room, id) => { - if (room.startsWith(PAGE_ROOM_PREFIX)) { - log.debug( - `ns:${PAGE_EDIT_NAMESPACE}# socket ${id} joined the room ${room}`, - ); - } - - sendCurrentUsers(io.of(PAGE_EDIT_NAMESPACE), room, PAGE_ROOM_PREFIX); - }); -} diff --git a/app/client/packages/rts/src/sockets/index.ts b/app/client/packages/rts/src/sockets/index.ts deleted file mode 100644 index 6c5c9d026025..000000000000 --- a/app/client/packages/rts/src/sockets/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { watchEvents } from "./events"; -import type { Server } from "socket.io"; - -// Initializing Multiple Sockets -export function initializeSockets(io: Server) { - watchEvents(io); -} diff --git a/app/client/packages/rts/src/utils/models.ts b/app/client/packages/rts/src/utils/models.ts deleted file mode 100644 index 1cfc04127720..000000000000 --- a/app/client/packages/rts/src/utils/models.ts +++ /dev/null @@ -1,32 +0,0 @@ -export class AppUser { - name: string; - email: string; - - constructor(name: string, email: string) { - this.name = name; - this.email = email; - } -} - -export class CurrentEditorsEvent { - resourceId: string; - users: AppUser[]; - - constructor(resourceId: string, users: AppUser[]) { - this.resourceId = resourceId; - this.users = users; - } -} - -export class MousePointerEvent { - pageId: string; - socketId: string; - user: AppUser; - data: object; -} - -export interface Policy { - permission: string; - users: string[]; - groups: string[]; -} diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSBaseInputWidget/constants.ts b/app/client/src/modules/ui-builder/ui/wds/WDSBaseInputWidget/constants.ts index 945c8e0b646e..c06a033f2778 100644 --- a/app/client/src/modules/ui-builder/ui/wds/WDSBaseInputWidget/constants.ts +++ 
b/app/client/src/modules/ui-builder/ui/wds/WDSBaseInputWidget/constants.ts @@ -6,6 +6,7 @@ export const INPUT_TYPES = { PASSWORD: "PASSWORD", PHONE_NUMBER: "PHONE_NUMBER", MULTI_LINE_TEXT: "MULTI_LINE_TEXT", + DATE: "DATE", } as const; export const INPUT_TYPE_TO_WIDGET_TYPE_MAP = { @@ -16,4 +17,5 @@ export const INPUT_TYPE_TO_WIDGET_TYPE_MAP = { [INPUT_TYPES.MULTI_LINE_TEXT]: "WDS_MULTILINE_INPUT_WIDGET", [INPUT_TYPES.CURRENCY]: "WDS_CURRENCY_INPUT_WIDGET", [INPUT_TYPES.PHONE_NUMBER]: "WDS_PHONE_INPUT_WIDGET", + [INPUT_TYPES.DATE]: "WDS_DATEPICKER_WIDGET", }; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSCurrencyInputWidget/config/defaultsConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSCurrencyInputWidget/config/defaultsConfig.ts index 1efd682612b6..c7309a059207 100644 --- a/app/client/src/modules/ui-builder/ui/wds/WDSCurrencyInputWidget/config/defaultsConfig.ts +++ b/app/client/src/modules/ui-builder/ui/wds/WDSCurrencyInputWidget/config/defaultsConfig.ts @@ -1,4 +1,7 @@ -import { WDSBaseInputWidget } from "modules/ui-builder/ui/wds/WDSBaseInputWidget"; +import { + INPUT_TYPES, + WDSBaseInputWidget, +} from "modules/ui-builder/ui/wds/WDSBaseInputWidget"; import { ResponsiveBehavior } from "layoutSystems/common/utils/constants"; import type { WidgetDefaultProps } from "WidgetProvider/constants"; @@ -14,4 +17,5 @@ export const defaultsConfig = { showStepArrows: false, label: "Current Price", responsiveBehavior: ResponsiveBehavior.Fill, + inputType: INPUT_TYPES.CURRENCY, } as WidgetDefaultProps; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/anvilConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/anvilConfig.ts new file mode 100644 index 000000000000..dc7fe21e103c --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/anvilConfig.ts @@ -0,0 +1,11 @@ +import type { AnvilConfig } from "WidgetProvider/constants"; + +export const anvilConfig: AnvilConfig = { 
+ isLargeWidget: false, + widgetSize: { + minWidth: { + base: "100%", + "180px": "sizing-30", + }, + }, +}; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/autocompleteConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/autocompleteConfig.ts new file mode 100644 index 000000000000..456e9ec25652 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/autocompleteConfig.ts @@ -0,0 +1,11 @@ +import { DefaultAutocompleteDefinitions } from "widgets/WidgetUtils"; + +export const autocompleteConfig = { + "!doc": + "Datepicker is used to capture the date and time from a user. It can be used to filter data base on the input date range as well as to capture personal information such as date of birth", + "!url": "https://docs.appsmith.com/widget-reference/datepicker", + isVisible: DefaultAutocompleteDefinitions.isVisible, + selectedDate: "string", + formattedDate: "string", + isDisabled: "bool", +}; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/defaultsConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/defaultsConfig.ts new file mode 100644 index 000000000000..d16c0029829d --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/defaultsConfig.ts @@ -0,0 +1,20 @@ +import { ResponsiveBehavior } from "layoutSystems/common/utils/constants"; +import type { WidgetDefaultProps } from "WidgetProvider/constants"; +import { INPUT_TYPES } from "modules/ui-builder/ui/wds/WDSBaseInputWidget"; + +export const defaultsConfig = { + animateLoading: true, + label: "Label", + dateFormat: "YYYY-MM-DD HH:mm", + defaultOptionValue: "", + isRequired: false, + isDisabled: false, + isVisible: true, + isInline: false, + widgetName: "DatePicker", + widgetType: "WDS_DATE_PICKER", + version: 1, + timePrecision: "day", + responsiveBehavior: ResponsiveBehavior.Fill, + inputType: INPUT_TYPES.DATE, +} as unknown as 
WidgetDefaultProps; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/index.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/index.ts new file mode 100644 index 000000000000..995925903b3f --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/index.ts @@ -0,0 +1,7 @@ +export * from "./propertyPaneConfig"; +export { metaConfig } from "./metaConfig"; +export { anvilConfig } from "./anvilConfig"; +export { defaultsConfig } from "./defaultsConfig"; +export { settersConfig } from "./settersConfig"; +export { methodsConfig } from "./methodsConfig"; +export { autocompleteConfig } from "./autocompleteConfig"; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/metaConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/metaConfig.ts new file mode 100644 index 000000000000..23147fb5b53a --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/metaConfig.ts @@ -0,0 +1,21 @@ +import { WIDGET_TAGS } from "constants/WidgetConstants"; + +export const metaConfig = { + name: "DatePicker", + tags: [WIDGET_TAGS.INPUTS], + needsMeta: true, + searchTags: [ + "datepicker", + "appointment", + "calendar", + "date", + "day", + "hour", + "meeting", + "moment", + "schedule", + "time", + "week", + "year", + ], +}; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/methodsConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/methodsConfig.ts new file mode 100644 index 000000000000..f2434d189307 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/methodsConfig.ts @@ -0,0 +1,6 @@ +import { DatePickerIcon, DatePickerThumbnail } from "appsmith-icons"; + +export const methodsConfig = { + IconCmp: DatePickerIcon, + ThumbnailCmp: DatePickerThumbnail, +}; diff --git 
a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/propertyPaneConfig/contentConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/propertyPaneConfig/contentConfig.ts new file mode 100644 index 000000000000..77a22697323a --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/propertyPaneConfig/contentConfig.ts @@ -0,0 +1,191 @@ +import { ValidationTypes } from "constants/WidgetValidation"; +import { DATE_FORMAT_OPTIONS } from "../../constants"; + +import { propertyPaneContentConfig as WdsInputWidgetPropertyPaneContentConfig } from "modules/ui-builder/ui/wds/WDSInputWidget/config/propertyPaneConfig/contentConfig"; +import type { PropertyPaneConfig } from "constants/PropertyControlConstants"; + +const inputTypeSectionConfig = WdsInputWidgetPropertyPaneContentConfig.find( + (config) => config.sectionName === "Type", +); + +export const propertyPaneContentConfig = [ + inputTypeSectionConfig, + { + sectionName: "Data", + children: [ + { + helpText: "Sets the format of the selected date", + propertyName: "dateFormat", + label: "Date format", + controlType: "DROP_DOWN", + isJSConvertible: true, + optionWidth: "340px", + options: DATE_FORMAT_OPTIONS, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.TEXT }, + hideSubText: true, + }, + { + propertyName: "defaultDate", + label: "Default Date", + helpText: + "Sets the default date of the widget. 
The date is updated if the default date changes", + controlType: "DATE_PICKER", + placeholderText: "Enter Default Date", + useValidationMessage: true, + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.DATE_ISO_STRING }, + }, + { + propertyName: "timePrecision", + label: "Time Precision", + controlType: "DROP_DOWN", + helpText: "Sets the time precision or hides the time picker.", + defaultValue: "day", + options: [ + { + label: "Day", + value: "day", + }, + { + label: "Hour", + value: "hour", + }, + { + label: "Minute", + value: "minute", + }, + { + label: "Second", + value: "second", + }, + ], + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { + type: ValidationTypes.TEXT, + params: { + allowedValues: ["day", "hour", "minute", "second"], + default: "day", + }, + }, + }, + ], + }, + { + sectionName: "Label", + children: [ + { + helpText: "Sets the label text of the date picker widget", + propertyName: "label", + label: "Text", + controlType: "INPUT_TEXT", + placeholderText: "Label", + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.TEXT }, + }, + ], + }, + { + sectionName: "Validations", + children: [ + { + propertyName: "isRequired", + label: "Required", + helpText: "Makes input to the widget mandatory", + controlType: "SWITCH", + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.BOOLEAN }, + }, + { + propertyName: "minDate", + label: "Minimum Date", + helpText: "Sets the minimum date that can be selected", + controlType: "DATE_PICKER", + placeholderText: "Enter Minimum Date", + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.DATE_ISO_STRING }, + }, + { + propertyName: "maxDate", + label: "Maximum Date", + helpText: "Sets the maximum date that can be selected", + controlType: "DATE_PICKER", + 
placeholderText: "Enter Maximum Date", + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.DATE_ISO_STRING }, + }, + ], + }, + { + sectionName: "General", + children: [ + { + helpText: "Shows help text or details about the current input", + propertyName: "labelTooltip", + label: "Tooltip", + controlType: "INPUT_TEXT", + placeholderText: "", + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.TEXT }, + }, + { + helpText: "Controls the visibility of the widget", + propertyName: "isVisible", + label: "Visible", + controlType: "SWITCH", + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.BOOLEAN }, + }, + { + propertyName: "isDisabled", + label: "Disabled", + helpText: "Disables input to this widget", + controlType: "SWITCH", + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.BOOLEAN }, + }, + { + propertyName: "animateLoading", + label: "Animate loading", + controlType: "SWITCH", + helpText: "Controls the loading of the widget", + defaultValue: true, + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: false, + validation: { type: ValidationTypes.BOOLEAN }, + }, + ], + }, + { + sectionName: "Events", + children: [ + { + propertyName: "onDateSelected", + label: "onDateSelected", + helpText: "when a date is selected in the calendar", + controlType: "ACTION_SELECTOR", + isJSConvertible: true, + isBindProperty: true, + isTriggerProperty: true, + }, + ], + }, +] as PropertyPaneConfig[]; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/propertyPaneConfig/index.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/propertyPaneConfig/index.ts new file mode 100644 index 000000000000..7f43d3bde57a --- /dev/null +++ 
b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/propertyPaneConfig/index.ts @@ -0,0 +1 @@ +export { propertyPaneContentConfig } from "./contentConfig"; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/settersConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/settersConfig.ts new file mode 100644 index 000000000000..888f2f9bcb70 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/config/settersConfig.ts @@ -0,0 +1,12 @@ +export const settersConfig = { + __setters: { + setVisibility: { + path: "isVisible", + type: "boolean", + }, + setDisabled: { + path: "isDisabled", + type: "boolean", + }, + }, +}; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/constants.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/constants.ts new file mode 100644 index 000000000000..3b53c462e713 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/constants.ts @@ -0,0 +1,88 @@ +import moment from "moment"; +import { SubTextPosition } from "components/constants"; + +export const DATE_FORMAT_OPTIONS = [ + { + label: moment().format("YYYY-MM-DDTHH:mm:ss.sssZ"), + subText: "ISO 8601", + value: "YYYY-MM-DDTHH:mm:ss.sssZ", + }, + { + label: moment().format("LLL"), + subText: "LLL", + value: "LLL", + }, + { + label: moment().format("LL"), + subText: "LL", + value: "LL", + }, + { + label: moment().format("YYYY-MM-DD HH:mm"), + subText: "YYYY-MM-DD HH:mm", + value: "YYYY-MM-DD HH:mm", + }, + { + label: moment().format("YYYY-MM-DDTHH:mm:ss"), + subText: "YYYY-MM-DDTHH:mm:ss", + value: "YYYY-MM-DDTHH:mm:ss", + }, + { + label: moment().format("YYYY-MM-DD hh:mm:ss A"), + subText: "YYYY-MM-DD hh:mm:ss A", + value: "YYYY-MM-DD hh:mm:ss A", + }, + { + label: moment().format("DD/MM/YYYY HH:mm"), + subText: "DD/MM/YYYY HH:mm", + value: "DD/MM/YYYY HH:mm", + }, + { + label: moment().format("D MMMM, YYYY"), + subText: "D MMMM, 
YYYY", + value: "D MMMM, YYYY", + }, + { + label: moment().format("H:mm A D MMMM, YYYY"), + subText: "H:mm A D MMMM, YYYY", + value: "H:mm A D MMMM, YYYY", + }, + { + label: moment().format("YYYY-MM-DD"), + subText: "YYYY-MM-DD", + value: "YYYY-MM-DD", + }, + { + label: moment().format("MM-DD-YYYY"), + subText: "MM-DD-YYYY", + value: "MM-DD-YYYY", + }, + { + label: moment().format("DD-MM-YYYY"), + subText: "DD-MM-YYYY", + value: "DD-MM-YYYY", + }, + { + label: moment().format("MM/DD/YYYY"), + subText: "MM/DD/YYYY", + value: "MM/DD/YYYY", + }, + { + label: moment().format("DD/MM/YYYY"), + subText: "DD/MM/YYYY", + value: "DD/MM/YYYY", + }, + { + label: moment().format("DD/MM/YY"), + subText: "DD/MM/YY", + value: "DD/MM/YY", + }, + { + label: moment().format("MM/DD/YY"), + subText: "MM/DD/YY", + value: "MM/DD/YY", + }, +].map((x) => ({ + ...x, + subTextPosition: SubTextPosition.BOTTOM, +})); diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/index.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/index.ts new file mode 100644 index 000000000000..8c35d8ef1789 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/index.ts @@ -0,0 +1,3 @@ +import { WDSDatePickerWidget } from "./widget"; + +export { WDSDatePickerWidget }; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/derived.js b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/derived.js new file mode 100644 index 000000000000..7e4be5805db2 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/derived.js @@ -0,0 +1,36 @@ +export default { + isValid: (props, moment) => { + const parsedMinDate = new Date(props.minDate); + const parsedMaxDate = new Date(props.maxDate); + const parsedSelectedDate = props.selectedDate + ? 
moment(new Date(props.selectedDate)) + : null; + + // only do validation when the date is dirty + if (!props.isDirty) { + return true; + } + + if (!parsedSelectedDate && !props.isRequired) { + return true; + } + + if (!parsedSelectedDate && props.isRequired) { + return false; + } + + if (props.minDate && props.maxDate) { + return parsedSelectedDate.isBetween(parsedMinDate, parsedMaxDate); + } + + if (props.minDate) { + return parsedSelectedDate.isAfter(parsedMinDate); + } + + if (props.maxDate) { + return parsedSelectedDate.isBefore(parsedMaxDate); + } + + return true; + }, +}; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/derived.test.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/derived.test.ts new file mode 100644 index 000000000000..48be8c2041ba --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/derived.test.ts @@ -0,0 +1,64 @@ +import moment from "moment"; +import derived from "./derived"; + +describe("isValid function", () => { + const mockMoment = (date: string) => moment(date); + + it("should return true when isDirty is false", () => { + const props = { isDirty: false }; + + expect(derived.isValid(props, mockMoment)).toBe(true); + }); + + it("should return true when selectedDate is null and not required", () => { + const props = { isDirty: true, isRequired: false, selectedDate: null }; + + expect(derived.isValid(props, mockMoment)).toBe(true); + }); + + it("should return false when selectedDate is null and required", () => { + const props = { isDirty: true, isRequired: true, selectedDate: null }; + + expect(derived.isValid(props, mockMoment)).toBe(false); + }); + + it("should return true when selectedDate is between minDate and maxDate", () => { + const props = { + isDirty: true, + minDate: "2023-01-01", + maxDate: "2023-12-31", + selectedDate: "2023-06-15", + }; + + expect(derived.isValid(props, mockMoment)).toBe(true); + }); + + it("should return false when 
selectedDate is before minDate", () => { + const props = { + isDirty: true, + minDate: "2023-01-01", + selectedDate: "2022-12-31", + }; + + expect(derived.isValid(props, mockMoment)).toBe(false); + }); + + it("should return false when selectedDate is after maxDate", () => { + const props = { + isDirty: true, + maxDate: "2023-12-31", + selectedDate: "2024-01-01", + }; + + expect(derived.isValid(props, mockMoment)).toBe(false); + }); + + it("should return true when selectedDate is valid and no min/max dates are set", () => { + const props = { + isDirty: true, + selectedDate: "2023-06-15", + }; + + expect(derived.isValid(props, mockMoment)).toBe(true); + }); +}); diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/helpers.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/helpers.ts new file mode 100644 index 000000000000..a08a462ac33c --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/helpers.ts @@ -0,0 +1,15 @@ +import type { WDSDatePickerWidgetProps } from "./types"; + +export function validateInput(props: WDSDatePickerWidgetProps) { + if (props.isValid === false) { + return { + validationStatus: "invalid", + errorMessage: "Please select a valid date", + }; + } + + return { + validationStatus: "valid", + errorMessage: "", + }; +} diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/index.tsx b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/index.tsx new file mode 100644 index 000000000000..e36e2a587039 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/index.tsx @@ -0,0 +1,149 @@ +import React from "react"; +import moment from "moment"; +import BaseWidget from "widgets/BaseWidget"; +import type { WidgetState } from "widgets/BaseWidget"; +import type { + AnvilConfig, + AutocompletionDefinitions, +} from "WidgetProvider/constants"; +import { parseDateTime } from "@internationalized/date"; +import 
{ DatePicker, type DateValue } from "@appsmith/wds"; +import { EventType } from "constants/AppsmithActionConstants/ActionConstants"; + +import * as config from "../config"; +import { validateInput } from "./helpers"; +import derivedPropertyFns from "./derived"; +import type { WDSDatePickerWidgetProps } from "./types"; +import { parseDerivedProperties } from "widgets/WidgetUtils"; + +class WDSDatePickerWidget extends BaseWidget< + WDSDatePickerWidgetProps, + WidgetState +> { + static type = "WDS_DATEPICKER_WIDGET"; + + static getConfig() { + return config.metaConfig; + } + + static getDefaults() { + return config.defaultsConfig; + } + + static getMethods() { + return config.methodsConfig; + } + + static getAnvilConfig(): AnvilConfig | null { + return config.anvilConfig; + } + + static getAutocompleteDefinitions(): AutocompletionDefinitions { + return config.autocompleteConfig; + } + + static getPropertyPaneContentConfig() { + return config.propertyPaneContentConfig; + } + + static getPropertyPaneStyleConfig() { + return []; + } + + static getDerivedPropertiesMap() { + const parsedDerivedProperties = parseDerivedProperties(derivedPropertyFns); + + return { + isValid: `{{(() => {${parsedDerivedProperties.isValid}})()}}`, + selectedDate: `{{ this.value ? moment(this.value).toISOString() : "" }}`, + formattedDate: `{{ this.value ? 
moment(this.value).format(this.dateFormat) : "" }}`, + }; + } + + static getDefaultPropertiesMap(): Record { + return { + value: "defaultDate", + }; + } + + static getMetaPropertiesMap() { + return { + value: undefined, + isDirty: false, + }; + } + + static getStylesheetConfig() { + return {}; + } + + static getSetterConfig() { + return config.settersConfig; + } + + static getDependencyMap() { + return {}; + } + + componentDidUpdate(prevProps: WDSDatePickerWidgetProps): void { + if (!this.shouldResetDirtyState(prevProps)) { + return; + } + + this.resetDirtyState(); + } + + handleDateChange = (date: DateValue) => { + if (!this.props.isDirty) { + this.props.updateWidgetMetaProperty("isDirty", true); + } + + this.props.updateWidgetMetaProperty("value", date.toString(), { + triggerPropertyName: "onDateSelected", + dynamicString: this.props.onDateSelected, + event: { + type: EventType.ON_DATE_SELECTED, + }, + }); + }; + + private shouldResetDirtyState(prevProps: WDSDatePickerWidgetProps): boolean { + const { defaultDate, isDirty } = this.props; + const hasDefaultDateChanged = defaultDate !== prevProps.defaultDate; + + return hasDefaultDateChanged && isDirty; + } + + private resetDirtyState() { + this.props.updateWidgetMetaProperty("isDirty", false); + } + + private parseDate(date: string | undefined) { + return date + ? 
parseDateTime(moment(date).format("YYYY-MM-DDTHH:mm:ss")) + : undefined; + } + + getWidgetView() { + const { label, labelTooltip, maxDate, minDate, value, ...rest } = + this.props; + const { errorMessage, validationStatus } = validateInput(this.props); + + return ( + + ); + } +} + +export { WDSDatePickerWidget }; diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/types.ts b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/types.ts new file mode 100644 index 000000000000..1fa58c814588 --- /dev/null +++ b/app/client/src/modules/ui-builder/ui/wds/WDSDatePickerWidget/widget/types.ts @@ -0,0 +1,11 @@ +import type { WidgetProps } from "widgets/BaseWidget"; + +export interface WDSDatePickerWidgetProps extends WidgetProps { + selectedDate: string; + defaultDate: string; + onDateSelected: string; + isRequired?: boolean; + isDisabled?: boolean; + label: string; + labelTooltip?: string; +} diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSInputWidget/config/propertyPaneConfig/contentConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSInputWidget/config/propertyPaneConfig/contentConfig.ts index 175acb771c0a..e40aa0007b0e 100644 --- a/app/client/src/modules/ui-builder/ui/wds/WDSInputWidget/config/propertyPaneConfig/contentConfig.ts +++ b/app/client/src/modules/ui-builder/ui/wds/WDSInputWidget/config/propertyPaneConfig/contentConfig.ts @@ -48,6 +48,10 @@ export const propertyPaneContentConfig = [ label: "Currency", value: "CURRENCY", }, + { + label: "Date", + value: "DATE", + }, ], isBindProperty: false, isTriggerProperty: false, diff --git a/app/client/src/modules/ui-builder/ui/wds/WDSPhoneInputWidget/config/defaultsConfig.ts b/app/client/src/modules/ui-builder/ui/wds/WDSPhoneInputWidget/config/defaultsConfig.ts index 1d85047574a5..9edf9bee89b1 100644 --- a/app/client/src/modules/ui-builder/ui/wds/WDSPhoneInputWidget/config/defaultsConfig.ts +++ 
b/app/client/src/modules/ui-builder/ui/wds/WDSPhoneInputWidget/config/defaultsConfig.ts @@ -1,4 +1,7 @@ -import { WDSBaseInputWidget } from "modules/ui-builder/ui/wds/WDSBaseInputWidget"; +import { + INPUT_TYPES, + WDSBaseInputWidget, +} from "modules/ui-builder/ui/wds/WDSBaseInputWidget"; import { ResponsiveBehavior } from "layoutSystems/common/utils/constants"; import type { WidgetDefaultProps } from "WidgetProvider/constants"; @@ -13,4 +16,5 @@ export const defaultsConfig = { allowFormatting: true, responsiveBehavior: ResponsiveBehavior.Fill, label: "Phone number", + inputType: INPUT_TYPES.PHONE_NUMBER, } as WidgetDefaultProps; diff --git a/app/client/src/modules/ui-builder/ui/wds/constants.ts b/app/client/src/modules/ui-builder/ui/wds/constants.ts index 8f897732e6b0..9452b5af8e30 100644 --- a/app/client/src/modules/ui-builder/ui/wds/constants.ts +++ b/app/client/src/modules/ui-builder/ui/wds/constants.ts @@ -61,6 +61,7 @@ export const WDS_V2_WIDGET_MAP = { MULTILINE_INPUT_WIDGET: "WDS_MULTILINE_INPUT_WIDGET", WDS_SELECT_WIDGET: "WDS_SELECT_WIDGET", WDS_COMBOBOX_WIDGET: "WDS_COMBOBOX_WIDGET", + WDS_DATEPICKER_WIDGET: "WDS_DATEPICKER_WIDGET", // Anvil layout widgets ZONE_WIDGET: anvilWidgets.ZONE_WIDGET, diff --git a/app/client/src/widgets/WidgetUtils.ts b/app/client/src/widgets/WidgetUtils.ts index 5184b0c73d2a..286d5eeaafa1 100644 --- a/app/client/src/widgets/WidgetUtils.ts +++ b/app/client/src/widgets/WidgetUtils.ts @@ -989,3 +989,48 @@ export const checkForOnClick = (e: React.MouseEvent) => { return false; }; + +/** + * Parses the derived properties from the given property functions. Used in getDerivedPropertiesMap + * + * @example + * ```js + * { + * isValidDate: (props, moment, _) => { + * return props.value === 1; + * } + * ``` + * + * It will return + * ```js + * { + * isValidDate: "{{ this.value === 1 }}" + * } + * ``` + * + * Main rule to remember is don't deconstruct the props like `const { value } = props;` in the derived property function. 
+ * Directly access props like `props.value` + */ +export function parseDerivedProperties(propertyFns: Record) { + const derivedProperties: Record = {}; + + for (const [key, value] of Object.entries(propertyFns)) { + if (typeof value === "function") { + const functionString = value.toString(); + const functionBody = functionString.match(/(?<=\{)(.|\n)*(?=\})/)?.[0]; + + if (functionBody) { + const paramMatch = functionString.match(/\((.*?),/); + const propsParam = paramMatch ? paramMatch[1].trim() : "props"; + + const modifiedBody = functionBody + .trim() + .replace(new RegExp(`${propsParam}\\.`, "g"), "this."); + + derivedProperties[key] = modifiedBody; + } + } + } + + return derivedProperties; +} diff --git a/app/client/src/widgets/index.ts b/app/client/src/widgets/index.ts index c1e4cb2e52ca..c9e25d732745 100644 --- a/app/client/src/widgets/index.ts +++ b/app/client/src/widgets/index.ts @@ -88,6 +88,7 @@ import { WDSNumberInputWidget } from "modules/ui-builder/ui/wds/WDSNumberInputWi import { WDSMultilineInputWidget } from "modules/ui-builder/ui/wds/WDSMultilineInputWidget"; import { WDSSelectWidget } from "modules/ui-builder/ui/wds/WDSSelectWidget"; import { EEWDSWidgets } from "ee/modules/ui-builder/ui/wds"; +import { WDSDatePickerWidget } from "modules/ui-builder/ui/wds/WDSDatePickerWidget"; const LegacyWidgets = [ CanvasWidget, @@ -185,6 +186,7 @@ const WDSWidgets = [ WDSNumberInputWidget, WDSMultilineInputWidget, WDSSelectWidget, + WDSDatePickerWidget, ]; const Widgets = [ diff --git a/app/client/start-https.sh b/app/client/start-https.sh index 8301a1369cbe..b2e6599292f1 100755 --- a/app/client/start-https.sh +++ b/app/client/start-https.sh @@ -86,14 +86,6 @@ if [[ ${backend-} == release ]]; then backend=https://release.app.appsmith.com fi -if [[ ${backend-} == *.appsmith.com ]]; then - # If running client against release, we get the release's version and set it up, so we don't see version mismatches. 
- APPSMITH_VERSION_ID="$( - curl -sS "$backend/info" | grep -Eo '"version": ".+?"' | cut -d\" -f4 - )" - export APPSMITH_VERSION_ID -fi - if [[ -z ${run_as-} ]]; then if type nginx; then run_as=nginx @@ -167,6 +159,14 @@ if [[ $backend =~ /$ ]]; then exit 1 fi +if [[ -n $backend ]]; then + # Try to get a version from the "backend". If it's a full container, not just backend, then it'll give us a version. + APPSMITH_VERSION_ID="$( + curl -vsS "${backend/host.docker.internal/localhost}/info" | grep -Eo '"version": ".+?"' | cut -d\" -f4 || true + )" + export APPSMITH_VERSION_ID +fi + if [[ -n ${env_file-} && ! -f $env_file ]]; then echo "I got --env-file as '$env_file', but I cannot access it." >&2 exit 1 diff --git a/app/client/yarn.lock b/app/client/yarn.lock index 385ff53d11d0..21e7ee0d476c 100644 --- a/app/client/yarn.lock +++ b/app/client/yarn.lock @@ -9040,13 +9040,6 @@ __metadata: languageName: node linkType: hard -"@socket.io/component-emitter@npm:~3.1.0": - version: 3.1.0 - resolution: "@socket.io/component-emitter@npm:3.1.0" - checksum: db069d95425b419de1514dffe945cc439795f6a8ef5b9465715acf5b8b50798e2c91b8719cbf5434b3fe7de179d6cdcd503c277b7871cb3dd03febb69bdd50fa - languageName: node - linkType: hard - "@storybook/addon-a11y@npm:^8.2.7": version: 8.2.7 resolution: "@storybook/addon-a11y@npm:8.2.7" @@ -10204,22 +10197,13 @@ __metadata: languageName: node linkType: hard -"@types/cookie@npm:^0.4.0, @types/cookie@npm:^0.4.1": +"@types/cookie@npm:^0.4.0": version: 0.4.1 resolution: "@types/cookie@npm:0.4.1" checksum: 3275534ed69a76c68eb1a77d547d75f99fedc80befb75a3d1d03662fb08d697e6f8b1274e12af1a74c6896071b11510631ba891f64d30c78528d0ec45a9c1a18 languageName: node linkType: hard -"@types/cors@npm:^2.8.12": - version: 2.8.13 - resolution: "@types/cors@npm:2.8.13" - dependencies: - "@types/node": "*" - checksum: 7ef197ea19d2e5bf1313b8416baa6f3fd6dd887fd70191da1f804f557395357dafd8bc8bed0ac60686923406489262a7c8a525b55748f7b2b8afa686700de907 - languageName: node - 
linkType: hard - "@types/cross-spawn@npm:^6.0.2": version: 6.0.2 resolution: "@types/cross-spawn@npm:6.0.2" @@ -10730,7 +10714,7 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:>=10.0.0, @types/node@npm:>=13.7.0": +"@types/node@npm:*, @types/node@npm:>=13.7.0": version: 20.8.10 resolution: "@types/node@npm:20.8.10" dependencies: @@ -12693,7 +12677,6 @@ __metadata: mongodb: ^5.8.0 nodemailer: 6.9.9 readline-sync: 1.4.10 - socket.io: ^4.6.2 supertest: ^6.3.3 ts-jest: 29.1.0 typescript: ^5.5.4 @@ -13827,13 +13810,6 @@ __metadata: languageName: node linkType: hard -"base64id@npm:2.0.0, base64id@npm:~2.0.0": - version: 2.0.0 - resolution: "base64id@npm:2.0.0" - checksum: 581b1d37e6cf3738b7ccdd4d14fe2bfc5c238e696e2720ee6c44c183b838655842e22034e53ffd783f872a539915c51b0d4728a49c7cc678ac5a758e00d62168 - languageName: node - linkType: hard - "batch@npm:0.6.1": version: 0.6.1 resolution: "batch@npm:0.6.1" @@ -15438,7 +15414,7 @@ __metadata: languageName: node linkType: hard -"cookie@npm:^0.4.1, cookie@npm:~0.4.1": +"cookie@npm:^0.4.1": version: 0.4.2 resolution: "cookie@npm:0.4.2" checksum: a00833c998bedf8e787b4c342defe5fa419abd96b32f4464f718b91022586b8f1bafbddd499288e75c037642493c83083da426c6a9080d309e3bd90fd11baa9b @@ -15498,16 +15474,6 @@ __metadata: languageName: node linkType: hard -"cors@npm:~2.8.5": - version: 2.8.5 - resolution: "cors@npm:2.8.5" - dependencies: - object-assign: ^4 - vary: ^1 - checksum: ced838404ccd184f61ab4fdc5847035b681c90db7ac17e428f3d81d69e2989d2b680cc254da0e2554f5ed4f8a341820a1ce3d1c16b499f6e2f47a1b9b07b5006 - languageName: node - linkType: hard - "cosmiconfig-typescript-loader@npm:^1.0.0": version: 1.0.9 resolution: "cosmiconfig-typescript-loader@npm:1.0.9" @@ -16384,7 +16350,7 @@ __metadata: languageName: node linkType: hard -"debug@npm:4, debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.0, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4, debug@npm:~4.3.1, 
debug@npm:~4.3.2": +"debug@npm:4, debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.0, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4": version: 4.3.7 resolution: "debug@npm:4.3.7" dependencies: @@ -17397,31 +17363,6 @@ __metadata: languageName: node linkType: hard -"engine.io-parser@npm:~5.2.1": - version: 5.2.2 - resolution: "engine.io-parser@npm:5.2.2" - checksum: 470231215f3136a9259efb1268bc9a71f789af4e8c74da8d3b49ceb149fe3cd5c315bf0cd13d2d8d9c8f0f051c6f93b68e8fa9c89a3b612b9217bf33765c943a - languageName: node - linkType: hard - -"engine.io@npm:~6.5.2": - version: 6.5.5 - resolution: "engine.io@npm:6.5.5" - dependencies: - "@types/cookie": ^0.4.1 - "@types/cors": ^2.8.12 - "@types/node": ">=10.0.0" - accepts: ~1.3.4 - base64id: 2.0.0 - cookie: ~0.4.1 - cors: ~2.8.5 - debug: ~4.3.1 - engine.io-parser: ~5.2.1 - ws: ~8.17.1 - checksum: 358d337dd007b81cd6d7f39d0161ec8ec3a86097f0fbb0e10240eace51f836741f93c3e6bd69322b9ce0ad0fd89253a41e09335b6eb412d13e5357a054a90c4a - languageName: node - linkType: hard - "enhanced-resolve@npm:^2.2.2": version: 2.3.0 resolution: "enhanced-resolve@npm:2.3.0" @@ -25715,7 +25656,7 @@ __metadata: languageName: node linkType: hard -"object-assign@npm:^4, object-assign@npm:^4.0.1, object-assign@npm:^4.1.0, object-assign@npm:^4.1.1": +"object-assign@npm:^4.0.1, object-assign@npm:^4.1.0, object-assign@npm:^4.1.1": version: 4.1.1 resolution: "object-assign@npm:4.1.1" checksum: fcc6e4ea8c7fe48abfbb552578b1c53e0d194086e2e6bbbf59e0a536381a292f39943c6e9628af05b5528aa5e3318bb30d6b2e53cadaf5b8fe9e12c4b69af23f @@ -31107,40 +31048,6 @@ __metadata: languageName: node linkType: hard -"socket.io-adapter@npm:~2.5.2": - version: 2.5.2 - resolution: "socket.io-adapter@npm:2.5.2" - dependencies: - ws: ~8.11.0 - checksum: 481251c3547221e57eb5cb247d0b1a3cde4d152a4c1c9051cc887345a7770e59f3b47f1011cac4499e833f01fcfc301ed13c4ec6e72f7dbb48a476375a6344cd - languageName: node - linkType: hard - 
-"socket.io-parser@npm:~4.2.4": - version: 4.2.4 - resolution: "socket.io-parser@npm:4.2.4" - dependencies: - "@socket.io/component-emitter": ~3.1.0 - debug: ~4.3.1 - checksum: 61540ef99af33e6a562b9effe0fad769bcb7ec6a301aba5a64b3a8bccb611a0abdbe25f469933ab80072582006a78ca136bf0ad8adff9c77c9953581285e2263 - languageName: node - linkType: hard - -"socket.io@npm:^4.6.2": - version: 4.7.5 - resolution: "socket.io@npm:4.7.5" - dependencies: - accepts: ~1.3.4 - base64id: ~2.0.0 - cors: ~2.8.5 - debug: ~4.3.2 - engine.io: ~6.5.2 - socket.io-adapter: ~2.5.2 - socket.io-parser: ~4.2.4 - checksum: b8b57216152cf230bdcb77b5450e124ebe1fee7482eeb50a6ef760b69f2f5a064e9b8640ce9c1efc5c9e081f5d797d3f6ff3f81606e19ddaf5d4114aad9ec7d3 - languageName: node - linkType: hard - "sockjs@npm:^0.3.24": version: 0.3.24 resolution: "sockjs@npm:0.3.24" @@ -33729,7 +33636,7 @@ __metadata: languageName: node linkType: hard -"vary@npm:^1, vary@npm:~1.1.2": +"vary@npm:~1.1.2": version: 1.1.2 resolution: "vary@npm:1.1.2" checksum: ae0123222c6df65b437669d63dfa8c36cee20a504101b2fcd97b8bf76f91259c17f9f2b4d70a1e3c6bbcee7f51b28392833adb6b2770b23b01abec84e369660b @@ -34776,36 +34683,6 @@ __metadata: languageName: node linkType: hard -"ws@npm:~8.11.0": - version: 8.11.0 - resolution: "ws@npm:8.11.0" - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - checksum: 316b33aba32f317cd217df66dbfc5b281a2f09ff36815de222bc859e3424d83766d9eb2bd4d667de658b6ab7be151f258318fb1da812416b30be13103e5b5c67 - languageName: node - linkType: hard - -"ws@npm:~8.17.1": - version: 8.17.1 - resolution: "ws@npm:8.17.1" - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ">=5.0.2" - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - checksum: 442badcce1f1178ec87a0b5372ae2e9771e07c4929a3180321901f226127f252441e8689d765aa5cfba5f50ac60dd830954afc5aeae81609aefa11d3ddf5cecf - languageName: 
node - linkType: hard - "xlsx@https://cdn.sheetjs.com/xlsx-0.19.3/xlsx-0.19.3.tgz": version: 0.19.3 resolution: "xlsx@https://cdn.sheetjs.com/xlsx-0.19.3/xlsx-0.19.3.tgz" diff --git a/app/server/appsmith-git/src/main/java/com/appsmith/git/dto/CommitDTO.java b/app/server/appsmith-git/src/main/java/com/appsmith/git/dto/CommitDTO.java new file mode 100644 index 000000000000..2be153b05037 --- /dev/null +++ b/app/server/appsmith-git/src/main/java/com/appsmith/git/dto/CommitDTO.java @@ -0,0 +1,20 @@ +package com.appsmith.git.dto; + +import lombok.Data; + +/** + * TODO: scope for addition in case of native implementation + */ +@Data +public class CommitDTO { + + String message; + + String header; + + Boolean isAmendCommit; + + GitUser author; + + GitUser committer; +} diff --git a/app/server/appsmith-git/src/main/java/com/appsmith/git/dto/GitUser.java b/app/server/appsmith-git/src/main/java/com/appsmith/git/dto/GitUser.java new file mode 100644 index 000000000000..368cfb2c9bff --- /dev/null +++ b/app/server/appsmith-git/src/main/java/com/appsmith/git/dto/GitUser.java @@ -0,0 +1,28 @@ +package com.appsmith.git.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GitUser { + + /** + * name of the author/committer + */ + String name; + + /** + * email of the author/committer + */ + String email; + + /** + * TODO: To be converted to the Instant or a timestamp + */ + String timestamp; +} diff --git a/app/server/appsmith-git/src/main/java/com/appsmith/git/handler/FSGitHandlerImpl.java b/app/server/appsmith-git/src/main/java/com/appsmith/git/handler/FSGitHandlerImpl.java new file mode 100644 index 000000000000..a36aa4947ad3 --- /dev/null +++ b/app/server/appsmith-git/src/main/java/com/appsmith/git/handler/FSGitHandlerImpl.java @@ -0,0 +1,21 @@ +package com.appsmith.git.handler; + +import 
com.appsmith.external.configurations.git.GitConfig; +import com.appsmith.external.git.handler.FSGitHandler; +import com.appsmith.git.configurations.GitServiceConfig; +import com.appsmith.git.handler.ce.FSGitHandlerCEImpl; +import io.micrometer.observation.ObservationRegistry; +import lombok.extern.slf4j.Slf4j; +import org.springframework.context.annotation.Primary; +import org.springframework.stereotype.Component; + +@Slf4j +@Primary +@Component +public class FSGitHandlerImpl extends FSGitHandlerCEImpl implements FSGitHandler { + + public FSGitHandlerImpl( + GitServiceConfig gitServiceConfig, GitConfig gitConfig, ObservationRegistry observationRegistry) { + super(gitServiceConfig, gitConfig, observationRegistry); + } +} diff --git a/app/server/appsmith-git/src/main/java/com/appsmith/git/handler/ce/FSGitHandlerCEImpl.java b/app/server/appsmith-git/src/main/java/com/appsmith/git/handler/ce/FSGitHandlerCEImpl.java new file mode 100644 index 000000000000..bbaff5c949b5 --- /dev/null +++ b/app/server/appsmith-git/src/main/java/com/appsmith/git/handler/ce/FSGitHandlerCEImpl.java @@ -0,0 +1,1157 @@ +package com.appsmith.git.handler.ce; + +import com.appsmith.external.configurations.git.GitConfig; +import com.appsmith.external.constants.AnalyticsEvents; +import com.appsmith.external.constants.ErrorReferenceDocUrl; +import com.appsmith.external.dtos.GitBranchDTO; +import com.appsmith.external.dtos.GitLogDTO; +import com.appsmith.external.dtos.GitStatusDTO; +import com.appsmith.external.dtos.MergeStatusDTO; +import com.appsmith.external.git.constants.GitSpan; +import com.appsmith.external.git.handler.FSGitHandler; +import com.appsmith.external.helpers.Stopwatch; +import com.appsmith.git.configurations.GitServiceConfig; +import com.appsmith.git.constants.AppsmithBotAsset; +import com.appsmith.git.constants.CommonConstants; +import com.appsmith.git.constants.Constraint; +import com.appsmith.git.constants.GitDirectories; +import com.appsmith.git.helpers.RepositoryHelper; +import 
com.appsmith.git.helpers.SshTransportConfigCallback; +import com.appsmith.git.helpers.StopwatchHelpers; +import io.micrometer.observation.ObservationRegistry; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.eclipse.jgit.api.CreateBranchCommand; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.ListBranchCommand; +import org.eclipse.jgit.api.MergeCommand; +import org.eclipse.jgit.api.MergeResult; +import org.eclipse.jgit.api.RebaseCommand; +import org.eclipse.jgit.api.RebaseResult; +import org.eclipse.jgit.api.ResetCommand; +import org.eclipse.jgit.api.Status; +import org.eclipse.jgit.api.TransportConfigCallback; +import org.eclipse.jgit.api.errors.CheckoutConflictException; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.lib.BranchTrackingStatus; +import org.eclipse.jgit.lib.PersonIdent; +import org.eclipse.jgit.lib.Ref; +import org.eclipse.jgit.lib.StoredConfig; +import org.eclipse.jgit.merge.MergeStrategy; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.transport.RefSpec; +import org.eclipse.jgit.util.StringUtils; +import org.springframework.stereotype.Component; +import org.springframework.util.FileSystemUtils; +import reactor.core.observability.micrometer.Micrometer; +import reactor.core.publisher.Mono; +import reactor.core.scheduler.Scheduler; +import reactor.core.scheduler.Schedulers; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Duration; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static 
com.appsmith.external.git.constants.GitConstants.GitMetricConstants.CHECKOUT_REMOTE; +import static com.appsmith.external.git.constants.GitConstants.GitMetricConstants.HARD_RESET; +import static com.appsmith.git.constants.CommonConstants.FILE_MIGRATION_MESSAGE; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; + +@Slf4j +@Component +@RequiredArgsConstructor +public class FSGitHandlerCEImpl implements FSGitHandler { + + private final RepositoryHelper repositoryHelper = new RepositoryHelper(); + + private final GitServiceConfig gitServiceConfig; + private final GitConfig gitConfig; + + protected final ObservationRegistry observationRegistry; + + public static final DateTimeFormatter ISO_FORMATTER = + DateTimeFormatter.ISO_INSTANT.withZone(ZoneId.from(ZoneOffset.UTC)); + + private final Scheduler scheduler = Schedulers.boundedElastic(); + + private static final String SUCCESS_MERGE_STATUS = "This branch has no conflicts with the base branch."; + + /** + * This method will handle the git-commit functionality. Under the hood it checks if the repo has already been + * initialised and will be initialised if git repo is not present + * @param path parent path to repo + * @param commitMessage message which will be registered for this commit + * @param authorName author details + * @param authorEmail author details + * @param doAmend To amend with the previous commit + * @return if the commit was successful + */ + @Override + public Mono commitArtifact( + Path path, + String commitMessage, + String authorName, + String authorEmail, + boolean isSuffixedPath, + boolean doAmend) { + + final String finalAuthorName = + StringUtils.isEmptyOrNull(authorName) ? AppsmithBotAsset.APPSMITH_BOT_USERNAME : authorName; + final String finalAuthorEmail = + StringUtils.isEmptyOrNull(authorEmail) ? AppsmithBotAsset.APPSMITH_BOT_EMAIL : authorEmail; + final Path repoPath = TRUE.equals(isSuffixedPath) ? 
createRepoPath(path) : path; + + return Mono.using( + () -> Git.open(repoPath.toFile()), + git -> Mono.fromCallable(() -> { + log.debug("Trying to commit to local repo path, {}", path); + + Stopwatch processStopwatch = StopwatchHelpers.startStopwatch( + repoPath, AnalyticsEvents.GIT_COMMIT.getEventName()); + // Just need to open a repository here and make a commit + // Stage all the files added and modified + git.add().addFilepattern(".").call(); + // Stage modified and deleted files + git.add() + .setUpdate(true) + .addFilepattern(".") + .call(); + + // Commit the changes + git.commit() + .setMessage(commitMessage) + // Only make a commit if there are any updates + .setAllowEmpty(false) + .setAuthor(finalAuthorName, finalAuthorEmail) + .setCommitter(finalAuthorName, finalAuthorEmail) + .setAmend(doAmend) + .call(); + processStopwatch.stopAndLogTimeInMillis(); + return "Committed successfully!"; + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_COMMIT) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + /** + * Method to create a new repository to provided path + * @param repoPath path where new repo needs to be created + * @return if the operation was successful + */ + @Override + public boolean createNewRepository(Path repoPath) throws GitAPIException { + // create new repo to the mentioned path + log.debug("Trying to create new repository: {}", repoPath); + try (Git ignored = Git.init().setDirectory(repoPath.toFile()).call()) { + return true; + } + } + + /** + * Method to get the commit history + * @param repoSuffix Path used to generate the repo url specific to the application for which the commit history is requested + * @return list of git commits + */ + @Override + public Mono> getCommitHistory(Path repoSuffix) { + Path repoPath = createRepoPath(repoSuffix); + return Mono.using( + () -> Git.open(repoPath.toFile()), + git -> Mono.fromCallable(() -> { + 
log.debug(Thread.currentThread().getName() + ": get commit history for " + + repoSuffix); + List commitLogs = new ArrayList<>(); + Stopwatch processStopwatch = StopwatchHelpers.startStopwatch( + repoPath, AnalyticsEvents.GIT_COMMIT_HISTORY.getEventName()); + Iterable gitLogs = git.log() + .setMaxCount(Constraint.MAX_COMMIT_LOGS) + .call(); + gitLogs.forEach(revCommit -> { + PersonIdent author = revCommit.getAuthorIdent(); + GitLogDTO gitLog = new GitLogDTO( + revCommit.getName(), + author.getName(), + author.getEmailAddress(), + revCommit.getFullMessage(), + ISO_FORMATTER.format( + new Date(revCommit.getCommitTime() * 1000L).toInstant())); + processStopwatch.stopAndLogTimeInMillis(); + commitLogs.add(gitLog); + }); + return commitLogs; + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Path createRepoPath(Path suffix) { + return Paths.get(gitServiceConfig.getGitRootPath()).resolve(suffix); + } + + /** + * Method to push changes to remote repo + * @param repoSuffix Path used to generate the repo url specific to the application which needs to be pushed to remote + * @param remoteUrl remote repo url + * @param publicKey + * @param privateKey + * @return Success message + */ + @Override + public Mono pushApplication( + Path repoSuffix, String remoteUrl, String publicKey, String privateKey, String branchName) { + // We can safely assume that repo has been already initialised either in commit or clone flow and can directly + // open the repo + Path baseRepoPath = createRepoPath(repoSuffix); + + return gitConfig.getIsAtomicPushAllowed().flatMap(isAtomicPushAllowed -> { + return Mono.using( + () -> Git.open(baseRepoPath.toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": pushing changes to remote " + + remoteUrl); + // open the repo + Stopwatch processStopwatch = StopwatchHelpers.startStopwatch( + baseRepoPath, 
AnalyticsEvents.GIT_PUSH.getEventName()); + TransportConfigCallback transportConfigCallback = + new SshTransportConfigCallback(privateKey, publicKey); + + StringBuilder result = new StringBuilder("Pushed successfully with status : "); + git.push() + .setAtomic(isAtomicPushAllowed) + .setTransportConfigCallback(transportConfigCallback) + .setRemote(remoteUrl) + .call() + .forEach(pushResult -> pushResult + .getRemoteUpdates() + .forEach(remoteRefUpdate -> { + result.append(remoteRefUpdate.getStatus()) + .append(","); + if (!StringUtils.isEmptyOrNull( + remoteRefUpdate.getMessage())) { + result.append(remoteRefUpdate.getMessage()) + .append(","); + } + })); + // We can support username and password in future if needed + // pushCommand.setCredentialsProvider(new + // UsernamePasswordCredentialsProvider("username", + // "password")); + processStopwatch.stopAndLogTimeInMillis(); + return result.substring(0, result.length() - 1); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_PUSH) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + // this subscribeOn on is required because Mono.using + // is not deferring the execution of push and for that reason it runs on the + // lettuce-nioEventLoop thread instead of boundedElastic + .subscribeOn(scheduler); + }); + } + + /** Clone the repo to the file path : container-volume/orgId/defaultAppId/repo/ + * + * @param repoSuffix combination of orgId, defaultId and repoName + * @param remoteUrl ssh url of the git repo(we support cloning via ssh url only with deploy key) + * @param privateKey generated by us and specific to the defaultApplication + * @param publicKey generated by us and specific to the defaultApplication + * @return defaultBranchName of the repo + * */ + @Override + public Mono cloneRemoteIntoArtifactRepo( + Path repoSuffix, String remoteUrl, String privateKey, String publicKey) { + + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, 
AnalyticsEvents.GIT_CLONE.getEventName()); + return Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Cloning the repo from the remote " + remoteUrl); + final TransportConfigCallback transportConfigCallback = + new SshTransportConfigCallback(privateKey, publicKey); + File file = Paths.get(gitServiceConfig.getGitRootPath()) + .resolve(repoSuffix) + .toFile(); + while (file.exists()) { + FileSystemUtils.deleteRecursively(file); + } + String branchName; + try (Git git = Git.cloneRepository() + .setURI(remoteUrl) + .setTransportConfigCallback(transportConfigCallback) + .setDirectory(file) + .call()) { + branchName = git.getRepository().getBranch(); + + repositoryHelper.updateRemoteBranchTrackingConfig(branchName, git); + } + processStopwatch.stopAndLogTimeInMillis(); + return branchName; + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_CLONE_REPO) + .tap(Micrometer.observation(observationRegistry)) + .subscribeOn(scheduler); + } + + @Override + public Mono createAndCheckoutToBranch(Path repoSuffix, String branchName) { + // We can safely assume that repo has been already initialised either in commit or clone flow and can directly + // open the repo + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_CREATE_BRANCH.getEventName()); + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Creating branch " + branchName + + "for the repo " + repoSuffix); + // open the repo + // Create and checkout to new branch + git.checkout() + .setCreateBranch(TRUE) + .setName(branchName) + .setUpstreamMode(CreateBranchCommand.SetupUpstreamMode.TRACK) + .call(); + + repositoryHelper.updateRemoteBranchTrackingConfig(branchName, git); + processStopwatch.stopAndLogTimeInMillis(); + return git.getRepository().getBranch(); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + 
.name(GitSpan.FS_CREATE_BRANCH) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono deleteBranch(Path repoSuffix, String branchName) { + // We can safely assume that repo has been already initialised either in commit or clone flow and can directly + // open the repo + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_DELETE_BRANCH.getEventName()); + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Deleting branch " + branchName + + "for the repo " + repoSuffix); + // open the repo + // Create and checkout to new branch + List deleteBranchList = git.branchDelete() + .setBranchNames(branchName) + .setForce(TRUE) + .call(); + processStopwatch.stopAndLogTimeInMillis(); + if (deleteBranchList.isEmpty()) { + return Boolean.FALSE; + } + return TRUE; + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_DELETE_BRANCH) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono checkoutToBranch(Path repoSuffix, String branchName) { + + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_CHECKOUT.getEventName()); + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Switching to the branch " + + branchName); + // We can safely assume that repo has been already initialised either in commit or + // clone flow and + // can directly + // open the repo + if (StringUtils.equalsIgnoreCase( + branchName, git.getRepository().getBranch())) { + return TRUE; + } + // Create and checkout to new branch + String checkedOutBranch = git.checkout() + .setCreateBranch(Boolean.FALSE) + .setName(branchName) + 
.setUpstreamMode(CreateBranchCommand.SetupUpstreamMode.SET_UPSTREAM) + .call() + .getName(); + processStopwatch.stopAndLogTimeInMillis(); + return StringUtils.equalsIgnoreCase(checkedOutBranch, "refs/heads/" + branchName); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .tag(CHECKOUT_REMOTE, FALSE.toString()) + .name(GitSpan.FS_CHECKOUT_BRANCH) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono pullApplication( + Path repoSuffix, String remoteUrl, String branchName, String privateKey, String publicKey) + throws IOException { + + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_PULL.getEventName()); + TransportConfigCallback transportConfigCallback = new SshTransportConfigCallback(privateKey, publicKey); + + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Pull changes from remote " + + remoteUrl + " for the branch " + branchName); + // checkout the branch on which the merge command is run + MergeResult mergeResult; + try { + git.checkout() + .setName(branchName) + .setCreateBranch(false) + .call(); + mergeResult = git.pull() + .setRemoteBranchName(branchName) + .setTransportConfigCallback(transportConfigCallback) + .setFastForward(MergeCommand.FastForwardMode.FF) + .call() + .getMergeResult(); + } catch (GitAPIException e) { + throw e; + } + MergeStatusDTO mergeStatus = new MergeStatusDTO(); + Long count = Arrays.stream(mergeResult.getMergedCommits()) + .count(); + if (mergeResult.getMergeStatus().isSuccessful()) { + mergeStatus.setMergeAble(true); + mergeStatus.setStatus(count + " commits merged from origin/" + branchName); + processStopwatch.stopAndLogTimeInMillis(); + return mergeStatus; + } else { + // If there are conflicts add the conflicting file names to the response + // structure + 
mergeStatus.setMergeAble(false); + List mergeConflictFiles = new ArrayList<>(); + if (!Optional.ofNullable(mergeResult.getConflicts()) + .isEmpty()) { + mergeConflictFiles.addAll( + mergeResult.getConflicts().keySet()); + } + mergeStatus.setConflictingFiles(mergeConflictFiles); + try { + // On merge conflicts abort the merge => git merge --abort + git.getRepository().writeMergeCommitMsg(null); + git.getRepository().writeMergeHeads(null); + throw new org.eclipse.jgit.errors.CheckoutConflictException( + mergeConflictFiles.toString()); + } catch (IOException e) { + log.debug("Encountered error while aborting merge", e); + throw new org.eclipse.jgit.errors.CheckoutConflictException( + mergeConflictFiles.toString()); + } finally { + processStopwatch.stopAndLogTimeInMillis(); + } + } + }) + .onErrorResume(error -> resetToLastCommit(git).flatMap(ignore -> Mono.error(error))) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_PULL) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono> listBranches(Path repoSuffix) { + Path baseRepoPath = createRepoPath(repoSuffix); + + return Mono.using( + () -> Git.open(baseRepoPath.toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Get branches for the application " + + repoSuffix); + + List refList = git.branchList() + .setListMode(ListBranchCommand.ListMode.ALL) + .call(); + + List branchList = new ArrayList<>(); + GitBranchDTO gitBranchDTO = new GitBranchDTO(); + if (refList.isEmpty()) { + gitBranchDTO.setBranchName( + git.getRepository().getBranch()); + branchList.add(gitBranchDTO); + } else { + for (Ref ref : refList) { + // if (!ref.getName().equals(defaultBranch)) { + gitBranchDTO = new GitBranchDTO(); + gitBranchDTO.setBranchName(ref.getName() + .replace("refs/", "") + .replace("heads/", "") + .replace("remotes/", "")); + branchList.add(gitBranchDTO); + } + } + return branchList; + }) + 
.timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono getRemoteDefaultBranch(Path repoSuffix, String remoteUrl, String privateKey, String publicKey) { + Path baseRepoPath = createRepoPath(repoSuffix); + return Mono.using( + () -> Git.open(baseRepoPath.toFile()), + git -> Mono.fromCallable(() -> { + TransportConfigCallback transportConfigCallback = + new SshTransportConfigCallback(privateKey, publicKey); + + return git.lsRemote() + .setRemote(remoteUrl) + .setTransportConfigCallback(transportConfigCallback) + .callAsMap() + .get("HEAD") + .getTarget() + .getName() + .replace("refs/heads/", ""); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)), + Git::close) + .subscribeOn(scheduler); + } + + /** + * This method will handle the git-status functionality + * + * @param repoPath Path to actual repo + * @param branchName branch name for which the status is required + * @return Map of file names those are modified, conflicted etc. 
+ */ + @Override + public Mono getStatus(Path repoPath, String branchName) { + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoPath, AnalyticsEvents.GIT_STATUS.getEventName()); + return Mono.using( + () -> Git.open(repoPath.toFile()), + git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Get status for repo " + repoPath + + ", branch " + branchName); + Status status = git.status().call(); + GitStatusDTO response = new GitStatusDTO(); + + // resource changes + Set modified = Stream.concat( + status.getChanged().stream(), status.getModified().stream()) + .collect(Collectors.toSet()); + Set added = Stream.concat( + status.getAdded().stream(), status.getUntracked().stream()) + .collect(Collectors.toSet()); + Set removed = Stream.concat( + status.getRemoved().stream(), status.getMissing().stream()) + .collect(Collectors.toSet()); + + response.setModified(modified); + response.setAdded(added); + response.setRemoved(removed); + + populateModifiedEntities(response); + + // conflicts changes + response.setConflicting(status.getConflicting()); + response.setIsClean(status.isClean()); + + // remote status changes + BranchTrackingStatus trackingStatus = + BranchTrackingStatus.of(git.getRepository(), branchName); + if (trackingStatus != null) { + response.setAheadCount(trackingStatus.getAheadCount()); + response.setBehindCount(trackingStatus.getBehindCount()); + response.setRemoteBranch(trackingStatus.getRemoteTrackingBranch()); + } else { + log.debug( + "Remote tracking details not present for branch: {}, repo: {}", + branchName, + repoPath); + response.setAheadCount(0); + response.setBehindCount(0); + response.setRemoteBranch("untracked"); + } + + // Remove modified changes from current branch so that checkout to other branches + // will be possible + if (!status.isClean()) { + return resetToLastCommit(git).map(ref -> { + processStopwatch.stopAndLogTimeInMillis(); + return response; + }); + } + 
processStopwatch.stopAndLogTimeInMillis(); + return Mono.just(response); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .flatMap(response -> response) + .name(GitSpan.FS_STATUS) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + protected void populateModifiedEntities(GitStatusDTO response) { + populatePageChanges(response); + populateQueryChanges(response); + populateJsObjectChanges(response); + populateDatasourceChanges(response); + populateJsLibsChanges(response); + legacyPopulateJsLibMigrationMessage(response); + } + + protected boolean isAModifiedPage(String x) { + return x.startsWith(GitDirectories.PAGE_DIRECTORY) + && !x.contains(GitDirectories.ACTION_DIRECTORY) + && !x.contains(GitDirectories.ACTION_COLLECTION_DIRECTORY); + } + + protected void populatePageChanges(GitStatusDTO response) { + + Predicate isPageAddedOrRemoved = x -> { + if (isAModifiedPage(x)) { + String[] pageNameArray = x.split(CommonConstants.DELIMITER_PATH); + String folderName = pageNameArray[1]; + String fileName = + pageNameArray[2].replace(CommonConstants.JSON_EXTENSION, CommonConstants.EMPTY_STRING); + return folderName.equals(fileName); + } + return false; + }; + + Function getName = x -> x.split(CommonConstants.DELIMITER_PATH)[1]; + + Set pagesAdded = response.getAdded().stream() + .filter(isPageAddedOrRemoved) + .map(getName) + .collect(Collectors.toSet()); + Set pagesRemoved = response.getRemoved().stream() + .filter(isPageAddedOrRemoved) + .map(getName) + .collect(Collectors.toSet()); + Set pagesModified = Stream.concat( + response.getModified().stream(), + Stream.concat(response.getAdded().stream(), response.getRemoved().stream())) + .filter(this::isAModifiedPage) + .map(getName) + .filter(x -> !pagesAdded.contains(x)) + .filter(x -> !pagesRemoved.contains(x)) + .collect(Collectors.toSet()); + + response.setPagesModified(pagesModified); + response.setPagesAdded(pagesAdded); + 
response.setPagesRemoved(pagesRemoved); + response.setModifiedPages(pagesModified.size() + pagesAdded.size() + pagesRemoved.size()); + } + + protected void populateQueryChanges(GitStatusDTO response) { + Predicate condition = x -> { + if (x.contains(GitDirectories.ACTION_DIRECTORY + CommonConstants.DELIMITER_PATH)) { + String queryName = x.split(CommonConstants.DELIMITER_PATH)[3]; + return !queryName.contains(CommonConstants.DELIMITER_HYPHEN); + } + return false; + }; + + Function getName = x -> { + String pageName = x.split(CommonConstants.DELIMITER_PATH)[1]; + String queryName = x.split(CommonConstants.DELIMITER_PATH)[3]; + return pageName + CommonConstants.DELIMITER_PATH + queryName; + }; + + Set queriesModified = + response.getModified().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set queriesAdded = + response.getAdded().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set queriesRemoved = + response.getRemoved().stream().filter(condition).map(getName).collect(Collectors.toSet()); + + response.setQueriesModified(queriesModified); + response.setQueriesAdded(queriesAdded); + response.setQueriesRemoved(queriesRemoved); + response.setModifiedQueries(queriesModified.size() + queriesAdded.size() + queriesRemoved.size()); + } + + protected void populateJsObjectChanges(GitStatusDTO response) { + Predicate condition = + x -> x.contains(GitDirectories.ACTION_COLLECTION_DIRECTORY + CommonConstants.DELIMITER_PATH) + && !x.contains(CommonConstants.METADATA + CommonConstants.JSON_EXTENSION); + + Function getName = x -> { + String pageName = x.split(CommonConstants.DELIMITER_PATH)[1]; + String jsObjectName = x.substring( + x.lastIndexOf(CommonConstants.DELIMITER_PATH) + 1, x.lastIndexOf(CommonConstants.DELIMITER_POINT)); + return pageName + CommonConstants.DELIMITER_PATH + jsObjectName; + }; + + Set jsObjectsModified = + response.getModified().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set 
jsObjectsAdded = + response.getAdded().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set jsObjectsRemoved = + response.getRemoved().stream().filter(condition).map(getName).collect(Collectors.toSet()); + + response.setJsObjectsModified(jsObjectsModified); + response.setJsObjectsAdded(jsObjectsAdded); + response.setJsObjectsRemoved(jsObjectsRemoved); + response.setModifiedJSObjects(jsObjectsModified.size() + jsObjectsAdded.size() + jsObjectsRemoved.size()); + } + + protected void populateDatasourceChanges(GitStatusDTO response) { + Predicate condition = + x -> x.contains(GitDirectories.DATASOURCE_DIRECTORY + CommonConstants.DELIMITER_PATH); + + Function getName = x -> x.substring( + x.lastIndexOf(CommonConstants.DELIMITER_PATH) + 1, x.lastIndexOf(CommonConstants.DELIMITER_POINT)); + + Set datasourcesModified = + response.getModified().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set datasourcesAdded = + response.getAdded().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set datasourcesRemoved = + response.getRemoved().stream().filter(condition).map(getName).collect(Collectors.toSet()); + + response.setDatasourcesModified(datasourcesModified); + response.setDatasourcesAdded(datasourcesAdded); + response.setDatasourcesRemoved(datasourcesRemoved); + response.setModifiedDatasources( + datasourcesModified.size() + datasourcesAdded.size() + datasourcesRemoved.size()); + } + + protected void populateJsLibsChanges(GitStatusDTO response) { + Predicate condition = x -> x.contains(GitDirectories.JS_LIB_DIRECTORY + CommonConstants.DELIMITER_PATH); + + Function getName = x -> { + String filename = x.split(CommonConstants.DELIMITER_PATH)[1]; + return filename.substring(0, filename.lastIndexOf(CommonConstants.SEPARATOR_UNDERSCORE)); + }; + Set jsLibsModified = + response.getModified().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set jsLibsAdded = + 
response.getAdded().stream().filter(condition).map(getName).collect(Collectors.toSet()); + Set jsLibsRemoved = + response.getRemoved().stream().filter(condition).map(getName).collect(Collectors.toSet()); + + response.setJsLibsModified(jsLibsModified); + response.setJsLibsAdded(jsLibsAdded); + response.setJsLibsRemoved(jsLibsRemoved); + response.setModifiedJSLibs(jsLibsModified.size() + jsLibsAdded.size() + jsLibsRemoved.size()); + } + + protected void legacyPopulateJsLibMigrationMessage(GitStatusDTO response) { + /* + LEGACY: Remove this code in future when all the older format js libs are migrated to new format + + As this updated filename has color, it means this is the older format js + lib file that we're going to rename with the format without colon. + Hence, we need to show a message to user saying this might be a system level change. + */ + Predicate condition = x -> + x.contains(GitDirectories.JS_LIB_DIRECTORY + CommonConstants.DELIMITER_PATH) && x.contains("js.json"); + + Boolean isModified = response.getModified().stream().anyMatch(condition); + Boolean isAdded = response.getAdded().stream().anyMatch(condition); + Boolean isRemoved = response.getAdded().stream().anyMatch(condition); + + if (isModified || isAdded || isRemoved) { + response.setMigrationMessage(FILE_MIGRATION_MESSAGE); + } + } + + private String getPageName(String path) { + String[] pathArray = path.split(CommonConstants.DELIMITER_PATH); + return pathArray[1]; + } + + @Override + public Mono mergeBranch(Path repoSuffix, String sourceBranch, String destinationBranch) { + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + Stopwatch processStopwatch = StopwatchHelpers.startStopwatch( + repoSuffix, AnalyticsEvents.GIT_MERGE.getEventName()); + log.debug(Thread.currentThread().getName() + ": Merge branch " + sourceBranch + + " on " + destinationBranch); + try { + // checkout the branch on which the merge command is run + git.checkout() + 
.setName(destinationBranch) + .setCreateBranch(false) + .call(); + + MergeResult mergeResult = git.merge() + .include(git.getRepository().findRef(sourceBranch)) + .setStrategy(MergeStrategy.RECURSIVE) + .call(); + processStopwatch.stopAndLogTimeInMillis(); + return mergeResult.getMergeStatus().name(); + } catch (GitAPIException e) { + // On merge conflicts abort the merge => git merge --abort + git.getRepository().writeMergeCommitMsg(null); + git.getRepository().writeMergeHeads(null); + processStopwatch.stopAndLogTimeInMillis(); + throw new Exception(e); + } + }) + .onErrorResume(error -> { + try { + return resetToLastCommit(repoSuffix, destinationBranch) + .thenReturn(error.getMessage()); + } catch (GitAPIException | IOException e) { + log.error("Error while hard resetting to latest commit {0}", e); + return Mono.error(e); + } + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_MERGE) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono fetchRemote( + Path repoSuffix, + String publicKey, + String privateKey, + boolean isRepoPath, + String branchName, + boolean isFetchAll) { + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_FETCH.getEventName()); + Path repoPath = TRUE.equals(isRepoPath) ? 
repoSuffix : createRepoPath(repoSuffix); + return Mono.using( + () -> Git.open(repoPath.toFile()), + git -> Mono.fromCallable(() -> { + TransportConfigCallback config = + new SshTransportConfigCallback(privateKey, publicKey); + String fetchMessages; + if (TRUE.equals(isFetchAll)) { + fetchMessages = git.fetch() + .setRemoveDeletedRefs(true) + .setTransportConfigCallback(config) + .call() + .getMessages(); + } else { + RefSpec ref = new RefSpec( + "refs/heads/" + branchName + ":refs/remotes/origin/" + branchName); + fetchMessages = git.fetch() + .setRefSpecs(ref) + .setRemoveDeletedRefs(true) + .setTransportConfigCallback(config) + .call() + .getMessages(); + } + processStopwatch.stopAndLogTimeInMillis(); + return fetchMessages; + }) + .onErrorResume(error -> { + log.error(error.getMessage()); + return Mono.error(error); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_FETCH_REMOTE) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono fetchRemote( + Path repoSuffix, String publicKey, String privateKey, boolean isRepoPath, String... branchNames) { + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_FETCH.getEventName()); + Path repoPath = TRUE.equals(isRepoPath) ? 
repoSuffix : createRepoPath(repoSuffix); + return Mono.using( + () -> Git.open(repoPath.toFile()), + git -> Mono.fromCallable(() -> { + TransportConfigCallback config = + new SshTransportConfigCallback(privateKey, publicKey); + String fetchMessages; + + List refSpecs = new ArrayList<>(); + for (String branchName : branchNames) { + RefSpec ref = new RefSpec( + "refs/heads/" + branchName + ":refs/remotes/origin/" + branchName); + refSpecs.add(ref); + } + + fetchMessages = git.fetch() + .setRefSpecs(refSpecs.toArray(new RefSpec[0])) + .setRemoveDeletedRefs(true) + .setTransportConfigCallback(config) + .call() + .getMessages(); + + processStopwatch.stopAndLogTimeInMillis(); + return fetchMessages; + }) + .onErrorResume(error -> { + log.error(error.getMessage()); + return Mono.error(error); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_FETCH_REMOTE) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono isMergeBranch(Path repoSuffix, String sourceBranch, String destinationBranch) { + Stopwatch processStopwatch = + StopwatchHelpers.startStopwatch(repoSuffix, AnalyticsEvents.GIT_MERGE_CHECK.getEventName()); + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + log.debug( + Thread.currentThread().getName() + + ": Check mergeability for repo {} with src: {}, dest: {}", + repoSuffix, + sourceBranch, + destinationBranch); + + // checkout the branch on which the merge command is run + try { + git.checkout() + .setName(destinationBranch) + .setCreateBranch(false) + .call(); + } catch (GitAPIException e) { + if (e instanceof CheckoutConflictException) { + MergeStatusDTO mergeStatus = new MergeStatusDTO(); + mergeStatus.setMergeAble(false); + mergeStatus.setConflictingFiles( + ((CheckoutConflictException) e).getConflictingPaths()); + processStopwatch.stopAndLogTimeInMillis(); + return mergeStatus; + } + } + + 
MergeResult mergeResult = git.merge() + .include(git.getRepository().findRef(sourceBranch)) + .setFastForward(MergeCommand.FastForwardMode.NO_FF) + .setCommit(false) + .call(); + + MergeStatusDTO mergeStatus = new MergeStatusDTO(); + if (mergeResult.getMergeStatus().isSuccessful()) { + mergeStatus.setMergeAble(true); + mergeStatus.setMessage(SUCCESS_MERGE_STATUS); + } else { + // If there aer conflicts add the conflicting file names to the response + // structure + mergeStatus.setMergeAble(false); + List mergeConflictFiles = new ArrayList<>( + mergeResult.getConflicts().keySet()); + mergeStatus.setConflictingFiles(mergeConflictFiles); + StringBuilder errorMessage = new StringBuilder(); + if (mergeResult.getMergeStatus().equals(MergeResult.MergeStatus.CONFLICTING)) { + errorMessage.append("Conflicts"); + } else { + errorMessage.append( + mergeResult.getMergeStatus().toString()); + } + errorMessage + .append(" while merging branch: ") + .append(destinationBranch) + .append(" <= ") + .append(sourceBranch); + mergeStatus.setMessage(errorMessage.toString()); + mergeStatus.setReferenceDoc( + ErrorReferenceDocUrl.GIT_MERGE_CONFLICT.getDocUrl()); + } + mergeStatus.setStatus( + mergeResult.getMergeStatus().name()); + return mergeStatus; + }) + .flatMap(status -> { + try { + // Revert uncommitted changes if any + return resetToLastCommit(repoSuffix, destinationBranch) + .map(ignore -> { + processStopwatch.stopAndLogTimeInMillis(); + return status; + }); + } catch (GitAPIException | IOException e) { + log.error("Error for hard resetting to latest commit {0}", e); + return Mono.error(e); + } + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)), + Git::close) + .subscribeOn(scheduler); + } + + public Mono checkoutRemoteBranch(Path repoSuffix, String branchName) { + // We can safely assume that repo has been already initialised either in commit or clone flow and can directly + // open the repo + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + 
git -> Mono.fromCallable(() -> { + log.debug(Thread.currentThread().getName() + ": Checking out remote branch origin/" + + branchName + " for the repo " + repoSuffix); + // open the repo + Path baseRepoPath = createRepoPath(repoSuffix); + // Create and checkout to new branch + git.checkout() + .setCreateBranch(TRUE) + .setName(branchName) + .setUpstreamMode(CreateBranchCommand.SetupUpstreamMode.TRACK) + .setStartPoint("origin/" + branchName) + .call(); + + StoredConfig config = git.getRepository().getConfig(); + config.setString("branch", branchName, "remote", "origin"); + config.setString("branch", branchName, "merge", "refs/heads/" + branchName); + config.save(); + return git.getRepository().getBranch(); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .tag(CHECKOUT_REMOTE, TRUE.toString()) + .name(GitSpan.FS_CHECKOUT_BRANCH) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } + + @Override + public Mono testConnection(String publicKey, String privateKey, String remoteUrl) { + return Mono.fromCallable(() -> { + TransportConfigCallback transportConfigCallback = + new SshTransportConfigCallback(privateKey, publicKey); + Git.lsRemoteRepository() + .setTransportConfigCallback(transportConfigCallback) + .setRemote(remoteUrl) + .setHeads(true) + .setTags(true) + .call(); + return true; + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .subscribeOn(scheduler); + } + + private Mono resetToLastCommit(Git git) { + Stopwatch processStopwatch = StopwatchHelpers.startStopwatch( + git.getRepository().getDirectory().toPath().getParent(), AnalyticsEvents.GIT_RESET.getEventName()); + return Mono.fromCallable(() -> { + // Remove tracked files + Ref ref = git.reset().setMode(ResetCommand.ResetType.HARD).call(); + // Remove untracked files + git.clean().setForce(true).setCleanDirectories(true).call(); + processStopwatch.stopAndLogTimeInMillis(); + return ref; + }) + 
.timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .tag(HARD_RESET, Boolean.FALSE.toString()) + .name(GitSpan.FS_RESET) + .tap(Micrometer.observation(observationRegistry)) + .subscribeOn(scheduler); + } + + public Mono resetToLastCommit(Path repoSuffix, String branchName) throws GitAPIException, IOException { + return Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> this.resetToLastCommit(git) + .flatMap(ref -> checkoutToBranch(repoSuffix, branchName)) + .flatMap(checkedOut -> resetToLastCommit(git).thenReturn(true)), + Git::close); + } + + public Mono resetHard(Path repoSuffix, String branchName) { + return this.checkoutToBranch(repoSuffix, branchName) + .flatMap(aBoolean -> Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + git.reset() + .setMode(ResetCommand.ResetType.HARD) + .setRef("HEAD~1") + .call(); + return true; + }) + .onErrorResume(e -> { + log.error("Error while resetting the commit, {}", e.getMessage()); + return Mono.just(false); + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .tag(HARD_RESET, TRUE.toString()) + .name(GitSpan.FS_RESET) + .tap(Micrometer.observation(observationRegistry)), + Git::close)) + .subscribeOn(scheduler); + } + + public Mono rebaseBranch(Path repoSuffix, String branchName) { + return this.checkoutToBranch(repoSuffix, branchName).flatMap(isCheckedOut -> Mono.using( + () -> Git.open(createRepoPath(repoSuffix).toFile()), + git -> Mono.fromCallable(() -> { + RebaseResult result = git.rebase() + .setUpstream("origin/" + branchName) + .call(); + if (result.getStatus().isSuccessful()) { + return true; + } else { + log.error( + "Error while rebasing the branch, {}, {}", + result.getStatus().name(), + result.getConflicts()); + git.rebase() + .setUpstream("origin/" + branchName) + .setOperation(RebaseCommand.Operation.ABORT) + .call(); + throw new Exception("Error while rebasing the branch, " + + result.getStatus().name()); + } + }) + 
.onErrorMap(e -> { + log.error("Error while rebasing the branch, {}", e.getMessage()); + return e; + }) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_REBASE) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler)); + } + + @Override + public Mono getBranchTrackingStatus(Path repoPath, String branchName) { + return Mono.using( + () -> Git.open(repoPath.toFile()), + git -> Mono.fromCallable(() -> BranchTrackingStatus.of(git.getRepository(), branchName)) + .timeout(Duration.ofMillis(Constraint.TIMEOUT_MILLIS)) + .name(GitSpan.FS_BRANCH_TRACK) + .tap(Micrometer.observation(observationRegistry)), + Git::close) + .subscribeOn(scheduler); + } +} diff --git a/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/constants/spans/ce/OnLoadSpanCE.java b/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/constants/spans/ce/OnLoadSpanCE.java index 138aa40f93f2..1859a37ef21e 100644 --- a/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/constants/spans/ce/OnLoadSpanCE.java +++ b/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/constants/spans/ce/OnLoadSpanCE.java @@ -8,7 +8,6 @@ public class OnLoadSpanCE { APPSMITH_SPAN_PREFIX + "getAllExecutablesByCreatorIdFlux"; public static final String EXECUTABLE_NAME_TO_EXECUTABLE_MAP = APPSMITH_SPAN_PREFIX + "executableNameToExecutableMap"; - public static final String EXECUTABLE_IN_CREATOR_CONTEXT = APPSMITH_SPAN_PREFIX + "executablesInCreatorContext"; public static final String ADD_DIRECTLY_REFERENCED_EXECUTABLES_TO_GRAPH = APPSMITH_SPAN_PREFIX + "addDirectlyReferencedExecutablesToGraph"; public static final String UPDATE_EXECUTABLE_SELF_REFERENCING_PATHS = diff --git a/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/git/handler/FSGitHandler.java b/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/git/handler/FSGitHandler.java new file mode 100644 index 
000000000000..47d3f5883498 --- /dev/null +++ b/app/server/appsmith-interfaces/src/main/java/com/appsmith/external/git/handler/FSGitHandler.java @@ -0,0 +1,204 @@ +package com.appsmith.external.git.handler; + +import com.appsmith.external.dtos.GitBranchDTO; +import com.appsmith.external.dtos.GitLogDTO; +import com.appsmith.external.dtos.GitStatusDTO; +import com.appsmith.external.dtos.MergeStatusDTO; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.lib.BranchTrackingStatus; +import reactor.core.publisher.Mono; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; + +/** + * This class is a replica of gitExecutor, the methods would be re-evaluated as we will enter the git epic domain + */ +public interface FSGitHandler { + + /** + * This method will handle the git-commit functionality. Under the hood it checks if the repo has already been + * initialised + * @param repoPath parent path to repo + * @param commitMessage message which will be registered for this commit + * @param authorName author details + * @param authorEmail author details + * @param doAmend To amend with the previous commit + * @return if the commit was successful + */ + Mono commitArtifact( + Path repoPath, + String commitMessage, + String authorName, + String authorEmail, + boolean isSuffixedPath, + boolean doAmend); + + /** + * Method to get the commit history + * @param suffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @return list of git commits + */ + Mono> getCommitHistory(Path suffix); + + /** + * Method to create a new repository to provided path + * @param repoPath path where new repo needs to be created + * @return if the operation was successful + */ + boolean createNewRepository(Path repoPath) throws GitAPIException; + + /** + * Method to push changes to remote repo + * + * @param branchSuffix Path used to generate the repo url specific to the application which needs to pushed 
to remote + * @param remoteUrl remote repo url + * @param publicKey generated by us and specific to the defaultApplication + * @param privateKey generated by us and specific to the defaultApplication + * @return Success message + */ + Mono pushApplication( + Path branchSuffix, String remoteUrl, String publicKey, String privateKey, String branchName); + + /** Clone the repo to the file path : container-volume/orgId/defaultAppId/repo/applicationData + * + * @param repoSuffix combination of orgId, defaultId and repoName + * @param remoteUrl ssh url of the git repo(we support cloning via ssh url only with deploy key) + * @param privateKey generated by us and specific to the defaultApplication + * @param publicKey generated by us and specific to the defaultApplication + * @return defaultBranchName of the repo + * */ + Mono cloneRemoteIntoArtifactRepo(Path repoSuffix, String remoteUrl, String privateKey, String publicKey); + + /** + * Create a new branch in the local repo and checkout to that branch + * + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param branchName branch which needs to be created + * @return created branch name + */ + Mono createAndCheckoutToBranch(Path repoSuffix, String branchName); + + /** + * Delete a branch in the local repo + * + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param branchName branch which needs to be deleted + * @return deleted branch name + */ + Mono deleteBranch(Path repoSuffix, String branchName); + + /** + * Git checkout to specific branch + * + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param branchName name of the branch which needs to be checked out + * @return if the operation is successful + */ + Mono checkoutToBranch(Path repoSuffix, String branchName); + + /** + * Pull changes from remote branch and 
merge the changes + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param remoteUrl ssh url of the git repo(we support cloning via ssh url only with deploy key) + * @param branchName remoteBranchName from which commits will be fetched and merged to the current branch + * @param privateKey generated by us and specific to the defaultApplication + * @param publicKey generated by us and specific to the defaultApplication + * @return success message + */ + Mono pullApplication( + Path repoSuffix, String remoteUrl, String branchName, String privateKey, String publicKey) + throws IOException; + + /** + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @return List of branches for the application + */ + // Mono> listBranches( + // Path repoSuffix, String remoteUrl, String privateKey, String publicKey, Boolean + // isDefaultBranchNeeded); + + Mono getRemoteDefaultBranch(Path repoSuffix, String remoteUrl, String privateKey, String publicKey); + + Mono> listBranches(Path repoSuffix); + /** + * This method will handle the git-status functionality + * + * @param repoPath Path to actual repo + * @param branchName branch name for which the status is required + * @return Map of file names those are added, removed, modified + */ + Mono getStatus(Path repoPath, String branchName); + + /** + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param sourceBranch name of the branch whose commits will be referred amd merged to destinationBranch + * @param destinationBranch Merge operation is performed on this branch + * @return Merge status + */ + Mono mergeBranch(Path repoSuffix, String sourceBranch, String destinationBranch); + + /** + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param publicKey public ssh key + * 
@param privateKey private ssh key + * @param isRepoPath does the repoSuffix contain the complete repoPath or only the suffix + * @return messages received after the remote is fetched + */ + Mono fetchRemote( + Path repoSuffix, + String publicKey, + String privateKey, + boolean isRepoPath, + String branchName, + boolean isFetchAll); + + Mono fetchRemote( + Path repoSuffix, String publicKey, String privateKey, boolean isRepoPath, String... branchNames); + + /** + * + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param sourceBranch name of the branch whose commits will be referred and merged to destinationBranch + * @param destinationBranch Merge operation is performed on this branch + * @return Whether the two branches can be merged or not with list of files where the conflicts are present + */ + Mono isMergeBranch(Path repoSuffix, String sourceBranch, String destinationBranch); + + /** + * This method will reset the repo to last commit for the specific branch + * + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param branchName branch for which the repo should hard reset + * @return success status + * @throws GitAPIException + * @throws IOException + */ + Mono resetToLastCommit(Path repoSuffix, String branchName) throws GitAPIException, IOException; + + /** + * + * @param repoSuffix suffixedPath used to generate the base repo path this includes orgId, defaultAppId, repoName + * @param branchName Name of the remote branch + * @return created branch name + */ + Mono checkoutRemoteBranch(Path repoSuffix, String branchName); + + /** + * + * @param publicKey public key + * @param privateKey private key + * @param remoteUrl remote repo ssh url + * @return boolean if the connection can be established with the given keys + */ + Mono testConnection(String publicKey, String privateKey, String remoteUrl); + + Mono resetHard(Path 
repoSuffix, String branchName); + + Mono rebaseBranch(Path repoSuffix, String branchName); + + Path createRepoPath(Path suffix); + + Mono getBranchTrackingStatus(Path repoPath, String branchName); +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/applications/git/GitApplicationHelperCEImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/applications/git/GitApplicationHelperCEImpl.java index 366465523cc4..9e5d38dfd713 100644 --- a/app/server/appsmith-server/src/main/java/com/appsmith/server/applications/git/GitApplicationHelperCEImpl.java +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/applications/git/GitApplicationHelperCEImpl.java @@ -87,6 +87,11 @@ public AclPermission getArtifactManageDefaultBranchPermission() { return applicationPermission.getManageDefaultBranchPermission(); } + @Override + public AclPermission getWorkspaceArtifactCreationPermission() { + return AclPermission.WORKSPACE_CREATE_APPLICATION; + } + @Override public Mono getArtifactById(String applicationId, AclPermission aclPermission) { return applicationService @@ -290,10 +295,7 @@ public Mono disconnectEntitiesOfBaseArtifact(Artifact baseArtifact) @Override public Mono createArtifactForImport(String workspaceId, String repoName) { - Application newApplication = new Application(); - newApplication.setName(repoName); - newApplication.setWorkspaceId(workspaceId); - newApplication.setGitApplicationMetadata(new GitArtifactMetadata()); + Application newApplication = getNewArtifact(workspaceId, repoName); return applicationPageService.createOrUpdateSuffixedApplication(newApplication, newApplication.getName(), 0); } @@ -306,4 +308,13 @@ public Mono deleteArtifact(String artifactId) { public Boolean isContextInArtifactEmpty(ArtifactExchangeJson artifactExchangeJson) { return CollectionUtils.isNullOrEmpty(((ApplicationJson) artifactExchangeJson).getPageList()); } + + @Override + public Application getNewArtifact(String workspaceId, String 
repoName) { + Application newApplication = new Application(); + newApplication.setName(repoName); + newApplication.setWorkspaceId(workspaceId); + newApplication.setGitApplicationMetadata(new GitArtifactMetadata()); + return newApplication; + } } diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/constants/ce/RefType.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/constants/ce/RefType.java new file mode 100644 index 000000000000..f7403f9a7f56 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/constants/ce/RefType.java @@ -0,0 +1,6 @@ +package com.appsmith.server.constants.ce; + +public enum RefType { + BRANCH, + TAG +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/domains/ce/GitArtifactMetadataCE.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/domains/ce/GitArtifactMetadataCE.java index ce46a144fc3c..1c3b700c56a4 100644 --- a/app/server/appsmith-server/src/main/java/com/appsmith/server/domains/ce/GitArtifactMetadataCE.java +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/domains/ce/GitArtifactMetadataCE.java @@ -109,7 +109,8 @@ public String getDefaultArtifactId() { } // TODO : Set to private to prevent direct access unless migration is performed - private void setDefaultArtifactId(String defaultArtifactId) { + // TODO: reevaluate the above TODO bit + public void setDefaultArtifactId(String defaultArtifactId) { this.defaultArtifactId = defaultArtifactId; } diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ApplicationImportDTO.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ApplicationImportDTO.java index a8a586094d6a..13750aab6b8a 100644 --- a/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ApplicationImportDTO.java +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ApplicationImportDTO.java @@ -2,6 +2,7 @@ import 
com.appsmith.external.models.Datasource; import com.appsmith.server.domains.Application; +import com.appsmith.server.domains.Artifact; import lombok.Getter; import lombok.Setter; @@ -16,4 +17,14 @@ public class ApplicationImportDTO extends ArtifactImportDTO { List unConfiguredDatasourceList; Boolean isPartialImport; + + @Override + public Artifact getArtifact() { + return this.getApplication(); + } + + @Override + public void setArtifact(Artifact artifact) { + this.setApplication((Application) artifact); + } } diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ArtifactImportDTO.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ArtifactImportDTO.java index 2e8801637283..9ccbac08ffce 100644 --- a/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ArtifactImportDTO.java +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/dtos/ArtifactImportDTO.java @@ -1,3 +1,10 @@ package com.appsmith.server.dtos; -public abstract class ArtifactImportDTO {} +import com.appsmith.server.domains.Artifact; + +public abstract class ArtifactImportDTO { + + public abstract Artifact getArtifact(); + + public abstract void setArtifact(Artifact artifact); +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitService.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitService.java new file mode 100644 index 000000000000..57e55c56e024 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitService.java @@ -0,0 +1,3 @@ +package com.appsmith.server.git.central; + +public interface CentralGitService extends CentralGitServiceCECompatible {} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCE.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCE.java new file mode 100644 index 
000000000000..acc212ab68aa --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCE.java @@ -0,0 +1,20 @@ +package com.appsmith.server.git.central; + +import com.appsmith.server.constants.ArtifactType; +import com.appsmith.server.domains.Artifact; +import com.appsmith.server.dtos.ArtifactImportDTO; +import com.appsmith.server.dtos.GitConnectDTO; +import reactor.core.publisher.Mono; + +public interface CentralGitServiceCE { + + Mono importArtifactFromGit( + String workspaceId, GitConnectDTO gitConnectDTO, ArtifactType artifactType, GitType gitType); + + Mono connectArtifactToGit( + String baseArtifactId, + GitConnectDTO gitConnectDTO, + String originHeader, + ArtifactType artifactType, + GitType gitType); +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCECompatible.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCECompatible.java new file mode 100644 index 000000000000..3d521935c691 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCECompatible.java @@ -0,0 +1,3 @@ +package com.appsmith.server.git.central; + +public interface CentralGitServiceCECompatible extends CentralGitServiceCE {} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCECompatibleImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCECompatibleImpl.java new file mode 100644 index 000000000000..909f7ffee583 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCECompatibleImpl.java @@ -0,0 +1,50 @@ +package com.appsmith.server.git.central; + +import com.appsmith.server.datasources.base.DatasourceService; +import com.appsmith.server.exports.internal.ExportService; +import 
com.appsmith.server.git.resolver.GitArtifactHelperResolver; +import com.appsmith.server.git.resolver.GitHandlingServiceResolver; +import com.appsmith.server.git.utils.GitAnalyticsUtils; +import com.appsmith.server.git.utils.GitProfileUtils; +import com.appsmith.server.helpers.GitPrivateRepoHelper; +import com.appsmith.server.imports.internal.ImportService; +import com.appsmith.server.plugins.base.PluginService; +import com.appsmith.server.services.UserDataService; +import com.appsmith.server.services.WorkspaceService; +import com.appsmith.server.solutions.DatasourcePermission; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +@Slf4j +@Service +public class CentralGitServiceCECompatibleImpl extends CentralGitServiceCEImpl + implements CentralGitServiceCECompatible { + + public CentralGitServiceCECompatibleImpl( + GitProfileUtils gitProfileUtils, + GitAnalyticsUtils gitAnalyticsUtils, + UserDataService userDataService, + GitArtifactHelperResolver gitArtifactHelperResolver, + GitHandlingServiceResolver gitHandlingServiceResolver, + GitPrivateRepoHelper gitPrivateRepoHelper, + DatasourceService datasourceService, + DatasourcePermission datasourcePermission, + WorkspaceService workspaceService, + PluginService pluginService, + ImportService importService, + ExportService exportService) { + super( + gitProfileUtils, + gitAnalyticsUtils, + userDataService, + gitArtifactHelperResolver, + gitHandlingServiceResolver, + gitPrivateRepoHelper, + datasourceService, + datasourcePermission, + workspaceService, + pluginService, + importService, + exportService); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCEImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCEImpl.java new file mode 100644 index 000000000000..b075fc7048f5 --- /dev/null +++ 
b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceCEImpl.java @@ -0,0 +1,621 @@ +package com.appsmith.server.git.central; + +import com.appsmith.external.constants.AnalyticsEvents; +import com.appsmith.external.models.Datasource; +import com.appsmith.external.models.DatasourceStorage; +import com.appsmith.git.dto.CommitDTO; +import com.appsmith.git.dto.GitUser; +import com.appsmith.server.acl.AclPermission; +import com.appsmith.server.constants.ArtifactType; +import com.appsmith.server.constants.FieldName; +import com.appsmith.server.constants.GitDefaultCommitMessage; +import com.appsmith.server.constants.ce.RefType; +import com.appsmith.server.datasources.base.DatasourceService; +import com.appsmith.server.domains.Artifact; +import com.appsmith.server.domains.GitArtifactMetadata; +import com.appsmith.server.domains.GitAuth; +import com.appsmith.server.domains.GitProfile; +import com.appsmith.server.domains.Plugin; +import com.appsmith.server.domains.UserData; +import com.appsmith.server.domains.Workspace; +import com.appsmith.server.dtos.ArtifactExchangeJson; +import com.appsmith.server.dtos.ArtifactImportDTO; +import com.appsmith.server.dtos.GitConnectDTO; +import com.appsmith.server.exceptions.AppsmithError; +import com.appsmith.server.exceptions.AppsmithException; +import com.appsmith.server.exports.internal.ExportService; +import com.appsmith.server.git.dtos.ArtifactJsonTransformationDTO; +import com.appsmith.server.git.resolver.GitArtifactHelperResolver; +import com.appsmith.server.git.resolver.GitHandlingServiceResolver; +import com.appsmith.server.git.utils.GitAnalyticsUtils; +import com.appsmith.server.git.utils.GitProfileUtils; +import com.appsmith.server.helpers.GitPrivateRepoHelper; +import com.appsmith.server.imports.internal.ImportService; +import com.appsmith.server.plugins.base.PluginService; +import com.appsmith.server.services.GitArtifactHelper; +import com.appsmith.server.services.UserDataService; +import 
com.appsmith.server.services.WorkspaceService; +import com.appsmith.server.solutions.DatasourcePermission; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.eclipse.jgit.api.errors.InvalidRemoteException; +import org.eclipse.jgit.api.errors.TransportException; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; + +import java.io.IOException; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeoutException; + +import static com.appsmith.external.git.constants.ce.GitConstantsCE.DEFAULT_COMMIT_MESSAGE; +import static com.appsmith.external.git.constants.ce.GitConstantsCE.GIT_PROFILE_ERROR; +import static com.appsmith.server.constants.FieldName.DEFAULT; +import static com.appsmith.server.constants.SerialiseArtifactObjective.VERSION_CONTROL; +import static java.lang.Boolean.FALSE; + +@Slf4j +@Service +@RequiredArgsConstructor +public class CentralGitServiceCEImpl implements CentralGitServiceCE { + + private final GitProfileUtils gitProfileUtils; + private final GitAnalyticsUtils gitAnalyticsUtils; + private final UserDataService userDataService; + + protected final GitArtifactHelperResolver gitArtifactHelperResolver; + protected final GitHandlingServiceResolver gitHandlingServiceResolver; + + private final GitPrivateRepoHelper gitPrivateRepoHelper; + + private final DatasourceService datasourceService; + private final DatasourcePermission datasourcePermission; + + private final WorkspaceService workspaceService; + private final PluginService pluginService; + + private final ImportService importService; + private final ExportService exportService; + + protected Mono isRepositoryLimitReachedForWorkspace(String workspaceId, Boolean isRepositoryPrivate) { + if (!isRepositoryPrivate) { + return 
Mono.just(FALSE); + } + + return gitPrivateRepoHelper.isRepoLimitReached(workspaceId, true); + } + + @Override + public Mono importArtifactFromGit( + String workspaceId, GitConnectDTO gitConnectDTO, ArtifactType artifactType, GitType gitType) { + // 1. Check private repo limit for workspace + // 2. Create dummy artifact, clone repo from remote + // 3. Re-hydrate artifact to DB from local repo + // a. Save the ssh keys in artifact object with other details + // b. During import-export need to handle the DS(empty vs non-empty) + // 4. Return artifact + + if (!StringUtils.hasText(workspaceId)) { + return Mono.error(new AppsmithException(AppsmithError.INVALID_PARAMETER, "Invalid workspace id")); + } + + GitHandlingService gitHandlingService = gitHandlingServiceResolver.getGitHandlingService(gitType); + Set errors = gitHandlingService.validateGitConnectDTO(gitConnectDTO); + + if (!CollectionUtils.isEmpty(errors)) { + return Mono.error(new AppsmithException( + AppsmithError.INVALID_PARAMETER, errors.stream().findAny().get())); + } + + GitArtifactHelper gitArtifactHelper = gitArtifactHelperResolver.getArtifactHelper(artifactType); + AclPermission artifactCreatePermission = gitArtifactHelper.getWorkspaceArtifactCreationPermission(); + + // TODO: permission bit deferred to gitArtifactHelper + Mono workspaceMono = workspaceService + .findById(workspaceId, artifactCreatePermission) + .switchIfEmpty(Mono.error( + new AppsmithException(AppsmithError.NO_RESOURCE_FOUND, FieldName.WORKSPACE, workspaceId))); + + final String repoName = gitHandlingService.getRepoName(gitConnectDTO); + Mono isRepositoryPrivateMono = + gitHandlingService.isRepoPrivate(gitConnectDTO).cache(); + Mono isRepositoryLimitReachedForWorkspaceMono = isRepositoryPrivateMono.flatMap( + isRepositoryPrivate -> isRepositoryLimitReachedForWorkspace(workspaceId, isRepositoryPrivate)); + + Mono importedArtifactMono = workspaceMono + .then(Mono.defer(() -> isRepositoryLimitReachedForWorkspaceMono)) + 
.flatMap(isRepositoryLimitReached -> { + Mono gitAuthForUserMono = + gitHandlingService.getGitAuthForUser().cache(); + Mono createArtifactMono = gitArtifactHelper + .createArtifactForImport(workspaceId, repoName) + .cache(); + + if (FALSE.equals(isRepositoryLimitReached)) { + return gitAuthForUserMono.zipWith(createArtifactMono); + } + + // TODO: Change errors to artifact level. + return gitAnalyticsUtils + .addAnalyticsForGitOperation( + AnalyticsEvents.GIT_IMPORT, + gitArtifactHelper.getNewArtifact(workspaceId, repoName), + AppsmithError.GIT_APPLICATION_LIMIT_ERROR.getErrorType(), + AppsmithError.GIT_APPLICATION_LIMIT_ERROR.getMessage(), + true) + .then(Mono.error(new AppsmithException(AppsmithError.GIT_APPLICATION_LIMIT_ERROR))); + }) + .flatMap(tuple2 -> { + GitAuth gitAuth = tuple2.getT1(); + Artifact artifact = tuple2.getT2(); + + Mono> profileMono = gitProfileUtils.updateOrCreateGitProfileForCurrentUser( + gitConnectDTO.getGitProfile(), artifact.getId()); + + Mono fetchRemoteRepository = + gitHandlingService.fetchRemoteRepository(gitConnectDTO, gitAuth, artifact, repoName); + + return fetchRemoteRepository + .zipWith(isRepositoryPrivateMono) + .flatMap(tuple -> { + String defaultBranch = tuple.getT1(); + Boolean isRepoPrivate = tuple.getT2(); + + GitArtifactMetadata gitArtifactMetadata = new GitArtifactMetadata(); + gitArtifactMetadata.setGitAuth(gitAuth); + gitArtifactMetadata.setDefaultArtifactId(artifact.getId()); + gitArtifactMetadata.setDefaultBranchName(defaultBranch); + gitArtifactMetadata.setBranchName(defaultBranch); + gitArtifactMetadata.setRepoName(repoName); + gitArtifactMetadata.setIsRepoPrivate(isRepoPrivate); + gitArtifactMetadata.setLastCommittedAt(Instant.now()); + + gitHandlingService.setRepositoryDetailsInGitArtifactMetadata( + gitConnectDTO, gitArtifactMetadata); + artifact.setGitArtifactMetadata(gitArtifactMetadata); + return Mono.just(artifact).zipWith(profileMono); + }); + }) + .flatMap(tuple2 -> { + Artifact artifact = tuple2.getT1(); 
+ GitArtifactMetadata gitArtifactMetadata = artifact.getGitArtifactMetadata(); + String defaultBranch = gitArtifactMetadata.getDefaultBranchName(); + + Mono> datasourceMono = datasourceService + .getAllByWorkspaceIdWithStorages(workspaceId, datasourcePermission.getEditPermission()) + .collectList(); + + Mono> pluginMono = + pluginService.getDefaultPlugins().collectList(); + + ArtifactJsonTransformationDTO jsonMorphDTO = new ArtifactJsonTransformationDTO(); + jsonMorphDTO.setWorkspaceId(workspaceId); + jsonMorphDTO.setArtifactId(artifact.getId()); + jsonMorphDTO.setArtifactType(artifactType); + jsonMorphDTO.setRepoName(gitArtifactMetadata.getRepoName()); + jsonMorphDTO.setRefType(RefType.BRANCH); + jsonMorphDTO.setRefName(defaultBranch); + + Mono artifactExchangeJsonMono = gitHandlingService + .reconstructArtifactJsonFromGitRepository(jsonMorphDTO) + .onErrorResume(error -> { + log.error("Error while constructing artifact from git repo", error); + return deleteArtifactCreatedFromGitImport(jsonMorphDTO, gitType) + .then(Mono.error(new AppsmithException( + AppsmithError.GIT_FILE_SYSTEM_ERROR, error.getMessage()))); + }); + + return Mono.zip(artifactExchangeJsonMono, datasourceMono, pluginMono) + .flatMap(data -> { + ArtifactExchangeJson artifactExchangeJson = data.getT1(); + List datasourceList = data.getT2(); + List pluginList = data.getT3(); + + if (artifactExchangeJson.getArtifact() == null + || gitArtifactHelper.isContextInArtifactEmpty(artifactExchangeJson)) { + return deleteArtifactCreatedFromGitImport(jsonMorphDTO, gitType) + .then(Mono.error(new AppsmithException( + AppsmithError.GIT_ACTION_FAILED, + "import", + "Cannot import artifact from an empty repo"))); + } + // If there is an existing datasource with the same name but a different type from that + // in the repo, the import api should fail + // TODO: change the implementation to compare datasource with gitSyncIds instead. 
+ if (checkIsDatasourceNameConflict( + datasourceList, artifactExchangeJson.getDatasourceList(), pluginList)) { + return deleteArtifactCreatedFromGitImport(jsonMorphDTO, gitType) + .then(Mono.error(new AppsmithException( + AppsmithError.GIT_ACTION_FAILED, + "import", + "Datasource already exists with the same name"))); + } + + artifactExchangeJson.getArtifact().setGitArtifactMetadata(gitArtifactMetadata); + return importService + .importArtifactInWorkspaceFromGit( + workspaceId, artifact.getId(), artifactExchangeJson, defaultBranch) + .onErrorResume(throwable -> deleteArtifactCreatedFromGitImport( + jsonMorphDTO, gitType) + .then(Mono.error(new AppsmithException( + AppsmithError.GIT_FILE_SYSTEM_ERROR, throwable.getMessage())))); + }); + }) + .flatMap(artifact -> gitArtifactHelper.publishArtifact(artifact, false)) + // Add un-configured datasource to the list to response + .flatMap(artifact -> importService.getArtifactImportDTO( + artifact.getWorkspaceId(), artifact.getId(), artifact, artifactType)) + // Add analytics event + .flatMap(artifactImportDTO -> { + Artifact artifact = artifactImportDTO.getArtifact(); + return gitAnalyticsUtils + .addAnalyticsForGitOperation( + AnalyticsEvents.GIT_IMPORT, + artifact, + artifact.getGitArtifactMetadata().getIsRepoPrivate()) + .thenReturn(artifactImportDTO); + }); + + return Mono.create( + sink -> importedArtifactMono.subscribe(sink::success, sink::error, null, sink.currentContext())); + } + + private Mono deleteArtifactCreatedFromGitImport( + ArtifactJsonTransformationDTO artifactJsonTransformationDTO, GitType gitType) { + + GitArtifactHelper gitArtifactHelper = + gitArtifactHelperResolver.getArtifactHelper(artifactJsonTransformationDTO.getArtifactType()); + GitHandlingService gitHandlingService = gitHandlingServiceResolver.getGitHandlingService(gitType); + + return gitHandlingService + .removeRepository(artifactJsonTransformationDTO) + .zipWith(gitArtifactHelper.deleteArtifact(artifactJsonTransformationDTO.getArtifactId())) 
+ .map(Tuple2::getT2); + } + + private boolean checkIsDatasourceNameConflict( + List existingDatasources, + List importedDatasources, + List pluginList) { + // If we have an existing datasource with the same name but a different type from that in the repo, the import + // api should fail + for (DatasourceStorage datasourceStorage : importedDatasources) { + // Collect the datasource(existing in workspace) which has the same name as the imported datasource + // As names are unique we will need to filter the first element to check if the plugin id is matched + Datasource filteredDatasource = existingDatasources.stream() + .filter(datasource1 -> datasource1.getName().equals(datasourceStorage.getName())) + .findFirst() + .orElse(null); + + // Check if both of the datasources are of the same plugin type + if (filteredDatasource != null) { + long matchCount = pluginList.stream() + .filter(plugin -> { + final String pluginReference = + plugin.getPluginName() == null ? plugin.getPackageName() : plugin.getPluginName(); + + return plugin.getId().equals(filteredDatasource.getPluginId()) + && !datasourceStorage.getPluginId().equals(pluginReference); + }) + .count(); + if (matchCount > 0) { + return true; + } + } + } + return false; + } + + /** + * Connect the artifact from Appsmith to a git repo + * This is the prerequisite step needed to perform all the git operations for an artifact + * We are implementing the deployKey approach and since the deploy-keys are repo level these keys are stored under artifact. 
+ * Each artifact is equal to a repo in the git(and each branch creates a new artifact with default artifact as parent) + * + * @param baseArtifactId : artifactId of the artifact which is getting connected to git + * @param gitConnectDTO artifactId - this is used to link the local git repo to an artifact + * remoteUrl - used for connecting to remote repo etc + * @param originHeader + * @param artifactType + * @param gitType + * @return an artifact with git metadata + */ + @Override + public Mono connectArtifactToGit( + String baseArtifactId, + GitConnectDTO gitConnectDTO, + String originHeader, + ArtifactType artifactType, + GitType gitType) { + /* + * Connecting the artifact for the first time + * The ssh keys are already present in artifact object from generate SSH key step + * We would be updating the remote url and default branchName + * */ + + GitHandlingService gitHandlingService = gitHandlingServiceResolver.getGitHandlingService(gitType); + Set validationErrors = gitHandlingService.validateGitConnectDTO(gitConnectDTO); + + if (!CollectionUtils.isEmpty(validationErrors)) { + return Mono.error(new AppsmithException( + AppsmithError.INVALID_PARAMETER, + validationErrors.stream().findFirst().get())); + } + + if (!StringUtils.hasText(originHeader)) { + return Mono.error(new AppsmithException(AppsmithError.INVALID_PARAMETER, FieldName.ORIGIN)); + } + + Mono currentUserMono = userDataService + .getForCurrentUser() + .filter(userData -> !CollectionUtils.isEmpty(userData.getGitProfiles())) + .switchIfEmpty( + Mono.error(new AppsmithException(AppsmithError.INVALID_GIT_CONFIGURATION, GIT_PROFILE_ERROR))); + + Mono gitUserMono = currentUserMono + .map(userData -> { + GitProfile profile = userData.getGitProfileByKey(baseArtifactId); + if (profile == null + || Boolean.TRUE.equals(profile.getUseGlobalProfile()) + || !StringUtils.hasText(profile.getAuthorName())) { + profile = userData.getGitProfileByKey(DEFAULT); + } + + GitUser gitUser = new GitUser(); + 
gitUser.setName(profile.getAuthorName()); + gitUser.setEmail(profile.getAuthorEmail()); + return gitUser; + }) + .cache(); + + Mono> profileMono = gitProfileUtils + .updateOrCreateGitProfileForCurrentUser(gitConnectDTO.getGitProfile(), baseArtifactId) + .switchIfEmpty( + Mono.error(new AppsmithException(AppsmithError.INVALID_GIT_CONFIGURATION, GIT_PROFILE_ERROR))) + .cache(); + + String repoName = gitHandlingService.getRepoName(gitConnectDTO); + + Mono isPrivateRepoMono = gitHandlingService.isRepoPrivate(gitConnectDTO); + GitArtifactHelper gitArtifactHelper = gitArtifactHelperResolver.getArtifactHelper(artifactType); + AclPermission connectToGitPermission = gitArtifactHelper.getArtifactGitConnectPermission(); + + Mono artifactToConnectMono = gitArtifactHelper + .getArtifactById(baseArtifactId, connectToGitPermission) + .cache(); + Mono connectedArtifactMono = Mono.zip(profileMono, isPrivateRepoMono, artifactToConnectMono) + .flatMap(tuple -> { + Artifact artifact = tuple.getT3(); + Boolean isRepoPrivate = tuple.getT2(); + + return isRepositoryLimitReachedForWorkspace(artifact.getWorkspaceId(), isRepoPrivate) + .flatMap(isLimitReached -> { + if (FALSE.equals(isLimitReached)) { + return Mono.just(artifact); + } + + return gitAnalyticsUtils + .addAnalyticsForGitOperation( + AnalyticsEvents.GIT_PRIVATE_REPO_LIMIT_EXCEEDED, + artifact, + AppsmithError.GIT_APPLICATION_LIMIT_ERROR.getErrorType(), + AppsmithError.GIT_APPLICATION_LIMIT_ERROR.getMessage(), + isRepoPrivate) + .then(Mono.error( + new AppsmithException(AppsmithError.GIT_APPLICATION_LIMIT_ERROR))); + }); + }) + .flatMap(artifact -> { + GitArtifactMetadata gitArtifactMetadata = artifact.getGitArtifactMetadata(); + if (isBaseGitMetadataInvalid(gitArtifactMetadata, gitType)) { + return Mono.error(new AppsmithException(AppsmithError.INVALID_GIT_SSH_CONFIGURATION)); + } else { + GitAuth gitAuth = gitArtifactMetadata.getGitAuth(); + Mono defaultBranchMono = gitHandlingService + .fetchRemoteRepository(gitConnectDTO, 
gitAuth, artifact, repoName) + .onErrorResume(error -> { + log.error("Error while cloning the remote repo, ", error); + + AppsmithException appsmithException = + new AppsmithException(AppsmithError.GIT_GENERIC_ERROR, error.getMessage()); + if (error instanceof TransportException) { + appsmithException = + new AppsmithException(AppsmithError.INVALID_GIT_SSH_CONFIGURATION); + } else if (error instanceof InvalidRemoteException) { + appsmithException = new AppsmithException( + AppsmithError.INVALID_GIT_CONFIGURATION, error.getMessage()); + } else if (error instanceof TimeoutException) { + appsmithException = new AppsmithException(AppsmithError.GIT_EXECUTION_TIMEOUT); + } else if (error instanceof ClassCastException) { + // To catch TransportHttp cast error in case HTTP URL is passed + // instead of SSH URL + if (error.getMessage().contains("TransportHttp")) { + appsmithException = + new AppsmithException(AppsmithError.INVALID_GIT_SSH_URL); + } + } + + ArtifactJsonTransformationDTO jsonTransformationDTO = + new ArtifactJsonTransformationDTO(); + jsonTransformationDTO.setWorkspaceId(artifact.getWorkspaceId()); + jsonTransformationDTO.setArtifactId(artifact.getId()); + jsonTransformationDTO.setRepoName(repoName); + jsonTransformationDTO.setArtifactType(artifactType); + + return gitHandlingService + .removeRepository(jsonTransformationDTO) + .then(gitAnalyticsUtils.addAnalyticsForGitOperation( + AnalyticsEvents.GIT_CONNECT, + artifact, + error.getClass().getName(), + error.getMessage(), + artifact.getGitArtifactMetadata() + .getIsRepoPrivate())) + .then(Mono.error(appsmithException)); + }); + + return Mono.zip(Mono.just(artifact), defaultBranchMono); + } + }) + .flatMap(tuple -> { + Artifact artifact = tuple.getT1(); + String defaultBranch = tuple.getT2(); + + ArtifactJsonTransformationDTO jsonTransformationDTO = new ArtifactJsonTransformationDTO(); + jsonTransformationDTO.setWorkspaceId(artifact.getWorkspaceId()); + jsonTransformationDTO.setArtifactId(artifact.getId()); + 
jsonTransformationDTO.setRepoName(repoName); + jsonTransformationDTO.setArtifactType(artifactType); + + final String artifactId = artifact.getId(); + final String workspaceId = artifact.getWorkspaceId(); + + Mono isClonedRepositoryEmptyMono = + gitHandlingService.validateEmptyRepository(jsonTransformationDTO); + return isClonedRepositoryEmptyMono + .zipWith(isPrivateRepoMono) + .flatMap(objects -> { + Boolean isEmpty = objects.getT1(); + Boolean isRepoPrivate = objects.getT2(); + if (FALSE.equals(isEmpty)) { + return gitAnalyticsUtils + .addAnalyticsForGitOperation( + AnalyticsEvents.GIT_CONNECT, + artifact, + AppsmithError.INVALID_GIT_REPO.getErrorType(), + AppsmithError.INVALID_GIT_REPO.getMessage(), + isRepoPrivate) + .then(Mono.error(new AppsmithException(AppsmithError.INVALID_GIT_REPO))); + } + + GitArtifactMetadata gitArtifactMetadata = artifact.getGitArtifactMetadata(); + gitArtifactMetadata.setDefaultArtifactId(artifactId); + gitArtifactMetadata.setBranchName(defaultBranch); + gitArtifactMetadata.setDefaultBranchName(defaultBranch); + gitArtifactMetadata.setRepoName(repoName); + gitArtifactMetadata.setIsRepoPrivate(isRepoPrivate); + gitArtifactMetadata.setLastCommittedAt(Instant.now()); + + gitHandlingService.setRepositoryDetailsInGitArtifactMetadata( + gitConnectDTO, gitArtifactMetadata); + + // Set branchName for each artifact resource + return exportService + .exportByArtifactId(artifactId, VERSION_CONTROL, artifactType) + .flatMap(artifactJson -> { + artifactJson.getArtifact().setGitArtifactMetadata(gitArtifactMetadata); + return importService.importArtifactInWorkspaceFromGit( + workspaceId, artifactId, artifactJson, defaultBranch); + }); + }) + .onErrorResume(e -> { + if (e instanceof IOException) { + return Mono.error( + new AppsmithException(AppsmithError.GIT_FILE_SYSTEM_ERROR, e.getMessage())); + } + return Mono.error(e); + }); + }) + .flatMap(artifact -> { + ArtifactJsonTransformationDTO jsonTransformationDTO = new ArtifactJsonTransformationDTO(); 
+ jsonTransformationDTO.setWorkspaceId(artifact.getWorkspaceId()); + jsonTransformationDTO.setArtifactId(artifact.getId()); + jsonTransformationDTO.setArtifactType(artifactType); + jsonTransformationDTO.setRepoName(repoName); + + final String README_FILE_NAME = "README.md"; + Mono readMeIntialisationMono = gitHandlingService.initialiseReadMe( + jsonTransformationDTO, artifact, README_FILE_NAME, originHeader); + + return Mono.zip(readMeIntialisationMono, gitUserMono) + .flatMap(tuple2 -> { + String commitMessage = + DEFAULT_COMMIT_MESSAGE + GitDefaultCommitMessage.CONNECT_FLOW.getReason(); + GitUser author = tuple2.getT2(); + CommitDTO commitDTO = new CommitDTO(); + commitDTO.setAuthor(author); + commitDTO.setCommitter(author); + commitDTO.setIsAmendCommit(FALSE); + commitDTO.setMessage(commitMessage); + return gitHandlingService + .createFirstCommit(jsonTransformationDTO, commitDTO) + .then(gitUserMono); + }) + .flatMap(author -> { + // Commit and push artifact to check if the SSH key has the write access + String commitMessage = + DEFAULT_COMMIT_MESSAGE + GitDefaultCommitMessage.CONNECT_FLOW.getReason(); + CommitDTO commitDTO = new CommitDTO(); + commitDTO.setAuthor(author); + commitDTO.setCommitter(author); + commitDTO.setIsAmendCommit(FALSE); + commitDTO.setMessage(commitMessage); + + return this.commitArtifact(baseArtifactId, commitDTO, artifactType, gitType) + .onErrorResume(error -> + // If the push fails remove all the cloned files from local repo + this.detachRemote(baseArtifactId, artifactType) + .flatMap(isDeleted -> { + if (error instanceof TransportException) { + return gitAnalyticsUtils + .addAnalyticsForGitOperation( + AnalyticsEvents.GIT_CONNECT, + artifact, + error.getClass() + .getName(), + error.getMessage(), + artifact.getGitArtifactMetadata() + .getIsRepoPrivate()) + .then(Mono.error(new AppsmithException( + AppsmithError + .INVALID_GIT_SSH_CONFIGURATION, + error.getMessage()))); + } + return Mono.error(new AppsmithException( + 
AppsmithError.GIT_ACTION_FAILED, + "push", + error.getMessage())); + })); + }) + .then(gitAnalyticsUtils.addAnalyticsForGitOperation( + AnalyticsEvents.GIT_CONNECT, + artifact, + artifact.getGitArtifactMetadata().getIsRepoPrivate())); + }); + + return Mono.create( + sink -> connectedArtifactMono.subscribe(sink::success, sink::error, null, sink.currentContext())); + } + + /** + * TODO: commit artifact + * @return + */ + public Mono commitArtifact( + String baseArtifactId, CommitDTO commitDTO, ArtifactType artifactType, GitType gitType) { + return null; + } + + /** + * TODO: implementation quite similar to the disconnectGitRepo + * @param baseArtifactId + * @param artifactType + * @return + */ + protected Mono detachRemote(String baseArtifactId, ArtifactType artifactType) { + return null; + } + + private boolean isBaseGitMetadataInvalid(GitArtifactMetadata gitArtifactMetadata, GitType gitType) { + return gitArtifactMetadata == null + || gitArtifactMetadata.getGitAuth() == null + || gitHandlingServiceResolver + .getGitHandlingService(gitType) + .isGitAuthInvalid(gitArtifactMetadata.getGitAuth()); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceImpl.java new file mode 100644 index 000000000000..49c110d6dca8 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/CentralGitServiceImpl.java @@ -0,0 +1,49 @@ +package com.appsmith.server.git.central; + +import com.appsmith.server.datasources.base.DatasourceService; +import com.appsmith.server.exports.internal.ExportService; +import com.appsmith.server.git.resolver.GitArtifactHelperResolver; +import com.appsmith.server.git.resolver.GitHandlingServiceResolver; +import com.appsmith.server.git.utils.GitAnalyticsUtils; +import com.appsmith.server.git.utils.GitProfileUtils; +import com.appsmith.server.helpers.GitPrivateRepoHelper; 
+import com.appsmith.server.imports.internal.ImportService; +import com.appsmith.server.plugins.base.PluginService; +import com.appsmith.server.services.UserDataService; +import com.appsmith.server.services.WorkspaceService; +import com.appsmith.server.solutions.DatasourcePermission; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +@Slf4j +@Service +public class CentralGitServiceImpl extends CentralGitServiceCECompatibleImpl implements CentralGitService { + + public CentralGitServiceImpl( + GitProfileUtils gitProfileUtils, + GitAnalyticsUtils gitAnalyticsUtils, + UserDataService userDataService, + GitArtifactHelperResolver gitArtifactHelperResolver, + GitHandlingServiceResolver gitHandlingServiceResolver, + GitPrivateRepoHelper gitPrivateRepoHelper, + DatasourceService datasourceService, + DatasourcePermission datasourcePermission, + WorkspaceService workspaceService, + PluginService pluginService, + ImportService importService, + ExportService exportService) { + super( + gitProfileUtils, + gitAnalyticsUtils, + userDataService, + gitArtifactHelperResolver, + gitHandlingServiceResolver, + gitPrivateRepoHelper, + datasourceService, + datasourcePermission, + workspaceService, + pluginService, + importService, + exportService); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingService.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingService.java new file mode 100644 index 000000000000..1fc2a745e16e --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingService.java @@ -0,0 +1,3 @@ +package com.appsmith.server.git.central; + +public interface GitHandlingService extends GitHandlingServiceCECompatible {} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingServiceCE.java 
b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingServiceCE.java new file mode 100644 index 000000000000..66fc2dcbffb3 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingServiceCE.java @@ -0,0 +1,47 @@ +package com.appsmith.server.git.central; + +import com.appsmith.git.dto.CommitDTO; +import com.appsmith.server.domains.Artifact; +import com.appsmith.server.domains.GitArtifactMetadata; +import com.appsmith.server.domains.GitAuth; +import com.appsmith.server.dtos.ArtifactExchangeJson; +import com.appsmith.server.dtos.GitConnectDTO; +import com.appsmith.server.git.dtos.ArtifactJsonTransformationDTO; +import reactor.core.publisher.Mono; + +import java.util.Set; + +public interface GitHandlingServiceCE { + + Set validateGitConnectDTO(GitConnectDTO gitConnectDTO); + + String getRepoName(GitConnectDTO gitConnectDTO); + + Mono isRepoPrivate(GitConnectDTO gitConnectDTO); + + // TODO: modify git auth class for native implementation + Mono getGitAuthForUser(); + + Boolean isGitAuthInvalid(GitAuth gitAuth); + + Mono fetchRemoteRepository( + GitConnectDTO gitConnectDTO, GitAuth gitAuth, Artifact artifact, String repoName); + + Mono reconstructArtifactJsonFromGitRepository( + ArtifactJsonTransformationDTO artifactJsonTransformationDTO); + + void setRepositoryDetailsInGitArtifactMetadata( + GitConnectDTO gitConnectDTO, GitArtifactMetadata gitArtifactMetadata); + + Mono removeRepository(ArtifactJsonTransformationDTO artifactJsonTransformationDTO); + + Mono validateEmptyRepository(ArtifactJsonTransformationDTO artifactJsonTransformationDTO); + + Mono initialiseReadMe( + ArtifactJsonTransformationDTO artifactJsonTransformationDTO, + Artifact artifact, + String readmeFileName, + String originHeader); + + Mono createFirstCommit(ArtifactJsonTransformationDTO jsonTransformationDTO, CommitDTO commitDTO); +} diff --git 
a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingServiceCECompatible.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingServiceCECompatible.java new file mode 100644 index 000000000000..5ff84126defc --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitHandlingServiceCECompatible.java @@ -0,0 +1,3 @@ +package com.appsmith.server.git.central; + +public interface GitHandlingServiceCECompatible extends GitHandlingServiceCE {} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitType.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitType.java new file mode 100644 index 000000000000..5dd097cef053 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/central/GitType.java @@ -0,0 +1,6 @@ +package com.appsmith.server.git.central; + +public enum GitType { + FILE_SYSTEM, + NATIVE +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/dtos/ArtifactJsonTransformationDTO.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/dtos/ArtifactJsonTransformationDTO.java new file mode 100644 index 000000000000..899a545b28ba --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/dtos/ArtifactJsonTransformationDTO.java @@ -0,0 +1,27 @@ +package com.appsmith.server.git.dtos; + +import com.appsmith.server.constants.ArtifactType; +import com.appsmith.server.constants.ce.RefType; +import lombok.Data; + +// TODO: Find a better name for this DTO + +/** + * This DTO carries the info when a json is getting transformed in a git resource map or vice versa, + * this is also responsible for traversing paths for fs ops + */ +@Data +public class ArtifactJsonTransformationDTO { + + String workspaceId; + + String artifactId; + + String repoName; + + String refName; + + ArtifactType artifactType; + + RefType refType; 
+} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceCECompatibleImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceCECompatibleImpl.java new file mode 100644 index 000000000000..aaf5de7aaddd --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceCECompatibleImpl.java @@ -0,0 +1,80 @@ +package com.appsmith.server.git.fs; + +import com.appsmith.external.git.handler.FSGitHandler; +import com.appsmith.server.configurations.EmailConfig; +import com.appsmith.server.datasources.base.DatasourceService; +import com.appsmith.server.exports.internal.ExportService; +import com.appsmith.server.git.GitRedisUtils; +import com.appsmith.server.git.autocommit.helpers.GitAutoCommitHelper; +import com.appsmith.server.git.central.GitHandlingServiceCECompatible; +import com.appsmith.server.git.resolver.GitArtifactHelperResolver; +import com.appsmith.server.git.utils.GitAnalyticsUtils; +import com.appsmith.server.git.utils.GitProfileUtils; +import com.appsmith.server.helpers.CommonGitFileUtils; +import com.appsmith.server.helpers.GitPrivateRepoHelper; +import com.appsmith.server.imports.internal.ImportService; +import com.appsmith.server.plugins.base.PluginService; +import com.appsmith.server.repositories.GitDeployKeysRepository; +import com.appsmith.server.services.AnalyticsService; +import com.appsmith.server.services.SessionUserService; +import com.appsmith.server.services.UserDataService; +import com.appsmith.server.services.UserService; +import com.appsmith.server.services.WorkspaceService; +import com.appsmith.server.solutions.DatasourcePermission; +import io.micrometer.observation.ObservationRegistry; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; +import org.springframework.transaction.reactive.TransactionalOperator; + +@Slf4j +@Service +public class GitFSServiceCECompatibleImpl extends GitFSServiceCEImpl implements 
GitHandlingServiceCECompatible { + + public GitFSServiceCECompatibleImpl( + GitDeployKeysRepository gitDeployKeysRepository, + GitPrivateRepoHelper gitPrivateRepoHelper, + CommonGitFileUtils commonGitFileUtils, + GitRedisUtils gitRedisUtils, + SessionUserService sessionUserService, + UserDataService userDataService, + UserService userService, + EmailConfig emailConfig, + TransactionalOperator transactionalOperator, + AnalyticsService analyticsService, + ObservationRegistry observationRegistry, + WorkspaceService workspaceService, + DatasourceService datasourceService, + DatasourcePermission datasourcePermission, + PluginService pluginService, + ExportService exportService, + ImportService importService, + FSGitHandler fsGitHandler, + GitAutoCommitHelper gitAutoCommitHelper, + GitProfileUtils gitProfileUtils, + GitAnalyticsUtils gitAnalyticsUtils, + GitArtifactHelperResolver gitArtifactHelperResolver) { + super( + gitDeployKeysRepository, + gitPrivateRepoHelper, + commonGitFileUtils, + gitRedisUtils, + sessionUserService, + userDataService, + userService, + emailConfig, + transactionalOperator, + analyticsService, + observationRegistry, + workspaceService, + datasourceService, + datasourcePermission, + pluginService, + exportService, + importService, + fsGitHandler, + gitAutoCommitHelper, + gitProfileUtils, + gitAnalyticsUtils, + gitArtifactHelperResolver); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceCEImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceCEImpl.java new file mode 100644 index 000000000000..563a34b15b34 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceCEImpl.java @@ -0,0 +1,289 @@ +package com.appsmith.server.git.fs; + +import com.appsmith.external.constants.AnalyticsEvents; +import com.appsmith.external.git.constants.GitSpan; +import com.appsmith.external.git.handler.FSGitHandler; +import 
com.appsmith.git.dto.CommitDTO; +import com.appsmith.server.configurations.EmailConfig; +import com.appsmith.server.datasources.base.DatasourceService; +import com.appsmith.server.domains.Artifact; +import com.appsmith.server.domains.GitArtifactMetadata; +import com.appsmith.server.domains.GitAuth; +import com.appsmith.server.domains.GitDeployKeys; +import com.appsmith.server.dtos.ArtifactExchangeJson; +import com.appsmith.server.dtos.GitConnectDTO; +import com.appsmith.server.exceptions.AppsmithError; +import com.appsmith.server.exceptions.AppsmithException; +import com.appsmith.server.exports.internal.ExportService; +import com.appsmith.server.git.GitRedisUtils; +import com.appsmith.server.git.autocommit.helpers.GitAutoCommitHelper; +import com.appsmith.server.git.central.GitHandlingServiceCE; +import com.appsmith.server.git.dtos.ArtifactJsonTransformationDTO; +import com.appsmith.server.git.resolver.GitArtifactHelperResolver; +import com.appsmith.server.git.utils.GitAnalyticsUtils; +import com.appsmith.server.git.utils.GitProfileUtils; +import com.appsmith.server.helpers.CommonGitFileUtils; +import com.appsmith.server.helpers.GitPrivateRepoHelper; +import com.appsmith.server.helpers.GitUtils; +import com.appsmith.server.imports.internal.ImportService; +import com.appsmith.server.plugins.base.PluginService; +import com.appsmith.server.repositories.GitDeployKeysRepository; +import com.appsmith.server.services.AnalyticsService; +import com.appsmith.server.services.GitArtifactHelper; +import com.appsmith.server.services.SessionUserService; +import com.appsmith.server.services.UserDataService; +import com.appsmith.server.services.UserService; +import com.appsmith.server.services.WorkspaceService; +import com.appsmith.server.solutions.DatasourcePermission; +import io.micrometer.observation.ObservationRegistry; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.eclipse.jgit.api.errors.InvalidRemoteException; +import 
org.eclipse.jgit.api.errors.TransportException; +import org.springframework.stereotype.Service; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.util.StringUtils; +import reactor.core.observability.micrometer.Micrometer; +import reactor.core.publisher.Mono; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeoutException; + +@Slf4j +@Service +@RequiredArgsConstructor +public class GitFSServiceCEImpl implements GitHandlingServiceCE { + + private final GitDeployKeysRepository gitDeployKeysRepository; + private final GitPrivateRepoHelper gitPrivateRepoHelper; + private final CommonGitFileUtils commonGitFileUtils; + private final GitRedisUtils gitRedisUtils; + protected final SessionUserService sessionUserService; + private final UserDataService userDataService; + protected final UserService userService; + private final EmailConfig emailConfig; + private final TransactionalOperator transactionalOperator; + + protected final AnalyticsService analyticsService; + private final ObservationRegistry observationRegistry; + + private final WorkspaceService workspaceService; + private final DatasourceService datasourceService; + private final DatasourcePermission datasourcePermission; + private final PluginService pluginService; + + private final ExportService exportService; + private final ImportService importService; + + private final FSGitHandler fsGitHandler; + private final GitAutoCommitHelper gitAutoCommitHelper; + + private final GitProfileUtils gitProfileUtils; + private final GitAnalyticsUtils gitAnalyticsUtils; + + protected final GitArtifactHelperResolver gitArtifactHelperResolver; + + private static final String ORIGIN = "origin/"; + private static final String REMOTE_NAME_REPLACEMENT = ""; + + private Mono addFileLock(String baseArtifactId, String commandName, boolean isLockRequired) { + if 
(!Boolean.TRUE.equals(isLockRequired)) { + return Mono.just(Boolean.TRUE); + } + + return Mono.defer(() -> addFileLock(baseArtifactId, commandName)); + } + + private Mono addFileLock(String baseArtifactId, String commandName) { + return gitRedisUtils.addFileLock(baseArtifactId, commandName); + } + + private Mono releaseFileLock(String baseArtifactId, boolean isLockRequired) { + if (!Boolean.TRUE.equals(isLockRequired)) { + return Mono.just(Boolean.TRUE); + } + + return releaseFileLock(baseArtifactId); + } + + private Mono releaseFileLock(String baseArtifactId) { + return gitRedisUtils + .releaseFileLock(baseArtifactId) + .name(GitSpan.RELEASE_FILE_LOCK) + .tap(Micrometer.observation(observationRegistry)); + } + + @Override + public Set validateGitConnectDTO(GitConnectDTO gitConnectDTO) { + Set errors = new HashSet<>(); + + if (!StringUtils.hasText(gitConnectDTO.getRemoteUrl())) { + errors.add("remoteUrl"); + } + + try { + GitUtils.convertSshUrlToBrowserSupportedUrl(gitConnectDTO.getRemoteUrl()); + } catch (AppsmithException error) { + errors.add("browserSupportedRemoteUrl"); + } + + return errors; + } + + @Override + public String getRepoName(GitConnectDTO gitConnectDTO) { + return GitUtils.getRepoName(gitConnectDTO.getRemoteUrl()); + } + + @Override + public Mono isRepoPrivate(GitConnectDTO gitConnectDTO) { + return GitUtils.isRepoPrivate(GitUtils.convertSshUrlToBrowserSupportedUrl(gitConnectDTO.getRemoteUrl())); + } + + @Override + public Mono getGitAuthForUser() { + return sessionUserService + .getCurrentUser() + .flatMap(user -> gitDeployKeysRepository.findByEmail(user.getEmail())) + .map(GitDeployKeys::getGitAuth) + .switchIfEmpty( + Mono.error( + new AppsmithException( + AppsmithError.INVALID_GIT_CONFIGURATION, + "Unable to find git configuration for logged-in user. 
Please contact Appsmith team for support"))); + } + + @Override + public Boolean isGitAuthInvalid(GitAuth gitAuth) { + return !StringUtils.hasText(gitAuth.getPrivateKey()) || !StringUtils.hasText(gitAuth.getPublicKey()); + } + + @Override + public Mono fetchRemoteRepository( + GitConnectDTO gitConnectDTO, GitAuth gitAuth, Artifact artifact, String repoName) { + + GitArtifactHelper gitArtifactHelper = + gitArtifactHelperResolver.getArtifactHelper(artifact.getArtifactType()); + Path repoSuffix = gitArtifactHelper.getRepoSuffixPath(artifact.getWorkspaceId(), artifact.getId(), repoName); + + return fsGitHandler + .cloneRemoteIntoArtifactRepo( + repoSuffix, gitConnectDTO.getRemoteUrl(), gitAuth.getPrivateKey(), gitAuth.getPublicKey()) + .onErrorResume(error -> { + log.error("Error while cloning the remote repo, {}", error.getMessage()); + return gitAnalyticsUtils + .addAnalyticsForGitOperation( + AnalyticsEvents.GIT_IMPORT, + artifact, + error.getClass().getName(), + error.getMessage(), + false) + .flatMap(user -> commonGitFileUtils + .deleteLocalRepo(repoSuffix) + .then(gitArtifactHelper.deleteArtifact(artifact.getId()))) + .flatMap(artifact1 -> { + if (error instanceof TransportException) { + return Mono.error( + new AppsmithException(AppsmithError.INVALID_GIT_SSH_CONFIGURATION)); + } else if (error instanceof InvalidRemoteException) { + return Mono.error( + new AppsmithException(AppsmithError.INVALID_PARAMETER, "remote url")); + } else if (error instanceof TimeoutException) { + return Mono.error(new AppsmithException(AppsmithError.GIT_EXECUTION_TIMEOUT)); + } + return Mono.error( + new AppsmithException(AppsmithError.GIT_ACTION_FAILED, "clone", error)); + }); + }); + } + + @Override + public void setRepositoryDetailsInGitArtifactMetadata( + GitConnectDTO gitConnectDTO, GitArtifactMetadata gitArtifactMetadata) { + gitArtifactMetadata.setRemoteUrl(gitConnectDTO.getRemoteUrl()); + gitArtifactMetadata.setBrowserSupportedRemoteUrl( + 
GitUtils.convertSshUrlToBrowserSupportedUrl(gitConnectDTO.getRemoteUrl())); + } + + @Override + public Mono reconstructArtifactJsonFromGitRepository( + ArtifactJsonTransformationDTO artifactJsonTransformationDTO) { + return commonGitFileUtils.reconstructArtifactExchangeJsonFromGitRepoWithAnalytics( + artifactJsonTransformationDTO.getWorkspaceId(), + artifactJsonTransformationDTO.getArtifactId(), + artifactJsonTransformationDTO.getRepoName(), + artifactJsonTransformationDTO.getRefName(), + artifactJsonTransformationDTO.getArtifactType()); + } + + @Override + public Mono removeRepository(ArtifactJsonTransformationDTO artifactJsonTransformationDTO) { + GitArtifactHelper gitArtifactHelper = + gitArtifactHelperResolver.getArtifactHelper(artifactJsonTransformationDTO.getArtifactType()); + Path repoSuffix = gitArtifactHelper.getRepoSuffixPath( + artifactJsonTransformationDTO.getWorkspaceId(), + artifactJsonTransformationDTO.getArtifactId(), + artifactJsonTransformationDTO.getRepoName()); + return commonGitFileUtils.deleteLocalRepo(repoSuffix); + } + + @Override + public Mono validateEmptyRepository(ArtifactJsonTransformationDTO artifactJsonTransformationDTO) { + GitArtifactHelper gitArtifactHelper = + gitArtifactHelperResolver.getArtifactHelper(artifactJsonTransformationDTO.getArtifactType()); + Path repoSuffix = gitArtifactHelper.getRepoSuffixPath( + artifactJsonTransformationDTO.getWorkspaceId(), + artifactJsonTransformationDTO.getArtifactId(), + artifactJsonTransformationDTO.getRepoName()); + + try { + return commonGitFileUtils.checkIfDirectoryIsEmpty(repoSuffix); + } catch (IOException ioException) { + log.error("Error while validating empty repository, {}", ioException.getMessage()); + return Mono.error(new AppsmithException(AppsmithError.GIT_FILE_SYSTEM_ERROR, ioException.getMessage())); + } + } + + @Override + public Mono initialiseReadMe( + ArtifactJsonTransformationDTO jsonTransformationDTO, + Artifact artifact, + String readmeFileName, + String originHeader) { + 
GitArtifactHelper gitArtifactHelper = + gitArtifactHelperResolver.getArtifactHelper(jsonTransformationDTO.getArtifactType()); + Path readmePath = gitArtifactHelper.getRepoSuffixPath( + jsonTransformationDTO.getWorkspaceId(), + jsonTransformationDTO.getArtifactId(), + jsonTransformationDTO.getRepoName()); + try { + return gitArtifactHelper + .intialiseReadMe(artifact, readmePath, originHeader) + .map(path -> Boolean.TRUE); + } catch (IOException ioException) { + log.error("Error while creating readme file in the repository, {}", ioException.getMessage()); + return Mono.error(new AppsmithException(AppsmithError.GIT_FILE_SYSTEM_ERROR, ioException.getMessage())); + } + } + + @Override + public Mono createFirstCommit(ArtifactJsonTransformationDTO jsonTransformationDTO, CommitDTO commitDTO) { + GitArtifactHelper gitArtifactHelper = + gitArtifactHelperResolver.getArtifactHelper(jsonTransformationDTO.getArtifactType()); + Path repoSuffix = gitArtifactHelper.getRepoSuffixPath( + jsonTransformationDTO.getWorkspaceId(), + jsonTransformationDTO.getArtifactId(), + jsonTransformationDTO.getRepoName()); + + return fsGitHandler.commitArtifact( + repoSuffix, + commitDTO.getMessage(), + commitDTO.getAuthor().getName(), + commitDTO.getAuthor().getEmail(), + true, + commitDTO.getIsAmendCommit()); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceImpl.java new file mode 100644 index 000000000000..b3b31b5c97d1 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/fs/GitFSServiceImpl.java @@ -0,0 +1,80 @@ +package com.appsmith.server.git.fs; + +import com.appsmith.external.git.handler.FSGitHandler; +import com.appsmith.server.configurations.EmailConfig; +import com.appsmith.server.datasources.base.DatasourceService; +import com.appsmith.server.exports.internal.ExportService; +import 
com.appsmith.server.git.GitRedisUtils; +import com.appsmith.server.git.autocommit.helpers.GitAutoCommitHelper; +import com.appsmith.server.git.central.GitHandlingService; +import com.appsmith.server.git.resolver.GitArtifactHelperResolver; +import com.appsmith.server.git.utils.GitAnalyticsUtils; +import com.appsmith.server.git.utils.GitProfileUtils; +import com.appsmith.server.helpers.CommonGitFileUtils; +import com.appsmith.server.helpers.GitPrivateRepoHelper; +import com.appsmith.server.imports.internal.ImportService; +import com.appsmith.server.plugins.base.PluginService; +import com.appsmith.server.repositories.GitDeployKeysRepository; +import com.appsmith.server.services.AnalyticsService; +import com.appsmith.server.services.SessionUserService; +import com.appsmith.server.services.UserDataService; +import com.appsmith.server.services.UserService; +import com.appsmith.server.services.WorkspaceService; +import com.appsmith.server.solutions.DatasourcePermission; +import io.micrometer.observation.ObservationRegistry; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; +import org.springframework.transaction.reactive.TransactionalOperator; + +@Slf4j +@Service +public class GitFSServiceImpl extends GitFSServiceCECompatibleImpl implements GitHandlingService { + + public GitFSServiceImpl( + GitDeployKeysRepository gitDeployKeysRepository, + GitPrivateRepoHelper gitPrivateRepoHelper, + CommonGitFileUtils commonGitFileUtils, + GitRedisUtils gitRedisUtils, + SessionUserService sessionUserService, + UserDataService userDataService, + UserService userService, + EmailConfig emailConfig, + TransactionalOperator transactionalOperator, + AnalyticsService analyticsService, + ObservationRegistry observationRegistry, + WorkspaceService workspaceService, + DatasourceService datasourceService, + DatasourcePermission datasourcePermission, + PluginService pluginService, + ExportService exportService, + ImportService importService, + FSGitHandler 
fsGitHandler, + GitAutoCommitHelper gitAutoCommitHelper, + GitProfileUtils gitProfileUtils, + GitAnalyticsUtils gitAnalyticsUtils, + GitArtifactHelperResolver gitArtifactHelperResolver) { + super( + gitDeployKeysRepository, + gitPrivateRepoHelper, + commonGitFileUtils, + gitRedisUtils, + sessionUserService, + userDataService, + userService, + emailConfig, + transactionalOperator, + analyticsService, + observationRegistry, + workspaceService, + datasourceService, + datasourcePermission, + pluginService, + exportService, + importService, + fsGitHandler, + gitAutoCommitHelper, + gitProfileUtils, + gitAnalyticsUtils, + gitArtifactHelperResolver); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitArtifactHelperResolver.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitArtifactHelperResolver.java new file mode 100644 index 000000000000..2b713abb9802 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitArtifactHelperResolver.java @@ -0,0 +1,18 @@ +package com.appsmith.server.git.resolver; + +import com.appsmith.server.domains.Application; +import com.appsmith.server.git.fs.GitFSServiceImpl; +import com.appsmith.server.services.GitArtifactHelper; +import lombok.extern.slf4j.Slf4j; +import org.springframework.context.annotation.Lazy; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class GitArtifactHelperResolver extends GitArtifactHelperResolverCE { + + public GitArtifactHelperResolver( + @Lazy GitFSServiceImpl gitFSService, GitArtifactHelper gitApplicationHelper) { + super(gitFSService, gitApplicationHelper); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitArtifactHelperResolverCE.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitArtifactHelperResolverCE.java new file mode 100644 index 000000000000..fef508e31814 --- /dev/null +++ 
b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitArtifactHelperResolverCE.java @@ -0,0 +1,24 @@ +package com.appsmith.server.git.resolver; + +import com.appsmith.server.constants.ArtifactType; +import com.appsmith.server.domains.Application; +import com.appsmith.server.git.fs.GitFSServiceImpl; +import com.appsmith.server.services.GitArtifactHelper; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class GitArtifactHelperResolverCE { + + protected final GitFSServiceImpl gitFSService; + protected final GitArtifactHelper gitApplicationHelper; + + public GitArtifactHelper getArtifactHelper(@NonNull ArtifactType artifactType) { + return switch (artifactType) { + case APPLICATION -> gitApplicationHelper; + default -> gitApplicationHelper; + }; + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitHandlingServiceResolver.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitHandlingServiceResolver.java new file mode 100644 index 000000000000..35509f7a7c0e --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitHandlingServiceResolver.java @@ -0,0 +1,14 @@ +package com.appsmith.server.git.resolver; + +import com.appsmith.server.git.fs.GitFSServiceImpl; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class GitHandlingServiceResolver extends GitHandlingServiceResolverCE { + + public GitHandlingServiceResolver(GitFSServiceImpl gitFSService) { + super(gitFSService); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitHandlingServiceResolverCE.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitHandlingServiceResolverCE.java new file mode 100644 index 000000000000..60e240a4957c --- /dev/null +++ 
b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/resolver/GitHandlingServiceResolverCE.java @@ -0,0 +1,22 @@ +package com.appsmith.server.git.resolver; + +import com.appsmith.server.git.central.GitHandlingService; +import com.appsmith.server.git.central.GitType; +import com.appsmith.server.git.fs.GitFSServiceImpl; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class GitHandlingServiceResolverCE { + + protected final GitFSServiceImpl gitFSService; + + public GitHandlingService getGitHandlingService(@NonNull GitType gitType) { + return switch (gitType) { + case FILE_SYSTEM -> gitFSService; + default -> gitFSService; + }; + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/utils/GitAnalyticsUtils.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/utils/GitAnalyticsUtils.java new file mode 100644 index 000000000000..add712ac1c77 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/utils/GitAnalyticsUtils.java @@ -0,0 +1,125 @@ +package com.appsmith.server.git.utils; + +import com.appsmith.external.constants.AnalyticsEvents; +import com.appsmith.server.constants.FieldName; +import com.appsmith.server.domains.ApplicationMode; +import com.appsmith.server.domains.Artifact; +import com.appsmith.server.domains.GitArtifactMetadata; +import com.appsmith.server.helpers.GitUtils; +import com.appsmith.server.services.AnalyticsService; +import com.appsmith.server.services.SessionUserService; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; +import org.springframework.util.StringUtils; +import reactor.core.publisher.Mono; + +import java.util.HashMap; +import java.util.Map; + +import static org.apache.commons.lang.ObjectUtils.defaultIfNull; + +@Slf4j +@Component +@RequiredArgsConstructor +public class 
GitAnalyticsUtils { + + private final SessionUserService sessionUserService; + private final AnalyticsService analyticsService; + + public Mono addAnalyticsForGitOperation( + AnalyticsEvents eventName, Artifact artifact, Boolean isRepoPrivate) { + return addAnalyticsForGitOperation(eventName, artifact, "", "", isRepoPrivate, false); + } + + public Mono addAnalyticsForGitOperation( + AnalyticsEvents eventName, String branchName, Artifact artifact) { + return addAnalyticsForGitOperation(eventName, artifact, null, null, null, false, null, branchName); + } + + public Mono addAnalyticsForGitOperation( + AnalyticsEvents eventName, + Artifact artifact, + String errorType, + String errorMessage, + Boolean isRepoPrivate) { + return addAnalyticsForGitOperation(eventName, artifact, errorType, errorMessage, isRepoPrivate, false); + } + + public Mono addAnalyticsForGitOperation( + AnalyticsEvents event, + Artifact artifact, + String errorType, + String errorMessage, + Boolean isRepoPrivate, + Boolean isSystemGenerated) { + return addAnalyticsForGitOperation( + event, artifact, errorType, errorMessage, isRepoPrivate, isSystemGenerated, null); + } + + public Mono addAnalyticsForGitOperation( + AnalyticsEvents event, + Artifact artifact, + String errorType, + String errorMessage, + Boolean isRepoPrivate, + Boolean isSystemGenerated, + Boolean isMergeable) { + + String branchName = artifact.getGitArtifactMetadata() != null + ? 
artifact.getGitArtifactMetadata().getBranchName() + : null; + return addAnalyticsForGitOperation( + event, artifact, errorType, errorMessage, isRepoPrivate, isSystemGenerated, isMergeable, branchName); + } + + public Mono addAnalyticsForGitOperation( + AnalyticsEvents event, + Artifact artifact, + String errorType, + String errorMessage, + Boolean isRepoPrivate, + Boolean isSystemGenerated, + Boolean isMergeable, + String branchName) { + GitArtifactMetadata gitData = artifact.getGitArtifactMetadata(); + Map analyticsProps = new HashMap<>(); + if (gitData != null) { + analyticsProps.put(FieldName.APPLICATION_ID, gitData.getDefaultArtifactId()); + analyticsProps.put("appId", gitData.getDefaultArtifactId()); + analyticsProps.put(FieldName.BRANCH_NAME, branchName); + analyticsProps.put(FieldName.GIT_HOSTING_PROVIDER, GitUtils.getGitProviderName(gitData.getRemoteUrl())); + analyticsProps.put(FieldName.REPO_URL, gitData.getRemoteUrl()); + if (event == AnalyticsEvents.GIT_COMMIT) { + analyticsProps.put("isAutoCommit", false); + } + } + // Do not include the error data points in the map for success states + if (StringUtils.hasText(errorMessage) || StringUtils.hasText(errorType)) { + analyticsProps.put("errorMessage", errorMessage); + analyticsProps.put("errorType", errorType); + } + + // Do not include the isMergeable for all the events + if (isMergeable != null) { + analyticsProps.put(FieldName.IS_MERGEABLE, isMergeable); + } + analyticsProps.putAll(Map.of( + FieldName.ORGANIZATION_ID, + defaultIfNull(artifact.getWorkspaceId(), ""), + "orgId", + defaultIfNull(artifact.getWorkspaceId(), ""), + "branchApplicationId", + defaultIfNull(artifact.getId(), ""), + "isRepoPrivate", + defaultIfNull(isRepoPrivate, ""), + "isSystemGenerated", + defaultIfNull(isSystemGenerated, ""))); + final Map eventData = + Map.of(FieldName.APP_MODE, ApplicationMode.EDIT.toString(), FieldName.APPLICATION, artifact); + analyticsProps.put(FieldName.EVENT_DATA, eventData); + return 
sessionUserService.getCurrentUser().flatMap(user -> analyticsService + .sendEvent(event.getEventName(), user.getUsername(), analyticsProps) + .thenReturn(artifact)); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/git/utils/GitProfileUtils.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/utils/GitProfileUtils.java new file mode 100644 index 000000000000..6ce9acc94f38 --- /dev/null +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/git/utils/GitProfileUtils.java @@ -0,0 +1,169 @@ +package com.appsmith.server.git.utils; + +import com.appsmith.server.constants.FieldName; +import com.appsmith.server.domains.GitProfile; +import com.appsmith.server.domains.UserData; +import com.appsmith.server.exceptions.AppsmithError; +import com.appsmith.server.exceptions.AppsmithException; +import com.appsmith.server.helpers.CollectionUtils; +import com.appsmith.server.services.SessionUserService; +import com.appsmith.server.services.UserDataService; +import com.appsmith.server.services.UserService; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.eclipse.jgit.util.StringUtils; +import org.springframework.stereotype.Component; +import reactor.core.publisher.Mono; + +import java.util.Map; + +import static com.appsmith.server.constants.ce.FieldNameCE.DEFAULT; + +@Slf4j +@Component +@RequiredArgsConstructor +public class GitProfileUtils { + + private final SessionUserService sessionUserService; + private final UserDataService userDataService; + private final UserService userService; + + public Mono> updateOrCreateGitProfileForCurrentUser( + GitProfile gitProfile, String baseArtifactId) { + + // Throw error in following situations: + // 1. Updating or creating global git profile (defaultApplicationId = "default") and update is made with empty + // authorName or authorEmail + // 2. 
Updating or creating repo specific profile and user want to use repo specific profile but provided empty + // values for authorName and email + + if ((DEFAULT.equals(baseArtifactId) || Boolean.FALSE.equals(gitProfile.getUseGlobalProfile())) + && StringUtils.isEmptyOrNull(gitProfile.getAuthorName())) { + return Mono.error(new AppsmithException(AppsmithError.INVALID_PARAMETER, "Author Name")); + } else if ((DEFAULT.equals(baseArtifactId) || Boolean.FALSE.equals(gitProfile.getUseGlobalProfile())) + && StringUtils.isEmptyOrNull(gitProfile.getAuthorEmail())) { + return Mono.error(new AppsmithException(AppsmithError.INVALID_PARAMETER, "Author Email")); + } else if (StringUtils.isEmptyOrNull(baseArtifactId)) { + return Mono.error(new AppsmithException(AppsmithError.INVALID_PARAMETER, FieldName.ARTIFACT_ID)); + } + + if (DEFAULT.equals(baseArtifactId)) { + gitProfile.setUseGlobalProfile(null); + } else if (!Boolean.TRUE.equals(gitProfile.getUseGlobalProfile())) { + gitProfile.setUseGlobalProfile(Boolean.FALSE); + } + + return sessionUserService + .getCurrentUser() + .flatMap(user -> userService.findByEmail(user.getEmail())) + .flatMap(user -> userDataService + .getForUser(user.getId()) + .flatMap(userData -> { + // GitProfiles will be null if the user has not created any git profile. + GitProfile savedProfile = userData.getGitProfileByKey(baseArtifactId); + GitProfile defaultGitProfile = userData.getGitProfileByKey(DEFAULT); + + if (savedProfile == null || !savedProfile.equals(gitProfile) || defaultGitProfile == null) { + userData.setGitProfiles(userData.setGitProfileByKey(baseArtifactId, gitProfile)); + + // Assign appsmith user profile as a fallback git profile + if (defaultGitProfile == null) { + GitProfile userProfile = new GitProfile(); + String authorName = StringUtils.isEmptyOrNull(user.getName()) + ? 
user.getUsername().split("@")[0] + : user.getName(); + userProfile.setAuthorEmail(user.getEmail()); + userProfile.setAuthorName(authorName); + userProfile.setUseGlobalProfile(null); + userData.setGitProfiles(userData.setGitProfileByKey(DEFAULT, userProfile)); + } + + // Update userData here + UserData requiredUpdates = new UserData(); + requiredUpdates.setGitProfiles(userData.getGitProfiles()); + return userDataService + .updateForUser(user, requiredUpdates) + .map(UserData::getGitProfiles); + } + return Mono.just(userData.getGitProfiles()); + }) + .switchIfEmpty(Mono.defer(() -> { + // If profiles are empty use Appsmith's user profile as git default profile + GitProfile profile = new GitProfile(); + String authorName = StringUtils.isEmptyOrNull(user.getName()) + ? user.getUsername().split("@")[0] + : user.getName(); + + profile.setAuthorName(authorName); + profile.setAuthorEmail(user.getEmail()); + + UserData requiredUpdates = new UserData(); + requiredUpdates.setGitProfiles(Map.of(DEFAULT, gitProfile)); + return userDataService + .updateForUser(user, requiredUpdates) + .map(UserData::getGitProfiles); + })) + .filter(profiles -> !CollectionUtils.isNullOrEmpty(profiles))); + } + + public Mono> updateOrCreateGitProfileForCurrentUser(GitProfile gitProfile) { + gitProfile.setUseGlobalProfile(null); + return updateOrCreateGitProfileForCurrentUser(gitProfile, DEFAULT); + } + + public Mono getDefaultGitProfileOrCreateIfEmpty() { + // Get default git profile if the default is empty then use Appsmith profile as a fallback value + return getGitProfileForUser(DEFAULT).flatMap(gitProfile -> { + if (StringUtils.isEmptyOrNull(gitProfile.getAuthorName()) + || StringUtils.isEmptyOrNull(gitProfile.getAuthorEmail())) { + return updateGitProfileWithAppsmithProfile(DEFAULT); + } + gitProfile.setUseGlobalProfile(null); + return Mono.just(gitProfile); + }); + } + + public Mono getGitProfileForUser(String baseArtifactId) { + return userDataService.getForCurrentUser().map(userData -> { + 
GitProfile gitProfile = userData.getGitProfileByKey(baseArtifactId); + if (gitProfile != null && gitProfile.getUseGlobalProfile() == null) { + gitProfile.setUseGlobalProfile(true); + } else if (gitProfile == null) { + // If the profile is requested for repo specific using the applicationId + GitProfile gitProfile1 = new GitProfile(); + gitProfile1.setAuthorName(""); + gitProfile1.setAuthorEmail(""); + gitProfile1.setUseGlobalProfile(true); + return gitProfile1; + } + return gitProfile; + }); + } + + private Mono updateGitProfileWithAppsmithProfile(String key) { + return sessionUserService + .getCurrentUser() + .flatMap(user -> userService.findByEmail(user.getEmail())) + .flatMap(currentUser -> { + GitProfile gitProfile = new GitProfile(); + String authorName = StringUtils.isEmptyOrNull(currentUser.getName()) + ? currentUser.getUsername().split("@")[0] + : currentUser.getName(); + gitProfile.setAuthorEmail(currentUser.getEmail()); + gitProfile.setAuthorName(authorName); + gitProfile.setUseGlobalProfile(null); + return userDataService.getForUser(currentUser).flatMap(userData -> { + UserData updates = new UserData(); + if (CollectionUtils.isNullOrEmpty(userData.getGitProfiles())) { + updates.setGitProfiles(Map.of(key, gitProfile)); + } else { + userData.getGitProfiles().put(key, gitProfile); + updates.setGitProfiles(userData.getGitProfiles()); + } + return userDataService + .updateForUser(currentUser, updates) + .thenReturn(gitProfile); + }); + }); + } +} diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/onload/internal/OnLoadExecutablesUtilCEImpl.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/onload/internal/OnLoadExecutablesUtilCEImpl.java index 08c6840bc62c..ef535c21694f 100644 --- a/app/server/appsmith-server/src/main/java/com/appsmith/server/onload/internal/OnLoadExecutablesUtilCEImpl.java +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/onload/internal/OnLoadExecutablesUtilCEImpl.java @@ -524,9 
+524,8 @@ private Mono> getPossibleEntityReferences( Set bindingsInDsl) { // We want to be finding both type of references final int entityTypes = EXECUTABLE_ENTITY_REFERENCES | WIDGET_ENTITY_REFERENCES; - return executableNameToExecutableMono - .zipWith(getPossibleEntityParentsMap(bindings, entityTypes, evalVersion)) + .zipWith(getPossibleEntityParentsMap(new ArrayList<>(bindings), entityTypes, evalVersion)) .map(tuple -> { Map executableMap = tuple.getT1(); // For each binding, here we receive a set of possible references to global entities @@ -584,6 +583,79 @@ private Mono> getPossibleEntityReferences( }); } + private Mono>> getPossibleEntityReferencesMap( + Mono> executableNameToExecutableMono, + List bindings, + int evalVersion, + Set bindingsInDsl) { + // We want to be finding both type of references + final int entityTypes = EXECUTABLE_ENTITY_REFERENCES | WIDGET_ENTITY_REFERENCES; + + return executableNameToExecutableMono + .zipWith(getPossibleEntityParentsMap(bindings, entityTypes, evalVersion)) + .map(tuple -> { + Map executableMap = tuple.getT1(); + // For each binding, here we receive a set of possible references to global entities + // At this point we're guaranteed that these references are made to possible variables, + // but we do not know if those entities exist in the global namespace yet + Map> bindingToPossibleParentMap = tuple.getT2(); + + Map> possibleEntitiesReferencesToBindingMap = new HashMap<>(); + + // From these references, we will try to validate executable references at this point + // Each identified node is already annotated with the expected type of entity we need to search for + bindingToPossibleParentMap.entrySet().stream().forEach(entry -> { + Set bindingsWithExecutableReference = new HashSet<>(); + String binding = entry.getKey(); + Set possibleEntitiesReferences = new HashSet<>(); + entry.getValue().stream().forEach(possibleParent -> { + // For each possible reference node, check if the reference was to an executable + 
Executable executable = executableMap.get(possibleParent.getValidEntityName()); + + if (executable != null) { + // If it was, and had been identified as the same type of executable as what exists in + // this app, + if (possibleParent + .getEntityReferenceType() + .equals(executable.getEntityReferenceType())) { + // Copy over some data from the identified executable, this ensures that we do not + // have + // to query the DB again later + possibleParent.setExecutable(executable); + bindingsWithExecutableReference.add(possibleParent); + // Only if this is not a direct JS function call, + // add it to a possible on page load executable call. + // This discards the following type: + // {{ JSObject1.func() }} + if (!TRUE.equals(possibleParent.getIsFunctionCall())) { + possibleEntitiesReferences.add(possibleParent); + } + // We're ignoring any reference that was identified as a widget but actually matched + // an executable + // We wouldn't have discarded JS collection names here, but this is just an + // optimization, so it's fine + } + } else { + // If the reference node was identified as a widget, directly add it as a possible + // reference + // Because we are not doing any validations for widget references at this point + if (EntityReferenceType.WIDGET.equals(possibleParent.getEntityReferenceType())) { + possibleEntitiesReferences.add(possibleParent); + } + } + + possibleEntitiesReferencesToBindingMap.put(binding, possibleEntitiesReferences); + }); + + if (!bindingsWithExecutableReference.isEmpty() && bindingsInDsl != null) { + bindingsInDsl.addAll(bindingsWithExecutableReference); + } + }); + + return possibleEntitiesReferencesToBindingMap; + }); + } + /** * This method is an abstraction that queries the ast service for possible global references as string values, * and then uses the mustache helper utility to classify these global references into possible types of EntityDependencyNodes @@ -594,9 +666,9 @@ private Mono> getPossibleEntityReferences( * @return A 
mono of a map of each of the provided binding values to the possible set of EntityDependencyNodes found in the binding */ private Mono>> getPossibleEntityParentsMap( - Set bindings, int types, int evalVersion) { + List bindings, int types, int evalVersion) { Flux>> findingToReferencesFlux = - astService.getPossibleReferencesFromDynamicBinding(new ArrayList<>(bindings), evalVersion); + astService.getPossibleReferencesFromDynamicBinding(bindings, evalVersion); return MustacheHelper.getPossibleEntityParentsMap(findingToReferencesFlux, types); } @@ -627,31 +699,45 @@ private Mono> addDirectlyReferencedExecutablesToGr Mono> executableNameToExecutableMapMono, Set executableBindingsInDslRef, int evalVersion) { - return Flux.fromIterable(widgetDynamicBindingsMap.entrySet()) - .flatMap(entry -> { - String widgetName = entry.getKey(); - // For each widget in the DSL that has a dynamic binding, - // we define an entity dependency node beforehand - // This will be a leaf node in the DAG that is constructed for on page load dependencies - EntityDependencyNode widgetDependencyNode = - new EntityDependencyNode(EntityReferenceType.WIDGET, widgetName, widgetName, null, null); - Set bindingsInWidget = entry.getValue(); - return getPossibleEntityReferences( - executableNameToExecutableMapMono, - bindingsInWidget, - evalVersion, - executableBindingsInDslRef) - .flatMapMany(Flux::fromIterable) - // Add dependencies of the executables found in the DSL in the graph - // We are ignoring the widget references at this point - // TODO: Possible optimization in the future - .flatMap(possibleEntity -> { + + Map> bindingToWidgetNodesMap = new HashMap<>(); + List allBindings = new ArrayList<>(); + + widgetDynamicBindingsMap.forEach((widgetName, bindingsInWidget) -> { + EntityDependencyNode widgetDependencyNode = + new EntityDependencyNode(EntityReferenceType.WIDGET, widgetName, widgetName, null, null); + + bindingsInWidget.forEach(binding -> { + bindingToWidgetNodesMap + 
.computeIfAbsent(binding, bindingKey -> new HashSet<>()) + .add(widgetDependencyNode); + allBindings.add(binding); + }); + }); + + Mono>> bindingToPossibleEntityMapMono = getPossibleEntityReferencesMap( + executableNameToExecutableMapMono, allBindings, evalVersion, executableBindingsInDslRef); + + return bindingToPossibleEntityMapMono + .flatMapMany(bindingToPossibleEntityMap -> Flux.fromIterable(bindingToPossibleEntityMap.entrySet())) + .flatMap(bindingEntry -> { + String binding = bindingEntry.getKey(); + Set possibleEntities = bindingEntry.getValue(); + + // Get all widget nodes associated with the binding + Set widgetDependencyNodes = + bindingToWidgetNodesMap.getOrDefault(binding, Set.of()); + + // Process each possibleEntity for the current binding + return Flux.fromIterable(possibleEntities).flatMap(possibleEntity -> Flux.fromIterable( + widgetDependencyNodes) // Iterate all associated widgets + .flatMap(widgetDependencyNode -> { if (getExecutableTypes().contains(possibleEntity.getEntityReferenceType())) { edgesRef.add(new ExecutableDependencyEdge(possibleEntity, widgetDependencyNode)); // This executable is directly referenced in the DSL. This executable is an ideal - // candidate - // for on page load + // candidate for on page load executablesUsedInDSLRef.add(possibleEntity.getValidEntityName()); + return updateExecutableSelfReferencingPaths(possibleEntity) .name(UPDATE_EXECUTABLE_SELF_REFERENCING_PATHS) .tap(Micrometer.observation(observationRegistry)) @@ -668,7 +754,7 @@ private Mono> addDirectlyReferencedExecutablesToGr .thenReturn(possibleEntity); } return Mono.just(possibleEntity); - }); + })); }) .collectList() .thenReturn(edgesRef); @@ -1134,7 +1220,7 @@ private Mono> addWidgetRelationshipToGraph( // This part will ensure that we are discovering widget to widget relationships. 
return Flux.fromIterable(widgetBindingMap.entrySet()) .flatMap(widgetBindingEntries -> getPossibleEntityParentsMap( - widgetBindingEntries.getValue(), entityTypes, evalVersion) + new ArrayList<>(widgetBindingEntries.getValue()), entityTypes, evalVersion) .map(possibleParentsMap -> { possibleParentsMap.entrySet().stream().forEach(entry -> { if (entry.getValue() == null || entry.getValue().isEmpty()) { diff --git a/app/server/appsmith-server/src/main/java/com/appsmith/server/services/ce/GitArtifactHelperCE.java b/app/server/appsmith-server/src/main/java/com/appsmith/server/services/ce/GitArtifactHelperCE.java index 7fb4f40c8c3c..434f3bf4baa0 100644 --- a/app/server/appsmith-server/src/main/java/com/appsmith/server/services/ce/GitArtifactHelperCE.java +++ b/app/server/appsmith-server/src/main/java/com/appsmith/server/services/ce/GitArtifactHelperCE.java @@ -25,6 +25,8 @@ public interface GitArtifactHelperCE { AclPermission getArtifactManageDefaultBranchPermission(); + AclPermission getWorkspaceArtifactCreationPermission(); + Mono getArtifactById(String artifactId, AclPermission aclPermission); Mono getArtifactByBaseIdAndBranchName(String baseArtifactId, String branchName, AclPermission aclPermission); @@ -62,4 +64,6 @@ public interface GitArtifactHelperCE { Mono deleteArtifact(String artifactId); Boolean isContextInArtifactEmpty(ArtifactExchangeJson artifactExchangeJson); + + T getNewArtifact(String workspaceId, String repoName); } diff --git a/app/server/appsmith-server/src/test/java/com/appsmith/server/services/ActionCollectionServiceTest.java b/app/server/appsmith-server/src/test/java/com/appsmith/server/services/ActionCollectionServiceTest.java index 8c0471e7e36f..3feeac7facf7 100644 --- a/app/server/appsmith-server/src/test/java/com/appsmith/server/services/ActionCollectionServiceTest.java +++ b/app/server/appsmith-server/src/test/java/com/appsmith/server/services/ActionCollectionServiceTest.java @@ -1,5 +1,6 @@ package com.appsmith.server.services; +import 
com.appsmith.external.dtos.DslExecutableDTO; import com.appsmith.external.models.ActionConfiguration; import com.appsmith.external.models.ActionDTO; import com.appsmith.external.models.Datasource; @@ -64,6 +65,7 @@ import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.stream.Collectors; import static com.appsmith.server.acl.AclPermission.EXECUTE_ACTIONS; import static com.appsmith.server.acl.AclPermission.MANAGE_ACTIONS; @@ -710,42 +712,48 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle .verifyComplete(); } - @Test - @WithUserDetails(value = "api_user") - public void - testUpdateUnpublishedActionCollection_withValidCollection_callsPageLayoutOnlyOnceAndAssertCyclicDependencyError() { - Mockito.when(pluginExecutorHelper.getPluginExecutor(Mockito.any())).thenReturn(Mono.just(pluginExecutor)); - Mockito.when(pluginExecutor.getHintMessages(Mockito.any(), Mockito.any())) - .thenReturn(Mono.zip(Mono.just(new HashSet<>()), Mono.just(new HashSet<>()))); + private ActionDTO createAction(String actionName, String body, boolean isValid) { + ActionDTO testAction = new ActionDTO(); + testAction.setName(actionName); + testAction.setActionConfiguration(new ActionConfiguration()); + testAction.getActionConfiguration().setBody(body); + testAction.getActionConfiguration().setIsValid(isValid); + return testAction; + } + private ActionCollectionDTO createActionCollection(String collectionName, String body, PluginType pluginType) { ActionCollectionDTO actionCollectionDTO = new ActionCollectionDTO(); - actionCollectionDTO.setName("testCollection1"); + actionCollectionDTO.setName(collectionName); actionCollectionDTO.setPageId(testPage.getId()); actionCollectionDTO.setApplicationId(testApp.getId()); actionCollectionDTO.setWorkspaceId(workspaceId); actionCollectionDTO.setPluginId(datasource.getPluginId()); actionCollectionDTO.setVariables(List.of(new JSValue("test", "String", "test", true))); - 
actionCollectionDTO.setBody("collectionBody"); - actionCollectionDTO.setPluginType(PluginType.JS); + actionCollectionDTO.setBody(body); + actionCollectionDTO.setPluginType(pluginType); + return actionCollectionDTO; + } - // Create actions - ActionDTO action1 = new ActionDTO(); - action1.setName("testAction1"); - action1.setActionConfiguration(new ActionConfiguration()); - action1.getActionConfiguration().setBody("initial body"); - action1.getActionConfiguration().setIsValid(false); + private JSONArray createDynamicList(String key, String value) { + JSONArray temp2 = new JSONArray(); + temp2.add(new JSONObject(Map.of(key, value))); + return temp2; + } - ActionDTO action2 = new ActionDTO(); - action2.setName("testAction2"); - action2.setActionConfiguration(new ActionConfiguration()); - action2.getActionConfiguration().setBody("mockBody"); - action2.getActionConfiguration().setIsValid(false); + @Test + @WithUserDetails(value = "api_user") + public void + testUpdateUnpublishedActionCollection_withValidCollection_callsPageLayoutOnlyOnceAndAssertCyclicDependencyError() { + Mockito.when(pluginExecutorHelper.getPluginExecutor(Mockito.any())).thenReturn(Mono.just(pluginExecutor)); + Mockito.when(pluginExecutor.getHintMessages(Mockito.any(), Mockito.any())) + .thenReturn(Mono.zip(Mono.just(new HashSet<>()), Mono.just(new HashSet<>()))); - ActionDTO action3 = new ActionDTO(); - action3.setName("testAction3"); - action3.setActionConfiguration(new ActionConfiguration()); - action3.getActionConfiguration().setBody("mockBody"); - action3.getActionConfiguration().setIsValid(false); + ActionCollectionDTO actionCollectionDTO = + createActionCollection("testCollection1", "collectionBody", PluginType.JS); + + ActionDTO action1 = createAction("testAction1", "initial body", false); + ActionDTO action2 = createAction("testAction2", "mockBody", false); + ActionDTO action3 = createAction("testAction3", "mockBody", false); actionCollectionDTO.setActions(List.of(action1, action2, action3)); 
@@ -753,12 +761,8 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle ArrayList dslList = (ArrayList) layout.getDsl().get("children"); JSONObject tableDsl = (JSONObject) dslList.get(0); tableDsl.put("tableData", "{{testCollection1.testAction1.data}}"); - JSONArray temp2 = new JSONArray(); - temp2.add(new JSONObject(Map.of("key", "tableData"))); - tableDsl.put("dynamicBindingPathList", temp2); - JSONArray temp3 = new JSONArray(); - temp3.add(new JSONObject(Map.of("key", "tableData"))); - tableDsl.put("dynamicPropertyPathList", temp3); + tableDsl.put("dynamicBindingPathList", createDynamicList("key", "tableData")); + tableDsl.put("dynamicPropertyPathList", createDynamicList("key", "tableData")); layout.getDsl().put("widgetName", "MainContainer"); testPage.setLayouts(List.of(layout)); @@ -815,22 +819,10 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle Mockito.when(pluginExecutor.getHintMessages(Mockito.any(), Mockito.any())) .thenReturn(Mono.zip(Mono.just(new HashSet<>()), Mono.just(new HashSet<>()))); - ActionCollectionDTO actionCollectionDTO = new ActionCollectionDTO(); - actionCollectionDTO.setName("testCollection1"); - actionCollectionDTO.setPageId(testPage.getId()); - actionCollectionDTO.setApplicationId(testApp.getId()); - actionCollectionDTO.setWorkspaceId(workspaceId); - actionCollectionDTO.setPluginId(datasource.getPluginId()); - actionCollectionDTO.setVariables(List.of(new JSValue("test", "String", "test", true))); - actionCollectionDTO.setBody("collectionBody"); - actionCollectionDTO.setPluginType(PluginType.JS); + ActionCollectionDTO actionCollectionDTO = + createActionCollection("testCollection1", "collectionBody", PluginType.JS); - // Create actions - ActionDTO action1 = new ActionDTO(); - action1.setName("testAction1"); - action1.setActionConfiguration(new ActionConfiguration()); - action1.getActionConfiguration().setBody("initial body"); - 
action1.getActionConfiguration().setIsValid(false); + ActionDTO action1 = createAction("testAction1", "initial body", false); actionCollectionDTO.setActions(List.of(action1)); // Create Js object @@ -840,18 +832,8 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle assert createdActionCollectionDTO.getId() != null; String createdActionCollectionId = createdActionCollectionDTO.getId(); - // Update JS object to create an action with same name as previously created action - ActionDTO action2 = new ActionDTO(); - action2.setName("testAction1"); - action2.setActionConfiguration(new ActionConfiguration()); - action2.getActionConfiguration().setBody("mockBody"); - action2.getActionConfiguration().setIsValid(false); - - ActionDTO action3 = new ActionDTO(); - action3.setName("testAction2"); - action3.setActionConfiguration(new ActionConfiguration()); - action3.getActionConfiguration().setBody("mockBody"); - action3.getActionConfiguration().setIsValid(false); + ActionDTO action2 = createAction("testAction1", "mockBody", false); + ActionDTO action3 = createAction("testAction2", "mockBody", false); actionCollectionDTO.setActions( List.of(createdActionCollectionDTO.getActions().get(0), action2, action3)); @@ -875,22 +857,9 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle Mockito.when(pluginExecutor.getHintMessages(Mockito.any(), Mockito.any())) .thenReturn(Mono.zip(Mono.just(new HashSet<>()), Mono.just(new HashSet<>()))); - ActionCollectionDTO actionCollectionDTO = new ActionCollectionDTO(); - actionCollectionDTO.setName("testCollection1"); - actionCollectionDTO.setPageId(testPage.getId()); - actionCollectionDTO.setApplicationId(testApp.getId()); - actionCollectionDTO.setWorkspaceId(workspaceId); - actionCollectionDTO.setPluginId(datasource.getPluginId()); - actionCollectionDTO.setVariables(List.of(new JSValue("test", "String", "test", true))); - actionCollectionDTO.setBody("collectionBody"); - 
actionCollectionDTO.setPluginType(PluginType.JS); - - // Create actions - ActionDTO action1 = new ActionDTO(); - action1.setName("testAction1"); - action1.setActionConfiguration(new ActionConfiguration()); - action1.getActionConfiguration().setBody("initial body"); - action1.getActionConfiguration().setIsValid(false); + ActionCollectionDTO actionCollectionDTO = + createActionCollection("testCollection1", "collectionBody", PluginType.JS); + ActionDTO action1 = createAction("testAction1", "initial body", false); actionCollectionDTO.setActions(List.of(action1)); // Create Js object @@ -901,17 +870,8 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle String createdActionCollectionId = createdActionCollectionDTO.getId(); // Update JS object to create an action with same name as previously created action - ActionDTO action2 = new ActionDTO(); - action2.setName("testAction2"); - action2.setActionConfiguration(new ActionConfiguration()); - action2.getActionConfiguration().setBody("mockBody"); - action2.getActionConfiguration().setIsValid(false); - - ActionDTO action3 = new ActionDTO(); - action3.setName("testAction2"); - action3.setActionConfiguration(new ActionConfiguration()); - action3.getActionConfiguration().setBody("mockBody"); - action3.getActionConfiguration().setIsValid(false); + ActionDTO action2 = createAction("testAction2", "mockBody", false); + ActionDTO action3 = createAction("testAction2", "mockBody", false); actionCollectionDTO.setActions( List.of(createdActionCollectionDTO.getActions().get(0), action2, action3)); @@ -926,4 +886,85 @@ public void testDeleteActionCollection_afterApplicationPublish_clearsActionColle assertEquals(expectedMessage, error.getMessage()); }); } + + @Test + @WithUserDetails(value = "api_user") + public void testLayoutOnLoadActions_withTwoWidgetsAndSameBinding_callsCorrectActions() { + Mockito.when(pluginExecutorHelper.getPluginExecutor(Mockito.any())).thenReturn(Mono.just(pluginExecutor)); + 
Mockito.when(pluginExecutor.getHintMessages(Mockito.any(), Mockito.any())) + .thenReturn(Mono.zip(Mono.just(new HashSet<>()), Mono.just(new HashSet<>()))); + + ActionCollectionDTO actionCollectionDTO = + createActionCollection("testCollection1", "collectionBody", PluginType.JS); + + // Create actions + ActionDTO action1 = createAction("myFunction", "return [{\"key\": \"value\"}];", true); + ActionDTO action2 = createAction("myFunction2", "mockBody", false); + + actionCollectionDTO.setActions(List.of(action1, action2)); + + // Create Layout with Table and Text Widgets + Layout layout = testPage.getLayouts().get(0); + layout.getDsl().put("widgetName", "MainContainer"); + ArrayList dslList = (ArrayList) layout.getDsl().get("children"); + JSONObject tableDsl = (JSONObject) dslList.get(0); + tableDsl.put("tableData", "{{testCollection1.myFunction.data}}"); + tableDsl.put("dynamicBindingPathList", createDynamicList("key", "tableData")); + tableDsl.put("dynamicPropertyPathList", createDynamicList("key", "tableData")); + + JSONObject textDsl = new JSONObject(); + textDsl.put("widgetName", "Text1"); + textDsl.put("type", "TEXT_WIDGET"); + textDsl.put("text", "{{testCollection1.myFunction.data}} + {{testCollection1.myFunction2.data}}"); + textDsl.put("dynamicBindingPathList", createDynamicList("key", "text")); + textDsl.put("dynamicPropertyPathList", createDynamicList("key", "text")); + + layout.setLayoutOnLoadActions(List.of()); + + dslList.add(textDsl); + + testPage.setLayouts(List.of(layout)); + + PageDTO updatedPage = + newPageService.updatePage(testPage.getId(), testPage).block(); + + // Create Js object + ActionCollectionDTO createdActionCollectionDTO = + layoutCollectionService.createCollection(actionCollectionDTO).block(); + assert createdActionCollectionDTO != null; + assert createdActionCollectionDTO.getId() != null; + String createdActionCollectionId = createdActionCollectionDTO.getId(); + + final Mono updatedActionCollectionDTOMono = + 
layoutCollectionService.updateUnpublishedActionCollection( + createdActionCollectionId, actionCollectionDTO); + + Mono pageWithMigratedDSLMono = + applicationPageService.getPageAndMigrateDslByBranchedPageId(testPage.getId(), false, false); + + StepVerifier.create(updatedActionCollectionDTOMono.zipWhen(actionCollectionDTO1 -> { + return pageWithMigratedDSLMono; + })) + .assertNext(tuple -> { + ActionCollectionDTO actionCollectionDTO1 = tuple.getT1(); + assertEquals(createdActionCollectionId, actionCollectionDTO1.getId()); + Mockito.verify(updateLayoutService, Mockito.times(2)) + .updatePageLayoutsByPageId(Mockito.anyString()); + actionCollectionDTO1 + .getActions() + .forEach(action -> assertNull(action.getErrorReports(), "Error reports should be null")); + + PageDTO pageWithMigratedDSL = tuple.getT2(); + List> layoutOnLoadActions = + pageWithMigratedDSL.getLayouts().get(0).getLayoutOnLoadActions(); + List> actualNames = layoutOnLoadActions.stream() + .map(set -> + set.stream().map(DslExecutableDTO::getName).collect(Collectors.toSet())) + .collect(Collectors.toList()); + List> expectedNames = + List.of(Set.of("testCollection1.myFunction", "testCollection1.myFunction2")); + assertEquals(expectedNames, actualNames, "layoutOnLoadActions should contain the expected names"); + }) + .verifyComplete(); + } } diff --git a/app/server/appsmith-server/src/test/java/com/appsmith/server/services/LayoutServiceTest.java b/app/server/appsmith-server/src/test/java/com/appsmith/server/services/LayoutServiceTest.java index 4e48f49e0c87..afdc45a785b5 100644 --- a/app/server/appsmith-server/src/test/java/com/appsmith/server/services/LayoutServiceTest.java +++ b/app/server/appsmith-server/src/test/java/com/appsmith/server/services/LayoutServiceTest.java @@ -524,16 +524,16 @@ private Mono createComplexAppForExecuteOnLoad(Mono pageMono) "some dynamic {{\"anIgnoredAction.data:\" + aGetAction.data}}", "dynamicPost", """ - some dynamic {{ - (function(ignoredAction1){ - \tlet a = 
ignoredAction1.data - \tlet ignoredAction2 = { data: "nothing" } - \tlet b = ignoredAction2.data - \tlet c = "ignoredAction3.data" - \t// ignoredAction4.data - \treturn aPostAction.data - })(anotherPostAction.data)}} - """, + some dynamic {{ + (function(ignoredAction1){ + \tlet a = ignoredAction1.data + \tlet ignoredAction2 = { data: "nothing" } + \tlet b = ignoredAction2.data + \tlet c = "ignoredAction3.data" + \t// ignoredAction4.data + \treturn aPostAction.data + })(anotherPostAction.data)}} + """, "dynamicPostWithAutoExec", "some dynamic {{aPostActionWithAutoExec.data}}", "dynamicDelete", @@ -936,6 +936,69 @@ public void getActionsExecuteOnLoadWithAstLogic() { \t// ignoredAction4.data \treturn aPostAction.data })(anotherPostAction.data)"""; + + Mockito.when(astService.getPossibleReferencesFromDynamicBinding( + List.of( + " anotherDBAction.data.optional ", + "Collection.aSyncCollectionActionWithCall()", + "Collection.anAsyncCollectionActionWithCall()", + "Collection.aSyncCollectionActionWithoutCall.data", + "Collection.anAsyncCollectionActionWithoutCall.data", + "aPostActionWithAutoExec.data", + "aTableAction.data.child", + "\"anIgnoredAction.data:\" + aGetAction.data", + "aDBAction.data[0].irrelevant", + bindingValue), + EVALUATION_VERSION)) + .thenReturn(Flux.just( + Tuples.of( + " anotherDBAction.data.optional ", + new HashSet<>(Set.of("anotherDBAction.data.optional"))), + Tuples.of( + "Collection.aSyncCollectionActionWithCall()", + new HashSet<>(Set.of("Collection.aSyncCollectionActionWithCall"))), + Tuples.of( + "Collection.anAsyncCollectionActionWithCall()", + new HashSet<>(Set.of("Collection.anAsyncCollectionActionWithCall"))), + Tuples.of( + "Collection.aSyncCollectionActionWithoutCall.data", + new HashSet<>(Set.of("Collection.aSyncCollectionActionWithoutCall.data"))), + Tuples.of( + "Collection.anAsyncCollectionActionWithoutCall.data", + new HashSet<>(Set.of("Collection.anAsyncCollectionActionWithoutCall.data"))), + Tuples.of( + 
"aPostActionWithAutoExec.data", new HashSet<>(Set.of("aPostActionWithAutoExec.data"))), + Tuples.of("aTableAction.data.child", new HashSet<>(Set.of("aTableAction.data.child"))), + Tuples.of( + "\"anIgnoredAction.data:\" + aGetAction.data", + new HashSet<>(Set.of("aGetAction.data"))), + Tuples.of( + "aDBAction.data[0].irrelevant", new HashSet<>(Set.of("aDBAction.data[0].irrelevant"))), + Tuples.of(bindingValue, new HashSet<>(Set.of("aPostAction.data", "anotherPostAction.data"))))); + + Mockito.when(astService.getPossibleReferencesFromDynamicBinding( + List.of("aPostTertiaryAction.data", "aPostSecondaryAction.data"), EVALUATION_VERSION)) + .thenReturn(Flux.just( + Tuples.of("aPostTertiaryAction.data", new HashSet<>(Set.of("aPostTertiaryAction.data"))), + Tuples.of("aPostSecondaryAction.data", new HashSet<>(Set.of("aPostSecondaryAction.data"))))); + + Mockito.when(astService.getPossibleReferencesFromDynamicBinding( + List.of( + "aPostTertiaryAction.data", + "hiddenAction4.data", + "hiddenAction1.data", + "hiddenAction3.data", + "aPostSecondaryAction.data", + "hiddenAction2.data"), + EVALUATION_VERSION)) + .thenReturn(Flux.just( + Tuples.of("aPostTertiaryAction.data", new HashSet<>(Set.of("aPostTertiaryAction.data"))), + Tuples.of("hiddenAction4.data", new HashSet<>(Set.of("hiddenAction4.data"))), + Tuples.of("hiddenAction1.data", new HashSet<>(Set.of("hiddenAction1.data"))), + Tuples.of("hiddenAction3.data", new HashSet<>(Set.of("hiddenAction3.data"))), + Tuples.of("aPostSecondaryAction.data", new HashSet<>(Set.of("aPostSecondaryAction.data"))), + Tuples.of("hiddenAction2.data", new HashSet<>(Set.of("hiddenAction2.data"))))); + Mockito.when(astService.getPossibleReferencesFromDynamicBinding(List.of(bindingValue), EVALUATION_VERSION)) .thenReturn(Flux.just( Tuples.of(bindingValue, new HashSet<>(Set.of("aPostAction.data", "anotherPostAction.data")))));