diff --git a/.github/workflows/ci-master.yml b/.github/workflows/ci-master.yml index b6972e19322ed..f2329320825ae 100644 --- a/.github/workflows/ci-master.yml +++ b/.github/workflows/ci-master.yml @@ -7,7 +7,9 @@ on: jobs: install-and-build: - runs-on: ubuntu-latest + runs-on: blacksmith-2vcpu-ubuntu-2204 + env: + NODE_OPTIONS: '--max-old-space-size=4096' timeout-minutes: 10 @@ -15,7 +17,7 @@ jobs: - uses: actions/checkout@v4.1.1 - run: corepack enable - - uses: actions/setup-node@v4.0.2 + - uses: useblacksmith/setup-node@v5 with: node-version: 20.x cache: pnpm @@ -24,13 +26,13 @@ jobs: run: pnpm install --frozen-lockfile - name: Setup build cache - uses: rharkor/caching-for-turbo@v1.5 + uses: useblacksmith/caching-for-turbo@v1 - name: Build run: pnpm build - name: Cache build artifacts - uses: actions/cache/save@v4.0.0 + uses: useblacksmith/cache/save@v5 with: path: ./packages/**/dist key: ${{ github.sha }}-base:build @@ -48,6 +50,7 @@ jobs: cacheKey: ${{ github.sha }}-base:build collectCoverage: ${{ matrix.node-version == '20.x' }} ignoreTurboCache: ${{ matrix.node-version == '20.x' }} + skipFrontendTests: ${{ matrix.node-version != '20.x' }} secrets: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/ci-pull-requests.yml b/.github/workflows/ci-pull-requests.yml index 1ce742709870a..e85f649655450 100644 --- a/.github/workflows/ci-pull-requests.yml +++ b/.github/workflows/ci-pull-requests.yml @@ -9,14 +9,16 @@ on: jobs: install-and-build: name: Install & Build - runs-on: ubuntu-latest + runs-on: blacksmith-2vcpu-ubuntu-2204 + env: + NODE_OPTIONS: '--max-old-space-size=4096' steps: - uses: actions/checkout@v4.1.1 with: ref: refs/pull/${{ github.event.pull_request.number }}/merge - run: corepack enable - - uses: actions/setup-node@v4.0.2 + - uses: useblacksmith/setup-node@v5 with: node-version: 20.x cache: pnpm @@ -25,7 +27,7 @@ jobs: run: pnpm install --frozen-lockfile - name: Setup build cache - uses: rharkor/caching-for-turbo@v1.5 + uses: useblacksmith/caching-for-turbo@v1 - name: Build run: pnpm build @@ -37,7 +39,7 @@ jobs: run: pnpm typecheck - name: Cache build artifacts - uses: actions/cache/save@v4.0.0 + uses: useblacksmith/cache/save@v5 with: path: ./packages/**/dist key: ${{ github.sha }}-base:build diff --git a/.github/workflows/e2e-reusable.yml b/.github/workflows/e2e-reusable.yml index b55d6728d2f21..82318961457bc 100644 --- a/.github/workflows/e2e-reusable.yml +++ b/.github/workflows/e2e-reusable.yml @@ -41,11 +41,6 @@ on: description: 'PR number to run tests for.' required: false type: number - node_view_version: - description: 'Node View version to run tests with.' - required: false - default: '1' - type: string secrets: CYPRESS_RECORD_KEY: description: 'Cypress record key.' @@ -165,7 +160,7 @@ jobs: spec: '${{ inputs.spec }}' env: NODE_OPTIONS: --dns-result-order=ipv4first - CYPRESS_NODE_VIEW_VERSION: ${{ inputs.node_view_version }} + CYPRESS_NODE_VIEW_VERSION: 2 CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} E2E_TESTS: true diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index 2f63f61bc6327..e7400adecbe25 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -27,11 +27,6 @@ on: description: 'URL to call after workflow is done.' required: false default: '' - node_view_version: - description: 'Node View version to run tests with.' 
- required: false - default: '1' - type: string jobs: calls-start-url: @@ -51,7 +46,6 @@ jobs: branch: ${{ github.event.inputs.branch || 'master' }} user: ${{ github.event.inputs.user || 'PR User' }} spec: ${{ github.event.inputs.spec || 'e2e/*' }} - node_view_version: ${{ github.event.inputs.node_view_version || '1' }} secrets: CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} diff --git a/.github/workflows/linting-reusable.yml b/.github/workflows/linting-reusable.yml index ed8d23494000e..c1e2b996f4582 100644 --- a/.github/workflows/linting-reusable.yml +++ b/.github/workflows/linting-reusable.yml @@ -17,14 +17,16 @@ on: jobs: lint: name: Lint - runs-on: ubuntu-latest + runs-on: blacksmith-2vcpu-ubuntu-2204 + env: + NODE_OPTIONS: '--max-old-space-size=4096' steps: - uses: actions/checkout@v4.1.1 with: ref: ${{ inputs.ref }} - run: corepack enable - - uses: actions/setup-node@v4.0.2 + - uses: useblacksmith/setup-node@v5 with: node-version: 20.x cache: pnpm @@ -33,7 +35,7 @@ jobs: run: pnpm install --frozen-lockfile - name: Setup build cache - uses: rharkor/caching-for-turbo@v1.5 + uses: useblacksmith/caching-for-turbo@v1 - name: Build if: ${{ inputs.cacheKey == '' }} @@ -41,10 +43,11 @@ jobs: - name: Restore cached build artifacts if: ${{ inputs.cacheKey != '' }} - uses: actions/cache/restore@v4.0.0 + uses: useblacksmith/cache/restore@v5 with: path: ./packages/**/dist key: ${{ inputs.cacheKey }} + fail-on-cache-miss: true - name: Lint Backend run: pnpm lint:backend diff --git a/.github/workflows/units-tests-dispatch.yml b/.github/workflows/units-tests-dispatch.yml index 5ad63f000e1fb..72a9db5b6a957 100644 --- a/.github/workflows/units-tests-dispatch.yml +++ b/.github/workflows/units-tests-dispatch.yml @@ -12,6 +12,11 @@ on: description: 'PR number to run tests for.' required: false type: number + skipFrontendTests: + description: 'Skip Frontend tests' + required: false + default: false + type: boolean jobs: prepare: @@ -37,3 +42,4 @@ jobs: uses: ./.github/workflows/units-tests-reusable.yml with: ref: ${{ needs.prepare.outputs.branch }} + skipFrontendTests: ${{ inputs.skipFrontendTests }} diff --git a/.github/workflows/units-tests-reusable.yml b/.github/workflows/units-tests-reusable.yml index 62eca74b15bbb..efa256a3f9cd4 100644 --- a/.github/workflows/units-tests-reusable.yml +++ b/.github/workflows/units-tests-reusable.yml @@ -26,6 +26,10 @@ on: required: false default: false type: boolean + skipFrontendTests: + required: false + default: false + type: boolean secrets: CODECOV_TOKEN: description: 'Codecov upload token.' 
@@ -34,7 +38,7 @@ on: jobs: unit-test: name: Unit tests - runs-on: ubuntu-latest + runs-on: blacksmith-2vcpu-ubuntu-2204 env: TURBO_FORCE: ${{ inputs.ignoreTurboCache }} COVERAGE_ENABLED: ${{ inputs.collectCoverage }} @@ -45,7 +49,7 @@ jobs: - run: corepack enable - name: Use Node.js ${{ inputs.nodeVersion }} - uses: actions/setup-node@v4.0.2 + uses: useblacksmith/setup-node@v5 with: node-version: ${{ inputs.nodeVersion }} cache: pnpm @@ -54,7 +58,7 @@ jobs: run: pnpm install --frozen-lockfile - name: Setup build cache - uses: rharkor/caching-for-turbo@v1.5 + uses: useblacksmith/caching-for-turbo@v1 - name: Build if: ${{ inputs.cacheKey == '' }} @@ -62,10 +66,11 @@ jobs: - name: Restore cached build artifacts if: ${{ inputs.cacheKey != '' }} - uses: actions/cache/restore@v4.0.0 + uses: useblacksmith/cache/restore@v5 with: path: ./packages/**/dist key: ${{ inputs.cacheKey }} + fail-on-cache-miss: true - name: Test Backend run: pnpm test:backend @@ -74,6 +79,7 @@ jobs: run: pnpm test:nodes - name: Test Frontend + if: ${{ !inputs.skipFrontendTests }} run: pnpm test:frontend - name: Upload coverage to Codecov diff --git a/CHANGELOG.md b/CHANGELOG.md index 92770f1598ef7..e8c0260c13ae4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,51 @@ +# [1.77.0](https://github.com/n8n-io/n8n/compare/n8n@1.76.0...n8n@1.77.0) (2025-01-29) + + +### Bug Fixes + +* **core:** Account for pre-execution failure in scaling mode ([#12815](https://github.com/n8n-io/n8n/issues/12815)) ([b4d27c4](https://github.com/n8n-io/n8n/commit/b4d27c49e32bfacbd2690bf1c07194562f6a4a61)) +* **core:** Display the last activated plan name when multiple are activated ([#12835](https://github.com/n8n-io/n8n/issues/12835)) ([03365f0](https://github.com/n8n-io/n8n/commit/03365f096d3d5c8e3a6537f37cda412959705346)) +* **core:** Fix possible corruption of OAuth2 credential ([#12880](https://github.com/n8n-io/n8n/issues/12880)) ([ac84ea1](https://github.com/n8n-io/n8n/commit/ac84ea14452cbcec95f14073e8e70427169e6a7f)) +* **core:** Fix usage of external libs in task runner ([#12788](https://github.com/n8n-io/n8n/issues/12788)) ([3d9d5bf](https://github.com/n8n-io/n8n/commit/3d9d5bf9d58f3c49830d42a140d6c8c6b59952dc)) +* **core:** Handle max stalled count error better ([#12824](https://github.com/n8n-io/n8n/issues/12824)) ([eabf160](https://github.com/n8n-io/n8n/commit/eabf1609577cd94a6bad5020c34378d840a13bc0)) +* **core:** Improve error handling in credential decryption and parsing ([#12868](https://github.com/n8n-io/n8n/issues/12868)) ([0c86bf2](https://github.com/n8n-io/n8n/commit/0c86bf2b3761bb93fd3cedba7a483ae5d97bd332)) +* **core:** Renew license on startup for instances with detached floating entitlements ([#12884](https://github.com/n8n-io/n8n/issues/12884)) ([f32eef8](https://github.com/n8n-io/n8n/commit/f32eef85bd066ee9b54d110355c6b80124d67437)) +* **core:** Update execution entity and execution data in transaction ([#12756](https://github.com/n8n-io/n8n/issues/12756)) ([1f43181](https://github.com/n8n-io/n8n/commit/1f4318136011bffaad04527790a9eba79effce35)) +* **core:** Validate credential data before encryption ([#12885](https://github.com/n8n-io/n8n/issues/12885)) ([3d27a14](https://github.com/n8n-io/n8n/commit/3d27a1498702206b738cf978d037191306cec42b)) +* **editor:** Add notice when user hits the limit for execution metadata item length ([#12676](https://github.com/n8n-io/n8n/issues/12676)) ([02df25c](https://github.com/n8n-io/n8n/commit/02df25c450a0a384a32d0815d8a2faec7562a8ae)) +* **editor:** Don't send run data for full 
manual executions ([#12687](https://github.com/n8n-io/n8n/issues/12687)) ([9139dc3](https://github.com/n8n-io/n8n/commit/9139dc3c2916186648fb5bf63d14fcb90773eb1c)) +* **editor:** Fix sub-execution links in empty output tables ([#12781](https://github.com/n8n-io/n8n/issues/12781)) ([114ed88](https://github.com/n8n-io/n8n/commit/114ed88368d137443b9c6605d4fe11b02053549d)) +* **editor:** Fix workflow move project select filtering ([#12764](https://github.com/n8n-io/n8n/issues/12764)) ([358d284](https://github.com/n8n-io/n8n/commit/358d2843e5e468071d6764419169811e93138c35)) +* **editor:** Focus executions iframe when n8n is ready to delegate keyboard events ([#12741](https://github.com/n8n-io/n8n/issues/12741)) ([d506218](https://github.com/n8n-io/n8n/commit/d5062189dbca02dfdf485fc220cc2a7b05e3e6cc)) +* **editor:** Handle large payloads in the AI Assistant requests better ([#12747](https://github.com/n8n-io/n8n/issues/12747)) ([eb4dea1](https://github.com/n8n-io/n8n/commit/eb4dea1ca891bb7ac07c8bbbae8803de080c4623)) +* **editor:** Hide Set up Template button for empty workflows ([#12808](https://github.com/n8n-io/n8n/issues/12808)) ([36e615b](https://github.com/n8n-io/n8n/commit/36e615b28f395623457bbb9bf4ab6fd69102b6ea)) +* **editor:** Load appropriate credentials in canvas V2 for new workflow ([#12722](https://github.com/n8n-io/n8n/issues/12722)) ([2020dc5](https://github.com/n8n-io/n8n/commit/2020dc502feae6cae827dfbcc40ffed89bcc334a)) +* **editor:** Properly set active project in new canvas ([#12810](https://github.com/n8n-io/n8n/issues/12810)) ([648c6f9](https://github.com/n8n-io/n8n/commit/648c6f9315b16b885e04716e7e0035a73b358fb0)) +* **editor:** Render inline SVGs correctly on the external secrets settings page ([#12802](https://github.com/n8n-io/n8n/issues/12802)) ([5820ade](https://github.com/n8n-io/n8n/commit/5820ade1e4b9d638c9b6369aef369d6dc9320da6)) +* **editor:** Show input selector when node has error ([#12813](https://github.com/n8n-io/n8n/issues/12813)) ([5b760e7](https://github.com/n8n-io/n8n/commit/5b760e7f7fc612b10307b4871e24b549f5d9d420)) +* **editor:** Show mappings by default in sub-node NDVs when the root node isn't executed ([#12642](https://github.com/n8n-io/n8n/issues/12642)) ([fb662dd](https://github.com/n8n-io/n8n/commit/fb662dd95cae3bc51d05d05e32e772d05adafa1e)) +* **Postgres PGVector Store Node:** Release postgres connections back to the pool ([#12723](https://github.com/n8n-io/n8n/issues/12723)) ([663dfb4](https://github.com/n8n-io/n8n/commit/663dfb48defd944f88f0ecc4f3347ea4f8a7c831)) + + +### Features + +* Add DeepSeek Chat Model node ([#12873](https://github.com/n8n-io/n8n/issues/12873)) ([9918afa](https://github.com/n8n-io/n8n/commit/9918afa51b16116abb73692a66df84e48128f406)) +* Add OpenRouter node ([#12882](https://github.com/n8n-io/n8n/issues/12882)) ([dc85b02](https://github.com/n8n-io/n8n/commit/dc85b022d111d1e8b038ca1a9f6a1041f19cf2b0)) +* Add timeout options to sendAndWait operations ([#12753](https://github.com/n8n-io/n8n/issues/12753)) ([3e9f24d](https://github.com/n8n-io/n8n/commit/3e9f24ddf462349145d89fe183313c95512c699b)) +* **API:** Add route for schema static files ([#12770](https://github.com/n8n-io/n8n/issues/12770)) ([d981b56](https://github.com/n8n-io/n8n/commit/d981b5659a26f92b11e5d0cd5570504fd683626c)) +* **core:** Explicitly report external hook failures ([#12830](https://github.com/n8n-io/n8n/issues/12830)) ([a24e442](https://github.com/n8n-io/n8n/commit/a24e4420bb9023f808acd756d125dffaea325968)) +* **core:** Rename two task runner env vars 
([#12763](https://github.com/n8n-io/n8n/issues/12763)) ([60187ca](https://github.com/n8n-io/n8n/commit/60187cab9bc9d21aa6ba710d772c068324e429f1)) +* **editor:** Add evaluation workflow and enhance workflow selector with pinned data support ([#12773](https://github.com/n8n-io/n8n/issues/12773)) ([be967eb](https://github.com/n8n-io/n8n/commit/be967ebec07fab223513f93f50bcc389b9a4c548)) +* **editor:** Always keep at least one executing node indicator in the workflow ([#12829](https://github.com/n8n-io/n8n/issues/12829)) ([c25c613](https://github.com/n8n-io/n8n/commit/c25c613a04a6773fa4014d9a0d290e443bcabbe0)) +* **Google Chat Node:** Updates ([#12827](https://github.com/n8n-io/n8n/issues/12827)) ([e146ad0](https://github.com/n8n-io/n8n/commit/e146ad021a0be22cf51bafa3c015d03550e03d97)) +* **Microsoft Outlook Node:** New operation sendAndWait ([#12795](https://github.com/n8n-io/n8n/issues/12795)) ([f4bf55f](https://github.com/n8n-io/n8n/commit/f4bf55f0d8278ff954344cf6397c10d8261b39a4)) +* **n8n Form Node:** Add read-only/custom HTML form elements ([#12760](https://github.com/n8n-io/n8n/issues/12760)) ([ba8aa39](https://github.com/n8n-io/n8n/commit/ba8aa3921613c590caaac627fbb9837ccaf87783)) +* **Send Email Node:** New operation sendAndWait ([#12775](https://github.com/n8n-io/n8n/issues/12775)) ([a197fbb](https://github.com/n8n-io/n8n/commit/a197fbb21b5642843d8bc3e657049aca99e0729d)) +* **Summarize Node:** Turns error when field not found in items into warning ([#11889](https://github.com/n8n-io/n8n/issues/11889)) ([d7dda3f](https://github.com/n8n-io/n8n/commit/d7dda3f5de52925e554455f9f10e51bd173ea856)) +* **Telegram Node:** New operation sendAndWait ([#12771](https://github.com/n8n-io/n8n/issues/12771)) ([2c58d47](https://github.com/n8n-io/n8n/commit/2c58d47f8eee1f865ecc1eeb89aa20c69c28abae)) + + + # [1.76.0](https://github.com/n8n-io/n8n/compare/n8n@1.75.0...n8n@1.76.0) (2025-01-22) diff --git a/cypress/composables/webhooks.ts b/cypress/composables/webhooks.ts new file mode 100644 index 0000000000000..8ad5dc6861638 --- /dev/null +++ b/cypress/composables/webhooks.ts @@ -0,0 +1,81 @@ +import { BACKEND_BASE_URL } from '../constants'; +import { NDV, WorkflowPage } from '../pages'; +import { getVisibleSelect } from '../utils'; + +export const waitForWebhook = 500; + +export interface SimpleWebhookCallOptions { + method: string; + webhookPath: string; + responseCode?: number; + respondWith?: string; + executeNow?: boolean; + responseData?: string; + authentication?: string; +} + +const workflowPage = new WorkflowPage(); +const ndv = new NDV(); + +export const simpleWebhookCall = (options: SimpleWebhookCallOptions) => { + const { + authentication, + method, + webhookPath, + responseCode, + respondWith, + responseData, + executeNow = true, + } = options; + + workflowPage.actions.addInitialNodeToCanvas('Webhook'); + workflowPage.actions.openNode('Webhook'); + + cy.getByTestId('parameter-input-httpMethod').click(); + getVisibleSelect().find('.option-headline').contains(method).click(); + cy.getByTestId('parameter-input-path') + .find('.parameter-input') + .find('input') + .clear() + .type(webhookPath); + + if (authentication) { + cy.getByTestId('parameter-input-authentication').click(); + getVisibleSelect().find('.option-headline').contains(authentication).click(); + } + + if (responseCode) { + cy.get('.param-options').click(); + getVisibleSelect().contains('Response Code').click(); + cy.get('.parameter-item-wrapper > .parameter-input-list-wrapper').children().click(); + 
getVisibleSelect().contains('201').click(); + } + + if (respondWith) { + cy.getByTestId('parameter-input-responseMode').click(); + getVisibleSelect().find('.option-headline').contains(respondWith).click(); + } + + if (responseData) { + cy.getByTestId('parameter-input-responseData').click(); + getVisibleSelect().find('.option-headline').contains(responseData).click(); + } + + const callEndpoint = (fn: (response: Cypress.Response) => void) => { + cy.request(method, `${BACKEND_BASE_URL}/webhook-test/${webhookPath}`).then(fn); + }; + + if (executeNow) { + ndv.actions.execute(); + cy.wait(waitForWebhook); + + callEndpoint((response) => { + expect(response.status).to.eq(200); + ndv.getters.outputPanel().contains('headers'); + }); + } + + return { + callEndpoint, + }; +}; diff --git a/cypress/composables/workflow.ts b/cypress/composables/workflow.ts index c3177775b47e2..d50c1e1255a99 100644 --- a/cypress/composables/workflow.ts +++ b/cypress/composables/workflow.ts @@ -67,6 +67,13 @@ export function getInputPlusHandleByType(nodeName: string, endpointType: Endpoin ); } +export function getOutputHandle(nodeName: string) { + return cy.ifCanvasVersion( + () => cy.get(`.add-output-endpoint[data-endpoint-name="${nodeName}"]`), + () => cy.get(`[data-test-id="canvas-node-output-handle"][data-node-name="${nodeName}"]`), + ); +} + export function getOutputPlusHandle(nodeName: string) { return cy.ifCanvasVersion( () => cy.get(`.add-output-endpoint[data-endpoint-name="${nodeName}"]`), diff --git a/cypress/e2e/10-undo-redo.cy.ts b/cypress/e2e/10-undo-redo.cy.ts index f54c2de9fae9e..2931897f03ae8 100644 --- a/cypress/e2e/10-undo-redo.cy.ts +++ b/cypress/e2e/10-undo-redo.cy.ts @@ -1,16 +1,15 @@ +import { getCanvasNodes } from '../composables/workflow'; import { SCHEDULE_TRIGGER_NODE_NAME, CODE_NODE_NAME, SET_NODE_NAME, - EDIT_FIELDS_SET_NODE_NAME, + MANUAL_TRIGGER_NODE_NAME, + MANUAL_TRIGGER_NODE_DISPLAY_NAME, } from '../constants'; import { MessageBox as MessageBoxClass } from '../pages/modals/message-box'; import { NDV } from '../pages/ndv'; import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; -// Suite-specific constants -const CODE_NODE_NEW_NAME = 'Something else'; - const WorkflowPage = new WorkflowPageClass(); const messageBox = new MessageBoxClass(); const ndv = new NDV(); @@ -20,40 +19,6 @@ describe('Undo/Redo', () => { WorkflowPage.actions.visit(); }); - // FIXME: Canvas V2: Fix redo connections - it('should undo/redo adding node in the middle', () => { - WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.actions.addNodeBetweenNodes( - SCHEDULE_TRIGGER_NODE_NAME, - CODE_NODE_NAME, - SET_NODE_NAME, - ); - WorkflowPage.actions.zoomToFit(); - WorkflowPage.getters.canvasNodeByName('Code').then(($codeNode) => { - const cssLeft = parseInt($codeNode.css('left')); - const cssTop = parseInt($codeNode.css('top')); - - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters.canvasNodes().should('have.have.length', 2); - WorkflowPage.getters.nodeConnections().should('have.length', 1); - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters.canvasNodes().should('have.have.length', 1); - WorkflowPage.getters.nodeConnections().should('have.length', 0); - WorkflowPage.actions.hitRedo(); - WorkflowPage.getters.canvasNodes().should('have.have.length', 2); - WorkflowPage.getters.nodeConnections().should('have.length', 1); - WorkflowPage.actions.hitRedo(); - WorkflowPage.getters.canvasNodes().should('have.have.length', 
3); - WorkflowPage.getters.nodeConnections().should('have.length', 2); - // Last node should be added back to original position - WorkflowPage.getters - .canvasNodeByName('Code') - .should('have.css', 'left', cssLeft + 'px') - .should('have.css', 'top', cssTop + 'px'); - }); - }); - it('should undo/redo deleting node using context menu', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -115,34 +80,60 @@ describe('Undo/Redo', () => { WorkflowPage.getters.nodeConnections().should('have.length', 0); }); - // FIXME: Canvas V2: Fix moving of nodes via e2e tests it('should undo/redo moving nodes', () => { - WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.getters.canvasNodeByName(CODE_NODE_NAME).then(($node) => { - const initialPosition = $node.position(); - cy.drag('[data-test-id="canvas-node"].jtk-drag-selected', [50, 150], { clickToFinish: true }); - WorkflowPage.getters.canvasNodeByName(CODE_NODE_NAME).then(($node) => { - const cssLeft = parseInt($node.css('left')); - const cssTop = parseInt($node.css('top')); - expect(cssLeft).to.be.greaterThan(initialPosition.left); - expect(cssTop).to.be.greaterThan(initialPosition.top); - }); + WorkflowPage.actions.zoomToFit(); - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters - .canvasNodeByName(CODE_NODE_NAME) - .should('have.css', 'left', `${initialPosition.left}px`) - .should('have.css', 'top', `${initialPosition.top}px`); - WorkflowPage.actions.hitRedo(); - WorkflowPage.getters.canvasNodeByName(CODE_NODE_NAME).then(($node) => { - const cssLeft = parseInt($node.css('left')); - const cssTop = parseInt($node.css('top')); - expect(cssLeft).to.be.greaterThan(initialPosition.left); - expect(cssTop).to.be.greaterThan(initialPosition.top); + getCanvasNodes() + .last() + .then(($node) => { + const { x: x1, y: y1 } = $node[0].getBoundingClientRect(); + + cy.ifCanvasVersion( + () => { + cy.drag('[data-test-id="canvas-node"].jtk-drag-selected', [50, 150], { + clickToFinish: true, + }); + }, + () => { + cy.drag(getCanvasNodes().last(), [50, 150], { + realMouse: true, + abs: true, + }); + }, + ); + + getCanvasNodes() + .last() + .then(($node) => { + const { x: x2, y: y2 } = $node[0].getBoundingClientRect(); + expect(x2).to.be.greaterThan(x1); + expect(y2).to.be.greaterThan(y1); + }); + + WorkflowPage.actions.hitUndo(); + + getCanvasNodes() + .last() + .then(($node) => { + const { x: x3, y: y3 } = $node[0].getBoundingClientRect(); + expect(x3).to.equal(x1); + expect(y3).to.equal(y1); + }); + + WorkflowPage.actions.hitRedo(); + + getCanvasNodes() + .last() + .then(($node) => { + const { x: x4, y: y4 } = $node[0].getBoundingClientRect(); + expect(x4).to.be.greaterThan(x1); + expect(y4).to.be.greaterThan(y1); + }); }); - }); }); it('should undo/redo deleting a connection using context menu', () => { @@ -155,17 +146,6 @@ describe('Undo/Redo', () => { WorkflowPage.actions.hitRedo(); WorkflowPage.getters.nodeConnections().should('have.length', 0); }); - // FIXME: Canvas V2: Fix disconnecting by moving - it('should undo/redo deleting a connection by moving it away', () => { - WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - cy.drag('.rect-input-endpoint.jtk-endpoint-connected', 
[0, -100]); - WorkflowPage.getters.nodeConnections().should('have.length', 0); - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters.nodeConnections().should('have.length', 1); - WorkflowPage.actions.hitRedo(); - WorkflowPage.getters.nodeConnections().should('have.length', 0); - }); it('should undo/redo disabling a node using context menu', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); @@ -204,23 +184,6 @@ describe('Undo/Redo', () => { WorkflowPage.getters.disabledNodes().should('have.length', 2); }); - // FIXME: Canvas V2: Fix undo renaming node - it('should undo/redo renaming node using keyboard shortcut', () => { - WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.getters.canvasNodes().last().click(); - cy.get('body').trigger('keydown', { key: 'F2' }); - cy.get('.rename-prompt').should('be.visible'); - cy.get('body').type(CODE_NODE_NEW_NAME); - cy.get('body').type('{enter}'); - WorkflowPage.actions.hitUndo(); - cy.get('body').type('{esc}'); - WorkflowPage.getters.canvasNodeByName(CODE_NODE_NAME).should('exist'); - WorkflowPage.actions.hitRedo(); - cy.get('body').type('{esc}'); - WorkflowPage.getters.canvasNodeByName(CODE_NODE_NEW_NAME).should('exist'); - }); - it('should undo/redo duplicating a node', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -243,77 +206,6 @@ describe('Undo/Redo', () => { }); }); - // FIXME: Canvas V2: Figure out why moving doesn't work from e2e - it('should undo/redo multiple steps', () => { - WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - // WorkflowPage.actions.addNodeToCanvas(SET_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.actions.zoomToFit(); - - // Disable last node - WorkflowPage.getters.canvasNodes().last().click(); - WorkflowPage.actions.hitDisableNodeShortcut(); - - // Move first one - WorkflowPage.actions - .getNodePosition(WorkflowPage.getters.canvasNodes().first()) - .then((initialPosition) => { - WorkflowPage.getters.canvasNodes().first().click(); - cy.drag('[data-test-id="canvas-node"].jtk-drag-selected', [50, 150], { - clickToFinish: true, - }); - WorkflowPage.getters - .canvasNodes() - .first() - .then(($node) => { - const cssLeft = parseInt($node.css('left')); - const cssTop = parseInt($node.css('top')); - expect(cssLeft).to.be.greaterThan(initialPosition.left); - expect(cssTop).to.be.greaterThan(initialPosition.top); - }); - - // Delete the set node - WorkflowPage.getters.canvasNodeByName(EDIT_FIELDS_SET_NODE_NAME).click().click(); - cy.get('body').type('{backspace}'); - - // First undo: Should return deleted node - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters.canvasNodes().should('have.length', 4); - WorkflowPage.getters.nodeConnections().should('have.length', 3); - // Second undo: Should move first node to it's original position - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters - .canvasNodes() - .first() - .should('have.css', 'left', `${initialPosition.left}px`) - .should('have.css', 'top', `${initialPosition.top}px`); - // Third undo: Should enable last node - WorkflowPage.actions.hitUndo(); - WorkflowPage.getters.disabledNodes().should('have.length', 0); - - // First redo: Should disable last node - WorkflowPage.actions.hitRedo(); - 
WorkflowPage.getters.disabledNodes().should('have.length', 1); - // Second redo: Should move the first node - WorkflowPage.actions.hitRedo(); - WorkflowPage.getters - .canvasNodes() - .first() - .then(($node) => { - const cssLeft = parseInt($node.css('left')); - const cssTop = parseInt($node.css('top')); - expect(cssLeft).to.be.greaterThan(initialPosition.left); - expect(cssTop).to.be.greaterThan(initialPosition.top); - }); - // Third redo: Should delete the Set node - WorkflowPage.actions.hitRedo(); - WorkflowPage.getters.canvasNodes().should('have.length', 3); - WorkflowPage.getters.nodeConnections().should('have.length', 2); - }); - }); - it('should be able to copy and paste pinned data nodes in workflows with dynamic Switch node', () => { cy.fixture('Test_workflow_form_switch.json').then((data) => { cy.get('body').paste(JSON.stringify(data)); diff --git a/cypress/e2e/11-inline-expression-editor.cy.ts b/cypress/e2e/11-inline-expression-editor.cy.ts index a762135a65fc6..e35842293edd9 100644 --- a/cypress/e2e/11-inline-expression-editor.cy.ts +++ b/cypress/e2e/11-inline-expression-editor.cy.ts @@ -129,7 +129,7 @@ describe('Inline expression editor', () => { // Run workflow ndv.actions.close(); - WorkflowPage.actions.executeNode('No Operation', { anchor: 'topLeft' }); + WorkflowPage.actions.executeNode('No Operation, do nothing', { anchor: 'topLeft' }); WorkflowPage.actions.openNode('Hacker News'); WorkflowPage.actions.openInlineExpressionEditor(); diff --git a/cypress/e2e/12-canvas-actions.cy.ts b/cypress/e2e/12-canvas-actions.cy.ts index e869f2af76ac4..f63c85dc498cf 100644 --- a/cypress/e2e/12-canvas-actions.cy.ts +++ b/cypress/e2e/12-canvas-actions.cy.ts @@ -4,9 +4,9 @@ import { CODE_NODE_NAME, SCHEDULE_TRIGGER_NODE_NAME, EDIT_FIELDS_SET_NODE_NAME, - IF_NODE_NAME, HTTP_REQUEST_NODE_NAME, } from './../constants'; +import { getCanvasPane } from '../composables/workflow'; import { successToast } from '../pages/notifications'; import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; @@ -16,64 +16,12 @@ describe('Canvas Actions', () => { WorkflowPage.actions.visit(); }); - // FIXME: Canvas V2: Missing execute button if no nodes - it('should render canvas', () => { - WorkflowPage.getters.nodeViewRoot().should('be.visible'); - WorkflowPage.getters.canvasPlusButton().should('be.visible'); - WorkflowPage.getters.zoomToFitButton().should('be.visible'); - WorkflowPage.getters.zoomInButton().should('be.visible'); - WorkflowPage.getters.zoomOutButton().should('be.visible'); - WorkflowPage.getters.executeWorkflowButton().should('be.visible'); - }); - - // FIXME: Canvas V2: Fix changing of connection - it('should connect and disconnect a simple node', () => { - WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); - WorkflowPage.getters.nodeViewBackground().click(600, 200, { force: true }); - WorkflowPage.getters.nodeConnections().should('have.length', 1); - - WorkflowPage.getters.nodeViewBackground().click(600, 400, { force: true }); - WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); - - // Change connection from Set to Set1 - cy.draganddrop( - WorkflowPage.getters.getEndpointSelector('input', EDIT_FIELDS_SET_NODE_NAME), - WorkflowPage.getters.getEndpointSelector('input', `${EDIT_FIELDS_SET_NODE_NAME}1`), - ); - - WorkflowPage.getters - .getConnectionBetweenNodes(MANUAL_TRIGGER_NODE_DISPLAY_NAME, `${EDIT_FIELDS_SET_NODE_NAME}1`) - .should('be.visible'); - - WorkflowPage.getters.nodeConnections().should('have.length', 1); - // Disconnect Set1 - cy.drag( - 
WorkflowPage.getters.getEndpointSelector('input', `${EDIT_FIELDS_SET_NODE_NAME}1`), - [-200, 100], - ); - WorkflowPage.getters.nodeConnections().should('have.length', 0); - }); - it('should add first step', () => { WorkflowPage.getters.canvasPlusButton().should('be.visible'); WorkflowPage.actions.addInitialNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodes().should('have.length', 1); }); - it('should add a node via plus endpoint drag', () => { - WorkflowPage.getters.canvasPlusButton().should('be.visible'); - WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME, true); - - cy.drag( - WorkflowPage.getters.getEndpointSelector('plus', SCHEDULE_TRIGGER_NODE_NAME), - [100, 100], - ); - - WorkflowPage.getters.nodeCreatorSearchBar().should('be.visible'); - WorkflowPage.actions.addNodeToCanvas(IF_NODE_NAME, false); - WorkflowPage.getters.nodeViewBackground().click({ force: true }); - }); - it('should add a connected node using plus endpoint', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodePlusEndpointByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); @@ -116,7 +64,7 @@ describe('Canvas Actions', () => { it('should add disconnected node if nothing is selected', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); // Deselect nodes - WorkflowPage.getters.nodeView().click({ force: true }); + getCanvasPane().click({ force: true }); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); WorkflowPage.getters.canvasNodes().should('have.length', 2); WorkflowPage.getters.nodeConnections().should('have.length', 0); @@ -166,15 +114,6 @@ describe('Canvas Actions', () => { WorkflowPage.getters.nodeConnections().should('have.length', 0); }); - // FIXME: Canvas V2: Fix disconnecting of connection by dragging it - it('should delete a connection by moving it away from endpoint', () => { - WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - cy.drag(WorkflowPage.getters.getEndpointSelector('input', CODE_NODE_NAME), [0, -100]); - WorkflowPage.getters.nodeConnections().should('have.length', 0); - }); - describe('Node hover actions', () => { it('should execute node', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); @@ -239,7 +178,6 @@ describe('Canvas Actions', () => { WorkflowPage.getters.selectedNodes().should('have.length', 0); }); - // FIXME: Canvas V2: Selection via arrow keys is broken it('should select nodes using arrow keys', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); @@ -263,7 +201,6 @@ describe('Canvas Actions', () => { ); }); - // FIXME: Canvas V2: Selection via shift and arrow keys is broken it('should select nodes using shift and arrow keys', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); @@ -272,31 +209,4 @@ describe('Canvas Actions', () => { cy.get('body').type('{shift}', { release: false }).type('{leftArrow}'); WorkflowPage.getters.selectedNodes().should('have.length', 2); }); - - // FIXME: Canvas V2: Fix select & deselect - it('should not break lasso selection when dragging node action buttons', () => { - WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - WorkflowPage.getters - .canvasNodes() - 
.last() - .findChildByTestId('execute-node-button') - .as('executeNodeButton'); - cy.drag('@executeNodeButton', [200, 200]); - WorkflowPage.actions.testLassoSelection([100, 100], [200, 200]); - }); - - // FIXME: Canvas V2: Fix select & deselect - it('should not break lasso selection with multiple clicks on node action buttons', () => { - WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - WorkflowPage.actions.testLassoSelection([100, 100], [200, 200]); - WorkflowPage.getters.canvasNodes().last().as('lastNode'); - cy.get('@lastNode').findChildByTestId('execute-node-button').as('executeNodeButton'); - for (let i = 0; i < 20; i++) { - cy.get('@lastNode').realHover(); - cy.get('@executeNodeButton').should('be.visible'); - cy.get('@executeNodeButton').realTouch(); - cy.getByTestId('execute-workflow-button').realHover(); - WorkflowPage.actions.testLassoSelection([100, 100], [200, 200]); - } - }); }); diff --git a/cypress/e2e/12-canvas.cy.ts b/cypress/e2e/12-canvas.cy.ts index c93180677a66f..be423344fbeef 100644 --- a/cypress/e2e/12-canvas.cy.ts +++ b/cypress/e2e/12-canvas.cy.ts @@ -28,8 +28,6 @@ const ZOOM_IN_X2_FACTOR = 1.5625; // Zoom in factor after two clicks const ZOOM_OUT_X1_FACTOR = 0.8; const ZOOM_OUT_X2_FACTOR = 0.64; -const PINCH_ZOOM_IN_FACTOR = 1.05702; -const PINCH_ZOOM_OUT_FACTOR = 0.946058; const RENAME_NODE_NAME = 'Something else'; const RENAME_NODE_NAME2 = 'Something different'; @@ -369,26 +367,6 @@ describe('Canvas Node Manipulation and Navigation', () => { zoomAndCheck('zoomOut', ZOOM_OUT_X2_FACTOR); }); - it('should zoom using scroll or pinch gesture', () => { - WorkflowPage.actions.pinchToZoom(1, 'zoomIn'); - - // V2 Canvas is using the same zoom factor for both pinch and scroll - cy.ifCanvasVersion( - () => checkZoomLevel(PINCH_ZOOM_IN_FACTOR), - () => checkZoomLevel(ZOOM_IN_X1_FACTOR), - ); - - WorkflowPage.actions.pinchToZoom(1, 'zoomOut'); - checkZoomLevel(1); // Zoom in 1x + Zoom out 1x should reset to default (=1) - - WorkflowPage.actions.pinchToZoom(1, 'zoomOut'); - - cy.ifCanvasVersion( - () => checkZoomLevel(PINCH_ZOOM_OUT_FACTOR), - () => checkZoomLevel(ZOOM_OUT_X1_FACTOR), - ); - }); - it('should reset zoom', () => { WorkflowPage.getters.resetZoomButton().should('not.exist'); WorkflowPage.getters.zoomInButton().click(); @@ -543,35 +521,4 @@ describe('Canvas Node Manipulation and Navigation', () => { NDVDialog.actions.close(); }); }); - - // FIXME: Canvas V2: Unknown nodes should still render connection endpoints - it('should render connections correctly if unkown nodes are present', () => { - const unknownNodeName = 'Unknown node'; - cy.createFixtureWorkflow('workflow-with-unknown-nodes.json', 'Unknown nodes'); - - WorkflowPage.getters.canvasNodeByName(`${unknownNodeName} 1`).should('exist'); - WorkflowPage.getters.canvasNodeByName(`${unknownNodeName} 2`).should('exist'); - WorkflowPage.actions.zoomToFit(); - - cy.draganddrop( - WorkflowPage.getters.getEndpointSelector('plus', `${unknownNodeName} 1`), - WorkflowPage.getters.getEndpointSelector('input', EDIT_FIELDS_SET_NODE_NAME), - ); - - cy.draganddrop( - WorkflowPage.getters.getEndpointSelector('plus', `${unknownNodeName} 2`), - WorkflowPage.getters.getEndpointSelector('input', `${EDIT_FIELDS_SET_NODE_NAME}1`), - ); - - WorkflowPage.actions.executeWorkflow(); - cy.contains('Unrecognized node type').should('be.visible'); - - WorkflowPage.actions.deselectAll(); - WorkflowPage.actions.deleteNodeFromContextMenu(`${unknownNodeName} 1`); - 
WorkflowPage.actions.deleteNodeFromContextMenu(`${unknownNodeName} 2`); - - WorkflowPage.actions.executeWorkflow(); - - cy.contains('Unrecognized node type').should('not.exist'); - }); }); diff --git a/cypress/e2e/13-pinning.cy.ts b/cypress/e2e/13-pinning.cy.ts index 2d3351f8aad3b..800f9e417a0af 100644 --- a/cypress/e2e/13-pinning.cy.ts +++ b/cypress/e2e/13-pinning.cy.ts @@ -1,6 +1,3 @@ -import { nanoid } from 'nanoid'; - -import { simpleWebhookCall, waitForWebhook } from './16-webhook-node.cy'; import { HTTP_REQUEST_NODE_NAME, MANUAL_TRIGGER_NODE_NAME, @@ -109,36 +106,6 @@ describe('Data pinning', () => { ndv.getters.outputTbodyCell(1, 0).should('include.text', 1); }); - it('Should be able to pin data from canvas (context menu or shortcut)', () => { - workflowPage.actions.addInitialNodeToCanvas('Schedule Trigger'); - workflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); - workflowPage.actions.openContextMenu(EDIT_FIELDS_SET_NODE_NAME, { method: 'overflow-button' }); - workflowPage.getters - .contextMenuAction('toggle_pin') - .parent() - .should('have.class', 'is-disabled'); - - cy.get('body').type('{esc}'); - - // Unpin using context menu - workflowPage.actions.openNode(EDIT_FIELDS_SET_NODE_NAME); - ndv.actions.setPinnedData([{ test: 1 }]); - ndv.actions.close(); - workflowPage.actions.pinNode(EDIT_FIELDS_SET_NODE_NAME); - workflowPage.actions.openNode(EDIT_FIELDS_SET_NODE_NAME); - ndv.getters.nodeOutputHint().should('exist'); - ndv.actions.close(); - - // Unpin using shortcut - workflowPage.actions.openNode(EDIT_FIELDS_SET_NODE_NAME); - ndv.actions.setPinnedData([{ test: 1 }]); - ndv.actions.close(); - workflowPage.getters.canvasNodeByName(EDIT_FIELDS_SET_NODE_NAME).click(); - workflowPage.actions.hitPinNodeShortcut(); - workflowPage.actions.openNode(EDIT_FIELDS_SET_NODE_NAME); - ndv.getters.nodeOutputHint().should('exist'); - }); - it('Should show an error when maximum pin data size is exceeded', () => { workflowPage.actions.addInitialNodeToCanvas('Schedule Trigger'); workflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME, true, true); @@ -217,32 +184,6 @@ describe('Data pinning', () => { ); }); - it('should show pinned data tooltip', () => { - const { callEndpoint } = simpleWebhookCall({ - method: 'GET', - webhookPath: nanoid(), - executeNow: false, - }); - - ndv.actions.close(); - workflowPage.actions.executeWorkflow(); - cy.wait(waitForWebhook); - - // hide other visible popper on workflow execute button - workflowPage.getters.canvasNodes().eq(0).click(); - - callEndpoint((response) => { - expect(response.status).to.eq(200); - getVisiblePopper().should('have.length', 1); - getVisiblePopper() - .eq(0) - .should( - 'have.text', - 'You can pin this output instead of waiting for a test event. 
Open node to do so.', - ); - }); - }); - it('should not show pinned data tooltip', () => { cy.createFixtureWorkflow('Pinned_webhook_node.json', 'Test'); workflowPage.actions.executeWorkflow(); diff --git a/cypress/e2e/16-webhook-node.cy.ts b/cypress/e2e/16-webhook-node.cy.ts index 193ada0bcc6d5..e0892a4a0b1f1 100644 --- a/cypress/e2e/16-webhook-node.cy.ts +++ b/cypress/e2e/16-webhook-node.cy.ts @@ -1,5 +1,6 @@ import { nanoid } from 'nanoid'; +import { simpleWebhookCall, waitForWebhook } from '../composables/webhooks'; import { BACKEND_BASE_URL, EDIT_FIELDS_SET_NODE_NAME } from '../constants'; import { WorkflowPage, NDV, CredentialsModal } from '../pages'; import { cowBase64 } from '../support/binaryTestFiles'; @@ -9,81 +10,6 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); const credentialsModal = new CredentialsModal(); -export const waitForWebhook = 500; - -interface SimpleWebhookCallOptions { - method: string; - webhookPath: string; - responseCode?: number; - respondWith?: string; - executeNow?: boolean; - responseData?: string; - authentication?: string; -} - -export const simpleWebhookCall = (options: SimpleWebhookCallOptions) => { - const { - authentication, - method, - webhookPath, - responseCode, - respondWith, - responseData, - executeNow = true, - } = options; - - workflowPage.actions.addInitialNodeToCanvas('Webhook'); - workflowPage.actions.openNode('Webhook'); - - cy.getByTestId('parameter-input-httpMethod').click(); - getVisibleSelect().find('.option-headline').contains(method).click(); - cy.getByTestId('parameter-input-path') - .find('.parameter-input') - .find('input') - .clear() - .type(webhookPath); - - if (authentication) { - cy.getByTestId('parameter-input-authentication').click(); - getVisibleSelect().find('.option-headline').contains(authentication).click(); - } - - if (responseCode) { - cy.get('.param-options').click(); - getVisibleSelect().contains('Response Code').click(); - cy.get('.parameter-item-wrapper > .parameter-input-list-wrapper').children().click(); - getVisibleSelect().contains('201').click(); - } - - if (respondWith) { - cy.getByTestId('parameter-input-responseMode').click(); - getVisibleSelect().find('.option-headline').contains(respondWith).click(); - } - - if (responseData) { - cy.getByTestId('parameter-input-responseData').click(); - getVisibleSelect().find('.option-headline').contains(responseData).click(); - } - - const callEndpoint = (cb: (response: Cypress.Response) => void) => { - cy.request(method, `${BACKEND_BASE_URL}/webhook-test/${webhookPath}`).then(cb); - }; - - if (executeNow) { - ndv.actions.execute(); - cy.wait(waitForWebhook); - - callEndpoint((response) => { - expect(response.status).to.eq(200); - ndv.getters.outputPanel().contains('headers'); - }); - } - - return { - callEndpoint, - }; -}; - describe('Webhook Trigger node', () => { beforeEach(() => { workflowPage.actions.visit(); diff --git a/cypress/e2e/1858-PAY-can-use-context-menu.ts b/cypress/e2e/1858-PAY-can-use-context-menu.ts deleted file mode 100644 index 6727df41667e1..0000000000000 --- a/cypress/e2e/1858-PAY-can-use-context-menu.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; - -const WorkflowPage = new WorkflowPageClass(); - -describe('PAY-1858 context menu', () => { - it('can use context menu on saved workflow', () => { - WorkflowPage.actions.visit(); - cy.createFixtureWorkflow('Test_workflow_filter.json', 'test'); - - WorkflowPage.getters.canvasNodes().should('have.length', 5); - 
WorkflowPage.actions.deleteNodeFromContextMenu('Then'); - WorkflowPage.getters.canvasNodes().should('have.length', 4); - - WorkflowPage.actions.hitSaveWorkflow(); - - cy.reload(); - WorkflowPage.getters.canvasNodes().should('have.length', 4); - WorkflowPage.actions.deleteNodeFromContextMenu('Code'); - WorkflowPage.getters.canvasNodes().should('have.length', 3); - }); -}); diff --git a/cypress/e2e/19-execution.cy.ts b/cypress/e2e/19-execution.cy.ts index 5be2399253f3c..4a39af1d99f54 100644 --- a/cypress/e2e/19-execution.cy.ts +++ b/cypress/e2e/19-execution.cy.ts @@ -214,91 +214,6 @@ describe('Execution', () => { workflowPage.getters.clearExecutionDataButton().should('not.exist'); }); - // FIXME: Canvas V2: Webhook should show waiting state but it doesn't - it('should test webhook workflow stop', () => { - cy.createFixtureWorkflow('Webhook_wait_set.json'); - - // Check workflow buttons - workflowPage.getters.executeWorkflowButton().should('be.visible'); - workflowPage.getters.clearExecutionDataButton().should('not.exist'); - workflowPage.getters.stopExecutionButton().should('not.exist'); - workflowPage.getters.stopExecutionWaitingForWebhookButton().should('not.exist'); - - // Execute the workflow - workflowPage.getters.zoomToFitButton().click(); - workflowPage.getters.executeWorkflowButton().click(); - - // Check workflow buttons - workflowPage.getters.executeWorkflowButton().get('.n8n-spinner').should('be.visible'); - workflowPage.getters.clearExecutionDataButton().should('not.exist'); - workflowPage.getters.stopExecutionButton().should('not.exist'); - workflowPage.getters.stopExecutionWaitingForWebhookButton().should('be.visible'); - - workflowPage.getters.canvasNodes().first().dblclick(); - - ndv.getters.copyInput().click(); - - cy.grantBrowserPermissions('clipboardReadWrite', 'clipboardSanitizedWrite'); - - ndv.getters.backToCanvas().click(); - - cy.readClipboard().then((url) => { - cy.request({ - method: 'GET', - url, - }).then((resp) => { - expect(resp.status).to.eq(200); - }); - }); - - successToast().should('be.visible'); - clearNotifications(); - - workflowPage.getters.stopExecutionButton().click(); - // Check canvas nodes after 1st step (workflow passed the manual trigger node - workflowPage.getters - .canvasNodeByName('Webhook') - .within(() => cy.get('.fa-check')) - .should('exist'); - workflowPage.getters - .canvasNodeByName('Wait') - .within(() => cy.get('.fa-check').should('not.exist')); - workflowPage.getters - .canvasNodeByName('Wait') - .within(() => cy.get('.fa-sync-alt')) - .should('exist'); - workflowPage.getters - .canvasNodeByName('Set') - .within(() => cy.get('.fa-check').should('not.exist')); - - // Check canvas nodes after workflow stopped - workflowPage.getters - .canvasNodeByName('Webhook') - .within(() => cy.get('.fa-check')) - .should('exist'); - - if (isCanvasV2()) { - workflowPage.getters - .canvasNodeByName('Wait') - .within(() => cy.get('.fa-sync-alt').should('not.exist')); - } else { - workflowPage.getters - .canvasNodeByName('Wait') - .within(() => cy.get('.fa-sync-alt').should('not.be.visible')); - } - - workflowPage.getters - .canvasNodeByName('Set') - .within(() => cy.get('.fa-check').should('not.exist')); - - successToast().should('be.visible'); - - // Clear execution data - workflowPage.getters.clearExecutionDataButton().should('be.visible'); - workflowPage.getters.clearExecutionDataButton().click(); - workflowPage.getters.clearExecutionDataButton().should('not.exist'); - }); - describe('execution preview', () => { it('when deleting the last 
execution, it should show empty state', () => { workflowPage.actions.addInitialNodeToCanvas('Manual Trigger'); @@ -312,8 +227,11 @@ describe('Execution', () => { }); }); - // FIXME: Canvas V2: Missing pinned states for `edge-label-wrapper` - describe('connections should be colored differently for pinned data', () => { + /** + * @TODO New Canvas: Different classes for pinned states on edges and nodes + */ + // eslint-disable-next-line n8n-local-rules/no-skipped-tests + describe.skip('connections should be colored differently for pinned data', () => { beforeEach(() => { cy.createFixtureWorkflow('Schedule_pinned.json'); workflowPage.actions.deselectAll(); @@ -634,45 +552,4 @@ describe('Execution', () => { errorToast().should('contain', 'Problem in node ‘Telegram‘'); }); - - it('should not show pinned data in production execution', () => { - cy.createFixtureWorkflow('Execution-pinned-data-check.json'); - - workflowPage.getters.zoomToFitButton().click(); - cy.intercept('PATCH', '/rest/workflows/*').as('workflowActivate'); - workflowPage.getters.activatorSwitch().click(); - - cy.wait('@workflowActivate'); - cy.get('body').type('{esc}'); - workflowPage.actions.openNode('Webhook'); - - cy.contains('label', 'Production URL').should('be.visible').click(); - cy.grantBrowserPermissions('clipboardReadWrite', 'clipboardSanitizedWrite'); - cy.get('.webhook-url').click(); - ndv.getters.backToCanvas().click(); - - cy.readClipboard().then((url) => { - cy.request({ - method: 'GET', - url, - }).then((resp) => { - expect(resp.status).to.eq(200); - }); - }); - - cy.intercept('GET', '/rest/executions/*').as('getExecution'); - executionsTab.actions.switchToExecutionsTab(); - - cy.wait('@getExecution'); - executionsTab.getters - .workflowExecutionPreviewIframe() - .should('be.visible') - .its('0.contentDocument.body') - .should('not.be.empty') - - .then(cy.wrap) - .find('.connection-run-items-label') - .filter(':contains("5 items")') - .should('have.length', 2); - }); }); diff --git a/cypress/e2e/2106-ADO-pinned-data-execution-preview.cy.ts b/cypress/e2e/2106-ADO-pinned-data-execution-preview.cy.ts deleted file mode 100644 index e26a7acb825d3..0000000000000 --- a/cypress/e2e/2106-ADO-pinned-data-execution-preview.cy.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { WorkflowExecutionsTab, WorkflowPage as WorkflowPageClass } from '../pages'; - -const workflowPage = new WorkflowPageClass(); -const executionsTab = new WorkflowExecutionsTab(); - -describe('ADO-2106 connections should be colored correctly for pinned data in executions preview', () => { - beforeEach(() => { - workflowPage.actions.visit(); - }); - - beforeEach(() => { - cy.createFixtureWorkflow('Webhook_set_pinned.json'); - workflowPage.actions.deselectAll(); - workflowPage.getters.zoomToFitButton().click(); - - workflowPage.getters.getConnectionBetweenNodes('Webhook', 'Set').should('have.class', 'pinned'); - }); - - it('should color connections for pinned data nodes for manual executions', () => { - workflowPage.actions.executeWorkflow(); - - executionsTab.actions.switchToExecutionsTab(); - - executionsTab.getters.successfulExecutionListItems().should('have.length', 1); - - executionsTab.getters - .workflowExecutionPreviewIframe() - .should('be.visible') - .its('0.contentDocument.body') - .should('not.be.empty') - - .then(cy.wrap) - .find('.jtk-connector[data-source-node="Webhook"][data-target-node="Set"]') - .should('have.class', 'success') - .should('have.class', 'has-run') - .should('have.class', 'pinned'); - }); -}); diff --git 
a/cypress/e2e/2111-ado-support-pinned-data-in-expressions.cy.ts b/cypress/e2e/2111-ado-support-pinned-data-in-expressions.cy.ts deleted file mode 100644 index 6d2da55b32040..0000000000000 --- a/cypress/e2e/2111-ado-support-pinned-data-in-expressions.cy.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { WorkflowPage, NDV } from '../pages'; - -const workflowPage = new WorkflowPage(); -const ndv = new NDV(); - -describe('ADO-2111 expressions should support pinned data', () => { - beforeEach(() => { - workflowPage.actions.visit(); - }); - - it('supports pinned data in expressions unexecuted and executed parent nodes', () => { - cy.createFixtureWorkflow('Test_workflow_pinned_data_in_expressions.json', 'Expressions'); - - // test previous node unexecuted - workflowPage.actions.openNode('NotPinnedWithExpressions'); - ndv.getters - .parameterExpressionPreview('value') - .eq(0) - .should('include.text', 'Joe\nJoe\nJoan\nJoan\nJoe\nJoan\n\nJoe\nJoan\n\nJoe'); - ndv.getters - .parameterExpressionPreview('value') - .eq(1) - .should('contain.text', '0,0\nJoe\n\nJoe\n\nJoe\n\nJoe\nJoe'); - - // test can resolve correctly based on item - ndv.actions.switchInputMode('Table'); - - ndv.getters.inputTableRow(2).realHover(); - cy.wait(50); - ndv.getters - .parameterExpressionPreview('value') - .eq(0) - .should('include.text', 'Joe\nJoe\nJoan\nJoan\nJoe\nJoan\n\nJoe\nJoan\n\nJoe'); - ndv.getters - .parameterExpressionPreview('value') - .eq(1) - .should('contain.text', '0,1\nJoan\n\nJoan\n\nJoan\n\nJoan\nJoan'); - - // test previous node executed - ndv.actions.execute(); - ndv.getters.inputTableRow(1).realHover(); - cy.wait(50); - - ndv.getters - .parameterExpressionPreview('value') - .eq(0) - .should('include.text', 'Joe\nJoe\nJoan\nJoan\nJoe\nJoan\n\nJoe\nJoan\n\nJoe'); - - ndv.getters - .parameterExpressionPreview('value') - .eq(1) - .should('contain.text', '0,0\nJoe\n\nJoe\n\nJoe\n\nJoe\nJoe'); - - ndv.getters.inputTableRow(2).realHover(); - cy.wait(50); - ndv.getters - .parameterExpressionPreview('value') - .eq(0) - .should('include.text', 'Joe\nJoe\nJoan\nJoan\nJoe\nJoan\n\nJoe\nJoan\n\nJoe'); - ndv.getters - .parameterExpressionPreview('value') - .eq(1) - .should('contain.text', '0,1\nJoan\n\nJoan\n\nJoan\n\nJoan\nJoan'); - - // check it resolved correctly on the backend - ndv.getters - .outputTbodyCell(1, 0) - .should('contain.text', 'Joe\\nJoe\\nJoan\\nJoan\\nJoe\\nJoan\\n\\nJoe\\nJoan\\n\\nJoe'); - - ndv.getters - .outputTbodyCell(2, 0) - .should('contain.text', 'Joe\\nJoe\\nJoan\\nJoan\\nJoe\\nJoan\\n\\nJoe\\nJoan\\n\\nJoe'); - - ndv.getters - .outputTbodyCell(1, 1) - .should('contain.text', '0,0\\nJoe\\n\\nJoe\\n\\nJoe\\n\\nJoe\\nJoe'); - - ndv.getters - .outputTbodyCell(2, 1) - .should('contain.text', '0,1\\nJoan\\n\\nJoan\\n\\nJoan\\n\\nJoan\\nJoan'); - }); - - it('resets expressions after node is unpinned', () => { - cy.createFixtureWorkflow('Test_workflow_pinned_data_in_expressions.json', 'Expressions'); - - // test previous node unexecuted - workflowPage.actions.openNode('NotPinnedWithExpressions'); - ndv.getters - .parameterExpressionPreview('value') - .eq(0) - .should('include.text', 'Joe\nJoe\nJoan\nJoan\nJoe\nJoan\n\nJoe\nJoan\n\nJoe'); - ndv.getters - .parameterExpressionPreview('value') - .eq(1) - .should('contain.text', '0,0\nJoe\n\nJoe\n\nJoe\n\nJoe\nJoe'); - - ndv.actions.close(); - - // unpin pinned node - workflowPage.getters - .canvasNodeByName('PinnedSet') - .eq(0) - .find('.node-pin-data-icon') - .should('exist'); - workflowPage.getters.canvasNodeByName('PinnedSet').eq(0).click(); - 
workflowPage.actions.hitPinNodeShortcut(); - workflowPage.getters - .canvasNodeByName('PinnedSet') - .eq(0) - .find('.node-pin-data-icon') - .should('not.exist'); - - workflowPage.actions.openNode('NotPinnedWithExpressions'); - ndv.getters.nodeParameters().find('parameter-expression-preview-value').should('not.exist'); - - ndv.getters.parameterInput('value').eq(0).click(); - ndv.getters - .inlineExpressionEditorOutput() - .should( - 'have.text', - '[Execute node ‘PinnedSet’ for preview][Execute node ‘PinnedSet’ for preview][Execute node ‘PinnedSet’ for preview][Execute node ‘PinnedSet’ for preview][Execute node ‘PinnedSet’ for preview][Execute node ‘PinnedSet’ for preview][Execute previous nodes for preview][Execute previous nodes for preview][undefined]', - ); - - // close open expression - ndv.getters.inputLabel().eq(0).click(); - - ndv.getters.parameterInput('value').eq(1).click(); - ndv.getters - .inlineExpressionEditorOutput() - .should( - 'have.text', - '0,0[Execute node ‘PinnedSet’ for preview][Execute node ‘PinnedSet’ for preview][Execute previous nodes for preview][Execute previous nodes for preview][Execute previous nodes for preview]', - ); - }); -}); diff --git a/cypress/e2e/24-ndv-paired-item.cy.ts b/cypress/e2e/24-ndv-paired-item.cy.ts index 49257a8a12f52..ccae14f6c9d91 100644 --- a/cypress/e2e/24-ndv-paired-item.cy.ts +++ b/cypress/e2e/24-ndv-paired-item.cy.ts @@ -118,6 +118,15 @@ describe('NDV', () => { ndv.actions.switchInputMode('Table'); ndv.actions.switchOutputMode('Table'); + // Start from linked state + ndv.getters.outputLinkRun().then(($el) => { + const classList = Array.from($el[0].classList); + if (!classList.includes('linked')) { + ndv.actions.toggleOutputRunLinking(); + ndv.getters.inputTbodyCell(1, 0).click(); // remove tooltip + } + }); + ndv.getters .inputRunSelector() .should('exist') @@ -243,38 +252,38 @@ describe('NDV', () => { // biome-ignore format: const PINNED_DATA = [ { - "id": "abc", - "historyId": "def", - "messages": [ + id: 'abc', + historyId: 'def', + messages: [ { - "id": "abc" - } - ] + id: 'abc', + }, + ], }, { - "id": "abc", - "historyId": "def", - "messages": [ + id: 'abc', + historyId: 'def', + messages: [ { - "id": "abc" + id: 'abc', }, { - "id": "abc" + id: 'abc', }, { - "id": "abc" - } - ] + id: 'abc', + }, + ], }, { - "id": "abc", - "historyId": "def", - "messages": [ + id: 'abc', + historyId: 'def', + messages: [ { - "id": "abc" - } - ] - } + id: 'abc', + }, + ], + }, ]; workflowPage.actions.openNode('Get thread details1'); ndv.actions.pastePinnedData(PINNED_DATA); diff --git a/cypress/e2e/25-stickies.cy.ts b/cypress/e2e/25-stickies.cy.ts index 14c176f17b4ef..da8d6c2674cca 100644 --- a/cypress/e2e/25-stickies.cy.ts +++ b/cypress/e2e/25-stickies.cy.ts @@ -3,24 +3,6 @@ import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; const workflowPage = new WorkflowPageClass(); -function checkStickiesStyle( - top: number, - left: number, - height: number, - width: number, - zIndex?: number, -) { - workflowPage.getters.stickies().should(($el) => { - expect($el).to.have.css('top', `${top}px`); - expect($el).to.have.css('left', `${left}px`); - expect($el).to.have.css('height', `${height}px`); - expect($el).to.have.css('width', `${width}px`); - if (zIndex) { - expect($el).to.have.css('z-index', `${zIndex}`); - } - }); -} - describe('Canvas Actions', () => { beforeEach(() => { workflowPage.actions.visit(); @@ -51,191 +33,8 @@ describe('Canvas Actions', () => { .contains('Guide') .should('have.attr', 'href'); }); - - it('drags sticky 
around to top left corner', () => { - // used to caliberate move sticky function - addDefaultSticky(); - moveSticky({ top: 0, left: 0 }); - }); - - it('drags sticky around and position/size are saved correctly', () => { - addDefaultSticky(); - moveSticky({ top: 500, left: 500 }); - - workflowPage.actions.saveWorkflowOnButtonClick(); - cy.wait('@createWorkflow'); - - cy.reload(); - cy.waitForLoad(); - - stickyShouldBePositionedCorrectly({ top: 500, left: 500 }); - }); - - it('deletes sticky', () => { - workflowPage.actions.addSticky(); - workflowPage.getters.stickies().should('have.length', 1); - - workflowPage.actions.deleteSticky(); - - workflowPage.getters.stickies().should('have.length', 0); - }); - - it('edits sticky and updates content as markdown', () => { - workflowPage.actions.addSticky(); - - workflowPage.getters - .stickies() - .should('have.text', 'I’m a note\nDouble click to edit me. Guide\n'); - - workflowPage.getters.stickies().dblclick(); - workflowPage.actions.editSticky('# hello world \n ## text text'); - workflowPage.getters.stickies().find('h1').should('have.text', 'hello world'); - workflowPage.getters.stickies().find('h2').should('have.text', 'text text'); - }); - - it('expands/shrinks sticky from the right edge', () => { - addDefaultSticky(); - - moveSticky({ top: 200, left: 200 }); - - cy.drag('[data-test-id="sticky"] [data-dir="right"]', [100, 100]); - checkStickiesStyle(100, 20, 160, 346); - - cy.drag('[data-test-id="sticky"] [data-dir="right"]', [-50, -50]); - checkStickiesStyle(100, 20, 160, 302); - }); - - it('expands/shrinks sticky from the left edge', () => { - addDefaultSticky(); - - moveSticky({ left: 600, top: 200 }); - cy.drag('[data-test-id="sticky"] [data-dir="left"]', [100, 100]); - checkStickiesStyle(100, 510, 160, 150); - - cy.drag('[data-test-id="sticky"] [data-dir="left"]', [-50, -50]); - checkStickiesStyle(100, 466, 160, 194); - }); - - it('expands/shrinks sticky from the top edge', () => { - workflowPage.actions.addSticky(); - cy.drag('[data-test-id="sticky"]', [100, 100]); // move away from canvas button - checkStickiesStyle(300, 620, 160, 240); - - cy.drag('[data-test-id="sticky"] [data-dir="top"]', [100, 100]); - checkStickiesStyle(380, 620, 80, 240); - - cy.drag('[data-test-id="sticky"] [data-dir="top"]', [-50, -50]); - checkStickiesStyle(324, 620, 136, 240); - }); - - it('expands/shrinks sticky from the bottom edge', () => { - workflowPage.actions.addSticky(); - cy.drag('[data-test-id="sticky"]', [100, 100]); // move away from canvas button - checkStickiesStyle(300, 620, 160, 240); - - cy.drag('[data-test-id="sticky"] [data-dir="bottom"]', [100, 100]); - checkStickiesStyle(300, 620, 254, 240); - - cy.drag('[data-test-id="sticky"] [data-dir="bottom"]', [-50, -50]); - checkStickiesStyle(300, 620, 198, 240); - }); - - it('expands/shrinks sticky from the bottom right edge', () => { - workflowPage.actions.addSticky(); - cy.drag('[data-test-id="sticky"]', [-100, -100]); // move away from canvas button - checkStickiesStyle(100, 420, 160, 240); - - cy.drag('[data-test-id="sticky"] [data-dir="bottomRight"]', [100, 100]); - checkStickiesStyle(100, 420, 254, 346); - - cy.drag('[data-test-id="sticky"] [data-dir="bottomRight"]', [-50, -50]); - checkStickiesStyle(100, 420, 198, 302); - }); - - it('expands/shrinks sticky from the top right edge', () => { - addDefaultSticky(); - - cy.drag('[data-test-id="sticky"] [data-dir="topRight"]', [100, 100]); - checkStickiesStyle(360, 400, 80, 346); - - cy.drag('[data-test-id="sticky"] [data-dir="topRight"]', [-50, 
-50]); - checkStickiesStyle(304, 400, 136, 302); - }); - - it('expands/shrinks sticky from the top left edge, and reach min height/width', () => { - addDefaultSticky(); - - cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [100, 100]); - checkStickiesStyle(360, 490, 80, 150); - - cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [-150, -150]); - checkStickiesStyle(204, 346, 236, 294); - }); - - it('sets sticky behind node', () => { - workflowPage.actions.addInitialNodeToCanvas('Manual Trigger'); - addDefaultSticky(); - - cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [-150, -150]); - checkStickiesStyle(124, 256, 316, 384, -121); - - workflowPage.getters - .canvasNodes() - .eq(0) - .should(($el) => { - expect($el).to.have.css('z-index', 'auto'); - }); - - workflowPage.actions.addSticky(); - workflowPage.getters - .stickies() - .eq(0) - .should(($el) => { - expect($el).to.have.css('z-index', '-121'); - }); - workflowPage.getters - .stickies() - .eq(1) - .should(($el) => { - expect($el).to.have.css('z-index', '-38'); - }); - - cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [-200, -200], { index: 1 }); - workflowPage.getters - .stickies() - .eq(0) - .should(($el) => { - expect($el).to.have.css('z-index', '-121'); - }); - - workflowPage.getters - .stickies() - .eq(1) - .should(($el) => { - expect($el).to.have.css('z-index', '-158'); - }); - }); - - it('Empty sticky should not error when activating workflow', () => { - workflowPage.actions.addSticky(); - - workflowPage.getters.stickies().should('have.length', 1); - - workflowPage.getters.stickies().dblclick(); - - workflowPage.actions.clearSticky(); - - workflowPage.actions.addNodeToCanvas('Schedule Trigger'); - - workflowPage.actions.activateWorkflow(); - }); }); -type Position = { - top: number; - left: number; -}; - function shouldHaveOneSticky() { workflowPage.getters.stickies().should('have.length', 1); } @@ -263,17 +62,3 @@ function addDefaultSticky() { shouldHaveDefaultSize(); shouldBeInDefaultLocation(); } - -function stickyShouldBePositionedCorrectly(position: Position) { - const yOffset = -100; - const xOffset = -180; - workflowPage.getters.stickies().should(($el) => { - expect($el).to.have.css('top', `${yOffset + position.top}px`); - expect($el).to.have.css('left', `${xOffset + position.left}px`); - }); -} - -function moveSticky(target: Position) { - cy.drag('[data-test-id="sticky"]', [target.left, target.top], { abs: true }); - stickyShouldBePositionedCorrectly(target); -} diff --git a/cypress/e2e/30-editor-after-route-changes.cy.ts b/cypress/e2e/30-editor-after-route-changes.cy.ts index 89c66d2dab946..89c64e1156c6d 100644 --- a/cypress/e2e/30-editor-after-route-changes.cy.ts +++ b/cypress/e2e/30-editor-after-route-changes.cy.ts @@ -1,86 +1,7 @@ import { getWorkflowHistoryCloseButton } from '../composables/workflow'; -import { - CODE_NODE_NAME, - EDIT_FIELDS_SET_NODE_NAME, - IF_NODE_NAME, - SCHEDULE_TRIGGER_NODE_NAME, -} from '../constants'; -import { WorkflowExecutionsTab, WorkflowPage as WorkflowPageClass } from '../pages'; +import { WorkflowPage as WorkflowPageClass } from '../pages'; const workflowPage = new WorkflowPageClass(); -const executionsTab = new WorkflowExecutionsTab(); - -const createNewWorkflowAndActivate = () => { - workflowPage.actions.visit(); - workflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); - workflowPage.actions.saveWorkflowOnButtonClick(); - workflowPage.actions.activateWorkflow(); - cy.get('.el-notification .el-notification--error').should('not.exist'); -}; - 
-const editWorkflowAndDeactivate = () => { - workflowPage.getters.canvasNodePlusEndpointByName(SCHEDULE_TRIGGER_NODE_NAME).click(); - workflowPage.getters.nodeCreatorSearchBar().should('be.visible'); - workflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME, false); - cy.get('.jtk-connector').should('have.length', 1); - workflowPage.actions.saveWorkflowOnButtonClick(); - workflowPage.getters.activatorSwitch().click(); - workflowPage.actions.zoomToFit(); - cy.get('.el-notification .el-notification--error').should('not.exist'); -}; - -const editWorkflowMoreAndActivate = () => { - cy.drag(workflowPage.getters.getEndpointSelector('plus', EDIT_FIELDS_SET_NODE_NAME), [200, 200], { - realMouse: true, - }); - workflowPage.getters.nodeCreatorSearchBar().should('be.visible'); - - workflowPage.actions.addNodeToCanvas(CODE_NODE_NAME, false); - workflowPage.getters.nodeViewBackground().click(600, 200, { force: true }); - cy.get('.jtk-connector').should('have.length', 2); - workflowPage.actions.zoomToFit(); - workflowPage.actions.saveWorkflowOnButtonClick(); - - workflowPage.actions.addNodeToCanvas(IF_NODE_NAME); - workflowPage.getters.nodeViewBackground().click(600, 200, { force: true }); - cy.get('.jtk-connector').should('have.length', 2); - - const position = { - top: 0, - left: 0, - }; - workflowPage.getters - .canvasNodeByName(IF_NODE_NAME) - .click() - .then(($element) => { - position.top = $element.position().top; - position.left = $element.position().left; - }); - - cy.drag('[data-test-id="canvas-node"].jtk-drag-selected', [50, 200]); - workflowPage.getters - .canvasNodes() - .last() - .then(($element) => { - const finalPosition = { - top: $element.position().top, - left: $element.position().left, - }; - - expect(finalPosition.top).to.be.greaterThan(position.top); - expect(finalPosition.left).to.be.greaterThan(position.left); - }); - - cy.draganddrop( - workflowPage.getters.getEndpointSelector('output', CODE_NODE_NAME), - workflowPage.getters.getEndpointSelector('input', IF_NODE_NAME), - ); - cy.get('.jtk-connector').should('have.length', 3); - - workflowPage.actions.saveWorkflowOnButtonClick(); - workflowPage.getters.activatorSwitch().click(); - cy.get('.el-notification .el-notification--error').should('not.exist'); -}; const switchBetweenEditorAndHistory = () => { workflowPage.getters.workflowHistoryButton().click(); @@ -116,62 +37,6 @@ const zoomInAndCheckNodes = () => { workflowPage.getters.canvasNodes().last().should('not.be.visible'); }; -describe('Editor actions should work', () => { - beforeEach(() => { - cy.enableFeature('debugInEditor'); - cy.enableFeature('workflowHistory'); - cy.signinAsOwner(); - createNewWorkflowAndActivate(); - }); - - it('after switching between Editor and Executions', () => { - cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions'); - - executionsTab.actions.switchToExecutionsTab(); - cy.wait(['@getExecutions']); - cy.wait(500); - executionsTab.actions.switchToEditorTab(); - editWorkflowAndDeactivate(); - editWorkflowMoreAndActivate(); - }); - - it('after switching between Editor and Debug', () => { - cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions'); - cy.intercept('GET', '/rest/executions/*').as('getExecution'); - cy.intercept('POST', '/rest/workflows/**/run?**').as('postWorkflowRun'); - - editWorkflowAndDeactivate(); - workflowPage.actions.executeWorkflow(); - cy.wait(['@postWorkflowRun']); - - executionsTab.actions.switchToExecutionsTab(); - cy.wait(['@getExecutions']); - - 
executionsTab.getters.executionListItems().should('have.length', 1).first().click(); - cy.wait(['@getExecution']); - - executionsTab.getters.executionDebugButton().should('have.text', 'Copy to editor').click(); - editWorkflowMoreAndActivate(); - }); - - it('after switching between Editor and Workflow history', () => { - cy.intercept('GET', '/rest/workflow-history/workflow/*/version/*').as('getVersion'); - cy.intercept('GET', '/rest/workflow-history/workflow/*').as('getHistory'); - - editWorkflowAndDeactivate(); - workflowPage.getters.workflowHistoryButton().click(); - cy.wait(['@getHistory']); - cy.wait(['@getVersion']); - - cy.intercept('GET', '/rest/workflows/*').as('workflowGet'); - getWorkflowHistoryCloseButton().click(); - cy.wait(['@workflowGet']); - cy.wait(1000); - - editWorkflowMoreAndActivate(); - }); -}); - describe('Editor zoom should work after route changes', () => { beforeEach(() => { cy.enableFeature('debugInEditor'); diff --git a/cypress/e2e/30-langchain.cy.ts b/cypress/e2e/30-langchain.cy.ts index f5def3f7f855c..2d0076bc11592 100644 --- a/cypress/e2e/30-langchain.cy.ts +++ b/cypress/e2e/30-langchain.cy.ts @@ -38,8 +38,6 @@ import { addToolNodeToParent, clickExecuteWorkflowButton, clickManualChatButton, - disableNode, - getExecuteWorkflowButton, navigateToNewWorkflowPage, getNodes, openNode, @@ -73,27 +71,6 @@ describe('Langchain Integration', () => { getManualChatModal().should('not.exist'); }); - it('should disable test workflow button', () => { - addNodeToCanvas('Schedule Trigger', true); - addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME, true); - - clickGetBackToCanvas(); - - addNodeToCanvas(AGENT_NODE_NAME, true, true); - clickGetBackToCanvas(); - - addLanguageModelNodeToParent( - AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, - AGENT_NODE_NAME, - true, - ); - clickGetBackToCanvas(); - - disableNode('Schedule Trigger'); - - getExecuteWorkflowButton().should('be.disabled'); - }); - it('should add nodes to all Agent node input types', () => { addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME, true); addNodeToCanvas(AGENT_NODE_NAME, true, true); @@ -368,58 +345,6 @@ describe('Langchain Integration', () => { getConnectionBySourceAndTarget(CHAT_TRIGGER_NODE_DISPLAY_NAME, AGENT_NODE_NAME).should('exist'); getNodes().should('have.length', 3); }); - it('should render runItems for sub-nodes and allow switching between them', () => { - const workflowPage = new WorkflowPage(); - const ndv = new NDV(); - - cy.visit(workflowPage.url); - cy.createFixtureWorkflow('In_memory_vector_store_fake_embeddings.json'); - workflowPage.actions.zoomToFit(); - - workflowPage.actions.executeNode('Populate VS'); - cy.get('[data-label="25 items"]').should('exist'); - - const assertInputOutputText = (text: string, assertion: 'exist' | 'not.exist') => { - ndv.getters.outputPanel().contains(text).should(assertion); - ndv.getters.inputPanel().contains(text).should(assertion); - }; - - workflowPage.actions.openNode('Character Text Splitter'); - ndv.getters.outputRunSelector().should('exist'); - ndv.getters.inputRunSelector().should('exist'); - ndv.getters.inputRunSelector().find('input').should('include.value', '3 of 3'); - ndv.getters.outputRunSelector().find('input').should('include.value', '3 of 3'); - assertInputOutputText('Kyiv', 'exist'); - assertInputOutputText('Berlin', 'not.exist'); - assertInputOutputText('Prague', 'not.exist'); - - ndv.actions.changeOutputRunSelector('2 of 3'); - assertInputOutputText('Berlin', 'exist'); - assertInputOutputText('Kyiv', 'not.exist'); - assertInputOutputText('Prague', 
'not.exist'); - - ndv.actions.changeOutputRunSelector('1 of 3'); - assertInputOutputText('Prague', 'exist'); - assertInputOutputText('Berlin', 'not.exist'); - assertInputOutputText('Kyiv', 'not.exist'); - - ndv.actions.toggleInputRunLinking(); - ndv.actions.changeOutputRunSelector('2 of 3'); - ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3'); - ndv.getters.outputRunSelector().find('input').should('include.value', '2 of 3'); - ndv.getters.inputPanel().contains('Prague').should('exist'); - ndv.getters.inputPanel().contains('Berlin').should('not.exist'); - - ndv.getters.outputPanel().contains('Berlin').should('exist'); - ndv.getters.outputPanel().contains('Prague').should('not.exist'); - - ndv.actions.toggleInputRunLinking(); - ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3'); - ndv.getters.outputRunSelector().find('input').should('include.value', '1 of 3'); - assertInputOutputText('Prague', 'exist'); - assertInputOutputText('Berlin', 'not.exist'); - assertInputOutputText('Kyiv', 'not.exist'); - }); it('should show tool info notice if no existing tools were used during execution', () => { addNodeToCanvas(MANUAL_CHAT_TRIGGER_NODE_NAME, true); @@ -519,29 +444,6 @@ describe('Langchain Integration', () => { getRunDataInfoCallout().should('not.exist'); }); - it('should execute up to Node 1 when using partial execution', () => { - const workflowPage = new WorkflowPage(); - - cy.visit(workflowPage.url); - cy.createFixtureWorkflow('Test_workflow_chat_partial_execution.json'); - workflowPage.actions.zoomToFit(); - - getManualChatModal().should('not.exist'); - workflowPage.actions.executeNode('Node 1'); - - getManualChatModal().should('exist'); - sendManualChatMessage('Test'); - - getManualChatMessages().should('contain', 'this_my_field_1'); - cy.getByTestId('refresh-session-button').click(); - cy.get('button').contains('Reset').click(); - getManualChatMessages().should('not.exist'); - - sendManualChatMessage('Another test'); - getManualChatMessages().should('contain', 'this_my_field_3'); - getManualChatMessages().should('contain', 'this_my_field_4'); - }); - it('should execute up to Node 1 when using partial execution', () => { const workflowPage = new WorkflowPage(); const ndv = new NDV(); diff --git a/cypress/e2e/39-projects.cy.ts b/cypress/e2e/39-projects.cy.ts index 197d5852566df..b506040c87a21 100644 --- a/cypress/e2e/39-projects.cy.ts +++ b/cypress/e2e/39-projects.cy.ts @@ -1,3 +1,5 @@ +import { setCredentialValues } from '../composables/modals/credential-modal'; +import { clickCreateNewCredential, selectResourceLocatorItem } from '../composables/ndv'; import * as projects from '../composables/projects'; import { INSTANCE_ADMIN, @@ -11,18 +13,16 @@ import { WorkflowPage, CredentialsModal, CredentialsPage, - WorkflowExecutionsTab, NDV, MainSidebar, } from '../pages'; import { clearNotifications, successToast } from '../pages/notifications'; -import { getVisibleDropdown, getVisibleModalOverlay, getVisibleSelect } from '../utils'; +import { getVisibleSelect } from '../utils'; const workflowsPage = new WorkflowsPage(); const workflowPage = new WorkflowPage(); const credentialsPage = new CredentialsPage(); const credentialsModal = new CredentialsModal(); -const executionsTab = new WorkflowExecutionsTab(); const ndv = new NDV(); const mainSidebar = new MainSidebar(); @@ -36,207 +36,6 @@ describe('Projects', { disableAutoLogin: true }, () => { cy.changeQuota('maxTeamProjects', -1); }); - it('should handle workflows and credentials and menu 
items', () => { - cy.signinAsAdmin(); - cy.visit(workflowsPage.url); - workflowsPage.getters.workflowCards().should('not.have.length'); - - workflowsPage.getters.newWorkflowButtonCard().click(); - - cy.intercept('POST', '/rest/workflows').as('workflowSave'); - workflowPage.actions.saveWorkflowOnButtonClick(); - - cy.wait('@workflowSave').then((interception) => { - expect(interception.request.body).not.to.have.property('projectId'); - }); - - projects.getHomeButton().click(); - projects.getProjectTabs().should('have.length', 3); - - projects.getProjectTabCredentials().click(); - credentialsPage.getters.credentialCards().should('not.have.length'); - - credentialsPage.getters.emptyListCreateCredentialButton().click(); - credentialsModal.getters.newCredentialModal().should('be.visible'); - credentialsModal.getters.newCredentialTypeSelect().should('be.visible'); - credentialsModal.getters.newCredentialTypeOption('Notion API').click(); - credentialsModal.getters.newCredentialTypeButton().click(); - credentialsModal.getters.connectionParameter('Internal Integration Secret').type('1234567890'); - credentialsModal.actions.setName('My awesome Notion account'); - - cy.intercept('POST', '/rest/credentials').as('credentialSave'); - credentialsModal.actions.save(); - cy.wait('@credentialSave').then((interception) => { - expect(interception.request.body).not.to.have.property('projectId'); - }); - - credentialsModal.actions.close(); - credentialsPage.getters.credentialCards().should('have.length', 1); - credentialsPage.getters - .credentialCards() - .first() - .find('.n8n-node-icon img') - .should('be.visible'); - - projects.getProjectTabWorkflows().click(); - workflowsPage.getters.workflowCards().should('have.length', 1); - - projects.getMenuItems().should('not.have.length'); - - cy.intercept('POST', '/rest/projects').as('projectCreate'); - projects.getAddProjectButton().click(); - cy.wait('@projectCreate'); - projects.getMenuItems().should('have.length', 1); - projects.getProjectTabs().should('have.length', 3); - - cy.get('input[name="name"]').type('Development'); - projects.addProjectMember(INSTANCE_MEMBERS[0].email); - - cy.intercept('PATCH', '/rest/projects/*').as('projectSettingsSave'); - projects.getProjectSettingsSaveButton().click(); - cy.wait('@projectSettingsSave').then((interception) => { - expect(interception.request.body).to.have.property('name').and.to.equal('Development'); - expect(interception.request.body).to.have.property('relations').to.have.lengthOf(2); - }); - - projects.getMenuItems().first().click(); - workflowsPage.getters.workflowCards().should('not.have.length'); - projects.getProjectTabs().should('have.length', 4); - - workflowsPage.getters.newWorkflowButtonCard().click(); - - cy.intercept('POST', '/rest/workflows').as('workflowSave'); - workflowPage.actions.saveWorkflowOnButtonClick(); - - cy.wait('@workflowSave').then((interception) => { - expect(interception.request.body).to.have.property('projectId'); - }); - - projects.getMenuItems().first().click(); - - projects.getProjectTabCredentials().click(); - credentialsPage.getters.credentialCards().should('not.have.length'); - - credentialsPage.getters.emptyListCreateCredentialButton().click(); - credentialsModal.getters.newCredentialModal().should('be.visible'); - credentialsModal.getters.newCredentialTypeSelect().should('be.visible'); - credentialsModal.getters.newCredentialTypeOption('Notion API').click(); - credentialsModal.getters.newCredentialTypeButton().click(); - credentialsModal.getters.connectionParameter('Internal 
Integration Secret').type('1234567890'); - credentialsModal.actions.setName('My awesome Notion account'); - - cy.intercept('POST', '/rest/credentials').as('credentialSave'); - credentialsModal.actions.save(); - cy.wait('@credentialSave').then((interception) => { - expect(interception.request.body).to.have.property('projectId'); - }); - credentialsModal.actions.close(); - - projects.getAddProjectButton().click(); - projects.getMenuItems().should('have.length', 2); - - let projectId: string; - projects.getMenuItems().first().click(); - cy.intercept('GET', '/rest/credentials*').as('credentialsList'); - projects.getProjectTabCredentials().click(); - cy.wait('@credentialsList').then((interception) => { - const url = new URL(interception.request.url); - const queryParams = new URLSearchParams(url.search); - const filter = queryParams.get('filter'); - expect(filter).to.be.a('string').and.to.contain('projectId'); - - if (filter) { - projectId = JSON.parse(filter).projectId; - } - }); - - projects.getMenuItems().last().click(); - cy.intercept('GET', '/rest/credentials*').as('credentialsListProjectId'); - projects.getProjectTabCredentials().click(); - cy.wait('@credentialsListProjectId').then((interception) => { - const url = new URL(interception.request.url); - const queryParams = new URLSearchParams(url.search); - const filter = queryParams.get('filter'); - expect(filter).to.be.a('string').and.to.contain('projectId'); - - if (filter) { - expect(JSON.parse(filter).projectId).not.to.equal(projectId); - } - }); - - projects.getHomeButton().click(); - workflowsPage.getters.workflowCards().should('have.length', 2); - - cy.intercept('GET', '/rest/credentials*').as('credentialsListUnfiltered'); - projects.getProjectTabCredentials().click(); - cy.wait('@credentialsListUnfiltered').then((interception) => { - expect(interception.request.url).not.to.contain('filter'); - }); - - let menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Overview")[class*=active_]').should('exist'); - - projects.getMenuItems().first().click(); - - menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Development")[class*=active_]').should('exist'); - - cy.intercept('GET', '/rest/workflows/*').as('loadWorkflow'); - workflowsPage.getters.workflowCards().first().findChildByTestId('card-content').click(); - - cy.wait('@loadWorkflow'); - menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Development")[class*=active_]').should('exist'); - - cy.intercept('GET', '/rest/executions*').as('loadExecutions'); - executionsTab.actions.switchToExecutionsTab(); - - cy.wait('@loadExecutions'); - menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Development")[class*=active_]').should('exist'); - - executionsTab.actions.switchToEditorTab(); - - menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Development")[class*=active_]').should('exist'); - - cy.getByTestId('menu-item').filter(':contains("Variables")').click(); - cy.getByTestId('unavailable-resources-list').should('be.visible'); - - menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - 
menuItems.filter(':contains("Variables")[class*=active_]').should('exist'); - - projects.getHomeButton().click(); - menuItems = cy.getByTestId('menu-item'); - - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Overview")[class*=active_]').should('exist'); - - workflowsPage.getters.workflowCards().should('have.length', 2).first().click(); - - cy.wait('@loadWorkflow'); - cy.getByTestId('execute-workflow-button').should('be.visible'); - - menuItems = cy.getByTestId('menu-item'); - menuItems.filter(':contains("Overview")[class*=active_]').should('not.exist'); - - menuItems = cy.getByTestId('menu-item'); - menuItems.filter('[class*=active_]').should('have.length', 1); - menuItems.filter(':contains("Development")[class*=active_]').should('exist'); - }); - it('should not show project add button and projects to a member if not invited to any project', () => { cy.signinAsMember(1); cy.visit(workflowsPage.url); @@ -245,26 +44,6 @@ describe('Projects', { disableAutoLogin: true }, () => { projects.getMenuItems().should('not.exist'); }); - it('should not show viewer role if not licensed', () => { - cy.signinAsOwner(); - cy.visit(workflowsPage.url); - - projects.getMenuItems().first().click(); - projects.getProjectTabSettings().click(); - - cy.get( - `[data-test-id="user-list-item-${INSTANCE_MEMBERS[0].email}"] [data-test-id="projects-settings-user-role-select"]`, - ).click(); - - cy.get('.el-select-dropdown__item.is-disabled') - .should('contain.text', 'Viewer') - .get('span:contains("Upgrade")') - .filter(':visible') - .click(); - - getVisibleModalOverlay().should('contain.text', 'Upgrade to unlock additional roles'); - }); - describe('when starting from scratch', () => { beforeEach(() => { cy.resetDatabase(); @@ -275,7 +54,11 @@ describe('Projects', { disableAutoLogin: true }, () => { cy.changeQuota('maxTeamProjects', -1); }); - it('should filter credentials by project ID when creating new workflow or hard reloading an opened workflow', () => { + /** + * @TODO: New Canvas - Fix this test + */ + // eslint-disable-next-line n8n-local-rules/no-skipped-tests + it.skip('should filter credentials by project ID when creating new workflow or hard reloading an opened workflow', () => { cy.signinAsOwner(); cy.visit(workflowsPage.url); @@ -754,80 +537,64 @@ describe('Projects', { disableAutoLogin: true }, () => { ndv.getters.credentialInput().find('input').should('be.enabled'); }); - it('should handle viewer role', () => { - cy.enableFeature('projectRole:viewer'); + it('should create sub-workflow and credential in the sub-workflow in the same project', () => { cy.signinAsOwner(); cy.visit(workflowsPage.url); - projects.createProject('Development'); - projects.addProjectMember(INSTANCE_MEMBERS[0].email, 'Viewer'); - projects.getProjectSettingsSaveButton().click(); - + projects.createProject('Dev'); projects.getProjectTabWorkflows().click(); workflowsPage.getters.newWorkflowButtonCard().click(); - projects.createWorkflow('Test_workflow_4_executions_view.json', 'WF with random error'); - executionsTab.actions.createManualExecutions(2); - executionsTab.actions.toggleNodeEnabled('Error'); - executionsTab.actions.createManualExecutions(2); - workflowPage.actions.saveWorkflowUsingKeyboardShortcut(); + workflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + workflowPage.actions.saveWorkflowOnButtonClick(); + workflowPage.actions.addNodeToCanvas('Execute Workflow', true, true); - projects.getMenuItems().first().click(); - projects.getProjectTabCredentials().click(); 
- credentialsPage.getters.emptyListCreateCredentialButton().click(); - projects.createCredential('Notion API'); + cy.window().then((win) => { + cy.stub(win, 'open').callsFake((url) => { + cy.visit(url); + }); + }); - mainSidebar.actions.openUserMenu(); - cy.getByTestId('user-menu-item-logout').click(); + selectResourceLocatorItem('workflowId', 0, 'Create a'); - cy.get('input[name="email"]').type(INSTANCE_MEMBERS[0].email); - cy.get('input[name="password"]').type(INSTANCE_MEMBERS[0].password); - cy.getByTestId('form-submit-button').click(); + cy.get('body').type('{esc}'); + workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true); + clickCreateNewCredential(); + setCredentialValues({ + apiKey: 'abc123', + }); + ndv.actions.close(); + workflowPage.actions.saveWorkflowOnButtonClick(); projects.getMenuItems().last().click(); - projects.getProjectTabExecutions().click(); - cy.getByTestId('global-execution-list-item').first().find('td:last button').click(); - getVisibleDropdown() - .find('li') - .filter(':contains("Retry")') - .should('have.class', 'is-disabled'); - getVisibleDropdown() - .find('li') - .filter(':contains("Delete")') - .should('have.class', 'is-disabled'); + workflowsPage.getters.workflowCards().should('have.length', 2); - projects.getMenuItems().first().click(); - cy.getByTestId('workflow-card-name').should('be.visible').first().click(); - workflowPage.getters.nodeViewRoot().should('be.visible'); - workflowPage.getters.executeWorkflowButton().should('not.exist'); - workflowPage.getters.nodeCreatorPlusButton().should('not.exist'); - workflowPage.getters.canvasNodes().should('have.length', 3).last().click(); - cy.get('body').type('{backspace}'); - workflowPage.getters.canvasNodes().should('have.length', 3).last().rightclick(); - getVisibleDropdown() - .find('li') - .should('be.visible') - .filter( - ':contains("Open"), :contains("Copy"), :contains("Select all"), :contains("Clear selection")', - ) - .should('not.have.class', 'is-disabled'); - cy.get('body').type('{esc}'); + projects.getProjectTabCredentials().click(); + credentialsPage.getters.credentialCards().should('have.length', 1); + }); - executionsTab.actions.switchToExecutionsTab(); - cy.getByTestId('retry-execution-button') - .should('be.visible') - .find('.is-disabled') - .should('exist'); - cy.get('button:contains("Debug")').should('be.disabled'); - cy.get('button[title="Retry execution"]').should('be.disabled'); - cy.get('button[title="Delete this execution"]').should('be.disabled'); + it('should create credential from workflow in the correct project after editor page refresh', () => { + cy.signinAsOwner(); + cy.visit(workflowsPage.url); - projects.getMenuItems().first().click(); + projects.createProject('Dev'); + projects.getProjectTabWorkflows().click(); + workflowsPage.getters.newWorkflowButtonCard().click(); + workflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + workflowPage.actions.saveWorkflowOnButtonClick(); + + cy.reload(); + + workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true); + clickCreateNewCredential(); + setCredentialValues({ + apiKey: 'abc123', + }); + ndv.actions.close(); + workflowPage.actions.saveWorkflowOnButtonClick(); + + projects.getMenuItems().last().click(); projects.getProjectTabCredentials().click(); - credentialsPage.getters.credentialCards().filter(':contains("Notion")').click(); - cy.getByTestId('node-credentials-config-container') - .should('be.visible') - .find('input') - .should('not.have.length'); + 
credentialsPage.getters.credentialCards().should('have.length', 1); }); }); diff --git a/cypress/e2e/4-node-creator.cy.ts b/cypress/e2e/4-node-creator.cy.ts index b70e121fd0d03..a33f16156f1e7 100644 --- a/cypress/e2e/4-node-creator.cy.ts +++ b/cypress/e2e/4-node-creator.cy.ts @@ -135,7 +135,6 @@ describe('Node Creator', () => { 'OpenThesaurus', 'Spontit', 'Vonage', - 'Send Email', 'Toggl Trigger', ]; const doubleActionNode = 'OpenWeatherMap'; diff --git a/cypress/e2e/45-workflow-selector-parameter.cy.ts b/cypress/e2e/45-workflow-selector-parameter.cy.ts index 38de780490a45..40e5ca248802c 100644 --- a/cypress/e2e/45-workflow-selector-parameter.cy.ts +++ b/cypress/e2e/45-workflow-selector-parameter.cy.ts @@ -17,7 +17,7 @@ describe('Workflow Selector Parameter', () => { workflowPage.actions.visit(); workflowPage.actions.addInitialNodeToCanvas(EXECUTE_WORKFLOW_NODE_NAME, { keepNdvOpen: true, - action: 'Call Another Workflow', + action: 'Execute A Sub Workflow', }); }); it('should render sub-workflows list', () => { @@ -86,6 +86,8 @@ describe('Workflow Selector Parameter', () => { cy.stub(win, 'open').as('windowOpen'); }); + cy.intercept('POST', '/rest/workflows*').as('createSubworkflow'); + ndv.getters.resourceLocator('workflowId').should('be.visible'); ndv.getters.resourceLocatorInput('workflowId').click(); @@ -98,10 +100,20 @@ describe('Workflow Selector Parameter', () => { getVisiblePopper().findChildByTestId('rlc-item').eq(0).click(); - const SAMPLE_SUBWORKFLOW_TEMPLATE_ID = 'VMiAxXa3lCAizGB5f7dVZQSFfg3FtHkdTKvLuupqBls='; - cy.get('@windowOpen').should( - 'be.calledWith', - `/workflows/onboarding/${SAMPLE_SUBWORKFLOW_TEMPLATE_ID}?sampleSubWorkflows=0`, - ); + cy.wait('@createSubworkflow').then((interception) => { + expect(interception.request.body).to.have.property('name').that.includes('Sub-Workflow'); + expect(interception.request.body.nodes).to.be.an('array'); + expect(interception.request.body.nodes).to.have.length(2); + expect(interception.request.body.nodes[0]).to.have.property( + 'name', + 'When Executed by Another Workflow', + ); + expect(interception.request.body.nodes[1]).to.have.property( + 'name', + 'Replace me with your logic', + ); + }); + + cy.get('@windowOpen').should('be.calledWithMatch', /\/workflow\/.+/); }); }); diff --git a/cypress/e2e/47-subworkflow-debugging.cy.ts b/cypress/e2e/47-subworkflow-debugging.cy.ts index 725b6b32c46b9..77aaa4d7f6838 100644 --- a/cypress/e2e/47-subworkflow-debugging.cy.ts +++ b/cypress/e2e/47-subworkflow-debugging.cy.ts @@ -1,9 +1,3 @@ -import { - getExecutionPreviewOutputPanelRelatedExecutionLink, - getExecutionsSidebar, - getWorkflowExecutionPreviewIframe, - openExecutionPreviewNode, -} from '../composables/executions'; import { changeOutputRunSelector, getOutputPanelItemsCount, @@ -103,38 +97,4 @@ describe('Subworkflow debugging', () => { getOutputTbodyCell(1, 2).should('include.text', 'Terry.Dach@hotmail.com'); }); }); - - it('can inspect parent executions', () => { - cy.url().then((workflowUrl) => { - openNode('Execute Workflow with param'); - - getOutputPanelItemsCount().should('contain.text', '2 items, 1 sub-execution'); - getOutputPanelRelatedExecutionLink().should('contain.text', 'View sub-execution'); - getOutputPanelRelatedExecutionLink().should('have.attr', 'href'); - - // ensure workflow executed and waited on output - getOutputTableHeaders().should('have.length', 2); - getOutputTbodyCell(1, 0).should('have.text', 'world Natalie Moore'); - - // cypress cannot handle new tabs so removing it - 
getOutputPanelRelatedExecutionLink().invoke('removeAttr', 'target').click(); - - getExecutionsSidebar().should('be.visible'); - getWorkflowExecutionPreviewIframe().should('be.visible'); - openExecutionPreviewNode('Execute Workflow Trigger'); - - getExecutionPreviewOutputPanelRelatedExecutionLink().should( - 'include.text', - 'View parent execution', - ); - - getExecutionPreviewOutputPanelRelatedExecutionLink() - .invoke('removeAttr', 'target') - .click({ force: true }); - - cy.url().then((currentUrl) => { - expect(currentUrl === workflowUrl); - }); - }); - }); }); diff --git a/cypress/e2e/48-subworkflow-inputs.cy.ts b/cypress/e2e/48-subworkflow-inputs.cy.ts index aababf4cb6383..d529ec2c259cc 100644 --- a/cypress/e2e/48-subworkflow-inputs.cy.ts +++ b/cypress/e2e/48-subworkflow-inputs.cy.ts @@ -65,8 +65,9 @@ describe('Sub-workflow creation and typed usage', () => { // ************************** // NAVIGATE TO CHILD WORKFLOW // ************************** - - openNode('Workflow Input Trigger'); + // Close NDV before opening the node creator + cy.get('body').type('{esc}'); + openNode('When Executed by Another Workflow'); }); it('works with type-checked values', () => { @@ -138,41 +139,41 @@ describe('Sub-workflow creation and typed usage', () => { cy.window().then((win) => { cy.stub(win, 'open').callsFake((url) => { cy.visit(url); - }); - }); - selectResourceLocatorItem('workflowId', 0, 'Create a'); + selectResourceLocatorItem('workflowId', 0, 'Create a'); - openNode('Workflow Input Trigger'); + openNode('When Executed by Another Workflow'); - getParameterInputByName('inputSource').click(); + getParameterInputByName('inputSource').click(); - getVisiblePopper() - .getByTestId('parameter-input') - .eq(0) - .type('Using JSON Example{downArrow}{enter}'); + getVisiblePopper() + .getByTestId('parameter-input') + .eq(0) + .type('Using JSON Example{downArrow}{enter}'); - const exampleJson = - '{{}' + EXAMPLE_FIELDS.map((x) => `"${x[0]}": ${makeExample(x[1])}`).join(',') + '}'; - getParameterInputByName('jsonExample') - .find('.cm-line') - .eq(0) - .type(`{selectAll}{backspace}${exampleJson}{enter}`); + const exampleJson = + '{{}' + EXAMPLE_FIELDS.map((x) => `"${x[0]}": ${makeExample(x[1])}`).join(',') + '}'; + getParameterInputByName('jsonExample') + .find('.cm-line') + .eq(0) + .type(`{selectAll}{backspace}${exampleJson}{enter}`); - // first one doesn't work for some reason, might need to wait for something? - clickExecuteNode(); + // first one doesn't work for some reason, might need to wait for something? 
+ clickExecuteNode(); - validateAndReturnToParent( - DEFAULT_SUBWORKFLOW_NAME_2, - 2, - EXAMPLE_FIELDS.map((f) => f[0]), - ); + validateAndReturnToParent( + DEFAULT_SUBWORKFLOW_NAME_2, + 2, + EXAMPLE_FIELDS.map((f) => f[0]), + ); - assertOutputTableContent([ - ['[null]', '[null]', '[null]', '[null]', '[null]', 'false'], - ['[null]', '[null]', '[null]', '[null]', '[null]', 'false'], - ]); + assertOutputTableContent([ + ['[null]', '[null]', '[null]', '[null]', '[null]', 'false'], + ['[null]', '[null]', '[null]', '[null]', '[null]', 'false'], + ]); - clickExecuteNode(); + clickExecuteNode(); + }); + }); }); it('should show node issue when no fields are defined in manual mode', () => { @@ -181,7 +182,7 @@ describe('Sub-workflow creation and typed usage', () => { // Executing the workflow should show an error toast clickExecuteWorkflowButton(); errorToast().should('contain', 'The workflow has issues'); - openNode('Workflow Input Trigger'); + openNode('When Executed by Another Workflow'); // Add a field to the workflowInputs fixedCollection addItemToFixedCollection('workflowInputs'); typeIntoFixedCollectionItem('workflowInputs', 0, 'test'); diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts index d9b7ecf2d1702..22d5b9f49e65d 100644 --- a/cypress/e2e/5-ndv.cy.ts +++ b/cypress/e2e/5-ndv.cy.ts @@ -249,6 +249,15 @@ describe('NDV', () => { ndv.actions.switchInputMode('Table'); ndv.actions.switchOutputMode('Table'); + // Start from linked state + ndv.getters.outputLinkRun().then(($el) => { + const classList = Array.from($el[0].classList); + if (!classList.includes('linked')) { + ndv.actions.toggleOutputRunLinking(); + ndv.getters.inputTbodyCell(1, 0).click(); // remove tooltip + } + }); + ndv.getters .inputRunSelector() .should('exist') diff --git a/cypress/e2e/7-workflow-actions.cy.ts b/cypress/e2e/7-workflow-actions.cy.ts index f0f3ae019a2dd..079030359ad00 100644 --- a/cypress/e2e/7-workflow-actions.cy.ts +++ b/cypress/e2e/7-workflow-actions.cy.ts @@ -200,7 +200,14 @@ describe('Workflow Actions', () => { WorkflowPage.getters.nodeConnections().should('have.length', 2); // Check if all nodes have names WorkflowPage.getters.canvasNodes().each((node) => { - cy.wrap(node).should('have.attr', 'data-name'); + cy.ifCanvasVersion( + () => { + cy.wrap(node).should('have.attr', 'data-name'); + }, + () => { + cy.wrap(node).should('have.attr', 'data-node-name'); + }, + ); }); }); }); diff --git a/cypress/package.json b/cypress/package.json index 6725c46bc6f5a..4ad2d4f199c4e 100644 --- a/cypress/package.json +++ b/cypress/package.json @@ -6,7 +6,7 @@ "cypress:install": "cypress install", "test:e2e:ui": "scripts/run-e2e.js ui", "test:e2e:dev": "scripts/run-e2e.js dev", - "test:e2e:dev:v2": "scripts/run-e2e.js dev:v2", + "test:e2e:dev:v1": "scripts/run-e2e.js dev:v1", "test:e2e:all": "scripts/run-e2e.js all", "format": "biome format --write .", "format:check": "biome ci .", diff --git a/cypress/pages/workflow.ts b/cypress/pages/workflow.ts index 3552e68e08bcb..24fda156f6625 100644 --- a/cypress/pages/workflow.ts +++ b/cypress/pages/workflow.ts @@ -96,7 +96,7 @@ export class WorkflowPage extends BasePage { nodeConnections: () => cy.ifCanvasVersion( () => cy.get('.jtk-connector'), - () => cy.getByTestId('edge-label'), + () => cy.getByTestId('edge'), ), zoomToFitButton: () => cy.getByTestId('zoom-to-fit'), nodeEndpoints: () => cy.get('.jtk-endpoint-connected'), @@ -182,7 +182,7 @@ export class WorkflowPage extends BasePage { ), () => cy.get( - 
`[data-test-id="edge-label"][data-source-node-name="${sourceNodeName}"][data-target-node-name="${targetNodeName}"]`, + `[data-test-id="edge"][data-source-node-name="${sourceNodeName}"][data-target-node-name="${targetNodeName}"]`, ), ), getConnectionActionsBetweenNodes: (sourceNodeName: string, targetNodeName: string) => @@ -430,7 +430,7 @@ export class WorkflowPage extends BasePage { pinchToZoom: (steps: number, mode: 'zoomIn' | 'zoomOut' = 'zoomIn') => { cy.window().then((win) => { // Pinch-to-zoom simulates a 'wheel' event with ctrlKey: true (same as zooming by scrolling) - this.getters.canvasViewport().trigger('wheel', { + getCanvasPane().trigger('wheel', { force: true, bubbles: true, ctrlKey: true, diff --git a/cypress/scripts/run-e2e.js b/cypress/scripts/run-e2e.js index 6819d6c824c8d..c7f9ccf749072 100755 --- a/cypress/scripts/run-e2e.js +++ b/cypress/scripts/run-e2e.js @@ -45,19 +45,23 @@ switch (scenario) { startCommand: 'start', url: 'http://localhost:5678/favicon.ico', testCommand: 'cypress open', + customEnv: { + CYPRESS_NODE_VIEW_VERSION: 2, + }, }); break; - case 'dev': + case 'dev:v1': runTests({ startCommand: 'develop', url: 'http://localhost:8080/favicon.ico', testCommand: 'cypress open', customEnv: { + CYPRESS_NODE_VIEW_VERSION: 1, CYPRESS_BASE_URL: 'http://localhost:8080', }, }); break; - case 'dev:v2': + case 'dev': runTests({ startCommand: 'develop', url: 'http://localhost:8080/favicon.ico', @@ -76,6 +80,9 @@ switch (scenario) { startCommand: 'start', url: 'http://localhost:5678/favicon.ico', testCommand: `cypress run --headless ${specParam}`, + customEnv: { + CYPRESS_NODE_VIEW_VERSION: 2, + }, }); break; default: diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index 0fe782499d1d7..297fcfa9b6028 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -38,7 +38,21 @@ beforeEach(() => { data: { status: 'success', message: 'Tested successfully' }, }).as('credentialTest'); - cy.intercept('POST', '/rest/license/renew', {}); + cy.intercept('POST', '/rest/license/renew', { + data: { + usage: { + activeWorkflowTriggers: { + limit: -1, + value: 0, + warningThreshold: 0.8, + }, + }, + license: { + planId: '', + planName: 'Community', + }, + }, + }); cy.intercept({ pathname: '/api/health' }, { status: 'OK' }).as('healthCheck'); cy.intercept({ pathname: '/api/versions/*' }, [ diff --git a/lefthook.yml b/lefthook.yml index aa17417824774..b6aac6e069f0b 100644 --- a/lefthook.yml +++ b/lefthook.yml @@ -2,14 +2,14 @@ pre-commit: commands: biome_check: glob: 'packages/**/*.{js,ts,json}' - run: ./node_modules/.bin/biome check --write --no-errors-on-unmatched --files-ignore-unknown=true --colors=off {staged_files} + run: pnpm biome check --write --no-errors-on-unmatched --files-ignore-unknown=true --colors=off {staged_files} stage_fixed: true skip: - merge - rebase prettier_check: glob: 'packages/**/*.{vue,yml,md,css,scss}' - run: ./node_modules/.bin/prettier --write --ignore-unknown --no-error-on-unmatched-pattern {staged_files} + run: pnpm prettier --write --ignore-unknown --no-error-on-unmatched-pattern {staged_files} stage_fixed: true skip: - merge diff --git a/package.json b/package.json index 8e1d8da85a46e..f0adf07b7925e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.76.0", + "version": "1.77.0", "private": true, "engines": { "node": ">=20.15", @@ -21,7 +21,7 @@ "dev:fe": "run-p start \"dev:fe:editor --filter=n8n-design-system\"", "dev:fe:editor": "turbo run dev --parallel --env-mode=loose 
--filter=n8n-editor-ui", "dev:e2e": "cd cypress && pnpm run test:e2e:dev", - "dev:e2e:v2": "cd cypress && pnpm run test:e2e:dev:v2", + "dev:e2e:v1": "cd cypress && pnpm run test:e2e:dev:v1", "dev:e2e:server": "run-p start dev:fe:editor", "clean": "turbo run clean --parallel", "reset": "node scripts/ensure-zx.mjs && zx scripts/reset.mjs", @@ -58,7 +58,7 @@ "jest-mock": "^29.6.2", "jest-mock-extended": "^3.0.4", "lefthook": "^1.7.15", - "nock": "^13.3.2", + "nock": "^14.0.0", "nodemon": "^3.0.1", "npm-run-all2": "^7.0.2", "p-limit": "^3.1.0", diff --git a/packages/@n8n/api-types/src/api-keys.ts b/packages/@n8n/api-types/src/api-keys.ts new file mode 100644 index 0000000000000..e812786e78911 --- /dev/null +++ b/packages/@n8n/api-types/src/api-keys.ts @@ -0,0 +1,9 @@ +export type ApiKey = { + id: string; + label: string; + apiKey: string; + createdAt: string; + updatedAt: string; +}; + +export type ApiKeyWithRawValue = ApiKey & { rawApiKey: string }; diff --git a/packages/@n8n/api-types/src/dto/api-keys/__tests__/create-or-update.dto.test.ts b/packages/@n8n/api-types/src/dto/api-keys/__tests__/create-or-update.dto.test.ts new file mode 100644 index 0000000000000..beb7ebcf0d9ac --- /dev/null +++ b/packages/@n8n/api-types/src/dto/api-keys/__tests__/create-or-update.dto.test.ts @@ -0,0 +1,40 @@ +import { CreateOrUpdateApiKeyRequestDto } from '../create-or-update-api-key-request.dto'; + +describe('CreateOrUpdateApiKeyRequestDto', () => { + describe('Valid requests', () => { + test('should allow valid label', () => { + const result = CreateOrUpdateApiKeyRequestDto.safeParse({ + label: 'valid label', + }); + expect(result.success).toBe(true); + }); + }); + + describe('Invalid requests', () => { + test.each([ + { + name: 'empty label', + label: '', + expectedErrorPath: ['label'], + }, + { + name: 'label exceeding 50 characters', + label: '2mWMfsrvAmneWluS8IbezaIHZOu2mWMfsrvAmneWluS8IbezaIa', + expectedErrorPath: ['label'], + }, + { + name: 'label with xss injection', + label: '', + expectedErrorPath: ['label'], + }, + ])('should fail validation for $name', ({ label, expectedErrorPath }) => { + const result = CreateOrUpdateApiKeyRequestDto.safeParse({ label }); + + expect(result.success).toBe(false); + + if (expectedErrorPath) { + expect(result.error?.issues[0].path).toEqual(expectedErrorPath); + } + }); + }); +}); diff --git a/packages/@n8n/api-types/src/dto/api-keys/create-or-update-api-key-request.dto.ts b/packages/@n8n/api-types/src/dto/api-keys/create-or-update-api-key-request.dto.ts new file mode 100644 index 0000000000000..168c28c2faaaa --- /dev/null +++ b/packages/@n8n/api-types/src/dto/api-keys/create-or-update-api-key-request.dto.ts @@ -0,0 +1,13 @@ +import xss from 'xss'; +import { z } from 'zod'; +import { Z } from 'zod-class'; + +const xssCheck = (value: string) => + value === + xss(value, { + whiteList: {}, + }); + +export class CreateOrUpdateApiKeyRequestDto extends Z.class({ + label: z.string().max(50).min(1).refine(xssCheck), +}) {} diff --git a/packages/@n8n/api-types/src/dto/index.ts b/packages/@n8n/api-types/src/dto/index.ts index 9be09c02f3d22..f8bdb80268cce 100644 --- a/packages/@n8n/api-types/src/dto/index.ts +++ b/packages/@n8n/api-types/src/dto/index.ts @@ -47,3 +47,5 @@ export { ImportWorkflowFromUrlDto } from './workflows/import-workflow-from-url.d export { CreateOrUpdateTagRequestDto } from './tag/create-or-update-tag-request.dto'; export { RetrieveTagQueryDto } from './tag/retrieve-tag-query.dto'; + +export { CreateOrUpdateApiKeyRequestDto } from 
'./api-keys/create-or-update-api-key-request.dto'; diff --git a/packages/@n8n/api-types/src/frontend-settings.ts b/packages/@n8n/api-types/src/frontend-settings.ts index 3ce856d6aded0..ceaac19a69dbe 100644 --- a/packages/@n8n/api-types/src/frontend-settings.ts +++ b/packages/@n8n/api-types/src/frontend-settings.ts @@ -87,6 +87,7 @@ export interface FrontendSettings { }; }; publicApi: { + apiKeysPerUserLimit: number; enabled: boolean; latestVersion: number; path: string; diff --git a/packages/@n8n/api-types/src/index.ts b/packages/@n8n/api-types/src/index.ts index 5620689af0a5d..a51850cc6c135 100644 --- a/packages/@n8n/api-types/src/index.ts +++ b/packages/@n8n/api-types/src/index.ts @@ -4,6 +4,7 @@ export type * from './push'; export type * from './scaling'; export type * from './frontend-settings'; export type * from './user'; +export type * from './api-keys'; export type { Collaborator } from './push/collaboration'; export type { SendWorkerStatusMessage } from './push/worker'; diff --git a/packages/@n8n/client-oauth2/.eslintrc.js b/packages/@n8n/client-oauth2/.eslintrc.js index c3fe283453eb5..be8ebd21d10b9 100644 --- a/packages/@n8n/client-oauth2/.eslintrc.js +++ b/packages/@n8n/client-oauth2/.eslintrc.js @@ -11,5 +11,6 @@ module.exports = { rules: { '@typescript-eslint/consistent-type-imports': 'error', 'n8n-local-rules/no-plain-errors': 'off', + 'n8n-local-rules/no-uncaught-json-parse': 'off', }, }; diff --git a/packages/@n8n/client-oauth2/package.json b/packages/@n8n/client-oauth2/package.json index 33f15218f26a5..fccbc63098d61 100644 --- a/packages/@n8n/client-oauth2/package.json +++ b/packages/@n8n/client-oauth2/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/client-oauth2", - "version": "0.21.0", + "version": "0.22.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/client-oauth2/src/ClientOAuth2.ts b/packages/@n8n/client-oauth2/src/ClientOAuth2.ts index 676249254ab91..62e0241e6ff88 100644 --- a/packages/@n8n/client-oauth2/src/ClientOAuth2.ts +++ b/packages/@n8n/client-oauth2/src/ClientOAuth2.ts @@ -1,8 +1,6 @@ -/* eslint-disable @typescript-eslint/no-unsafe-return */ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ /* eslint-disable @typescript-eslint/no-explicit-any */ import axios from 'axios'; -import type { AxiosRequestConfig } from 'axios'; +import type { AxiosRequestConfig, AxiosResponse } from 'axios'; import { Agent } from 'https'; import * as qs from 'querystring'; @@ -10,7 +8,7 @@ import type { ClientOAuth2TokenData } from './ClientOAuth2Token'; import { ClientOAuth2Token } from './ClientOAuth2Token'; import { CodeFlow } from './CodeFlow'; import { CredentialsFlow } from './CredentialsFlow'; -import type { Headers } from './types'; +import type { Headers, OAuth2AccessTokenErrorResponse } from './types'; import { getAuthError } from './utils'; export interface ClientOAuth2RequestObject { @@ -38,10 +36,10 @@ export interface ClientOAuth2Options { ignoreSSLIssues?: boolean; } -class ResponseError extends Error { +export class ResponseError extends Error { constructor( readonly status: number, - readonly body: object, + readonly body: unknown, readonly code = 'ESTATUS', ) { super(`HTTP status ${status}`); @@ -74,21 +72,12 @@ export class ClientOAuth2 { } /** - * Attempt to parse response body as JSON, fall back to parsing as a query string. + * Request an access token from the OAuth2 server. + * + * @throws {ResponseError} If the response is an unexpected status code. 
+ * @throws {AuthError} If the response is an authentication error. */ - private parseResponseBody(body: string): T { - try { - return JSON.parse(body); - } catch (e) { - return qs.parse(body) as T; - } - } - - /** - * Using the built-in request method, we'll automatically attempt to parse - * the response. - */ - async request(options: ClientOAuth2RequestObject): Promise { + async accessTokenRequest(options: ClientOAuth2RequestObject): Promise { let url = options.url; const query = qs.stringify(options.query); @@ -101,7 +90,7 @@ export class ClientOAuth2 { method: options.method, data: qs.stringify(options.body), headers: options.headers, - transformResponse: (res) => res, + transformResponse: (res: unknown) => res, // Axios rejects the promise by default for all status codes 4xx. // We override this to reject promises only on 5xxs validateStatus: (status) => status < 500, @@ -113,16 +102,36 @@ export class ClientOAuth2 { const response = await axios.request(requestConfig); - const body = this.parseResponseBody(response.data); + if (response.status >= 400) { + const body = this.parseResponseBody(response); + const authErr = getAuthError(body); - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - const authErr = getAuthError(body); - if (authErr) throw authErr; + if (authErr) throw authErr; + else throw new ResponseError(response.status, response.data); + } - if (response.status < 200 || response.status >= 399) + if (response.status >= 300) { throw new ResponseError(response.status, response.data); + } + + return this.parseResponseBody(response); + } + + /** + * Attempt to parse response body based on the content type. + */ + private parseResponseBody(response: AxiosResponse): T { + const contentType = (response.headers['content-type'] as string) ?? ''; + const body = response.data as string; + + if (contentType.startsWith('application/json')) { + return JSON.parse(body) as T; + } + + if (contentType.startsWith('application/x-www-form-urlencoded')) { + return qs.parse(body) as T; + } - return body; + throw new Error(`Unsupported content type: ${contentType}`); } } diff --git a/packages/@n8n/client-oauth2/src/ClientOAuth2Token.ts b/packages/@n8n/client-oauth2/src/ClientOAuth2Token.ts index 2bcacdf112bb1..d7a80a1699187 100644 --- a/packages/@n8n/client-oauth2/src/ClientOAuth2Token.ts +++ b/packages/@n8n/client-oauth2/src/ClientOAuth2Token.ts @@ -1,3 +1,5 @@ +import * as a from 'node:assert'; + import type { ClientOAuth2, ClientOAuth2Options, ClientOAuth2RequestObject } from './ClientOAuth2'; import { DEFAULT_HEADERS } from './constants'; import { auth, expects, getRequestOptions } from './utils'; @@ -65,17 +67,16 @@ export class ClientOAuth2Token { } /** - * Refresh a user access token with the supplied token. + * Refresh a user access token with the refresh token. 
+ * As in RFC 6749 Section 6: https://www.rfc-editor.org/rfc/rfc6749.html#section-6 */ async refresh(opts?: ClientOAuth2Options): Promise { const options = { ...this.client.options, ...opts }; expects(options, 'clientSecret'); + a.ok(this.refreshToken, 'refreshToken is required'); - if (!this.refreshToken) throw new Error('No refresh token'); - - const clientId = options.clientId; - const clientSecret = options.clientSecret; + const { clientId, clientSecret } = options; const headers = { ...DEFAULT_HEADERS }; const body: Record = { refresh_token: this.refreshToken, @@ -99,7 +100,7 @@ export class ClientOAuth2Token { options, ); - const responseData = await this.client.request(requestOptions); + const responseData = await this.client.accessTokenRequest(requestOptions); return this.client.createToken({ ...this.data, ...responseData }); } diff --git a/packages/@n8n/client-oauth2/src/CodeFlow.ts b/packages/@n8n/client-oauth2/src/CodeFlow.ts index 6d0fff235ea89..6db98929f9782 100644 --- a/packages/@n8n/client-oauth2/src/CodeFlow.ts +++ b/packages/@n8n/client-oauth2/src/CodeFlow.ts @@ -1,7 +1,7 @@ import * as qs from 'querystring'; import type { ClientOAuth2, ClientOAuth2Options } from './ClientOAuth2'; -import type { ClientOAuth2Token, ClientOAuth2TokenData } from './ClientOAuth2Token'; +import type { ClientOAuth2Token } from './ClientOAuth2Token'; import { DEFAULT_HEADERS, DEFAULT_URL_BASE } from './constants'; import { auth, expects, getAuthError, getRequestOptions } from './utils'; @@ -117,7 +117,7 @@ export class CodeFlow { options, ); - const responseData = await this.client.request(requestOptions); + const responseData = await this.client.accessTokenRequest(requestOptions); return this.client.createToken(responseData); } } diff --git a/packages/@n8n/client-oauth2/src/CredentialsFlow.ts b/packages/@n8n/client-oauth2/src/CredentialsFlow.ts index f1ccc256e73ea..eeb5550cf3fdb 100644 --- a/packages/@n8n/client-oauth2/src/CredentialsFlow.ts +++ b/packages/@n8n/client-oauth2/src/CredentialsFlow.ts @@ -1,5 +1,5 @@ import type { ClientOAuth2 } from './ClientOAuth2'; -import type { ClientOAuth2Token, ClientOAuth2TokenData } from './ClientOAuth2Token'; +import type { ClientOAuth2Token } from './ClientOAuth2Token'; import { DEFAULT_HEADERS } from './constants'; import type { Headers } from './types'; import { auth, expects, getRequestOptions } from './utils'; @@ -55,7 +55,7 @@ export class CredentialsFlow { options, ); - const responseData = await this.client.request(requestOptions); + const responseData = await this.client.accessTokenRequest(requestOptions); return this.client.createToken(responseData); } } diff --git a/packages/@n8n/client-oauth2/src/types.ts b/packages/@n8n/client-oauth2/src/types.ts index 69c225d827661..26a90bd441733 100644 --- a/packages/@n8n/client-oauth2/src/types.ts +++ b/packages/@n8n/client-oauth2/src/types.ts @@ -17,3 +17,14 @@ export interface OAuth2CredentialData { refresh_token?: string; }; } + +/** + * The response from the OAuth2 server when the access token is not successfully + * retrieved. 
As specified in RFC 6749 Section 5.2: + * https://www.rfc-editor.org/rfc/rfc6749.html#section-5.2 + */ +export interface OAuth2AccessTokenErrorResponse extends Record { + error: string; + error_description?: string; + error_uri?: string; +} diff --git a/packages/@n8n/client-oauth2/test/ClientOAuth2.test.ts b/packages/@n8n/client-oauth2/test/ClientOAuth2.test.ts new file mode 100644 index 0000000000000..7e6fa788befcd --- /dev/null +++ b/packages/@n8n/client-oauth2/test/ClientOAuth2.test.ts @@ -0,0 +1,168 @@ +import axios from 'axios'; +import nock from 'nock'; + +import { ClientOAuth2, ResponseError } from '@/ClientOAuth2'; +import { ERROR_RESPONSES } from '@/constants'; +import { auth, AuthError } from '@/utils'; + +import * as config from './config'; + +describe('ClientOAuth2', () => { + const client = new ClientOAuth2({ + clientId: config.clientId, + clientSecret: config.clientSecret, + accessTokenUri: config.accessTokenUri, + authentication: 'header', + }); + + beforeAll(async () => { + nock.disableNetConnect(); + }); + + afterAll(() => { + nock.restore(); + }); + + describe('accessTokenRequest', () => { + const authHeader = auth(config.clientId, config.clientSecret); + + const makeTokenCall = async () => + await client.accessTokenRequest({ + url: config.accessTokenUri, + method: 'POST', + headers: { + Authorization: authHeader, + Accept: 'application/json', + 'Content-Type': 'application/x-www-form-urlencoded', + }, + body: { + refresh_token: 'test', + grant_type: 'refresh_token', + }, + }); + + const mockTokenResponse = ({ + status = 200, + headers, + body, + }: { + status: number; + body: string; + headers: Record; + }) => + nock(config.baseUrl).post('/login/oauth/access_token').once().reply(status, body, headers); + + it('should send the correct request based on given options', async () => { + mockTokenResponse({ + status: 200, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + access_token: config.accessToken, + refresh_token: config.refreshToken, + }), + }); + + const axiosSpy = jest.spyOn(axios, 'request'); + + await makeTokenCall(); + + expect(axiosSpy).toHaveBeenCalledWith( + expect.objectContaining({ + url: config.accessTokenUri, + method: 'POST', + data: 'refresh_token=test&grant_type=refresh_token', + headers: { + Authorization: authHeader, + Accept: 'application/json', + 'Content-Type': 'application/x-www-form-urlencoded', + }, + }), + ); + }); + + test.each([ + { + contentType: 'application/json', + body: JSON.stringify({ + access_token: config.accessToken, + refresh_token: config.refreshToken, + }), + }, + { + contentType: 'application/json; charset=utf-8', + body: JSON.stringify({ + access_token: config.accessToken, + refresh_token: config.refreshToken, + }), + }, + { + contentType: 'application/x-www-form-urlencoded', + body: `access_token=${config.accessToken}&refresh_token=${config.refreshToken}`, + }, + ])('should parse response with content type $contentType', async ({ contentType, body }) => { + mockTokenResponse({ + status: 200, + headers: { 'Content-Type': contentType }, + body, + }); + + const response = await makeTokenCall(); + + expect(response).toEqual({ + access_token: config.accessToken, + refresh_token: config.refreshToken, + }); + }); + + test.each([ + { + contentType: 'text/html', + body: 'Hello, world!', + }, + { + contentType: 'application/xml', + body: 'Hello, world!', + }, + { + contentType: 'text/plain', + body: 'Hello, world!', + }, + ])('should reject content type $contentType', async ({ contentType, body }) => { + 
mockTokenResponse({ + status: 200, + headers: { 'Content-Type': contentType }, + body, + }); + + const result = await makeTokenCall().catch((err) => err); + expect(result).toBeInstanceOf(Error); + expect(result.message).toEqual(`Unsupported content type: ${contentType}`); + }); + + it('should reject 4xx responses with auth errors', async () => { + mockTokenResponse({ + status: 401, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ error: 'access_denied' }), + }); + + const result = await makeTokenCall().catch((err) => err); + expect(result).toBeInstanceOf(AuthError); + expect(result.message).toEqual(ERROR_RESPONSES.access_denied); + expect(result.body).toEqual({ error: 'access_denied' }); + }); + + it('should reject 3xx responses with response errors', async () => { + mockTokenResponse({ + status: 302, + headers: {}, + body: 'Redirected', + }); + + const result = await makeTokenCall().catch((err) => err); + expect(result).toBeInstanceOf(ResponseError); + expect(result.message).toEqual('HTTP status 302'); + expect(result.body).toEqual('Redirected'); + }); + }); +}); diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index 949bafdce5dba..e8678c9cbc8ab 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.26.0", + "version": "1.27.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/external-hooks.config.ts b/packages/@n8n/config/src/configs/external-hooks.config.ts new file mode 100644 index 0000000000000..20c8eb86549b8 --- /dev/null +++ b/packages/@n8n/config/src/configs/external-hooks.config.ts @@ -0,0 +1,17 @@ +import { Config, Env } from '../decorators'; + +class ColonSeparatedStringArray extends Array { + constructor(str: string) { + super(); + const parsed = str.split(':') as this; + const filtered = parsed.filter((i) => typeof i === 'string' && i.length); + return filtered.length ? filtered : []; + } +} + +@Config +export class ExternalHooksConfig { + /** Files containing external hooks. 
Multiple files can be separated by colon (":") */ + @Env('EXTERNAL_HOOK_FILES') + files: ColonSeparatedStringArray = []; +} diff --git a/packages/@n8n/config/src/configs/external-storage.config.ts b/packages/@n8n/config/src/configs/external-storage.config.ts index 6e5fbd64d89ad..aff2447d40f8a 100644 --- a/packages/@n8n/config/src/configs/external-storage.config.ts +++ b/packages/@n8n/config/src/configs/external-storage.config.ts @@ -23,11 +23,14 @@ class S3CredentialsConfig { } @Config -class S3Config { +export class S3Config { /** Host of the n8n bucket in S3-compatible external storage @example "s3.us-east-1.amazonaws.com" */ @Env('N8N_EXTERNAL_STORAGE_S3_HOST') host: string = ''; + @Env('N8N_EXTERNAL_STORAGE_S3_PROTOCOL') + protocol: 'http' | 'https' = 'https'; + @Nested bucket: S3BucketConfig; diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index f462ef94248cf..1f686999f749d 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -6,6 +6,7 @@ import { DiagnosticsConfig } from './configs/diagnostics.config'; import { EndpointsConfig } from './configs/endpoints.config'; import { EventBusConfig } from './configs/event-bus.config'; import { ExecutionsConfig } from './configs/executions.config'; +import { ExternalHooksConfig } from './configs/external-hooks.config'; import { ExternalSecretsConfig } from './configs/external-secrets.config'; import { ExternalStorageConfig } from './configs/external-storage.config'; import { GenericConfig } from './configs/generic.config'; @@ -30,6 +31,7 @@ export { TaskRunnersConfig } from './configs/runners.config'; export { SecurityConfig } from './configs/security.config'; export { ExecutionsConfig } from './configs/executions.config'; export { FrontendBetaFeatures, FrontendConfig } from './configs/frontend.config'; +export { S3Config } from './configs/external-storage.config'; export { LOG_SCOPES } from './configs/logging.config'; export type { LogScope } from './configs/logging.config'; @@ -50,6 +52,9 @@ export class GlobalConfig { @Nested publicApi: PublicApiConfig; + @Nested + externalHooks: ExternalHooksConfig; + @Nested externalSecrets: ExternalSecretsConfig; diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index d9499d784952f..213776056968c 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -107,6 +107,9 @@ describe('GlobalConfig', () => { maxFileSizeInKB: 10240, }, }, + externalHooks: { + files: [], + }, externalSecrets: { preferGet: false, updateInterval: 300, @@ -138,6 +141,7 @@ describe('GlobalConfig', () => { externalStorage: { s3: { host: '', + protocol: 'https', bucket: { name: '', region: '', diff --git a/packages/@n8n/nodes-langchain/credentials/DeepSeekApi.credentials.ts b/packages/@n8n/nodes-langchain/credentials/DeepSeekApi.credentials.ts new file mode 100644 index 0000000000000..6088fbbb81277 --- /dev/null +++ b/packages/@n8n/nodes-langchain/credentials/DeepSeekApi.credentials.ts @@ -0,0 +1,47 @@ +import type { + IAuthenticateGeneric, + ICredentialTestRequest, + ICredentialType, + INodeProperties, +} from 'n8n-workflow'; + +export class DeepSeekApi implements ICredentialType { + name = 'deepSeekApi'; + + displayName = 'DeepSeek'; + + documentationUrl = 'deepseek'; + + properties: INodeProperties[] = [ + { + displayName: 'API Key', + name: 'apiKey', + type: 'string', + typeOptions: { password: true }, + required: true, + default: '', + }, + { + displayName: 'Base URL', + 
name: 'url', + type: 'hidden', + default: 'https://api.deepseek.com', + }, + ]; + + authenticate: IAuthenticateGeneric = { + type: 'generic', + properties: { + headers: { + Authorization: '=Bearer {{$credentials.apiKey}}', + }, + }, + }; + + test: ICredentialTestRequest = { + request: { + baseURL: '={{ $credentials.url }}', + url: '/models', + }, + }; +} diff --git a/packages/@n8n/nodes-langchain/credentials/OpenRouterApi.credentials.ts b/packages/@n8n/nodes-langchain/credentials/OpenRouterApi.credentials.ts new file mode 100644 index 0000000000000..e21a84d02065d --- /dev/null +++ b/packages/@n8n/nodes-langchain/credentials/OpenRouterApi.credentials.ts @@ -0,0 +1,47 @@ +import type { + IAuthenticateGeneric, + ICredentialTestRequest, + ICredentialType, + INodeProperties, +} from 'n8n-workflow'; + +export class OpenRouterApi implements ICredentialType { + name = 'openRouterApi'; + + displayName = 'OpenRouter'; + + documentationUrl = 'openrouter'; + + properties: INodeProperties[] = [ + { + displayName: 'API Key', + name: 'apiKey', + type: 'string', + typeOptions: { password: true }, + required: true, + default: '', + }, + { + displayName: 'Base URL', + name: 'url', + type: 'hidden', + default: 'https://openrouter.ai/api/v1', + }, + ]; + + authenticate: IAuthenticateGeneric = { + type: 'generic', + properties: { + headers: { + Authorization: '=Bearer {{$credentials.apiKey}}', + }, + }, + }; + + test: ICredentialTestRequest = { + request: { + baseURL: '={{ $credentials.url }}', + url: '/models', + }, + }; +} diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts index 230a6ddc6ed37..c5a46a119212a 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts @@ -91,6 +91,8 @@ function getInputs( '@n8n/n8n-nodes-langchain.lmChatGoogleVertex', '@n8n/n8n-nodes-langchain.lmChatMistralCloud', '@n8n/n8n-nodes-langchain.lmChatAzureOpenAi', + '@n8n/n8n-nodes-langchain.lmChatDeepSeek', + '@n8n/n8n-nodes-langchain.lmChatOpenRouter', ], }, }, @@ -119,6 +121,8 @@ function getInputs( '@n8n/n8n-nodes-langchain.lmChatGroq', '@n8n/n8n-nodes-langchain.lmChatGoogleVertex', '@n8n/n8n-nodes-langchain.lmChatGoogleGemini', + '@n8n/n8n-nodes-langchain.lmChatDeepSeek', + '@n8n/n8n-nodes-langchain.lmChatOpenRouter', ], }, }, diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts index 99d7345939522..977f8c5fb81de 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts @@ -258,7 +258,7 @@ export class LmChatOpenAi implements INodeType { displayName: 'Sampling Temperature', name: 'temperature', default: 0.7, - typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, + typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 }, description: 'Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.', type: 'number', diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts new file mode 100644 index 0000000000000..bf811ac2fe44b --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts @@ -0,0 +1,253 @@ +/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import { ChatOpenAI, type ClientOptions } from '@langchain/openai'; +import { + NodeConnectionType, + type INodeType, + type INodeTypeDescription, + type ISupplyDataFunctions, + type SupplyData, +} from 'n8n-workflow'; + +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + +import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling'; +import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; + +export class LmChatDeepSeek implements INodeType { + description: INodeTypeDescription = { + displayName: 'DeepSeek Chat Model', + // eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased + name: 'lmChatDeepSeek', + icon: 'file:deepseek.svg', + group: ['transform'], + version: [1], + description: 'For advanced usage with an AI chain', + defaults: { + name: 'DeepSeek Chat Model', + }, + codex: { + categories: ['AI'], + subcategories: { + AI: ['Language Models', 'Root Nodes'], + 'Language Models': ['Chat Models (Recommended)'], + }, + resources: { + primaryDocumentation: [ + { + url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatdeepseek/', + }, + ], + }, + }, + // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node + inputs: [], + // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong + outputs: [NodeConnectionType.AiLanguageModel], + outputNames: ['Model'], + credentials: [ + { + name: 'deepSeekApi', + required: true, + }, + ], + requestDefaults: { + ignoreHttpStatusErrors: true, + baseURL: '={{ $credentials?.url }}', + }, + properties: [ + getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]), + { + displayName: + 'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.', + name: 'notice', + type: 'notice', + default: '', + displayOptions: { + show: { + '/options.responseFormat': ['json_object'], + }, + }, + }, + { + displayName: 'Model', + name: 'model', + type: 'options', + description: + 'The model which will generate the completion. 
Learn more.', + typeOptions: { + loadOptions: { + routing: { + request: { + method: 'GET', + url: '/models', + }, + output: { + postReceive: [ + { + type: 'rootProperty', + properties: { + property: 'data', + }, + }, + { + type: 'setKeyValue', + properties: { + name: '={{$responseItem.id}}', + value: '={{$responseItem.id}}', + }, + }, + { + type: 'sort', + properties: { + key: 'name', + }, + }, + ], + }, + }, + }, + }, + routing: { + send: { + type: 'body', + property: 'model', + }, + }, + default: 'deepseek-chat', + }, + { + displayName: 'Options', + name: 'options', + placeholder: 'Add Option', + description: 'Additional options to add', + type: 'collection', + default: {}, + options: [ + { + displayName: 'Frequency Penalty', + name: 'frequencyPenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim", + type: 'number', + }, + { + displayName: 'Maximum Number of Tokens', + name: 'maxTokens', + default: -1, + description: + 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).', + type: 'number', + typeOptions: { + maxValue: 32768, + }, + }, + { + displayName: 'Response Format', + name: 'responseFormat', + default: 'text', + type: 'options', + options: [ + { + name: 'Text', + value: 'text', + description: 'Regular text response', + }, + { + name: 'JSON', + value: 'json_object', + description: + 'Enables JSON mode, which should guarantee the message the model generates is valid JSON', + }, + ], + }, + { + displayName: 'Presence Penalty', + name: 'presencePenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics", + type: 'number', + }, + { + displayName: 'Sampling Temperature', + name: 'temperature', + default: 0.7, + typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 }, + description: + 'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.', + type: 'number', + }, + { + displayName: 'Timeout', + name: 'timeout', + default: 360000, + description: 'Maximum amount of time a request is allowed to take in milliseconds', + type: 'number', + }, + { + displayName: 'Max Retries', + name: 'maxRetries', + default: 2, + description: 'Maximum number of retries to attempt', + type: 'number', + }, + { + displayName: 'Top P', + name: 'topP', + default: 1, + typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, + description: + 'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.', + type: 'number', + }, + ], + }, + ], + }; + + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { + const credentials = await this.getCredentials('deepSeekApi'); + + const modelName = this.getNodeParameter('model', itemIndex) as string; + + const options = this.getNodeParameter('options', itemIndex, {}) as { + frequencyPenalty?: number; + maxTokens?: number; + maxRetries: number; + timeout: number; + presencePenalty?: number; + temperature?: number; + topP?: number; + responseFormat?: 'text' | 'json_object'; + }; + + const configuration: ClientOptions = { + baseURL: credentials.url, + }; + + const model = new ChatOpenAI({ + openAIApiKey: credentials.apiKey, + modelName, + ...options, + timeout: options.timeout ?? 60000, + maxRetries: options.maxRetries ?? 2, + configuration, + callbacks: [new N8nLlmTracing(this)], + modelKwargs: options.responseFormat + ? { + response_format: { type: options.responseFormat }, + } + : undefined, + onFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler), + }); + + return { + response: model, + }; + } +} diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/deepseek.svg b/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/deepseek.svg new file mode 100644 index 0000000000000..3395016ce0fa8 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/deepseek.svg @@ -0,0 +1 @@ +DeepSeek diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts new file mode 100644 index 0000000000000..57a14028e716d --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts @@ -0,0 +1,252 @@ +/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import { ChatOpenAI, type ClientOptions } from '@langchain/openai'; +import { + NodeConnectionType, + type INodeType, + type INodeTypeDescription, + type ISupplyDataFunctions, + type SupplyData, +} from 'n8n-workflow'; + +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + +import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling'; +import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; + +export class LmChatOpenRouter implements INodeType { + description: INodeTypeDescription = { + displayName: 'OpenRouter Chat Model', + name: 'lmChatOpenRouter', + icon: { light: 'file:openrouter.svg', dark: 'file:openrouter.dark.svg' }, + group: ['transform'], + version: [1], + description: 'For advanced usage with an AI chain', + defaults: { + name: 'OpenRouter Chat Model', + }, + codex: { + categories: ['AI'], + subcategories: { + AI: ['Language Models', 'Root Nodes'], + 'Language Models': ['Chat Models (Recommended)'], + }, + resources: { + primaryDocumentation: [ + { + url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenrouter/', + }, + ], + }, + }, + // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node + inputs: [], + // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong + outputs: [NodeConnectionType.AiLanguageModel], + outputNames: ['Model'], + credentials: [ + { + name: 'openRouterApi', + required: true, + }, + ], + requestDefaults: { + ignoreHttpStatusErrors: true, + baseURL: 
'={{ $credentials?.url }}', + }, + properties: [ + getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]), + { + displayName: + 'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.', + name: 'notice', + type: 'notice', + default: '', + displayOptions: { + show: { + '/options.responseFormat': ['json_object'], + }, + }, + }, + { + displayName: 'Model', + name: 'model', + type: 'options', + description: + 'The model which will generate the completion. Learn more.', + typeOptions: { + loadOptions: { + routing: { + request: { + method: 'GET', + url: '/models', + }, + output: { + postReceive: [ + { + type: 'rootProperty', + properties: { + property: 'data', + }, + }, + { + type: 'setKeyValue', + properties: { + name: '={{$responseItem.id}}', + value: '={{$responseItem.id}}', + }, + }, + { + type: 'sort', + properties: { + key: 'name', + }, + }, + ], + }, + }, + }, + }, + routing: { + send: { + type: 'body', + property: 'model', + }, + }, + default: 'openai/gpt-4o-mini', + }, + { + displayName: 'Options', + name: 'options', + placeholder: 'Add Option', + description: 'Additional options to add', + type: 'collection', + default: {}, + options: [ + { + displayName: 'Frequency Penalty', + name: 'frequencyPenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim", + type: 'number', + }, + { + displayName: 'Maximum Number of Tokens', + name: 'maxTokens', + default: -1, + description: + 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).', + type: 'number', + typeOptions: { + maxValue: 32768, + }, + }, + { + displayName: 'Response Format', + name: 'responseFormat', + default: 'text', + type: 'options', + options: [ + { + name: 'Text', + value: 'text', + description: 'Regular text response', + }, + { + name: 'JSON', + value: 'json_object', + description: + 'Enables JSON mode, which should guarantee the message the model generates is valid JSON', + }, + ], + }, + { + displayName: 'Presence Penalty', + name: 'presencePenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics", + type: 'number', + }, + { + displayName: 'Sampling Temperature', + name: 'temperature', + default: 0.7, + typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 }, + description: + 'Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.', + type: 'number', + }, + { + displayName: 'Timeout', + name: 'timeout', + default: 360000, + description: 'Maximum amount of time a request is allowed to take in milliseconds', + type: 'number', + }, + { + displayName: 'Max Retries', + name: 'maxRetries', + default: 2, + description: 'Maximum number of retries to attempt', + type: 'number', + }, + { + displayName: 'Top P', + name: 'topP', + default: 1, + typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, + description: + 'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.', + type: 'number', + }, + ], + }, + ], + }; + + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { + const credentials = await this.getCredentials('openRouterApi'); + + const modelName = this.getNodeParameter('model', itemIndex) as string; + + const options = this.getNodeParameter('options', itemIndex, {}) as { + frequencyPenalty?: number; + maxTokens?: number; + maxRetries: number; + timeout: number; + presencePenalty?: number; + temperature?: number; + topP?: number; + responseFormat?: 'text' | 'json_object'; + }; + + const configuration: ClientOptions = { + baseURL: credentials.url, + }; + + const model = new ChatOpenAI({ + openAIApiKey: credentials.apiKey, + modelName, + ...options, + timeout: options.timeout ?? 60000, + maxRetries: options.maxRetries ?? 2, + configuration, + callbacks: [new N8nLlmTracing(this)], + modelKwargs: options.responseFormat + ? { + response_format: { type: options.responseFormat }, + } + : undefined, + onFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler), + }); + + return { + response: model, + }; + } +} diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/openrouter.dark.svg b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/openrouter.dark.svg new file mode 100644 index 0000000000000..0b8bb9df1b410 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/openrouter.dark.svg @@ -0,0 +1 @@ +OpenRouter diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/openrouter.svg b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/openrouter.svg new file mode 100644 index 0000000000000..749e44df71301 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/openrouter.svg @@ -0,0 +1 @@ +OpenRouter diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.ts index 22ca31e4da2b1..98ca94cb1fa71 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.ts @@ -1,4 +1,3 @@ -import { loadWorkflowInputMappings } from 'n8n-nodes-base/dist/utils/workflowInputsResourceMapping/GenericFunctions'; import type { INodeTypeBaseDescription, ISupplyDataFunctions, @@ -7,6 +6,7 @@ import type { INodeTypeDescription, } from 'n8n-workflow'; +import { localResourceMapping } from './methods'; import { WorkflowToolService } from './utils/WorkflowToolService'; import { versionDescription } from './versionDescription'; @@ -21,9 +21,7 @@ export class ToolWorkflowV2 implements INodeType { } methods = { - localResourceMapping: { - loadWorkflowInputMappings, - }, + localResourceMapping, 
}; async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/methods/index.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/methods/index.ts new file mode 100644 index 0000000000000..f43c9557eab52 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/methods/index.ts @@ -0,0 +1 @@ +export * as localResourceMapping from './localResourceMapping'; diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.ts new file mode 100644 index 0000000000000..2ca2b534e54f4 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.ts @@ -0,0 +1,17 @@ +import { loadWorkflowInputMappings } from 'n8n-nodes-base/dist/utils/workflowInputsResourceMapping/GenericFunctions'; +import type { ILocalLoadOptionsFunctions, ResourceMapperFields } from 'n8n-workflow'; + +export async function loadSubWorkflowInputs( + this: ILocalLoadOptionsFunctions, +): Promise<ResourceMapperFields> { + const { fields, subworkflowInfo } = await loadWorkflowInputMappings.bind(this)(); + let emptyFieldsNotice: string | undefined; + if (fields.length === 0) { + const subworkflowLink = subworkflowInfo?.id + ? `sub-workflow’s trigger` + : 'sub-workflow’s trigger'; + + emptyFieldsNotice = `This sub-workflow will not receive any input when called by your AI node. Define your expected input in the ${subworkflowLink}.`; + } + return { fields, emptyFieldsNotice }; } diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/versionDescription.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/versionDescription.ts index 469a7d6d4cb34..6d4275b4497f7 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/versionDescription.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/versionDescription.ts @@ -107,7 +107,7 @@ export const versionDescription: INodeTypeDescription = { typeOptions: { loadOptionsDependsOn: ['workflowId.value'], resourceMapper: { - localResourceMapperMethod: 'loadWorkflowInputMappings', + localResourceMapperMethod: 'loadSubWorkflowInputs', valuesLabel: 'Workflow Inputs', mode: 'map', fieldWords: { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts index 0323478ee8c58..d08bc2bab2a6a 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts @@ -1,3 +1,4 @@ +import type { MemoryVectorStore } from 'langchain/vectorstores/memory'; import type { INodeProperties } from 'n8n-workflow'; import { createVectorStoreNode } from '../shared/createVectorStoreNode'; @@ -20,7 +21,7 @@ const insertFields: INodeProperties[] = [ }, ]; -export class VectorStoreInMemory extends createVectorStoreNode({ +export class VectorStoreInMemory extends createVectorStoreNode<MemoryVectorStore>({ meta: { displayName: 'In-Memory Vector Store', name: 'vectorStoreInMemory', diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts index 852453b6228d9..7b2ab7664d086 100644 ---
a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts @@ -213,7 +213,7 @@ class ExtendedPGVectorStore extends PGVectorStore { } } -export class VectorStorePGVector extends createVectorStoreNode({ +export class VectorStorePGVector extends createVectorStoreNode<ExtendedPGVectorStore>({ meta: { description: 'Work with your data in Postgresql with the PGVector extension', icon: 'file:postgres.svg', @@ -274,6 +274,7 @@ export class VectorStorePGVector extends createVectorStoreNode({ return await ExtendedPGVectorStore.initialize(embeddings, config); }, + async populateVectorStore(context, embeddings, documents, itemIndex) { // NOTE: if you are to create the HNSW index before use, you need to consider moving the distanceStrategy field to // shared fields, because you need that strategy when creating the index. @@ -307,6 +308,11 @@ export class VectorStorePGVector extends createVectorStoreNode({ metadataColumnName: 'metadata', }) as ColumnOptions; - await PGVectorStore.fromDocuments(documents, embeddings, config); + const vectorStore = await PGVectorStore.fromDocuments(documents, embeddings, config); + vectorStore.client?.release(); + }, + + releaseVectorStoreClient(vectorStore) { + vectorStore.client?.release(); }, }) {} diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts index 5a11acea24e44..61761a54ec13b 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts @@ -51,7 +51,7 @@ const insertFields: INodeProperties[] = [ }, ]; -export class VectorStorePinecone extends createVectorStoreNode({ +export class VectorStorePinecone extends createVectorStoreNode<PineconeStore>({ meta: { displayName: 'Pinecone Vector Store', name: 'vectorStorePinecone', diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts index 988f607ad75ef..e18cc4988efc5 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts @@ -79,7 +79,7 @@ const retrieveFields: INodeProperties[] = [ }, ]; -export class VectorStoreQdrant extends createVectorStoreNode({ +export class VectorStoreQdrant extends createVectorStoreNode<QdrantVectorStore>({ meta: { displayName: 'Qdrant Vector Store', name: 'vectorStoreQdrant', diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts index a462ff8cf6a97..6ec3975ebd2f2 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts @@ -41,7 +41,7 @@ const retrieveFields: INodeProperties[] = [ }, ]; const updateFields: INodeProperties[] = [...insertFields]; -export class VectorStoreSupabase extends createVectorStoreNode({ +export class VectorStoreSupabase extends createVectorStoreNode<SupabaseVectorStore>({ meta: { description: 'Work with your data in
Supabase Vector Store', icon: 'file:supabase.svg', diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts index 1372d54f6e313..5c973002b26cd 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts @@ -46,7 +46,7 @@ const retrieveFields: INodeProperties[] = [ }, ]; -export class VectorStoreZep extends createVectorStoreNode({ +export class VectorStoreZep extends createVectorStoreNode<ZepVectorStore>({ meta: { displayName: 'Zep Vector Store', name: 'vectorStoreZep', diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index e393d32e55177..ecf2e64a81c01 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -49,7 +49,7 @@ interface NodeMeta { operationModes?: NodeOperationMode[]; } -export interface VectorStoreNodeConstructorArgs { +export interface VectorStoreNodeConstructorArgs<T extends VectorStore = VectorStore> { meta: NodeMeta; methods?: { listSearch?: { @@ -77,7 +77,8 @@ export interface VectorStoreNodeConstructorArgs { filter: Record | undefined, embeddings: Embeddings, itemIndex: number, - ) => Promise<VectorStore>; + ) => Promise<T>; + releaseVectorStoreClient?: (vectorStore: T) => void; } function transformDescriptionForOperationMode( @@ -90,11 +91,15 @@ function transformDescriptionForOperationMode( })); } -function isUpdateSupported(args: VectorStoreNodeConstructorArgs): boolean { +function isUpdateSupported( + args: VectorStoreNodeConstructorArgs<VectorStore>, +): boolean { return args.meta.operationModes?.includes('update') ?? false; } -function getOperationModeOptions(args: VectorStoreNodeConstructorArgs): INodePropertyOptions[] { +function getOperationModeOptions( + args: VectorStoreNodeConstructorArgs<VectorStore>, +): INodePropertyOptions[] { const enabledOperationModes = args.meta.operationModes ?? DEFAULT_OPERATION_MODES; const allOptions = [ @@ -137,7 +142,9 @@ function getOperationModeOptions(args: VectorStoreNodeConstructorArgs): INodePro ); } -export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => +export const createVectorStoreNode = <T extends VectorStore = VectorStore>( + args: VectorStoreNodeConstructorArgs<T>, +) => class VectorStoreNodeType implements INodeType { description: INodeTypeDescription = { displayName: args.meta.displayName, @@ -334,38 +341,42 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => embeddings, itemIndex, ); - const prompt = this.getNodeParameter('prompt', itemIndex) as string; - const topK = this.getNodeParameter('topK', itemIndex, 4) as number; - - const embeddedPrompt = await embeddings.embedQuery(prompt); - const docs = await vectorStore.similaritySearchVectorWithScore( - embeddedPrompt, - topK, - filter, - ); + try { + const prompt = this.getNodeParameter('prompt', itemIndex) as string; + const topK = this.getNodeParameter('topK', itemIndex, 4) as number; - const includeDocumentMetadata = this.getNodeParameter( - 'includeDocumentMetadata', - itemIndex, - true, - ) as boolean; - - const serializedDocs = docs.map(([doc, score]) => { - const document = { - pageContent: doc.pageContent, - ...(includeDocumentMetadata ?
{ metadata: doc.metadata } : {}), - }; - - return { - json: { document, score }, - pairedItem: { - item: itemIndex, - }, - }; - }); - - resultData.push(...serializedDocs); - logAiEvent(this, 'ai-vector-store-searched', { query: prompt }); + const embeddedPrompt = await embeddings.embedQuery(prompt); + const docs = await vectorStore.similaritySearchVectorWithScore( + embeddedPrompt, + topK, + filter, + ); + + const includeDocumentMetadata = this.getNodeParameter( + 'includeDocumentMetadata', + itemIndex, + true, + ) as boolean; + + const serializedDocs = docs.map(([doc, score]) => { + const document = { + pageContent: doc.pageContent, + ...(includeDocumentMetadata ? { metadata: doc.metadata } : {}), + }; + + return { + json: { document, score }, + pairedItem: { + item: itemIndex, + }, + }; + }); + + resultData.push(...serializedDocs); + logAiEvent(this, 'ai-vector-store-searched', { query: prompt }); + } finally { + args.releaseVectorStoreClient?.(vectorStore); + } } return [resultData]; @@ -427,24 +438,28 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => itemIndex, ); - const { processedDocuments, serializedDocuments } = await processDocument( - loader, - itemData, - itemIndex, - ); + try { + const { processedDocuments, serializedDocuments } = await processDocument( + loader, + itemData, + itemIndex, + ); - if (processedDocuments?.length !== 1) { - throw new NodeOperationError(this.getNode(), 'Single document per item expected'); - } + if (processedDocuments?.length !== 1) { + throw new NodeOperationError(this.getNode(), 'Single document per item expected'); + } - resultData.push(...serializedDocuments); + resultData.push(...serializedDocuments); - // Use ids option to upsert instead of insert - await vectorStore.addDocuments(processedDocuments, { - ids: [documentId], - }); + // Use ids option to upsert instead of insert + await vectorStore.addDocuments(processedDocuments, { + ids: [documentId], + }); - logAiEvent(this, 'ai-vector-store-updated'); + logAiEvent(this, 'ai-vector-store-updated'); + } finally { + args.releaseVectorStoreClient?.(vectorStore); + } } return [resultData]; @@ -468,6 +483,9 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => const vectorStore = await args.getVectorStoreClient(this, filter, embeddings, itemIndex); return { response: logWrapper(vectorStore, this), + closeFunction: async () => { + args.releaseVectorStoreClient?.(vectorStore); + }, }; } @@ -491,23 +509,28 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => embeddings, itemIndex, ); - const embeddedPrompt = await embeddings.embedQuery(input); - const documents = await vectorStore.similaritySearchVectorWithScore( - embeddedPrompt, - topK, - filter, - ); - return documents - .map((document) => { - if (includeDocumentMetadata) { - return { type: 'text', text: JSON.stringify(document[0]) }; - } - return { - type: 'text', - text: JSON.stringify({ pageContent: document[0].pageContent }), - }; - }) - .filter((document) => !!document); + + try { + const embeddedPrompt = await embeddings.embedQuery(input); + const documents = await vectorStore.similaritySearchVectorWithScore( + embeddedPrompt, + topK, + filter, + ); + return documents + .map((document) => { + if (includeDocumentMetadata) { + return { type: 'text', text: JSON.stringify(document[0]) }; + } + return { + type: 'text', + text: JSON.stringify({ pageContent: document[0].pageContent }), + }; + }) + .filter((document) => !!document); + } finally { + 
args.releaseVectorStoreClient?.(vectorStore); + } }, }); diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index b93deba806e8a..b22a887f52fdf 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -1,13 +1,13 @@ { "name": "@n8n/n8n-nodes-langchain", - "version": "1.76.0", + "version": "1.77.0", "description": "", "main": "index.js", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm run watch", "typecheck": "tsc --noEmit", - "build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm n8n-generate-metadata", + "build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm n8n-copy-static-files && pnpm n8n-generate-metadata", "format": "biome format --write .", "format:check": "biome ci .", "lint": "eslint nodes credentials utils --quiet", @@ -25,12 +25,14 @@ "dist/credentials/AnthropicApi.credentials.js", "dist/credentials/AzureOpenAiApi.credentials.js", "dist/credentials/CohereApi.credentials.js", + "dist/credentials/DeepSeekApi.credentials.js", "dist/credentials/GooglePalmApi.credentials.js", "dist/credentials/GroqApi.credentials.js", "dist/credentials/HuggingFaceApi.credentials.js", "dist/credentials/MotorheadApi.credentials.js", "dist/credentials/MistralCloudApi.credentials.js", "dist/credentials/OllamaApi.credentials.js", + "dist/credentials/OpenRouterApi.credentials.js", "dist/credentials/PineconeApi.credentials.js", "dist/credentials/QdrantApi.credentials.js", "dist/credentials/SerpApi.credentials.js", @@ -64,11 +66,13 @@ "dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js", "dist/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.js", "dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js", + "dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js", "dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js", "dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js", "dist/nodes/llms/LmChatGroq/LmChatGroq.node.js", "dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js", "dist/nodes/llms/LMChatOllama/LmChatOllama.node.js", + "dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js", "dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js", "dist/nodes/llms/LMOpenAi/LmOpenAi.node.js", "dist/nodes/llms/LMCohere/LmCohere.node.js", diff --git a/packages/@n8n/nodes-langchain/types/types.ts b/packages/@n8n/nodes-langchain/types/types.ts new file mode 100644 index 0000000000000..75d10441329f6 --- /dev/null +++ b/packages/@n8n/nodes-langchain/types/types.ts @@ -0,0 +1 @@ +type OpenAICompatibleCredential = { apiKey: string; url: string }; diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json index ec53c53dcdef7..f82e34607e02b 100644 --- a/packages/@n8n/task-runner/package.json +++ b/packages/@n8n/task-runner/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/task-runner", - "version": "1.14.0", + "version": "1.15.0", "scripts": { "clean": "rimraf dist .turbo", "start": "node dist/start.js", diff --git a/packages/cli/package.json b/packages/cli/package.json index 15511d5ef8039..2271d293c9a24 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "1.76.0", + "version": "1.77.0", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", @@ -96,7 +96,7 @@ "@n8n/task-runner": "workspace:*", "@n8n/typeorm": "0.3.20-12", "@n8n_io/ai-assistant-sdk": "1.13.0", - 
"@n8n_io/license-sdk": "2.14.2", + "@n8n_io/license-sdk": "2.15.0", "@oclif/core": "4.0.7", "@rudderstack/rudder-sdk-node": "2.0.9", "@sentry/node": "catalog:", diff --git a/packages/cli/src/__tests__/external-hooks.test.ts b/packages/cli/src/__tests__/external-hooks.test.ts new file mode 100644 index 0000000000000..5e4aa6ee89681 --- /dev/null +++ b/packages/cli/src/__tests__/external-hooks.test.ts @@ -0,0 +1,125 @@ +import type { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; +import type { ErrorReporter, Logger } from 'n8n-core'; +import type { IWorkflowBase } from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; + +import type { CredentialsRepository } from '@/databases/repositories/credentials.repository'; +import type { SettingsRepository } from '@/databases/repositories/settings.repository'; +import type { UserRepository } from '@/databases/repositories/user.repository'; +import type { WorkflowRepository } from '@/databases/repositories/workflow.repository'; +import { ExternalHooks } from '@/external-hooks'; + +describe('ExternalHooks', () => { + const logger = mock(); + const errorReporter = mock(); + const globalConfig = mock(); + const userRepository = mock(); + const settingsRepository = mock(); + const credentialsRepository = mock(); + const workflowRepository = mock(); + + const workflowData = mock({ id: '123', name: 'Test Workflow' }); + const hookFn = jest.fn(); + + let externalHooks: ExternalHooks; + + beforeEach(() => { + jest.resetAllMocks(); + globalConfig.externalHooks.files = []; + externalHooks = new ExternalHooks( + logger, + errorReporter, + globalConfig, + userRepository, + settingsRepository, + credentialsRepository, + workflowRepository, + ); + }); + + describe('init()', () => { + it('should not load hooks if no external hook files are configured', async () => { + // @ts-expect-error private method + const loadHooksSpy = jest.spyOn(externalHooks, 'loadHooks'); + await externalHooks.init(); + expect(loadHooksSpy).not.toHaveBeenCalled(); + }); + + it('should throw an error if hook file cannot be loaded', async () => { + globalConfig.externalHooks.files = ['/path/to/non-existent-hook.js']; + + jest.mock( + '/path/to/non-existent-hook.js', + () => { + throw new Error('File not found'); + }, + { virtual: true }, + ); + + await expect(externalHooks.init()).rejects.toThrow(ApplicationError); + }); + + it('should successfully load hooks from valid hook file', async () => { + const mockHookFile = { + workflow: { + create: [hookFn], + }, + }; + + globalConfig.externalHooks.files = ['/path/to/valid-hook.js']; + jest.mock('/path/to/valid-hook.js', () => mockHookFile, { virtual: true }); + + await externalHooks.init(); + + // eslint-disable-next-line @typescript-eslint/dot-notation + expect(externalHooks['registered']['workflow.create']).toHaveLength(1); + + await externalHooks.run('workflow.create', [workflowData]); + + expect(hookFn).toHaveBeenCalledTimes(1); + expect(hookFn).toHaveBeenCalledWith(workflowData); + }); + }); + + describe('run()', () => { + it('should not throw if no hooks are registered', async () => { + await externalHooks.run('n8n.stop'); + }); + + it('should execute registered hooks', async () => { + // eslint-disable-next-line @typescript-eslint/dot-notation + externalHooks['registered']['workflow.create'] = [hookFn]; + + await externalHooks.run('workflow.create', [workflowData]); + + expect(hookFn).toHaveBeenCalledTimes(1); + + const hookInvocationContext = hookFn.mock.instances[0]; + 
expect(hookInvocationContext).toHaveProperty('dbCollections'); + expect(hookInvocationContext.dbCollections).toEqual({ + User: userRepository, + Settings: settingsRepository, + Credentials: credentialsRepository, + Workflow: workflowRepository, + }); + }); + + it('should report error if hook execution fails', async () => { + hookFn.mockRejectedValueOnce(new Error('Hook failed')); + // eslint-disable-next-line @typescript-eslint/dot-notation + externalHooks['registered']['workflow.create'] = [hookFn]; + + await expect(externalHooks.run('workflow.create', [workflowData])).rejects.toThrow( + ApplicationError, + ); + + expect(errorReporter.error).toHaveBeenCalledWith(expect.any(ApplicationError), { + level: 'fatal', + }); + expect(logger.error).toHaveBeenCalledWith( + 'There was a problem running hook "workflow.create"', + ); + }); + }); +}); diff --git a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index 0e26d0d81c513..fb82f312ee641 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -16,6 +16,12 @@ const MOCK_ACTIVATION_KEY = 'activation-key'; const MOCK_FEATURE_FLAG = 'feat:sharing'; const MOCK_MAIN_PLAN_ID = '1b765dc4-d39d-4ffe-9885-c56dd67c4b26'; +function makeDateWithHourOffset(offsetInHours: number): Date { + const date = new Date(); + date.setHours(date.getHours() + offsetInHours); + return date; +} + const licenseConfig: GlobalConfig['license'] = { serverUrl: MOCK_SERVER_URL, autoRenewalEnabled: true, @@ -134,7 +140,7 @@ describe('License', () => { expect(LicenseManager.prototype.getManagementJwt).toHaveBeenCalled(); }); - test('getMainPlan() returns the right entitlement', async () => { + test('getMainPlan() returns the latest main entitlement', async () => { // mock entitlements response License.prototype.getCurrentEntitlements = jest.fn().mockReturnValue([ { @@ -143,8 +149,21 @@ describe('License', () => { productMetadata: {}, features: {}, featureOverrides: {}, - validFrom: new Date(), - validTo: new Date(), + validFrom: makeDateWithHourOffset(-3), + validTo: makeDateWithHourOffset(1), + }, + { + id: '95b9c852-1349-478d-9ad1-b3f55510e488', + productId: '670650f2-72d8-4397-898c-c249906e2cc2', + productMetadata: { + terms: { + isMainPlan: true, + }, + }, + features: {}, + featureOverrides: {}, + validFrom: makeDateWithHourOffset(-2), + validTo: makeDateWithHourOffset(1), }, { id: MOCK_MAIN_PLAN_ID, @@ -156,8 +175,8 @@ describe('License', () => { }, features: {}, featureOverrides: {}, - validFrom: new Date(), - validTo: new Date(), + validFrom: makeDateWithHourOffset(-1), // this is the LATEST / newest plan + validTo: makeDateWithHourOffset(1), }, ]); jest.fn(license.getMainPlan).mockReset(); @@ -175,8 +194,8 @@ describe('License', () => { productMetadata: {}, // has no `productMetadata.terms.isMainPlan`! features: {}, featureOverrides: {}, - validFrom: new Date(), - validTo: new Date(), + validFrom: makeDateWithHourOffset(-1), + validTo: makeDateWithHourOffset(1), }, { id: 'c1aae471-c24e-4874-ad88-b97107de486c', @@ -184,8 +203,8 @@ describe('License', () => { productMetadata: {}, // has no `productMetadata.terms.isMainPlan`! 
features: {}, featureOverrides: {}, - validFrom: new Date(), - validTo: new Date(), + validFrom: makeDateWithHourOffset(-1), + validTo: makeDateWithHourOffset(1), }, ]); jest.fn(license.getMainPlan).mockReset(); diff --git a/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts b/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts index 75aa6023019c6..c5d88b1b25a51 100644 --- a/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts +++ b/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts @@ -211,4 +211,44 @@ describe('LoadNodesAndCredentials', () => { expect(result.description.displayName).toBe('Special @#$% Node Tool'); }); }); + + describe('resolveSchema', () => { + let instance: LoadNodesAndCredentials; + + beforeEach(() => { + instance = new LoadNodesAndCredentials(mock(), mock(), mock(), mock()); + instance.knownNodes['n8n-nodes-base.test'] = { + className: 'Test', + sourcePath: '/nodes-base/dist/nodes/Test/Test.node.js', + }; + }); + + it('should return undefined if the node is not known', () => { + const result = instance.resolveSchema({ + node: 'n8n-nodes-base.doesNotExist', + version: '1.0.0', + resource: 'account', + operation: 'get', + }); + expect(result).toBeUndefined(); + }); + + it('should return the correct path if the node is known', () => { + const result = instance.resolveSchema({ + node: 'n8n-nodes-base.test', + version: '1.0.0', + resource: 'account', + operation: 'get', + }); + expect(result).toEqual('/nodes-base/dist/nodes/Test/__schema__/v1.0.0/account/get.json'); + }); + + it('should return the correct path if there is no resource or operation', () => { + const result = instance.resolveSchema({ + node: 'n8n-nodes-base.test', + version: '1.0.0', + }); + expect(result).toEqual('/nodes-base/dist/nodes/Test/__schema__/v1.0.0.json'); + }); + }); }); diff --git a/packages/cli/src/abstract-server.ts b/packages/cli/src/abstract-server.ts index 8a2ba38b4ac4b..a9340b0a87fe6 100644 --- a/packages/cli/src/abstract-server.ts +++ b/packages/cli/src/abstract-server.ts @@ -7,7 +7,6 @@ import { readFile } from 'fs/promises'; import type { Server } from 'http'; import isbot from 'isbot'; import { Logger } from 'n8n-core'; -import path from 'path'; import config from '@/config'; import { N8N_VERSION, TEMPLATES_DIR, inDevelopment, inTest } from '@/constants'; @@ -68,9 +67,6 @@ export abstract class AbstractServer { this.app.set('view engine', 'handlebars'); this.app.set('views', TEMPLATES_DIR); - const assetsPath: string = path.join(__dirname, '../../../assets'); - this.app.use(express.static(assetsPath)); - const proxyHops = config.getEnv('proxy_hops'); if (proxyHops > 0) this.app.set('trust proxy', proxyHops); diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 403e60f51d3e5..af0a1a42302de 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -89,7 +89,7 @@ export class ActiveWorkflowManager { await this.addActiveWorkflows('init'); - await this.externalHooks.run('activeWorkflows.initialized', []); + await this.externalHooks.run('activeWorkflows.initialized'); await this.webhookService.populateCache(); } diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index 9bfd992b3d58c..5e07b8e350b0a 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -189,42 +189,10 @@ export abstract class BaseCommand extends Command { private async 
_initObjectStoreService(options = { isReadOnly: false }) { const objectStoreService = Container.get(ObjectStoreService); - const { host, bucket, credentials } = this.globalConfig.externalStorage.s3; - - if (host === '') { - throw new ApplicationError( - 'External storage host not configured. Please set `N8N_EXTERNAL_STORAGE_S3_HOST`.', - ); - } - - if (bucket.name === '') { - throw new ApplicationError( - 'External storage bucket name not configured. Please set `N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME`.', - ); - } - - if (bucket.region === '') { - throw new ApplicationError( - 'External storage bucket region not configured. Please set `N8N_EXTERNAL_STORAGE_S3_BUCKET_REGION`.', - ); - } - - if (credentials.accessKey === '') { - throw new ApplicationError( - 'External storage access key not configured. Please set `N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY`.', - ); - } - - if (credentials.accessSecret === '') { - throw new ApplicationError( - 'External storage access secret not configured. Please set `N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET`.', - ); - } - this.logger.debug('Initializing object store service'); try { - await objectStoreService.init(host, bucket, credentials); + await objectStoreService.init(); objectStoreService.setReadonly(options.isReadOnly); this.logger.debug('Object store init completed'); diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 286b20c7c6e7e..a255a95bb905a 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -94,7 +94,7 @@ export class Start extends BaseCommand { Container.get(WaitTracker).stopTracking(); - await this.externalHooks?.run('n8n.stop', []); + await this.externalHooks?.run('n8n.stop'); await this.activeWorkflowManager.removeAllTriggerAndPollerBasedWorkflows(); diff --git a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index 8b6f318576183..fd1e961b59d94 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -33,7 +33,7 @@ export class Webhook extends BaseCommand { this.logger.info('\nStopping n8n...'); try { - await this.externalHooks?.run('n8n.stop', []); + await this.externalHooks?.run('n8n.stop'); await Container.get(ActiveExecutions).shutdown(); } catch (error) { diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index f5138f1ef3c7c..c6046a7772760 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -49,7 +49,7 @@ export class Worker extends BaseCommand { this.logger.info('Stopping worker...'); try { - await this.externalHooks?.run('n8n.stop', []); + await this.externalHooks?.run('n8n.stop'); } catch (error) { await this.exitWithCrash('Error shutting down worker', error); } diff --git a/packages/cli/src/config/index.ts b/packages/cli/src/config/index.ts index 8839d180ff16b..d11fa795db7a1 100644 --- a/packages/cli/src/config/index.ts +++ b/packages/cli/src/config/index.ts @@ -133,3 +133,5 @@ setGlobalState({ // eslint-disable-next-line import/no-default-export export default config; + +export type Config = typeof config; diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 0e7f747fba3aa..dba86112577f7 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -189,13 +189,6 @@ export const schema = { env: 'EXTERNAL_FRONTEND_HOOKS_URLS', }, - externalHookFiles: { - doc: 'Files containing external hooks. 
Multiple files can be separated by colon (":")', - format: String, - default: '', - env: 'EXTERNAL_HOOK_FILES', - }, - push: { backend: { format: ['sse', 'websocket'] as const, diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index abcf298d3d5e8..6411c91bacc45 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -104,6 +104,7 @@ export const LICENSE_QUOTAS = { WORKFLOW_HISTORY_PRUNE_LIMIT: 'quota:workflowHistoryPrune', TEAM_PROJECT_LIMIT: 'quota:maxTeamProjects', AI_CREDITS: 'quota:aiCredits', + API_KEYS_PER_USER_LIMIT: 'quota:apiKeysPerUserLimit', } as const; export const UNLIMITED_LICENSE_QUOTA = -1; diff --git a/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts index dc40d3357d956..aaa530a39b8ff 100644 --- a/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts +++ b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts @@ -3,8 +3,10 @@ import { mock } from 'jest-mock-extended'; import type { ApiKey } from '@/databases/entities/api-key'; import type { User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { EventService } from '@/events/event.service'; -import type { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; +import { License } from '@/license'; +import type { AuthenticatedRequest } from '@/requests'; import { PublicApiKeyService } from '@/services/public-api-key.service'; import { mockInstance } from '@test/mocking'; @@ -13,6 +15,8 @@ import { ApiKeysController } from '../api-keys.controller'; describe('ApiKeysController', () => { const publicApiKeyService = mockInstance(PublicApiKeyService); const eventService = mockInstance(EventService); + mockInstance(ApiKeyRepository); + mockInstance(License); const controller = Container.get(ApiKeysController); let req: AuthenticatedRequest; @@ -28,7 +32,7 @@ describe('ApiKeysController', () => { id: '123', userId: '123', label: 'My API Key', - apiKey: 'apiKey********', + apiKey: 'apiKey123', createdAt: new Date(), } as ApiKey; @@ -36,14 +40,25 @@ describe('ApiKeysController', () => { publicApiKeyService.createPublicApiKeyForUser.mockResolvedValue(apiKeyData); + publicApiKeyService.redactApiKey.mockImplementation(() => '***123'); + // Act - const newApiKey = await controller.createAPIKey(req); + const newApiKey = await controller.createAPIKey(req, mock(), mock()); // Assert expect(publicApiKeyService.createPublicApiKeyForUser).toHaveBeenCalled(); - expect(apiKeyData).toEqual(newApiKey); + expect(newApiKey).toEqual( + expect.objectContaining({ + id: '123', + userId: '123', + label: 'My API Key', + apiKey: '***123', + createdAt: expect.any(Date), + rawApiKey: 'apiKey123', + }), + ); expect(eventService.emit).toHaveBeenCalledWith( 'public-api-key-created', expect.objectContaining({ user: req.user, publicApi: false }), @@ -91,11 +106,11 @@ describe('ApiKeysController', () => { mfaEnabled: false, }); - const req = mock({ user, params: { id: user.id } }); + const req = mock({ user, params: { id: user.id } }); // Act - await controller.deleteAPIKey(req); + await controller.deleteAPIKey(req, mock(), user.id); publicApiKeyService.deleteApiKeyForUser.mockResolvedValue(); diff --git a/packages/cli/src/controllers/api-keys.controller.ts b/packages/cli/src/controllers/api-keys.controller.ts index db53a0044923d..17ed524b82c94 100644 --- a/packages/cli/src/controllers/api-keys.controller.ts +++ 
b/packages/cli/src/controllers/api-keys.controller.ts @@ -1,9 +1,13 @@ -import { type RequestHandler } from 'express'; +import { CreateOrUpdateApiKeyRequestDto } from '@n8n/api-types'; +import type { RequestHandler } from 'express'; -import { Delete, Get, Post, RestController } from '@/decorators'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import { Body, Delete, Get, Param, Patch, Post, RestController } from '@/decorators'; +import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { EventService } from '@/events/event.service'; +import { License } from '@/license'; import { isApiEnabled } from '@/public-api'; -import { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; +import { AuthenticatedRequest } from '@/requests'; import { PublicApiKeyService } from '@/services/public-api-key.service'; export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => { @@ -19,18 +23,36 @@ export class ApiKeysController { constructor( private readonly eventService: EventService, private readonly publicApiKeyService: PublicApiKeyService, + private readonly apiKeysRepository: ApiKeyRepository, + private readonly license: License, ) {} /** * Create an API Key */ @Post('/', { middlewares: [isApiEnabledMiddleware] }) - async createAPIKey(req: AuthenticatedRequest) { - const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user); + async createAPIKey( + req: AuthenticatedRequest, + _res: Response, + @Body payload: CreateOrUpdateApiKeyRequestDto, + ) { + const currentNumberOfApiKeys = await this.apiKeysRepository.countBy({ userId: req.user.id }); + + if (currentNumberOfApiKeys >= this.license.getApiKeysPerUserLimit()) { + throw new BadRequestError('You have reached the maximum number of API keys allowed.'); + } + + const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user, { + label: payload.label, + }); this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false }); - return newApiKey; + return { + ...newApiKey, + apiKey: this.publicApiKeyService.redactApiKey(newApiKey.apiKey), + rawApiKey: newApiKey.apiKey, + }; } /** @@ -46,11 +68,28 @@ export class ApiKeysController { * Delete an API Key */ @Delete('/:id', { middlewares: [isApiEnabledMiddleware] }) - async deleteAPIKey(req: ApiKeysRequest.DeleteAPIKey) { - await this.publicApiKeyService.deleteApiKeyForUser(req.user, req.params.id); + async deleteAPIKey(req: AuthenticatedRequest, _res: Response, @Param('id') apiKeyId: string) { + await this.publicApiKeyService.deleteApiKeyForUser(req.user, apiKeyId); this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false }); return { success: true }; } + + /** + * Patch an API Key + */ + @Patch('/:id', { middlewares: [isApiEnabledMiddleware] }) + async updateAPIKey( + req: AuthenticatedRequest, + _res: Response, + @Param('id') apiKeyId: string, + @Body payload: CreateOrUpdateApiKeyRequestDto, + ) { + await this.publicApiKeyService.updateApiKeyForUser(req.user, apiKeyId, { + label: payload.label, + }); + + return { success: true }; + } } diff --git a/packages/cli/src/controllers/e2e.controller.ts b/packages/cli/src/controllers/e2e.controller.ts index 025139aec2a79..90ba208ce17ca 100644 --- a/packages/cli/src/controllers/e2e.controller.ts +++ b/packages/cli/src/controllers/e2e.controller.ts @@ -110,6 +110,7 @@ export class E2EController { [LICENSE_QUOTAS.WORKFLOW_HISTORY_PRUNE_LIMIT]: -1, [LICENSE_QUOTAS.TEAM_PROJECT_LIMIT]: 0, 
[LICENSE_QUOTAS.AI_CREDITS]: 0, + [LICENSE_QUOTAS.API_KEYS_PER_USER_LIMIT]: 1, }; private numericFeatures: Record = { @@ -123,6 +124,8 @@ export class E2EController { [LICENSE_QUOTAS.TEAM_PROJECT_LIMIT]: E2EController.numericFeaturesDefaults[LICENSE_QUOTAS.TEAM_PROJECT_LIMIT], [LICENSE_QUOTAS.AI_CREDITS]: E2EController.numericFeaturesDefaults[LICENSE_QUOTAS.AI_CREDITS], + [LICENSE_QUOTAS.API_KEYS_PER_USER_LIMIT]: + E2EController.numericFeaturesDefaults[LICENSE_QUOTAS.API_KEYS_PER_USER_LIMIT], }; constructor( diff --git a/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts b/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts index 5281378fe0aed..1984d12f59f72 100644 --- a/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts +++ b/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts @@ -255,7 +255,7 @@ describe('OAuth2CredentialController', () => { type: 'oAuth2Api', }), ); - expect(res.render).toHaveBeenCalledWith('oauth-callback', { imagePath: 'n8n-logo.png' }); + expect(res.render).toHaveBeenCalledWith('oauth-callback'); }); it('merges oauthTokenData if it already exists', async () => { @@ -297,7 +297,7 @@ describe('OAuth2CredentialController', () => { type: 'oAuth2Api', }), ); - expect(res.render).toHaveBeenCalledWith('oauth-callback', { imagePath: 'n8n-logo.png' }); + expect(res.render).toHaveBeenCalledWith('oauth-callback'); }); it('overwrites oauthTokenData if it is a string', async () => { @@ -335,7 +335,7 @@ describe('OAuth2CredentialController', () => { type: 'oAuth2Api', }), ); - expect(res.render).toHaveBeenCalledWith('oauth-callback', { imagePath: 'n8n-logo.png' }); + expect(res.render).toHaveBeenCalledWith('oauth-callback'); }); }); }); diff --git a/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts b/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts index c4c24de0bc2b8..e188670fded11 100644 --- a/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts +++ b/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts @@ -149,7 +149,7 @@ export class OAuth2CredentialController extends AbstractOAuthController { credentialId: credential.id, }); - return res.render('oauth-callback', { imagePath: 'n8n-logo.png' }); + return res.render('oauth-callback'); } catch (error) { return this.renderCallbackError( res, diff --git a/packages/cli/src/errors/max-stalled-count.error.ts b/packages/cli/src/errors/max-stalled-count.error.ts index 653ca18eacac7..38f73023a7485 100644 --- a/packages/cli/src/errors/max-stalled-count.error.ts +++ b/packages/cli/src/errors/max-stalled-count.error.ts @@ -5,9 +5,12 @@ import { ApplicationError } from 'n8n-workflow'; */ export class MaxStalledCountError extends ApplicationError { constructor(cause: Error) { - super('The execution has reached the maximum number of attempts and will no longer retry.', { - level: 'warning', - cause, - }); + super( + 'This execution failed to be processed too many times and will no longer retry. 
To allow this execution to complete, please break down your workflow or scale up your workers or adjust your worker settings.', + { + level: 'warning', + cause, + }, + ); } } diff --git a/packages/cli/src/evaluation.ee/test-runner/__tests__/test-runner.service.ee.test.ts b/packages/cli/src/evaluation.ee/test-runner/__tests__/test-runner.service.ee.test.ts index 026b5d2eb8aaa..f82b09a8e1b1c 100644 --- a/packages/cli/src/evaluation.ee/test-runner/__tests__/test-runner.service.ee.test.ts +++ b/packages/cli/src/evaluation.ee/test-runner/__tests__/test-runner.service.ee.test.ts @@ -18,6 +18,7 @@ import type { TestRunRepository } from '@/databases/repositories/test-run.reposi import type { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; import { NodeTypes } from '@/node-types'; +import type { Telemetry } from '@/telemetry'; import type { WorkflowRunner } from '@/workflow-runner'; import { mockInstance, mockLogger } from '@test/mocking'; import { mockNodeTypesData } from '@test-integration/utils/node-types-data'; @@ -131,6 +132,7 @@ function mockEvaluationExecutionData(metrics: Record) { const errorReporter = mock(); const logger = mockLogger(); +const telemetry = mock(); async function mockLongExecutionPromise(data: IRun, delay: number): Promise { return await new Promise((resolve) => { @@ -182,6 +184,7 @@ describe('TestRunnerService', () => { test('should create an instance of TestRunnerService', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -198,6 +201,7 @@ describe('TestRunnerService', () => { test('should create and run test cases from past executions', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -237,6 +241,7 @@ describe('TestRunnerService', () => { test('should run both workflow under test and evaluation workflow', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -339,6 +344,7 @@ describe('TestRunnerService', () => { test('should properly count passed and failed executions', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -398,6 +404,7 @@ describe('TestRunnerService', () => { test('should properly count failed test executions', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -453,6 +460,7 @@ describe('TestRunnerService', () => { test('should properly count failed evaluations', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -512,6 +520,7 @@ describe('TestRunnerService', () => { test('should specify correct start nodes when running workflow under test', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -587,6 +596,7 @@ describe('TestRunnerService', () => { test('should properly choose trigger and start nodes', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -613,6 +623,7 @@ describe('TestRunnerService', () => { test('should properly 
choose trigger and start nodes 2', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, @@ -644,6 +655,7 @@ describe('TestRunnerService', () => { test('should cancel test run', async () => { const testRunnerService = new TestRunnerService( logger, + telemetry, workflowRepository, workflowRunner, executionRepository, diff --git a/packages/cli/src/evaluation.ee/test-runner/test-runner.service.ee.ts b/packages/cli/src/evaluation.ee/test-runner/test-runner.service.ee.ts index a594e15c05941..f2928f0b91a2c 100644 --- a/packages/cli/src/evaluation.ee/test-runner/test-runner.service.ee.ts +++ b/packages/cli/src/evaluation.ee/test-runner/test-runner.service.ee.ts @@ -23,6 +23,7 @@ import { TestMetricRepository } from '@/databases/repositories/test-metric.repos import { TestRunRepository } from '@/databases/repositories/test-run.repository.ee'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { NodeTypes } from '@/node-types'; +import { Telemetry } from '@/telemetry'; import { getRunData } from '@/workflow-execute-additional-data'; import { WorkflowRunner } from '@/workflow-runner'; @@ -43,6 +44,7 @@ export class TestRunnerService { constructor( private readonly logger: Logger, + private readonly telemetry: Telemetry, private readonly workflowRepository: WorkflowRepository, private readonly workflowRunner: WorkflowRunner, private readonly executionRepository: ExecutionRepository, @@ -268,12 +270,22 @@ export class TestRunnerService { const testMetricNames = await this.getTestMetricNames(test.id); // 2. Run over all the test cases + const pastExecutionIds = pastExecutions.map((e) => e.id); + await this.testRunRepository.markAsRunning(testRun.id, pastExecutions.length); + this.telemetry.track('User runs test', { + user_id: user.id, + test_id: test.id, + run_id: testRun.id, + executions_ids: pastExecutionIds, + workflow_id: test.workflowId, + evaluation_workflow_id: test.evaluationWorkflowId, + }); // Object to collect the results of the evaluation workflow executions const metrics = new EvaluationMetrics(testMetricNames); - for (const { id: pastExecutionId } of pastExecutions) { + for (const pastExecutionId of pastExecutionIds) { if (abortSignal.aborted) { this.logger.debug('Test run was cancelled', { testId: test.id, diff --git a/packages/cli/src/execution-lifecycle/__tests__/execution-lifecycle-hooks.test.ts b/packages/cli/src/execution-lifecycle/__tests__/execution-lifecycle-hooks.test.ts index 5ea8e411ad297..46f27d3541e56 100644 --- a/packages/cli/src/execution-lifecycle/__tests__/execution-lifecycle-hooks.test.ts +++ b/packages/cli/src/execution-lifecycle/__tests__/execution-lifecycle-hooks.test.ts @@ -25,6 +25,7 @@ import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.serv import { mockInstance } from '@test/mocking'; import { + getWorkflowHooksIntegrated, getWorkflowHooksMain, getWorkflowHooksWorkerExecuter, getWorkflowHooksWorkerMain, @@ -85,6 +86,8 @@ describe('Execution Lifecycle Hooks', () => { const now = new Date('2025-01-13T18:25:50.267Z'); jest.useFakeTimers({ now }); + let hooks: WorkflowHooks; + beforeEach(() => { jest.clearAllMocks(); workflowData.settings = {}; @@ -101,8 +104,62 @@ describe('Execution Lifecycle Hooks', () => { }; }); + const workflowEventTests = () => { + describe('workflowExecuteBefore', () => { + it('should emit workflow-pre-execute events', async () => { + await hooks.executeHookFunctions('workflowExecuteBefore', 
[workflow, runExecutionData]); + + expect(eventService.emit).toHaveBeenCalledWith('workflow-pre-execute', { + executionId, + data: workflowData, + }); + }); + }); + + describe('workflowExecuteAfter', () => { + it('should emit workflow-post-execute events', async () => { + await hooks.executeHookFunctions('workflowExecuteAfter', [successfulRun, {}]); + + expect(eventService.emit).toHaveBeenCalledWith('workflow-post-execute', { + executionId, + runData: successfulRun, + workflow: workflowData, + }); + }); + }); + }; + + const nodeEventsTests = () => { + describe('nodeExecuteBefore', () => { + it('should emit node-pre-execute event', async () => { + await hooks.executeHookFunctions('nodeExecuteBefore', [nodeName]); + + expect(eventService.emit).toHaveBeenCalledWith('node-pre-execute', { + executionId, + workflow: workflowData, + nodeName, + }); + }); + }); + + describe('nodeExecuteAfter', () => { + it('should emit node-post-execute event', async () => { + await hooks.executeHookFunctions('nodeExecuteAfter', [ + nodeName, + taskData, + runExecutionData, + ]); + + expect(eventService.emit).toHaveBeenCalledWith('node-post-execute', { + executionId, + workflow: workflowData, + nodeName, + }); + }); + }); + }; + describe('getWorkflowHooksMain', () => { - let hooks: WorkflowHooks; beforeEach(() => { hooks = getWorkflowHooksMain( { @@ -115,6 +172,9 @@ describe('Execution Lifecycle Hooks', () => { ); }); + workflowEventTests(); + nodeEventsTests(); + it('should setup the correct set of hooks', () => { expect(hooks).toBeInstanceOf(WorkflowHooks); expect(hooks.mode).toBe('manual'); @@ -126,10 +186,10 @@ describe('Execution Lifecycle Hooks', () => { const { hookFunctions } = hooks; expect(hookFunctions.nodeExecuteBefore).toHaveLength(2); expect(hookFunctions.nodeExecuteAfter).toHaveLength(3); - expect(hookFunctions.workflowExecuteBefore).toHaveLength(2); - expect(hookFunctions.workflowExecuteAfter).toHaveLength(2); + expect(hookFunctions.workflowExecuteBefore).toHaveLength(3); + expect(hookFunctions.workflowExecuteAfter).toHaveLength(4); expect(hookFunctions.nodeFetchedData).toHaveLength(1); - expect(hookFunctions.sendResponse).toBeUndefined(); + expect(hookFunctions.sendResponse).toHaveLength(0); }); describe('nodeExecuteBefore', () => { @@ -141,16 +201,6 @@ describe('Execution Lifecycle Hooks', () => { pushRef, ); }); - - it('should emit node-pre-execute event', async () => { - await hooks.executeHookFunctions('nodeExecuteBefore', [nodeName]); - - expect(eventService.emit).toHaveBeenCalledWith('node-pre-execute', { - executionId, - workflow: workflowData, - nodeName, - }); - }); }); describe('nodeExecuteAfter', () => { @@ -167,20 +217,6 @@ describe('Execution Lifecycle Hooks', () => { ); }); - it('should emit node-post-execute event', async () => { - await hooks.executeHookFunctions('nodeExecuteAfter', [ - nodeName, - taskData, - runExecutionData, - ]); - - expect(eventService.emit).toHaveBeenCalledWith('node-post-execute', { - executionId, - workflow: workflowData, - nodeName, - }); - }); - it('should save execution progress when enabled', async () => { workflowData.settings = { saveExecutionProgress: true }; @@ -230,12 +266,6 @@ describe('Execution Lifecycle Hooks', () => { ); }); - it('should not call eventService', async () => { - await hooks.executeHookFunctions('workflowExecuteBefore', [workflow, runExecutionData]); - - expect(eventService.emit).not.toHaveBeenCalled(); - }); - it('should run workflow.preExecute external hook', async () => { await 
hooks.executeHookFunctions('workflowExecuteBefore', [workflow, runExecutionData]); @@ -249,7 +279,6 @@ describe('Execution Lifecycle Hooks', () => { describe('workflowExecuteAfter', () => { it('should send executionFinished push event', async () => { await hooks.executeHookFunctions('workflowExecuteAfter', [successfulRun, {}]); - expect(eventService.emit).not.toHaveBeenCalled(); expect(push.send).toHaveBeenCalledWith( { type: 'executionFinished', @@ -320,15 +349,6 @@ describe('Execution Lifecycle Hooks', () => { ); }); - it('should handle errors when updating execution', async () => { - const error = new Error('Failed to update execution'); - executionRepository.updateExistingExecution.mockRejectedValueOnce(error); - - await hooks.executeHookFunctions('workflowExecuteAfter', [successfulRun, {}]); - - expect(errorReporter.error).toHaveBeenCalledWith(error); - }); - it('should not delete unfinished executions', async () => { const unfinishedRun = mock({ finished: false, status: 'running' }); @@ -457,11 +477,28 @@ describe('Execution Lifecycle Hooks', () => { }); }); }); + + describe("when pushRef isn't set", () => { + beforeEach(() => { + hooks = getWorkflowHooksMain({ executionMode, workflowData }, executionId); + }); + + it('should not send any push events', async () => { + await hooks.executeHookFunctions('nodeExecuteBefore', [nodeName]); + await hooks.executeHookFunctions('nodeExecuteAfter', [ + nodeName, + taskData, + runExecutionData, + ]); + await hooks.executeHookFunctions('workflowExecuteBefore', [workflow, runExecutionData]); + await hooks.executeHookFunctions('workflowExecuteAfter', [successfulRun, {}]); + + expect(push.send).not.toHaveBeenCalled(); + }); + }); }); describe('getWorkflowHooksWorkerMain', () => { - let hooks: WorkflowHooks; - beforeEach(() => { hooks = getWorkflowHooksWorkerMain(executionMode, executionId, workflowData, { pushRef, @@ -469,6 +506,8 @@ describe('Execution Lifecycle Hooks', () => { }); }); + workflowEventTests(); + it('should setup the correct set of hooks', () => { expect(hooks).toBeInstanceOf(WorkflowHooks); expect(hooks.mode).toBe('manual'); @@ -480,8 +519,10 @@ describe('Execution Lifecycle Hooks', () => { const { hookFunctions } = hooks; expect(hookFunctions.nodeExecuteBefore).toHaveLength(0); expect(hookFunctions.nodeExecuteAfter).toHaveLength(0); - expect(hookFunctions.workflowExecuteBefore).toHaveLength(1); - expect(hookFunctions.workflowExecuteAfter).toHaveLength(1); + expect(hookFunctions.workflowExecuteBefore).toHaveLength(2); + expect(hookFunctions.workflowExecuteAfter).toHaveLength(3); + expect(hookFunctions.nodeFetchedData).toHaveLength(0); + expect(hookFunctions.sendResponse).toHaveLength(0); }); describe('workflowExecuteBefore', () => { @@ -535,8 +576,6 @@ describe('Execution Lifecycle Hooks', () => { }); describe('getWorkflowHooksWorkerExecuter', () => { - let hooks: WorkflowHooks; - beforeEach(() => { hooks = getWorkflowHooksWorkerExecuter(executionMode, executionId, workflowData, { pushRef, @@ -544,6 +583,25 @@ describe('Execution Lifecycle Hooks', () => { }); }); + nodeEventsTests(); + + it('should setup the correct set of hooks', () => { + expect(hooks).toBeInstanceOf(WorkflowHooks); + expect(hooks.mode).toBe('manual'); + expect(hooks.executionId).toBe(executionId); + expect(hooks.workflowData).toEqual(workflowData); + expect(hooks.pushRef).toEqual('test-push-ref'); + expect(hooks.retryOf).toEqual('test-retry-of'); + + const { hookFunctions } = hooks; + expect(hookFunctions.nodeExecuteBefore).toHaveLength(2); + 
expect(hookFunctions.nodeExecuteAfter).toHaveLength(3); + expect(hookFunctions.workflowExecuteBefore).toHaveLength(2); + expect(hookFunctions.workflowExecuteAfter).toHaveLength(4); + expect(hookFunctions.nodeFetchedData).toHaveLength(1); + expect(hookFunctions.sendResponse).toHaveLength(0); + }); + describe('saving static data', () => { it('should skip saving static data for manual executions', async () => { hooks.mode = 'manual'; @@ -614,4 +672,30 @@ describe('Execution Lifecycle Hooks', () => { }); }); }); + + describe('getWorkflowHooksIntegrated', () => { + beforeEach(() => { + hooks = getWorkflowHooksIntegrated(executionMode, executionId, workflowData, undefined); + }); + + workflowEventTests(); + nodeEventsTests(); + + it('should setup the correct set of hooks', () => { + expect(hooks).toBeInstanceOf(WorkflowHooks); + expect(hooks.mode).toBe('manual'); + expect(hooks.executionId).toBe(executionId); + expect(hooks.workflowData).toEqual(workflowData); + expect(hooks.pushRef).toBeUndefined(); + expect(hooks.retryOf).toBeUndefined(); + + const { hookFunctions } = hooks; + expect(hookFunctions.nodeExecuteBefore).toHaveLength(1); + expect(hookFunctions.nodeExecuteAfter).toHaveLength(2); + expect(hookFunctions.workflowExecuteBefore).toHaveLength(2); + expect(hookFunctions.workflowExecuteAfter).toHaveLength(3); + expect(hookFunctions.nodeFetchedData).toHaveLength(1); + expect(hookFunctions.sendResponse).toHaveLength(0); + }); + }); }); diff --git a/packages/cli/src/execution-lifecycle/execution-lifecycle-hooks.ts b/packages/cli/src/execution-lifecycle/execution-lifecycle-hooks.ts index 1296f53958565..bb0542dba058e 100644 --- a/packages/cli/src/execution-lifecycle/execution-lifecycle-hooks.ts +++ b/packages/cli/src/execution-lifecycle/execution-lifecycle-hooks.ts @@ -34,6 +34,60 @@ import { } from './shared/shared-hook-functions'; import { toSaveSettings } from './to-save-settings'; +function mergeHookFunctions(...hookFunctions: IWorkflowExecuteHooks[]): IWorkflowExecuteHooks { + const result: IWorkflowExecuteHooks = { + nodeExecuteBefore: [], + nodeExecuteAfter: [], + workflowExecuteBefore: [], + workflowExecuteAfter: [], + sendResponse: [], + nodeFetchedData: [], + }; + for (const hooks of hookFunctions) { + for (const key in hooks) { + if (!result[key] || !hooks[key]) continue; + result[key].push(...hooks[key]); + } + } + return result; +} + +function hookFunctionsWorkflowEvents(userId?: string): IWorkflowExecuteHooks { + const eventService = Container.get(EventService); + return { + workflowExecuteBefore: [ + async function (this: WorkflowHooks): Promise { + const { executionId, workflowData } = this; + eventService.emit('workflow-pre-execute', { executionId, data: workflowData }); + }, + ], + workflowExecuteAfter: [ + async function (this: WorkflowHooks, runData: IRun): Promise { + const { executionId, workflowData: workflow } = this; + eventService.emit('workflow-post-execute', { executionId, runData, workflow, userId }); + }, + ], + }; +} + +function hookFunctionsNodeEvents(): IWorkflowExecuteHooks { + const eventService = Container.get(EventService); + return { + nodeExecuteBefore: [ + async function (this: WorkflowHooks, nodeName: string): Promise { + const { executionId, workflowData: workflow } = this; + eventService.emit('node-pre-execute', { executionId, workflow, nodeName }); + }, + ], + nodeExecuteAfter: [ + async function (this: WorkflowHooks, nodeName: string): Promise { + const { executionId, workflowData: workflow } = this; + eventService.emit('node-post-execute', { 
executionId, workflow, nodeName }); + }, + ], + }; +} + /** * Returns hook functions to push data to Editor-UI */ @@ -166,29 +220,24 @@ function hookFunctionsPreExecute(): IWorkflowExecuteHooks { }; } +/** This should ideally be added before any other `workflowExecuteAfter` hook to ensure all hooks get the same execution status */ +function hookFunctionsFinalizeExecutionStatus(): IWorkflowExecuteHooks { + return { + workflowExecuteAfter: [ + async function (fullRunData: IRun) { + fullRunData.status = determineFinalExecutionStatus(fullRunData); + }, + ], + }; +} + /** * Returns hook functions to save workflow execution and call error workflow */ function hookFunctionsSave(): IWorkflowExecuteHooks { const logger = Container.get(Logger); const workflowStatisticsService = Container.get(WorkflowStatisticsService); - const eventService = Container.get(EventService); return { - nodeExecuteBefore: [ - async function (this: WorkflowHooks, nodeName: string): Promise { - const { executionId, workflowData: workflow } = this; - - eventService.emit('node-pre-execute', { executionId, workflow, nodeName }); - }, - ], - nodeExecuteAfter: [ - async function (this: WorkflowHooks, nodeName: string): Promise { - const { executionId, workflowData: workflow } = this; - - eventService.emit('node-post-execute', { executionId, workflow, nodeName }); - }, - ], - workflowExecuteBefore: [], workflowExecuteAfter: [ async function ( this: WorkflowHooks, @@ -222,9 +271,6 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { } } - const executionStatus = determineFinalExecutionStatus(fullRunData); - fullRunData.status = executionStatus; - const saveSettings = toSaveSettings(this.workflowData.settings); if (isManualMode && !saveSettings.manual && !fullRunData.waitTill) { @@ -243,8 +289,8 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { } const shouldNotSave = - (executionStatus === 'success' && !saveSettings.success) || - (executionStatus !== 'success' && !saveSettings.error); + (fullRunData.status === 'success' && !saveSettings.success) || + (fullRunData.status !== 'success' && !saveSettings.error); if (shouldNotSave && !fullRunData.waitTill && !isManualMode) { executeErrorWorkflow( @@ -268,7 +314,7 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { const fullExecutionData = prepareExecutionDataForDbUpdate({ runData: fullRunData, workflowData: this.workflowData, - workflowStatusFinal: executionStatus, + workflowStatusFinal: fullRunData.status, retryOf: this.retryOf, }); @@ -283,23 +329,6 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { executionData: fullExecutionData, }); - if (!isManualMode) { - executeErrorWorkflow( - this.workflowData, - fullRunData, - this.mode, - this.executionId, - this.retryOf, - ); - } - } catch (error) { - Container.get(ErrorReporter).error(error); - logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, { - executionId: this.executionId, - workflowId: this.workflowData.id, - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - error, - }); if (!isManualMode) { executeErrorWorkflow( this.workflowData, @@ -333,29 +362,7 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { const logger = Container.get(Logger); const workflowStatisticsService = Container.get(WorkflowStatisticsService); - const eventService = Container.get(EventService); return { - nodeExecuteBefore: [ - async function (this: WorkflowHooks, nodeName: string): Promise { - const { executionId, 
workflowData: workflow } = this; - - eventService.emit('node-pre-execute', { executionId, workflow, nodeName }); - }, - ], - nodeExecuteAfter: [ - async function (this: WorkflowHooks, nodeName: string): Promise { - const { executionId, workflowData: workflow } = this; - - eventService.emit('node-post-execute', { executionId, workflow, nodeName }); - }, - ], - workflowExecuteBefore: [ - async function (this: WorkflowHooks): Promise { - const { executionId, workflowData } = this; - - eventService.emit('workflow-pre-execute', { executionId, data: workflowData }); - }, - ], workflowExecuteAfter: [ async function ( this: WorkflowHooks, @@ -387,13 +394,10 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { } } - const workflowStatusFinal = determineFinalExecutionStatus(fullRunData); - fullRunData.status = workflowStatusFinal; - if ( !isManualMode && - workflowStatusFinal !== 'success' && - workflowStatusFinal !== 'waiting' + fullRunData.status !== 'success' && + fullRunData.status !== 'waiting' ) { executeErrorWorkflow( this.workflowData, @@ -409,7 +413,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { const fullExecutionData = prepareExecutionDataForDbUpdate({ runData: fullRunData, workflowData: this.workflowData, - workflowStatusFinal, + workflowStatusFinal: fullRunData.status, retryOf: this.retryOf, }); @@ -423,16 +427,6 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { workflowId: this.workflowData.id, executionData: fullExecutionData, }); - } catch (error) { - if (!isManualMode) { - executeErrorWorkflow( - this.workflowData, - fullRunData, - this.mode, - this.executionId, - this.retryOf, - ); - } } finally { workflowStatisticsService.emit('workflowExecutionCompleted', { workflowData: this.workflowData, @@ -440,33 +434,15 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { }); } }, - async function (this: WorkflowHooks, runData: IRun): Promise { - const { executionId, workflowData: workflow } = this; - - eventService.emit('workflow-post-execute', { - workflow, - executionId, - runData, - }); - }, async function (this: WorkflowHooks, fullRunData: IRun) { const externalHooks = Container.get(ExternalHooks); - if (externalHooks.exists('workflow.postExecute')) { - try { - await externalHooks.run('workflow.postExecute', [ - fullRunData, - this.workflowData, - this.executionId, - ]); - } catch (error) { - Container.get(ErrorReporter).error(error); - Container.get(Logger).error( - 'There was a problem running hook "workflow.postExecute"', - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - error, - ); - } - } + try { + await externalHooks.run('workflow.postExecute', [ + fullRunData, + this.workflowData, + this.executionId, + ]); + } catch {} }, ], nodeFetchedData: [ @@ -485,13 +461,15 @@ export function getWorkflowHooksIntegrated( mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, + userId?: string, ): WorkflowHooks { - const hookFunctions = hookFunctionsSave(); - const preExecuteFunctions = hookFunctionsPreExecute(); - for (const key of Object.keys(preExecuteFunctions)) { - const hooks = hookFunctions[key] ?? 
[]; - hooks.push.apply(hookFunctions[key], preExecuteFunctions[key]); - } + const hookFunctions = mergeHookFunctions( + hookFunctionsWorkflowEvents(userId), + hookFunctionsNodeEvents(), + hookFunctionsFinalizeExecutionStatus(), + hookFunctionsSave(), + hookFunctionsPreExecute(), + ); return new WorkflowHooks(hookFunctions, mode, executionId, workflowData); } @@ -502,27 +480,20 @@ export function getWorkflowHooksWorkerExecuter( mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, - optionalParameters?: IWorkflowHooksOptionalParameters, + optionalParameters: IWorkflowHooksOptionalParameters = {}, ): WorkflowHooks { - optionalParameters = optionalParameters || {}; - const hookFunctions = hookFunctionsSaveWorker(); - const preExecuteFunctions = hookFunctionsPreExecute(); - for (const key of Object.keys(preExecuteFunctions)) { - const hooks = hookFunctions[key] ?? []; - hooks.push.apply(hookFunctions[key], preExecuteFunctions[key]); - } + const toMerge = [ + hookFunctionsNodeEvents(), + hookFunctionsFinalizeExecutionStatus(), + hookFunctionsSaveWorker(), + hookFunctionsPreExecute(), + ]; if (mode === 'manual' && Container.get(InstanceSettings).isWorker) { - const pushHooks = hookFunctionsPush(); - for (const key of Object.keys(pushHooks)) { - if (hookFunctions[key] === undefined) { - hookFunctions[key] = []; - } - // eslint-disable-next-line prefer-spread - hookFunctions[key].push.apply(hookFunctions[key], pushHooks[key]); - } + toMerge.push(hookFunctionsPush()); } + const hookFunctions = mergeHookFunctions(...toMerge); return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters); } @@ -533,67 +504,57 @@ export function getWorkflowHooksWorkerMain( mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, - optionalParameters?: IWorkflowHooksOptionalParameters, + optionalParameters: IWorkflowHooksOptionalParameters = {}, ): WorkflowHooks { - optionalParameters = optionalParameters || {}; - const hookFunctions = hookFunctionsPreExecute(); + const hookFunctions = mergeHookFunctions( + hookFunctionsWorkflowEvents(), + hookFunctionsPreExecute(), + hookFunctionsFinalizeExecutionStatus(), + { + workflowExecuteAfter: [ + async function (this: WorkflowHooks, fullRunData: IRun): Promise { + // Don't delete executions before they are finished + if (!fullRunData.finished) return; + + const saveSettings = toSaveSettings(this.workflowData.settings); + + const isManualMode = this.mode === 'manual'; + + if (isManualMode && !saveSettings.manual && !fullRunData.waitTill) { + /** + * When manual executions are not being saved, we only soft-delete + * the execution so that the user can access its binary data + * while building their workflow. + * + * The manual execution and its binary data will be hard-deleted + * on the next pruning cycle after the grace period set by + * `EXECUTIONS_DATA_HARD_DELETE_BUFFER`. + */ + await Container.get(ExecutionRepository).softDelete(this.executionId); - // TODO: why are workers pushing to frontend? 
- // TODO: simplifying this for now to just leave the bare minimum hooks + return; + } - // const hookFunctions = hookFunctionsPush(); - // const preExecuteFunctions = hookFunctionsPreExecute(); - // for (const key of Object.keys(preExecuteFunctions)) { - // if (hookFunctions[key] === undefined) { - // hookFunctions[key] = []; - // } - // hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]); - // } + const shouldNotSave = + (fullRunData.status === 'success' && !saveSettings.success) || + (fullRunData.status !== 'success' && !saveSettings.error); + + if (!isManualMode && shouldNotSave && !fullRunData.waitTill) { + await Container.get(ExecutionRepository).hardDelete({ + workflowId: this.workflowData.id, + executionId: this.executionId, + }); + } + }, + ], + }, + ); // When running with worker mode, main process executes // Only workflowExecuteBefore + workflowExecuteAfter // So to avoid confusion, we are removing other hooks. hookFunctions.nodeExecuteBefore = []; hookFunctions.nodeExecuteAfter = []; - hookFunctions.workflowExecuteAfter = [ - async function (this: WorkflowHooks, fullRunData: IRun): Promise { - // Don't delete executions before they are finished - if (!fullRunData.finished) return; - - const executionStatus = determineFinalExecutionStatus(fullRunData); - fullRunData.status = executionStatus; - - const saveSettings = toSaveSettings(this.workflowData.settings); - - const isManualMode = this.mode === 'manual'; - - if (isManualMode && !saveSettings.manual && !fullRunData.waitTill) { - /** - * When manual executions are not being saved, we only soft-delete - * the execution so that the user can access its binary data - * while building their workflow. - * - * The manual execution and its binary data will be hard-deleted - * on the next pruning cycle after the grace period set by - * `EXECUTIONS_DATA_HARD_DELETE_BUFFER`. - */ - await Container.get(ExecutionRepository).softDelete(this.executionId); - - return; - } - - const shouldNotSave = - (executionStatus === 'success' && !saveSettings.success) || - (executionStatus !== 'success' && !saveSettings.error); - - if (!isManualMode && shouldNotSave && !fullRunData.waitTill) { - await Container.get(ExecutionRepository).hardDelete({ - workflowId: this.workflowData.id, - executionId: this.executionId, - }); - } - }, - ]; return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters); } @@ -605,22 +566,14 @@ export function getWorkflowHooksMain( data: IWorkflowExecutionDataProcess, executionId: string, ): WorkflowHooks { - const hookFunctions = hookFunctionsSave(); - const pushFunctions = hookFunctionsPush(); - for (const key of Object.keys(pushFunctions)) { - const hooks = hookFunctions[key] ?? []; - hooks.push.apply(hookFunctions[key], pushFunctions[key]); - } - - const preExecuteFunctions = hookFunctionsPreExecute(); - for (const key of Object.keys(preExecuteFunctions)) { - const hooks = hookFunctions[key] ?? 
[]; - hooks.push.apply(hookFunctions[key], preExecuteFunctions[key]); - } - - if (!hookFunctions.nodeExecuteBefore) hookFunctions.nodeExecuteBefore = []; - if (!hookFunctions.nodeExecuteAfter) hookFunctions.nodeExecuteAfter = []; - + const hookFunctions = mergeHookFunctions( + hookFunctionsWorkflowEvents(), + hookFunctionsNodeEvents(), + hookFunctionsFinalizeExecutionStatus(), + hookFunctionsSave(), + hookFunctionsPush(), + hookFunctionsPreExecute(), + ); return new WorkflowHooks(hookFunctions, data.executionMode, executionId, data.workflowData, { pushRef: data.pushRef, retryOf: data.retryOf as string, diff --git a/packages/cli/src/external-hooks.ts b/packages/cli/src/external-hooks.ts index 8a0ba82c9853e..da8aa2d230f9a 100644 --- a/packages/cli/src/external-hooks.ts +++ b/packages/cli/src/external-hooks.ts @@ -1,25 +1,104 @@ -/* eslint-disable @typescript-eslint/no-var-requires */ +import type { FrontendSettings, UserUpdateRequestDto } from '@n8n/api-types'; +import type { ClientOAuth2Options } from '@n8n/client-oauth2'; +import { GlobalConfig } from '@n8n/config'; import { Service } from '@n8n/di'; +import { ErrorReporter, Logger } from 'n8n-core'; +import type { IRun, IWorkflowBase, Workflow, WorkflowExecuteMode } from 'n8n-workflow'; import { ApplicationError } from 'n8n-workflow'; +import type clientOAuth1 from 'oauth-1.0a'; -import config from '@/config'; +import type { AbstractServer } from '@/abstract-server'; +import type { Config } from '@/config'; +import type { TagEntity } from '@/databases/entities/tag-entity'; +import type { User } from '@/databases/entities/user'; import { CredentialsRepository } from '@/databases/repositories/credentials.repository'; import { SettingsRepository } from '@/databases/repositories/settings.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import type { IExternalHooksFileData, IExternalHooksFunctions } from '@/interfaces'; +import type { ICredentialsDb, PublicUser } from '@/interfaces'; + +type Repositories = { + User: UserRepository; + Settings: SettingsRepository; + Credentials: CredentialsRepository; + Workflow: WorkflowRepository; +}; + +type ExternalHooksMap = { + 'n8n.ready': [server: AbstractServer, config: Config]; + 'n8n.stop': never; + 'worker.ready': never; + + 'activeWorkflows.initialized': never; + + 'credentials.create': [encryptedData: ICredentialsDb]; + 'credentials.update': [newCredentialData: ICredentialsDb]; + 'credentials.delete': [credentialId: string]; + + 'frontend.settings': [frontendSettings: FrontendSettings]; + + 'mfa.beforeSetup': [user: User]; + + 'oauth1.authenticate': [ + oAuthOptions: clientOAuth1.Options, + oauthRequestData: { oauth_callback: string }, + ]; + 'oauth2.authenticate': [oAuthOptions: ClientOAuth2Options]; + 'oauth2.callback': [oAuthOptions: ClientOAuth2Options]; + + 'tag.beforeCreate': [tag: TagEntity]; + 'tag.afterCreate': [tag: TagEntity]; + 'tag.beforeUpdate': [tag: TagEntity]; + 'tag.afterUpdate': [tag: TagEntity]; + 'tag.beforeDelete': [tagId: string]; + 'tag.afterDelete': [tagId: string]; + + 'user.deleted': [user: PublicUser]; + 'user.profile.beforeUpdate': [ + userId: string, + currentEmail: string, + payload: UserUpdateRequestDto, + ]; + 'user.profile.update': [currentEmail: string, publicUser: PublicUser]; + 'user.password.update': [updatedEmail: string, updatedPassword: string]; + 'user.invited': [emails: string[]]; + + 'workflow.create': [createdWorkflow: 
IWorkflowBase]; + 'workflow.afterCreate': [createdWorkflow: IWorkflowBase]; + 'workflow.activate': [updatedWorkflow: IWorkflowBase]; + 'workflow.update': [updatedWorkflow: IWorkflowBase]; + 'workflow.afterUpdate': [updatedWorkflow: IWorkflowBase]; + 'workflow.delete': [workflowId: string]; + 'workflow.afterDelete': [workflowId: string]; + + 'workflow.preExecute': [workflow: Workflow, mode: WorkflowExecuteMode]; + 'workflow.postExecute': [ + fullRunData: IRun | undefined, + workflowData: IWorkflowBase, + executionId: string, + ]; +}; +type HookNames = keyof ExternalHooksMap; + +// TODO: Derive this type from Hooks +interface IExternalHooksFileData { + [Resource: string]: { + [Operation: string]: Array<(...args: unknown[]) => Promise>; + }; +} @Service() export class ExternalHooks { - externalHooks: { - [key: string]: Array<() => {}>; + private readonly registered: { + [hookName in HookNames]?: Array<(...args: ExternalHooksMap[hookName]) => Promise>; } = {}; - private initDidRun = false; - - private dbCollections: IExternalHooksFunctions['dbCollections']; + private readonly dbCollections: Repositories; constructor( + private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, + private readonly globalConfig: GlobalConfig, userRepository: UserRepository, settingsRepository: SettingsRepository, credentialsRepository: CredentialsRepository, @@ -33,72 +112,58 @@ export class ExternalHooks { }; } - async init(): Promise { - if (this.initDidRun) { - return; - } - - await this.loadHooksFiles(); - - this.initDidRun = true; - } - - private async loadHooksFiles() { - const externalHookFiles = config.getEnv('externalHookFiles').split(':'); + async init() { + const externalHookFiles = this.globalConfig.externalHooks.files; // Load all the provided hook-files for (let hookFilePath of externalHookFiles) { hookFilePath = hookFilePath.trim(); - if (hookFilePath !== '') { - try { - const hookFile = require(hookFilePath) as IExternalHooksFileData; - this.loadHooks(hookFile); - } catch (e) { - const error = e instanceof Error ? e : new Error(`${e}`); - - throw new ApplicationError('Problem loading external hook file', { - extra: { errorMessage: error.message, hookFilePath }, - cause: error, - }); - } + try { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const hookFile = require(hookFilePath) as IExternalHooksFileData; + this.loadHooks(hookFile); + } catch (e) { + const error = e instanceof Error ? 
e : new Error(`${e}`); + + throw new ApplicationError('Problem loading external hook file', { + extra: { errorMessage: error.message, hookFilePath }, + cause: error, + }); } } } private loadHooks(hookFileData: IExternalHooksFileData) { - for (const resource of Object.keys(hookFileData)) { - for (const operation of Object.keys(hookFileData[resource])) { - // Save all the hook functions directly under their string - // format in an array - const hookString = `${resource}.${operation}`; - if (this.externalHooks[hookString] === undefined) { - this.externalHooks[hookString] = []; - } - - // eslint-disable-next-line prefer-spread - this.externalHooks[hookString].push.apply( - this.externalHooks[hookString], - hookFileData[resource][operation], - ); + const { registered } = this; + for (const [resource, operations] of Object.entries(hookFileData)) { + for (const operation of Object.keys(operations)) { + const hookName = `${resource}.${operation}` as HookNames; + registered[hookName] ??= []; + registered[hookName].push(...operations[operation]); } } } - async run(hookName: string, hookParameters?: any[]): Promise { - if (this.externalHooks[hookName] === undefined) { - return; - } - - const externalHookFunctions: IExternalHooksFunctions = { - dbCollections: this.dbCollections, - }; - - for (const externalHookFunction of this.externalHooks[hookName]) { - await externalHookFunction.apply(externalHookFunctions, hookParameters); + async run( + hookName: HookName, + hookParameters?: ExternalHooksMap[HookName], + ): Promise { + const { registered, dbCollections } = this; + const hookFunctions = registered[hookName]; + if (!hookFunctions?.length) return; + + const context = { dbCollections }; + + for (const hookFunction of hookFunctions) { + try { + await hookFunction.apply(context, hookParameters); + } catch (cause) { + this.logger.error(`There was a problem running hook "${hookName}"`); + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const error = new ApplicationError(`External hook "${hookName}" failed`, { cause }); + this.errorReporter.error(error, { level: 'fatal' }); + throw error; + } } } - - exists(hookName: string): boolean { - return !!this.externalHooks[hookName]; - } } diff --git a/packages/cli/src/interfaces.ts b/packages/cli/src/interfaces.ts index e50b96c38456f..c5cf76bd328a1 100644 --- a/packages/cli/src/interfaces.ts +++ b/packages/cli/src/interfaces.ts @@ -31,10 +31,6 @@ import type { AuthProviderType } from '@/databases/entities/auth-identity'; import type { SharedCredentials } from '@/databases/entities/shared-credentials'; import type { TagEntity } from '@/databases/entities/tag-entity'; import type { AssignableRole, GlobalRole, User } from '@/databases/entities/user'; -import type { CredentialsRepository } from '@/databases/repositories/credentials.repository'; -import type { SettingsRepository } from '@/databases/repositories/settings.repository'; -import type { UserRepository } from '@/databases/repositories/user.repository'; -import type { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import type { LICENSE_FEATURES, LICENSE_QUOTAS } from './constants'; import type { ExternalHooks } from './external-hooks'; @@ -220,46 +216,6 @@ export interface IExecutingWorkflowData { status: ExecutionStatus; } -export interface IExternalHooks { - credentials?: { - create?: Array<{ - (this: IExternalHooksFunctions, credentialsData: ICredentialsEncrypted): Promise; - }>; - delete?: Array<{ (this: IExternalHooksFunctions, credentialId: string): 
Promise }>; - update?: Array<{ - (this: IExternalHooksFunctions, credentialsData: ICredentialsDb): Promise; - }>; - }; - workflow?: { - activate?: Array<{ (this: IExternalHooksFunctions, workflowData: IWorkflowDb): Promise }>; - create?: Array<{ (this: IExternalHooksFunctions, workflowData: IWorkflowBase): Promise }>; - delete?: Array<{ (this: IExternalHooksFunctions, workflowId: string): Promise }>; - execute?: Array<{ - ( - this: IExternalHooksFunctions, - workflowData: IWorkflowDb, - mode: WorkflowExecuteMode, - ): Promise; - }>; - update?: Array<{ (this: IExternalHooksFunctions, workflowData: IWorkflowDb): Promise }>; - }; -} - -export interface IExternalHooksFileData { - [key: string]: { - [key: string]: Array<(...args: any[]) => Promise>; - }; -} - -export interface IExternalHooksFunctions { - dbCollections: { - User: UserRepository; - Settings: SettingsRepository; - Credentials: CredentialsRepository; - Workflow: WorkflowRepository; - }; -} - export interface IPersonalizationSurveyAnswers { email: string | null; codingSkill: string | null; diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts index 0b02e7da1a79c..4a1b7d134e674 100644 --- a/packages/cli/src/license.ts +++ b/packages/cli/src/license.ts @@ -330,7 +330,7 @@ export class License { } /** - * Helper function to get the main plan for a license + * Helper function to get the latest main plan for a license */ getMainPlan(): TEntitlement | undefined { if (!this.manager) { @@ -342,6 +342,8 @@ export class License { return undefined; } + entitlements.sort((a, b) => b.validFrom.getTime() - a.validFrom.getTime()); + return entitlements.find( (entitlement) => (entitlement.productMetadata?.terms as { isMainPlan?: boolean })?.isMainPlan, ); @@ -356,6 +358,10 @@ export class License { return this.getFeatureValue(LICENSE_QUOTAS.USERS_LIMIT) ?? UNLIMITED_LICENSE_QUOTA; } + getApiKeysPerUserLimit() { + return this.getFeatureValue(LICENSE_QUOTAS.API_KEYS_PER_USER_LIMIT) ?? 1; + } + getTriggerLimit() { return this.getFeatureValue(LICENSE_QUOTAS.TRIGGER_LIMIT) ?? UNLIMITED_LICENSE_QUOTA; } diff --git a/packages/cli/src/load-nodes-and-credentials.ts b/packages/cli/src/load-nodes-and-credentials.ts index 88cbfc05cbab2..4d6493d7ee7bc 100644 --- a/packages/cli/src/load-nodes-and-credentials.ts +++ b/packages/cli/src/load-nodes-and-credentials.ts @@ -174,6 +174,29 @@ export class LoadNodesAndCredentials { return isContainedWithin(loader.directory, filePath) ? filePath : undefined; } + resolveSchema({ + node, + version, + resource, + operation, + }: { + node: string; + version: string; + resource?: string; + operation?: string; + }): string | undefined { + const nodePath = this.known.nodes[node]?.sourcePath; + if (!nodePath) { + return undefined; + } + + const nodeParentPath = path.dirname(nodePath); + const schemaPath = ['__schema__', `v${version}`, resource, operation].filter(Boolean).join('/'); + const filePath = path.resolve(nodeParentPath, schemaPath + '.json'); + + return isContainedWithin(nodeParentPath, filePath) ? 
filePath : undefined; + } + getCustomDirectories(): string[] { const customDirectories = [this.instanceSettings.customExtensionDir]; diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index b9a5ae97a3ecc..9c26f740bb738 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -175,14 +175,6 @@ export declare namespace CredentialRequest { >; } -// ---------------------------------- -// /api-keys -// ---------------------------------- - -export declare namespace ApiKeysRequest { - export type DeleteAPIKey = AuthenticatedRequest<{ id: string }>; -} - // ---------------------------------- // /me // ---------------------------------- diff --git a/packages/cli/src/scaling/scaling.service.ts b/packages/cli/src/scaling/scaling.service.ts index f20d0764c6aa9..7c48ce57e2632 100644 --- a/packages/cli/src/scaling/scaling.service.ts +++ b/packages/cli/src/scaling/scaling.service.ts @@ -17,7 +17,6 @@ import config from '@/config'; import { HIGHEST_SHUTDOWN_PRIORITY, Time } from '@/constants'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; -import { MaxStalledCountError } from '@/errors/max-stalled-count.error'; import { EventService } from '@/events/event.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { assertNever } from '@/utils'; @@ -271,10 +270,6 @@ export class ScalingService { this.queue.on('error', (error: Error) => { if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by RedisClientService.retryStrategy - if (error.message.includes('job stalled more than maxStalledCount')) { - throw new MaxStalledCountError(error); - } - /** * Non-recoverable error on worker start with Redis unavailable. * Even if Redis recovers, worker will remain unable to process jobs. 
diff --git a/packages/cli/src/server.ts b/packages/cli/src/server.ts index 17ffd66ad9ca3..9db860b1e67f1 100644 --- a/packages/cli/src/server.ts +++ b/packages/cli/src/server.ts @@ -138,6 +138,7 @@ export class Server extends AbstractServer { if (!this.globalConfig.tags.disabled) { await import('@/controllers/tags.controller'); } + // ---------------------------------------- // SAML // ---------------------------------------- @@ -322,8 +323,27 @@ export class Server extends AbstractServer { res.sendStatus(404); }; + const serveSchemas: express.RequestHandler = async (req, res) => { + const { node, version, resource, operation } = req.params; + const filePath = this.loadNodesAndCredentials.resolveSchema({ + node, + resource, + operation, + version, + }); + + if (filePath) { + try { + await fsAccess(filePath); + return res.sendFile(filePath, cacheOptions); + } catch {} + } + res.sendStatus(404); + }; + this.app.use('/icons/@:scope/:packageName/*/*.(svg|png)', serveIcons); this.app.use('/icons/:packageName/*/*.(svg|png)', serveIcons); + this.app.use('/schemas/:node/:version/:resource?/:operation?.json', serveSchemas); const isTLSEnabled = this.globalConfig.protocol === 'https' && !!(this.sslKey && this.sslCert); diff --git a/packages/cli/src/services/__tests__/public-api-key.service.test.ts b/packages/cli/src/services/__tests__/public-api-key.service.test.ts index 7c60b62983ead..86db071f352d1 100644 --- a/packages/cli/src/services/__tests__/public-api-key.service.test.ts +++ b/packages/cli/src/services/__tests__/public-api-key.service.test.ts @@ -144,4 +144,28 @@ describe('PublicApiKeyService', () => { ); }); }); + + describe('redactApiKey', () => { + it('should redact api key', async () => { + //Arrange + + const jwt = + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE0ODUxNDA5ODQsImlhdCI6MTQ4NTEzNzM4NCwiaXNzIjoiYWNtZS5jb20iLCJzdWIiOiIyOWFjMGMxOC0wYjRhLTQyY2YtODJmYy0wM2Q1NzAzMThhMWQiLCJhcHBsaWNhdGlvbklkIjoiNzkxMDM3MzQtOTdhYi00ZDFhLWFmMzctZTAwNmQwNWQyOTUyIiwicm9sZXMiOltdfQ.Mp0Pcwsz5VECK11Kf2ZZNF_SMKu5CgBeLN9ZOP04kZo'; + + const publicApiKeyService = new PublicApiKeyService( + apiKeyRepository, + userRepository, + jwtService, + eventService, + ); + + //Act + + const redactedApiKey = publicApiKeyService.redactApiKey(jwt); + + //Assert + + expect(redactedApiKey).toBe('******4kZo'); + }); + }); }); diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index 954c3e9fc3e7b..ad94bb60ea065 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -146,6 +146,7 @@ export class FrontendService { }, }, publicApi: { + apiKeysPerUserLimit: this.license.getApiKeysPerUserLimit(), enabled: isApiEnabled(), latestVersion: 1, path: this.globalConfig.publicApi.path, diff --git a/packages/cli/src/services/public-api-key.service.ts b/packages/cli/src/services/public-api-key.service.ts index a6b1133bc29d0..f2e43c3181bab 100644 --- a/packages/cli/src/services/public-api-key.service.ts +++ b/packages/cli/src/services/public-api-key.service.ts @@ -12,8 +12,8 @@ import { JwtService } from './jwt.service'; const API_KEY_AUDIENCE = 'public-api'; const API_KEY_ISSUER = 'n8n'; -const REDACT_API_KEY_REVEAL_COUNT = 15; -const REDACT_API_KEY_MAX_LENGTH = 80; +const REDACT_API_KEY_REVEAL_COUNT = 4; +const REDACT_API_KEY_MAX_LENGTH = 10; @Service() export class PublicApiKeyService { @@ -27,15 +27,14 @@ export class PublicApiKeyService { /** * Creates a new public API key for the specified user. 
* @param user - The user for whom the API key is being created. - * @returns A promise that resolves to the newly created API key. */ - async createPublicApiKeyForUser(user: User) { + async createPublicApiKeyForUser(user: User, { label }: { label: string }) { const apiKey = this.generateApiKey(user); await this.apiKeyRepository.upsert( this.apiKeyRepository.create({ userId: user.id, apiKey, - label: 'My API Key', + label, }), ['apiKey'], ); @@ -60,6 +59,10 @@ export class PublicApiKeyService { await this.apiKeyRepository.delete({ userId: user.id, id: apiKeyId }); } + async updateApiKeyForUser(user: User, apiKeyId: string, { label }: { label?: string } = {}) { + await this.apiKeyRepository.update({ id: apiKeyId, userId: user.id }, { label }); + } + private async getUserForApiKey(apiKey: string) { return await this.userRepository .createQueryBuilder('user') @@ -70,22 +73,24 @@ export class PublicApiKeyService { } /** - * Redacts an API key by keeping the first few characters and replacing the rest with asterisks. - * @param apiKey - The API key to be redacted. If null, the function returns undefined. - * @returns The redacted API key with a fixed prefix and asterisks replacing the rest of the characters. + * Redacts an API key by replacing a portion of it with asterisks. + * + * The function keeps the last `REDACT_API_KEY_REVEAL_COUNT` characters of the API key visible + * and replaces the rest with asterisks, up to a maximum length defined by `REDACT_API_KEY_MAX_LENGTH`. + * * @example * ```typescript * const redactedKey = PublicApiKeyService.redactApiKey('12345-abcdef-67890'); - * console.log(redactedKey); // Output: '12345-*****' + * console.log(redactedKey); // Output: '*****-67890' * ``` */ redactApiKey(apiKey: string) { - const visiblePart = apiKey.slice(0, REDACT_API_KEY_REVEAL_COUNT); - const redactedPart = '*'.repeat(apiKey.length - REDACT_API_KEY_REVEAL_COUNT); - - const completeRedactedApiKey = visiblePart + redactedPart; + const visiblePart = apiKey.slice(-REDACT_API_KEY_REVEAL_COUNT); + const redactedPart = '*'.repeat( + Math.max(0, REDACT_API_KEY_MAX_LENGTH - REDACT_API_KEY_REVEAL_COUNT), + ); - return completeRedactedApiKey.slice(0, REDACT_API_KEY_MAX_LENGTH); + return redactedPart + visiblePart; } getAuthMiddleware(version: string) { diff --git a/packages/cli/src/services/tag.service.ts b/packages/cli/src/services/tag.service.ts index 09695f44ff052..1f6cdd88fe2eb 100644 --- a/packages/cli/src/services/tag.service.ts +++ b/packages/cli/src/services/tag.service.ts @@ -8,6 +8,8 @@ import type { ITagWithCountDb } from '@/interfaces'; type GetAllResult = T extends { withUsageCount: true } ? 
ITagWithCountDb[] : TagEntity[]; +type Action = 'Create' | 'Update'; + @Service() export class TagService { constructor( @@ -24,7 +26,7 @@ export class TagService { async save(tag: TagEntity, actionKind: 'create' | 'update') { await validateEntity(tag); - const action = actionKind[0].toUpperCase() + actionKind.slice(1); + const action = (actionKind[0].toUpperCase() + actionKind.slice(1)) as Action; await this.externalHooks.run(`tag.before${action}`, [tag]); diff --git a/packages/cli/src/workflow-execute-additional-data.ts b/packages/cli/src/workflow-execute-additional-data.ts index c3b3ed869375f..98a736916a211 100644 --- a/packages/cli/src/workflow-execute-additional-data.ts +++ b/packages/cli/src/workflow-execute-additional-data.ts @@ -182,12 +182,8 @@ async function startExecution( runData: IWorkflowExecutionDataProcess, workflowData: IWorkflowBase, ): Promise { - const externalHooks = Container.get(ExternalHooks); - await externalHooks.init(); - const nodeTypes = Container.get(NodeTypes); const activeExecutions = Container.get(ActiveExecutions); - const eventService = Container.get(EventService); const executionRepository = Container.get(ExecutionRepository); const workflowName = workflowData ? workflowData.name : undefined; @@ -209,8 +205,6 @@ async function startExecution( */ await executionRepository.setRunning(executionId); - Container.get(EventService).emit('workflow-pre-execute', { executionId, data: runData }); - let data; try { await Container.get(PermissionChecker).check(workflowData.id, workflowData.nodes); @@ -228,6 +222,7 @@ async function startExecution( runData.executionMode, executionId, workflowData, + additionalData.userId, ); additionalDataIntegrated.executionId = executionId; additionalDataIntegrated.parentCallbackManager = options.parentCallbackManager; @@ -308,15 +303,9 @@ async function startExecution( ); } + const externalHooks = Container.get(ExternalHooks); await externalHooks.run('workflow.postExecute', [data, workflowData, executionId]); - eventService.emit('workflow-post-execute', { - workflow: workflowData, - executionId, - userId: additionalData.userId, - runData: data, - }); - // subworkflow either finished, or is in status waiting due to a wait node, both cases are considered successes here if (data.finished === true || data.status === 'waiting') { // Workflow did finish successfully diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 148df7edcd8bb..da48255faaf0f 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -21,7 +21,6 @@ import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExecutionNotFoundError } from '@/errors/execution-not-found-error'; -import { EventService } from '@/events/event.service'; import { getWorkflowHooksMain, getWorkflowHooksWorkerExecuter, @@ -37,6 +36,8 @@ import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-da import { generateFailedExecutionFromError } from '@/workflow-helpers'; import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; +import { MaxStalledCountError } from './errors/max-stalled-count.error'; + @Service() export class WorkflowRunner { private scalingService: ScalingService; @@ -52,7 +53,6 @@ export class WorkflowRunner { private readonly workflowStaticDataService: WorkflowStaticDataService, private readonly nodeTypes: NodeTypes, private readonly 
permissionChecker: PermissionChecker, - private readonly eventService: EventService, private readonly instanceSettings: InstanceSettings, private readonly manualExecutionService: ManualExecutionService, ) {} @@ -167,7 +167,6 @@ export class WorkflowRunner { await this.enqueueExecution(executionId, data, loadStaticData, realtime); } else { await this.runMainProcess(executionId, data, loadStaticData, restartExecutionId); - this.eventService.emit('workflow-pre-execute', { executionId, data }); } // only run these when not in queue mode or when the execution is manual, @@ -180,24 +179,13 @@ export class WorkflowRunner { const postExecutePromise = this.activeExecutions.getPostExecutePromise(executionId); postExecutePromise .then(async (executionData) => { - this.eventService.emit('workflow-post-execute', { - workflow: data.workflowData, - executionId, - userId: data.userId, - runData: executionData, - }); - if (this.externalHooks.exists('workflow.postExecute')) { - try { - await this.externalHooks.run('workflow.postExecute', [ - executionData, - data.workflowData, - executionId, - ]); - } catch (error) { - this.errorReporter.error(error); - this.logger.error('There was a problem running hook "workflow.postExecute"', error); - } - } + try { + await this.externalHooks.run('workflow.postExecute', [ + executionData, + data.workflowData, + executionId, + ]); + } catch {} }) .catch((error) => { if (error instanceof ExecutionCancelledError) return; @@ -416,6 +404,13 @@ export class WorkflowRunner { try { await job.finished(); } catch (error) { + if ( + error instanceof Error && + error.message.includes('job stalled more than maxStalledCount') + ) { + error = new MaxStalledCountError(error); + } + // We use "getWorkflowHooksWorkerExecuter" as "getWorkflowHooksWorkerMain" does not contain the // "workflowExecuteAfter" which we require. const hooks = getWorkflowHooksWorkerExecuter( @@ -424,6 +419,7 @@ export class WorkflowRunner { data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined }, ); + await this.processError(error, new Date(), data.executionMode, executionId, hooks); reject(error); diff --git a/packages/cli/templates/form-trigger.handlebars b/packages/cli/templates/form-trigger.handlebars index 6bad1a02d8ecc..21e10d86a9ec9 100644 --- a/packages/cli/templates/form-trigger.handlebars +++ b/packages/cli/templates/form-trigger.handlebars @@ -377,6 +377,10 @@ {{/if}} + {{#if isHidden}} + + {{/if}} + {{#if isTextarea}}
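With the workflow-runner.ts changes above, the stalled-job check removed from the scaling service is now applied where `job.finished()` rejects: a Bull error whose message mentions `maxStalledCount` is wrapped in `MaxStalledCountError` before the failure hooks and `processError` run. A self-contained sketch of that translation step, using a stand-in error class rather than the class the patch imports:

```typescript
// Sketch only: demonstrates the message-based error translation from the hunk above.
// MaxStalledCountError is a stand-in here; the patch imports it from './errors/max-stalled-count.error'.
class MaxStalledCountError extends Error {
	constructor(readonly originalError: Error) {
		super('Job stalled more often than the configured maxStalledCount allows.');
	}
}

async function waitForJobCompletion(finished: Promise<unknown>): Promise<void> {
	try {
		await finished;
	} catch (error) {
		if (error instanceof Error && error.message.includes('job stalled more than maxStalledCount')) {
			// Re-throw as a typed error so downstream handlers can report it distinctly.
			throw new MaxStalledCountError(error);
		}
		throw error;
	}
}
```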
diff --git a/packages/cli/templates/oauth-callback.handlebars b/packages/cli/templates/oauth-callback.handlebars index 74d57db3033a1..311e4656fac49 100644 --- a/packages/cli/templates/oauth-callback.handlebars +++ b/packages/cli/templates/oauth-callback.handlebars @@ -1,59 +1,71 @@
Connection successful

diff --git a/packages/cli/test/integration/active-workflow-manager.test.ts b/packages/cli/test/integration/active-workflow-manager.test.ts index 3c98c2a4f1b72..8c502bba94afc 100644 --- a/packages/cli/test/integration/active-workflow-manager.test.ts +++ b/packages/cli/test/integration/active-workflow-manager.test.ts @@ -76,10 +76,7 @@ describe('init()', () => { it('should call external hook', async () => { await activeWorkflowManager.init(); - const [hook, arg] = externalHooks.run.mock.calls[0]; - - expect(hook).toBe('activeWorkflows.initialized'); - expect(arg).toBeEmptyArray(); + expect(externalHooks.run).toHaveBeenCalledWith('activeWorkflows.initialized'); }); it('should check that workflow can be activated', async () => { diff --git a/packages/cli/test/integration/api-keys.api.test.ts b/packages/cli/test/integration/api-keys.api.test.ts index 14050b543aa3a..e1649d4b0b434 100644 --- a/packages/cli/test/integration/api-keys.api.test.ts +++ b/packages/cli/test/integration/api-keys.api.test.ts @@ -1,7 +1,7 @@ +import type { ApiKeyWithRawValue } from '@n8n/api-types'; import { GlobalConfig } from '@n8n/config'; import { Container } from '@n8n/di'; -import type { ApiKey } from '@/databases/entities/api-key'; import type { User } from '@/databases/entities/user'; import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { PublicApiKeyService } from '@/services/public-api-key.service'; @@ -57,9 +57,12 @@ describe('Owner shell', () => { }); test('POST /api-keys should create an api key', async () => { - const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + const newApiKeyResponse = await testServer + .authAgentFor(ownerShell) + .post('/api-keys') + .send({ label: 'My API Key' }); - const newApiKey = newApiKeyResponse.body.data as ApiKey; + const newApiKey = newApiKeyResponse.body.data as ApiKeyWithRawValue; expect(newApiKeyResponse.statusCode).toBe(200); expect(newApiKey).toBeDefined(); @@ -72,31 +75,50 @@ describe('Owner shell', () => { id: expect.any(String), label: 'My API Key', userId: ownerShell.id, - apiKey: newApiKey.apiKey, + apiKey: newApiKey.rawApiKey, createdAt: expect.any(Date), updatedAt: expect.any(Date), }); }); + test('POST /api-keys should fail if max number of API keys reached', async () => { + await testServer.authAgentFor(ownerShell).post('/api-keys').send({ label: 'My API Key' }); + + const secondApiKey = await testServer + .authAgentFor(ownerShell) + .post('/api-keys') + .send({ label: 'My API Key' }); + + expect(secondApiKey.statusCode).toBe(400); + }); + test('GET /api-keys should fetch the api key redacted', async () => { - const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + const newApiKeyResponse = await testServer + .authAgentFor(ownerShell) + .post('/api-keys') + .send({ label: 'My API Key' }); const retrieveAllApiKeysResponse = await testServer.authAgentFor(ownerShell).get('/api-keys'); expect(retrieveAllApiKeysResponse.statusCode).toBe(200); + const redactedApiKey = publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.rawApiKey); + expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ id: newApiKeyResponse.body.data.id, label: 'My API Key', userId: ownerShell.id, - apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), + apiKey: redactedApiKey, createdAt: expect.any(String), updatedAt: expect.any(String), }); }); test('DELETE /api-keys/:id should delete the api key', async () => { - const newApiKeyResponse = await 
testServer.authAgentFor(ownerShell).post('/api-keys'); + const newApiKeyResponse = await testServer + .authAgentFor(ownerShell) + .post('/api-keys') + .send({ label: 'My API Key' }); const deleteApiKeyResponse = await testServer .authAgentFor(ownerShell) @@ -122,7 +144,10 @@ describe('Member', () => { }); test('POST /api-keys should create an api key', async () => { - const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + const newApiKeyResponse = await testServer + .authAgentFor(member) + .post('/api-keys') + .send({ label: 'My API Key' }); expect(newApiKeyResponse.statusCode).toBe(200); expect(newApiKeyResponse.body.data.apiKey).toBeDefined(); @@ -136,35 +161,54 @@ describe('Member', () => { id: expect.any(String), label: 'My API Key', userId: member.id, - apiKey: newApiKeyResponse.body.data.apiKey, + apiKey: newApiKeyResponse.body.data.rawApiKey, createdAt: expect.any(Date), updatedAt: expect.any(Date), }); }); + test('POST /api-keys should fail if max number of API keys reached', async () => { + await testServer.authAgentFor(member).post('/api-keys').send({ label: 'My API Key' }); + + const secondApiKey = await testServer + .authAgentFor(member) + .post('/api-keys') + .send({ label: 'My API Key' }); + + expect(secondApiKey.statusCode).toBe(400); + }); + test('GET /api-keys should fetch the api key redacted', async () => { - const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + const newApiKeyResponse = await testServer + .authAgentFor(member) + .post('/api-keys') + .send({ label: 'My API Key' }); const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/api-keys'); expect(retrieveAllApiKeysResponse.statusCode).toBe(200); + const redactedApiKey = publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.rawApiKey); + expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ id: newApiKeyResponse.body.data.id, label: 'My API Key', userId: member.id, - apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), + apiKey: redactedApiKey, createdAt: expect.any(String), updatedAt: expect.any(String), }); - expect(newApiKeyResponse.body.data.apiKey).not.toEqual( + expect(newApiKeyResponse.body.data.rawApiKey).not.toEqual( retrieveAllApiKeysResponse.body.data[0].apiKey, ); }); test('DELETE /api-keys/:id should delete the api key', async () => { - const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + const newApiKeyResponse = await testServer + .authAgentFor(member) + .post('/api-keys') + .send({ label: 'My API Key' }); const deleteApiKeyResponse = await testServer .authAgentFor(member) diff --git a/packages/cli/test/integration/controllers/oauth/oauth2.api.test.ts b/packages/cli/test/integration/controllers/oauth/oauth2.api.test.ts index 6411966dbf912..f20f9df5508e8 100644 --- a/packages/cli/test/integration/controllers/oauth/oauth2.api.test.ts +++ b/packages/cli/test/integration/controllers/oauth/oauth2.api.test.ts @@ -116,7 +116,7 @@ describe('OAuth2 API', () => { .query({ code: 'auth_code', state }) .expect(200); - expect(renderSpy).toHaveBeenCalledWith('oauth-callback', { imagePath: 'n8n-logo.png' }); + expect(renderSpy).toHaveBeenCalledWith('oauth-callback'); const updatedCredential = await Container.get(CredentialsHelper).getCredentials( credential, diff --git a/packages/cli/test/integration/shared/db/users.ts b/packages/cli/test/integration/shared/db/users.ts index bb4332f9dedfb..88751fd727564 100644 --- a/packages/cli/test/integration/shared/db/users.ts 
+++ b/packages/cli/test/integration/shared/db/users.ts @@ -81,7 +81,9 @@ export async function createUserWithMfaEnabled( } export const addApiKey = async (user: User) => { - return await Container.get(PublicApiKeyService).createPublicApiKeyForUser(user); + return await Container.get(PublicApiKeyService).createPublicApiKeyForUser(user, { + label: randomName(), + }); }; export async function createOwnerWithApiKey() { diff --git a/packages/cli/test/integration/task-runners/js-task-runner-execution.integration.test.ts b/packages/cli/test/integration/task-runners/js-task-runner-execution.integration.test.ts new file mode 100644 index 0000000000000..a69f60d1986c8 --- /dev/null +++ b/packages/cli/test/integration/task-runners/js-task-runner-execution.integration.test.ts @@ -0,0 +1,269 @@ +import { TaskRunnersConfig } from '@n8n/config'; +import { Container } from '@n8n/di'; +import { mock } from 'jest-mock-extended'; +import type { + IExecuteFunctions, + INode, + INodeExecutionData, + INodeParameters, + INodeTypes, + IRunExecutionData, + ITaskDataConnections, + IWorkflowExecuteAdditionalData, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { createEnvProviderState, NodeConnectionType, Workflow } from 'n8n-workflow'; + +import { LocalTaskRequester } from '@/task-runners/task-managers/local-task-requester'; +import { TaskRunnerModule } from '@/task-runners/task-runner-module'; + +/** + * Integration tests for the JS TaskRunner execution. Starts the TaskRunner + * as a child process and executes tasks on it via the broker. + */ +describe('JS TaskRunner execution on internal mode', () => { + const runnerConfig = Container.get(TaskRunnersConfig); + runnerConfig.mode = 'internal'; + runnerConfig.enabled = true; + runnerConfig.port = 45678; + + const taskRunnerModule = Container.get(TaskRunnerModule); + const taskRequester = Container.get(LocalTaskRequester); + + /** + * Sets up task data that includes a workflow with manual trigger and a + * code node with the given JS code. 
The input data is a single item: + * ```json + * { + * "input": "item" + * } + * ``` + */ + const newTaskData = (jsCode: string) => { + const taskSettings = { + code: jsCode, + nodeMode: 'runOnceForAllItems', + workflowMode: 'manual', + continueOnFail: false, + }; + + const codeNode: INode = { + parameters: { + jsCode, + }, + type: 'n8n-nodes-base.code', + typeVersion: 2, + position: [200, 80], + id: 'b35fd455-32e4-4d52-b840-36aa28dd1910', + name: 'Code', + }; + + const workflow = new Workflow({ + id: 'testWorkflow', + name: 'testWorkflow', + nodes: [ + { + parameters: {}, + type: 'n8n-nodes-base.manualTrigger', + typeVersion: 1, + position: [0, 0], + id: 'a39a566a-283a-433e-88bc-b3857aab706f', + name: 'ManualTrigger', + }, + codeNode, + ], + connections: { + ManualTrigger: { + main: [ + [ + { + node: 'Code', + type: NodeConnectionType.Main, + index: 0, + }, + ], + ], + }, + }, + active: true, + nodeTypes: mock(), + }); + + const inputData: INodeExecutionData[] = [ + { + json: { + input: 'item', + }, + }, + ]; + + const inputConnections: ITaskDataConnections = { + main: [inputData], + }; + + const runExecutionData: IRunExecutionData = { + startData: {}, + resultData: { + runData: { + ManualTrigger: [ + { + startTime: Date.now(), + executionTime: 0, + executionStatus: 'success', + source: [], + data: { + main: [inputData], + }, + }, + ], + }, + lastNodeExecuted: 'ManualTrigger', + }, + executionData: { + contextData: {}, + nodeExecutionStack: [], + metadata: {}, + waitingExecution: {}, + waitingExecutionSource: {}, + }, + }; + + return { + additionalData: mock(), + executeFunctions: mock(), + taskSettings, + codeNode, + workflow, + inputData, + inputConnections, + runExecutionData, + envProviderState: createEnvProviderState(), + }; + }; + + const runTaskWithCode = async (jsCode: string) => { + const { + additionalData, + taskSettings, + codeNode, + workflow, + inputData, + inputConnections, + runExecutionData, + executeFunctions, + envProviderState, + } = newTaskData(jsCode); + + return await taskRequester.startTask( + additionalData, + 'javascript', + taskSettings, + executeFunctions, + inputConnections, + codeNode, + workflow, + runExecutionData, + 0, + 0, + codeNode.name, + inputData, + mock(), + mock(), + envProviderState, + ); + }; + + describe('Basic code execution', () => { + beforeAll(async () => { + await taskRunnerModule.start(); + }); + + afterAll(async () => { + await taskRunnerModule.stop(); + }); + + it('should execute a simple JS task', async () => { + // Act + const result = await runTaskWithCode('return [{ hello: "world" }]'); + + // Assert + expect(result).toEqual({ + ok: true, + result: [{ json: { hello: 'world' } }], + }); + }); + }); + + describe('Internal and external libs', () => { + beforeAll(async () => { + process.env.NODE_FUNCTION_ALLOW_BUILTIN = 'crypto'; + process.env.NODE_FUNCTION_ALLOW_EXTERNAL = 'moment'; + await taskRunnerModule.start(); + }); + + afterAll(async () => { + await taskRunnerModule.stop(); + }); + + it('should allow importing allowed internal module', async () => { + // Act + const result = await runTaskWithCode(` + const crypto = require("crypto"); + return [{ + digest: crypto + .createHmac("sha256", Buffer.from("MySecretKey")) + .update("MESSAGE") + .digest("base64") + }] + `); + + expect(result).toEqual({ + ok: true, + result: [{ json: { digest: 'T09DMv7upNDKMD3Ht36FkwzrmWSgWpPiUNlcIX9/yaI=' } }], + }); + }); + + it('should not allow importing disallowed internal module', async () => { + // Act + const result = await runTaskWithCode(` + const 
fs = require("fs"); + return [{ file: fs.readFileSync("test.txt") }] + `); + + expect(result).toEqual({ + ok: false, + error: expect.objectContaining({ + message: "Cannot find module 'fs' [line 2]", + }), + }); + }); + + it('should allow importing allowed external module', async () => { + // Act + const result = await runTaskWithCode(` + const moment = require("moment"); + return [{ time: moment("1995-12-25").format("YYYY-MM-DD") }] + `); + + expect(result).toEqual({ + ok: true, + result: [{ json: { time: '1995-12-25' } }], + }); + }); + + it('should not allow importing disallowed external module', async () => { + // Act + const result = await runTaskWithCode(` + const lodash = require("lodash"); + return [{ obj: lodash.cloneDeep({}) }] + `); + + expect(result).toEqual({ + ok: false, + error: expect.objectContaining({ + message: "Cannot find module 'lodash' [line 2]", + }), + }); + }); + }); +}); diff --git a/packages/cli/test/integration/runners/task-runner-module.external.test.ts b/packages/cli/test/integration/task-runners/task-runner-module.external.test.ts similarity index 100% rename from packages/cli/test/integration/runners/task-runner-module.external.test.ts rename to packages/cli/test/integration/task-runners/task-runner-module.external.test.ts diff --git a/packages/cli/test/integration/runners/task-runner-module.internal.test.ts b/packages/cli/test/integration/task-runners/task-runner-module.internal.test.ts similarity index 100% rename from packages/cli/test/integration/runners/task-runner-module.internal.test.ts rename to packages/cli/test/integration/task-runners/task-runner-module.internal.test.ts diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/task-runners/task-runner-process.test.ts similarity index 100% rename from packages/cli/test/integration/runners/task-runner-process.test.ts rename to packages/cli/test/integration/task-runners/task-runner-process.test.ts diff --git a/packages/cli/test/integration/runners/task-runner-server.test.ts b/packages/cli/test/integration/task-runners/task-runner-server.test.ts similarity index 100% rename from packages/cli/test/integration/runners/task-runner-server.test.ts rename to packages/cli/test/integration/task-runners/task-runner-server.test.ts diff --git a/packages/core/bin/copy-icons b/packages/core/bin/copy-static-files similarity index 56% rename from packages/core/bin/copy-icons rename to packages/core/bin/copy-static-files index bdcb011c25c4c..a178a40816f89 100755 --- a/packages/core/bin/copy-icons +++ b/packages/core/bin/copy-static-files @@ -6,13 +6,18 @@ const { cp } = require('fs/promises'); const { packageDir } = require('./common'); const limiter = pLimit(20); -const icons = glob.sync('{nodes,credentials}/**/*.{png,svg}', { cwd: packageDir }); +const staticFiles = glob.sync( + ['{nodes,credentials}/**/*.{png,svg}', 'nodes/**/__schema__/**/*.json'], + { + cwd: packageDir, + }, +); (async () => { await Promise.all( - icons.map((icon) => + staticFiles.map((path) => limiter(() => { - return cp(icon, `dist/${icon}`, { recursive: true }); + return cp(path, `dist/${path}`, { recursive: true }); }), ), ); diff --git a/packages/core/package.json b/packages/core/package.json index ec0c5e3c6c82f..151147a67648c 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,11 +1,11 @@ { "name": "n8n-core", - "version": "1.76.0", + "version": "1.77.0", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", "bin": { - 
"n8n-copy-icons": "./bin/copy-icons", + "n8n-copy-static-files": "./bin/copy-static-files", "n8n-generate-translations": "./bin/generate-translations", "n8n-generate-metadata": "./bin/generate-metadata" }, diff --git a/packages/core/src/__tests__/credentials.test.ts b/packages/core/src/__tests__/credentials.test.ts index 366477def7fcf..09b7586daf9eb 100644 --- a/packages/core/src/__tests__/credentials.test.ts +++ b/packages/core/src/__tests__/credentials.test.ts @@ -1,13 +1,18 @@ import { Container } from '@n8n/di'; import { mock } from 'jest-mock-extended'; import type { CredentialInformation } from 'n8n-workflow'; +import { AssertionError } from 'node:assert'; +import { CREDENTIAL_ERRORS } from '@/constants'; import { Cipher } from '@/encryption/cipher'; import type { InstanceSettings } from '@/instance-settings'; import { Credentials } from '../credentials'; describe('Credentials', () => { + const nodeCredentials = { id: '123', name: 'Test Credential' }; + const credentialType = 'testApi'; + const cipher = new Cipher(mock({ encryptionKey: 'password' })); Container.set(Cipher, cipher); @@ -24,7 +29,7 @@ describe('Credentials', () => { describe('without nodeType set', () => { test('should be able to set and read key data without initial data set', () => { - const credentials = new Credentials({ id: null, name: 'testName' }, 'testType'); + const credentials = new Credentials(nodeCredentials, credentialType); const key = 'key1'; const newData = 1234; @@ -41,11 +46,7 @@ describe('Credentials', () => { const initialData = 4321; const initialDataEncoded = 'U2FsdGVkX1+0baznXt+Ag/ub8A2kHLyoLxn/rR9h4XQ='; - const credentials = new Credentials( - { id: null, name: 'testName' }, - 'testType', - initialDataEncoded, - ); + const credentials = new Credentials(nodeCredentials, credentialType, initialDataEncoded); const newData = 1234; @@ -57,4 +58,64 @@ describe('Credentials', () => { expect(credentials.getData().key1).toEqual(initialData); }); }); + + describe('getData', () => { + test('should throw an error when data is missing', () => { + const credentials = new Credentials(nodeCredentials, credentialType); + credentials.data = undefined; + + expect(() => credentials.getData()).toThrow(CREDENTIAL_ERRORS.NO_DATA); + }); + + test('should throw an error when decryption fails', () => { + const credentials = new Credentials(nodeCredentials, credentialType); + credentials.data = '{"key": "already-decrypted-credentials-data" }'; + + expect(() => credentials.getData()).toThrow(CREDENTIAL_ERRORS.DECRYPTION_FAILED); + + try { + credentials.getData(); + } catch (error) { + expect(error.constructor.name).toBe('CredentialDataError'); + expect(error.extra).toEqual({ ...nodeCredentials, type: credentialType }); + expect((error.cause.code as string).startsWith('ERR_OSSL_')).toBe(true); + } + }); + + test('should throw an error when JSON parsing fails', () => { + const credentials = new Credentials(nodeCredentials, credentialType); + credentials.data = cipher.encrypt('invalid-json-string'); + + expect(() => credentials.getData()).toThrow(CREDENTIAL_ERRORS.INVALID_JSON); + + try { + credentials.getData(); + } catch (error) { + expect(error.constructor.name).toBe('CredentialDataError'); + expect(error.extra).toEqual({ ...nodeCredentials, type: credentialType }); + expect(error.cause).toBeInstanceOf(SyntaxError); + expect(error.cause.message).toMatch('Unexpected token '); + } + }); + + test('should successfully decrypt and parse valid JSON credentials', () => { + const credentials = new Credentials(nodeCredentials, 
credentialType); + credentials.setData({ username: 'testuser', password: 'testpass' }); + + const decryptedData = credentials.getData(); + expect(decryptedData.username).toBe('testuser'); + expect(decryptedData.password).toBe('testpass'); + }); + }); + + describe('setData', () => { + test.each<{}>([[123], [null], [undefined]])( + 'should throw an AssertionError when data is %s', + (data) => { + const credentials = new Credentials<{}>(nodeCredentials, credentialType); + + expect(() => credentials.setData(data)).toThrow(AssertionError); + }, + ); + }); }); diff --git a/packages/core/src/__tests__/node-execute-functions.test.ts b/packages/core/src/__tests__/node-execute-functions.test.ts index a1c41355e698b..0feedf0bd14fb 100644 --- a/packages/core/src/__tests__/node-execute-functions.test.ts +++ b/packages/core/src/__tests__/node-execute-functions.test.ts @@ -474,7 +474,7 @@ describe('NodeExecuteFunctions', () => { body: 'Not Found', headers: {}, statusCode: 404, - statusMessage: null, + statusMessage: 'Not Found', }); expect(hooks.executeHookFunctions).toHaveBeenCalledWith('nodeFetchedData', [ workflow.id, diff --git a/packages/core/src/binary-data/object-store/__tests__/object-store.service.test.ts b/packages/core/src/binary-data/object-store/__tests__/object-store.service.test.ts index f5d2924eb55f8..ca8f4c0c9463f 100644 --- a/packages/core/src/binary-data/object-store/__tests__/object-store.service.test.ts +++ b/packages/core/src/binary-data/object-store/__tests__/object-store.service.test.ts @@ -1,3 +1,4 @@ +import type { S3Config } from '@n8n/config'; import axios from 'axios'; import { mock } from 'jest-mock-extended'; import { Readable } from 'stream'; @@ -18,6 +19,12 @@ const mockError = new Error('Something went wrong!'); const fileId = 'workflows/ObogjVbqpNOQpiyV/executions/999/binary_data/71f6209b-5d48-41a2-a224-80d529d8bb32'; const mockBuffer = Buffer.from('Test data'); +const s3Config = mock({ + host: mockHost, + bucket: mockBucket, + credentials: mockCredentials, + protocol: 'https', +}); const toDeletionXml = (filename: string) => ` ${filename} @@ -25,10 +32,13 @@ const toDeletionXml = (filename: string) => ` let objectStoreService: ObjectStoreService; +const now = new Date('2024-02-01T01:23:45.678Z'); +jest.useFakeTimers({ now }); + beforeEach(async () => { - objectStoreService = new ObjectStoreService(mock()); + objectStoreService = new ObjectStoreService(mock(), s3Config); mockAxios.request.mockResolvedValueOnce({ status: 200 }); // for checkConnection - await objectStoreService.init(mockHost, mockBucket, mockCredentials); + await objectStoreService.init(); jest.restoreAllMocks(); }); @@ -40,17 +50,17 @@ describe('checkConnection()', () => { await objectStoreService.checkConnection(); - expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'HEAD', - url: `https://${mockHost}/${mockBucket.name}`, - headers: expect.objectContaining({ - 'X-Amz-Content-Sha256': expect.any(String), - 'X-Amz-Date': expect.any(String), - Authorization: expect.any(String), - }), - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'HEAD', + url: 'https://s3.us-east-1.amazonaws.com/test-bucket', + headers: { + Host: 's3.us-east-1.amazonaws.com', + 'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', + 'X-Amz-Date': '20240201T012345Z', + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, 
Signature=a5240c11a706e9e6c60e7033a848fc934911b12330e5a4609b0b943f97d9781b', + }, + }); }); it('should throw an error on request failure', async () => { @@ -70,18 +80,17 @@ describe('getMetadata()', () => { await objectStoreService.getMetadata(fileId); - expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'HEAD', - url: `${mockUrl}/${fileId}`, - headers: expect.objectContaining({ - Host: mockHost, - 'X-Amz-Content-Sha256': expect.any(String), - 'X-Amz-Date': expect.any(String), - Authorization: expect.any(String), - }), - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'HEAD', + url: `${mockUrl}/${fileId}`, + headers: { + Host: mockHost, + 'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', + 'X-Amz-Date': '20240201T012345Z', + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=60e11c39580ad7dd3a3d549523e7115cdff018540f24c6412ed40053e52a21d0', + }, + }); }); it('should throw an error on request failure', async () => { @@ -101,19 +110,22 @@ describe('put()', () => { await objectStoreService.put(fileId, mockBuffer, metadata); - expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'PUT', - url: `${mockUrl}/${fileId}`, - headers: expect.objectContaining({ - 'Content-Length': mockBuffer.length, - 'Content-MD5': expect.any(String), - 'x-amz-meta-filename': metadata.fileName, - 'Content-Type': metadata.mimeType, - }), - data: mockBuffer, - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'PUT', + url: 'https://s3.us-east-1.amazonaws.com/test-bucket/workflows/ObogjVbqpNOQpiyV/executions/999/binary_data/71f6209b-5d48-41a2-a224-80d529d8bb32', + headers: { + 'Content-Length': 9, + 'Content-MD5': 'yh6gLBC3w39CW5t92G1eEQ==', + 'x-amz-meta-filename': 'file.txt', + 'Content-Type': 'text/plain', + Host: 's3.us-east-1.amazonaws.com', + 'X-Amz-Content-Sha256': 'e27c8214be8b7cf5bccc7c08247e3cb0c1514a48ee1f63197fe4ef3ef51d7e6f', + 'X-Amz-Date': '20240201T012345Z', + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-meta-filename, Signature=6b0fbb51a35dbfa73ac79a964ffc7203b40517a062efc5b01f5f9b7ad553fa7a', + }, + data: mockBuffer, + }); }); it('should block if read-only', async () => { @@ -152,13 +164,18 @@ describe('get()', () => { const result = await objectStoreService.get(fileId, { mode: 'buffer' }); - expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'GET', - url: `${mockUrl}/${fileId}`, - responseType: 'arraybuffer', - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'GET', + url: `${mockUrl}/${fileId}`, + responseType: 'arraybuffer', + headers: { + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=5f69680786e0ad9f0a0324eb5e4b8fe8c78562afc924489ea423632a2ad2187d', + Host: 's3.us-east-1.amazonaws.com', + 'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', + 'X-Amz-Date': '20240201T012345Z', + }, + }); expect(Buffer.isBuffer(result)).toBe(true); }); @@ -168,13 +185,18 @@ describe('get()', () => { const result = await objectStoreService.get(fileId, { mode: 'stream' }); - 
expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'GET', - url: `${mockUrl}/${fileId}`, - responseType: 'stream', - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'GET', + url: `${mockUrl}/${fileId}`, + responseType: 'stream', + headers: { + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=3ef579ebe2ae89303a89c0faf3ce8ef8e907295dc538d59e95bcf35481c0d03e', + Host: 's3.us-east-1.amazonaws.com', + 'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', + 'X-Amz-Date': '20240201T012345Z', + }, + }); expect(result instanceof Readable).toBe(true); }); @@ -194,12 +216,17 @@ describe('deleteOne()', () => { await objectStoreService.deleteOne(fileId); - expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'DELETE', - url: `${mockUrl}/${fileId}`, - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'DELETE', + url: `${mockUrl}/${fileId}`, + headers: { + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=4ad61b1b4da335c6c49772d28e54a301f787d199c9403055b217f890f7aec7fc', + Host: 's3.us-east-1.amazonaws.com', + 'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', + 'X-Amz-Date': '20240201T012345Z', + }, + }); }); it('should throw an error on request failure', async () => { @@ -232,19 +259,21 @@ describe('deleteMany()', () => { await objectStoreService.deleteMany(prefix); - expect(objectStoreService.list).toHaveBeenCalledWith(prefix); - expect(mockAxios.request).toHaveBeenCalledWith( - expect.objectContaining({ - method: 'POST', - url: `${mockUrl}/?delete`, - headers: expect.objectContaining({ - 'Content-Type': 'application/xml', - 'Content-Length': expect.any(Number), - 'Content-MD5': expect.any(String), - }), - data: toDeletionXml(fileName), - }), - ); + expect(mockAxios.request).toHaveBeenCalledWith({ + method: 'POST', + url: `${mockUrl}?delete=`, + headers: { + 'Content-Type': 'application/xml', + 'Content-Length': 55, + 'Content-MD5': 'ybYDrpQxwYvNIGBQs7PJNA==', + Host: 's3.us-east-1.amazonaws.com', + 'X-Amz-Content-Sha256': '5708e5c935cb75eb528e41ef1548e08b26c5b3b7504b67dc911abc1ff1881f76', + 'X-Amz-Date': '20240201T012345Z', + Authorization: + 'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=039168f10927b31624f3a5edae8eb4c89405f7c594eb2d6e00257c1462363f99', + }, + data: toDeletionXml(fileName), + }); }); it('should not send a deletion request if no prefix match', async () => { diff --git a/packages/core/src/binary-data/object-store/object-store.service.ee.ts b/packages/core/src/binary-data/object-store/object-store.service.ee.ts index 508477d50e6f9..5561bd61dbc24 100644 --- a/packages/core/src/binary-data/object-store/object-store.service.ee.ts +++ b/packages/core/src/binary-data/object-store/object-store.service.ee.ts @@ -1,6 +1,7 @@ +import { S3Config } from '@n8n/config'; import { Service } from '@n8n/di'; import { sign } from 'aws4'; -import type { Request as Aws4Options, Credentials as Aws4Credentials } from 'aws4'; +import type { Request as Aws4Options } from 'aws4'; import axios from 'axios'; import type { AxiosRequestConfig, AxiosResponse, 
InternalAxiosRequestConfig, Method } from 'axios'; import { ApplicationError } from 'n8n-workflow'; @@ -9,43 +10,41 @@ import type { Readable } from 'stream'; import { Logger } from '@/logging/logger'; -import type { - Bucket, - ConfigSchemaCredentials, - ListPage, - MetadataResponseHeaders, - RawListPage, - RequestOptions, -} from './types'; +import type { ListPage, MetadataResponseHeaders, RawListPage, RequestOptions } from './types'; import { isStream, parseXml, writeBlockedMessage } from './utils'; import type { BinaryData } from '../types'; @Service() export class ObjectStoreService { - private host = ''; - - private bucket: Bucket = { region: '', name: '' }; - - private credentials: Aws4Credentials = { accessKeyId: '', secretAccessKey: '' }; + private baseUrl: URL; private isReady = false; private isReadOnly = false; - constructor(private readonly logger: Logger) {} + constructor( + private readonly logger: Logger, + private readonly s3Config: S3Config, + ) { + const { host, bucket, protocol } = s3Config; - async init(host: string, bucket: Bucket, credentials: ConfigSchemaCredentials) { - this.host = host; - this.bucket.name = bucket.name; - this.bucket.region = bucket.region; + if (host === '') { + throw new ApplicationError( + 'External storage host not configured. Please set `N8N_EXTERNAL_STORAGE_S3_HOST`.', + ); + } - this.credentials = { - accessKeyId: credentials.accessKey, - secretAccessKey: credentials.accessSecret, - }; + if (bucket.name === '') { + throw new ApplicationError( + 'External storage bucket name not configured. Please set `N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME`.', + ); + } - await this.checkConnection(); + this.baseUrl = new URL(`${protocol}://${host}/${bucket.name}`); + } + async init() { + await this.checkConnection(); this.setReady(true); } @@ -65,7 +64,7 @@ export class ObjectStoreService { async checkConnection() { if (this.isReady) return; - return await this.request('HEAD', this.host, this.bucket.name); + return await this.request('HEAD', ''); } /** @@ -84,9 +83,7 @@ export class ObjectStoreService { if (metadata.fileName) headers['x-amz-meta-filename'] = metadata.fileName; if (metadata.mimeType) headers['Content-Type'] = metadata.mimeType; - const path = `/${this.bucket.name}/${filename}`; - - return await this.request('PUT', this.host, path, { headers, body: buffer }); + return await this.request('PUT', filename, { headers, body: buffer }); } /** @@ -97,9 +94,7 @@ export class ObjectStoreService { async get(fileId: string, { mode }: { mode: 'buffer' }): Promise; async get(fileId: string, { mode }: { mode: 'stream' }): Promise; async get(fileId: string, { mode }: { mode: 'stream' | 'buffer' }) { - const path = `${this.bucket.name}/${fileId}`; - - const { data } = await this.request('GET', this.host, path, { + const { data } = await this.request('GET', fileId, { responseType: mode === 'buffer' ? 
'arraybuffer' : 'stream', }); @@ -116,9 +111,7 @@ export class ObjectStoreService { * @doc https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html */ async getMetadata(fileId: string) { - const path = `${this.bucket.name}/${fileId}`; - - const response = await this.request('HEAD', this.host, path); + const response = await this.request('HEAD', fileId); return response.headers as MetadataResponseHeaders; } @@ -129,9 +122,7 @@ export class ObjectStoreService { * @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html */ async deleteOne(fileId: string) { - const path = `${this.bucket.name}/${fileId}`; - - return await this.request('DELETE', this.host, path); + return await this.request('DELETE', fileId); } /** @@ -154,9 +145,7 @@ export class ObjectStoreService { 'Content-MD5': createHash('md5').update(body).digest('base64'), }; - const path = `${this.bucket.name}/?delete`; - - return await this.request('POST', this.host, path, { headers, body }); + return await this.request('POST', '', { headers, body, qs: { delete: '' } }); } /** @@ -192,7 +181,7 @@ export class ObjectStoreService { if (nextPageToken) qs['continuation-token'] = nextPageToken; - const { data } = await this.request('GET', this.host, this.bucket.name, { qs }); + const { data } = await this.request('GET', '', { qs }); if (typeof data !== 'string') { throw new TypeError(`Expected XML string but received ${typeof data}`); @@ -215,18 +204,6 @@ export class ObjectStoreService { return page as ListPage; } - private toPath(rawPath: string, qs?: Record) { - const path = rawPath.startsWith('/') ? rawPath : `/${rawPath}`; - - if (!qs) return path; - - const qsParams = Object.entries(qs) - .map(([key, value]) => `${key}=${value}`) - .join('&'); - - return path.concat(`?${qsParams}`); - } - private async blockWrite(filename: string): Promise { const logMessage = writeBlockedMessage(filename); @@ -243,28 +220,37 @@ export class ObjectStoreService { private async request( method: Method, - host: string, rawPath = '', { qs, headers, body, responseType }: RequestOptions = {}, ) { - const path = this.toPath(rawPath, qs); + const url = new URL(this.baseUrl); + if (rawPath && rawPath !== '/') { + url.pathname = `${url.pathname}/${rawPath}`; + } + Object.entries(qs ?? 
{}).forEach(([key, value]) => { + url.searchParams.set(key, String(value)); + }); const optionsToSign: Aws4Options = { method, service: 's3', - region: this.bucket.region, - host, - path, + region: this.s3Config.bucket.region, + host: this.s3Config.host, + path: `${url.pathname}${url.search}`, }; if (headers) optionsToSign.headers = headers; if (body) optionsToSign.body = body; - const signedOptions = sign(optionsToSign, this.credentials); + const { accessKey, accessSecret } = this.s3Config.credentials; + const signedOptions = sign(optionsToSign, { + accessKeyId: accessKey, + secretAccessKey: accessSecret, + }); const config: AxiosRequestConfig = { method, - url: `https://${host}${path}`, + url: url.toString(), headers: signedOptions.headers, }; diff --git a/packages/core/src/binary-data/object-store/types.ts b/packages/core/src/binary-data/object-store/types.ts index 49726f5c43c0d..20390cf243ca8 100644 --- a/packages/core/src/binary-data/object-store/types.ts +++ b/packages/core/src/binary-data/object-store/types.ts @@ -24,8 +24,6 @@ type Item = { export type ListPage = Omit & { contents: Item[] }; -export type Bucket = { region: string; name: string }; - export type RequestOptions = { qs?: Record; headers?: Record; @@ -38,5 +36,3 @@ export type MetadataResponseHeaders = AxiosResponseHeaders & { 'content-type'?: string; 'x-amz-meta-filename'?: string; } & BinaryData.PreWriteMetadata; - -export type ConfigSchemaCredentials = { accessKey: string; accessSecret: string }; diff --git a/packages/core/src/constants.ts b/packages/core/src/constants.ts index feb630e306f55..bd44e25d1003f 100644 --- a/packages/core/src/constants.ts +++ b/packages/core/src/constants.ts @@ -14,3 +14,11 @@ export const CONFIG_FILES = 'N8N_CONFIG_FILES'; export const BINARY_DATA_STORAGE_PATH = 'N8N_BINARY_DATA_STORAGE_PATH'; export const UM_EMAIL_TEMPLATES_INVITE = 'N8N_UM_EMAIL_TEMPLATES_INVITE'; export const UM_EMAIL_TEMPLATES_PWRESET = 'N8N_UM_EMAIL_TEMPLATES_PWRESET'; + +export const CREDENTIAL_ERRORS = { + NO_DATA: 'No data is set on this credentials.', + DECRYPTION_FAILED: + 'Credentials could not be decrypted. 
The likely reason is that a different "encryptionKey" was used to encrypt the data.', + INVALID_JSON: 'Decrypted credentials data is not valid JSON.', + INVALID_DATA: 'Credentials data is not in a valid format.', +}; diff --git a/packages/core/src/credentials.ts b/packages/core/src/credentials.ts index 9b5b4c14553dc..f2d10df156af1 100644 --- a/packages/core/src/credentials.ts +++ b/packages/core/src/credentials.ts @@ -1,8 +1,20 @@ import { Container } from '@n8n/di'; import type { ICredentialDataDecryptedObject, ICredentialsEncrypted } from 'n8n-workflow'; import { ApplicationError, ICredentials, jsonParse } from 'n8n-workflow'; +import * as a from 'node:assert'; +import { CREDENTIAL_ERRORS } from '@/constants'; import { Cipher } from '@/encryption/cipher'; +import { isObjectLiteral } from '@/utils'; + +export class CredentialDataError extends ApplicationError { + constructor({ name, type, id }: Credentials, message: string, cause?: unknown) { + super(message, { + extra: { name, type, id }, + cause, + }); + } +} export class Credentials< T extends object = ICredentialDataDecryptedObject, @@ -13,6 +25,8 @@ export class Credentials< * Sets new credential object */ setData(data: T): void { + a.ok(isObjectLiteral(data)); + this.data = this.cipher.encrypt(data); } @@ -21,17 +35,20 @@ export class Credentials< */ getData(): T { if (this.data === undefined) { - throw new ApplicationError('No data is set so nothing can be returned.'); + throw new CredentialDataError(this, CREDENTIAL_ERRORS.NO_DATA); } + let decryptedData: string; try { - const decryptedData = this.cipher.decrypt(this.data); + decryptedData = this.cipher.decrypt(this.data); + } catch (cause) { + throw new CredentialDataError(this, CREDENTIAL_ERRORS.DECRYPTION_FAILED, cause); + } + try { return jsonParse(decryptedData); - } catch (e) { - throw new ApplicationError( - 'Credentials could not be decrypted. 
The likely reason is that a different "encryptionKey" was used to encrypt the data.', - ); + } catch (cause) { + throw new CredentialDataError(this, CREDENTIAL_ERRORS.INVALID_JSON, cause); } } diff --git a/packages/core/src/execution-engine/node-execution-context/local-load-options-context.ts b/packages/core/src/execution-engine/node-execution-context/local-load-options-context.ts index 39456ff966808..dd96ab5f74174 100644 --- a/packages/core/src/execution-engine/node-execution-context/local-load-options-context.ts +++ b/packages/core/src/execution-engine/node-execution-context/local-load-options-context.ts @@ -35,6 +35,8 @@ export class LocalLoadOptionsContext implements ILocalLoadOptionsFunctions { if (selectedWorkflowNode) { const selectedSingleNodeWorkflow = new Workflow({ + id: dbWorkflow.id, + name: dbWorkflow.name, nodes: [selectedWorkflowNode], connections: {}, active: false, diff --git a/packages/core/src/execution-engine/node-execution-context/utils/__tests__/execution-metadata.test.ts b/packages/core/src/execution-engine/node-execution-context/utils/__tests__/execution-metadata.test.ts index c673a2fcfe8b0..b08e38da5bdbf 100644 --- a/packages/core/src/execution-engine/node-execution-context/utils/__tests__/execution-metadata.test.ts +++ b/packages/core/src/execution-engine/node-execution-context/utils/__tests__/execution-metadata.test.ts @@ -205,15 +205,12 @@ describe('Execution Metadata functions', () => { }, } as IRunExecutionData; - setWorkflowExecutionMetadata( - executionData, - 'test1', - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab', - ); + const longValue = 'a'.repeat(513); + + setWorkflowExecutionMetadata(executionData, 'test1', longValue); expect(metadata).toEqual({ - test1: - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + test1: longValue.slice(0, 512), }); }); }); diff --git a/packages/core/src/execution-engine/node-execution-context/utils/create-node-as-tool.ts b/packages/core/src/execution-engine/node-execution-context/utils/create-node-as-tool.ts index da34b377dfc4f..9f1e707a67436 100644 --- a/packages/core/src/execution-engine/node-execution-context/utils/create-node-as-tool.ts +++ b/packages/core/src/execution-engine/node-execution-context/utils/create-node-as-tool.ts @@ -1,411 +1,133 @@ import { DynamicStructuredTool } from '@langchain/core/tools'; -import type { IDataObject, INode, INodeType } from 'n8n-workflow'; -import { jsonParse, NodeOperationError } from 'n8n-workflow'; +import { generateZodSchema, NodeOperationError, traverseNodeParameters } from 'n8n-workflow'; +import type { IDataObject, INode, INodeType, FromAIArgument } from 'n8n-workflow'; import { z } from 'zod'; -type AllowedTypes = 'string' | 'number' | 'boolean' | 'json'; -interface FromAIArgument { - key: string; - description?: string; - type?: AllowedTypes; - defaultValue?: string | number | boolean | Record; -} - -type ParserOptions = { +export type CreateNodeAsToolOptions = { node: INode; nodeType: INodeType; handleToolInvocation: (toolArgs: IDataObject) => Promise; }; -// This file is temporarily duplicated in 
`packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/utils/FromAIParser.ts` -// Please apply any changes in both files - /** - * AIParametersParser + * Retrieves and validates the Zod schema for the tool. + * + * This method: + * 1. Collects all $fromAI arguments from node parameters + * 2. Validates parameter keys against naming rules + * 3. Checks for duplicate keys and ensures consistency + * 4. Generates a Zod schema from the validated arguments * - * This class encapsulates the logic for parsing node parameters, extracting $fromAI calls, - * generating Zod schemas, and creating LangChain tools. + * @throws {NodeOperationError} When parameter keys are invalid or when duplicate keys have inconsistent definitions + * @returns {z.ZodObject} A Zod schema object representing the structure and validation rules for the node parameters */ -class AIParametersParser { - /** - * Constructs an instance of AIParametersParser. - */ - constructor(private readonly options: ParserOptions) {} - - /** - * Generates a Zod schema based on the provided FromAIArgument placeholder. - * @param placeholder The FromAIArgument object containing key, type, description, and defaultValue. - * @returns A Zod schema corresponding to the placeholder's type and constraints. - */ - private generateZodSchema(placeholder: FromAIArgument): z.ZodTypeAny { - let schema: z.ZodTypeAny; - - switch (placeholder.type?.toLowerCase()) { - case 'string': - schema = z.string(); - break; - case 'number': - schema = z.number(); - break; - case 'boolean': - schema = z.boolean(); - break; - case 'json': - schema = z.record(z.any()); - break; - default: - schema = z.string(); - } - - if (placeholder.description) { - schema = schema.describe(`${schema.description ?? ''} ${placeholder.description}`.trim()); - } - - if (placeholder.defaultValue !== undefined) { - schema = schema.default(placeholder.defaultValue); - } - - return schema; +function getSchema(node: INode) { + const collectedArguments: FromAIArgument[] = []; + try { + traverseNodeParameters(node.parameters, collectedArguments); + } catch (error) { + throw new NodeOperationError(node, error as Error); } - /** - * Recursively traverses the nodeParameters object to find all $fromAI calls. - * @param payload The current object or value being traversed. - * @param collectedArgs The array collecting FromAIArgument objects. - */ - private traverseNodeParameters(payload: unknown, collectedArgs: FromAIArgument[]) { - if (typeof payload === 'string') { - const fromAICalls = this.extractFromAICalls(payload); - fromAICalls.forEach((call) => collectedArgs.push(call)); - } else if (Array.isArray(payload)) { - payload.forEach((item: unknown) => this.traverseNodeParameters(item, collectedArgs)); - } else if (typeof payload === 'object' && payload !== null) { - Object.values(payload).forEach((value) => this.traverseNodeParameters(value, collectedArgs)); + // Validate each collected argument + const nameValidationRegex = /^[a-zA-Z0-9_-]{1,64}$/; + const keyMap = new Map(); + for (const argument of collectedArguments) { + if (argument.key.length === 0 || !nameValidationRegex.test(argument.key)) { + const isEmptyError = 'You must specify a key when using $fromAI()'; + const isInvalidError = `Parameter key \`${argument.key}\` is invalid`; + const error = new Error(argument.key.length === 0 ? 
isEmptyError : isInvalidError); + throw new NodeOperationError(node, error, { + description: + 'Invalid parameter key, must be between 1 and 64 characters long and only contain letters, numbers, underscores, and hyphens', + }); } - } - - /** - * Extracts all $fromAI calls from a given string - * @param str The string to search for $fromAI calls. - * @returns An array of FromAIArgument objects. - * - * This method uses a regular expression to find the start of each $fromAI function call - * in the input string. It then employs a character-by-character parsing approach to - * accurately extract the arguments of each call, handling nested parentheses and quoted strings. - * - * The parsing process: - * 1. Finds the starting position of a $fromAI call using regex. - * 2. Iterates through characters, keeping track of parentheses depth and quote status. - * 3. Handles escaped characters within quotes to avoid premature quote closing. - * 4. Builds the argument string until the matching closing parenthesis is found. - * 5. Parses the extracted argument string into a FromAIArgument object. - * 6. Repeats the process for all $fromAI calls in the input string. - * - */ - private extractFromAICalls(str: string): FromAIArgument[] { - const args: FromAIArgument[] = []; - // Regular expression to match the start of a $fromAI function call - const pattern = /\$fromAI\s*\(\s*/gi; - let match: RegExpExecArray | null; - - while ((match = pattern.exec(str)) !== null) { - const startIndex = match.index + match[0].length; - let current = startIndex; - let inQuotes = false; - let quoteChar = ''; - let parenthesesCount = 1; - let argsString = ''; - - // Parse the arguments string, handling nested parentheses and quotes - while (current < str.length && parenthesesCount > 0) { - const char = str[current]; - if (inQuotes) { - // Handle characters inside quotes, including escaped characters - if (char === '\\' && current + 1 < str.length) { - argsString += char + str[current + 1]; - current += 2; - continue; - } + if (keyMap.has(argument.key)) { + // If the key already exists in the Map + const existingArg = keyMap.get(argument.key)!; - if (char === quoteChar) { - inQuotes = false; - quoteChar = ''; - } - argsString += char; - } else { - // Handle characters outside quotes - if (['"', "'", '`'].includes(char)) { - inQuotes = true; - quoteChar = char; - } else if (char === '(') { - parenthesesCount++; - } else if (char === ')') { - parenthesesCount--; - } - - // Only add characters if we're still inside the main parentheses - if (parenthesesCount > 0 || char !== ')') { - argsString += char; - } - } - - current++; - } - - // If parentheses are balanced, parse the arguments - if (parenthesesCount === 0) { - try { - const parsedArgs = this.parseArguments(argsString); - args.push(parsedArgs); - } catch (error) { - // If parsing fails, throw an ApplicationError with details - throw new NodeOperationError( - this.options.node, - `Failed to parse $fromAI arguments: ${argsString}: ${error}`, - ); - } - } else { - // Log an error if parentheses are unbalanced + // Check if the existing argument has the same description and type + if (existingArg.description !== argument.description || existingArg.type !== argument.type) { + // If not, throw an error for inconsistent duplicate keys throw new NodeOperationError( - this.options.node, - `Unbalanced parentheses while parsing $fromAI call: ${str.slice(startIndex)}`, + node, + `Duplicate key '${argument.key}' found with different description or type`, + { + description: + 'Ensure 
all $fromAI() calls with the same key have consistent descriptions and types', + }, ); } + // If the duplicate key has consistent description and type, it's allowed (no action needed) + } else { + // If the key doesn't exist in the Map, add it + keyMap.set(argument.key, argument); } - - return args; } - /** - * Parses the arguments of a single $fromAI function call. - * @param argsString The string containing the function arguments. - * @returns A FromAIArgument object. - */ - private parseArguments(argsString: string): FromAIArgument { - // Split arguments by commas not inside quotes - const args: string[] = []; - let currentArg = ''; - let inQuotes = false; - let quoteChar = ''; - let escapeNext = false; - - for (let i = 0; i < argsString.length; i++) { - const char = argsString[i]; - - if (escapeNext) { - currentArg += char; - escapeNext = false; - continue; - } - - if (char === '\\') { - escapeNext = true; - continue; - } - - if (['"', "'", '`'].includes(char)) { - if (!inQuotes) { - inQuotes = true; - quoteChar = char; - currentArg += char; - } else if (char === quoteChar) { - inQuotes = false; - quoteChar = ''; - currentArg += char; - } else { - currentArg += char; - } - continue; - } - - if (char === ',' && !inQuotes) { - args.push(currentArg.trim()); - currentArg = ''; - continue; - } + // Remove duplicate keys, latest occurrence takes precedence + const uniqueArgsMap = collectedArguments.reduce((map, arg) => { + map.set(arg.key, arg); + return map; + }, new Map()); - currentArg += char; - } + const uniqueArguments = Array.from(uniqueArgsMap.values()); - if (currentArg) { - args.push(currentArg.trim()); - } + // Generate Zod schema from unique arguments + const schemaObj = uniqueArguments.reduce((acc: Record, placeholder) => { + acc[placeholder.key] = generateZodSchema(placeholder); + return acc; + }, {}); - // Remove surrounding quotes if present - const cleanArgs = args.map((arg) => { - const trimmed = arg.trim(); - if ( - (trimmed.startsWith("'") && trimmed.endsWith("'")) || - (trimmed.startsWith('`') && trimmed.endsWith('`')) || - (trimmed.startsWith('"') && trimmed.endsWith('"')) - ) { - return trimmed - .slice(1, -1) - .replace(/\\'/g, "'") - .replace(/\\`/g, '`') - .replace(/\\"/g, '"') - .replace(/\\\\/g, '\\'); - } - return trimmed; - }); - - const type = cleanArgs?.[2] || 'string'; + return z.object(schemaObj).required(); +} - if (!['string', 'number', 'boolean', 'json'].includes(type.toLowerCase())) { - throw new NodeOperationError(this.options.node, `Invalid type: ${type}`); +/** + * Generates a description for a node based on the provided parameters. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns A string description for the node. + */ +function makeDescription(node: INode, nodeType: INodeType): string { + const manualDescription = node.parameters.toolDescription as string; + + if (node.parameters.descriptionType === 'auto') { + const resource = node.parameters.resource as string; + const operation = node.parameters.operation as string; + let description = nodeType.description.description; + if (resource) { + description += `\n Resource: ${resource}`; } - - return { - key: cleanArgs[0] || '', - description: cleanArgs[1], - type: (cleanArgs?.[2] ?? 'string') as AllowedTypes, - defaultValue: this.parseDefaultValue(cleanArgs[3]), - }; - } - - /** - * Parses the default value, preserving its original type. - * @param value The default value as a string. - * @returns The parsed default value in its appropriate type. 
- */ - private parseDefaultValue( - value: string | undefined, - ): string | number | boolean | Record | undefined { - if (value === undefined || value === '') return undefined; - const lowerValue = value.toLowerCase(); - if (lowerValue === 'true') return true; - if (lowerValue === 'false') return false; - if (!isNaN(Number(value))) return Number(value); - try { - return jsonParse(value); - } catch { - return value; + if (operation) { + description += `\n Operation: ${operation}`; } + return description.trim(); } - - /** - * Retrieves and validates the Zod schema for the tool. - * - * This method: - * 1. Collects all $fromAI arguments from node parameters - * 2. Validates parameter keys against naming rules - * 3. Checks for duplicate keys and ensures consistency - * 4. Generates a Zod schema from the validated arguments - * - * @throws {NodeOperationError} When parameter keys are invalid or when duplicate keys have inconsistent definitions - * @returns {z.ZodObject} A Zod schema object representing the structure and validation rules for the node parameters - */ - private getSchema() { - const { node } = this.options; - const collectedArguments: FromAIArgument[] = []; - this.traverseNodeParameters(node.parameters, collectedArguments); - - // Validate each collected argument - const nameValidationRegex = /^[a-zA-Z0-9_-]{1,64}$/; - const keyMap = new Map(); - for (const argument of collectedArguments) { - if (argument.key.length === 0 || !nameValidationRegex.test(argument.key)) { - const isEmptyError = 'You must specify a key when using $fromAI()'; - const isInvalidError = `Parameter key \`${argument.key}\` is invalid`; - const error = new Error(argument.key.length === 0 ? isEmptyError : isInvalidError); - throw new NodeOperationError(node, error, { - description: - 'Invalid parameter key, must be between 1 and 64 characters long and only contain letters, numbers, underscores, and hyphens', - }); - } - - if (keyMap.has(argument.key)) { - // If the key already exists in the Map - const existingArg = keyMap.get(argument.key)!; - - // Check if the existing argument has the same description and type - if ( - existingArg.description !== argument.description || - existingArg.type !== argument.type - ) { - // If not, throw an error for inconsistent duplicate keys - throw new NodeOperationError( - node, - `Duplicate key '${argument.key}' found with different description or type`, - { - description: - 'Ensure all $fromAI() calls with the same key have consistent descriptions and types', - }, - ); - } - // If the duplicate key has consistent description and type, it's allowed (no action needed) - } else { - // If the key doesn't exist in the Map, add it - keyMap.set(argument.key, argument); - } - } - - // Remove duplicate keys, latest occurrence takes precedence - const uniqueArgsMap = collectedArguments.reduce((map, arg) => { - map.set(arg.key, arg); - return map; - }, new Map()); - - const uniqueArguments = Array.from(uniqueArgsMap.values()); - - // Generate Zod schema from unique arguments - const schemaObj = uniqueArguments.reduce((acc: Record, placeholder) => { - acc[placeholder.key] = this.generateZodSchema(placeholder); - return acc; - }, {}); - - return z.object(schemaObj).required(); + if (node.parameters.descriptionType === 'manual') { + return manualDescription ?? nodeType.description.description; } - /** - * Generates a description for a node based on the provided parameters. - * @param node The node type. - * @param nodeParameters The parameters of the node. 
-	 * @returns A string description for the node.
-	 */
-	private getDescription(): string {
-		const { node, nodeType } = this.options;
-		const manualDescription = node.parameters.toolDescription as string;
-
-		if (node.parameters.descriptionType === 'auto') {
-			const resource = node.parameters.resource as string;
-			const operation = node.parameters.operation as string;
-			let description = nodeType.description.description;
-			if (resource) {
-				description += `\n Resource: ${resource}`;
-			}
-			if (operation) {
-				description += `\n Operation: ${operation}`;
-			}
-			return description.trim();
-		}
-		if (node.parameters.descriptionType === 'manual') {
-			return manualDescription ?? nodeType.description.description;
-		}
-
-		return nodeType.description.description;
-	}
-
-	/**
-	 * Creates a DynamicStructuredTool from a node.
-	 * @returns A DynamicStructuredTool instance.
-	 */
-	createTool(): DynamicStructuredTool {
-		const { node, nodeType } = this.options;
-		const schema = this.getSchema();
-		const description = this.getDescription();
-		const nodeName = node.name.replace(/ /g, '_');
-		const name = nodeName || nodeType.description.name;
+	return nodeType.description.description;
+}
-		return new DynamicStructuredTool({
-			name,
-			description,
-			schema,
-			func: async (toolArgs: z.infer<typeof schema>) =>
-				await this.options.handleToolInvocation(toolArgs),
-		});
-	}
+/**
+ * Creates a DynamicStructuredTool from a node.
+ * @returns A DynamicStructuredTool instance.
+ */
+function createTool(options: CreateNodeAsToolOptions) {
+	const { node, nodeType, handleToolInvocation } = options;
+	const schema = getSchema(node);
+	const description = makeDescription(node, nodeType);
+	const nodeName = node.name.replace(/ /g, '_');
+	const name = nodeName || nodeType.description.name;
+
+	return new DynamicStructuredTool({
+		name,
+		description,
+		schema,
+		func: async (toolArgs: z.infer<typeof schema>) => await handleToolInvocation(toolArgs),
+	});
 }

 /**
@@ -413,7 +135,6 @@ class AIParametersParser {
  * identifying placeholders using the $fromAI function, and generating a Zod schema. It then creates
  * a DynamicStructuredTool that can be used in LangChain workflows.
  */
-export function createNodeAsTool(options: ParserOptions) {
-	const parser = new AIParametersParser(options);
-	return { response: parser.createTool() };
+export function createNodeAsTool(options: CreateNodeAsToolOptions) {
+	return { response: createTool(options) };
 }
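Aside on the refactor above: the collected `$fromAI()` placeholders are folded into a single Zod object schema, with unknown types falling back to string. The sketch below illustrates that mapping under the shapes visible in this diff; the `FromAIArgument` type and the sample keys are assumptions for illustration, not the exact n8n implementation.

import { z } from 'zod';

// Shape assumed from the code shown in the diff above.
type FromAIArgument = {
	key: string;
	description?: string;
	type?: 'string' | 'number' | 'boolean' | 'json';
	defaultValue?: string | number | boolean | Record<string, unknown>;
};

// Mirrors the type switch sketched above: unrecognised types fall back to z.string().
function toZod(arg: FromAIArgument): z.ZodTypeAny {
	let schema: z.ZodTypeAny;
	switch (arg.type) {
		case 'number':
			schema = z.number();
			break;
		case 'boolean':
			schema = z.boolean();
			break;
		case 'json':
			schema = z.record(z.any());
			break;
		default:
			schema = z.string();
	}
	if (arg.description) schema = schema.describe(arg.description);
	if (arg.defaultValue !== undefined) schema = schema.default(arg.defaultValue);
	return schema;
}

// Example: two placeholders collected from node parameters become one object schema,
// matching the `z.object(schemaObj).required()` call in getSchema above.
const schema = z
	.object({
		city: toZod({ key: 'city', description: 'City to look up', type: 'string' }),
		days: toZod({ key: 'days', type: 'number', defaultValue: 3 }),
	})
	.required();

console.log(schema.parse({ city: 'Berlin', days: 5 }));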
diff --git a/packages/core/src/execution-engine/node-execution-context/utils/execution-metadata.ts b/packages/core/src/execution-engine/node-execution-context/utils/execution-metadata.ts
index 5e7c4954d629a..29957b983abee 100644
--- a/packages/core/src/execution-engine/node-execution-context/utils/execution-metadata.ts
+++ b/packages/core/src/execution-engine/node-execution-context/utils/execution-metadata.ts
@@ -38,9 +38,9 @@ export function setWorkflowExecutionMetadata(
 		Logger.error('Custom data key over 50 characters long. Truncating to 50 characters.');
 	}
 	if (val.length > 255) {
-		Logger.error('Custom data value over 255 characters long. Truncating to 255 characters.');
+		Logger.error('Custom data value over 512 characters long. Truncating to 512 characters.');
 	}
-	executionData.resultData.metadata[key.slice(0, 50)] = val.slice(0, 255);
+	executionData.resultData.metadata[key.slice(0, 50)] = val.slice(0, 512);
 }

 export function setAllWorkflowExecutionMetadata(
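The hunk above raises the stored-value cap from 255 to 512 characters while the key cap stays at 50; note that the surrounding `if (val.length > 255)` guard is left as context, so the warning still fires above 255 characters even though truncation now happens at 512. A minimal sketch of the resulting behaviour, with names simplified and a single limit assumed for clarity:

// Simplified from setWorkflowExecutionMetadata above: keys are capped at 50
// characters, values now at 512 (previously 255); longer input is truncated, not rejected.
const KEY_MAX = 50;
const VALUE_MAX = 512;

function setMetadataEntry(metadata: Record<string, string>, key: string, value: unknown): void {
	const val = String(value);
	if (key.length > KEY_MAX) {
		console.warn('Custom data key over 50 characters long. Truncating to 50 characters.');
	}
	if (val.length > VALUE_MAX) {
		console.warn('Custom data value over 512 characters long. Truncating to 512 characters.');
	}
	metadata[key.slice(0, KEY_MAX)] = val.slice(0, VALUE_MAX);
}

// Usage: a 600-character value is stored as its first 512 characters.
const metadata: Record<string, string> = {};
setMetadataEntry(metadata, 'customerId', 'x'.repeat(600));
console.log(metadata.customerId.length); // 512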
diff --git a/packages/design-system/package.json b/packages/design-system/package.json
index a4c97de675e69..730593efbd6a6 100644
--- a/packages/design-system/package.json
+++ b/packages/design-system/package.json
@@ -1,6 +1,6 @@
 {
   "name": "n8n-design-system",
-  "version": "1.66.0",
+  "version": "1.67.0",
   "main": "src/main.ts",
   "import": "src/main.ts",
   "scripts": {
diff --git a/packages/design-system/src/components/N8nCheckbox/__snapshots__/Checkbox.test.ts.snap b/packages/design-system/src/components/N8nCheckbox/__snapshots__/Checkbox.test.ts.snap
index c6dd12ec37eea..fa1e46c8896ad 100644
--- a/packages/design-system/src/components/N8nCheckbox/__snapshots__/Checkbox.test.ts.snap
+++ b/packages/design-system/src/components/N8nCheckbox/__snapshots__/Checkbox.test.ts.snap
@@ -35,20 +35,29 @@ exports[`components > N8nCheckbox > should render with both child and label 1`] = `
[snapshot body not recoverable from extraction: the "Checkbox" label markup is regenerated with the new N8nInputLabel wrapper elements]
@@ -126,20 +135,29 @@ exports[`components > N8nCheckbox > should render with label 1`] = `
[snapshot body not recoverable from extraction: same regenerated label markup]
diff --git a/packages/design-system/src/components/N8nFormBox/__snapshots__/FormBox.test.ts.snap b/packages/design-system/src/components/N8nFormBox/__snapshots__/FormBox.test.ts.snap
index 8138b44b8cecf..7123cc6c5a231 100644
--- a/packages/design-system/src/components/N8nFormBox/__snapshots__/FormBox.test.ts.snap
+++ b/packages/design-system/src/components/N8nFormBox/__snapshots__/FormBox.test.ts.snap
@@ -38,26 +38,35 @@ exports[`FormBox > should render the component 1`] = `
[snapshot bodies not recoverable from extraction: the required "Name", "Email" and "Password" field labels are regenerated with the new N8nInputLabel wrapper elements across three hunks]
diff --git a/packages/design-system/src/components/N8nInputLabel/InputLabel.vue b/packages/design-system/src/components/N8nInputLabel/InputLabel.vue
--- a/packages/design-system/src/components/N8nInputLabel/InputLabel.vue
+++ b/packages/design-system/src/components/N8nInputLabel/InputLabel.vue
[template hunk not recoverable from extraction: the label text, required asterisk, info icon, options and issues slots are regrouped into new main-content and trailing-content containers]
@@ -98,20 +109,40 @@ const addTargetBlank = (html: string) =>
 .container {
   display: flex;
   flex-direction: column;
+
+  label {
+    display: flex;
+    justify-content: space-between;
+  }
+}
+
+.main-content {
+  display: flex;
+  &:hover {
+    .infoIcon {
+      opacity: 1;
+
+      &:hover {
+        color: var(--color-text-base);
+      }
+    }
+  }
+}
+
+.trailing-content {
+  display: flex;
+  gap: var(--spacing-3xs);
+
+  * {
+    align-self: center;
+  }
 }
+
 .inputLabel {
   display: block;
 }
 .container:hover,
 .inputLabel:hover {
-  .infoIcon {
-    opacity: 1;
-
-    &:hover {
-      color: var(--color-text-base);
-    }
-  }
-
   .options {
     opacity: 1;
     transition: opacity 100ms ease-in; // transition on hover in
@@ -150,10 +181,13 @@
 .options {
   opacity: 0;
   transition: opacity 250ms cubic-bezier(0.98, -0.06, 0.49, -0.2); // transition on hover out
+  display: flex;
+  align-self: center;
+}

-  > * {
-    float: right;
-  }
+.issues {
+  display: flex;
+  align-self: center;
 }

 .overlay {
diff --git a/packages/design-system/src/components/N8nInputLabel/__snapshots__/InputLabel.test.ts.snap b/packages/design-system/src/components/N8nInputLabel/__snapshots__/InputLabel.test.ts.snap
index b4b61558a4493..5335044bf5c3b 100644
--- a/packages/design-system/src/components/N8nInputLabel/__snapshots__/InputLabel.test.ts.snap
+++ b/packages/design-system/src/components/N8nInputLabel/__snapshots__/InputLabel.test.ts.snap
@@ -10,20 +10,29 @@ exports[`component > Text overflow behavior > displays ellipsis with options 1`] = `
@@ -41,20 +50,29 @@ exports[`component > Text overflow behavior > displays full text without options 1`] = `
[snapshot bodies not recoverable from extraction: the "a label" markup is regenerated with the new main-content / trailing-content wrapper elements]
- - - diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index 83a84ded1e195..c20e298e2126a 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "1.76.0", + "version": "1.77.0", "description": "Workflow Editor UI for n8n", "main": "index.js", "scripts": { @@ -41,8 +41,8 @@ "@n8n/codemirror-lang-sql": "^1.0.2", "@n8n/permissions": "workspace:*", "@replit/codemirror-indentation-markers": "^6.5.3", - "@typescript/vfs": "^1.6.0", "@sentry/vue": "catalog:frontend", + "@typescript/vfs": "^1.6.0", "@vue-flow/background": "^1.3.2", "@vue-flow/controls": "^1.1.2", "@vue-flow/core": "^1.41.6", @@ -56,6 +56,7 @@ "chart.js": "^4.4.0", "codemirror-lang-html-n8n": "^1.0.0", "comlink": "^4.4.1", + "core-js": "^3.40.0", "dateformat": "^3.0.3", "email-providers": "^2.0.1", "esprima-next": "5.8.4", diff --git a/packages/editor-ui/public/static/n8n-logo.png b/packages/editor-ui/public/static/n8n-logo.png new file mode 100644 index 0000000000000..2bb6b2d750a1c Binary files /dev/null and b/packages/editor-ui/public/static/n8n-logo.png differ diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index 6cdd045703d10..bd207122b0943 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -1484,14 +1484,6 @@ export interface IN8nPromptResponse { updated: boolean; } -export type ApiKey = { - id: string; - label: string; - apiKey: string; - createdAt: string; - updatedAt: string; -}; - export type InputPanel = { nodeName?: string; run?: number; diff --git a/packages/editor-ui/src/__tests__/defaults.ts b/packages/editor-ui/src/__tests__/defaults.ts index 46e35a7d172d9..2272c2f40f282 100644 --- a/packages/editor-ui/src/__tests__/defaults.ts +++ b/packages/editor-ui/src/__tests__/defaults.ts @@ -62,7 +62,13 @@ export const defaultSettings: FrontendSettings = { disableSessionRecording: false, enabled: false, }, - publicApi: { enabled: false, latestVersion: 0, path: '', swaggerUi: { enabled: false } }, + publicApi: { + apiKeysPerUserLimit: 0, + enabled: false, + latestVersion: 0, + path: '', + swaggerUi: { enabled: false }, + }, pushBackend: 'websocket', saveDataErrorExecution: 'all', saveDataSuccessExecution: 'all', diff --git a/packages/editor-ui/src/__tests__/setup.ts b/packages/editor-ui/src/__tests__/setup.ts index 3ddee75f14410..934017e2bafe3 100644 --- a/packages/editor-ui/src/__tests__/setup.ts +++ b/packages/editor-ui/src/__tests__/setup.ts @@ -1,5 +1,6 @@ import '@testing-library/jest-dom'; import { configure } from '@testing-library/vue'; +import 'core-js/proposals/set-methods-v2'; configure({ testIdAttribute: 'data-test-id' }); diff --git a/packages/editor-ui/src/api/api-keys.ts b/packages/editor-ui/src/api/api-keys.ts index b4b44c8e134f9..5ea2f593b7079 100644 --- a/packages/editor-ui/src/api/api-keys.ts +++ b/packages/editor-ui/src/api/api-keys.ts @@ -1,12 +1,16 @@ -import type { ApiKey, IRestApiContext } from '@/Interface'; +import type { IRestApiContext } from '@/Interface'; import { makeRestApiRequest } from '@/utils/apiUtils'; +import type { CreateOrUpdateApiKeyRequestDto, ApiKey, ApiKeyWithRawValue } from '@n8n/api-types'; export async function getApiKeys(context: IRestApiContext): Promise { return await makeRestApiRequest(context, 'GET', '/api-keys'); } -export async function createApiKey(context: IRestApiContext): Promise { - return await makeRestApiRequest(context, 'POST', '/api-keys'); +export async function 
createApiKey( + context: IRestApiContext, + payload: CreateOrUpdateApiKeyRequestDto, +): Promise { + return await makeRestApiRequest(context, 'POST', '/api-keys', payload); } export async function deleteApiKey( @@ -15,3 +19,11 @@ export async function deleteApiKey( ): Promise<{ success: boolean }> { return await makeRestApiRequest(context, 'DELETE', `/api-keys/${id}`); } + +export async function updateApiKey( + context: IRestApiContext, + id: string, + payload: CreateOrUpdateApiKeyRequestDto, +): Promise<{ success: boolean }> { + return await makeRestApiRequest(context, 'PATCH', `/api-keys/${id}`, payload); +} diff --git a/packages/editor-ui/src/components/ApiKeyCard.vue b/packages/editor-ui/src/components/ApiKeyCard.vue new file mode 100644 index 0000000000000..2167bede962c9 --- /dev/null +++ b/packages/editor-ui/src/components/ApiKeyCard.vue @@ -0,0 +1,115 @@ + + + + + diff --git a/packages/editor-ui/src/components/ApiKeyCreateOrEditModal.test.ts b/packages/editor-ui/src/components/ApiKeyCreateOrEditModal.test.ts new file mode 100644 index 0000000000000..781a8341bde54 --- /dev/null +++ b/packages/editor-ui/src/components/ApiKeyCreateOrEditModal.test.ts @@ -0,0 +1,119 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import { createTestingPinia } from '@pinia/testing'; +import { API_KEY_CREATE_OR_EDIT_MODAL_KEY, STORES } from '@/constants'; +import { cleanupAppModals, createAppModals, mockedStore, retry } from '@/__tests__/utils'; +import ApiKeyEditModal from './ApiKeyCreateOrEditModal.vue'; +import { fireEvent } from '@testing-library/vue'; +import { useApiKeysStore } from '@/stores/apiKeys.store'; + +const renderComponent = createComponentRenderer(ApiKeyEditModal, { + pinia: createTestingPinia({ + initialState: { + [STORES.UI]: { + modalsById: { + [API_KEY_CREATE_OR_EDIT_MODAL_KEY]: { open: true }, + }, + }, + }, + }), +}); + +const apiKeysStore = mockedStore(useApiKeysStore); + +describe('ApiKeyCreateOrEditModal', () => { + beforeEach(() => { + createAppModals(); + }); + + afterEach(() => { + cleanupAppModals(); + vi.clearAllMocks(); + }); + + test('should allow creating API key from modal', async () => { + apiKeysStore.createApiKey.mockResolvedValue({ + id: '123', + label: 'new api key', + apiKey: '123456', + createdAt: new Date().toString(), + updatedAt: new Date().toString(), + rawApiKey: '***456', + }); + + const { getByText, getByPlaceholderText } = renderComponent({ + props: { + mode: 'new', + }, + }); + + await retry(() => expect(getByText('Create API Key')).toBeInTheDocument()); + expect(getByText('Label')).toBeInTheDocument(); + + const inputLabel = getByPlaceholderText('e.g Internal Project'); + const saveButton = getByText('Save'); + + expect(inputLabel).toBeInTheDocument(); + expect(saveButton).toBeInTheDocument(); + + await fireEvent.update(inputLabel, 'new label'); + + await fireEvent.click(saveButton); + + expect(getByText('***456')).toBeInTheDocument(); + + expect(getByText('API Key Created')).toBeInTheDocument(); + + expect(getByText('Done')).toBeInTheDocument(); + + expect( + getByText('Make sure to copy your API key now as you will not be able to see this again.'), + ).toBeInTheDocument(); + + expect(getByText('You can find more details in')).toBeInTheDocument(); + + expect(getByText('the API documentation')).toBeInTheDocument(); + + expect(getByText('Click to copy')).toBeInTheDocument(); + + expect(getByText('new api key')).toBeInTheDocument(); + }); + + test('should allow editing API key label', async () => { + apiKeysStore.apiKeys = [ + { + id: 
'123', + label: 'new api key', + apiKey: '123**', + createdAt: new Date().toString(), + updatedAt: new Date().toString(), + }, + ]; + + apiKeysStore.updateApiKey.mockResolvedValue(); + + const { getByText, getByTestId } = renderComponent({ + props: { + mode: 'edit', + activeId: '123', + }, + }); + + await retry(() => expect(getByText('Edit API Key')).toBeInTheDocument()); + + expect(getByText('Label')).toBeInTheDocument(); + + const labelInput = getByTestId('api-key-label'); + + expect((labelInput as unknown as HTMLInputElement).value).toBe('new api key'); + + await fireEvent.update(labelInput, 'updated api key'); + + const editButton = getByText('Edit'); + + expect(editButton).toBeInTheDocument(); + + await fireEvent.click(editButton); + + expect(apiKeysStore.updateApiKey).toHaveBeenCalledWith('123', { label: 'updated api key' }); + }); +}); diff --git a/packages/editor-ui/src/components/ApiKeyCreateOrEditModal.vue b/packages/editor-ui/src/components/ApiKeyCreateOrEditModal.vue new file mode 100644 index 0000000000000..04139e84ad1f5 --- /dev/null +++ b/packages/editor-ui/src/components/ApiKeyCreateOrEditModal.vue @@ -0,0 +1,239 @@ + + + + + diff --git a/packages/editor-ui/src/components/FixedCollectionParameter.vue b/packages/editor-ui/src/components/FixedCollectionParameter.vue index 59eacfdb100e6..04f8c9e7734bb 100644 --- a/packages/editor-ui/src/components/FixedCollectionParameter.vue +++ b/packages/editor-ui/src/components/FixedCollectionParameter.vue @@ -18,6 +18,9 @@ import { } from 'n8n-design-system'; import ParameterInputList from './ParameterInputList.vue'; import Draggable from 'vuedraggable'; +import { useWorkflowsStore } from '@/stores/workflows.store'; +import { useNDVStore } from '@/stores/ndv.store'; +import { telemetry } from '@/plugins/telemetry'; const locale = useI18n(); @@ -44,6 +47,9 @@ const emit = defineEmits<{ valueChanged: [value: ValueChangedEvent]; }>(); +const workflowsStore = useWorkflowsStore(); +const ndvStore = useNDVStore(); + const getPlaceholderText = computed(() => { const placeholder = locale.nodeText().placeholder(props.parameter, props.path); return placeholder ? 
placeholder : locale.baseText('fixedCollectionParameter.choose'); @@ -127,6 +133,13 @@ const getOptionProperties = (optionName: string) => { return undefined; }; +const onAddButtonClick = (optionName: string) => { + optionSelected(optionName); + if (props.parameter.name === 'workflowInputs') { + trackWorkflowInputFieldAdded(); + } +}; + const optionSelected = (optionName: string) => { const option = getOptionProperties(optionName); if (option === undefined) { @@ -183,6 +196,9 @@ const optionSelected = (optionName: string) => { const valueChanged = (parameterData: IUpdateInformation) => { emit('valueChanged', parameterData); + if (props.parameter.name === 'workflowInputs') { + trackWorkflowInputFieldTypeChange(parameterData); + } }; const onDragChange = (optionName: string) => { const parameterData: ValueChangedEvent = { @@ -193,6 +209,21 @@ const onDragChange = (optionName: string) => { emit('valueChanged', parameterData); }; + +const trackWorkflowInputFieldTypeChange = (parameterData: IUpdateInformation) => { + telemetry.track('User changed workflow input field type', { + type: parameterData.value, + workflow_id: workflowsStore.workflow.id, + node_id: ndvStore.activeNode?.id, + }); +}; + +const trackWorkflowInputFieldAdded = () => { + telemetry.track('User added workflow input field', { + workflow_id: workflowsStore.workflow.id, + node_id: ndvStore.activeNode?.id, + }); +}; + + + + + diff --git a/packages/editor-ui/src/components/ParameterInput.vue b/packages/editor-ui/src/components/ParameterInput.vue index 7d0aa444413fc..0730d175e7c55 100644 --- a/packages/editor-ui/src/components/ParameterInput.vue +++ b/packages/editor-ui/src/components/ParameterInput.vue @@ -837,6 +837,25 @@ function valueChanged(value: NodeParameterValueType | {} | Date) { parameter: props.parameter.name, }); } + // Track workflow input data mode change + const isWorkflowInputParameter = + props.parameter.name === 'inputSource' && props.parameter.default === 'workflowInputs'; + if (isWorkflowInputParameter) { + trackWorkflowInputModeEvent(value as string); + } +} + +function trackWorkflowInputModeEvent(value: string) { + const telemetryValuesMap: Record = { + workflowInputs: 'fields', + jsonExample: 'json', + passthrough: 'all', + }; + telemetry.track('User chose input data mode', { + option: telemetryValuesMap[value], + workflow_id: workflowsStore.workflowId, + node_id: node.value?.id, + }); } async function optionSelected(command: string) { diff --git a/packages/editor-ui/src/components/ParameterInputList.test.constants.ts b/packages/editor-ui/src/components/ParameterInputList.test.constants.ts new file mode 100644 index 0000000000000..4e564aa90db05 --- /dev/null +++ b/packages/editor-ui/src/components/ParameterInputList.test.constants.ts @@ -0,0 +1,70 @@ +import type { INodeUi } from '@/Interface'; +import type { INodeParameters, INodeProperties } from 'n8n-workflow'; + +export const TEST_PARAMETERS: INodeProperties[] = [ + { + displayName: 'Test Fixed Collection', + name: 'fixedCollectionTest', + placeholder: 'Test', + type: 'fixedCollection', + description: + 'Test fixed collection description. This is a long description that should be wrapped.', + typeOptions: { multipleValues: true, sortable: true, minRequiredFields: 1 }, + displayOptions: { + show: { '@version': [{ _cnd: { gte: 1.1 } }] }, + }, + default: {}, + options: [ + { + name: 'values', + displayName: 'Values', + values: [ + { + displayName: 'Name', + name: 'name', + type: 'string', + default: '', + placeholder: 'e.g. 
fieldName', + description: 'A name of the field in the collection', + required: true, + noDataExpression: true, + }, + ], + }, + ], + }, +]; + +export const FIXED_COLLECTION_PARAMETERS: INodeProperties[] = TEST_PARAMETERS.filter( + (p) => p.type === 'fixedCollection', +); + +export const TEST_NODE_VALUES: INodeParameters = { + color: '#ff0000', + alwaysOutputData: false, + executeOnce: false, + notesInFlow: false, + onError: 'stopWorkflow', + retryOnFail: false, + maxTries: 3, + waitBetweenTries: 1000, + notes: '', + parameters: { fixedCollectionTest: {} }, +}; + +export const TEST_NODE_NO_ISSUES: INodeUi = { + id: 'test-123', + parameters: { fixedCollectionTest: { values: [{ name: 'firstName' }] } }, + typeVersion: 1.1, + name: 'Test Node', + type: 'n8n-nodes-base.executeWorkflowTrigger', + position: [260, 340], +}; + +export const TEST_ISSUE = 'At least 1 field is required.'; + +export const TEST_NODE_WITH_ISSUES: INodeUi = { + ...TEST_NODE_NO_ISSUES, + parameters: { fixedCollectionTest: {} }, + issues: { parameters: { fixedCollectionTest: [TEST_ISSUE] } }, +}; diff --git a/packages/editor-ui/src/components/ParameterInputList.test.ts b/packages/editor-ui/src/components/ParameterInputList.test.ts new file mode 100644 index 0000000000000..aa5f6a941f1eb --- /dev/null +++ b/packages/editor-ui/src/components/ParameterInputList.test.ts @@ -0,0 +1,101 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import ParameterInputList from './ParameterInputList.vue'; +import { createTestingPinia } from '@pinia/testing'; +import { mockedStore } from '@/__tests__/utils'; +import { useNDVStore } from '@/stores/ndv.store'; +import { + TEST_NODE_NO_ISSUES, + TEST_PARAMETERS, + TEST_NODE_VALUES, + TEST_NODE_WITH_ISSUES, + FIXED_COLLECTION_PARAMETERS, + TEST_ISSUE, +} from './ParameterInputList.test.constants'; + +vi.mock('vue-router', async () => { + const actual = await vi.importActual('vue-router'); + const params = {}; + const location = {}; + return { + ...actual, + useRouter: () => ({ + push: vi.fn(), + }), + useRoute: () => ({ + params, + location, + }), + }; +}); + +let ndvStore: ReturnType>; + +const renderComponent = createComponentRenderer(ParameterInputList, { + props: { + hideDelete: true, + indent: true, + isReadOnly: false, + }, + global: { + stubs: { + ParameterInputFull: { template: '
' }, + Suspense: { template: '
' }, + }, + }, +}); + +describe('ParameterInputList', () => { + beforeEach(() => { + createTestingPinia(); + ndvStore = mockedStore(useNDVStore); + }); + + it('renders', () => { + ndvStore.activeNode = TEST_NODE_NO_ISSUES; + expect(() => + renderComponent({ + props: { + parameters: TEST_PARAMETERS, + nodeValues: TEST_NODE_VALUES, + }, + }), + ).not.toThrow(); + }); + + it('renders fixed collection inputs correctly', () => { + ndvStore.activeNode = TEST_NODE_NO_ISSUES; + const { getAllByTestId, getByText } = renderComponent({ + props: { + parameters: TEST_PARAMETERS, + nodeValues: TEST_NODE_VALUES, + }, + }); + + // Should render labels for all parameters + TEST_PARAMETERS.forEach((parameter) => { + expect(getByText(parameter.displayName)).toBeInTheDocument(); + }); + // Should render input placeholders for all fixed collection parameters + expect(getAllByTestId('suspense-stub')).toHaveLength(FIXED_COLLECTION_PARAMETERS.length); + }); + + it('renders fixed collection inputs correctly with issues', () => { + ndvStore.activeNode = TEST_NODE_WITH_ISSUES; + const { getByText, getByTestId } = renderComponent({ + props: { + parameters: TEST_PARAMETERS, + nodeValues: TEST_NODE_VALUES, + }, + }); + + // Should render labels for all parameters + TEST_PARAMETERS.forEach((parameter) => { + expect(getByText(parameter.displayName)).toBeInTheDocument(); + }); + // Should render error message for fixed collection parameter + expect( + getByTestId(`${FIXED_COLLECTION_PARAMETERS[0].name}-parameter-input-issues-container`), + ).toBeInTheDocument(); + expect(getByText(TEST_ISSUE)).toBeInTheDocument(); + }); +}); diff --git a/packages/editor-ui/src/components/ParameterInputList.vue b/packages/editor-ui/src/components/ParameterInputList.vue index 11fe4e1f42a5a..ebc1839fea312 100644 --- a/packages/editor-ui/src/components/ParameterInputList.vue +++ b/packages/editor-ui/src/components/ParameterInputList.vue @@ -5,7 +5,7 @@ import type { NodeParameterValue, NodeParameterValueType, } from 'n8n-workflow'; -import { deepCopy, ADD_FORM_NOTICE } from 'n8n-workflow'; +import { deepCopy, ADD_FORM_NOTICE, NodeHelpers } from 'n8n-workflow'; import { computed, defineAsyncComponent, onErrorCaptured, ref, watch } from 'vue'; import type { IUpdateInformation } from '@/Interface'; @@ -45,6 +45,9 @@ const LazyCollectionParameter = defineAsyncComponent( async () => await import('./CollectionParameter.vue'), ); +// Parameter issues are displayed within the inputs themselves, but some parameters need to show them in the label UI +const showIssuesInLabelFor = ['fixedCollection']; + type Props = { nodeValues: INodeParameters; parameters: INodeProperties[]; @@ -432,6 +435,15 @@ function onNoticeAction(action: string) { } } +function getParameterIssues(parameter: INodeProperties): string[] { + if (!node.value || !showIssuesInLabelFor.includes(parameter.type)) { + return []; + } + const issues = NodeHelpers.getParameterIssues(parameter, node.value.parameters, '', node.value); + + return issues.parameters?.[parameter.name] ?? []; +} + /** * Handles default node button parameter type actions * @param parameter @@ -536,8 +548,26 @@ function getParameterValue + > + +