From 15379f37f564467347237a8fcda4e20859609051 Mon Sep 17 00:00:00 2001 From: Andrey Antukh Date: Mon, 11 May 2026 10:59:35 +0200 Subject: [PATCH 1/6] :bug: Fix maximum call stack size exceeded in SSE read-stream (#9484) The recursive `read-items` function in `app.util.sse/read-stream` caused a synchronous stack overflow when reading buffered stream data. Each `rx/mapcat` call chained another recursive invocation on the same call stack without yielding to the event loop. Replace the recursive pattern with an `rx/create`-based async pump that uses Promise `.then()` chaining, keeping the call stack depth constant regardless of stream size. Also add progress reporting with names and IDs during binfile export and import, and bump `eventsource-parser` dependency. Closes #9470 Signed-off-by: Andrey Antukh --- backend/src/app/binfile/v3.clj | 26 ++- frontend/package.json | 4 +- frontend/pnpm-lock.yaml | 360 +++++++++++++++++++++++++++++---- frontend/src/app/util/sse.cljs | 40 ++-- 4 files changed, 368 insertions(+), 62 deletions(-) diff --git a/backend/src/app/binfile/v3.clj b/backend/src/app/binfile/v3.clj index 0db826e407..b4ed065317 100644 --- a/backend/src/app/binfile/v3.clj +++ b/backend/src/app/binfile/v3.clj @@ -281,7 +281,7 @@ thumbnails (bfc/get-file-object-thumbnails cfg file-id)] - (events/tap :progress {:section :file :id file-id}) + (events/tap :progress {:section :file :id file-id :name (:name file)}) (vswap! bfc/*state* update :files assoc file-id {:id file-id @@ -301,6 +301,7 @@ (write-entry! output path file)) (doseq [[index page-id] (d/enumerate pages)] + (let [path (str "files/" file-id "/pages/" page-id ".json") page (get pages-index page-id) objects (:objects page) @@ -311,6 +312,8 @@ (write-entry! 
output path page) + (events/tap :progress {:section :page :id page-id :name (:name page) :file-id file-id}) + (doseq [[shape-id shape] objects] (let [path (str "files/" file-id "/pages/" page-id "/" shape-id ".json") shape (assoc shape :page-id page-id) @@ -323,6 +326,8 @@ (doseq [{:keys [id] :as media} media] (let [path (str "files/" file-id "/media/" id ".json") media (encode-media media)] + + (events/tap :progress {:section :media :id id :file-id file-id}) (write-entry! output path media))) (doseq [thumbnail thumbnails] @@ -332,11 +337,13 @@ data (-> data (assoc :media-id (:media-id thumbnail)) (encode-file-thumbnail))] + (events/tap :progress {:section :thumbnails :id (:object-id thumbnail) :file-id file-id}) (write-entry! output path data))) (doseq [[id component] components] (let [path (str "files/" file-id "/components/" id ".json") component (encode-component component)] + (events/tap :progress {:section :component :id id :file-id file-id}) (write-entry! output path component))) (doseq [[id color] colors] @@ -347,17 +354,20 @@ (and (contains? color :path) (str/empty? (:path color))) (dissoc :path))] + (events/tap :progress {:section :color :id id :file-id file-id}) (write-entry! output path color))) (doseq [[id object] typographies] (let [path (str "files/" file-id "/typographies/" id ".json") typography (encode-typography object)] + (events/tap :progress {:section :typography :id id :file-id file-id}) (write-entry! output path typography))) (when (and tokens-lib (not (ctob/empty-lib? tokens-lib))) (let [path (str "files/" file-id "/tokens.json") encoded-tokens (encode-tokens-lib tokens-lib)] + (events/tap :progress {:section :tokens-lib :file-id file-id}) (write-entry! 
output path encoded-tokens))))) (defn- export-files @@ -600,6 +610,7 @@ (let [object (->> (read-entry input entry) (decode-color) (validate-color))] + (events/tap :progress {:section :color :id id :file-id file-id}) (if (= id (:id object)) (assoc result id object) result))) @@ -631,6 +642,7 @@ (clean-component-pre-decode) (decode-component) (clean-component-post-decode))] + (events/tap :progress {:section :component :id id :file-id file-id}) (if (= id (:id object)) (assoc result id object) result))) @@ -644,6 +656,7 @@ (let [object (->> (read-entry input entry) (decode-typography) (validate-typography))] + (events/tap :progress {:section :typography :id id :file-id file-id}) (if (= id (:id object)) (assoc result id object) result))) @@ -653,6 +666,7 @@ (defn- read-file-tokens-lib [{:keys [::bfc/input ::entries]} file-id] (when-let [entry (d/seek (match-tokens-lib-entry-fn file-id) entries)] + (events/tap :progress {:section :tokens-lib :file-id file-id}) (->> (read-plain-entry input entry) (decode-tokens-lib) (validate-tokens-lib)))) @@ -678,6 +692,7 @@ (let [page (->> (read-entry input entry) (decode-page)) page (dissoc page :options)] + (events/tap :progress {:section :page :id id :file-id file-id}) (when (= id (:id page)) (let [objects (read-file-shapes cfg file-id id)] (assoc page :objects objects)))))) @@ -693,6 +708,7 @@ (let [object (->> (read-entry input entry) (decode-file-thumbnail) (validate-file-thumbnail))] + (if (and (= frame-id (:frame-id object)) (= page-id (:page-id object)) (= tag (:tag object))) @@ -733,8 +749,6 @@ (vswap! 
bfc/*state* update :index bfc/update-index media :id) - (events/tap :progress {:section :media :file-id file-id}) - (doseq [item media] (let [params (-> item (update :id bfc/lookup-index) @@ -742,6 +756,8 @@ (d/update-when :media-id bfc/lookup-index) (d/update-when :thumbnail-id bfc/lookup-index))] + (events/tap :progress {:section :media :id (:id params) :file-id file-id}) + (l/dbg :hint "inserting media object" :file-id (str file-id') :id (str (:id params)) @@ -753,8 +769,6 @@ (db/insert! conn :file-media-object params ::db/on-conflict-do-nothing? (::bfc/overwrite cfg)))) - (events/tap :progress {:section :thumbnails :file-id file-id}) - (doseq [item thumbnails] (let [media-id (bfc/lookup-index (:media-id item)) object-id (-> (assoc item :file-id file-id') @@ -769,6 +783,8 @@ :media-id (str media-id) ::l/sync? true) + (events/tap :progress {:section :thumbnail :file-id file-id :object-id object-id}) + (db/insert! conn :file-tagged-object-thumbnail params ::db/on-conflict-do-nothing? true))) diff --git a/frontend/package.json b/frontend/package.json index 2e06b79905..0aaa88ed4d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -70,8 +70,8 @@ "compression": "^1.8.1", "concurrently": "^9.2.1", "date-fns": "^4.1.0", - "esbuild": "^0.27.4", - "eventsource-parser": "^3.0.6", + "esbuild": "^0.28.0", + "eventsource-parser": "^3.0.8", "express": "^5.1.0", "fancy-log": "^2.0.0", "getopts": "^2.3.0", diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index e98f362410..5d14fe10a4 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -39,7 +39,7 @@ importers: version: 1.59.0 '@storybook/addon-docs': specifier: 10.1.11 - version: 10.1.11(@types/react@19.2.13)(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + version: 
10.1.11(@types/react@19.2.13)(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@storybook/addon-themes': specifier: 10.1.11 version: 10.1.11(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)) @@ -48,7 +48,7 @@ importers: version: 10.1.11(@vitest/browser-playwright@4.0.18)(@vitest/browser@4.0.18(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))(vitest@4.0.18))(@vitest/runner@4.0.18)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vitest@4.0.18) '@storybook/react-vite': specifier: 10.1.11 - version: 10.1.11(esbuild@0.27.4)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + version: 10.1.11(esbuild@0.28.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@tokens-studio/sd-transforms': specifier: 1.2.11 version: 1.2.11(style-dictionary@5.0.0-rc.1(tslib@2.8.1)) @@ -80,11 +80,11 @@ importers: specifier: ^4.1.0 version: 4.1.0 esbuild: - specifier: ^0.27.4 - version: 0.27.4 + specifier: ^0.28.0 + version: 0.28.0 eventsource-parser: - specifier: ^3.0.6 - version: 3.0.6 + specifier: ^3.0.8 + version: 3.0.8 express: specifier: ^5.1.0 version: 5.2.1 @@ -283,7 +283,7 @@ importers: version: 
10.2.0(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3) '@storybook/react-vite': specifier: 10.2.0 - version: 10.2.0(esbuild@0.27.4)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + version: 10.2.0(esbuild@0.28.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@testing-library/dom': specifier: 10.4.0 version: 10.4.0 @@ -599,6 +599,12 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.28.0': + resolution: {integrity: sha512-lhRUCeuOyJQURhTxl4WkpFTjIsbDayJHih5kZC1giwE+MhIzAb7mEsQMqMf18rHLsrb5qI1tafG20mLxEWcWlA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.21.5': resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} engines: {node: '>=12'} @@ -617,6 +623,12 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.28.0': + resolution: {integrity: sha512-+WzIXQOSaGs33tLEgYPYe/yQHf0WTU0X42Jca3y8NWMbUVhp7rUnw+vAsRC/QiDrdD31IszMrZy+qwPOPjd+rw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.21.5': resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} engines: {node: '>=12'} @@ -635,6 +647,12 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-arm@0.28.0': + resolution: {integrity: sha512-wqh0ByljabXLKHeWXYLqoJ5jKC4XBaw6Hk08OfMrCRd2nP2ZQ5eleDZC41XHyCNgktBGYMbqnrJKq/K/lzPMSQ==} + engines: 
{node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.21.5': resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} engines: {node: '>=12'} @@ -653,6 +671,12 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.28.0': + resolution: {integrity: sha512-+VJggoaKhk2VNNqVL7f6S189UzShHC/mR9EE8rDdSkdpN0KflSwWY/gWjDrNxxisg8Fp1ZCD9jLMo4m0OUfeUA==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.21.5': resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} engines: {node: '>=12'} @@ -671,6 +695,12 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.28.0': + resolution: {integrity: sha512-0T+A9WZm+bZ84nZBtk1ckYsOvyA3x7e2Acj1KdVfV4/2tdG4fzUp91YHx+GArWLtwqp77pBXVCPn2We7Letr0Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.21.5': resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} engines: {node: '>=12'} @@ -689,6 +719,12 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.28.0': + resolution: {integrity: sha512-fyzLm/DLDl/84OCfp2f/XQ4flmORsjU7VKt8HLjvIXChJoFFOIL6pLJPH4Yhd1n1gGFF9mPwtlN5Wf82DZs+LQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.21.5': resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} engines: {node: '>=12'} @@ -707,6 +743,12 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.28.0': + resolution: {integrity: sha512-l9GeW5UZBT9k9brBYI+0WDffcRxgHQD8ShN2Ur4xWq/NFzUKm3k5lsH4PdaRgb2w7mI9u61nr2gI2mLI27Nh3Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.21.5': resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} engines: 
{node: '>=12'} @@ -725,6 +767,12 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.28.0': + resolution: {integrity: sha512-BXoQai/A0wPO6Es3yFJ7APCiKGc1tdAEOgeTNy3SsB491S3aHn4S4r3e976eUnPdU+NbdtmBuLncYir2tMU9Nw==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.21.5': resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} engines: {node: '>=12'} @@ -743,6 +791,12 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.28.0': + resolution: {integrity: sha512-RVyzfb3FWsGA55n6WY0MEIEPURL1FcbhFE6BffZEMEekfCzCIMtB5yyDcFnVbTnwk+CLAgTujmV/Lgvih56W+A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.21.5': resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} engines: {node: '>=12'} @@ -761,6 +815,12 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.28.0': + resolution: {integrity: sha512-CjaaREJagqJp7iTaNQjjidaNbCKYcd4IDkzbwwxtSvjI7NZm79qiHc8HqciMddQ6CKvJT6aBd8lO9kN/ZudLlw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.21.5': resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} engines: {node: '>=12'} @@ -779,6 +839,12 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.28.0': + resolution: {integrity: sha512-KBnSTt1kxl9x70q+ydterVdl+Cn0H18ngRMRCEQfrbqdUuntQQ0LoMZv47uB97NljZFzY6HcfqEZ2SAyIUTQBQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.21.5': resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} engines: {node: '>=12'} @@ -797,6 +863,12 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.28.0': + resolution: {integrity: 
sha512-zpSlUce1mnxzgBADvxKXX5sl8aYQHo2ezvMNI8I0lbblJtp8V4odlm3Yzlj7gPyt3T8ReksE6bK+pT3WD+aJRg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.21.5': resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} engines: {node: '>=12'} @@ -815,6 +887,12 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.28.0': + resolution: {integrity: sha512-2jIfP6mmjkdmeTlsX/9vmdmhBmKADrWqN7zcdtHIeNSCH1SqIoNI63cYsjQR8J+wGa4Y5izRcSHSm8K3QWmk3w==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.21.5': resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} engines: {node: '>=12'} @@ -833,6 +911,12 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.28.0': + resolution: {integrity: sha512-bc0FE9wWeC0WBm49IQMPSPILRocGTQt3j5KPCA8os6VprfuJ7KD+5PzESSrJ6GmPIPJK965ZJHTUlSA6GNYEhg==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.21.5': resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} engines: {node: '>=12'} @@ -851,6 +935,12 @@ packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.28.0': + resolution: {integrity: sha512-SQPZOwoTTT/HXFXQJG/vBX8sOFagGqvZyXcgLA3NhIqcBv1BJU1d46c0rGcrij2B56Z2rNiSLaZOYW5cUk7yLQ==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.21.5': resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} engines: {node: '>=12'} @@ -869,6 +959,12 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.28.0': + resolution: {integrity: sha512-SCfR0HN8CEEjnYnySJTd2cw0k9OHB/YFzt5zgJEwa+wL/T/raGWYMBqwDNAC6dqFKmJYZoQBRfHjgwLHGSrn3Q==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.21.5': resolution: 
{integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} engines: {node: '>=12'} @@ -887,6 +983,12 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.28.0': + resolution: {integrity: sha512-us0dSb9iFxIi8srnpl931Nvs65it/Jd2a2K3qs7fz2WfGPHqzfzZTfec7oxZJRNPXPnNYZtanmRc4AL/JwVzHQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + '@esbuild/netbsd-arm64@0.27.3': resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==} engines: {node: '>=18'} @@ -899,6 +1001,12 @@ packages: cpu: [arm64] os: [netbsd] + '@esbuild/netbsd-arm64@0.28.0': + resolution: {integrity: sha512-CR/RYotgtCKwtftMwJlUU7xCVNg3lMYZ0RzTmAHSfLCXw3NtZtNpswLEj/Kkf6kEL3Gw+BpOekRX0BYCtklhUw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.21.5': resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} engines: {node: '>=12'} @@ -917,6 +1025,12 @@ packages: cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.28.0': + resolution: {integrity: sha512-nU1yhmYutL+fQ71Kxnhg8uEOdC0pwEW9entHykTgEbna2pw2dkbFSMeqjjyHZoCmt8SBkOSvV+yNmm94aUrrqw==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + '@esbuild/openbsd-arm64@0.27.3': resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==} engines: {node: '>=18'} @@ -929,6 +1043,12 @@ packages: cpu: [arm64] os: [openbsd] + '@esbuild/openbsd-arm64@0.28.0': + resolution: {integrity: sha512-cXb5vApOsRsxsEl4mcZ1XY3D4DzcoMxR/nnc4IyqYs0rTI8ZKmW6kyyg+11Z8yvgMfAEldKzP7AdP64HnSC/6g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.21.5': resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} engines: {node: '>=12'} @@ -947,6 +1067,12 @@ packages: cpu: [x64] os: [openbsd] + 
'@esbuild/openbsd-x64@0.28.0': + resolution: {integrity: sha512-8wZM2qqtv9UP3mzy7HiGYNH/zjTA355mpeuA+859TyR+e+Tc08IHYpLJuMsfpDJwoLo1ikIJI8jC3GFjnRClzA==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/openharmony-arm64@0.27.3': resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==} engines: {node: '>=18'} @@ -959,6 +1085,12 @@ packages: cpu: [arm64] os: [openharmony] + '@esbuild/openharmony-arm64@0.28.0': + resolution: {integrity: sha512-FLGfyizszcef5C3YtoyQDACyg95+dndv79i2EekILBofh5wpCa1KuBqOWKrEHZg3zrL3t5ouE5jgr94vA+Wb2w==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + '@esbuild/sunos-x64@0.21.5': resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} engines: {node: '>=12'} @@ -977,6 +1109,12 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.28.0': + resolution: {integrity: sha512-1ZgjUoEdHZZl/YlV76TSCz9Hqj9h9YmMGAgAPYd+q4SicWNX3G5GCyx9uhQWSLcbvPW8Ni7lj4gDa1T40akdlw==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.21.5': resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} engines: {node: '>=12'} @@ -995,6 +1133,12 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.28.0': + resolution: {integrity: sha512-Q9StnDmQ/enxnpxCCLSg0oo4+34B9TdXpuyPeTedN/6+iXBJ4J+zwfQI28u/Jl40nOYAxGoNi7mFP40RUtkmUA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.21.5': resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} engines: {node: '>=12'} @@ -1013,6 +1157,12 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.28.0': + resolution: {integrity: sha512-zF3ag/gfiCe6U2iczcRzSYJKH1DCI+ByzSENHlM2FcDbEeo5Zd2C86Aq0tKUYAJJ1obRP84ymxIAksZUcdztHA==} + engines: {node: '>=18'} + cpu: [ia32] + os: 
[win32] + '@esbuild/win32-x64@0.21.5': resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} engines: {node: '>=12'} @@ -1031,6 +1181,12 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.28.0': + resolution: {integrity: sha512-pEl1bO9mfAmIC+tW5btTmrKaujg3zGtUmWNdCw/xs70FBjwAL3o9OEKNHvNmnyylD6ubxUERiEhdsL0xBQ9efw==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.9.1': resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -1098,12 +1254,16 @@ packages: '@hapi/topo@6.0.2': resolution: {integrity: sha512-KR3rD5inZbGMrHmgPxsJ9dbi6zEK+C3ZwUwTa+eMwWLz7oijWUTWD2pMSNNYJAU6Qq+65NkxXjqHr/7LM2Xkqg==} - '@humanfs/core@0.19.1': - resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + '@humanfs/core@0.19.2': + resolution: {integrity: sha512-UhXNm+CFMWcbChXywFwkmhqjs3PRCmcSa/hfBgLIb7oQ5HNb1wS0icWsGtSAUNgefHeI+eBrA8I1fxmbHsGdvA==} engines: {node: '>=18.18.0'} - '@humanfs/node@0.16.7': - resolution: {integrity: sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==} + '@humanfs/node@0.16.8': + resolution: {integrity: sha512-gE1eQNZ3R++kTzFUpdGlpmy8kDZD/MLyHqDwqjkVQI0JMdI1D51sy1H958PNXYkM2rAac7e5/CnIKZrHtPh3BQ==} + engines: {node: '>=18.18.0'} + + '@humanfs/types@0.15.0': + resolution: {integrity: sha512-ZZ1w0aoQkwuUuC7Yf+7sdeaNfqQiiLcSRbfI08oAxqLtpXQr9AIVX7Ay7HLDuiLYAaFPu8oBYNq/QIi9URHJ3Q==} engines: {node: '>=18.18.0'} '@humanwhocodes/module-importer@1.0.1': @@ -1908,6 +2068,9 @@ packages: '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/estree@1.0.9': + resolution: {integrity: 
sha512-GhdPgy1el4/ImP05X05Uw4cw2/M93BCUmnEvWZNStlCzEKME4Fkk+YpoA5OiHNQmoS7Cafb8Xa3Pya8m1Qrzeg==} + '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} @@ -2146,8 +2309,8 @@ packages: ajv: optional: true - ajv@6.14.0: - resolution: {integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==} + ajv@6.15.0: + resolution: {integrity: sha512-fgFx7Hfoq60ytK2c7DhnF8jIvzYgOMxfugjLOSMHjLIPgenqa7S7oaagATUq99mV6IYvN2tRmC0wnTYX6iPbMw==} ajv@8.12.0: resolution: {integrity: sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==} @@ -2937,6 +3100,11 @@ packages: engines: {node: '>=18'} hasBin: true + esbuild@0.28.0: + resolution: {integrity: sha512-sNR9MHpXSUV/XB4zmsFKN+QgVG82Cc7+/aaxJ8Adi8hyOac+EXptIp45QBPaVyX3N70664wRbTcLTOemCAnyqw==} + engines: {node: '>=18'} + hasBin: true + escalade@3.2.0: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} @@ -3069,8 +3237,8 @@ packages: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} - eventsource-parser@3.0.6: - resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} + eventsource-parser@3.0.8: + resolution: {integrity: sha512-70QWGkr4snxr0OXLRWsFLeRBIRPuQOvt4s8QYjmUlmlkyTZkRqS7EDVRZtzU3TiyDbXSzaOeF0XUKy8PchzukQ==} engines: {node: '>=18.0.0'} execa@8.0.1: @@ -5953,6 +6121,9 @@ snapshots: '@esbuild/aix-ppc64@0.27.4': optional: true + '@esbuild/aix-ppc64@0.28.0': + optional: true + '@esbuild/android-arm64@0.21.5': optional: true @@ -5962,6 +6133,9 @@ snapshots: '@esbuild/android-arm64@0.27.4': optional: true + '@esbuild/android-arm64@0.28.0': + optional: true + '@esbuild/android-arm@0.21.5': optional: 
true @@ -5971,6 +6145,9 @@ snapshots: '@esbuild/android-arm@0.27.4': optional: true + '@esbuild/android-arm@0.28.0': + optional: true + '@esbuild/android-x64@0.21.5': optional: true @@ -5980,6 +6157,9 @@ snapshots: '@esbuild/android-x64@0.27.4': optional: true + '@esbuild/android-x64@0.28.0': + optional: true + '@esbuild/darwin-arm64@0.21.5': optional: true @@ -5989,6 +6169,9 @@ snapshots: '@esbuild/darwin-arm64@0.27.4': optional: true + '@esbuild/darwin-arm64@0.28.0': + optional: true + '@esbuild/darwin-x64@0.21.5': optional: true @@ -5998,6 +6181,9 @@ snapshots: '@esbuild/darwin-x64@0.27.4': optional: true + '@esbuild/darwin-x64@0.28.0': + optional: true + '@esbuild/freebsd-arm64@0.21.5': optional: true @@ -6007,6 +6193,9 @@ snapshots: '@esbuild/freebsd-arm64@0.27.4': optional: true + '@esbuild/freebsd-arm64@0.28.0': + optional: true + '@esbuild/freebsd-x64@0.21.5': optional: true @@ -6016,6 +6205,9 @@ snapshots: '@esbuild/freebsd-x64@0.27.4': optional: true + '@esbuild/freebsd-x64@0.28.0': + optional: true + '@esbuild/linux-arm64@0.21.5': optional: true @@ -6025,6 +6217,9 @@ snapshots: '@esbuild/linux-arm64@0.27.4': optional: true + '@esbuild/linux-arm64@0.28.0': + optional: true + '@esbuild/linux-arm@0.21.5': optional: true @@ -6034,6 +6229,9 @@ snapshots: '@esbuild/linux-arm@0.27.4': optional: true + '@esbuild/linux-arm@0.28.0': + optional: true + '@esbuild/linux-ia32@0.21.5': optional: true @@ -6043,6 +6241,9 @@ snapshots: '@esbuild/linux-ia32@0.27.4': optional: true + '@esbuild/linux-ia32@0.28.0': + optional: true + '@esbuild/linux-loong64@0.21.5': optional: true @@ -6052,6 +6253,9 @@ snapshots: '@esbuild/linux-loong64@0.27.4': optional: true + '@esbuild/linux-loong64@0.28.0': + optional: true + '@esbuild/linux-mips64el@0.21.5': optional: true @@ -6061,6 +6265,9 @@ snapshots: '@esbuild/linux-mips64el@0.27.4': optional: true + '@esbuild/linux-mips64el@0.28.0': + optional: true + '@esbuild/linux-ppc64@0.21.5': optional: true @@ -6070,6 +6277,9 @@ snapshots: 
'@esbuild/linux-ppc64@0.27.4': optional: true + '@esbuild/linux-ppc64@0.28.0': + optional: true + '@esbuild/linux-riscv64@0.21.5': optional: true @@ -6079,6 +6289,9 @@ snapshots: '@esbuild/linux-riscv64@0.27.4': optional: true + '@esbuild/linux-riscv64@0.28.0': + optional: true + '@esbuild/linux-s390x@0.21.5': optional: true @@ -6088,6 +6301,9 @@ snapshots: '@esbuild/linux-s390x@0.27.4': optional: true + '@esbuild/linux-s390x@0.28.0': + optional: true + '@esbuild/linux-x64@0.21.5': optional: true @@ -6097,12 +6313,18 @@ snapshots: '@esbuild/linux-x64@0.27.4': optional: true + '@esbuild/linux-x64@0.28.0': + optional: true + '@esbuild/netbsd-arm64@0.27.3': optional: true '@esbuild/netbsd-arm64@0.27.4': optional: true + '@esbuild/netbsd-arm64@0.28.0': + optional: true + '@esbuild/netbsd-x64@0.21.5': optional: true @@ -6112,12 +6334,18 @@ snapshots: '@esbuild/netbsd-x64@0.27.4': optional: true + '@esbuild/netbsd-x64@0.28.0': + optional: true + '@esbuild/openbsd-arm64@0.27.3': optional: true '@esbuild/openbsd-arm64@0.27.4': optional: true + '@esbuild/openbsd-arm64@0.28.0': + optional: true + '@esbuild/openbsd-x64@0.21.5': optional: true @@ -6127,12 +6355,18 @@ snapshots: '@esbuild/openbsd-x64@0.27.4': optional: true + '@esbuild/openbsd-x64@0.28.0': + optional: true + '@esbuild/openharmony-arm64@0.27.3': optional: true '@esbuild/openharmony-arm64@0.27.4': optional: true + '@esbuild/openharmony-arm64@0.28.0': + optional: true + '@esbuild/sunos-x64@0.21.5': optional: true @@ -6142,6 +6376,9 @@ snapshots: '@esbuild/sunos-x64@0.27.4': optional: true + '@esbuild/sunos-x64@0.28.0': + optional: true + '@esbuild/win32-arm64@0.21.5': optional: true @@ -6151,6 +6388,9 @@ snapshots: '@esbuild/win32-arm64@0.27.4': optional: true + '@esbuild/win32-arm64@0.28.0': + optional: true + '@esbuild/win32-ia32@0.21.5': optional: true @@ -6160,6 +6400,9 @@ snapshots: '@esbuild/win32-ia32@0.27.4': optional: true + '@esbuild/win32-ia32@0.28.0': + optional: true + '@esbuild/win32-x64@0.21.5': 
optional: true @@ -6169,6 +6412,9 @@ snapshots: '@esbuild/win32-x64@0.27.4': optional: true + '@esbuild/win32-x64@0.28.0': + optional: true + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2)': dependencies: eslint: 9.39.2 @@ -6194,7 +6440,7 @@ snapshots: '@eslint/eslintrc@3.3.5': dependencies: - ajv: 6.14.0 + ajv: 6.15.0 debug: 4.4.3(supports-color@5.5.0) espree: 10.4.0 globals: 14.0.0 @@ -6233,13 +6479,18 @@ snapshots: dependencies: '@hapi/hoek': 11.0.7 - '@humanfs/core@0.19.1': {} - - '@humanfs/node@0.16.7': + '@humanfs/core@0.19.2': dependencies: - '@humanfs/core': 0.19.1 + '@humanfs/types': 0.15.0 + + '@humanfs/node@0.16.8': + dependencies: + '@humanfs/core': 0.19.2 + '@humanfs/types': 0.15.0 '@humanwhocodes/retry': 0.4.3 + '@humanfs/types@0.15.0': {} + '@humanwhocodes/module-importer@1.0.1': {} '@humanwhocodes/retry@0.4.3': {} @@ -6737,10 +6988,10 @@ snapshots: '@standard-schema/spec@1.1.0': {} - '@storybook/addon-docs@10.1.11(@types/react@19.2.13)(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/addon-docs@10.1.11(@types/react@19.2.13)(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: '@mdx-js/react': 3.1.1(@types/react@19.2.13)(react@19.2.3) - '@storybook/csf-plugin': 10.1.11(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + '@storybook/csf-plugin': 
10.1.11(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@storybook/icons': 2.0.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3) '@storybook/react-dom-shim': 10.1.11(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)) react: 19.2.3 @@ -6773,9 +7024,9 @@ snapshots: - react - react-dom - '@storybook/builder-vite@10.1.11(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/builder-vite@10.1.11(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: - '@storybook/csf-plugin': 10.1.11(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + '@storybook/csf-plugin': 10.1.11(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) storybook: 10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) ts-dedent: 2.2.0 @@ -6786,9 +7037,9 @@ snapshots: - rollup - webpack - 
'@storybook/builder-vite@10.2.0(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/builder-vite@10.2.0(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: - '@storybook/csf-plugin': 10.2.0(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + '@storybook/csf-plugin': 10.2.0(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) storybook: 10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) ts-dedent: 2.2.0 @@ -6799,21 +7050,21 @@ snapshots: - rollup - webpack - '@storybook/csf-plugin@10.1.11(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/csf-plugin@10.1.11(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: storybook: 10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) unplugin: 2.3.11 optionalDependencies: - esbuild: 
0.27.4 + esbuild: 0.28.0 rollup: 4.57.1 vite: 7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2) - '@storybook/csf-plugin@10.2.0(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/csf-plugin@10.2.0(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: storybook: 10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) unplugin: 2.3.11 optionalDependencies: - esbuild: 0.27.4 + esbuild: 0.28.0 rollup: 4.57.1 vite: 7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2) @@ -6836,11 +7087,11 @@ snapshots: react-dom: 19.2.3(react@19.2.3) storybook: 10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) - '@storybook/react-vite@10.1.11(esbuild@0.27.4)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/react-vite@10.1.11(esbuild@0.28.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.3(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@rollup/pluginutils': 5.3.0(rollup@4.57.1) - '@storybook/builder-vite': 
10.1.11(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + '@storybook/builder-vite': 10.1.11(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@storybook/react': 10.1.11(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@10.1.11(@testing-library/dom@10.4.0)(prettier@3.5.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3) empathic: 2.0.0 magic-string: 0.30.21 @@ -6859,11 +7110,11 @@ snapshots: - typescript - webpack - '@storybook/react-vite@10.2.0(esbuild@0.27.4)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': + '@storybook/react-vite@10.2.0(esbuild@0.28.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2))': dependencies: '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.3(typescript@5.9.3)(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@rollup/pluginutils': 5.3.0(rollup@4.57.1) - '@storybook/builder-vite': 10.2.0(esbuild@0.27.4)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) + '@storybook/builder-vite': 
10.2.0(esbuild@0.28.0)(rollup@4.57.1)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(vite@7.3.1(@types/node@25.2.1)(sass-embedded@1.97.3)(sass@1.97.3)(yaml@2.8.2)) '@storybook/react': 10.2.0(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@10.2.0(@testing-library/dom@10.4.0)(prettier@3.8.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(typescript@5.9.3) empathic: 2.0.0 magic-string: 0.30.21 @@ -6999,6 +7250,8 @@ snapshots: '@types/estree@1.0.8': {} + '@types/estree@1.0.9': {} + '@types/json-schema@7.0.15': {} '@types/json5@0.0.29': {} @@ -7304,7 +7557,7 @@ snapshots: optionalDependencies: ajv: 8.13.0 - ajv@6.14.0: + ajv@6.15.0: dependencies: fast-deep-equal: 3.1.3 fast-json-stable-stringify: 2.1.0 @@ -8281,6 +8534,35 @@ snapshots: '@esbuild/win32-ia32': 0.27.4 '@esbuild/win32-x64': 0.27.4 + esbuild@0.28.0: + optionalDependencies: + '@esbuild/aix-ppc64': 0.28.0 + '@esbuild/android-arm': 0.28.0 + '@esbuild/android-arm64': 0.28.0 + '@esbuild/android-x64': 0.28.0 + '@esbuild/darwin-arm64': 0.28.0 + '@esbuild/darwin-x64': 0.28.0 + '@esbuild/freebsd-arm64': 0.28.0 + '@esbuild/freebsd-x64': 0.28.0 + '@esbuild/linux-arm': 0.28.0 + '@esbuild/linux-arm64': 0.28.0 + '@esbuild/linux-ia32': 0.28.0 + '@esbuild/linux-loong64': 0.28.0 + '@esbuild/linux-mips64el': 0.28.0 + '@esbuild/linux-ppc64': 0.28.0 + '@esbuild/linux-riscv64': 0.28.0 + '@esbuild/linux-s390x': 0.28.0 + '@esbuild/linux-x64': 0.28.0 + '@esbuild/netbsd-arm64': 0.28.0 + '@esbuild/netbsd-x64': 0.28.0 + '@esbuild/openbsd-arm64': 0.28.0 + '@esbuild/openbsd-x64': 0.28.0 + '@esbuild/openharmony-arm64': 0.28.0 + '@esbuild/sunos-x64': 0.28.0 + '@esbuild/win32-arm64': 0.28.0 + '@esbuild/win32-ia32': 0.28.0 + '@esbuild/win32-x64': 0.28.0 + escalade@3.2.0: {} escape-html@1.0.3: {} @@ -8405,11 +8687,11 @@ snapshots: '@eslint/eslintrc': 3.3.5 '@eslint/js': 9.39.2 '@eslint/plugin-kit': 0.4.1 - '@humanfs/node': 0.16.7 + '@humanfs/node': 0.16.8 
'@humanwhocodes/module-importer': 1.0.1 '@humanwhocodes/retry': 0.4.3 - '@types/estree': 1.0.8 - ajv: 6.14.0 + '@types/estree': 1.0.9 + ajv: 6.15.0 chalk: 4.1.2 cross-spawn: 7.0.6 debug: 4.4.3(supports-color@5.5.0) @@ -8466,7 +8748,7 @@ snapshots: events@3.3.0: {} - eventsource-parser@3.0.6: {} + eventsource-parser@3.0.8: {} execa@8.0.1: dependencies: diff --git a/frontend/src/app/util/sse.cljs b/frontend/src/app/util/sse.cljs index 8e1044ec37..40a02f4535 100644 --- a/frontend/src/app/util/sse.cljs +++ b/frontend/src/app/util/sse.cljs @@ -17,22 +17,30 @@ (defn read-stream [^js/ReadableStream stream decode-fn] - (letfn [(read-items [^js reader] - (->> (rx/from (.read reader)) - (rx/mapcat (fn [result] - (if (.-done result) - (rx/empty) - (rx/concat - (rx/of (.-value result)) - (read-items reader)))))))] - (->> (read-items (.getReader stream)) - (rx/mapcat (fn [^js event] - (let [type (.-event event) - data (.-data event) - data (decode-fn data)] - (if (= "error" type) - (rx/throw (ex-info "stream exception" data)) - (rx/of #js {:type type :data data})))))))) + (->> (rx/create + (fn [subs] + (let [reader (.getReader stream)] + (letfn [(pump [] + (-> (.read reader) + (.then (fn [result] + (if (.-done result) + (rx/end! subs) + (do + (rx/push! subs (.-value result)) + (pump))))) + (.catch (fn [cause] + (rx/error! 
subs cause)))))] + (pump) + ;; teardown: cancel the reader when unsubscribed + (fn [] (.cancel reader)))))) + (rx/mapcat (fn [^js event] + (let [type (.-event event) + data (.-data event) + data (decode-fn data)] + (if (= "error" type) + (rx/throw (ex-info "stream exception" data)) + (rx/of #js {:type type :data data}))))))) + (defn get-type [event] From b312e6b059002a1ca74d5793ad77a7dd93b989b8 Mon Sep 17 00:00:00 2001 From: Andrey Antukh Date: Mon, 11 May 2026 11:26:54 +0200 Subject: [PATCH 2/6] :books: Update changelog --- CHANGES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.md b/CHANGES.md index 1cf98e1b00..cb4fba79e5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -26,6 +26,7 @@ - Fix multiple selection on shapes with token applied to stroke color [Github #9110](https://github.com/penpot/penpot/pull/9110) - Fix onboarding modals appearing behind libraries and templates panel [Github #9178](https://github.com/penpot/penpot/pull/9178) - Fix release notes modal appearing behind the dashboard sidebar (by @RenzoMXD) [Github #8296](https://github.com/penpot/penpot/issues/8296) +- Fix maximum call stack size exceeded in SSE read-stream [Github #9470](https://github.com/penpot/penpot/issues/9470) ## 2.14.5 From cd882f9ebd3f7e17763fed744cbdef8ba867502a Mon Sep 17 00:00:00 2001 From: Andrey Antukh Date: Mon, 11 May 2026 12:38:33 +0200 Subject: [PATCH 3/6] :wrench: Add minor changes to devenv config --- docker/devenv/files/bashrc | 1 + docker/devenv/files/entrypoint.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/docker/devenv/files/bashrc b/docker/devenv/files/bashrc index 1c1ff83db8..799d2f285a 100644 --- a/docker/devenv/files/bashrc +++ b/docker/devenv/files/bashrc @@ -2,6 +2,7 @@ EMSDK_QUIET=1 . 
/opt/emsdk/emsdk_env.sh; +export JAVA_OPTS="-Djava.net.preferIPv4Stack=true" export PATH="/home/penpot/.cargo/bin:/opt/jdk/bin:/opt/gh/bin:/opt/utils/bin:/opt/clojure/bin:/opt/node/bin:/opt/imagick/bin:/opt/cargo/bin:$PATH" export CARGO_HOME="/home/penpot/.cargo" diff --git a/docker/devenv/files/entrypoint.sh b/docker/devenv/files/entrypoint.sh index 1427b19148..b6240777e7 100755 --- a/docker/devenv/files/entrypoint.sh +++ b/docker/devenv/files/entrypoint.sh @@ -13,6 +13,7 @@ cp /root/.tmux.conf /home/penpot/.tmux.conf chown penpot:users /home/penpot rsync -ar --chown=penpot:users /opt/cargo/ /home/penpot/.cargo/ +export JAVA_OPTS="-Djava.net.preferIPv4Stack=true" export PATH="/home/penpot/.cargo/bin:$PATH" export CARGO_HOME="/home/penpot/.cargo" From 7d4be33d4f3f2f7dd5f2056abd665c2101ddd561 Mon Sep 17 00:00:00 2001 From: Andrey Antukh Date: Mon, 11 May 2026 12:19:59 +0200 Subject: [PATCH 4/6] :tada: Add telemetry anonymous event collection (#9483) * :tada: Add telemetry anonymous event collection Rewrite the audit logging subsystem to support three operating modes and add anonymous telemetry event collection: Modes: - A (audit-log only): events persisted with full context - B (audit-log + telemetry): same as A, plus events are collected for telemetry shipping - C (telemetry-only): events stored anonymously with PII stripped, telemetry flag active, audit-log flag inactive Audit system refactoring (app.loggers.audit): - Replace qualified map keys (::audit/name etc.) with plain keywords - Rename submit! -> submit, insert! -> insert, prepare-event -> prepare-rpc-event - Add submit* as a lower-level public API - Add process-event dispatch function that handles all three modes and webhooks in a single tx-run! - Add :id to event schema (auto-generated if omitted) - Add filter-telemetry-props: anonymises event props per event type. 
Keeps UUID/boolean/number values; for login/identify events preserves lang, auth-backend, email-domain; for navigate events preserves route, file-id, team-id, page-id; instance-start trigger passes through. - Add filter-telemetry-context: retains only safe context keys. Backend: version, initiator, client-version, client-user-agent. Frontend: browser, os, locale, screen metrics, event-origin. - Timestamps truncated to day precision via ct/truncate for telemetry storage - PII stripped: props emptied, ip-addr zeroed, session-linking and access-token fields removed from context Config (app.config): - Derive :enable-telemetry flag from telemetry-enabled config option Email utilities (app.email): - Add email/clean and email/get-domain helper functions for domain extraction from email addresses Setup (app.setup): - Emit instance-start trigger event at system startup - Simplify handle-instance-id (remove read-only check) RPC layer (app.rpc): - wrap-audit now activates when :telemetry flag is set - Add :request-id to RPC params context for event correlation RPC commands (management, teams_invitations, verify_token, OIDC auth, webhooks): migrate all audit call sites to use the new plain-key API SREPL (app.srepl.main): - Migrate all audit/insert! 
calls to audit/insert with plain keys Telemetry task (app.tasks.telemetry): - Restructure legacy report into make-legacy-request; distinguish payload type as :telemetry-legacy-report - Add collect-and-send-audit-events: loop fetching up to 10,000 rows per iteration, encodes and sends each page, deletes on success, stops immediately on failure for retry - Add send-event-batch: POSTs fressian+zstd batch (base64 via blob/encode-str) to the telemetry endpoint with instance-id per event - Add gc-telemetry-events: enforces 100,000-row safety cap by dropping oldest rows first - Add delete-sent-events: deletes successfully shipped rows by id Blob utilities (app.util.blob): - Add encode-str/decode-str: combine fressian+zstd encoding with URL- safe base64 for JSON-safe string transport Database: - Add migration 0145: index on audit_log (source, created_at ASC) for efficient telemetry batch collection queries Frontend: - Always initialize event system regardless of :audit-log flag - Defer auth events (signin identify) to after profile is set - Refactor event subsystem for telemetry support Tests (21 test vars, 94 assertions in tasks-telemetry-test): - Cover all code paths: disabled/enabled telemetry, no-events no-op, happy-path batch send and delete, failure retention, payload anonymity, context stripping, timestamp day precision, batch encoding round-trip, multi-page iteration, GC cap enforcement, partial failure handling - blob encode-str/decode-str round-trip tests (14 test vars) - RPC audit integration tests (5 test vars) Signed-off-by: Andrey Antukh * :paperclip: Add pr feedback changes --------- Signed-off-by: Andrey Antukh --- backend/AGENTS.md | 7 +- backend/src/app/auth/oidc.clj | 12 +- backend/src/app/config.clj | 15 +- backend/src/app/email.clj | 19 + backend/src/app/loggers/audit.clj | 442 +++++---- backend/src/app/loggers/webhooks.clj | 14 +- .../0145-add-audit-log-telemetry-index.sql | 5 + backend/src/app/rpc.clj | 8 +- backend/src/app/rpc/commands/audit.clj | 
114 ++- backend/src/app/rpc/commands/management.clj | 8 +- .../app/rpc/commands/teams_invitations.clj | 12 +- backend/src/app/rpc/commands/verify_token.clj | 24 +- backend/src/app/setup.clj | 33 +- backend/src/app/srepl/main.clj | 131 ++- backend/src/app/tasks/telemetry.clj | 234 +++-- backend/src/app/util/blob.clj | 13 + backend/test/backend_tests/helpers.clj | 2 +- backend/test/backend_tests/rpc_audit_test.clj | 401 ++++++++ .../backend_tests/tasks_telemetry_test.clj | 909 +++++++++++++++++- backend/test/backend_tests/util_blob_test.clj | 106 ++ frontend/src/app/main.cljs | 7 +- frontend/src/app/main/data/auth.cljs | 26 +- frontend/src/app/main/data/event.cljs | 169 ++-- 23 files changed, 2182 insertions(+), 529 deletions(-) create mode 100644 backend/src/app/migrations/sql/0145-add-audit-log-telemetry-index.sql create mode 100644 backend/test/backend_tests/util_blob_test.clj diff --git a/backend/AGENTS.md b/backend/AGENTS.md index 913540f808..a260a74c13 100644 --- a/backend/AGENTS.md +++ b/backend/AGENTS.md @@ -8,7 +8,10 @@ Redis for messaging/caching. ## General Guidelines To ensure consistency across the Penpot JVM stack, all contributions must adhere -to these criteria: +to these criteria. + +IMPORTANT: all CLI commands should be executed under backend/ +subdirectory for make them work correctly. ### 1. Testing & Validation @@ -21,7 +24,7 @@ to these criteria: ### 2. Code Quality & Formatting -* **Linting:** All code must pass `clj-kondo` checks (run `pnpm run lint:clj`) +* **Linting:** All code must pass linter checks (run `pnpm run lint:clj` or `pnpm run lint` on the repository root) * **Formatting:** All the code must pass the formatting check (run `pnpm run check-fmt`). Use `pnpm run fmt` to fix formatting issues. Avoid "dirty" diffs caused by unrelated whitespace changes. 
diff --git a/backend/src/app/auth/oidc.clj b/backend/src/app/auth/oidc.clj index 3995a056e0..c6ff249e39 100644 --- a/backend/src/app/auth/oidc.clj +++ b/backend/src/app/auth/oidc.clj @@ -804,12 +804,12 @@ props (audit/profile->props profile) context (d/without-nils {:external-session-id (:external-session-id info)})] - (audit/submit! cfg {::audit/type "action" - ::audit/name "login-with-oidc" - ::audit/profile-id (:id profile) - ::audit/ip-addr (inet/parse-request request) - ::audit/props props - ::audit/context context}) + (audit/submit cfg {:type "action" + :name "login-with-oidc" + :profile-id (:id profile) + :ip-addr (inet/parse-request request) + :props props + :context context}) (->> (redirect-to-verify-token token) (sxf request))))) diff --git a/backend/src/app/config.clj b/backend/src/app/config.clj index 1a2a6c6bd9..b9aef15d04 100644 --- a/backend/src/app/config.clj +++ b/backend/src/app/config.clj @@ -259,11 +259,16 @@ [config] (let [public-uri (c/get config :public-uri) public-uri (some-> public-uri (u/uri)) - extra-flags (if (and public-uri - (= (:scheme public-uri) "http") - (not= (:host public-uri) "localhost")) - #{:disable-secure-session-cookies} - #{})] + extra-flags (cond-> #{} + ;; When public-uri is http (non-localhost), disable secure cookies + (and public-uri + (= (:scheme public-uri) "http") + (not= (:host public-uri) "localhost")) + (conj :disable-secure-session-cookies) + + ;; When telemetry-enabled config is true, add :telemetry flag + (true? (c/get config :telemetry-enabled)) + (conj :enable-telemetry))] (flags/parse flags/default extra-flags (:flags config)))) (defn read-env diff --git a/backend/src/app/email.clj b/backend/src/app/email.clj index b42206dc93..eacd7c83c1 100644 --- a/backend/src/app/email.clj +++ b/backend/src/app/email.clj @@ -31,6 +31,25 @@ jakarta.mail.Transport java.util.Properties)) +(defn clean + "Clean and normalizes email address string" + [email] + (let [email (str/lower email) + email (if (str/starts-with? 
email "mailto:") + (subs email 7) + email) + email (if (or (str/starts-with? email "<") + (str/ends-with? email ">")) + (str/trim email "<>") + email)] + email)) + +(defn get-domain + [email] + (let [email (clean email) + [_ domain] (str/split email "@" 2)] + domain)) + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; EMAIL IMPL ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; diff --git a/backend/src/app/loggers/audit.clj b/backend/src/app/loggers/audit.clj index 89119b04e1..f060921d5c 100644 --- a/backend/src/app/loggers/audit.clj +++ b/backend/src/app/loggers/audit.clj @@ -16,12 +16,12 @@ [app.common.uuid :as uuid] [app.config :as cf] [app.db :as db] + [app.email :as email] [app.http :as-alias http] [app.http.access-token :as-alias actoken] [app.loggers.audit.tasks :as-alias tasks] [app.loggers.webhooks :as-alias webhooks] [app.rpc :as-alias rpc] - [app.rpc.retry :as rtry] [app.setup :as-alias setup] [app.util.inet :as inet] [app.util.services :as-alias sv] @@ -33,6 +33,63 @@ ;; HELPERS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +(def ^:private filter-auth-events + #{"login-with-oidc" "login-with-password" "register-profile" "update-profile"}) + +(def ^:private safe-backend-context-keys + #{:version + :initiator + :client-version + :client-user-agent}) + +(def ^:private safe-frontend-context-keys + #{:version + :locale + :browser + :browser-version + :engine + :engine-version + :os + :os-version + :device-type + :device-arch + :screen-width + :screen-height + :screen-color-depth + :screen-orientation + :event-origin + :event-namespace + :event-symbol}) + +(def profile-props + [:id + :is-active + :is-muted + :auth-backend + :email + :default-team-id + :default-project-id + :fullname + :lang]) + +(def ^:private event-keys + #{:id + :name + :type + :profile-id + :ip-addr + :props + :context + :source + :tracked-at + :created-at}) + +(def reserved-props + #{:session-id + :password + 
:old-password + :token}) + (defn extract-utm-params "Extracts additional data from params and namespace them under `penpot` ns." @@ -47,17 +104,6 @@ (assoc (->> sk str/kebab (keyword "penpot")) v))))] (reduce-kv process-param {} params))) -(def profile-props - [:id - :is-active - :is-muted - :auth-backend - :email - :default-team-id - :default-project-id - :fullname - :lang]) - (defn profile->props [profile] (-> profile @@ -65,12 +111,6 @@ (merge (:props profile)) (d/without-nils))) -(def reserved-props - #{:session-id - :password - :old-password - :token}) - (defn clean-props [props] (into {} @@ -121,15 +161,16 @@ (def ^:private schema:event [:map {:title "AuditEvent"} - [::type ::sm/text] - [::name ::sm/text] - [::profile-id ::sm/uuid] - [::ip-addr {:optional true} ::sm/text] - [::props {:optional true} [:map-of :keyword :any]] - [::context {:optional true} [:map-of :keyword :any]] - [::tracked-at {:optional true} ::ct/inst] - [::created-at {:optional true} ::ct/inst] - [::source {:optional true} ::sm/text] + [:id {:optional true} ::sm/uuid] + [:type ::sm/text] + [:name ::sm/text] + [:profile-id ::sm/uuid] + [:props [:map-of :keyword :any]] + [:context [:map-of :keyword :any]] + [:tracked-at ::ct/inst] + [:created-at ::ct/inst] + [:source ::sm/text] + [:ip-addr {:optional true} ::sm/text] [::webhooks/event? {:optional true} ::sm/boolean] [::webhooks/batch-timeout {:optional true} ::ct/duration] [::webhooks/batch-key {:optional true} @@ -141,7 +182,155 @@ (def valid-event? 
(sm/validator schema:event)) -(defn prepare-event +(defn- prepare-context-from-request + "Prepare backend event context from request" + [request] + (let [client-event-origin (get-client-event-origin request) + client-version (get-client-version request) + client-user-agent (get-client-user-agent request) + session-id (get-external-session-id request) + key-id (::http/auth-key-id request) + token-id (::actoken/id request) + token-type (::actoken/type request)] + {:external-session-id session-id + :initiator (or key-id "app") + :access-token-id (some-> token-id str) + :access-token-type (some-> token-type str) + :client-event-origin client-event-origin + :client-user-agent client-user-agent + :client-version client-version + :version (:full cf/version)})) + +(defn- append-audit-entry + [cfg params] + (let [params (-> params + (assoc :id (uuid/next)) + (update :props db/tjson) + (update :context db/tjson) + (update :ip-addr db/inet)) + params (select-keys params event-keys)] + (db/insert! cfg :audit-log params))) + +(def ^:private xf:filter-telemetry-props + "Transducer that keeps only map entries whose values are UUIDs, + booleans or numbers." + (filter (fn [[k v]] + (and (simple-keyword? k) + (or (uuid? v) (boolean? v) (number? v)))))) + +(declare filter-telemetry-props) +(declare filter-telemetry-context) + +(defn- process-event + [cfg event] + (when (contains? cf/flags :audit-log-logger) + (l/log! ::l/logger "app.audit" + ::l/level :info + :profile-id (str (:profile-id event)) + :ip-addr (str (:ip-addr event)) + :type (:type event) + :name (:name event) + :props (json/encode (:props event) :key-fn json/write-camel-key) + :context (json/encode (:context event) :key-fn json/write-camel-key))) + + (when (contains? cf/flags :audit-log) + (append-audit-entry cfg event)) + + (when (contains? 
cf/flags :telemetry) + ;; NOTE: when both audit-log and telemetry are enabled, events are stored + ;; twice: once with full details (above) and once stripped of props and + ;; ip-addr, tagged with source="telemetry" so the telemetry task can + ;; collect and ship them. The profile-id is preserved (UUIDs are already + ;; anonymous random identifiers). Only a safe subset of context fields + ;; is kept: initiator, version, client-version and client-user-agent. + ;; Timestamps are truncated to day precision to avoid leaking exact event + ;; timing. + (let [event (-> event + (filter-telemetry-props) + (filter-telemetry-context) + (update :created-at ct/truncate :days) + (update :tracked-at ct/truncate :days) + (assoc :source "telemetry:backend") + (assoc :ip-addr "0.0.0.0"))] + (append-audit-entry cfg event))) + + (when (and (contains? cf/flags :webhooks) + (::webhooks/event? event)) + (let [batch-key (::webhooks/batch-key event) + batch-timeout (::webhooks/batch-timeout event) + label (dm/str "rpc:" (:name event)) + label (cond + (ifn? batch-key) (dm/str label ":" (batch-key (::rpc/params event))) + (string? batch-key) (dm/str label ":" batch-key) + :else label) + dedupe? (boolean (and batch-key batch-timeout))] + + (wrk/submit! (-> cfg + (assoc ::wrk/task :process-webhook-event) + (assoc ::wrk/queue :webhooks) + (assoc ::wrk/max-retries 0) + (assoc ::wrk/delay (or batch-timeout 0)) + (assoc ::wrk/dedupe dedupe?) + (assoc ::wrk/label label) + (assoc ::wrk/params (-> event + (dissoc :source) + (dissoc :context) + (dissoc :ip-addr) + (dissoc :type))))))) + event) + +(defn submit* + "A public API, lower-level than submit, assumes all required fields are filled" + [cfg event] + (try + (let [event (check-event event)] + (db/tx-run! 
cfg process-event event)) + (catch Throwable cause + (l/error :hint "unexpected error processing event" :cause cause)))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; PUBLIC API +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(defn filter-telemetry-props + [{:keys [source name props type] :as params}] + (cond + (or (and (= source "frontend") + (= type "identify")) + (and (= source "backend") + (filter-auth-events name))) + + (let [props' (into {} xf:filter-telemetry-props props) + props' (-> props' + (assoc :lang (:lang props)) + (assoc :auth-backend (:auth-backend props)) + (assoc :email-domain (email/get-domain (:email props))) + (d/without-nils))] + (assoc params :props props')) + + (and (= source "backend") + (= type "trigger") + (= name "instance-start")) + params + + (and (= source "frontend") + (= type "action") + (= name "navigate")) + (assoc params :props (select-keys props [:route :file-id :team-id :page-id])) + + :else + (let [props (into {} xf:filter-telemetry-props props)] + (assoc params :props props)))) + +(defn filter-telemetry-context + [{:keys [source context] :as params}] + (let [context (case source + "backend" (select-keys context safe-backend-context-keys) + "frontend" (select-keys context safe-frontend-context-keys) + {})] + (assoc params :context context))) + +(defn prepare-rpc-event [cfg mdata params result] (let [resultm (meta result) request (-> params meta ::http/request) @@ -154,23 +343,29 @@ (merge params (::props resultm))) (clean-props)) - context (merge (::context resultm) - (prepare-context-from-request request)) + context (-> (::context resultm) + (merge (prepare-context-from-request request)) + (assoc :request-id (::rpc/request-id params)) + (d/without-nils)) + ip-addr (inet/parse-request request) module (get cfg ::rpc/module)] - {::type (or (::type resultm) - (::rpc/type cfg)) - ::name (or (::name resultm) - (let [sname (::sv/name mdata)] - (if (not= module "main") - 
(str module "-" sname) - sname))) + {:type (or (::type resultm) + (::rpc/type cfg)) + :name (or (::name resultm) + (let [sname (::sv/name mdata)] + (if (not= module "main") + (str module "-" sname) + sname))) - ::profile-id profile-id - ::ip-addr ip-addr - ::props props - ::context context + :profile-id profile-id + :ip-addr ip-addr + :props props + :context context + + :created-at (::rpc/request-at params) + :tracked-at (::rpc/request-at params) ;; NOTE: for batch-key lookup we need the params as-is ;; because the rpc api does not need to know the @@ -190,148 +385,49 @@ (::webhooks/event? resultm) false)})) -(defn- prepare-context-from-request - "Prepare backend event context from request" - [request] - (let [client-event-origin (get-client-event-origin request) - client-version (get-client-version request) - client-user-agent (get-client-user-agent request) - session-id (get-external-session-id request) - key-id (::http/auth-key-id request) - token-id (::actoken/id request) - token-type (::actoken/type request)] - (d/without-nils - {:external-session-id session-id - :initiator (or key-id "app") - :access-token-id (some-> token-id str) - :access-token-type (some-> token-type str) - :client-event-origin client-event-origin - :client-user-agent client-user-agent - :client-version client-version - :version (:full cf/version)}))) - (defn event-from-rpc-params "Create a base event skeleton with pre-filled some important data that can be extracted from RPC params object" [params] - (let [context (some-> params meta ::http/request prepare-context-from-request) - event {::type "action" - ::profile-id (or (::rpc/profile-id params) uuid/zero) - ::ip-addr (::rpc/ip-addr params)}] - (cond-> event - (some? 
context) - (assoc ::context context)))) + (let [context (some-> params meta ::http/request prepare-context-from-request) + context (assoc context :request-id (::rpc/request-id params)) + request-at (::rpc/request-at params)] + {:type "action" + :profile-id (::rpc/profile-id params) + :created-at request-at + :tracked-at request-at + :ip-addr (::rpc/ip-addr params) + :context (d/without-nils context)})) -(defn- event->params - [event] - (let [params {:id (uuid/next) - :name (::name event) - :type (::type event) - :profile-id (::profile-id event) - :ip-addr (::ip-addr event) - :context (::context event {}) - :props (::props event {}) - :source "backend"} - tnow (::tracked-at event)] - - (cond-> params - (some? tnow) - (assoc :tracked-at tnow)))) - -(defn- append-audit-entry - [cfg params] - (let [params (-> params - (update :props db/tjson) - (update :context db/tjson) - (update :ip-addr db/inet))] - (db/insert! cfg :audit-log params))) - -(defn- handle-event! +(defn submit + "Submit an event to be registered under audit-log subsystem" [cfg event] - (let [tnow (ct/now) - params (-> (event->params event) - (assoc :created-at tnow) - (update :tracked-at #(or % tnow)))] + (let [tnow (ct/now) + event (-> event + (assoc :created-at tnow) + (update :profile-id d/nilv uuid/zero) + (update :tracked-at d/nilv tnow) + (update :ip-addr d/nilv "0.0.0.0") + (update :props d/nilv {}) + (update :context d/nilv {}) + (assoc :source "backend") + (d/without-nils))] + (submit* cfg event))) - (when (contains? cf/flags :audit-log-logger) - (l/log! ::l/logger "app.audit" - ::l/level :info - :profile-id (str (::profile-id event)) - :ip-addr (str (::ip-addr event)) - :type (::type event) - :name (::name event) - :props (json/encode (::props event) :key-fn json/write-camel-key) - :context (json/encode (::context event) :key-fn json/write-camel-key))) - - (when (contains? 
cf/flags :audit-log) - ;; NOTE: this operation may cause primary key conflicts on inserts - ;; because of the timestamp precission (two concurrent requests), in - ;; this case we just retry the operation. - (append-audit-entry cfg params)) - - (when (and (or (contains? cf/flags :telemetry) - (cf/get :telemetry-enabled)) - (not (contains? cf/flags :audit-log))) - ;; NOTE: this operation may cause primary key conflicts on inserts - ;; because of the timestamp precission (two concurrent requests), in - ;; this case we just retry the operation. - ;; - ;; NOTE: this is only executed when general audit log is disabled - (let [params (-> params - (assoc :props {}) - (assoc :context {}))] - (append-audit-entry cfg params))) - - (when (and (contains? cf/flags :webhooks) - (::webhooks/event? event)) - (let [batch-key (::webhooks/batch-key event) - batch-timeout (::webhooks/batch-timeout event) - label (dm/str "rpc:" (:name params)) - label (cond - (ifn? batch-key) (dm/str label ":" (batch-key (::rpc/params event))) - (string? batch-key) (dm/str label ":" batch-key) - :else label) - dedupe? (boolean (and batch-key batch-timeout))] - - (wrk/submit! (-> cfg - (assoc ::wrk/task :process-webhook-event) - (assoc ::wrk/queue :webhooks) - (assoc ::wrk/max-retries 0) - (assoc ::wrk/delay (or batch-timeout 0)) - (assoc ::wrk/dedupe dedupe?) - (assoc ::wrk/label label) - (assoc ::wrk/params (-> params - (dissoc :source) - (dissoc :context) - (dissoc :ip-addr) - (dissoc :type))))))) - params)) - -(defn submit! - "Submit audit event to the collector." - [cfg event] - (try - (let [event (-> (d/without-nils event) - (check-event)) - cfg (-> cfg - (assoc ::rtry/when rtry/conflict-exception?) - (assoc ::rtry/max-retries 6) - (assoc ::rtry/label "persist-audit-log"))] - (rtry/invoke! cfg db/tx-run! handle-event! event)) - (catch Throwable cause - (l/error :hint "unexpected error processing event" :cause cause)))) - -(defn insert! 
+(defn insert "Submit audit event to the collector, intended to be used only from command line helpers because this skips all webhooks and telemetry logic." [cfg event] (when (contains? cf/flags :audit-log) - (let [event (-> (d/without-nils event) + (let [tnow (ct/now) + event (-> event + (assoc :created-at tnow) + (update :tracked-at d/nilv tnow) + (update :profile-id d/nilv uuid/zero) + (update :props d/nilv {}) + (update :context d/nilv {}) + (assoc :source "backend") + (select-keys event-keys) (check-event))] - (db/run! cfg (fn [cfg] - (let [tnow (ct/now) - params (-> (event->params event) - (assoc :created-at tnow) - (update :tracked-at #(or % tnow)))] - (append-audit-entry cfg params))))))) + (db/run! cfg append-audit-entry event)))) diff --git a/backend/src/app/loggers/webhooks.clj b/backend/src/app/loggers/webhooks.clj index eab1344047..2f89876e04 100644 --- a/backend/src/app/loggers/webhooks.clj +++ b/backend/src/app/loggers/webhooks.clj @@ -70,14 +70,14 @@ (fn [{:keys [props] :as task}] (let [items (lookup-webhooks cfg props) - event {::audit/profile-id (:profile-id props) - ::audit/name "webhook" - ::audit/type "trigger" - ::audit/props {:name (get props :name) - :event-id (get props :id) - :total-affected (count items)}}] + event {:profile-id (:profile-id props) + :name "webhook" + :type "trigger" + :props {:name (get props :name) + :event-id (get props :id) + :total-affected (count items)}}] - (audit/insert! cfg event) + (audit/insert cfg event) (when items (l/trc :hint "webhooks found for event" :total (count items)) diff --git a/backend/src/app/migrations/sql/0145-add-audit-log-telemetry-index.sql b/backend/src/app/migrations/sql/0145-add-audit-log-telemetry-index.sql new file mode 100644 index 0000000000..12c59a6c83 --- /dev/null +++ b/backend/src/app/migrations/sql/0145-add-audit-log-telemetry-index.sql @@ -0,0 +1,5 @@ +-- Add index on audit_log (source, created_at) to support efficient +-- queries for the telemetry batch collection mode. 
+ +CREATE INDEX IF NOT EXISTS audit_log__source__created_at__idx + ON audit_log (source, created_at ASC); diff --git a/backend/src/app/rpc.clj b/backend/src/app/rpc.clj index 339cc51b14..15174f1a15 100644 --- a/backend/src/app/rpc.clj +++ b/backend/src/app/rpc.clj @@ -105,6 +105,7 @@ (assoc ::handler-name handler-name) (assoc ::ip-addr ip-addr) (assoc ::request-at (ct/now)) + (assoc ::request-id (uuid/next)) (assoc ::session-id (some-> session-id uuid/parse*)) (assoc ::cond/key etag) (cond-> (uuid? profile-id) @@ -161,12 +162,13 @@ (defn- wrap-audit [_ f mdata] (if (or (contains? cf/flags :webhooks) - (contains? cf/flags :audit-log)) + (contains? cf/flags :audit-log) + (contains? cf/flags :telemetry)) (if-not (::audit/skip mdata) (fn [cfg params] (let [result (f cfg params)] - (->> (audit/prepare-event cfg mdata params result) - (audit/submit! cfg)) + (->> (audit/prepare-rpc-event cfg mdata params result) + (audit/submit cfg)) result)) f) f)) diff --git a/backend/src/app/rpc/commands/audit.clj b/backend/src/app/rpc/commands/audit.clj index 757c4fa5cb..387f23e832 100644 --- a/backend/src/app/rpc/commands/audit.clj +++ b/backend/src/app/rpc/commands/audit.clj @@ -15,7 +15,7 @@ [app.config :as cf] [app.db :as db] [app.http :as-alias http] - [app.loggers.audit :as-alias audit] + [app.loggers.audit :as audit] [app.loggers.database :as loggers.db] [app.loggers.mattermost :as loggers.mm] [app.rpc :as-alias rpc] @@ -23,7 +23,8 @@ [app.rpc.doc :as-alias doc] [app.rpc.helpers :as rph] [app.util.inet :as inet] - [app.util.services :as sv])) + [app.util.services :as sv] + [clojure.set :as set])) (def ^:private event-columns [:id @@ -38,31 +39,31 @@ :context]) (defn- event->row [event] - [(::audit/id event) - (::audit/name event) - (::audit/source event) - (::audit/type event) - (::audit/tracked-at event) - (::audit/created-at event) - (::audit/profile-id event) - (db/inet (::audit/ip-addr event)) - (db/tjson (::audit/props event)) - (db/tjson (d/without-nils (::audit/context 
event)))]) + [(:id event) + (:name event) + (:source event) + (:type event) + (:tracked-at event) + (:created-at event) + (:profile-id event) + (db/inet (:ip-addr event)) + (db/tjson (:props event)) + (db/tjson (d/without-nils (:context event)))]) (defn- adjust-timestamp - [{:keys [::audit/tracked-at ::audit/created-at] :as event}] + [{:keys [tracked-at created-at] :as event}] (let [margin (inst-ms (ct/diff tracked-at created-at))] (if (or (neg? margin) (> margin 3600000)) ;; If event is in future or lags more than 1 hour, we reasign ;; tracked-at to the server creation date (-> event - (assoc ::audit/tracked-at created-at) - (update ::audit/context assoc :original-tracked-at tracked-at)) + (assoc :tracked-at created-at) + (update :context assoc :original-tracked-at tracked-at)) event))) (defn- exception-event? - [{:keys [::audit/type ::audit/name] :as ev}] + [{:keys [type name] :as ev}] (and (= "action" type) (or (= "unhandled-exception" name) (= "exception-page" name)))) @@ -72,28 +73,41 @@ (map adjust-timestamp) (map event->row))) -(defn- get-events +(defn- prepare-events [{:keys [::rpc/request-at ::rpc/profile-id events] :as params}] (let [request (-> params meta ::http/request) ip-addr (inet/parse-request request) - xform (map (fn [event] - {::audit/id (uuid/next) - ::audit/type (:type event) - ::audit/name (:name event) - ::audit/props (:props event) - ::audit/context (:context event) - ::audit/profile-id profile-id - ::audit/ip-addr ip-addr - ::audit/source "frontend" - ::audit/tracked-at (:timestamp event) - ::audit/created-at request-at}))] + {:id (uuid/next) + :type (:type event) + :name (:name event) + :props (:props event) + :context (:context event) + :profile-id profile-id + :ip-addr ip-addr + :source "frontend" + :tracked-at (:timestamp event) + :created-at request-at}))] (sequence xform events))) +(def ^:private xf:map-telemetry-event-row + (comp + (map adjust-timestamp) + (map (fn [event] + (-> event + (assoc :id (uuid/next)) + (update :created-at 
ct/truncate :days) + (update :tracked-at ct/truncate :days) + (audit/filter-telemetry-props) + (audit/filter-telemetry-context) + (assoc :ip-addr "0.0.0.0") + (assoc :source "telemetry:frontend")))) + (map event->row))) + (defn- handle-events [{:keys [::db/pool] :as cfg} params] - (let [events (get-events params)] + (let [events (prepare-events params)] ;; Look for error reports and save them on internal reports table (when-let [events (->> events @@ -102,9 +116,18 @@ (run! (partial loggers.db/emit cfg) events) (run! (partial loggers.mm/emit cfg) events)) - ;; Process and save events - (when (seq events) - (let [rows (sequence xf:map-event-row events)] + (when (contains? cf/flags :audit-log) + ;; Process and save full audit events when audit-log flag is active + (when-let [rows (-> (sequence xf:map-event-row events) + (not-empty))] + (db/insert-many! pool :audit-log event-columns rows))) + + (when (contains? cf/flags :telemetry) + ;; Store anonymized frontend events so the telemetry task can ship them + ;; in batches. Runs independently from the audit-log insert above so + ;; both modes can be active simultaneously. + (when-let [rows (-> (sequence xf:map-telemetry-event-row events) + (not-empty))] (db/insert-many! pool :audit-log event-columns rows))))) (def ^:private valid-event-types @@ -138,17 +161,26 @@ ::doc/skip true ::doc/added "1.17"} [{:keys [::db/pool] :as cfg} params] - (if (or (db/read-only? pool) - (not (contains? cf/flags :audit-log))) - (do - (l/warn :hint "audit: http handler disabled or db is read-only") - (rph/wrap nil)) - - (do + (let [telemetry? (contains? cf/flags :telemetry) + audit-log? (contains? cf/flags :audit-log) + enabled? (and (not (db/read-only? pool)) + (or audit-log? telemetry?))] + (when enabled? 
(try (handle-events cfg params) (catch Throwable cause (l/error :hint "unexpected error on persisting audit events from frontend" - :cause cause))) + :cause cause)))) - (rph/wrap nil)))) + (rph/wrap nil))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; GET-ENABLED-FLAGS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(sv/defmethod ::get-enabled-flags + {::audit/skip true + ::doc/skip true + ::doc/added "1.20"} + [_cfg _params] + (set/intersection cf/flags #{:audit-log :telemetry})) diff --git a/backend/src/app/rpc/commands/management.clj b/backend/src/app/rpc/commands/management.clj index 0908b358d7..974004ec63 100644 --- a/backend/src/app/rpc/commands/management.clj +++ b/backend/src/app/rpc/commands/management.clj @@ -425,10 +425,10 @@ (doseq [file-id result] (let [props (assoc props :id file-id) event (-> (audit/event-from-rpc-params params) - (assoc ::audit/profile-id profile-id) - (assoc ::audit/name "create-file") - (assoc ::audit/props props))] - (audit/submit! cfg event)))))) + (assoc :profile-id profile-id) + (assoc :name "create-file") + (assoc :props props))] + (audit/submit cfg event)))))) result)) diff --git a/backend/src/app/rpc/commands/teams_invitations.clj b/backend/src/app/rpc/commands/teams_invitations.clj index dfc83000a5..9d59b63b88 100644 --- a/backend/src/app/rpc/commands/teams_invitations.clj +++ b/backend/src/app/rpc/commands/teams_invitations.clj @@ -156,9 +156,9 @@ "update-team-invitation" "create-team-invitation") event (-> (audit/event-from-rpc-params params) - (assoc ::audit/name evname) - (assoc ::audit/props props))] - (audit/submit! cfg event)) + (assoc :name evname) + (assoc :props props))] + (audit/submit cfg event)) (when (allow-invitation-emails? member) (eml/send! 
{::eml/conn conn @@ -410,9 +410,9 @@ (let [props {:name name :features features} event (-> (audit/event-from-rpc-params params) - (assoc ::audit/name "create-team") - (assoc ::audit/props props))] - (audit/submit! cfg event)) + (assoc :name "create-team") + (assoc :props props))] + (audit/submit cfg event)) ;; Create invitations for all provided emails. (let [profile (db/get-by-id conn :profile profile-id) diff --git a/backend/src/app/rpc/commands/verify_token.clj b/backend/src/app/rpc/commands/verify_token.clj index 38a3e2b42f..246c35fe11 100644 --- a/backend/src/app/rpc/commands/verify_token.clj +++ b/backend/src/app/rpc/commands/verify_token.clj @@ -173,24 +173,22 @@ :role (:role claims) :invitation-id (:id invitation)}] - (audit/submit! - cfg - (-> (audit/event-from-rpc-params params) - (assoc ::audit/name "accept-team-invitation") - (assoc ::audit/props props))) + (audit/submit cfg + (-> (audit/event-from-rpc-params params) + (assoc :name "accept-team-invitation") + (assoc :props props))) ;; NOTE: Backward compatibility; old invitations can ;; have the `created-by` to be nil; so in this case we ;; don't submit this event to the audit-log (when-let [created-by (:created-by invitation)] - (audit/submit! 
- cfg - (-> (audit/event-from-rpc-params params) - (assoc ::audit/profile-id created-by) - (assoc ::audit/name "accept-team-invitation-from") - (assoc ::audit/props (assoc props - :profile-id (:id profile) - :email (:email profile)))))) + (audit/submit cfg + (-> (audit/event-from-rpc-params params) + (assoc :profile-id created-by) + (assoc :name "accept-team-invitation-from") + (assoc :props (assoc props + :profile-id (:id profile) + :email (:email profile)))))) (accept-invitation cfg claims invitation profile) (assoc claims :state :created)) diff --git a/backend/src/app/setup.clj b/backend/src/app/setup.clj index 2a860f4262..66d80f1a3b 100644 --- a/backend/src/app/setup.clj +++ b/backend/src/app/setup.clj @@ -11,7 +11,9 @@ [app.common.logging :as l] [app.common.schema :as sm] [app.common.uuid :as uuid] + [app.config :as cf] [app.db :as db] + [app.loggers.audit :as audit] [app.main :as-alias main] [app.setup.keys :as keys] [app.setup.templates] @@ -35,22 +37,20 @@ (into {}))) (defn- handle-instance-id - [instance-id conn read-only?] + [instance-id conn] (or instance-id (let [instance-id (uuid/random)] - (when-not read-only? - (try - (db/insert! conn :server-prop - {:id "instance-id" - :preload true - :content (db/tjson instance-id)}) - (catch Throwable cause - (l/warn :hint "unable to persist instance-id" - :instance-id instance-id - :cause cause)))) + (try + (db/insert! conn :server-prop + {:id "instance-id" + :preload true + :content (db/tjson instance-id)}) + (catch Throwable cause + (l/warn :hint "unable to persist instance-id" + :instance-id instance-id + :cause cause))) instance-id))) - (def sql:add-prop "INSERT INTO server_prop (id, content, preload) VALUES (?, ?, ?) @@ -77,7 +77,12 @@ (assert (db/pool? 
(::db/pool params)) "expected valid database pool")) (defmethod ig/init-key ::props - [_ {:keys [::db/pool ::key] :as cfg}] + [_ {:keys [::key] :as cfg}] + (audit/submit cfg {:type "trigger" + :name "instance-start" + :props {:version (:full cf/version) + :flags (mapv name cf/flags) + :public-uri (str (cf/get :public-uri))}}) (db/tx-run! cfg (fn [{:keys [::db/conn]}] (db/xact-lock! conn 0) @@ -91,7 +96,7 @@ (-> (get-all-props conn) (assoc :secret-key secret) (assoc :tokens-key (keys/derive secret :salt "tokens")) - (update :instance-id handle-instance-id conn (db/read-only? pool))))))) + (update :instance-id handle-instance-id conn)))))) (defmethod ig/init-key ::shared-keys [_ {:keys [::props] :as cfg}] diff --git a/backend/src/app/srepl/main.clj b/backend/src/app/srepl/main.clj index 30c8b403dc..8e4ba92270 100644 --- a/backend/src/app/srepl/main.clj +++ b/backend/src/app/srepl/main.clj @@ -553,14 +553,13 @@ (let [file-id (h/parse-uuid file-id) tnow (ct/now)] - (audit/insert! main/system - {::audit/name "delete-file" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props {:id file-id} - ::audit/context {:triggered-by "srepl" - :cause "explicit call to delete-file!"} - ::audit/tracked-at tnow}) + (audit/insert main/system + {:name "delete-file" + :type "action" + :props {:id file-id} + :context {:triggered-by "srepl" + :cause "explicit call to delete-file!"} + :tracked-at tnow}) (wrk/invoke! (-> main/system (assoc ::wrk/task :delete-object) (assoc ::wrk/params {:object :file @@ -578,15 +577,12 @@ {:id file-id} {::db/remove-deleted false ::sql/columns [:id :name]})] - (audit/insert! 
system - {::audit/name "restore-file" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props file - ::audit/context {:triggered-by "srepl" - :cause "explicit call to restore-file!"} - ::audit/tracked-at (ct/now)}) - + (audit/insert system + {:name "restore-file" + :type "action" + :props file + :context {:triggered-by "srepl" + :cause "explicit call to restore-file!"}}) (#'files/restore-file conn file-id)) :restored)))) @@ -597,14 +593,13 @@ (let [project-id (h/parse-uuid project-id) tnow (ct/now)] - (audit/insert! main/system - {::audit/name "delete-project" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props {:id project-id} - ::audit/context {:triggered-by "srepl" - :cause "explicit call to delete-project!"} - ::audit/tracked-at tnow}) + (audit/insert main/system + {:name "delete-project" + :type "action" + :props {:id project-id} + :context {:triggered-by "srepl" + :cause "explicit call to delete-project!"} + :tracked-at tnow}) (wrk/invoke! (-> main/system (assoc ::wrk/task :delete-object) @@ -635,14 +630,12 @@ (when-let [project (db/get* system :project {:id project-id} {::db/remove-deleted false})] - (audit/insert! system - {::audit/name "restore-project" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props project - ::audit/context {:triggered-by "srepl" - :cause "explicit call to restore-team!"} - ::audit/tracked-at (ct/now)}) + (audit/insert system + {:name "restore-project" + :type "action" + :props project + :context {:triggered-by "srepl" + :cause "explicit call to restore-team!"}}) (restore-project* system project-id)))))) @@ -652,14 +645,13 @@ (let [team-id (h/parse-uuid team-id) tnow (ct/now)] - (audit/insert! 
main/system - {::audit/name "delete-team" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props {:id team-id} - ::audit/context {:triggered-by "srepl" - :cause "explicit call to delete-profile!"} - ::audit/tracked-at tnow}) + (audit/insert main/system + {:name "delete-team" + :type "action" + :props {:id team-id} + :context {:triggered-by "srepl" + :cause "explicit call to delete-profile!"} + :tracked-at tnow}) (wrk/invoke! (-> main/system (assoc ::wrk/task :delete-object) @@ -695,14 +687,12 @@ {:id team-id} {::db/remove-deleted false}) (teams/decode-row))] - (audit/insert! system - {::audit/name "restore-team" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props team - ::audit/context {:triggered-by "srepl" - :cause "explicit call to restore-team!"} - ::audit/tracked-at (ct/now)}) + (audit/insert system + {:name "restore-team" + :type "action" + :props team + :context {:triggered-by "srepl" + :cause "explicit call to restore-team!"}}) (restore-team* system team-id)))))) @@ -712,13 +702,12 @@ (let [profile-id (h/parse-uuid profile-id) tnow (ct/now)] - (audit/insert! main/system - {::audit/name "delete-profile" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/context {:triggered-by "srepl" - :cause "explicit call to delete-profile!"} - ::audit/tracked-at tnow}) + (audit/insert main/system + {:name "delete-profile" + :type "action" + :context {:triggered-by "srepl" + :cause "explicit call to delete-profile!"} + :tracked-at tnow}) (wrk/invoke! (-> main/system (assoc ::wrk/task :delete-object) @@ -737,14 +726,12 @@ {:id profile-id} {::db/remove-deleted false}) (profile/decode-row))] - (audit/insert! 
system - {::audit/name "restore-profile" - ::audit/type "action" - ::audit/profile-id uuid/zero - ::audit/props (audit/profile->props profile) - ::audit/context {:triggered-by "srepl" - :cause "explicit call to restore-profile!"} - ::audit/tracked-at (ct/now)}) + (audit/insert system + {:name "restore-profile" + :type "action" + :props (audit/profile->props profile) + :context {:triggered-by "srepl" + :cause "explicit call to restore-profile!"}}) (db/update! system :profile {:deleted-at nil} @@ -768,14 +755,14 @@ {::db/remove-deleted false}) (profile/decode-row))] (do - (audit/insert! system - {::audit/name "delete-profile" - ::audit/type "action" - ::audit/profile-id (:id profile) - ::audit/tracked-at deleted-at - ::audit/props (audit/profile->props profile) - ::audit/context {:triggered-by "srepl" - :cause "explicit call to delete-profiles-in-bulk!"}}) + (audit/insert system + {:name "delete-profile" + :type "action" + :profile-id (:id profile) + :tracked-at deleted-at + :props (audit/profile->props profile) + :context {:triggered-by "srepl" + :cause "explicit call to delete-profiles-in-bulk!"}}) (wrk/invoke! (-> system (assoc ::wrk/task :delete-object) (assoc ::wrk/params {:object :profile diff --git a/backend/src/app/tasks/telemetry.clj b/backend/src/app/tasks/telemetry.clj index c6d989694b..797c3e6050 100644 --- a/backend/src/app/tasks/telemetry.clj +++ b/backend/src/app/tasks/telemetry.clj @@ -11,43 +11,27 @@ (:require [app.common.data :as d] [app.common.exceptions :as ex] + [app.common.logging :as l] [app.config :as cf] [app.db :as db] [app.http.client :as http] [app.main :as-alias main] [app.setup :as-alias setup] + [app.util.blob :as blob] [app.util.json :as json] [integrant.core :as ig] [promesa.exec :as px])) -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; IMPL -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -(defn- send! 
- [cfg data] - (let [request {:method :post - :uri (cf/get :telemetry-uri) - :headers {"content-type" "application/json"} - :body (json/encode-str data)} - response (http/req cfg request)] - (when (> (:status response) 206) - (ex/raise :type :internal - :code :invalid-response - :response-status (:status response) - :response-body (:body response))))) - -(defn- get-subscriptions-newsletter-updates - [conn] +(defn- get-subscriptions + [cfg] (let [sql "SELECT email FROM profile where props->>'~:newsletter-updates' = 'true'"] - (->> (db/exec! conn [sql]) - (mapv :email)))) + (db/run! cfg (fn [{:keys [::db/conn]}] + (->> (db/exec! conn [sql]) + (mapv :email)))))) -(defn- get-subscriptions-newsletter-news - [conn] - (let [sql "SELECT email FROM profile where props->>'~:newsletter-news' = 'true'"] - (->> (db/exec! conn [sql]) - (mapv :email)))) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; LEGACY DATA COLLECTION +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- get-num-teams [conn] @@ -161,8 +145,9 @@ (def ^:private sql:get-counters "SELECT name, count(*) AS count FROM audit_log - WHERE source = 'backend' - AND tracked_at >= date_trunc('day', now()) + WHERE source LIKE 'telemetry:%' + AND created_at >= date_trunc('day', now()) + AND created_at < date_trunc('day', now()) + interval '1 day' GROUP BY 1 ORDER BY 2 DESC") @@ -174,23 +159,13 @@ {:total-accomulated-events total :event-counters counters})) -(def ^:private sql:clean-counters - "DELETE FROM audit_log - WHERE ip_addr = '0.0.0.0'::inet -- we know this is from telemetry - AND tracked_at < (date_trunc('day', now()) - '1 day'::interval)") - -(defn- clean-counters-data! - [conn] - (when-not (contains? cf/flags :audit-log) - (db/exec-one! 
conn [sql:clean-counters]))) - -(defn- get-stats - [conn] +(defn- get-legacy-stats + [{:keys [::db/conn]}] (let [referer (if (cf/get :telemetry-with-taiga) "taiga" (cf/get :telemetry-referer))] (-> {:referer referer - :public-uri (cf/get :public-uri) + :public-uri (str (cf/get :public-uri)) :total-teams (get-num-teams conn) :total-projects (get-num-projects conn) :total-files (get-num-files conn) @@ -207,6 +182,124 @@ (get-action-counters conn)) (d/without-nils)))) +(defn- make-legacy-request + [cfg data] + (let [request {:method :post + :uri (cf/get :telemetry-uri) + :headers {"content-type" "application/json"} + :body (json/encode-str data)} + response (http/req cfg request {:skip-ssrf-check? true})] + (when (> (:status response) 206) + (ex/raise :type :internal + :code :invalid-response + :response-status (:status response) + :response-body (:body response))))) + +(defn- send-legacy-data + [{:keys [::setup/props] :as cfg} stats subs] + (let [data (cond-> {:type :telemetry-legacy-report + :version (:full cf/version) + :instance-id (:instance-id props)} + (some? stats) + (assoc :stats stats) + + (seq subs) + (assoc :subscriptions subs))] + + (make-legacy-request cfg data))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; AUDIT-EVENT BATCH (TELEMETRY MODE) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;; Telemetry events older than this are purged by the GC step so the +;; buffer stays bounded. +(def ^:private batch-size 10000) + +(def ^:private sql:gc-events + "DELETE FROM audit_log + WHERE source LIKE 'telemetry:%' + AND created_at < now() - interval '7 days'") + +(defn- gc-events + "Delete telemetry-mode events older than `telemetry-retention-days` + so that the buffer stays bounded." + [{:keys [::db/conn]}] + (let [result (db/exec-one! conn [sql:gc-events])] + (when (pos? 
(:next.jdbc/update-count result)) + (l/warn :hint "purged stale telemetry events" + :count (:next.jdbc/update-count result))))) + +(def ^:private sql:fetch-telemetry-events + "SELECT id, name, type, source, tracked_at, profile_id, props, context + FROM audit_log + WHERE source LIKE 'telemetry:%' + ORDER BY created_at ASC + LIMIT ?") + +(defn- row->event + [{:keys [name type source tracked-at profile-id props context]}] + (d/without-nils + {:name name + :type type + :source source + :tracked-at tracked-at + :profile-id profile-id + :props (or (some-> props db/decode-transit-pgobject) {}) + :context (or (some-> context db/decode-transit-pgobject) {})})) + +(defn- encode-batch + "Encode a sequence of event maps into a fressian+zstd base64 string + suitable for JSON transport." + ^String [events] + (blob/encode-str events {:version 4})) + +(defn send-event-batch + "Send a single batch of events to the telemetry endpoint. Returns + true on success." + [{:keys [::setup/props] :as cfg} batch] + (let [payload {:type :telemetry-events + :version (:full cf/version) + :instance-id (:instance-id props) + :events (encode-batch batch)} + request {:method :post + :uri (cf/get :telemetry-uri) + :headers {"content-type" "application/json"} + :body (json/encode-str payload)} + resp (http/req cfg request {:skip-ssrf-check? true})] + (if (<= (:status resp) 206) + true + (do + (l/warn :hint "telemetry event batch send failed" + :status (:status resp) + :body (:body resp)) + false)))) + +(defn- delete-sent-events + "Delete rows by their ids after a successful send." + [conn ids] + (let [arr (db/create-array conn "uuid" ids)] + (db/exec-one! conn ["DELETE FROM audit_log WHERE id = ANY(?)" arr]))) + +(defn- collect-and-send-audit-events + "Collect anonymous telemetry-mode audit events and ship them to the + telemetry endpoint in a loop. Each iteration fetches one page of + `batch-size` rows, encodes and sends them, then deletes the rows on + success. 
The loop stops as soon as a send returns false, leaving + remaining rows intact for the next run." + [{:keys [::db/conn] :as cfg}] + (loop [counter 1] + (when-let [rows (-> (db/exec! conn [sql:fetch-telemetry-events batch-size]) + (not-empty))] + (let [events (mapv row->event rows) + ids (mapv :id rows)] + (l/dbg :hint "shipping telemetry event batch" + :total (count events) + :batch counter) + (when (send-event-batch cfg events) + (delete-sent-events conn ids) + (recur (inc counter))))))) + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; TASK ENTRY POINT ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -218,46 +311,47 @@ (assert (some? (::setup/props params)) "expected setup props to be available")) (defmethod ig/init-key ::handler - [_ {:keys [::db/pool ::setup/props] :as cfg}] + [_ cfg] (fn [task] (let [params (:props task) send? (get params :send? true) enabled? (or (get params :enabled? false) - (contains? cf/flags :telemetry) - (cf/get :telemetry-enabled)) + (contains? cf/flags :telemetry)) + subs (get-subscriptions cfg)] - subs {:newsletter-updates (get-subscriptions-newsletter-updates pool) - :newsletter-news (get-subscriptions-newsletter-news pool)} - data {:subscriptions subs - :version (:full cf/version) - :instance-id (:instance-id props)}] + ;; If we have telemetry enabled, then proceed the normal + ;; operation sending legacy report - (when enabled? - (clean-counters-data! pool)) + (if enabled? + (when send? + (db/run! cfg gc-events) + ;; Randomize start time to avoid thundering herd when multiple + ;; instances restart at the same time. + (px/sleep (rand-int 10000)) - (cond - ;; If we have telemetry enabled, then proceed the normal - ;; operation. - enabled? - (let [data (merge data (get-stats pool))] - (when send? - (px/sleep (rand-int 10000)) - (send! cfg data)) - data) + (try + (let [stats (db/run! 
cfg get-legacy-stats)] + (send-legacy-data cfg stats subs)) + (catch Exception cause + (l/wrn :hint "unable to send legacy report" + :cause cause))) + + ;; Ship any anonymous audit-log events accumulated in + ;; telemetry mode (only when audit-log feature is off). + (when-not (contains? cf/flags :audit-log) + (try + (db/run! cfg collect-and-send-audit-events) + (catch Exception cause + (l/wrn :hint "unable to send events" + :cause cause))))) ;; If we have telemetry disabled, but there are users that are ;; explicitly checked the newsletter subscription on the ;; onboarding dialog or the profile section, then proceed to ;; send a limited telemetry data, that consists in the list of ;; subscribed emails and the running penpot version. - (or (seq (:newsletter-updates subs)) - (seq (:newsletter-news subs))) - (do - (when send? - (px/sleep (rand-int 10000)) - (send! cfg data)) - data) - - :else - data)))) + (when (and send? (seq subs)) + (px/sleep (rand-int 10000)) + (ex/ignoring + (send-legacy-data cfg nil subs))))))) diff --git a/backend/src/app/util/blob.clj b/backend/src/app/util/blob.clj index 6263e8e878..1aa9b8fa34 100644 --- a/backend/src/app/util/blob.clj +++ b/backend/src/app/util/blob.clj @@ -19,6 +19,7 @@ java.io.DataOutputStream java.io.InputStream java.io.OutputStream + java.util.Base64 net.jpountz.lz4.LZ4Compressor net.jpountz.lz4.LZ4Factory net.jpountz.lz4.LZ4FastDecompressor @@ -49,6 +50,13 @@ 5 (encode-v5 data) (throw (ex-info "unsupported version" {:version version})))))) +(defn encode-str + "Encode data to a blob and return it as a URL-safe base64 string + (no padding). Accepts the same options as `encode`." + (^String [data] (encode-str data nil)) + (^String [data opts] + (.encodeToString (.withoutPadding (Base64/getUrlEncoder)) ^bytes (encode data opts)))) + (defn decode "A function used for decode persisted blobs in the database." 
[^bytes data] @@ -63,6 +71,11 @@ 5 (decode-v5 data) (throw (ex-info "unsupported version" {:version version})))))) +(defn decode-str + "Decode a URL-safe base64 string produced by `encode-str` back to data." + [^String s] + (decode (.decode (Base64/getUrlDecoder) s))) + ;; --- IMPL (defn- encode-v1 diff --git a/backend/test/backend_tests/helpers.clj b/backend/test/backend_tests/helpers.clj index 8ddb3448a2..5e8fc40c60 100644 --- a/backend/test/backend_tests/helpers.clj +++ b/backend/test/backend_tests/helpers.clj @@ -83,7 +83,7 @@ [next] (with-redefs [app.config/flags (flags/parse flags/default default-flags) app.config/config config - app.loggers.audit/submit! (constantly nil) + app.loggers.audit/submit (constantly nil) app.auth/derive-password identity app.auth/verify-password (fn [a b] {:valid (= a b)}) app.common.features/get-enabled-features (fn [& _] app.common.features/supported-features)] diff --git a/backend/test/backend_tests/rpc_audit_test.clj b/backend/test/backend_tests/rpc_audit_test.clj index be612455d0..be7b1edf3a 100644 --- a/backend/test/backend_tests/rpc_audit_test.clj +++ b/backend/test/backend_tests/rpc_audit_test.clj @@ -9,7 +9,9 @@ [app.common.pprint :as pp] [app.common.time :as ct] [app.common.uuid :as uuid] + [app.config :as cf] [app.db :as db] + [app.loggers.audit :as audit] [app.rpc :as-alias rpc] [backend-tests.helpers :as th] [clojure.test :as t] @@ -96,4 +98,403 @@ (t/is (= "navigate" (:name row))) (t/is (= "frontend" (:source row))))))) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; TELEMETRY MODE (frontend ingest) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +(t/deftest push-events-telemetry-mode-stores-anonymized-row + ;; When telemetry is enabled and audit-log is NOT, frontend events + ;; must be stored with source="telemetry", empty props, zeroed ip, + ;; and context filtered to safe keys only. 
+ (with-redefs [cf/flags #{:telemetry}] + (let [prof (th/create-profile* 1 {:is-active true}) + team-id (:default-team-id prof) + proj-id (:default-project-id prof) + + params {::th/type :push-audit-events + ::rpc/profile-id (:id prof) + :events [{:name "navigate" + :props {:project-id (str proj-id) + :team-id (str team-id) + :route "dashboard-files"} + :context {:browser "Chrome" + :browser-version "120.0" + :os "Linux" + :version "2.0.0" + :session "should-be-stripped" + :external-session-id "also-stripped" + :initiator "app"} + :timestamp (ct/now) + :type "action"}]} + + params (with-meta params + {:app.http/request http-request}) + out (th/command! params)] + + (t/is (nil? (:error out))) + (t/is (nil? (:result out))) + + (let [[row :as rows] (->> (th/db-exec! ["select * from audit_log"]) + (mapv decode-row))] + (t/is (= 1 (count rows))) + ;; source is telemetry:frontend, not frontend + (t/is (= "telemetry:frontend" (:source row))) + ;; profile-id preserved + (t/is (= (:id prof) (:profile-id row))) + ;; event name preserved + (t/is (= "navigate" (:name row))) + ;; navigate events keep route and team-id; other keys stripped + (t/is (= {:route "dashboard-files" + :team-id (str team-id)} + (:props row))) + ;; ip zeroed + (t/is (= "0.0.0.0" (str (:ip-addr row)))) + ;; timestamps truncated to day precision + (let [day-now (ct/truncate (ct/now) :days)] + (t/is (= day-now (:created-at row))) + (t/is (= day-now (:tracked-at row)))) + ;; context only contains safe keys + (let [ctx (:context row)] + (t/is (contains? ctx :browser)) + (t/is (= "Chrome" (:browser ctx))) + (t/is (contains? ctx :os)) + (t/is (= "Linux" (:os ctx))) + ;; session-linking keys stripped + (t/is (not (contains? ctx :session))) + (t/is (not (contains? ctx :external-session-id)))))))) + +(t/deftest push-events-both-flags-creates-two-rows + ;; When both :audit-log and :telemetry flags are active, two rows + ;; should be stored: one full audit entry and one telemetry entry. 
+ (with-redefs [cf/flags #{:audit-log :telemetry}] + (let [prof (th/create-profile* 1 {:is-active true}) + params {::th/type :push-audit-events + ::rpc/profile-id (:id prof) + :events [{:name "navigate" + :props {:route "dashboard"} + :context {:browser "Chrome" + :version "2.0.0" + :initiator "app"} + :timestamp (ct/now) + :type "action"}]} + params (with-meta params + {:app.http/request http-request}) + out (th/command! params)] + + (t/is (nil? (:error out))) + + (let [[row1 row2 :as rows] (->> (th/db-exec! ["select * from audit_log order by source"]) + (mapv decode-row))] + (t/is (= 2 (count rows))) + ;; First row: full audit-log entry + (t/is (= "frontend" (:source row1))) + (t/is (contains? (:props row1) :route)) + (t/is (not= "0.0.0.0" (str (:ip-addr row1)))) + ;; Second row: telemetry entry + (t/is (= "telemetry:frontend" (:source row2))) + (t/is (= "0.0.0.0" (str (:ip-addr row2)))) + (let [day-now (ct/truncate (ct/now) :days)] + (t/is (= day-now (:created-at row2))) + (t/is (= day-now (:tracked-at row2)))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; BACKEND PROCESS-EVENT PATH (RPC commands) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest backend-process-event-only-audit-log + (with-redefs [cf/flags #{:audit-log}] + (let [prof (th/create-profile* 1 {:is-active true}) + event {:id (uuid/next) + :type "action" + :name "test-cmd" + :profile-id (:id prof) + :props {:full-key "full-val"} + :context {:version "2.0.0" :initiator "app"} + :tracked-at (ct/now) + :created-at (ct/now) + :source "backend"}] + (audit/submit* th/*system* event) + (let [[row :as rows] (->> (th/db-exec! 
["select * from audit_log"]) + (mapv decode-row))] + (t/is (= 1 (count rows))) + (t/is (= "backend" (:source row))) + (t/is (= "full-val" (get-in row [:props :full-key]))) + (t/is (not= "0.0.0.0" (str (:ip-addr row)))))))) + +(t/deftest backend-process-event-only-telemetry + (with-redefs [cf/flags #{:telemetry}] + (let [prof (th/create-profile* 1 {:is-active true}) + event {:id (uuid/next) + :type "action" + :name "test-cmd" + :profile-id (:id prof) + :props {:full-key "full-val"} + :context {:version "2.0.0" :initiator "app"} + :tracked-at (ct/now) + :created-at (ct/now) + :source "backend"}] + (audit/submit* th/*system* event) + (let [[row :as rows] (->> (th/db-exec! ["select * from audit_log"]) + (mapv decode-row))] + (t/is (= 1 (count rows))) + (t/is (= "telemetry:backend" (:source row))) + (t/is (= "0.0.0.0" (str (:ip-addr row)))))))) + +(t/deftest backend-process-event-both-flags-creates-two-rows + ;; When both :audit-log and :telemetry are active, the backend + ;; process-event must store two rows: one full audit entry and one + ;; telemetry entry. + (with-redefs [cf/flags #{:audit-log :telemetry}] + (let [prof (th/create-profile* 1 {:is-active true}) + event {:id (uuid/next) + :type "action" + :name "test-cmd" + :profile-id (:id prof) + :props {:keep-me "important"} + :context {:version "2.0.0" :initiator "app"} + :tracked-at (ct/now) + :created-at (ct/now) + :source "backend"}] + (audit/submit* th/*system* event) + (let [[row1 row2 :as rows] (->> (th/db-exec! 
["select * from audit_log order by source"]) + (mapv decode-row))] + (t/is (= 2 (count rows))) + ;; First row: full audit-log entry + (t/is (= "backend" (:source row1))) + (t/is (= "important" (get-in row1 [:props :keep-me]))) + (t/is (not= "0.0.0.0" (str (:ip-addr row1)))) + ;; Second row: telemetry entry + (t/is (= "telemetry:backend" (:source row2))) + (t/is (= "0.0.0.0" (str (:ip-addr row2)))) + (let [day-now (ct/truncate (ct/now) :days)] + (t/is (= day-now (:created-at row2))) + (t/is (= day-now (:tracked-at row2)))))))) + +(t/deftest push-events-disabled-when-no-flags-and-no-telemetry + ;; When neither audit-log nor telemetry is enabled, no rows should + ;; be stored. + (with-redefs [cf/flags #{}] + (let [prof (th/create-profile* 1 {:is-active true}) + params {::th/type :push-audit-events + ::rpc/profile-id (:id prof) + :events [{:name "navigate" + :props {:route "dashboard"} + :timestamp (ct/now) + :type "action"}]} + params (with-meta params + {:app.http/request http-request}) + out (th/command! params)] + + (t/is (nil? (:error out))) + (t/is (= 0 (count (th/db-exec! ["select * from audit_log"]))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; PURE HELPER UNIT TESTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest extract-utm-params-utm + ;; UTM params are namespaced under :penpot + (let [result (audit/extract-utm-params {:utm_source "google" + :utm_medium "cpc" + :utm_campaign "spring" + :other "ignored"})] + (t/is (= "google" (:penpot/utm-source result))) + (t/is (= "cpc" (:penpot/utm-medium result))) + (t/is (= "spring" (:penpot/utm-campaign result))) + (t/is (not (contains? 
result :other))))) + +(t/deftest extract-utm-params-mtm + ;; MTM params are also namespaced under :penpot + (let [result (audit/extract-utm-params {:mtm_source "newsletter" + :mtm_medium "email"})] + (t/is (= "newsletter" (:penpot/mtm-source result))) + (t/is (= "email" (:penpot/mtm-medium result))))) + +(t/deftest extract-utm-params-empty + (t/is (= {} (audit/extract-utm-params {}))) + (t/is (= {} (audit/extract-utm-params {:foo "bar" :baz 42})))) + +(t/deftest profile->props-selects-and-merges + ;; Selects profile-props keys and merges with (:props profile) + (let [profile {:id (uuid/next) + :fullname "John" + :email "john@example.com" + :is-active true + :lang "en" + :deleted-field "gone" + :props {:custom-key "custom-val" + :newsletter-updates true}} + result (audit/profile->props profile)] + ;; Selected keys from profile + (t/is (= "John" (:fullname result))) + (t/is (= "john@example.com" (:email result))) + (t/is (true? (:is-active result))) + (t/is (= "en" (:lang result))) + ;; Merged from (:props profile) + (t/is (= "custom-val" (:custom-key result))) + (t/is (true? (:newsletter-updates result))) + ;; Keys not in profile-props are excluded + (t/is (not (contains? result :deleted-field))))) + +(t/deftest profile->props-removes-nils + (let [profile {:id (uuid/next) :fullname nil :email "a@b.com"} + result (audit/profile->props profile)] + (t/is (not (contains? result :fullname))) + (t/is (= "a@b.com" (:email result))))) + +(t/deftest clean-props-removes-reserved + ;; Reserved props (:session-id, :password, :old-password, :token) are stripped + (let [props {:name "test" + :session-id "sess-123" + :password "secret" + :old-password "old-secret" + :token "tok-456" + :valid-key "kept"} + result (audit/clean-props props)] + (t/is (= "test" (:name result))) + (t/is (= "kept" (:valid-key result))) + (t/is (not (contains? result :session-id))) + (t/is (not (contains? result :password))) + (t/is (not (contains? result :old-password))) + (t/is (not (contains? 
result :token))))) + +(t/deftest clean-props-removes-qualified-keys + ;; Qualified keywords (namespaced) are stripped + (let [props {:simple "kept" + ::namespaced "stripped" + :app.rpc/also-stripped true} + result (audit/clean-props props)] + (t/is (= "kept" (:simple result))) + (t/is (not (contains? result ::namespaced))) + (t/is (not (contains? result :app.rpc/also-stripped))))) + +(t/deftest clean-props-removes-nils + (let [props {:a nil :b "val" :c nil} + result (audit/clean-props props)] + (t/is (= "val" (:b result))) + (t/is (not (contains? result :a))) + (t/is (not (contains? result :c))))) + +(t/deftest get-external-session-id-valid + (let [request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-external-session-id" "abc-123")))] + (t/is (= "abc-123" (audit/get-external-session-id request))))) + +(t/deftest get-external-session-id-nil-when-missing + (let [request (reify yetti.request/IRequest + (get-header [_ _] nil))] + (t/is (nil? (audit/get-external-session-id request))))) + +(t/deftest get-external-session-id-nil-when-null-string + (let [request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-external-session-id" "null")))] + (t/is (nil? (audit/get-external-session-id request))))) + +(t/deftest get-external-session-id-nil-when-blank + (let [request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-external-session-id" " ")))] + (t/is (nil? (audit/get-external-session-id request))))) + +(t/deftest get-external-session-id-nil-when-too-long + (let [long-id (apply str (repeat 300 "x")) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-external-session-id" long-id)))] + (t/is (nil? 
(audit/get-external-session-id request))))) + +(t/deftest get-client-user-agent-valid + (let [request (reify yetti.request/IRequest + (get-header [_ name] + (case name "user-agent" "Mozilla/5.0 (Test)")))] + (t/is (= "Mozilla/5.0 (Test)" (audit/get-client-user-agent request))))) + +(t/deftest get-client-user-agent-nil-when-missing + (let [request (reify yetti.request/IRequest + (get-header [_ _] nil))] + (t/is (nil? (audit/get-client-user-agent request))))) + +(t/deftest get-client-user-agent-truncates-long + (let [long-ua (apply str (repeat 600 "x")) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "user-agent" long-ua)))] + (t/is (<= (count (audit/get-client-user-agent request)) 500)))) + +(t/deftest get-client-event-origin-valid + (let [get-client-event-origin (ns-resolve 'app.loggers.audit 'get-client-event-origin) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-event-origin" "workspace")))] + (t/is (= "workspace" (get-client-event-origin request))))) + +(t/deftest get-client-event-origin-nil-when-null + (let [get-client-event-origin (ns-resolve 'app.loggers.audit 'get-client-event-origin) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-event-origin" "null")))] + (t/is (nil? (get-client-event-origin request))))) + +(t/deftest get-client-event-origin-nil-when-blank + (let [get-client-event-origin (ns-resolve 'app.loggers.audit 'get-client-event-origin) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-event-origin" " ")))] + (t/is (nil? 
(get-client-event-origin request))))) + +(t/deftest get-client-event-origin-truncates-long + (let [get-client-event-origin (ns-resolve 'app.loggers.audit 'get-client-event-origin) + long-origin (apply str (repeat 300 "a")) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-event-origin" long-origin)))] + (t/is (<= (count (get-client-event-origin request)) 200)))) + +(t/deftest get-client-version-valid + (let [get-client-version (ns-resolve 'app.loggers.audit 'get-client-version) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-frontend-version" "2.0.0")))] + (t/is (= "2.0.0" (get-client-version request))))) + +(t/deftest get-client-version-nil-when-null + (let [get-client-version (ns-resolve 'app.loggers.audit 'get-client-version) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-frontend-version" "null")))] + (t/is (nil? (get-client-version request))))) + +(t/deftest get-client-version-nil-when-blank + (let [get-client-version (ns-resolve 'app.loggers.audit 'get-client-version) + request (reify yetti.request/IRequest + (get-header [_ name] + (case name "x-frontend-version" " ")))] + (t/is (nil? (get-client-version request))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; INSERT DEFAULTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest insert-only-runs-with-audit-log-flag + ;; insert must be a no-op when :audit-log flag is not set + (with-redefs [app.config/flags #{:telemetry}] + (audit/insert th/*system* {:name "test" :type "action"}) + (t/is (= 0 (count (th/db-exec! ["select * from audit_log"])))))) + +(t/deftest insert-sets-defaults + ;; insert must set defaults and persist when :audit-log is set + (with-redefs [app.config/flags #{:audit-log}] + (audit/insert th/*system* {:name "test-action" :type "action"}) + (let [[row] (->> (th/db-exec! ["select * from audit_log"]) + (mapv decode-row))] + (t/is (some? 
row)) + (t/is (= "test-action" (:name row))) + (t/is (= "action" (:type row))) + (t/is (= "backend" (:source row))) + (t/is (some? (:id row))) + (t/is (some? (:created-at row))) + (t/is (some? (:tracked-at row))) + (t/is (= {} (:props row))) + (t/is (= {} (:context row)))))) diff --git a/backend/test/backend_tests/tasks_telemetry_test.clj b/backend/test/backend_tests/tasks_telemetry_test.clj index c6edf381af..c010a95b72 100644 --- a/backend/test/backend_tests/tasks_telemetry_test.clj +++ b/backend/test/backend_tests/tasks_telemetry_test.clj @@ -6,42 +6,905 @@ (ns backend-tests.tasks-telemetry-test (:require + [app.common.time :as ct] + [app.common.uuid :as uuid] + [app.config :as cf] [app.db :as db] + [app.loggers.audit :as audit] + [app.tasks.telemetry :as telemetry] + [app.util.blob :as blob] + [app.util.json :as json] [backend-tests.helpers :as th] - [clojure.pprint :refer [pprint]] [clojure.test :as t] - [mockery.core :refer [with-mocks]])) + [mockery.core :refer [with-mocks]] + [promesa.exec :as px])) (t/use-fixtures :once th/state-init) -(t/use-fixtures :each th/database-reset) + +;; Mock px/sleep for all tests to avoid 10s random delays. +;; Composed with database-reset so both apply. +(defn- test-fixture [next] + (th/database-reset + (fn [] + (with-redefs [px/sleep (constantly nil)] + (next))))) + +(t/use-fixtures :each test-fixture) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; HELPERS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(defn- insert-telemetry-row! + "Insert a single anonymised audit_log row as the telemetry mode does." + ([name] (insert-telemetry-row! name {})) + ([name {:keys [tracked-at created-at source] + :or {tracked-at (ct/now) + created-at (ct/now) + source "telemetry:backend"}}] + (th/db-insert! 
:audit-log + {:id (uuid/next) + :name name + :type "action" + :source source + :profile-id uuid/zero + :ip-addr (db/inet "0.0.0.0") + :props (db/tjson {}) + :context (db/tjson {}) + :tracked-at tracked-at + :created-at created-at}))) + +(defn- count-telemetry-rows [] + (-> (th/db-exec-one! ["SELECT count(*) AS cnt FROM audit_log WHERE source IN ('telemetry:backend', 'telemetry:frontend')"]) + :cnt + long)) + +(defn- decode-event-batch + "Decode the base64+fressian+zstd event-batch sent to the mock." + [b64-str] + (blob/decode-str b64-str)) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; STATS / REPORT STRUCTURE TESTS (existing behaviour, extended) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (t/deftest test-base-report-data-structure - (with-mocks [mock {:target 'app.tasks.telemetry/send! + (with-mocks [mock {:target 'app.tasks.telemetry/make-legacy-request :return nil}] (let [prof (th/create-profile* 1 {:is-active true - :props {:newsletter-news true}})] + :props {:newsletter-updates true}})] (th/run-task! :telemetry {:send? true :enabled? true}) (t/is (:called? @mock)) (let [[_ data] (-> @mock :call-args)] + (t/is (= :telemetry-legacy-report (:type data))) (t/is (contains? data :subscriptions)) - (t/is (= [(:email prof)] (get-in data [:subscriptions :newsletter-news]))) - (t/is (contains? data :total-fonts)) - (t/is (contains? data :total-users)) - (t/is (contains? data :total-projects)) - (t/is (contains? data :total-files)) - (t/is (contains? data :total-teams)) - (t/is (contains? data :total-comments)) - (t/is (contains? data :instance-id)) - (t/is (contains? data :jvm-cpus)) - (t/is (contains? data :jvm-heap-max)) - (t/is (contains? data :max-users-on-team)) - (t/is (contains? data :avg-users-on-team)) - (t/is (contains? data :max-files-on-project)) - (t/is (contains? data :avg-files-on-project)) - (t/is (contains? data :max-projects-on-team)) - (t/is (contains? 
data :avg-files-on-project)) + (t/is (= [(:email prof)] (:subscriptions data))) + (t/is (contains? data :stats)) + (let [stats (:stats data)] + (t/is (contains? stats :total-fonts)) + (t/is (contains? stats :total-users)) + (t/is (contains? stats :total-projects)) + (t/is (contains? stats :total-files)) + (t/is (contains? stats :total-teams)) + (t/is (contains? stats :total-comments)) + (t/is (contains? stats :jvm-cpus)) + (t/is (contains? stats :jvm-heap-max)) + (t/is (contains? stats :max-users-on-team)) + (t/is (contains? stats :avg-users-on-team)) + (t/is (contains? stats :max-files-on-project)) + (t/is (contains? stats :avg-files-on-project)) + (t/is (contains? stats :max-projects-on-team)) + (t/is (contains? stats :avg-files-on-project)) + (t/is (contains? stats :email-domains)) + (t/is (= ["nodomain.com"] (:email-domains stats))) + ;; public-uri must be a string + (t/is (string? (:public-uri stats))) + (t/is (not-empty (:public-uri stats)))) (t/is (contains? data :version)) - (t/is (contains? data :email-domains)) - (t/is (= ["nodomain.com"] (:email-domains data))))))) + (t/is (contains? data :instance-id)))))) + +(t/deftest test-telemetry-disabled-no-send + ;; When telemetry is disabled and no newsletter subscriptions exist, + ;; make-legacy-request must not be called at all. + (with-mocks [mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{}] + (th/create-profile* 1 {:is-active true}) + (th/run-task! :telemetry {:send? true}) + (t/is (not (:called? @mock)))))) + +(t/deftest test-telemetry-disabled-newsletter-only-send + ;; When telemetry is disabled but a user has newsletter-updates opted in, + ;; make-legacy-request is called once with only subscriptions + version (no stats). + (with-mocks [mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{}] + (let [prof (th/create-profile* 1 {:is-active true + :props {:newsletter-updates true}})] + (th/run-task! 
:telemetry {:send? true}) + (t/is (:called? @mock)) + (let [[_ data] (:call-args @mock)] + ;; Limited payload — no stats + (t/is (contains? data :subscriptions)) + (t/is (contains? data :version)) + (t/is (not (contains? data :stats))) + (t/is (= [(:email prof)] (:subscriptions data)))))))) + +(t/deftest test-send-is-skipped-when-send?-false + ;; Passing send?=false must suppress all HTTP calls even when enabled. + (with-mocks [mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry}] + (th/create-profile* 1 {:is-active true}) + (th/run-task! :telemetry {:send? false :enabled? true}) + (t/is (not (:called? @mock)))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; AUDIT-EVENT BATCH COLLECTION TESTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-no-audit-events-no-batch-call + ;; When telemetry is enabled but there are no audit_log rows with + ;; source='telemetry', the batch send path must not be invoked. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return true}] + (with-redefs [cf/flags #{:telemetry}] + (th/run-task! :telemetry {:send? true :enabled? true}) + (t/is (:called? @legacy-mock)) + (t/is (not (:called? @batch-mock)))))) + +(t/deftest test-audit-events-sent-and-deleted-on-success + ;; Happy path: telemetry rows are collected, shipped as a batch and + ;; deleted from the table when the endpoint returns success. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return true}] + (with-redefs [cf/flags #{:telemetry}] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + (insert-telemetry-row! "update-file") + + (t/is (= 3 (count-telemetry-rows))) + + (th/run-task! :telemetry {:send? true :enabled? 
true}) + + ;; batch send was called at least once + (t/is (:called? @batch-mock)) + + ;; all rows deleted after successful send + (t/is (= 0 (count-telemetry-rows)))))) + +(t/deftest test-audit-events-kept-on-batch-failure + ;; When the batch endpoint returns failure the rows must be retained + ;; so the next scheduled run can retry. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return false}] + (with-redefs [cf/flags #{:telemetry}] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + + (th/run-task! :telemetry {:send? true :enabled? true}) + + (t/is (:called? @batch-mock)) + ;; rows still present — not deleted on failure + (t/is (= 2 (count-telemetry-rows)))))) + +(t/deftest test-audit-events-not-collected-when-audit-log-flag-set + ;; When the :audit-log flag is active, mode C is disabled and the + ;; batch path must never run (audit-log owns those rows instead). + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return true}] + (with-redefs [cf/flags #{:telemetry :audit-log}] + (insert-telemetry-row! "navigate") + + (th/run-task! :telemetry {:send? true :enabled? true}) + + (t/is (not (:called? @batch-mock))) + ;; row untouched + (t/is (= 1 (count-telemetry-rows)))))) + +(t/deftest test-batch-payload-contains-required-fields + ;; Inspect the actual arguments forwarded to send-event-batch to + ;; verify the payload carries instance-id, version and events. + (let [captured (atom nil)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/send-event-batch + (fn [_cfg batch] + (reset! captured batch) + true)] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + + (th/run-task! :telemetry {:send? true :enabled? 
true}) + + (t/is (some? @captured)) + (let [batch @captured] + ;; batch is a seq of event maps + (t/is (seq batch)) + (t/is (= 2 (count batch))) + ;; each event has name, type, source — profile-id is preserved, + ;; props and ip-addr are stripped + (let [ev (first batch)] + (t/is (contains? ev :name)) + (t/is (contains? ev :type)) + (t/is (contains? ev :source)) + (t/is (contains? ev :profile-id)) + ;; props are present but empty (stripped at ingest time) + (t/is (= {} (:props ev))) + (t/is (not (contains? ev :ip-addr))))))))) + +(t/deftest test-batch-encoding-is-decodable + ;; Verify that encode-batch produces a blob that round-trips back + ;; through blob/decode to the original data. + (let [events [{:name "navigate" :type "action" :source "telemetry" + :tracked-at (ct/now)} + {:name "create-file" :type "action" :source "telemetry" + :tracked-at (ct/now)}] + ;; Call the private fn through the ns-mapped var + encode (ns-resolve 'app.tasks.telemetry 'encode-batch) + encoded (encode events) + decoded (decode-event-batch encoded)] + (t/is (string? encoded)) + (t/is (seq decoded)) + (t/is (= (count events) (count decoded))) + (t/is (= "navigate" (:name (first decoded)))) + (t/is (= "create-file" (:name (second decoded)))))) + +(t/deftest test-multiple-batches-when-many-events + ;; Lower batch-size to 1 so that 3 events produce 3 separate + ;; HTTP requests and verify all are sent and all rows deleted. + (let [call-count (atom 0)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/batch-size 1 + telemetry/send-event-batch + (fn [_cfg _batch] + (swap! call-count inc) + true)] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + (insert-telemetry-row! "update-file") + + (th/run-task! :telemetry {:send? true :enabled? 
true}) + + ;; Each event is fetched and sent in its own loop iteration + (t/is (= 3 @call-count)) + ;; All rows deleted after all iterations succeed + (t/is (= 0 (count-telemetry-rows))))))) + +(t/deftest test-partial-failure-stops-remaining-batches + ;; With batch-size 1, when the second send fails the loop stops. + ;; The first batch was already deleted; the two remaining rows + ;; are retained for the next run. + (let [call-count (atom 0)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/batch-size 1 + telemetry/send-event-batch + (fn [_cfg _batch] + (swap! call-count inc) + ;; fail on the second call + (not= 2 @call-count))] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + (insert-telemetry-row! "update-file") + + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; Stopped at iteration 2 — third event never attempted + (t/is (= 2 @call-count)) + ;; First batch was deleted on success; 2 rows remain for retry + (t/is (= 2 (count-telemetry-rows))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; GC / RETENTION-WINDOW TESTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-gc-purges-events-older-than-7-days + ;; Insert events from 8 days ago (stale) and from today (fresh). + ;; After the task runs, stale events must be purged by GC and fresh + ;; ones shipped by the batch sender. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return true}] + (with-redefs [cf/flags #{:telemetry}] + (let [now (ct/now) + eight-days (ct/minus now (ct/duration {:days 8}))] + ;; Stale events (older than 7 days) + (insert-telemetry-row! "stale-1" {:created-at eight-days :tracked-at eight-days}) + (insert-telemetry-row! 
"stale-2" {:created-at eight-days :tracked-at eight-days}) + ;; Fresh events (today) + (insert-telemetry-row! "fresh-1" {:created-at now :tracked-at now}) + (insert-telemetry-row! "fresh-2" {:created-at now :tracked-at now}) + + (t/is (= 4 (count-telemetry-rows))) + + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; GC purged the 2 stale rows, batch sender shipped the 2 fresh ones + (t/is (= 0 (count-telemetry-rows))))))) + +(t/deftest test-gc-keeps-events-within-7-day-window + ;; When all events are within the 7-day window, GC must not delete + ;; anything and all rows are forwarded to the batch sender. + (let [batch-events (atom nil)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/send-event-batch + (fn [_cfg batch] + (reset! batch-events batch) + true)] + (let [six-days-ago (ct/minus (ct/now) (ct/duration {:days 6}))] + (insert-telemetry-row! "recent-1" {:created-at six-days-ago :tracked-at six-days-ago}) + (insert-telemetry-row! "recent-2" {:created-at six-days-ago :tracked-at six-days-ago})) + + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; Both events forwarded — GC left them alone + (t/is (= 2 (count @batch-events))) + (t/is (= 0 (count-telemetry-rows))))))) + +(t/deftest test-gc-deletes-only-stale-events + ;; Insert a mix of stale (8 days old) and fresh (1 day old) events. + ;; After GC, only fresh events should remain for the batch sender. + (let [batch-events (atom nil)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/send-event-batch + (fn [_cfg batch] + (reset! batch-events batch) + true)] + (let [eight-days (ct/minus (ct/now) (ct/duration {:days 8})) + one-day (ct/minus (ct/now) (ct/duration {:days 1}))] + (insert-telemetry-row! "stale" {:created-at eight-days :tracked-at eight-days}) + (insert-telemetry-row! 
"fresh" {:created-at one-day :tracked-at one-day})) + + (t/is (= 2 (count-telemetry-rows))) + + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; GC purged stale, batch shipped fresh + (t/is (= 1 (count @batch-events))) + (t/is (= "fresh" (:name (first @batch-events)))) + (t/is (= 0 (count-telemetry-rows))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; ANONYMITY TESTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-telemetry-rows-stored-without-pii + ;; Rows written to audit_log in telemetry mode must carry no PII: + ;; empty props, zeroed ip, profile-id=zero, source='telemetry'. + ;; Safe context fields (browser, os, version, etc.) are preserved + ;; but session-linking and access-token fields are stripped. + (with-redefs [cf/flags #{:telemetry}] + (let [_prof (th/create-profile* 1 {:is-active true}) + safe-ctx {:browser "Chrome" + :browser-version "120.0" + :os "Linux" + :version "2.0.0"}] + ;; Simulate what app.loggers.audit/process-event does in mode C + (th/db-insert! :audit-log + {:id (uuid/next) + :name "create-project" + :type "action" + :source "telemetry:backend" + :profile-id uuid/zero + :ip-addr (db/inet "0.0.0.0") + :props (db/tjson {}) + :context (db/tjson safe-ctx) + :tracked-at (ct/now) + :created-at (ct/now)}) + + (let [[row] (th/db-exec! ["SELECT * FROM audit_log WHERE source = 'telemetry:backend'"])] + (t/is (= "telemetry:backend" (:source row))) + ;; props are always empty + (t/is (= "{}" (str (:props row)))) + ;; ip_addr is the sentinel zero address + (t/is (= "0.0.0.0" (str (:ip-addr row)))) + ;; profile-id is uuid/zero — not a real user id + (t/is (= uuid/zero (:profile-id row))))))) + +(t/deftest test-batch-events-contain-no-pii-fields + ;; The event maps forwarded to send-event-batch must not carry props, + ;; ip-addr or profile-id. Safe context fields (browser, os, etc.) may + ;; be present but session-linking keys must be absent. 
+ (let [captured-batch (atom nil) + ;; Insert a row that carries safe context (as the real path does) + safe-ctx {:browser "Firefox" :browser-version "121.0" + :os "macOS" :session "should-be-stripped" + :external-session-id "also-stripped"}] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/send-event-batch + (fn [_cfg batch] + (reset! captured-batch batch) + true)] + ;; Insert with safe context already pre-filtered (as the ingest path does) + (th/db-insert! :audit-log + {:id (uuid/next) + :name "navigate" + :type "action" + :source "telemetry:frontend" + :profile-id uuid/zero + :ip-addr (db/inet "0.0.0.0") + :props (db/tjson {}) + :context (db/tjson (dissoc safe-ctx :session :external-session-id)) + :tracked-at (ct/now) + :created-at (ct/now)}) + + (th/run-task! :telemetry {:send? true :enabled? true}) + + (t/is (= 1 (count @captured-batch))) + (let [ev (first @captured-batch)] + ;; must have the core identity fields including profile-id + (t/is (contains? ev :name)) + (t/is (contains? ev :type)) + (t/is (contains? ev :source)) + (t/is (contains? ev :tracked-at)) + (t/is (contains? ev :profile-id)) + ;; props are present but empty (stripped at ingest time) + (t/is (= {} (:props ev))) + ;; ip-addr is stripped + (t/is (not (contains? ev :ip-addr))) + ;; context may be present and must not contain session-linking keys + (when-let [ctx (:context ev)] + (t/is (not (contains? ctx :session))) + (t/is (not (contains? ctx :external-session-id))) + ;; safe keys should be present + (t/is (contains? ctx :browser)))))))) + +(t/deftest test-telemetry-rows-have-day-precision-timestamps + ;; Telemetry events must be stored with timestamps truncated to day + ;; precision so that exact event timing cannot be inferred. 
+ (with-redefs [cf/flags #{:telemetry}] + (let [process-event (ns-resolve 'app.loggers.audit 'process-event) + profile (th/create-profile* 1 {:is-active true}) + tnow (ct/now) + event {:type "action" + :name "create-project" + :profile-id (:id profile) + :source "backend" + :props {} + :context {} + :created-at tnow + :tracked-at tnow + :ip-addr "0.0.0.0"}] + (db/tx-run! th/*system* process-event event) + (let [[row] (th/db-exec! ["SELECT * FROM audit_log WHERE source = 'telemetry:backend'"])] + (t/is (some? row)) + (let [created-at (:created-at row) + tracked-at (:tracked-at row) + day-now (ct/truncate (ct/now) :days)] + ;; Both timestamps must equal midnight of the current day + (t/is (= day-now created-at)) + (t/is (= day-now tracked-at))))))) + +(t/deftest test-backend-ingest-full-row-shape + ;; Verify the full row shape stored by process-event in telemetry mode: + ;; source=telemetry:backend, empty props, zeroed ip, context filtered to safe + ;; backend keys only, profile-id preserved, timestamps truncated. + (with-redefs [cf/flags #{:telemetry}] + (let [process-event (ns-resolve 'app.loggers.audit 'process-event) + profile (th/create-profile* 1 {:is-active true}) + tnow (ct/now) + event {:type "action" + :name "create-project" + :profile-id (:id profile) + :source "backend" + :context {:initiator "app" + :version "2.0.0" + :client-version "1.0" + :client-user-agent "Mozilla/5.0" + :external-session-id "should-be-stripped" + :session "also-stripped"} + :props {:some-prop "value"} + :created-at tnow + :tracked-at tnow + :ip-addr "0.0.0.0"}] + (db/tx-run! th/*system* process-event event) + + (let [[row] (th/db-exec! ["SELECT * FROM audit_log WHERE source = 'telemetry:backend'"])] + (t/is (some? 
row)) + ;; source + (t/is (= "telemetry:backend" (:source row))) + ;; profile-id preserved + (t/is (= (:id profile) (:profile-id row))) + ;; name + (t/is (= "create-project" (:name row))) + ;; type + (t/is (= "action" (:type row))) + ;; props stripped to empty + (t/is (= "{}" (str (:props row)))) + ;; ip zeroed + (t/is (= "0.0.0.0" (str (:ip-addr row)))) + ;; timestamps truncated to day + (let [day-now (ct/truncate (ct/now) :days)] + (t/is (= day-now (:created-at row))) + (t/is (= day-now (:tracked-at row)))) + ;; context filtered: only safe backend keys retained + (let [ctx (db/decode-transit-pgobject (:context row))] + (t/is (= "app" (:initiator ctx))) + (t/is (= "2.0.0" (:version ctx))) + (t/is (= "1.0" (:client-version ctx))) + (t/is (= "Mozilla/5.0" (:client-user-agent ctx))) + ;; session-linking keys stripped + (t/is (not (contains? ctx :external-session-id))) + (t/is (not (contains? ctx :session)))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; FILTER-TELEMETRY-CONTEXT UNIT TESTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-filter-telemetry-context-keeps-browser-fields + ;; Safe environment fields must survive the filter. 
+ (let [filter-telemetry-context (ns-resolve 'app.loggers.audit 'filter-telemetry-context) + ctx {:browser "Chrome" + :browser-version "120.0" + :engine "Blink" + :engine-version "120.0" + :os "Windows 11" + :os-version "11" + :device-type "unknown" + :device-arch "amd64" + :locale "en-US" + :version "2.0.0" + :screen-width 1920 + :screen-height 1080 + :event-origin "workspace"} + result (:context (filter-telemetry-context {:source "frontend" :context ctx}))] + (t/is (= "Chrome" (:browser result))) + (t/is (= "120.0" (:browser-version result))) + (t/is (= "Windows 11" (:os result))) + (t/is (= "en-US" (:locale result))) + (t/is (= "workspace" (:event-origin result))) + (t/is (= 1920 (:screen-width result))))) + +(t/deftest test-filter-telemetry-context-strips-pii-keys + ;; Session-linking and access-token fields must be removed. + (let [filter-telemetry-context (ns-resolve 'app.loggers.audit 'filter-telemetry-context) + ctx {:browser "Firefox" + :session "abc-session-id" + :external-session-id "ext-123" + :file-stats {:total-shapes 42} + :initiator "app" + :access-token-id "tok-456" + :access-token-type "api-key"} + result (:context (filter-telemetry-context {:source "frontend" :context ctx}))] + (t/is (= "Firefox" (:browser result))) + (t/is (not (contains? result :session))) + (t/is (not (contains? result :external-session-id))) + (t/is (not (contains? result :file-stats))) + (t/is (not (contains? result :initiator))) + (t/is (not (contains? result :access-token-id))) + (t/is (not (contains? result :access-token-type))))) + +(t/deftest test-filter-telemetry-context-empty-input + ;; An empty context should return an empty map without error. 
+ (let [filter-telemetry-context (ns-resolve 'app.loggers.audit 'filter-telemetry-context)] + (t/is (= {} (:context (filter-telemetry-context {:source "frontend" :context {}})))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; FILTER-TELEMETRY-PROPS UNIT TESTS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-filter-telemetry-props-login-event-keeps-safe-profile-fields + ;; Login/register/update events carry safe profile-derived fields: + ;; :lang, :auth-backend, :email-domain. Raw :email is stripped. + (let [ftp (ns-resolve 'app.loggers.audit 'filter-telemetry-props)] + ;; backend login-with-password + (let [result (ftp {:source "backend" + :name "login-with-password" + :type "action" + :props {:email "user@example.com" + :fullname "John Doe" + :lang "en" + :auth-backend "password" + :id (uuid/next)}})] + (t/is (= "en" (get-in result [:props :lang]))) + (t/is (= "password" (get-in result [:props :auth-backend]))) + (t/is (= "example.com" (get-in result [:props :email-domain]))) + ;; Raw email and fullname are stripped + (t/is (not (contains? (:props result) :email))) + (t/is (not (contains? (:props result) :fullname))) + ;; UUID values survive the xf:filter-telemetry-props filter + (t/is (some? 
(get-in result [:props :id])))) + + ;; backend register-profile + (let [result (ftp {:source "backend" + :name "register-profile" + :type "action" + :props {:email "new@corp.org" + :lang "es" + :auth-backend "oidc"}})] + (t/is (= "es" (get-in result [:props :lang]))) + (t/is (= "oidc" (get-in result [:props :auth-backend]))) + (t/is (= "corp.org" (get-in result [:props :email-domain])))) + + ;; backend login-with-oidc + (let [result (ftp {:source "backend" + :name "login-with-oidc" + :type "action" + :props {:email "u@corp.io" :lang "fr" :auth-backend "oidc"}})] + (t/is (= "fr" (get-in result [:props :lang]))) + (t/is (= "oidc" (get-in result [:props :auth-backend]))) + (t/is (= "corp.io" (get-in result [:props :email-domain])))) + + ;; backend update-profile + (let [result (ftp {:source "backend" + :name "update-profile" + :type "action" + :props {:email "u@corp.io" :lang "de"}})] + (t/is (= "de" (get-in result [:props :lang]))) + (t/is (= "corp.io" (get-in result [:props :email-domain])))))) + +(t/deftest test-filter-telemetry-props-frontend-identify-keeps-safe-profile-fields + ;; Frontend identify events also carry safe profile-derived fields. + (let [ftp (ns-resolve 'app.loggers.audit 'filter-telemetry-props)] + (let [result (ftp {:source "frontend" + :name "signin" + :type "identify" + :props {:email "user@example.com" + :fullname "Jane Doe" + :lang "pt" + :auth-backend "password" + :some-string "should-be-stripped"}})] + (t/is (= "pt" (get-in result [:props :lang]))) + (t/is (= "password" (get-in result [:props :auth-backend]))) + (t/is (= "example.com" (get-in result [:props :email-domain]))) + ;; PII stripped + (t/is (not (contains? (:props result) :email))) + (t/is (not (contains? (:props result) :fullname))) + ;; String values that are not UUID/boolean/number are stripped + (t/is (not (contains? (:props result) :some-string)))))) + +(t/deftest test-filter-telemetry-props-instance-start-passthrough + ;; instance-start trigger events pass through as-is. 
+ (let [ftp (ns-resolve 'app.loggers.audit 'filter-telemetry-props) + props {:total-teams 5 :total-users 42 :version "2.0"} + result (ftp {:source "backend" + :name "instance-start" + :type "trigger" + :props props})] + (t/is (= props (:props result))))) + +(t/deftest test-filter-telemetry-props-generic-event-keeps-uuid-boolean-number + ;; Generic events (create-file, etc.) keep only entries + ;; whose values are UUIDs, booleans, or numbers. + (let [ftp (ns-resolve 'app.loggers.audit 'filter-telemetry-props) + id (uuid/next) + result (ftp {:source "frontend" + :name "create-file" + :type "action" + :props {:project-id id + :team-id id + :route "dashboard-files" + :count 42 + :active true + :label "should-be-stripped"}})] + ;; UUIDs survive + (t/is (= id (get-in result [:props :project-id]))) + (t/is (= id (get-in result [:props :team-id]))) + ;; Numbers survive + (t/is (= 42 (get-in result [:props :count]))) + ;; Booleans survive + (t/is (true? (get-in result [:props :active]))) + ;; Strings are stripped + (t/is (not (contains? (:props result) :route))) + (t/is (not (contains? (:props result) :label))))) + +(t/deftest test-filter-telemetry-props-navigate-keeps-route-and-ids + ;; Frontend navigate events keep specific routing keys: :route, + ;; :file-id, :team-id, :page-id. These ids are strings because + ;; routing events don't coerce them. All other props are stripped. 
+ (let [ftp (ns-resolve 'app.loggers.audit 'filter-telemetry-props) + file-id (str (uuid/next)) + team-id (str (uuid/next)) + page-id (str (uuid/next)) + result (ftp {:source "frontend" + :name "navigate" + :type "action" + :props {:file-id file-id + :team-id team-id + :page-id page-id + :route "dashboard-index" + :session "abc" + :count 42 + :active true + :label "should-be-stripped"}})] + ;; Allowed routing keys survive (as strings, not coerced to UUID) + (t/is (= file-id (get-in result [:props :file-id]))) + (t/is (= team-id (get-in result [:props :team-id]))) + (t/is (= page-id (get-in result [:props :page-id]))) + (t/is (= "dashboard-index" (get-in result [:props :route]))) + ;; Everything else is stripped + (t/is (not (contains? (:props result) :session))) + (t/is (not (contains? (:props result) :count))) + (t/is (not (contains? (:props result) :active))) + (t/is (not (contains? (:props result) :label))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; SEND-EVENT-BATCH PAYLOAD STRUCTURE +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-send-event-batch-payload-structure + ;; Verify the HTTP request sent by send-event-batch carries the + ;; correct outer wrapper: :type, :version, :instance-id, :events. + (let [captured-request (atom nil)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + http-mock {:target 'app.http.client/req + :return {:status 200}}] + (with-redefs [cf/flags #{:telemetry}] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; http/req was called (by both send-legacy-data and send-event-batch) + (t/is (:called? @http-mock)) + ;; Find the call whose body contains :telemetry-events + (let [calls (filter (fn [args] + (let [[_ request] args + body (:body request)] + (and (string? 
body) + (re-find #"telemetry-events" body)))) + (:call-args-list @http-mock))] + (t/is (= 1 (count calls))) + (let [[_ request] (first calls) + body (json/decode (:body request))] + ;; Outer payload fields + (t/is (= "telemetry-events" (name (:type body)))) + (t/is (string? (:version body))) + (t/is (some? (:instance-id body))) + ;; :events is a base64-encoded blob + (t/is (string? (:events body))) + (t/is (pos? (count (:events body)))))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; TASK BRANCH COVERAGE +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-enabled-no-subs-no-events-legacy-still-sends + ;; When telemetry is enabled, there are no newsletter subscriptions + ;; and no audit_log rows, the legacy report must still be sent. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return true}] + (with-redefs [cf/flags #{:telemetry}] + ;; No profiles with newsletter-updates, no telemetry rows + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; Legacy report was sent + (t/is (:called? @legacy-mock)) + (let [[_ data] (:call-args @legacy-mock)] + (t/is (= :telemetry-legacy-report (:type data))) + (t/is (contains? data :stats)) + ;; No subscriptions in the payload + (t/is (not (contains? data :subscriptions)))) + + ;; No events to batch-send + (t/is (not (:called? @batch-mock)))))) + +(t/deftest test-legacy-succeeds-batch-fails + ;; The legacy report and event batch are independent paths. + ;; When the batch endpoint fails, the legacy report must still + ;; have been sent successfully. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return false}] + (with-redefs [cf/flags #{:telemetry}] + (insert-telemetry-row! "navigate") + + (th/run-task! :telemetry {:send? true :enabled? 
true}) + + ;; Legacy report was sent + (t/is (:called? @legacy-mock)) + (let [[_ data] (:call-args @legacy-mock)] + (t/is (= :telemetry-legacy-report (:type data)))) + + ;; Batch send was attempted but failed + (t/is (:called? @batch-mock)) + ;; Row still present (not deleted on failure) + (t/is (= 1 (count-telemetry-rows)))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; GC + BATCH FAILURE INTERACTION +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-gc-runs-even-when-batch-fails + ;; GC must purge stale events regardless of whether the subsequent + ;; batch send succeeds or fails. + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil} + batch-mock {:target 'app.tasks.telemetry/send-event-batch + :return false}] + (with-redefs [cf/flags #{:telemetry}] + (let [eight-days (ct/minus (ct/now) (ct/duration {:days 8})) + one-day (ct/minus (ct/now) (ct/duration {:days 1}))] + ;; Stale events (should be GC'd) + (insert-telemetry-row! "stale-1" {:created-at eight-days :tracked-at eight-days}) + (insert-telemetry-row! "stale-2" {:created-at eight-days :tracked-at eight-days}) + ;; Fresh event (should survive GC but fail to send) + (insert-telemetry-row! "fresh" {:created-at one-day :tracked-at one-day}) + + (t/is (= 3 (count-telemetry-rows))) + + (th/run-task! :telemetry {:send? true :enabled? true}) + + ;; Batch send was attempted (and failed) + (t/is (:called? @batch-mock)) + ;; Stale rows were purged by GC, fresh row remains + (t/is (= 1 (count-telemetry-rows))) + (t/is (= "fresh" (:name (first (th/db-exec! 
["SELECT name FROM audit_log WHERE source LIKE 'telemetry:%'"]))))))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; ROW->EVENT CONTEXT GUARANTEE +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-row->event-always-includes-context + ;; row->event must always include :context as a map, even when the + ;; DB column contains an empty transit object. + (let [row->event (ns-resolve 'app.tasks.telemetry 'row->event)] + ;; With non-empty context + (let [ev (row->event {:name "test" :type "action" :source "telemetry:backend" + :tracked-at (ct/now) :profile-id uuid/zero + :context (db/tjson {:browser "Chrome"})})] + (t/is (contains? ev :context)) + (t/is (= {:browser "Chrome"} (:context ev)))) + + ;; With empty context ({} in transit) + (let [ev (row->event {:name "test" :type "action" :source "telemetry:backend" + :tracked-at (ct/now) :profile-id uuid/zero + :context (db/tjson {})})] + (t/is (contains? ev :context)) + (t/is (= {} (:context ev)))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; NO DUPLICATE EVENTS ON SUCCESS +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest test-no-duplicate-events-after-successful-send + ;; After a successful batch send, the sent rows must be deleted. + ;; Running the task again must NOT re-send the same events. + (let [send-count (atom 0)] + (with-mocks [legacy-mock {:target 'app.tasks.telemetry/make-legacy-request + :return nil}] + (with-redefs [cf/flags #{:telemetry} + telemetry/send-event-batch + (fn [_cfg _batch] + (swap! send-count inc) + true)] + (insert-telemetry-row! "navigate") + (insert-telemetry-row! "create-file") + + (t/is (= 2 (count-telemetry-rows))) + + ;; First run: sends and deletes + (th/run-task! :telemetry {:send? true :enabled? true}) + (t/is (= 1 @send-count)) + (t/is (= 0 (count-telemetry-rows))) + + ;; Second run: no events to send + (th/run-task! :telemetry {:send? 
true :enabled? true}) + (t/is (= 1 @send-count)) ;; still 1, not 2 + (t/is (= 0 (count-telemetry-rows))))))) diff --git a/backend/test/backend_tests/util_blob_test.clj b/backend/test/backend_tests/util_blob_test.clj new file mode 100644 index 0000000000..fd474f9d88 --- /dev/null +++ b/backend/test/backend_tests/util_blob_test.clj @@ -0,0 +1,106 @@ +;; This Source Code Form is subject to the terms of the Mozilla Public +;; License, v. 2.0. If a copy of the MPL was not distributed with this +;; file, You can obtain one at http://mozilla.org/MPL/2.0/. +;; +;; Copyright (c) KALEIDOS INC + +(ns backend-tests.util-blob-test + (:require + [app.util.blob :as blob] + [clojure.string :as str] + [clojure.test :as t])) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; encode-str / decode-str round-trip +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest encode-str-roundtrip-empty-map + (let [data {}] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-empty-vector + (let [data []] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-nil + (let [data nil] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-simple-map + (let [data {:name "penpot" :version 42}] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-nested-structure + (let [data {:users [{:name "Alice" :tags #{"admin" "active"}} + {:name "Bob" :tags #{"user"}}] + :config {:debug false :timeout 3000}}] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-vector-of-maps + (let [data [{:name "navigate" :type "action" :source "telemetry"} + {:name "create-file" :type "action" :source "telemetry"}]] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-keywords-and-strings + (let [data {:keyword/value :foo 
+ :string/value "hello world" + :boolean/value true + :nil/value nil}] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +(t/deftest encode-str-roundtrip-numeric-types + (let [data {:int 42 + :neg -7 + :zero 0 + :big 9999999999}] + (t/is (= data (blob/decode-str (blob/encode-str data)))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; URL-safe encoding properties +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest encode-str-url-safe-no-unsafe-chars + ;; URL-safe base64 must not contain +, /, or padding = + (let [data {:a (apply str (repeat 100 "x")) + :b (range 200) + :c {"key" "value with special chars: @#$%^&*()"}} + encoded (blob/encode-str data)] + (t/is (not (str/includes? encoded "+"))) + (t/is (not (str/includes? encoded "/"))) + (t/is (not (str/includes? encoded "="))))) + +(t/deftest encode-str-url-safe-roundtrip-after-encoding + ;; Ensure the URL-safe encoding still round-trips correctly + (let [data {:payload (vec (range 500)) + :nested {:a {:b {:c "deep"}}}} + encoded (blob/encode-str data) + decoded (blob/decode-str encoded)] + (t/is (= data decoded)))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; version-specific encoding +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(t/deftest encode-str-with-version-4 + (let [data {:events [{:name "click"} {:name "scroll"}]} + encoded (blob/encode-str data {:version 4}) + decoded (blob/decode-str encoded)] + (t/is (= data decoded)))) + +(t/deftest encode-str-with-version-5 + (let [data {:events [{:name "click"} {:name "scroll"}]} + encoded (blob/encode-str data {:version 5}) + decoded (blob/decode-str encoded)] + (t/is (= data decoded)))) + +(t/deftest encode-str-with-version-1 + (let [data {:simple "data"} + encoded (blob/encode-str data {:version 1}) + decoded (blob/decode-str encoded)] + (t/is (= data decoded)))) + +(t/deftest encode-str-with-version-3 + (let [data {:simple 
"data"} + encoded (blob/encode-str data {:version 3}) + decoded (blob/decode-str encoded)] + (t/is (= data decoded)))) diff --git a/frontend/src/app/main.cljs b/frontend/src/app/main.cljs index 7bcbd1469f..fa0a853cd9 100644 --- a/frontend/src/app/main.cljs +++ b/frontend/src/app/main.cljs @@ -68,11 +68,8 @@ ptk/WatchEvent (watch [_ _ stream] (rx/merge - (if (contains? cf/flags :audit-log) - (rx/of (ev/initialize)) - (rx/empty)) - - (rx/of (dp/refresh-profile)) + (rx/of (ev/initialize) + (dp/refresh-profile)) ;; Watch for profile deletion events (->> stream diff --git a/frontend/src/app/main/data/auth.cljs b/frontend/src/app/main/data/auth.cljs index e3aa763ad6..41ff00a6b2 100644 --- a/frontend/src/app/main/data/auth.cljs +++ b/frontend/src/app/main/data/auth.cljs @@ -61,26 +61,30 @@ (rx/of (dcm/go-to-dashboard-recent {:team-id team-id})))))))] (ptk/reify ::logged-in - ev/Event - (-data [_] - {::ev/name "signin" - ::ev/type "identify" - :email (:email profile) - :auth-backend (:auth-backend profile) - :fullname (:fullname profile) - :is-muted (:is-muted profile) - :default-team-id (:default-team-id profile) - :default-project-id (:default-project-id profile)}) - ptk/WatchEvent (watch [_ _ stream] (cf/initialize-external-context-info) + (->> (rx/merge (rx/of (dp/set-profile profile) (ws/initialize) (dtm/fetch-teams)) + ;; We schedule this event to be executed a bit later, + ;; when the profile is already set + (->> (rx/of (ev/event {::ev/name "signin" + ::ev/type "identify" + :id (:id profile) + :email (:email profile) + :auth-backend (:auth-backend profile) + :fullname (:fullname profile) + :is-muted (:is-muted profile) + :default-team-id (:default-team-id profile) + :default-project-id (:default-project-id profile)})) + (rx/observe-on :async)) + + (->> stream (rx/filter (ptk/type? 
::dtm/teams-fetched)) (rx/take 1) diff --git a/frontend/src/app/main/data/event.cljs b/frontend/src/app/main/data/event.cljs index dfc925cce8..b8c439f553 100644 --- a/frontend/src/app/main/data/event.cljs +++ b/frontend/src/app/main/data/event.cljs @@ -25,6 +25,7 @@ [app.util.storage :as storage] [beicon.v2.core :as rx] [beicon.v2.operators :as rxo] + [cuerdas.core :as str] [lambdaisland.uri :as u] [potok.v2.core :as ptk])) @@ -376,83 +377,105 @@ (l/debug :hint "event instrumentation initialized") - (->> (rx/merge - (->> (rx/from-atom buffer) - (rx/filter #(pos? (count %))) - (rx/debounce 2000)) - (->> stream - (rx/filter (ptk/type? :app.main.data.profile/logout)) - (rx/observe-on :async))) - (rx/map (fn [_] - (into [] (take max-chunk-size) @buffer))) - (rx/with-latest-from profile) - (rx/mapcat (fn [[chunk profile-id]] - (let [events (filterv #(= profile-id (:profile-id %)) chunk)] - (->> (persist-events events) - (rx/tap (fn [_] - (l/debug :hint "events chunk persisted" :total (count chunk)))) - (rx/map (constantly chunk)))))) - (rx/take-until stopper) - (rx/subs! (fn [chunk] - (swap! buffer remove-from-buffer (count chunk))) - (fn [cause] - (l/error :hint "unexpected error on audit persistence" :cause cause)) - (fn [] - (l/debug :hint "audit persistence terminated")))) - - (->> (rx/merge - (->> stream - (rx/with-latest-from profile) - (rx/map make-event)) - - (->> (user-input-observer) - (rx/with-latest-from profile) - (rx/map make-performance-event) - (rx/debounce debounce-browser-event-time)) - - (->> (longtask-observer) - (rx/with-latest-from profile) - (rx/map make-performance-event) - (rx/debounce debounce-longtask-time)) - - (if (and (exists? js/globalThis) - (exists? (.-requestAnimationFrame js/globalThis)) - (exists? (.-scheduler js/globalThis)) - (exists? (.-postTask (.-scheduler js/globalThis)))) - (->> stream + ;; Fetch backend flags and only start event collection if + ;; :audit-log or :telemetry is enabled. 
On RPC failure, proceed + ;; with event collection anyway (backend will reject if truly disabled). + (->> (rp/cmd! :get-enabled-flags) + (rx/catch (fn [cause] + (l/debug :hint "unable to fetch backend flags, proceeding with event collection" :cause cause) + (rx/of #{:telemetry}))) + (rx/mapcat (fn [flags] + (if (or (contains? flags :audit-log) + (contains? flags :telemetry)) + (do + (l/debug :hint "event collection enabled" :flags (str/join " " (map name flags))) + (rx/of true)) + (do + (l/debug :hint "event collection disabled (no audit-log or telemetry flag)") + (rx/empty))))) + (rx/take 1) + (rx/subs! + (fn [_] + ;; Start the event collection pipeline + (->> (rx/merge + (->> (rx/from-atom buffer) + (rx/filter #(pos? (count %))) + (rx/debounce 2000)) + (->> stream + (rx/filter (ptk/type? :app.main.data.profile/logout)) + (rx/observe-on :async))) + (rx/map (fn [_] + (into [] (take max-chunk-size) @buffer))) (rx/with-latest-from profile) - (rx/merge-map process-performance-event) - (rx/debounce debounce-performance-event-time)) - (rx/empty))) + (rx/mapcat (fn [[chunk profile-id]] + (let [events (filterv #(= profile-id (:profile-id %)) chunk)] + (->> (persist-events events) + (rx/tap (fn [_] + (l/debug :hint "events chunk persisted" :total (count chunk)))) + (rx/map (constantly chunk)))))) + (rx/take-until stopper) + (rx/subs! (fn [chunk] + (swap! buffer remove-from-buffer (count chunk))) + (fn [cause] + (l/error :hint "unexpected error on audit persistence" :cause cause)) + (fn [] + (l/debug :hint "audit persistence terminated")))) - (rx/filter :profile-id) - (rx/map (fn [event] - (let [session* (or @session (ct/now)) - context (-> @context - (merge (:context event)) - (assoc :session session*) - (assoc :session-id cf/session-id) - (assoc :external-session-id (cf/external-session-id)) - (add-external-context-info) - (d/without-nils))] - (reset! 
session session*) - (-> event - (assoc :timestamp (ct/now)) - (assoc :context context))))) + (->> (rx/merge + (->> stream + (rx/with-latest-from profile) + (rx/map make-event)) - (rx/tap (fn [event] - (l/debug :hint "event enqueued") - (swap! buffer append-to-buffer event))) + (->> (user-input-observer) + (rx/with-latest-from profile) + (rx/map make-performance-event) + (rx/debounce debounce-browser-event-time)) - (rx/switch-map #(rx/timer session-timeout)) - (rx/take-until stopper) - (rx/subs! (fn [_] - (l/debug :hint "session reinitialized") - (reset! session nil)) - (fn [cause] - (l/error :hint "error on event batching stream" :cause cause)) - (fn [] - (l/debug :hitn "events batching stream terminated")))))))) + (->> (longtask-observer) + (rx/with-latest-from profile) + (rx/map make-performance-event) + (rx/debounce debounce-longtask-time)) + + (if (and (exists? js/globalThis) + (exists? (.-requestAnimationFrame js/globalThis)) + (exists? (.-scheduler js/globalThis)) + (exists? (.-postTask (.-scheduler js/globalThis)))) + (->> stream + (rx/with-latest-from profile) + (rx/merge-map process-performance-event) + (rx/debounce debounce-performance-event-time)) + (rx/empty))) + + (rx/filter :profile-id) + (rx/map (fn [event] + (let [session* (or @session (ct/now)) + context (-> @context + (merge (:context event)) + (assoc :session session*) + (assoc :session-id cf/session-id) + (assoc :external-session-id (cf/external-session-id)) + (add-external-context-info) + (d/without-nils))] + (reset! session session*) + (-> event + (assoc :timestamp (ct/now)) + (assoc :context context))))) + + (rx/tap (fn [event] + (l/debug :hint "event enqueued") + (swap! buffer append-to-buffer event))) + + (rx/switch-map #(rx/timer session-timeout)) + (rx/take-until stopper) + (rx/subs! (fn [_] + (l/debug :hint "session reinitialized") + (reset! 
session nil))
+                   (fn [cause]
+                     (l/error :hint "error on event batching stream" :cause cause))
+                   (fn []
+                     (l/debug :hint "events batching stream terminated")))))
+        (fn [cause]
+          (l/warn :hint "unexpected error during event collection initialization" :cause cause)))))))))
 
 (defn event
   [props]

From feb49bc07aae629c39fc9229c0c2d03dd31de584 Mon Sep 17 00:00:00 2001
From: Andrey Antukh
Date: Mon, 11 May 2026 13:28:53 +0200
Subject: [PATCH 5/6] :bug: Add missing migrations for audit-log tables

---
 backend/src/app/migrations.clj                           | 6 ++++++
 .../src/app/migrations/sql/0145-mod-audit-log-table.sql  | 2 ++
 ...-telemetry-index.sql => 0146-mod-audit-log-table.sql} | 0
 3 files changed, 8 insertions(+)
 create mode 100644 backend/src/app/migrations/sql/0145-mod-audit-log-table.sql
 rename backend/src/app/migrations/sql/{0145-add-audit-log-telemetry-index.sql => 0146-mod-audit-log-table.sql} (100%)

diff --git a/backend/src/app/migrations.clj b/backend/src/app/migrations.clj
index 6778316e5e..fe517762aa 100644
--- a/backend/src/app/migrations.clj
+++ b/backend/src/app/migrations.clj
@@ -465,6 +465,12 @@
    {:name "0145-fix-plugins-uri-on-profile"
     :fn mg0145/migrate}
 
+   {:name "0145-mod-audit-log-table"
+    :fn (mg/resource "app/migrations/sql/0145-mod-audit-log-table.sql")}
+
+   {:name "0146-mod-audit-log-table"
+    :fn (mg/resource "app/migrations/sql/0146-mod-audit-log-table.sql")}
+
    {:name "0146-mod-access-token-table"
     :fn (mg/resource "app/migrations/sql/0146-mod-access-token-table.sql")}
 
diff --git a/backend/src/app/migrations/sql/0145-mod-audit-log-table.sql b/backend/src/app/migrations/sql/0145-mod-audit-log-table.sql
new file mode 100644
index 0000000000..6d95ecc6af
--- /dev/null
+++ b/backend/src/app/migrations/sql/0145-mod-audit-log-table.sql
@@ -0,0 +1,2 @@
+CREATE INDEX audit_log__created_at__idx ON audit_log(created_at) WHERE archived_at IS NULL;
+CREATE INDEX 
audit_log__archived_at__idx ON audit_log(archived_at) WHERE archived_at IS NOT NULL; diff --git a/backend/src/app/migrations/sql/0145-add-audit-log-telemetry-index.sql b/backend/src/app/migrations/sql/0146-mod-audit-log-table.sql similarity index 100% rename from backend/src/app/migrations/sql/0145-add-audit-log-telemetry-index.sql rename to backend/src/app/migrations/sql/0146-mod-audit-log-table.sql From 1f9f4126b7637213d524daa246c2941db34e4d4f Mon Sep 17 00:00:00 2001 From: "Dr. Dominik Jain" Date: Mon, 11 May 2026 14:00:23 +0200 Subject: [PATCH 6/6] :sparkles: Improve MCP server logging, adding Loki support (#9425) * :sparkles: Improve MCP server logging Log only fingerprints of user tokens * :sparkles: Add Loki transport support to MCP server logger Loki logging is enabled iff PENPOT_LOGGERS_LOKI_URI is non-empty. File logging is now enabled iff PENPOT_MCP_LOG_DIR is set to a non-empty value (previously defaulted to the "logs" directory when unset). GitHub #9415 --- mcp/README.md | 2 +- mcp/packages/server/package.json | 3 +- mcp/packages/server/src/PenpotMcpServer.ts | 34 ++- mcp/packages/server/src/index.ts | 6 +- mcp/packages/server/src/logger.ts | 233 +++++++++++++++++---- mcp/pnpm-lock.yaml | 12 ++ 6 files changed, 243 insertions(+), 47 deletions(-) diff --git a/mcp/README.md b/mcp/README.md index f23af675e9..07fa01ac26 100644 --- a/mcp/README.md +++ b/mcp/README.md @@ -278,7 +278,7 @@ The Penpot MCP server can be configured using environment variables. 
| Environment Variable | Description | Default | |------------------------|------------------------------------------------------|----------| | `PENPOT_MCP_LOG_LEVEL` | Log level: `trace`, `debug`, `info`, `warn`, `error` | `info` | -| `PENPOT_MCP_LOG_DIR` | Directory for log files | `logs` | +| `PENPOT_MCP_LOG_DIR` | Directory for log files; file logging is enabled iff this is set to a non-empty value | (unset) | ### Plugin Server Configuration diff --git a/mcp/packages/server/package.json b/mcp/packages/server/package.json index 922be86975..68d33859ac 100644 --- a/mcp/packages/server/package.json +++ b/mcp/packages/server/package.json @@ -5,7 +5,7 @@ "type": "module", "main": "dist/index.js", "scripts": { - "build:server": "esbuild src/index.ts --bundle --platform=node --target=node18 --format=esm --outfile=dist/index.js --external:@modelcontextprotocol/* --external:ws --external:express --external:class-transformer --external:class-validator --external:reflect-metadata --external:pino --external:pino-pretty --external:js-yaml --external:sharp", + "build:server": "esbuild src/index.ts --bundle --platform=node --target=node18 --format=esm --outfile=dist/index.js --external:@modelcontextprotocol/* --external:ws --external:express --external:class-transformer --external:class-validator --external:reflect-metadata --external:pino --external:pino-pretty --external:pino-loki --external:js-yaml --external:sharp", "build": "pnpm run build:server && node scripts/copy-resources.js", "build:types": "tsc --emitDeclarationOnly --outDir dist", "start": "node dist/index.js", @@ -31,6 +31,7 @@ "js-yaml": "^4.1.1", "penpot-mcp": "file:..", "pino": "^9.10.0", + "pino-loki": "^2.6.0", "pino-pretty": "^13.1.1", "reflect-metadata": "^0.1.13", "sharp": "^0.34.5", diff --git a/mcp/packages/server/src/PenpotMcpServer.ts b/mcp/packages/server/src/PenpotMcpServer.ts index 47a3ee3355..6bd3ca33e5 100644 --- a/mcp/packages/server/src/PenpotMcpServer.ts +++ 
b/mcp/packages/server/src/PenpotMcpServer.ts @@ -49,6 +49,28 @@ export class PenpotMcpServer { */ private static readonly SESSION_TIMEOUT_MINUTES = 60; + /** + * Returns a short, non-reversible fingerprint of a user token, suitable for + * correlating log lines without exposing the full credential. + * + * Penpot tokens are JWEs in compact serialization (RFC 7516 §7.1) with five + * dot-separated segments; we use the first 8 chars of the wrapped CEK + * (segment 1) as a stable per-token identifier. For malformed tokens (e.g. + * test stubs that aren't real JWEs), we fall back to the first 8 chars of + * the raw token. + * + * @param token - the token to fingerprint, or `undefined` + * @returns a short fingerprint, or `` if no token was given + */ + private static tokenFingerprint(token: string | undefined): string { + if (!token) { + return ""; + } + const segments = token.split("."); + const source = segments.length === 5 ? segments[1] : token; + return source.slice(0, 8); + } + private readonly logger = createLogger("PenpotMcpServer"); private readonly tools: ToolInfo[]; public readonly configLoader: ConfigurationLoader; @@ -222,12 +244,14 @@ export class PenpotMcpServer { userToken = session.userToken; session.lastActiveTime = Date.now(); this.logger.info( - `Received request for existing session with id=${sessionId}; userToken=${session.userToken}` + `Received request for existing session with id=${sessionId}; userTokenFp=${PenpotMcpServer.tokenFingerprint(session.userToken)}` ); } else { // new session: create a fresh McpServer and transport userToken = req.query.userToken as string | undefined; - this.logger.info(`Received new session request; userToken=${userToken}`); + this.logger.info( + `Received new session request; userTokenFp=${PenpotMcpServer.tokenFingerprint(userToken)}` + ); const { randomUUID } = await import("node:crypto"); const server = this.createMcpServer(); transport = new StreamableHTTPServerTransport({ @@ -235,13 +259,15 @@ export class 
PenpotMcpServer { onsessioninitialized: (id) => { this.streamableTransports[id] = new StreamableSession(transport, userToken, Date.now()); this.logger.info( - `Session initialized with id=${id} for userToken=${userToken}; total sessions: ${Object.keys(this.streamableTransports).length}` + `Session initialized with id=${id} for userTokenFp=${PenpotMcpServer.tokenFingerprint(userToken)}; total sessions: ${Object.keys(this.streamableTransports).length}` ); }, }); transport.onclose = () => { if (transport.sessionId) { - this.logger.info(`Closing session with id=${transport.sessionId} for userToken=${userToken}`); + this.logger.info( + `Closing session with id=${transport.sessionId} for userTokenFp=${PenpotMcpServer.tokenFingerprint(userToken)}` + ); delete this.streamableTransports[transport.sessionId]; } }; diff --git a/mcp/packages/server/src/index.ts b/mcp/packages/server/src/index.ts index 356c2cea81..84ef23f204 100644 --- a/mcp/packages/server/src/index.ts +++ b/mcp/packages/server/src/index.ts @@ -1,7 +1,7 @@ #!/usr/bin/env node import { PenpotMcpServer } from "./PenpotMcpServer"; -import { createLogger, logFilePath } from "./logger"; +import { createLogger, logActiveTransports } from "./logger"; /** * Entry point for Penpot MCP Server @@ -14,8 +14,8 @@ import { createLogger, logFilePath } from "./logger"; async function main(): Promise<void> { const logger = createLogger("main"); - // log the file path early so it appears before any potential errors - logger.info(`Logging to file: ${logFilePath}`); + // announce active transports early so they appear before any potential errors + logActiveTransports(logger); try { const args = process.argv.slice(2); diff --git a/mcp/packages/server/src/logger.ts b/mcp/packages/server/src/logger.ts index 1e8f96e5ec..45cb359e90 100644 --- a/mcp/packages/server/src/logger.ts +++ b/mcp/packages/server/src/logger.ts @@ -1,12 +1,21 @@ -import pino from "pino"; +import pino, { type TransportTargetOptions } from "pino"; import { join, resolve 
} from "path"; /** - * Configuration for log file location and level. + * Configured log level (defaults to `info`). */ -const LOG_DIR = process.env.PENPOT_MCP_LOG_DIR || "logs"; const LOG_LEVEL = process.env.PENPOT_MCP_LOG_LEVEL || "info"; +/** + * Configured log directory; file logging is enabled iff this is set to a non-empty value. + */ +const LOG_DIR = process.env.PENPOT_MCP_LOG_DIR; + +/** + * Loki host URI; if set and non-empty, Loki logging is enabled. + */ +const LOKI_URI = process.env.PENPOT_LOGGERS_LOKI_URI; + /** * Generates a timestamped log file name. * @@ -24,56 +33,204 @@ function generateLogFileName(): string { } /** - * Absolute path to the log file being written. + * The pino transport target spec, as expected in `transport.targets[]`. */ -export const logFilePath = resolve(join(LOG_DIR, generateLogFileName())); +type TransportTargetSpec = TransportTargetOptions; /** - * Logger instance configured for both console and file output with metadata. + * Provides a single pino transport target, either active or inactive. * - * Both console and file output use pretty formatting for human readability. - * Console output includes colors, while file output is plain text. + * Implementations decide their own activation based on environment configuration. + * An inactive provider returns `null` from {@link getTarget} and is skipped. + */ +interface LogTransportProvider { + /** + * Returns the pino transport target spec, or `null` if this transport is disabled. + */ + getTarget(): TransportTargetSpec | null; + + /** + * Returns a human-readable startup message describing the transport, or `null` if disabled. + */ + getStartupMessage(): string | null; +} + +/** + * Console transport with pretty-printed, colorized output. Always active. 
+ */ +class ConsoleLogTransport implements LogTransportProvider { + public getTarget(): TransportTargetSpec { + return { + target: "pino-pretty", + level: LOG_LEVEL, + options: { + colorize: true, + translateTime: "SYS:yyyy-mm-dd HH:MM:ss.l", + ignore: "pid,hostname", + messageFormat: "{msg}", + levelFirst: true, + }, + }; + } + + public getStartupMessage(): string { + return "Logging to console"; + } +} + +/** + * File transport writing pretty-formatted logs to a timestamped file in a configurable directory. + * Active iff `PENPOT_MCP_LOG_DIR` is set and non-empty. + */ +class FileLogTransport implements LogTransportProvider { + private readonly enabled: boolean; + private readonly filePath: string | null; + + public constructor(logDir: string | undefined) { + this.enabled = logDir !== undefined && logDir !== ""; + this.filePath = this.enabled ? resolve(join(logDir as string, generateLogFileName())) : null; + } + + public isEnabled(): boolean { + return this.enabled; + } + + public getTarget(): TransportTargetSpec | null { + if (!this.enabled) { + return null; + } + return { + target: "pino-pretty", + level: LOG_LEVEL, + options: { + destination: this.filePath, + colorize: false, + translateTime: "SYS:yyyy-mm-dd HH:MM:ss.l", + ignore: "pid,hostname", + messageFormat: "{msg}", + levelFirst: true, + mkdir: true, + }, + }; + } + + public getStartupMessage(): string | null { + return this.enabled ? `Logging to file: ${this.filePath}` : null; + } + + /** + * Returns the absolute path of the active log file, or `undefined` if file logging is disabled. + */ + public getFilePath(): string | undefined { + return this.filePath ?? undefined; + } +} + +/** + * Loki transport forwarding logs to a Grafana Loki instance via `pino-loki`. + * + * Active iff `PENPOT_LOGGERS_LOKI_URI` is set and non-empty. 
+ */ +class LokiLogTransport implements LogTransportProvider { + private readonly host: string | null; + + public constructor(lokiUri: string | undefined) { + this.host = lokiUri !== undefined && lokiUri !== "" ? lokiUri : null; + } + + public isEnabled(): boolean { + return this.host !== null; + } + + public getTarget(): TransportTargetSpec | null { + if (this.host === null) { + return null; + } + return { + target: "pino-loki", + level: LOG_LEVEL, + options: { + host: this.host, + json: false, + batching: true, + interval: 5, + replaceTimestamp: true, + labels: this.buildLabels(), + }, + }; + } + + /** + * Builds the set of static labels to attach to every log entry sent to Loki. + * + * The `environment` and `instance` labels are only included if their respective + * environment variables are set and non-empty. + */ + private buildLabels(): Record<string, string> { + const labels: Record<string, string> = { + job: process.env.PENPOT_LOGGERS_LOKI_JOB || "mcp", + }; + const environment = process.env.PENPOT_LOGGERS_LOKI_ENVIRONMENT; + if (environment) { + labels.environment = environment; + } + const instance = process.env.PENPOT_LOGGERS_LOKI_INSTANCE; + if (instance) { + labels.instance = instance; + } + return labels; + } + + public getStartupMessage(): string | null { + return this.host !== null ? `Logging to Loki: ${this.host}` : null; + } +} + +// build the transport providers; each decides its own activation independently +const consoleTransport = new ConsoleLogTransport(); +const fileTransport = new FileLogTransport(LOG_DIR); +const lokiTransport = new LokiLogTransport(LOKI_URI); + +const transports: LogTransportProvider[] = [consoleTransport, fileTransport, lokiTransport]; + +/** + * Absolute path to the log file being written, or `undefined` if file logging is disabled. + */ +export const logFilePath: string | undefined = fileTransport.getFilePath(); + +/** + * Logger instance configured with the active transports (console, optional file, optional Loki). 
*/ export const logger = pino({ level: LOG_LEVEL, timestamp: pino.stdTimeFunctions.isoTime, transport: { - targets: [ - { - // console transport with pretty formatting - target: "pino-pretty", - level: LOG_LEVEL, - options: { - colorize: true, - translateTime: "SYS:yyyy-mm-dd HH:MM:ss.l", - ignore: "pid,hostname", - messageFormat: "{msg}", - levelFirst: true, - }, - }, - { - // file transport with pretty formatting (same as console) - target: "pino-pretty", - level: LOG_LEVEL, - options: { - destination: logFilePath, - colorize: false, - translateTime: "SYS:yyyy-mm-dd HH:MM:ss.l", - ignore: "pid,hostname", - messageFormat: "{msg}", - levelFirst: true, - mkdir: true, - }, - }, - ], + targets: transports + .map((t) => t.getTarget()) + .filter((target): target is TransportTargetSpec => target !== null), }, }); +/** + * Logs a startup line for each active transport, allowing the user to see at a glance + * where logs are being written. + * + * @param log - the logger to emit the startup messages on + */ +export function logActiveTransports(log: pino.Logger): void { + for (const t of transports) { + const msg = t.getStartupMessage(); + if (msg !== null) { + log.info(msg); + } + } +} + /** * Creates a child logger with the specified name/origin. 
* - * @param name - The name/origin identifier for the logger - * @returns Child logger instance with the specified name + * @param name - the name/origin identifier for the logger + * @returns child logger instance with the specified name */ export function createLogger(name: string) { return logger.child({ name }); diff --git a/mcp/pnpm-lock.yaml b/mcp/pnpm-lock.yaml index c90b714cc5..c59ac34c3f 100644 --- a/mcp/pnpm-lock.yaml +++ b/mcp/pnpm-lock.yaml @@ -66,6 +66,9 @@ importers: pino: specifier: ^9.10.0 version: 9.14.0 + pino-loki: + specifier: ^2.6.0 + version: 2.6.0 pino-pretty: specifier: ^13.1.1 version: 13.1.3 @@ -1268,6 +1271,10 @@ packages: pino-abstract-transport@3.0.0: resolution: {integrity: sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==} + pino-loki@2.6.0: + resolution: {integrity: sha512-Qy+NeIdb0YmZe/M5mgnO5aGaAyVaeqgwn45T6VajhRXZlZVfGe1YNYhFa9UZyCeNFAPGaUkD2e9yPGjx+2BBYA==} + hasBin: true + pino-pretty@13.1.3: resolution: {integrity: sha512-ttXRkkOz6WWC95KeY9+xxWL6AtImwbyMHrL1mSwqwW9u+vLp/WIElvHvCSDg0xO/Dzrggz1zv3rN5ovTRVowKg==} hasBin: true @@ -2481,6 +2488,11 @@ snapshots: dependencies: split2: 4.2.0 + pino-loki@2.6.0: + dependencies: + pino-abstract-transport: 2.0.0 + pump: 3.0.3 + pino-pretty@13.1.3: dependencies: colorette: 2.0.20