diff --git a/.circleci/config.yml b/.circleci/config.yml index 63e3794e67..f56bccd844 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,11 +11,11 @@ jobs: - image: cimg/redis:7.0.5 working_directory: ~/repo - resource_class: large + resource_class: medium+ environment: - # Customize the JVM maximum heap limit - JVM_OPTS: -Xmx4g + JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC + NODE_OPTIONS: --max-old-space-size=4096 steps: - checkout @@ -72,11 +72,6 @@ jobs: yarn test clojure -X:dev:test :patterns '["common-tests.*-test"]' - environment: - PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin - JVM_OPTS: -Xmx4g - NODE_OPTIONS: --max-old-space-size=4096 - - run: name: "frontend tests" working_directory: "./frontend" @@ -84,10 +79,6 @@ jobs: yarn install yarn test - environment: - PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin - NODE_OPTIONS: --max-old-space-size=4096 - - run: name: "backend tests" working_directory: "./backend" @@ -99,7 +90,6 @@ jobs: PENPOT_TEST_DATABASE_USERNAME: penpot_test PENPOT_TEST_DATABASE_PASSWORD: penpot_test PENPOT_TEST_REDIS_URI: "redis://localhost/1" - JVM_OPTS: -Xmx4g - save_cache: paths: diff --git a/backend/resources/app/templates/api-doc-entry.tmpl b/backend/resources/app/templates/api-doc-entry.tmpl index c61157ec24..31c48deebf 100644 --- a/backend/resources/app/templates/api-doc-entry.tmpl +++ b/backend/resources/app/templates/api-doc-entry.tmpl @@ -25,6 +25,12 @@ SCHEMA {% endif %} + + {% if item.sse %} + + SSE + + {% endif %}
The methods marked with SSE return + an SSE + formatted stream on the response body, always with status 200. The events are + always encoded using `application/transit+json` encoding (for now no content + negotiation is possible on methods that return SSE streams).
+On the JavaScript side you can use + the eventsource-parser + library for properly parsing the response body using the + standard Fetch + API
The rate limit work per user basis (this means that different api keys share diff --git a/backend/scripts/repl b/backend/scripts/repl index 2992115816..e2cd441c02 100755 --- a/backend/scripts/repl +++ b/backend/scripts/repl @@ -75,7 +75,7 @@ export OPTIONS=" export OPTIONS="$OPTIONS -J--enable-preview" # Setup HEAP -export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m" +# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m" # export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch" # Increase virtual thread pool size @@ -88,7 +88,7 @@ export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m" # export OPTIONS="$OPTIONS -J-Xint" # Setup GC -export OPTIONS="$OPTIONS -J-XX:+UseG1GC" +# export OPTIONS="$OPTIONS -J-XX:+UseG1GC" # Setup GC # export OPTIONS="$OPTIONS -J-XX:+UseZGC" diff --git a/backend/scripts/start-dev b/backend/scripts/start-dev index 4c760d751c..59a8a189bd 100755 --- a/backend/scripts/start-dev +++ b/backend/scripts/start-dev @@ -23,6 +23,39 @@ export PENPOT_FLAGS="\ disable-soft-file-schema-validation \ disable-soft-file-validation"; +export OPTIONS=" + -A:jmx-remote -A:dev \ + -J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \ + -J-Djdk.attach.allowAttachSelf \ + -J-Dpolyglot.engine.WarnInterpreterOnly=false \ + -J-Dlog4j2.configurationFile=log4j2.xml \ + -J-XX:+EnableDynamicAgentLoading \ + -J-XX:-OmitStackTraceInFastThrow \ + -J-XX:+UnlockDiagnosticVMOptions \ + -J-XX:+DebugNonSafepoints" + +# Setup HEAP +# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m" +# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch" + +# Increase virtual thread pool size +# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16" + +# Disable C2 Compiler +# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1" + +# Disable all compilers +# export OPTIONS="$OPTIONS -J-Xint" + +# Setup GC +# export OPTIONS="$OPTIONS -J-XX:+UseG1GC" + +# Setup GC +# export OPTIONS="$OPTIONS -J-XX:+UseZGC" + +# Enable ImageMagick v7.x support +# 
export OPTIONS="-J-Dim4java.useV7=true $OPTIONS"; + # Initialize MINIO config mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q @@ -45,7 +78,7 @@ if [ "$1" = "--watch" ]; then echo "Start Watch..." - clojure -A:dev -M -m app.main & + clojure $OPTIONS -A:dev -M -m app.main & npx nodemon \ --watch src \ @@ -57,5 +90,6 @@ if [ "$1" = "--watch" ]; then wait; else - clojure -A:dev -M -m app.main; + set -x + clojure $OPTIONS -A:dev -M -m app.main; fi diff --git a/backend/src/app/db.clj b/backend/src/app/db.clj index 7ab340e216..8d7fbef4c9 100644 --- a/backend/src/app/db.clj +++ b/backend/src/app/db.clj @@ -249,6 +249,12 @@ :code :unable-resolve-connection :hint "expected conn or system map")))) +(defn connection-map? + "Check if the provided value is a map like data structure that + contains a database connection." + [o] + (and (map? o) (connection? (::conn o)))) + (defn- get-connectable [o] (cond diff --git a/backend/src/app/features/components_v2.clj b/backend/src/app/features/components_v2.clj index 3a41d818a0..663abb8c1b 100644 --- a/backend/src/app/features/components_v2.clj +++ b/backend/src/app/features/components_v2.clj @@ -14,7 +14,7 @@ [app.common.files.changes-builder :as fcb] [app.common.files.helpers :as cfh] [app.common.files.libraries-helpers :as cflh] - [app.common.files.migrations :as pmg] + [app.common.files.migrations :as fmg] [app.common.files.shapes-helpers :as cfsh] [app.common.files.validate :as cfv] [app.common.geom.point :as gpt] @@ -32,6 +32,8 @@ [app.common.types.shape-tree :as ctst] [app.common.uuid :as uuid] [app.db :as db] + [app.features.fdata :as fdata] + [app.http.sse :as sse] [app.media :as media] [app.rpc.commands.files :as files] [app.rpc.commands.files-snapshot :as fsnap] @@ -39,14 +41,14 @@ [app.storage :as sto] [app.storage.tmp :as tmp] [app.util.blob :as blob] - [app.util.objects-map :as omap] [app.util.pointer-map :as pmap] [app.util.time :as dt] [buddy.core.codecs :as bc] [cuerdas.core :as str] [datoteka.io :as 
io] [promesa.exec :as px] - [promesa.exec.semaphore :as ps])) + [promesa.exec.semaphore :as ps] + [promesa.util :as pu])) (def ^:dynamic *system* nil) (def ^:dynamic *stats* nil) @@ -71,7 +73,7 @@ detach-shape (fn [container shape] - ; Detach a shape. If it's inside a component, add it to detached-ids, for further use. + ;; Detach a shape. If it's inside a component, add it to detached-ids, for further use. (let [is-component? (let [root-shape (ctst/get-shape container (:id container))] (and (some? root-shape) (nil? (:parent-id root-shape))))] (when is-component? @@ -80,8 +82,8 @@ fix-orphan-shapes (fn [file-data] - ; Find shapes that are not listed in their parent's children list. - ; Remove them, and also their children + ;; Find shapes that are not listed in their parent's children list. + ;; Remove them, and also their children (letfn [(fix-container [container] (reduce fix-shape container (ctn/shapes-seq container))) @@ -103,7 +105,7 @@ remove-nested-roots (fn [file-data] - ; Remove :component-root in head shapes that are nested. + ;; Remove :component-root in head shapes that are nested. (letfn [(fix-container [container] (update container :objects update-vals (partial fix-shape container))) @@ -120,7 +122,7 @@ add-not-nested-roots (fn [file-data] - ; Add :component-root in head shapes that are not nested. + ;; Add :component-root in head shapes that are not nested. (letfn [(fix-container [container] (update container :objects update-vals (partial fix-shape container))) @@ -137,7 +139,7 @@ fix-orphan-copies (fn [file-data] - ; Detach shapes that were inside a copy (have :shape-ref) but now they aren't. + ;; Detach shapes that were inside a copy (have :shape-ref) but now they aren't. (letfn [(fix-container [container] (update container :objects update-vals (partial fix-shape container))) @@ -155,38 +157,38 @@ remap-refs (fn [file-data] - ; Remap shape-refs so that they point to the near main. 
- ; At the same time, if there are any dangling ref, detach the shape and its children. + ;; Remap shape-refs so that they point to the near main. + ;; At the same time, if there are any dangling ref, detach the shape and its children. (letfn [(fix-container [container] (reduce fix-shape container (ctn/shapes-seq container))) (fix-shape [container shape] (if (ctk/in-component-copy? shape) - ; First look for the direct shape. + ;; First look for the direct shape. (let [root (ctn/get-component-shape (:objects container) shape) libraries (assoc-in libraries [(:id file-data) :data] file-data) library (get libraries (:component-file root)) component (ctkl/get-component (:data library) (:component-id root) true) direct-shape (ctf/get-component-shape (:data library) component (:shape-ref shape))] (if (some? direct-shape) - ; If it exists, there is nothing else to do. + ;; If it exists, there is nothing else to do. container - ; If not found, find the near shape. + ;; If not found, find the near shape. (let [near-shape (d/seek #(= (:shape-ref %) (:shape-ref shape)) (ctf/get-component-shapes (:data library) component))] (if (some? near-shape) - ; If found, update the ref to point to the near shape. + ;; If found, update the ref to point to the near shape. (ctn/update-shape container (:id shape) #(assoc % :shape-ref (:id near-shape))) - ; If not found, it may be a fostered component. Try to locate a direct shape - ; in the head component. + ;; If not found, it may be a fostered component. Try to locate a direct shape + ;; in the head component. (let [head (ctn/get-head-shape (:objects container) shape) library-2 (get libraries (:component-file head)) component-2 (ctkl/get-component (:data library-2) (:component-id head) true) direct-shape-2 (ctf/get-component-shape (:data library-2) component-2 (:shape-ref shape))] (if (some? direct-shape-2) - ; If it exists, there is nothing else to do. + ;; If it exists, there is nothing else to do. 
container - ; If not found, detach shape and all children (stopping if a nested instance is reached) + ;; If not found, detach shape and all children (stopping if a nested instance is reached) (let [children (ctn/get-children-in-instance (:objects container) (:id shape))] (reduce #(ctn/update-shape %1 (:id %2) (partial detach-shape %1)) container @@ -199,8 +201,8 @@ fix-copies-of-detached (fn [file-data] - ; Find any copy that is referencing a detached shape inside a component, and - ; undo the nested copy, converting it into a direct copy. + ;; Find any copy that is referencing a detached shape inside a component, and + ;; undo the nested copy, converting it into a direct copy. (letfn [(fix-container [container] (update container :objects update-vals fix-shape)) @@ -217,8 +219,8 @@ transform-to-frames (fn [file-data] - ; Transform component and copy heads to frames, and set the - ; frame-id of its childrens + ;; Transform component and copy heads to frames, and set the + ;; frame-id of its childrens (letfn [(fix-container [container] (update container :objects update-vals fix-shape)) @@ -239,8 +241,8 @@ remap-frame-ids (fn [file-data] - ; Remap the frame-ids of the primary childs of the head instances - ; to point to the head instance. + ;; Remap the frame-ids of the primary childs of the head instances + ;; to point to the head instance. (letfn [(fix-container [container] (update container :objects update-vals (partial fix-shape container))) @@ -282,8 +284,7 @@ fix-component-nil-objects (fn [file-data] ;; Ensure that objects of all components is not null - (letfn [(fix-component - [component] + (letfn [(fix-component [component] (if (and (contains? component :objects) (nil? (:objects component))) (if (:deleted component) (assoc component :objects {}) @@ -362,6 +363,8 @@ shapes from library components. Mark the file with the :components-v2 option." 
[file-data libraries] + (sse/tap {:type :migration-progress + :section :components}) (let [components (ctkl/components-seq file-data)] (if (empty? components) (assoc-in file-data [:options :components-v2] true) @@ -435,6 +438,9 @@ add-instance-grid (fn [fdata frame-id grid assets] (reduce (fn [result [component position]] + (sse/tap {:type :migration-progress + :section :components + :name (:name component)}) (add-main-instance result component frame-id (gpt/add position (gpt/point grid-gap grid-gap)))) fdata @@ -693,14 +699,13 @@ (l/trc :hint "graphic processed" :file-id (str (:id fdata)) :media-id (str (:id mobj)) - :elapsed (dt/format-duration (tp1))))))) - - process (px/wrap-bindings process)] - + :elapsed (dt/format-duration (tp1)))))))] (try (->> (d/zip media-group grid) (map (fn [[mobj position]] - (l/trc :hint "submit graphic processing" :file-id (str (:id fdata)) :id (str (:id mobj))) + (sse/tap {:type :migration-progress + :section :graphics + :name (:name mobj)}) (px/submit! executor (partial process mobj position)))) (reduce (fn [fdata promise] (if-let [changes (deref promise)] @@ -709,10 +714,12 @@ fdata)) fdata)) (finally - (.close ^java.lang.AutoCloseable executor))))) + (pu/close! executor))))) (defn- migrate-graphics [fdata] + (sse/tap {:type :migration-progress + :section :graphics}) (if (empty? (:media fdata)) fdata (let [[fdata page-id start-pos] @@ -753,7 +760,7 @@ (create-media-grid fdata page-id (:id frame) grid assets) (gpt/add position (gpt/point 0 (+ height (* 2 grid-gap) frame-gap)))))))))) -(defn- migrate-file-data +(defn- migrate-fdata [fdata libs] (let [migrated? (dm/get-in fdata [:options :components-v2])] (if migrated? @@ -762,57 +769,61 @@ fdata (migrate-graphics fdata)] (update fdata :options assoc :components-v2 true))))) +(defn- process-fdata + [fdata id] + (-> fdata + (assoc :id id) + (fdata/process-pointers deref) + (fmg/migrate-data))) + (defn- process-file - [{:keys [id] :as file} & {:keys [validate? 
throw-on-validate?]}] - (let [conn (::db/conn *system*)] - (binding [pmap/*tracked* (atom {}) - pmap/*load-fn* (partial files/load-pointer conn id) - cfeat/*wrap-with-pointer-map-fn* - (if (contains? (:features file) "fdata/pointer-map") pmap/wrap identity) - cfeat/*wrap-with-objects-map-fn* - (if (contains? (:features file) "fdata/objectd-map") omap/wrap identity)] + [{:keys [::db/conn] :as system} id & {:keys [validate? throw-on-validate?]}] + (binding [pmap/*tracked* (pmap/create-tracked) + pmap/*load-fn* (partial fdata/load-pointer *system* id)] - (let [file (-> file - (update :data blob/decode) - (update :data assoc :id id) - (pmg/migrate-file)) + (let [file (binding [cfeat/*new* (atom #{})] + (-> (files/get-file system id :migrate? false) + (update :data process-fdata id) + (update :features into (deref cfeat/*new*)) + (update :features cfeat/migrate-legacy-features))) - libs (->> (files/get-file-libraries conn id) - (into [file] (map (fn [{:keys [id]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (-> (db/get conn :file {:id id}) - (files/decode-row) - (files/process-pointers deref) ; ensure all pointers resolved - (pmg/migrate-file)))))) - (d/index-by :id)) + libs (->> (files/get-file-libraries conn id) + (into [file] (map (fn [{:keys [id]}] + (binding [pmap/*load-fn* (partial fdata/load-pointer system id)] + (-> (files/get-file system id :migrate? false) + (update :data process-fdata id)))))) + (d/index-by :id)) - file (-> file - (update :data migrate-file-data libs) - (update :features conj "components/v2"))] + pmap? (contains? (:features file) "fdata/pointer-map") - (when (contains? (:features file) "fdata/pointer-map") - (files/persist-pointers! conn id)) + file (-> file + (update :data migrate-fdata libs) + (update :features conj "components/v2") + (cond-> pmap? (fdata/enable-pointer-map))) + ] - (db/update! 
conn :file - {:data (blob/encode (:data file)) - :features (db/create-array conn "text" (:features file)) - :revn (:revn file)} - {:id (:id file)}) + (when validate? + (if throw-on-validate? + (cfv/validate-file! file libs) + (doseq [error (cfv/validate-file file libs)] + (l/wrn :hint "migrate:file:validation-error" + :file-id (str (:id file)) + :file-name (:name file) + :error error)))) - (when validate? - (if throw-on-validate? - (cfv/validate-file! file libs) - (doseq [error (cfv/validate-file file libs)] - (l/wrn :hint "migrate:file:validation-error" - :file-id (str (:id file)) - :file-name (:name file) - :error error)))) + (db/update! conn :file + {:data (blob/encode (:data file)) + :features (db/create-array conn "text" (:features file)) + :revn (:revn file)} + {:id (:id file)}) - (dissoc file :data))))) + (when pmap? + (fdata/persist-pointers! system id)) + + (dissoc file :data)))) (defn migrate-file! [system file-id & {:keys [validate? throw-on-validate?]}] - (let [tpoint (dt/tpoint) file-id (if (string? file-id) (parse-uuid file-id) @@ -823,13 +834,12 @@ (let [system (update system ::sto/storage media/configure-assets-storage)] (db/tx-run! system - (fn [{:keys [::db/conn] :as system}] + (fn [system] (binding [*system* system] (fsnap/take-file-snapshot! system {:file-id file-id :label "migration/components-v2"}) - (-> (db/get conn :file {:id file-id}) - (update :features db/decode-pgarray #{}) - (process-file :validate? validate? - :throw-on-validate? throw-on-validate?)))))) + (process-file system file-id + :validate? validate? + :throw-on-validate? 
throw-on-validate?))))) (finally (let [elapsed (tpoint) diff --git a/backend/src/app/features/fdata.clj b/backend/src/app/features/fdata.clj index 1c83493967..832d3360d0 100644 --- a/backend/src/app/features/fdata.clj +++ b/backend/src/app/features/fdata.clj @@ -7,42 +7,90 @@ (ns app.features.fdata "A `fdata/*` related feature migration helpers" (:require + [app.common.data :as d] + [app.common.exceptions :as ex] + [app.common.logging :as l] + [app.db :as db] + [app.util.blob :as blob] [app.util.objects-map :as omap] [app.util.pointer-map :as pmap])) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; OBJECTS-MAP +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + (defn enable-objects-map [file] - (-> file - (update :data (fn [data] - (-> data - (update :pages-index update-vals #(update % :objects omap/wrap)) - (update :components update-vals #(update % :objects omap/wrap))))) - (update :features conj "fdata/objects-map"))) + (let [update-fn #(d/update-when % :objects omap/wrap)] + (-> file + (update :data (fn [fdata] + (-> fdata + (update :pages-index update-vals update-fn) + (update :components update-vals update-fn)))) + (update :features conj "fdata/objects-map")))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; POINTER-MAP +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +(defn load-pointer + "A database loader pointer helper" + [system file-id id] + (let [{:keys [content]} (db/get system :file-data-fragment + {:id id :file-id file-id} + {::db/columns [:content] + ::db/check-deleted? false})] + (when-not content + (ex/raise :type :internal + :code :fragment-not-found + :hint "fragment not found" + :file-id file-id + :fragment-id id)) + + (blob/decode content))) + +(defn persist-pointers! + "Given a database connection and the final file-id, persist all + pointers to the underlying storage (the database)." 
+ [system file-id] + (doseq [[id item] @pmap/*tracked*] + (when (pmap/modified? item) + (l/trc :hint "persist pointer" :file-id (str file-id) :id (str id)) + (let [content (-> item deref blob/encode)] + (db/insert! system :file-data-fragment + {:id id + :file-id file-id + :content content}))))) + +(defn process-pointers + "Apply a function to all pointers on the file. Usuly used for + dereference the pointer to a plain value before some processing." + [fdata update-fn] + (cond-> fdata + (contains? fdata :pages-index) + (update :pages-index process-pointers update-fn) + + :always + (update-vals (fn [val] + (if (pmap/pointer-map? val) + (update-fn val) + val))))) + +(defn get-used-pointer-ids + "Given a file, return all pointer ids used in the data." + [fdata] + (->> (concat (vals fdata) + (vals (:pages-index fdata))) + (into #{} (comp (filter pmap/pointer-map?) + (map pmap/get-id))))) (defn enable-pointer-map + "Enable the fdata/pointer-map feature on the file." [file] (-> file - (update :data (fn [data] - (-> data + (update :data (fn [fdata] + (-> fdata (update :pages-index update-vals pmap/wrap) (update :components pmap/wrap)))) (update :features conj "fdata/pointer-map"))) - -;; (defn enable-shape-data-type -;; [file] -;; (letfn [(update-object [object] -;; (-> object -;; (d/update-when :selrect grc/make-rect) -;; (d/update-when :svg-viewbox grc/make-rect) -;; (cts/map->Shape))) - -;; (update-container [container] -;; (d/update-when container :objects update-vals update-object))] - -;; (-> file -;; (update :data (fn [data] -;; (-> data -;; (update :pages-index update-vals update-container) -;; (update :components update-vals update-container)))) -;; (update :features conj "fdata/shape-data-type")))) diff --git a/backend/src/app/http/errors.clj b/backend/src/app/http/errors.clj index 93c67845b9..0e8e065ad2 100644 --- a/backend/src/app/http/errors.clj +++ b/backend/src/app/http/errors.clj @@ -232,3 +232,7 @@ (if (ex/error? 
cause) (handle-error cause request nil) (handle-exception cause request nil))) + +(defn handle' + [cause request] + (::rres/body (handle cause request))) diff --git a/backend/src/app/http/sse.clj b/backend/src/app/http/sse.clj new file mode 100644 index 0000000000..c89c91b618 --- /dev/null +++ b/backend/src/app/http/sse.clj @@ -0,0 +1,86 @@ +;; This Source Code Form is subject to the terms of the Mozilla Public +;; License, v. 2.0. If a copy of the MPL was not distributed with this +;; file, You can obtain one at http://mozilla.org/MPL/2.0/. +;; +;; Copyright (c) KALEIDOS INC + +(ns app.http.sse + "SSE (server sent events) helpers" + (:refer-clojure :exclude [tap]) + (:require + [app.common.data :as d] + [app.common.exceptions :as ex] + [app.common.logging :as l] + [app.common.transit :as t] + [app.http.errors :as errors] + [promesa.core :as p] + [promesa.exec :as px] + [promesa.exec.csp :as sp] + [promesa.util :as pu] + [ring.response :as rres]) + (:import + java.io.OutputStream)) + +(def ^:dynamic *channel* nil) + +(defn- write! + [^OutputStream output ^bytes data] + (l/trc :hint "writting data" :data data :length (alength data)) + (.write output data) + (.flush output)) + +(defn- create-writer-loop + [^OutputStream output] + (try + (loop [] + (when-let [event (sp/take! *channel*)] + (let [result (ex/try! (write! output event))] + (if (ex/exception? result) + (l/wrn :hint "unexpected exception on sse writer" :cause result) + (recur))))) + (finally + (pu/close! 
output)))) + +(defn- encode + [[name data]] + (try + (let [data (with-out-str + (println "event:" (d/name name)) + (println "data:" (t/encode-str data {:type :json-verbose})) + (println))] + (.getBytes data "UTF-8")) + (catch Throwable cause + (l/err :hint "unexpected error on encoding value on sse stream" + :cause cause) + nil))) + +;; ---- PUBLIC API + +(def default-headers + {"Content-Type" "text/event-stream;charset=UTF-8" + "Cache-Control" "no-cache, no-store, max-age=0, must-revalidate" + "Pragma" "no-cache"}) + +(defn tap + ([data] (tap "event" data)) + ([name data] + (when-let [channel *channel*] + (sp/put! channel [name data]) + nil))) + +(defn response + [handler & {:keys [buf] :or {buf 32} :as opts}] + (fn [request] + {::rres/headers default-headers + ::rres/status 200 + ::rres/body (reify rres/StreamableResponseBody + (-write-body-to-stream [_ _ output] + (binding [*channel* (sp/chan :buf buf :xf (keep encode))] + (let [writer (px/run! :virtual (partial create-writer-loop output))] + (try + (tap "end" (handler)) + (catch Throwable cause + (tap "error" (errors/handle' cause request))) + (finally + (sp/close! *channel*) + (p/await! writer)))))))})) diff --git a/backend/src/app/rpc.clj b/backend/src/app/rpc.clj index d4b3a4baaf..69f9d84fbc 100644 --- a/backend/src/app/rpc.clj +++ b/backend/src/app/rpc.clj @@ -57,14 +57,16 @@ (defn- handle-response [request result] - (if (fn? result) - (result request) - (let [mdata (meta result)] - (-> {::rres/status (::http/status mdata 200) - ::rres/headers (::http/headers mdata {}) - ::rres/body (rph/unwrap result)} - (handle-response-transformation request mdata) - (handle-before-comple-hook mdata))))) + (let [mdata (meta result) + response (if (fn? 
result) + (result request) + (let [result (rph/unwrap result)] + {::rres/status (::http/status mdata 200) + ::rres/headers (::http/headers mdata {}) + ::rres/body result}))] + (-> response + (handle-response-transformation request mdata) + (handle-before-comple-hook mdata)))) (defn- rpc-handler "Ring handler that dispatches cmd requests and convert between diff --git a/backend/src/app/rpc/climit.clj b/backend/src/app/rpc/climit.clj index dd366fea91..d6e4ccb51b 100644 --- a/backend/src/app/rpc/climit.clj +++ b/backend/src/app/rpc/climit.clj @@ -177,9 +177,7 @@ (f))) ([{:keys [::id ::cache ::mtx/metrics]} f executor] - (let [f (fn [] - (let [f (px/wrap-bindings f)] - (p/await! (px/submit! executor f))))] + (let [f #(p/await! (px/submit! executor f))] (if (and cache id) (invoke! cache metrics id nil f) (f))))) diff --git a/backend/src/app/rpc/commands/binfile.clj b/backend/src/app/rpc/commands/binfile.clj index ba00b26fa0..b0c44a16a8 100644 --- a/backend/src/app/rpc/commands/binfile.clj +++ b/backend/src/app/rpc/commands/binfile.clj @@ -15,13 +15,15 @@ [app.common.files.validate :as fval] [app.common.fressian :as fres] [app.common.logging :as l] + [app.common.schema :as sm] [app.common.spec :as us] [app.common.types.file :as ctf] [app.common.uuid :as uuid] [app.config :as cf] [app.db :as db] - [app.features.components-v2 :as features.components-v2] - [app.features.fdata :as features.fdata] + [app.features.components-v2 :as feat.compv2] + [app.features.fdata :as feat.fdata] + [app.http.sse :as sse] [app.loggers.audit :as-alias audit] [app.loggers.webhooks :as-alias webhooks] [app.media :as media] @@ -30,7 +32,6 @@ [app.rpc.commands.projects :as projects] [app.rpc.commands.teams :as teams] [app.rpc.doc :as-alias doc] - [app.rpc.helpers :as rph] [app.storage :as sto] [app.storage.tmp :as tmp] [app.tasks.file-gc] @@ -38,11 +39,13 @@ [app.util.pointer-map :as pmap] [app.util.services :as sv] [app.util.time :as dt] + [app.worker :as-alias wrk] [clojure.set :as set] 
[clojure.spec.alpha :as s] [clojure.walk :as walk] [cuerdas.core :as str] [datoteka.io :as io] + [promesa.core :as p] [promesa.util :as pu] [ring.response :as rres] [yetti.adapter :as yt]) @@ -302,25 +305,21 @@ (defn- get-files [cfg ids] - (letfn [(get-files* [{:keys [::db/conn]}] - (let [sql (str "SELECT id FROM file " - " WHERE id = ANY(?) ") - ids (db/create-array conn "uuid" ids)] - (->> (db/exec! conn [sql ids]) - (into [] (map :id)) - (not-empty))))] - - (db/run! cfg get-files*))) + (db/run! cfg (fn [{:keys [::db/conn]}] + (let [sql (str "SELECT id FROM file " + " WHERE id = ANY(?) ") + ids (db/create-array conn "uuid" ids)] + (->> (db/exec! conn [sql ids]) + (into [] (map :id)) + (not-empty)))))) (defn- get-file [cfg file-id] - (letfn [(get-file* [{:keys [::db/conn]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn file-id)] - (some-> (db/get* conn :file {:id file-id} {::db/remove-deleted? false}) - (files/decode-row) - (files/process-pointers deref))))] - - (db/run! cfg get-file*))) + (db/run! cfg (fn [{:keys [::db/conn] :as cfg}] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] + (some-> (db/get* conn :file {:id file-id} {::db/remove-deleted? false}) + (files/decode-row) + (update :data feat.fdata/process-pointers deref)))))) (defn- get-file-media [{:keys [::db/pool]} {:keys [data id] :as file}] @@ -642,6 +641,9 @@ validate? (contains? cf/flags :file-validation) features (cfeat/get-team-enabled-features cf/flags team)] + (sse/tap {:type :import-progress + :section :read-import}) + ;; Process all sections (run! (fn [section] (l/dbg :hint "reading section" :section section ::l/sync? 
true) @@ -651,6 +653,8 @@ (assoc ::section section) (assoc ::input input))] (binding [*options* options] + (sse/tap {:type :import-progress + :section section}) (read-section options)))) [:v1/metadata :v1/files :v1/rels :v1/sobjects]) @@ -658,9 +662,9 @@ (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)] (case feature "components/v2" - (features.components-v2/migrate-file! options file-id - :validate? validate? - :throw-on-validate? true) + (feat.compv2/migrate-file! options file-id + :validate? validate? + :throw-on-validate? true) "fdata/shape-data-type" nil @@ -694,11 +698,11 @@ (cond-> file (and (contains? cfeat/*current* "fdata/objects-map") (not (contains? cfeat/*previous* "fdata/objects-map"))) - (features.fdata/enable-objects-map) + (feat.fdata/enable-objects-map) (and (contains? cfeat/*current* "fdata/pointer-map") (not (contains? cfeat/*previous* "fdata/pointer-map"))) - (features.fdata/enable-pointer-map))) + (feat.fdata/enable-pointer-map))) (defn- get-remaped-thumbnails [thumbnails file-id] @@ -709,7 +713,7 @@ thumbnails)) (defmethod read-section :v1/files - [{:keys [::db/conn ::input ::project-id ::enabled-features ::timestamp ::overwrite?]}] + [{:keys [::db/conn ::input ::project-id ::enabled-features ::timestamp ::overwrite?] :as system}] (doseq [expected-file-id (-> *state* deref :files)] (let [file (read-obj! input) @@ -765,7 +769,6 @@ cfeat/*previous* (:features file) pmap/*tracked* (atom {})] - (let [params (-> file (assoc :id file-id') (assoc :features features) @@ -813,7 +816,7 @@ (create-or-update-file! conn params) (db/insert! conn :file params)) - (files/persist-pointers! conn file-id') + (feat.fdata/persist-pointers! system file-id') (when overwrite? (db/delete! conn :file-thumbnail {:file-id file-id'})) @@ -1056,54 +1059,71 @@ ;; --- Command: export-binfile -(s/def ::file-id ::us/uuid) -(s/def ::include-libraries? ::us/boolean) -(s/def ::embed-assets? 
::us/boolean) - -(s/def ::export-binfile - (s/keys :req [::rpc/profile-id] - :req-un [::file-id ::include-libraries? ::embed-assets?])) +(def ^:private + schema:export-binfile + (sm/define + [:map {:title "export-binfile"} + [:file-id ::sm/uuid] + [:include-libraries? :boolean] + [:embed-assets? :boolean]])) (sv/defmethod ::export-binfile "Export a penpot file in a binary format." {::doc/added "1.15" - ::webhooks/event? true} + ::webhooks/event? true + ::sm/result schema:export-binfile} [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries? embed-assets?] :as params}] (files/check-read-permissions! pool profile-id file-id) - (let [body (reify rres/StreamableResponseBody - (-write-body-to-stream [_ _ output-stream] - (-> cfg - (assoc ::file-ids [file-id]) - (assoc ::embed-assets? embed-assets?) - (assoc ::include-libraries? include-libraries?) - (export! output-stream))))] + (fn [_] + {::rres/status 200 + ::rres/headers {"content-type" "application/octet-stream"} + ::rres/body (reify rres/StreamableResponseBody + (-write-body-to-stream [_ _ output-stream] + (-> cfg + (assoc ::file-ids [file-id]) + (assoc ::embed-assets? embed-assets?) + (assoc ::include-libraries? include-libraries?) + (export! output-stream))))})) - (fn [_] - {::rres/status 200 - ::rres/body body - ::rres/headers {"content-type" "application/octet-stream"}}))) -(s/def ::file ::media/upload) -(s/def ::import-binfile - (s/keys :req [::rpc/profile-id] - :req-un [::project-id ::file])) +;; --- Command: import-binfile + +(def ^:private + schema:import-binfile + (sm/define + [:map {:title "import-binfile"} + [:project-id ::sm/uuid] + [:file ::media/upload]])) + +(declare ^:private import-binfile) (sv/defmethod ::import-binfile "Import a penpot file in a binary format." {::doc/added "1.15" - ::webhooks/event? true} + ::webhooks/event? true + ::sse/stream? 
true + ::sm/params schema:import-binfile} [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id file] :as params}] - (db/with-atomic [conn pool] - (projects/check-read-permissions! conn profile-id project-id) - (let [ids (import! (assoc cfg - ::input (:path file) - ::project-id project-id - ::profile-id profile-id - ::ignore-index-errors? true))] + (projects/check-read-permissions! pool profile-id project-id) + (let [params (-> cfg + (assoc ::input (:path file)) + (assoc ::project-id project-id) + (assoc ::profile-id profile-id) + (assoc ::ignore-index-errors? true))] + (with-meta + (sse/response #(import-binfile params)) + {::audit/props {:file nil}}))) - (db/update! conn :project - {:modified-at (dt/now)} - {:id project-id}) - - (rph/with-meta ids - {::audit/props {:file nil :file-ids ids}})))) +(defn- import-binfile + [{:keys [::wrk/executor ::project-id] :as params}] + (db/tx-run! params + (fn [{:keys [::db/conn] :as params}] + ;; NOTE: the importation process performs some operations that + ;; are not very friendly with virtual threads, and for avoid + ;; unexpected blocking of other concurrent operations we + ;; dispatch that operation to a dedicated executor. + (let [result (p/thread-call executor (partial import! params))] + (db/update! conn :project + {:modified-at (dt/now)} + {:id project-id}) + (deref result))))) diff --git a/backend/src/app/rpc/commands/comments.clj b/backend/src/app/rpc/commands/comments.clj index b8352f6220..5e87884f6d 100644 --- a/backend/src/app/rpc/commands/comments.clj +++ b/backend/src/app/rpc/commands/comments.clj @@ -12,6 +12,7 @@ [app.common.spec :as us] [app.common.uuid :as uuid] [app.db :as db] + [app.features.fdata :as feat.fdata] [app.loggers.audit :as-alias audit] [app.loggers.webhooks :as-alias webhooks] [app.rpc :as-alias rpc] @@ -43,15 +44,17 @@ (defn- get-file "A specialized version of get-file for comments module." 
- [conn file-id page-id] - (binding [pmap/*load-fn* (partial files/load-pointer conn file-id)] - (if-let [{:keys [data] :as file} (some-> (db/exec-one! conn [sql:get-file file-id]) (files/decode-row))] + [{:keys [::db/conn] :as cfg} file-id page-id] + (if-let [{:keys [data] :as file} (some-> (db/exec-one! conn [sql:get-file file-id]) + (files/decode-row))] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] (-> file (assoc :page-name (dm/get-in data [:pages-index page-id :name])) - (assoc :page-id page-id)) - (ex/raise :type :not-found - :code :object-not-found - :hint "file not found")))) + (assoc :page-id page-id))) + + (ex/raise :type :not-found + :code :object-not-found + :hint "file not found"))) (defn- get-comment-thread [conn thread-id & {:as opts}] @@ -288,38 +291,37 @@ (sv/defmethod ::create-comment-thread {::doc/added "1.15" ::webhooks/event? true} - [{:keys [::db/pool] :as cfg} - {:keys [::rpc/profile-id ::rpc/request-at file-id page-id share-id position content frame-id]}] + [cfg {:keys [::rpc/profile-id ::rpc/request-at file-id page-id share-id position content frame-id]}] + (db/tx-run! cfg + (fn [{:keys [::db/conn] :as cfg}] + (files/check-comment-permissions! conn profile-id file-id share-id) + (let [{:keys [team-id project-id page-name] :as file} (get-file cfg file-id page-id)] - (db/with-atomic [conn pool] - (let [{:keys [team-id project-id page-name] :as file} (get-file conn file-id page-id)] - (files/check-comment-permissions! conn profile-id file-id share-id) + (run! (partial quotes/check-quote! conn) + (list {::quotes/id ::quotes/comment-threads-per-file + ::quotes/profile-id profile-id + ::quotes/team-id team-id + ::quotes/project-id project-id + ::quotes/file-id file-id} + {::quotes/id ::quotes/comments-per-file + ::quotes/profile-id profile-id + ::quotes/team-id team-id + ::quotes/project-id project-id + ::quotes/file-id file-id})) - (run! (partial quotes/check-quote! 
conn) - (list {::quotes/id ::quotes/comment-threads-per-file - ::quotes/profile-id profile-id - ::quotes/team-id team-id - ::quotes/project-id project-id - ::quotes/file-id file-id} - {::quotes/id ::quotes/comments-per-file - ::quotes/profile-id profile-id - ::quotes/team-id team-id - ::quotes/project-id project-id - ::quotes/file-id file-id})) - - (rtry/with-retry {::rtry/when rtry/conflict-exception? - ::rtry/max-retries 3 - ::rtry/label "create-comment-thread" - ::db/conn conn} - (create-comment-thread conn - {:created-at request-at - :profile-id profile-id - :file-id file-id - :page-id page-id - :page-name page-name - :position position - :content content - :frame-id frame-id}))))) + (rtry/with-retry {::rtry/when rtry/conflict-exception? + ::rtry/max-retries 3 + ::rtry/label "create-comment-thread" + ::db/conn conn} + (create-comment-thread conn + {:created-at request-at + :profile-id profile-id + :file-id file-id + :page-id page-id + :page-name page-name + :position position + :content content + :frame-id frame-id})))))) (defn- create-comment-thread @@ -402,8 +404,7 @@ ;; --- COMMAND: Add Comment -(declare get-comment-thread) -(declare create-comment) +(declare ^:private get-comment-thread) (s/def ::create-comment (s/keys :req [::rpc/profile-id] @@ -413,49 +414,51 @@ (sv/defmethod ::create-comment {::doc/added "1.15" ::webhooks/event? true} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content] :as params}] - (db/with-atomic [conn pool] - (let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true) - {:keys [team-id project-id page-name] :as file} (get-file conn file-id page-id)] + [cfg {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content]}] + (db/tx-run! cfg + (fn [{:keys [::db/conn] :as cfg}] + (let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? 
true) + {:keys [team-id project-id page-name] :as file} (get-file cfg file-id page-id)] - (files/check-comment-permissions! conn profile-id (:id file) share-id) - (quotes/check-quote! conn - {::quotes/id ::quotes/comments-per-file - ::quotes/profile-id profile-id - ::quotes/team-id team-id - ::quotes/project-id project-id - ::quotes/file-id (:id file)}) + (files/check-comment-permissions! conn profile-id (:id file) share-id) + (quotes/check-quote! conn + {::quotes/id ::quotes/comments-per-file + ::quotes/profile-id profile-id + ::quotes/team-id team-id + ::quotes/project-id project-id + ::quotes/file-id (:id file)}) - ;; Update the page-name cached attribute on comment thread table. - (when (not= page-name (:page-name thread)) - (db/update! conn :comment-thread - {:page-name page-name} - {:id thread-id})) + ;; Update the page-name cached attribute on comment thread table. + (when (not= page-name (:page-name thread)) + (db/update! conn :comment-thread + {:page-name page-name} + {:id thread-id})) - (let [comment (db/insert! conn :comment - {:id (uuid/next) - :created-at request-at - :modified-at request-at - :thread-id thread-id - :owner-id profile-id - :content content}) - props {:file-id file-id - :share-id nil}] + (let [comment (db/insert! conn :comment + {:id (uuid/next) + :created-at request-at + :modified-at request-at + :thread-id thread-id + :owner-id profile-id + :content content}) + props {:file-id file-id + :share-id nil}] - ;; Update thread modified-at attribute and assoc the current - ;; profile to the participant set. - (db/update! conn :comment-thread - {:modified-at request-at - :participants (-> (:participants thread #{}) - (conj profile-id) - (db/tjson))} - {:id thread-id}) + ;; Update thread modified-at attribute and assoc the current + ;; profile to the participant set. + (db/update! 
conn :comment-thread + {:modified-at request-at + :participants (-> (:participants thread #{}) + (conj profile-id) + (db/tjson))} + {:id thread-id}) - ;; Update the current profile status in relation to the - ;; current thread. - (upsert-comment-thread-status! conn profile-id thread-id request-at) + ;; Update the current profile status in relation to the + ;; current thread. + (upsert-comment-thread-status! conn profile-id thread-id request-at) + + (vary-meta comment assoc ::audit/props props)))))) - (vary-meta comment assoc ::audit/props props))))) ;; --- COMMAND: Update Comment @@ -466,29 +469,31 @@ (sv/defmethod ::update-comment {::doc/added "1.15"} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id share-id content] :as params}] - (db/with-atomic [conn pool] - (let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::db/for-update? true) - {:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)] + [cfg {:keys [::rpc/profile-id ::rpc/request-at id share-id content]}] - (files/check-comment-permissions! conn profile-id file-id share-id) + (db/tx-run! cfg + (fn [{:keys [::db/conn] :as cfg}] + (let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::db/for-update? true) + {:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)] - ;; Don't allow edit comments to not owners - (when-not (= owner-id profile-id) - (ex/raise :type :validation - :code :not-allowed)) + (files/check-comment-permissions! conn profile-id file-id share-id) - (let [{:keys [page-name] :as file} (get-file conn file-id page-id)] - (db/update! conn :comment - {:content content - :modified-at request-at} - {:id id}) + ;; Don't allow edit comments to not owners + (when-not (= owner-id profile-id) + (ex/raise :type :validation + :code :not-allowed)) - (db/update! 
conn :comment-thread - {:modified-at request-at - :page-name page-name} - {:id thread-id}) - nil)))) + (let [{:keys [page-name] :as file} (get-file cfg file-id page-id)] + (db/update! conn :comment + {:content content + :modified-at request-at} + {:id id}) + + (db/update! conn :comment-thread + {:modified-at request-at + :page-name page-name} + {:id thread-id}) + nil))))) ;; --- COMMAND: Delete Comment Thread @@ -499,7 +504,7 @@ (sv/defmethod ::delete-comment-thread {::doc/added "1.15"} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}] + [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id]}] (db/with-atomic [conn pool] (let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)] (files/check-comment-permissions! conn profile-id file-id share-id) @@ -539,12 +544,12 @@ (sv/defmethod ::update-comment-thread-position {::doc/added "1.15"} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id position frame-id share-id] :as params}] + [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id position frame-id share-id]}] (db/with-atomic [conn pool] (let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)] (files/check-comment-permissions! conn profile-id file-id share-id) (db/update! conn :comment-thread - {:modified-at (::rpc/request-at params) + {:modified-at request-at :position (db/pgpoint position) :frame-id frame-id} {:id (:id thread)}) @@ -559,12 +564,12 @@ (sv/defmethod ::update-comment-thread-frame {::doc/added "1.15"} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id frame-id share-id] :as params}] + [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id frame-id share-id]}] (db/with-atomic [conn pool] (let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)] (files/check-comment-permissions! conn profile-id file-id share-id) (db/update! 
conn :comment-thread - {:modified-at (::rpc/request-at params) + {:modified-at request-at :frame-id frame-id} {:id id}) nil))) diff --git a/backend/src/app/rpc/commands/files.clj b/backend/src/app/rpc/commands/files.clj index 31fbd359f0..de9bc4b3dc 100644 --- a/backend/src/app/rpc/commands/files.clj +++ b/backend/src/app/rpc/commands/files.clj @@ -20,6 +20,7 @@ [app.common.types.file :as ctf] [app.config :as cf] [app.db :as db] + [app.features.fdata :as feat.fdata] [app.loggers.audit :as-alias audit] [app.loggers.webhooks :as-alias webhooks] [app.rpc :as-alias rpc] @@ -181,62 +182,6 @@ :code :object-not-found :hint "not found")))) -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; FEATURES: pointer-map -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -(defn load-pointer - [conn file-id id] - - (dm/assert! - "expected valid connection" - (db/connection? conn)) - - (let [{:keys [content]} (db/get conn :file-data-fragment - {:id id :file-id file-id} - {::db/columns [:content] - ::db/check-deleted? false})] - (when-not content - (ex/raise :type :internal - :code :fragment-not-found - :hint "fragment not found" - :file-id file-id - :fragment-id id)) - - (blob/decode content))) - -(defn persist-pointers! - [conn file-id] - (doseq [[id item] @pmap/*tracked*] - (when (pmap/modified? item) - (let [content (-> item deref blob/encode)] - (db/insert! conn :file-data-fragment - {:id id - :file-id file-id - :content content}))))) - -(defn process-pointers - [file update-fn] - (update file :data (fn resolve-fn [data] - (cond-> data - (contains? data :pages-index) - (update :pages-index resolve-fn) - - :always - (update-vals (fn [val] - (if (pmap/pointer-map? val) - (update-fn val) - val))))))) - - -(defn get-all-pointer-ids - "Given a file, return all pointer ids used in the data." - [fdata] - (->> (concat (vals fdata) - (vals (:pages-index fdata))) - (into #{} (comp (filter pmap/pointer-map?) 
- (map pmap/get-id))))) - ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; QUERY COMMANDS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -277,49 +222,51 @@ [:id ::sm/uuid] [:project-id {:optional true} ::sm/uuid]])) -(defn get-file - [conn id & {:keys [project-id migrate? - include-deleted? - lock-for-update?] - :or {include-deleted? false - lock-for-update? false}}] - (dm/assert! - "expected raw connection" - (db/connection? conn)) - - (binding [pmap/*load-fn* (partial load-pointer conn id) +(defn- migrate-file + [{:keys [::db/conn] :as cfg} {:keys [id] :as file}] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id) pmap/*tracked* (pmap/create-tracked) cfeat/*new* (atom #{})] + (let [file (fmg/migrate-file file)] + ;; NOTE: when file is migrated, we break the rule of no perform + ;; mutations on get operations and update the file with all + ;; migrations applied + ;; + ;; NOTE: the following code will not work on read-only mode, it + ;; is a known issue; we keep is not implemented until we really + ;; need this + (if (fmg/migrated? file) + (let [file (update file :features cfeat/migrate-legacy-features) + features (set/union (deref cfeat/*new*) (:features file))] + (db/update! conn :file + {:data (blob/encode (:data file)) + :features (db/create-array conn "text" features)} + {:id id}) + (feat.fdata/persist-pointers! cfg id) + (assoc file :features features)) + file)))) - (let [params (merge {:id id} - (when (some? project-id) - {:project-id project-id})) +(defn get-file + [{:keys [::db/conn] :as cfg} id & {:keys [project-id migrate? + include-deleted? + lock-for-update?] + :or {include-deleted? false + lock-for-update? false}}] + (dm/assert! + "expected cfg with valid connection" + (db/connection-map? cfg)) - file (-> (db/get conn :file params - {::db/check-deleted? (not include-deleted?) - ::db/remove-deleted? (not include-deleted?) - ::db/for-update? 
lock-for-update?}) - (decode-row) - (cond-> migrate? - (fmg/migrate-file)))] - - ;; NOTE: when file is migrated, we break the rule of no perform - ;; mutations on get operations and update the file with all - ;; migrations applied - ;; - ;; NOTE: the following code will not work on read-only mode, it - ;; is a known issue; we keep is not implemented until we really - ;; need this - (if (fmg/migrated? file) - (let [file (update file :features cfeat/migrate-legacy-features) - features (set/union (deref cfeat/*new*) (:features file))] - (db/update! conn :file - {:data (blob/encode (:data file)) - :features (db/create-array conn "text" features)} - {:id id}) - (persist-pointers! conn id) - (assoc file :features features)) - file)))) + (let [params (merge {:id id} + (when (some? project-id) + {:project-id project-id})) + file (-> (db/get conn :file params + {::db/check-deleted? (not include-deleted?) + ::db/remove-deleted? (not include-deleted?) + ::db/for-update? lock-for-update?}) + (decode-row))] + (if migrate? + (migrate-file cfg file) + file))) (defn get-minimal-file [{:keys [::db/pool] :as cfg} id] @@ -345,7 +292,7 @@ :project-id project-id :file-id id) - file (-> (get-file conn id :project-id project-id) + file (-> (get-file cfg id :project-id project-id) (assoc :permissions perms) (check-version!)) @@ -358,8 +305,8 @@ ;; pointers on backend and return a complete file. file (if (and (contains? (:features file) "fdata/pointer-map") (not (contains? (:features params) "fdata/pointer-map"))) - (binding [pmap/*load-fn* (partial load-pointer conn id)] - (process-pointers file deref)) + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)] + (update file :data feat.fdata/process-pointers deref)) file)] (vary-meta file assoc ::cond/key (get-file-etag params file))))))) @@ -498,6 +445,7 @@ (defn get-page [{:keys [::db/conn] :as cfg} {:keys [profile-id file-id page-id object-id] :as params}] + (when (and (uuid? object-id) (not (uuid? 
page-id))) (ex/raise :type :validation @@ -508,13 +456,13 @@ :profile-id profile-id :file-id file-id) - file (get-file conn file-id) + file (get-file cfg file-id) _ (-> (cfeat/get-team-enabled-features cf/flags team) (cfeat/check-client-features! (:features params)) (cfeat/check-file-features! (:features file) (:features params))) - page (binding [pmap/*load-fn* (partial load-pointer conn file-id)] + page (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] (let [page-id (or page-id (-> file :data :pages first)) page (dm/get-in file [:data :pages-index page-id])] (if (pmap/pointer-map? page) @@ -573,37 +521,39 @@ and p.team_id = ? order by f.modified_at desc") -;; FIXME: i'm not sure about feature handling here... ??? -(defn get-team-shared-files - [conn team-id] + +(defn- get-library-summary + [cfg {:keys [id data] :as file}] (letfn [(assets-sample [assets limit] (let [sorted-assets (->> (vals assets) (sort-by #(str/lower (:name %))))] {:count (count sorted-assets) - :sample (into [] (take limit sorted-assets))})) + :sample (into [] (take limit sorted-assets))}))] - (library-summary [{:keys [id data] :as file}] - (binding [pmap/*load-fn* (partial load-pointer conn id)] - (let [load-objects (fn [component] - (ctf/load-component-objects data component)) - components-sample (-> (assets-sample (ctkl/components data) 4) - (update :sample #(mapv load-objects %)))] - {:components components-sample - :media (assets-sample (:media data) 3) - :colors (assets-sample (:colors data) 3) - :typographies (assets-sample (:typographies data) 3)})))] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)] + (let [load-objects (fn [component] + (ctf/load-component-objects data component)) + components-sample (-> (assets-sample (ctkl/components data) 4) + (update :sample #(mapv load-objects %)))] + {:components components-sample + :media (assets-sample (:media data) 3) + :colors (assets-sample (:colors data) 3) + :typographies (assets-sample 
(:typographies data) 3)})))) - (->> (db/exec! conn [sql:team-shared-files team-id]) - (into #{} (comp - (map decode-row) - (map (fn [row] - (if-let [media-id (:media-id row)] - (-> row - (dissoc :media-id) - (assoc :thumbnail-uri (resolve-public-uri media-id))) - (dissoc row :media-id)))) - (map #(assoc % :library-summary (library-summary %))) - (map #(dissoc % :data))))))) +(defn- get-team-shared-files + [{:keys [::db/conn] :as cfg} {:keys [team-id profile-id]}] + (teams/check-read-permissions! conn profile-id team-id) + (->> (db/exec! conn [sql:team-shared-files team-id]) + (into #{} (comp + (map decode-row) + (map (fn [row] + (if-let [media-id (:media-id row)] + (-> row + (dissoc :media-id) + (assoc :thumbnail-uri (resolve-public-uri media-id))) + (dissoc row :media-id)))) + (map #(assoc % :library-summary (get-library-summary cfg %))) + (map #(dissoc % :data)))))) (def ^:private schema:get-team-shared-files [:map {:title "get-team-shared-files"} @@ -613,10 +563,8 @@ "Get all file (libraries) for the specified team." {::doc/added "1.17" ::sm/params schema:get-team-shared-files} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}] - (dm/with-open [conn (db/open pool)] - (teams/check-read-permissions! conn profile-id team-id) - (get-team-shared-files conn team-id))) + [cfg {:keys [::rpc/profile-id] :as params}] + (db/tx-run! cfg get-team-shared-files (assoc params :profile-id profile-id))) ;; --- COMMAND QUERY: get-file-libraries @@ -744,30 +692,32 @@ ;; --- COMMAND QUERY: get-file-summary +(defn- get-file-summary + [{:keys [::db/conn] :as cfg} {:keys [profile-id id project-id] :as params}] + (check-read-permissions! conn profile-id id) + (let [team (teams/get-team conn + :profile-id profile-id + :project-id project-id + :file-id id) + + file (get-file cfg id :project-id project-id)] + + (-> (cfeat/get-team-enabled-features cf/flags team) + (cfeat/check-client-features! (:features params)) + (cfeat/check-file-features! 
(:features file) (:features params))) + + {:name (:name file) + :components-count (count (ctkl/components-seq (:data file))) + :graphics-count (count (get-in file [:data :media] [])) + :colors-count (count (get-in file [:data :colors] [])) + :typography-count (count (get-in file [:data :typographies] []))})) + (sv/defmethod ::get-file-summary "Retrieve a file summary by its ID. Only authenticated users." {::doc/added "1.20" ::sm/params schema:get-file} - [cfg {:keys [::rpc/profile-id id project-id] :as params}] - (db/tx-run! cfg - (fn [{:keys [::db/conn] :as cfg}] - (check-read-permissions! conn profile-id id) - (let [team (teams/get-team conn - :profile-id profile-id - :project-id project-id - :file-id id) - - file (get-file conn id :project-id project-id)] - - (-> (cfeat/get-team-enabled-features cf/flags team) - (cfeat/check-client-features! (:features params)) - (cfeat/check-file-features! (:features file) (:features params))) - - {:name (:name file) - :components-count (count (ctkl/components-seq (:data file))) - :graphics-count (count (get-in file [:data :media] [])) - :colors-count (count (get-in file [:data :colors] [])) - :typography-count (count (get-in file [:data :typographies] []))})))) + [cfg {:keys [::rpc/profile-id] :as params}] + (db/tx-run! cfg get-file-summary (assoc params :profile-id profile-id))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; MUTATION COMMANDS @@ -827,10 +777,15 @@ ORDER BY f.created_at ASC;") (defn- absorb-library-by-file! - [conn ldata file-id] - (binding [pmap/*load-fn* (partial load-pointer conn file-id) + [cfg ldata file-id] + + (dm/assert! + "expected cfg with valid connection" + (db/connection-map? cfg)) + + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id) pmap/*tracked* (pmap/create-tracked)] - (let [file (-> (get-file conn file-id + (let [file (-> (get-file cfg file-id :include-deleted? true :lock-for-update? 
true) (update :data ctf/absorb-assets ldata))] @@ -839,28 +794,84 @@ :library-id (str (:id ldata)) :file-id (str file-id)) - (db/update! conn :file + (db/update! cfg :file {:revn (inc (:revn file)) :data (blob/encode (:data file)) :modified-at (dt/now)} {:id file-id}) - (persist-pointers! conn file-id)))) + (feat.fdata/persist-pointers! cfg file-id)))) (defn- absorb-library! "Find all files using a shared library, and absorb all library assets into the file local libraries" - [conn {:keys [id] :as library}] - (let [ldata (binding [pmap/*load-fn* (partial load-pointer conn id)] - (-> library (process-pointers deref) :data)) - ids (->> (db/exec! conn [sql:get-referenced-files id]) + [cfg {:keys [id] :as library}] + + (dm/assert! + "expected cfg with valid connection" + (db/connection-map? cfg)) + + (let [ldata (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)] + (-> library :data (feat.fdata/process-pointers deref))) + ids (->> (db/exec! cfg [sql:get-referenced-files id]) (map :id))] (l/trc :hint "absorbing library" :library-id (str id) :files (str/join "," (map str ids))) - (run! (partial absorb-library-by-file! conn ldata) ids))) + (run! (partial absorb-library-by-file! cfg ldata) ids))) + +(defn- set-file-shared + [{:keys [::db/conn] :as cfg} {:keys [profile-id id] :as params}] + (check-edition-permissions! conn profile-id id) + (let [file (db/get-by-id conn :file id {:columns [:id :name :is-shared]}) + file (cond + (and (true? (:is-shared file)) + (false? (:is-shared params))) + ;; When we disable shared flag on an already shared + ;; file, we need to perform more complex operation, + ;; so in this case we retrieve the complete file and + ;; perform all required validations. + (let [file (-> (get-file cfg id :lock-for-update? true) + (check-version!) 
+ (assoc :is-shared false)) + team (teams/get-team conn + :profile-id profile-id + :project-id (:project-id file))] + + (-> (cfeat/get-team-enabled-features cf/flags team) + (cfeat/check-client-features! (:features params)) + (cfeat/check-file-features! (:features file))) + + (absorb-library! cfg file) + + (db/delete! conn :file-library-rel {:library-file-id id}) + (db/update! conn :file + {:is-shared false} + {:id id}) + file) + + (and (false? (:is-shared file)) + (true? (:is-shared params))) + (let [file (assoc file :is-shared true)] + (db/update! conn :file + {:is-shared false} + {:id id}) + file) + + :else + (ex/raise :type :validation + :code :invalid-shared-state + :hint "unexpected state found" + :params-is-shared (:is-shared params) + :file-is-shared (:is-shared file)))] + + (rph/with-meta + (select-keys file [:id :name :is-shared]) + {::audit/props {:name (:name file) + :project-id (:project-id file) + :is-shared (:is-shared file)}}))) (def ^:private schema:set-file-shared @@ -873,56 +884,8 @@ {::doc/added "1.17" ::webhooks/event? true ::sm/params schema:set-file-shared} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}] - (db/with-atomic [conn pool] - (check-edition-permissions! conn profile-id id) - (let [file (db/get-by-id conn id {:columns [:id :name :is-shared]}) - file (cond - (and (true? (:is-shared file)) - (false? (:is-shared params))) - ;; When we disable shared flag on an already shared - ;; file, we need to perform more complex operation, - ;; so in this case we retrieve the complete file and - ;; perform all required validations. - (let [file (-> (get-file conn id :lock-for-update? true) - (check-version!) - (assoc :is-shared false)) - team (teams/get-team conn - :profile-id profile-id - :project-id (:project-id file))] - - (-> (cfeat/get-team-enabled-features cf/flags team) - (cfeat/check-client-features! (:features params)) - (cfeat/check-file-features! (:features file))) - - (absorb-library! conn file) - - (db/delete! 
conn :file-library-rel {:library-file-id id}) - (db/update! conn :file - {:is-shared false} - {:id id}) - file) - - (and (false? (:is-shared file)) - (true? (:is-shared params))) - (let [file (assoc file :is-shared true)] - (db/update! conn :file - {:is-shared false} - {:id id}) - file) - - :else - (ex/raise :type :validation - :code :invalid-shared-state - :hint "unexpected state found" - :params-is-shared (:is-shared params) - :file-is-shared (:is-shared file)))] - - (rph/with-meta - (select-keys file [:id :name :is-shared]) - {::audit/props {:name (:name file) - :project-id (:project-id file) - :is-shared (:is-shared file)}})))) + [cfg {:keys [::rpc/profile-id] :as params}] + (db/tx-run! cfg set-file-shared (assoc params :profile-id profile-id))) ;; --- MUTATION COMMAND: delete-file @@ -939,41 +902,44 @@ [:map {:title "delete-file"} [:id ::sm/uuid]])) +(defn- delete-file + [{:keys [::db/conn] :as cfg} {:keys [profile-id id] :as params}] + (check-edition-permissions! conn profile-id id) + (let [file (mark-file-deleted! conn id)] + + ;; NOTE: when a file is a shared library, then we proceed to load + ;; the whole file, proceed with feature checking and properly execute + ;; the absorb-library procedure + (when (:is-shared file) + (let [file (-> (get-file cfg id + :lock-for-update? true + :include-deleted? true) + (check-version!)) + + team (teams/get-team conn + :profile-id profile-id + :project-id (:project-id file))] + + + + (-> (cfeat/get-team-enabled-features cf/flags team) + (cfeat/check-client-features! (:features params)) + (cfeat/check-file-features! (:features file))) + + (absorb-library! cfg file))) + + (rph/with-meta (rph/wrap) + {::audit/props {:project-id (:project-id file) + :name (:name file) + :created-at (:created-at file) + :modified-at (:modified-at file)}}))) + (sv/defmethod ::delete-file {::doc/added "1.17" ::webhooks/event? 
true ::sm/params schema:delete-file} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}] - (db/with-atomic [conn pool] - (check-edition-permissions! conn profile-id id) - (let [file (mark-file-deleted! conn id)] - - ;; NOTE: when a file is a shared library, then we proceed to load - ;; the whole file, proceed with feature checking and properly execute - ;; the absorb-library procedure - (when (:is-shared file) - (let [file (-> (get-file conn id - :lock-for-update? true - :include-deleted? true) - (check-version!)) - - team (teams/get-team conn - :profile-id profile-id - :project-id (:project-id file))] - - - - (-> (cfeat/get-team-enabled-features cf/flags team) - (cfeat/check-client-features! (:features params)) - (cfeat/check-file-features! (:features file))) - - (absorb-library! conn file))) - - (rph/with-meta (rph/wrap) - {::audit/props {:project-id (:project-id file) - :name (:name file) - :created-at (:created-at file) - :modified-at (:modified-at file)}})))) + [cfg {:keys [::rpc/profile-id] :as params}] + (db/tx-run! cfg delete-file (assoc params :profile-id profile-id))) ;; --- MUTATION COMMAND: link-file-to-library diff --git a/backend/src/app/rpc/commands/files_create.clj b/backend/src/app/rpc/commands/files_create.clj index b3cae10976..59273f033e 100644 --- a/backend/src/app/rpc/commands/files_create.clj +++ b/backend/src/app/rpc/commands/files_create.clj @@ -7,12 +7,14 @@ (ns app.rpc.commands.files-create (:require [app.common.data :as d] + [app.common.data.macros :as dm] [app.common.features :as cfeat] [app.common.schema :as sm] [app.common.types.file :as ctf] [app.common.uuid :as uuid] [app.config :as cf] [app.db :as db] + [app.features.fdata :as feat.fdata] [app.loggers.audit :as-alias audit] [app.loggers.webhooks :as-alias webhooks] [app.rpc :as-alias rpc] @@ -44,13 +46,20 @@ :or {is-shared false revn 0 create-page true} :as params}] + (dm/assert! + "expected a valid connection" + (db/connection? 
conn)) + (let [id (or id (uuid/next)) - pointers (atom {}) + pointers (pmap/create-tracked) + pmap? (contains? features "fdata/pointer-map") + omap? (contains? features "fdata/objects-map") + data (binding [pmap/*tracked* pointers cfeat/*current* features - cfeat/*wrap-with-objects-map-fn* (if (features "fdata/objects-map") omap/wrap identity) - cfeat/*wrap-with-pointer-map-fn* (if (features "fdata/pointer-map") pmap/wrap identity)] + cfeat/*wrap-with-objects-map-fn* (if omap? omap/wrap identity) + cfeat/*wrap-with-pointer-map-fn* (if pmap? pmap/wrap identity)] (if create-page (ctf/make-file-data id) (ctf/make-file-data id nil))) @@ -72,7 +81,7 @@ :deleted-at deleted-at}))] (binding [pmap/*tracked* pointers] - (files/persist-pointers! conn id)) + (feat.fdata/persist-pointers! cfg id)) (->> (assoc params :file-id id :role :owner) (create-file-role! conn)) diff --git a/backend/src/app/rpc/commands/files_thumbnails.clj b/backend/src/app/rpc/commands/files_thumbnails.clj index 3eb5c5444b..c60338349f 100644 --- a/backend/src/app/rpc/commands/files_thumbnails.clj +++ b/backend/src/app/rpc/commands/files_thumbnails.clj @@ -16,6 +16,7 @@ [app.common.types.shape-tree :as ctt] [app.config :as cf] [app.db :as db] + [app.features.fdata :as feat.fdata] [app.loggers.audit :as-alias audit] [app.loggers.webhooks :as-alias webhooks] [app.media :as media] @@ -100,28 +101,28 @@ ;; loading all pages into memory for find the frame set for thumbnail. (defn get-file-data-for-thumbnail - [conn {:keys [data id] :as file}] + [{:keys [::db/conn] :as cfg} {:keys [data id] :as file}] (letfn [;; function responsible on finding the frame marked to be ;; used as thumbnail; the returned frame always have ;; the :page-id set to the page that it belongs. 
- (get-thumbnail-frame [data] + (get-thumbnail-frame [file] ;; NOTE: this is a hack for avoid perform blocking ;; operation inside the for loop, clojure lazy-seq uses ;; synchronized blocks that does not plays well with - ;; virtual threads, so we need to perform the load - ;; operation first. This operation forces all pointer maps - ;; load into the memory. - (->> (-> data :pages-index vals) - (filter pmap/pointer-map?) - (run! pmap/load!)) - - ;; Then proceed to find the frame set for thumbnail - (d/seek #(or (:use-for-thumbnail %) - (:use-for-thumbnail? %)) ; NOTE: backward comp (remove on v1.21) - (for [page (-> data :pages-index vals) - frame (-> page :objects ctt/get-frames)] - (assoc frame :page-id (:id page))))) + ;; virtual threads where all rpc methods calls are + ;; dispatched, so we need to perform the load operation + ;; first. This operation forces all pointer maps load into + ;; the memory. + ;; + ;; FIXME: this is no longer true with clojure>=1.12 + (let [{:keys [data]} (update file :data feat.fdata/process-pointers pmap/load!)] + ;; Then proceed to find the frame set for thumbnail + (d/seek #(or (:use-for-thumbnail %) + (:use-for-thumbnail? %)) ; NOTE: backward comp (remove on v1.21) + (for [page (-> data :pages-index vals) + frame (-> page :objects ctt/get-frames)] + (assoc frame :page-id (:id page)))))) ;; function responsible to filter objects data structure of ;; all unneeded shapes if a concrete frame is provided. 
If no @@ -165,8 +166,8 @@ objects)))] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (let [frame (get-thumbnail-frame data) + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)] + (let [frame (get-thumbnail-frame file) frame-id (:id frame) page-id (or (:page-id frame) (-> data :pages first)) @@ -220,7 +221,7 @@ :profile-id profile-id :file-id file-id) - file (files/get-file conn file-id)] + file (files/get-file cfg file-id)] (-> (cfeat/get-team-enabled-features cf/flags team) (cfeat/check-client-features! (:features params)) @@ -228,7 +229,7 @@ {:file-id file-id :revn (:revn file) - :page (get-file-data-for-thumbnail conn file)})))) + :page (get-file-data-for-thumbnail cfg file)})))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; MUTATION COMMANDS diff --git a/backend/src/app/rpc/commands/files_update.clj b/backend/src/app/rpc/commands/files_update.clj index 0d94874f36..28e16ab2c7 100644 --- a/backend/src/app/rpc/commands/files_update.clj +++ b/backend/src/app/rpc/commands/files_update.clj @@ -17,7 +17,7 @@ [app.common.uuid :as uuid] [app.config :as cf] [app.db :as db] - [app.features.fdata :refer [enable-pointer-map enable-objects-map]] + [app.features.fdata :as feat.fdata] [app.http.errors :as errors] [app.loggers.audit :as audit] [app.loggers.webhooks :as webhooks] @@ -106,12 +106,12 @@ (defn- wrap-with-pointer-map-context [f] - (fn [{:keys [::db/conn] :as cfg} {:keys [id] :as file}] - (binding [pmap/*tracked* (atom {}) - pmap/*load-fn* (partial files/load-pointer conn id) + (fn [cfg {:keys [id] :as file}] + (binding [pmap/*tracked* (pmap/create-tracked) + pmap/*load-fn* (partial feat.fdata/load-pointer cfg id) cfeat/*wrap-with-pointer-map-fn* pmap/wrap] (let [result (f cfg file)] - (files/persist-pointers! conn id) + (feat.fdata/persist-pointers! 
cfg id) result)))) (defn- wrap-with-objects-map-context @@ -236,7 +236,7 @@ ;; to be executed on a separated executor for avoid to do the ;; CPU intensive operation on vthread. - update-fdata-fn (partial update-file-data conn file changes skip-validate) + update-fdata-fn (partial update-file-data cfg file changes skip-validate) file (-> (climit/configure cfg :update-file/global) (climit/run! update-fdata-fn executor))] @@ -290,7 +290,7 @@ file) (defn- update-file-data - [conn file changes skip-validate] + [{:keys [::db/conn] :as cfg} file changes skip-validate] (let [file (update file :data (fn [data] (-> data (blob/decode) @@ -304,10 +304,10 @@ (not skip-validate)) (->> (files/get-file-libraries conn (:id file)) (into [file] (map (fn [{:keys [id]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn id) + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id) pmap/*tracked* nil] - (-> (files/get-file conn id :migrate? false) - (files/process-pointers deref) ; ensure all pointers resolved + (-> (files/get-file cfg id :migrate? false) + (feat.fdata/process-pointers deref) ; ensure all pointers resolved (fmg/migrate-file)))))) (d/index-by :id)))] @@ -332,11 +332,11 @@ (cond-> (and (contains? cfeat/*current* "fdata/objects-map") (not (contains? cfeat/*previous* "fdata/objects-map"))) - (enable-objects-map)) + (feat.fdata/enable-objects-map)) (cond-> (and (contains? cfeat/*current* "fdata/pointer-map") (not (contains? 
cfeat/*previous* "fdata/pointer-map"))) - (enable-pointer-map)) + (feat.fdata/enable-pointer-map)) (update :data blob/encode)))) diff --git a/backend/src/app/rpc/commands/management.clj b/backend/src/app/rpc/commands/management.clj index 1c0090e69d..fd6d254ff5 100644 --- a/backend/src/app/rpc/commands/management.clj +++ b/backend/src/app/rpc/commands/management.clj @@ -14,12 +14,14 @@ [app.common.schema :as sm] [app.common.uuid :as uuid] [app.db :as db] + [app.features.fdata :as feat.fdata] + [app.http.sse :as sse] [app.loggers.webhooks :as-alias webhooks] [app.rpc :as-alias rpc] [app.rpc.commands.binfile :as binfile] [app.rpc.commands.files :as files] [app.rpc.commands.projects :as proj] - [app.rpc.commands.teams :as teams :refer [create-project-role create-project]] + [app.rpc.commands.teams :as teams] [app.rpc.doc :as-alias doc] [app.setup :as-alias setup] [app.setup.templates :as tmpl] @@ -27,7 +29,9 @@ [app.util.pointer-map :as pmap] [app.util.services :as sv] [app.util.time :as dt] + [app.worker :as-alias wrk] [clojure.walk :as walk] + [promesa.core :as p] [promesa.exec :as px])) ;; --- COMMAND: Duplicate File @@ -46,9 +50,8 @@ {::doc/added "1.16" ::webhooks/event? true ::sm/params schema:duplicate-file} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}] - (db/with-atomic [conn pool] - (duplicate-file conn (assoc params :profile-id profile-id)))) + [cfg {:keys [::rpc/profile-id] :as params}] + (db/tx-run! 
cfg duplicate-file (assoc params :profile-id profile-id))) (defn- remap-id [item index key] @@ -57,7 +60,7 @@ (assoc key (get index (get item key) (get item key))))) (defn- process-file - [conn {:keys [id] :as file} index] + [cfg index {:keys [id] :as file}] (letfn [(process-form [form] (cond-> form ;; Relink library items @@ -100,26 +103,29 @@ (dissoc k)) res))) media - media))] - (-> file - (update :id #(get index %)) - (update :data - (fn [data] - (binding [pmap/*load-fn* (partial files/load-pointer conn id) - pmap/*tracked* (atom {})] - (let [file-id (get index id) - data (-> data - (blob/decode) - (assoc :id file-id) - (pmg/migrate-data) - (update :pages-index relink-shapes) - (update :components relink-shapes) - (update :media relink-media) - (d/without-nils) - (files/process-pointers pmap/clone) - (blob/encode))] - (files/persist-pointers! conn file-id) - data))))))) + media)) + + (update-fdata [fdata new-id] + (-> fdata + (assoc :id new-id) + (pmg/migrate-data) + (update :pages-index relink-shapes) + (update :components relink-shapes) + (update :media relink-media) + (d/without-nils) + (feat.fdata/process-pointers pmap/clone)))] + + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id) + pmap/*tracked* (pmap/create-tracked) + cfeat/*new* (atom #{})] + (let [new-id (get index id) + file (-> file + (assoc :id new-id) + (update :data update-fdata new-id) + (update :features into (deref cfeat/*new*)) + (update :features cfeat/migrate-legacy-features))] + (feat.fdata/persist-pointers! cfg new-id) + file)))) (def sql:get-used-libraries "select flr.* @@ -136,7 +142,7 @@ and so.deleted_at is null") (defn duplicate-file* - [conn {:keys [profile-id file index project-id name flibs fmeds]} {:keys [reset-shared-flag]}] + [{:keys [::db/conn] :as cfg} {:keys [profile-id file index project-id name flibs fmeds]} {:keys [reset-shared-flag]}] (let [flibs (or flibs (db/exec! conn [sql:get-used-libraries (:id file)])) fmeds (or fmeds (db/exec! 
conn [sql:get-used-media-objects (:id file)])) @@ -179,9 +185,13 @@ (assoc :modified-at now) (assoc :ignore-sync-until ignore)) - file (process-file conn file index)] + file (process-file cfg index file)] + + (db/insert! conn :file + (-> file + (update :features #(db/create-array conn "text" %)) + (update :data blob/encode))) - (db/insert! conn :file file) (db/insert! conn :file-profile-rel {:file-id (:id file) :profile-id profile-id @@ -198,15 +208,14 @@ file)) (defn duplicate-file - [conn {:keys [profile-id file-id] :as params}] - (let [file (db/get-by-id conn :file file-id) + [{:keys [::db/conn] :as cfg} {:keys [profile-id file-id] :as params}] + (let [;; We don't touch the original file on duplication + file (files/get-file cfg file-id :migrate? false) index {file-id (uuid/next)} params (assoc params :index index :file file)] (proj/check-edition-permissions! conn profile-id (:project-id file)) (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"]) - (-> (duplicate-file* conn params {:reset-shared-flag true}) - (update :data blob/decode) - (update :features db/decode-pgarray #{})))) + (duplicate-file* cfg params {:reset-shared-flag true}))) ;; --- COMMAND: Duplicate Project @@ -224,12 +233,11 @@ {::doc/added "1.16" ::webhooks/event? true ::sm/params schema:duplicate-project} - [{:keys [::db/pool] :as cfg} params] - (db/with-atomic [conn pool] - (duplicate-project conn (assoc params :profile-id (::rpc/profile-id params))))) + [cfg {:keys [::rpc/profile-id] :as params}] + (db/tx-run! cfg duplicate-project (assoc params :profile-id profile-id))) (defn duplicate-project - [conn {:keys [profile-id project-id name] :as params}] + [{:keys [::db/conn] :as cfg} {:keys [profile-id project-id name] :as params}] ;; Defer all constraints (db/exec-one! 
conn ["SET CONSTRAINTS ALL DEFERRED"]) @@ -254,8 +262,8 @@ ;; create the duplicated project and assign the current profile as ;; a project owner - (create-project conn project) - (create-project-role conn profile-id (:id project) :owner) + (teams/create-project conn project) + (teams/create-project-role conn profile-id (:id project) :owner) ;; duplicate all files (let [index (reduce #(assoc %1 (:id %2) (uuid/next)) {} files) @@ -264,10 +272,10 @@ (assoc :project-id (:id project)) (assoc :index index))] (doseq [{:keys [id]} files] - (let [file (db/get-by-id conn :file id) + (let [file (files/get-file cfg id :migrate? false) params (assoc params :file file) opts {:reset-shared-flag false}] - (duplicate-file* conn params opts)))) + (duplicate-file* cfg params opts)))) ;; return the created project project)) @@ -405,29 +413,6 @@ ;; --- COMMAND: Clone Template -(defn- clone-template! - [{:keys [::db/conn] :as cfg} {:keys [profile-id template-id project-id]}] - (let [template (tmpl/get-template-stream cfg template-id) - project (db/get-by-id conn :project project-id {:columns [:id :team-id]})] - - (when-not template - (ex/raise :type :not-found - :code :template-not-found - :hint "template not found")) - - (teams/check-edition-permissions! conn profile-id (:team-id project)) - - (-> cfg - ;; FIXME: maybe reuse the conn instead of creating more - ;; connections in the import process? - (dissoc ::db/conn) - (assoc ::binfile/input template) - (assoc ::binfile/project-id (:id project)) - (assoc ::binfile/profile-id profile-id) - (assoc ::binfile/ignore-index-errors? true) - (assoc ::binfile/migrate? true) - (binfile/import!)))) - (def ^:private schema:clone-template (sm/define @@ -435,15 +420,46 @@ [:project-id ::sm/uuid] [:template-id ::sm/word-string]])) +(declare ^:private clone-template) + (sv/defmethod ::clone-template "Clone into the specified project the template by its id." {::doc/added "1.16" + ::sse/stream? true ::webhooks/event? 
true ::sm/params schema:clone-template} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}] - (db/with-atomic [conn pool] - (-> (assoc cfg ::db/conn conn) - (clone-template! (assoc params :profile-id profile-id))))) + [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id template-id] :as params}] + (let [project (db/get-by-id pool :project project-id {:columns [:id :team-id]}) + _ (teams/check-edition-permissions! pool profile-id (:team-id project)) + template (tmpl/get-template-stream cfg template-id) + params (-> cfg + (assoc ::binfile/input template) + (assoc ::binfile/project-id (:id project)) + (assoc ::binfile/profile-id profile-id) + (assoc ::binfile/ignore-index-errors? true) + (assoc ::binfile/migrate? true))] + + (when-not template + (ex/raise :type :not-found + :code :template-not-found + :hint "template not found")) + + (sse/response #(clone-template params)))) + +(defn- clone-template + [{:keys [::wrk/executor ::binfile/project-id] :as params}] + (db/tx-run! params + (fn [{:keys [::db/conn] :as params}] + ;; NOTE: the importation process performs some operations that + ;; are not very friendly with virtual threads, and for avoid + ;; unexpected blocking of other concurrent operations we + ;; dispatch that operation to a dedicated executor. + (let [result (p/thread-call executor (partial binfile/import! params))] + (db/update! 
conn :project + {:modified-at (dt/now)} + {:id project-id}) + + (deref result))))) ;; --- COMMAND: Get list of builtin templates diff --git a/backend/src/app/rpc/commands/viewer.clj b/backend/src/app/rpc/commands/viewer.clj index c3e7d86085..c2887ef967 100644 --- a/backend/src/app/rpc/commands/viewer.clj +++ b/backend/src/app/rpc/commands/viewer.clj @@ -29,7 +29,7 @@ (defn- get-view-only-bundle [{:keys [::db/conn] :as cfg} {:keys [profile-id file-id ::perms] :as params}] - (let [file (files/get-file conn file-id) + (let [file (files/get-file cfg file-id) project (db/get conn :project {:id (:project-id file)} diff --git a/backend/src/app/rpc/doc.clj b/backend/src/app/rpc/doc.clj index bcfcd96c80..185f3fc4c2 100644 --- a/backend/src/app/rpc/doc.clj +++ b/backend/src/app/rpc/doc.clj @@ -16,6 +16,7 @@ [app.common.schema.openapi :as oapi] [app.common.schema.registry :as sr] [app.config :as cf] + [app.http.sse :as-alias sse] [app.loggers.webhooks :as-alias webhooks] [app.rpc :as-alias rpc] [app.util.json :as json] @@ -55,6 +56,7 @@ :module (or (some-> (::module mdata) d/name) (-> (:ns mdata) (str/split ".") last)) :auth (::rpc/auth mdata true) + :sse (::sse/stream? mdata false) :webhook (::webhooks/event? mdata false) :docs (::sv/docstring mdata) :deprecated (::deprecated mdata) diff --git a/backend/src/app/srepl/helpers.clj b/backend/src/app/srepl/helpers.clj index 9229006fd4..d0a0674271 100644 --- a/backend/src/app/srepl/helpers.clj +++ b/backend/src/app/srepl/helpers.clj @@ -23,6 +23,7 @@ [app.config :as cfg] [app.db :as db] [app.db.sql :as sql] + [app.features.fdata :as feat.fdata] [app.main :refer [system]] [app.rpc.commands.files :as files] [app.rpc.commands.files-update :as files-update] @@ -39,7 +40,6 @@ [promesa.exec :as px] [promesa.exec.csp :as sp])) -(def ^:dynamic *conn* nil) (def ^:dynamic *system* nil) (defn println! @@ -63,71 +63,75 @@ (defn get-file "Get the migrated data of one file." - [system id] + [system id & {:keys [migrate?] :or {migrate? 
true}}] (db/run! system - (fn [{:keys [::db/conn]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (-> (files/get-file conn id) - (files/process-pointers deref)))))) + (fn [system] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)] + (-> (files/get-file system id :migrate? migrate?) + (update :data feat.fdata/process-pointers deref)))))) (defn validate "Validate structure, referencial integrity and semantic coherence of all contents of a file. Returns a list of errors." [system id] - (db/with-atomic [conn (:app.db/pool system)] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (let [file (files/get-file conn id) - libraries (->> (files/get-file-libraries conn id) - (into [file] (map (fn [{:keys [id]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (-> (db/get conn :file {:id id}) - (files/decode-row) - (files/process-pointers deref) ; ensure all pointers resolved - (pmg/migrate-file)))))) - (d/index-by :id))] - (validate/validate-file file libraries))))) + (db/tx-run! system + (fn [{:keys [::db/conn] :as system}] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)] + (let [id (if (string? id) (parse-uuid id) id) + file (files/get-file system id) + libs (->> (files/get-file-libraries conn id) + (into [file] (map (fn [{:keys [id]}] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)] + (-> (files/get-file system id :migrate? false) + (feat.fdata/process-pointers deref) + (pmg/migrate-file)))))) + (d/index-by :id))] + (validate/validate-file file libs)))))) -(defn repair +(defn repair! "Repair the list of errors detected by validation." 
[system id] - (db/with-atomic [conn (:app.db/pool system)] - (let [file (files/get-file conn id)] - (binding [*conn* conn - pmap/*tracked* (atom {}) - pmap/*load-fn* (partial files/load-pointer conn id)] - (let [libraries (->> (files/get-file-libraries conn id) - (into [file] (map (fn [{:keys [id]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (-> (db/get conn :file {:id id}) - (files/decode-row) - (files/process-pointers deref) ; ensure all pointers resolved - (pmg/migrate-file)))))) - (d/index-by :id)) - errors (validate/validate-file file libraries) - changes (-> (repair/repair-file (:data file) libraries errors) - (get :redo-changes)) - file (-> file - (update :revn inc) - (update :data cpc/process-changes changes) - (update :data blob/encode))] + (db/tx-run! system + (fn [{:keys [::db/conn] :as system}] + (binding [pmap/*tracked* (pmap/create-tracked) + pmap/*load-fn* (partial feat.fdata/load-pointer system id)] + (let [id (if (string? id) (parse-uuid id) id) + file (files/get-file system id) + libs (->> (files/get-file-libraries conn id) + (into [file] (map (fn [{:keys [id]}] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)] + (-> (files/get-file system id :migrate? false) + (feat.fdata/process-pointers deref) + (pmg/migrate-file)))))) + (d/index-by :id)) + errors (validate/validate-file file libs) + changes (-> (repair/repair-file (:data file) libs errors) :redo-changes) - (files/persist-pointers! conn id) - (db/update! conn :file - {:revn (:revn file) - :data (:data file) - :data-backend nil - :modified-at (dt/now) - :has-media-trimmed false} - {:id (:id file)})))))) + file (-> file + (update :revn inc) + (update :data cpc/process-changes changes) + (update :data blob/encode))] + + (when (contains? (:features file) "fdata/pointer-map") + (feat.fdata/persist-pointers! system id)) + + (db/update! 
conn :file + {:revn (:revn file) + :data (:data file) + :data-backend nil + :modified-at (dt/now) + :has-media-trimmed false} + {:id (:id file)}) + :repaired))))) (defn update-file! "Apply a function to the data of one file. Optionally save the changes or not. The function receives the decoded and migrated file data." [system & {:keys [update-fn id rollback? migrate? inc-revn?] :or {rollback? true migrate? true inc-revn? true}}] - (letfn [(process-file [conn {:keys [features] :as file}] - (binding [pmap/*tracked* (atom {}) - pmap/*load-fn* (partial files/load-pointer conn id) + (letfn [(process-file [{:keys [::db/conn] :as system} {:keys [features] :as file}] + (binding [pmap/*tracked* (pmap/create-tracked) + pmap/*load-fn* (partial feat.fdata/load-pointer system id) cfeat/*wrap-with-pointer-map-fn* (if (contains? features "fdata/pointer-map") pmap/wrap identity) cfeat/*wrap-with-objects-map-fn* @@ -145,19 +149,19 @@ {:id id})) (when (contains? (:features file) "fdata/pointer-map") - (files/persist-pointers! conn id)) + (feat.fdata/persist-pointers! system id)) (dissoc file :data)))] (db/tx-run! system - (fn [{:keys [::db/conn] :as system}] - (binding [*conn* conn *system* system] + (fn [system] + (binding [*system* system] (try - (->> (files/get-file conn id :migrate? migrate?) - (process-file conn)) + (->> (files/get-file system id :migrate? migrate?) + (process-file system)) (finally (when rollback? - (db/rollback! conn))))))))) + (db/rollback! 
system))))))))) (defn analyze-files "Apply a function to all files in the database, reading them in @@ -187,17 +191,17 @@ (println "unexpected exception happened on processing file: " (:id file)) (strace/print-stack-trace cause)) - (process-file [conn file-id] - (let [file (binding [pmap/*load-fn* (partial files/load-pointer conn file-id)] - (-> (files/get-file conn file-id) - (files/process-pointers deref))) + (process-file [{:keys [::db/conn] :as system} file-id] + (let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system file-id)] + (-> (files/get-file system file-id) + (update :data feat.fdata/process-pointers deref))) libs (when with-libraries? (->> (files/get-file-libraries conn file-id) (into [file] (map (fn [{:keys [id]}] - (binding [pmap/*load-fn* (partial files/load-pointer conn id)] - (-> (files/get-file conn id) - (files/process-pointers deref)))))) + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)] + (-> (files/get-file system id) + (update :data feat.fdata/process-pointers deref)))))) (d/index-by :id)))] (try (if with-libraries? @@ -209,31 +213,31 @@ (db/tx-run! system (fn [{:keys [::db/conn] :as system}] (try - (binding [*conn* conn *system* system] + (binding [*system* system] (when (fn? on-init) (on-init)) - (run! (partial process-file conn) (get-candidates conn))) + (run! (partial process-file system) (get-candidates conn))) (finally (when (fn? on-end) (ex/ignoring (on-end))) - (db/rollback! conn))))))) + (db/rollback! system))))))) (defn process-files! "Apply a function to all files in the database, reading them in batches." - [{:keys [::db/pool] :as system} & {:keys [chunk-size - max-items - workers - start-at - on-file - on-error - on-end - on-init - rollback?] - :or {chunk-size 10 - max-items Long/MAX_VALUE - workers 1 - rollback? true}}] + [system & {:keys [chunk-size + max-items + workers + start-at + on-file + on-error + on-end + on-init + rollback?] 
+ :or {chunk-size 10 + max-items Long/MAX_VALUE + workers 1 + rollback? true}}] (letfn [(get-chunk [conn cursor] (let [sql (str "SELECT id, created_at FROM file " " WHERE created_at < ? AND deleted_at is NULL " @@ -252,11 +256,11 @@ (println! "unexpected exception happened on processing file: " (:id file)) (strace/print-stack-trace cause)) - (process-file [conn file-id] + (process-file [system file-id] (try - (let [{:keys [features] :as file} (files/get-file conn file-id)] - (binding [pmap/*tracked* (atom {}) - pmap/*load-fn* (partial files/load-pointer conn file-id) + (let [{:keys [features] :as file} (files/get-file system file-id)] + (binding [pmap/*tracked* (pmap/create-tracked) + pmap/*load-fn* (partial feat.fdata/load-pointer system file-id) cfeat/*wrap-with-pointer-map-fn* (if (contains? features "fdata/pointer-map") pmap/wrap identity) cfeat/*wrap-with-objects-map-fn* @@ -265,30 +269,30 @@ (on-file file) (when (contains? features "fdata/pointer-map") - (files/persist-pointers! conn file-id)))) + (feat.fdata/persist-pointers! system file-id)))) (catch Throwable cause ((or on-error on-error*) cause file-id)))) (run-worker [in index] (db/tx-run! system - (fn [{:keys [::db/conn] :as system}] - (binding [*conn* conn *system* system] + (fn [system] + (binding [*system* system] (loop [i 0] (when-let [file-id (sp/take! in)] (println! "=> worker: index:" index "| loop:" i "| file:" (str file-id) "|" (px/get-name)) - (process-file conn file-id) + (process-file system file-id) (recur (inc i))))) (when rollback? - (db/rollback! conn))))) + (db/rollback! system))))) (run-producer [input] - (db/with-atomic [conn pool] - (doseq [file-id (get-candidates conn)] - (println! "=> producer:" file-id "|" (px/get-name)) - (sp/put! input file-id)) - (sp/close! input)))] + (db/tx-run! system (fn [{:keys [::db/conn]}] + (doseq [file-id (get-candidates conn)] + (println! "=> producer:" file-id "|" (px/get-name)) + (sp/put! input file-id)) + (sp/close! input))))] (when (fn? 
on-init) (on-init)) diff --git a/backend/src/app/tasks/file_gc.clj b/backend/src/app/tasks/file_gc.clj index e7eb3a232a..5e97bdabc0 100644 --- a/backend/src/app/tasks/file_gc.clj +++ b/backend/src/app/tasks/file_gc.clj @@ -19,8 +19,8 @@ [app.common.types.shape-tree :as ctt] [app.config :as cf] [app.db :as db] + [app.features.fdata :as feat.fdata] [app.media :as media] - [app.rpc.commands.files :as files] [app.storage :as sto] [app.util.blob :as blob] [app.util.pointer-map :as pmap] @@ -271,9 +271,9 @@ " limit 1;") rows (db/exec! conn [sql file-id cursor])] [(some-> rows peek :created-at) - (mapcat (comp files/get-all-pointer-ids blob/decode :data) rows)]))] + (mapcat (comp feat.fdata/get-used-pointer-ids blob/decode :data) rows)]))] - (let [used (into (files/get-all-pointer-ids data) + (let [used (into (feat.fdata/get-used-pointer-ids data) (d/iteration get-pointers-chunk :vf second :kf first @@ -290,10 +290,10 @@ (defn- process-file [{:keys [::db/conn] :as cfg} {:keys [id data revn modified-at features] :as file}] - (l/dbg :hint "processing file" :id id :modified-at modified-at) + (l/dbg :hint "processing file" :file-id (str id) :modified-at modified-at) - (binding [pmap/*load-fn* (partial files/load-pointer conn id) - pmap/*tracked* (atom {})] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id) + pmap/*tracked* (pmap/create-tracked)] (let [data (-> (blob/decode data) (assoc :id id) (pmg/migrate-data))] @@ -311,4 +311,4 @@ {:has-media-trimmed true} {:id id}) - (files/persist-pointers! conn id)))) + (feat.fdata/persist-pointers! 
cfg id)))) diff --git a/backend/test/backend_tests/helpers.clj b/backend/test/backend_tests/helpers.clj index 6b8ef788a4..3fad161142 100644 --- a/backend/test/backend_tests/helpers.clj +++ b/backend/test/backend_tests/helpers.clj @@ -15,12 +15,13 @@ [app.common.pprint :as pp] [app.common.schema :as sm] [app.common.spec :as us] + [app.common.transit :as tr] [app.common.uuid :as uuid] [app.config :as cf] [app.db :as db] [app.main :as main] - [app.media] [app.media :as-alias mtx] + [app.media] [app.migrations] [app.msgbus :as-alias mbus] [app.rpc :as-alias rpc] @@ -43,8 +44,12 @@ [integrant.core :as ig] [mockery.core :as mk] [promesa.core :as p] + [promesa.exec :as px] + [ring.response :as rres] [yetti.request :as yrq]) (:import + java.io.PipedInputStream + java.io.PipedOutputStream java.util.UUID org.postgresql.ds.PGSimpleDataSource)) @@ -553,3 +558,28 @@ (assoc :return-list []) (assoc :call-args nil) (assoc :call-args-list []))))) + +(defn- slurp' + [input & opts] + (let [sw (java.io.StringWriter.)] + (with-open [^java.io.Reader r (java.io.InputStreamReader. input "UTF-8")] + (io/copy r sw) + (.toString sw)))) + +(defn consume-sse + [callback] + (let [{:keys [::rres/status ::rres/body ::rres/headers] :as response} (callback {}) + output (PipedOutputStream.) + input (PipedInputStream. output)] + + (try + (px/exec! :virtual #(rres/-write-body-to-stream body nil output)) + (into [] + (map (fn [event] + (let [[item1 item2] (re-seq #"(.*): (.*)\n?" 
event)] + [(keyword (nth item1 2)) + (tr/decode-str (nth item2 2))]))) + (-> (slurp' input) + (str/split "\n\n"))) + (finally + (.close input))))) diff --git a/backend/test/backend_tests/rpc_management_test.clj b/backend/test/backend_tests/rpc_management_test.clj index e56109d334..2b5f182e88 100644 --- a/backend/test/backend_tests/rpc_management_test.clj +++ b/backend/test/backend_tests/rpc_management_test.clj @@ -6,6 +6,7 @@ (ns backend-tests.rpc-management-test (:require + [app.common.pprint :as pp] [app.common.uuid :as uuid] [app.db :as db] [app.http :as http] @@ -604,9 +605,11 @@ (t/is (nil? (:error out))) (let [result (:result out)] - (t/is (set? result)) - (t/is (uuid? (first result))) - (t/is (= 1 (count result)))))) + (t/is (fn? result)) + + (let [events (th/consume-sse result)] + (t/is (= 8 (count events))) + (t/is (= :end (first (last events)))))))) (t/deftest get-list-of-buitin-templates (let [prof (th/create-profile* 1 {:is-active true}) diff --git a/common/deps.edn b/common/deps.edn index 4bb93a5a8e..4dcd1987db 100644 --- a/common/deps.edn +++ b/common/deps.edn @@ -32,7 +32,7 @@ funcool/tubax {:mvn/version "2021.05.20-0"} funcool/cuerdas {:mvn/version "2022.06.16-403"} - funcool/promesa {:git/sha "658c429c56c11c33da7594fa2ef53f4e6afedac4" + funcool/promesa {:git/sha "484b7f5c0d08d817746caa685ed9ac5583eb37fa" :git/url "https://github.com/funcool/promesa"} funcool/datoteka {:mvn/version "3.0.66" diff --git a/common/shadow-cljs.edn b/common/shadow-cljs.edn index a06131055d..274f6dae1a 100644 --- a/common/shadow-cljs.edn +++ b/common/shadow-cljs.edn @@ -1,8 +1,4 @@ {:deps {:aliases [:dev]} - ;; :http {:port 3448} - ;; :nrepl {:port 3447} - :jvm-opts ["-Xmx700m" "-Xms100m" "-XX:+UseSerialGC" "-XX:-OmitStackTraceInFastThrow"] - :builds {:test {:target :node-test diff --git a/common/src/app/common/files/validate.cljc b/common/src/app/common/files/validate.cljc index 69651085d5..23daff25d1 100644 --- a/common/src/app/common/files/validate.cljc +++ 
b/common/src/app/common/files/validate.cljc @@ -186,7 +186,7 @@ (when-not (= (:main-instance-page component) (:id page)) (let [component-page (ctf/get-component-page (:data file) component) main-component (ctst/get-shape component-page (:main-instance-id component))] - ;; We must check if the same component has main instances in different pages. + ;; We must check if the same component has main instances in different pages. ;; In that case one of those instances shouldn't be main (if (:main-instance main-component) (report-error! :component-main @@ -486,6 +486,5 @@ all contents of a file. Returns a list of errors." [file libraries] (binding [*errors* (volatile! [])] - (validate-file-schema! file) (validate-file! file libraries) (deref *errors*))) diff --git a/docker/devenv/files/bashrc b/docker/devenv/files/bashrc index 420ebe2132..bb53eb472c 100644 --- a/docker/devenv/files/bashrc +++ b/docker/devenv/files/bashrc @@ -1,9 +1,15 @@ #!/usr/bin/env bash export PATH=/usr/lib/jvm/openjdk/bin:/usr/local/nodejs/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/snap/bin +export JAVA_OPTS="-Xmx900m -Xms50m" alias l='ls --color -GFlh' alias rm='rm -r' alias ls='ls --color -F' alias lsd='ls -d *(/)' alias lsf='ls -h *(.)' + +# include .bashrc if it exists +if [ -f "$HOME/.bashrc.local" ]; then + . 
"$HOME/.bashrc.local" +fi diff --git a/docker/devenv/files/nginx.conf b/docker/devenv/files/nginx.conf index 175f7f2cf6..ab2c5ea6e9 100644 --- a/docker/devenv/files/nginx.conf +++ b/docker/devenv/files/nginx.conf @@ -105,6 +105,8 @@ http { location /api { proxy_pass http://127.0.0.1:6060/api; + proxy_buffering off; + proxy_http_version 1.1; } location /admin { diff --git a/exporter/deps.edn b/exporter/deps.edn index 117b839b79..ff538c79c9 100644 --- a/exporter/deps.edn +++ b/exporter/deps.edn @@ -15,7 +15,7 @@ :dev {:extra-deps - {thheller/shadow-cljs {:mvn/version "2.25.10"}}} + {thheller/shadow-cljs {:mvn/version "2.26.2"}}} :shadow-cljs {:main-opts ["-m" "shadow.cljs.devtools.cli"]} diff --git a/exporter/package.json b/exporter/package.json index ad5670e6a3..9437e9a678 100644 --- a/exporter/package.json +++ b/exporter/package.json @@ -22,7 +22,7 @@ "xregexp": "^5.1.1" }, "devDependencies": { - "shadow-cljs": "^2.25.10", + "shadow-cljs": "2.26.2", "source-map-support": "^0.5.21" }, "scripts": { diff --git a/exporter/shadow-cljs.edn b/exporter/shadow-cljs.edn index 2fde74be34..1c24414c61 100644 --- a/exporter/shadow-cljs.edn +++ b/exporter/shadow-cljs.edn @@ -1,6 +1,5 @@ {:deps {:aliases [:dev]} :source-paths ["src" "vendor" "../common"] - :jvm-opts ["-Xmx512m" "-Xms50m" "-XX:+UseSerialGC"] :builds {:main diff --git a/exporter/yarn.lock b/exporter/yarn.lock index f2ef78fa16..9e9c1111c5 100644 --- a/exporter/yarn.lock +++ b/exporter/yarn.lock @@ -735,7 +735,7 @@ __metadata: luxon: "npm:^3.4.2" playwright: "npm:^1.37.1" raw-body: "npm:^2.5.2" - shadow-cljs: "npm:^2.25.10" + shadow-cljs: "npm:2.26.2" source-map-support: "npm:^0.5.21" xml-js: "npm:^1.6.11" xregexp: "npm:^5.1.1" @@ -1842,7 +1842,7 @@ __metadata: languageName: node linkType: hard -"shadow-cljs@npm:^2.25.10": +"shadow-cljs@npm:2.26.2": version: 2.26.2 resolution: "shadow-cljs@npm:2.26.2" dependencies: diff --git a/frontend/deps.edn b/frontend/deps.edn index a12627a053..b30b74b78f 100644 --- 
a/frontend/deps.edn +++ b/frontend/deps.edn @@ -35,7 +35,7 @@ :dev {:extra-paths ["dev"] :extra-deps - {thheller/shadow-cljs {:mvn/version "2.25.10"} + {thheller/shadow-cljs {:mvn/version "2.26.2"} org.clojure/tools.namespace {:mvn/version "RELEASE"} cider/cider-nrepl {:mvn/version "0.37.0"}}} diff --git a/frontend/package.json b/frontend/package.json index d2ac50f09a..2b9c7213a9 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -71,13 +71,14 @@ "prop-types": "^15.8.1", "rimraf": "^5.0.1", "sass": "^1.66.1", - "shadow-cljs": "2.25.10", + "shadow-cljs": "2.26.2", "storybook": "^7.5.3", "vite": "^5.0.2" }, "dependencies": { "date-fns": "^2.30.0", "draft-js": "^0.11.7", + "eventsource-parser": "^1.1.1", "highlight.js": "^11.8.0", "js-beautify": "^1.14.9", "jszip": "^3.10.1", diff --git a/frontend/resources/styles/common/refactor/mixins.scss b/frontend/resources/styles/common/refactor/mixins.scss index 578b69720c..583ee3a73a 100644 --- a/frontend/resources/styles/common/refactor/mixins.scss +++ b/frontend/resources/styles/common/refactor/mixins.scss @@ -10,10 +10,10 @@ align-items: center; } -@mixin flexColumn { +@mixin flexColumn($gap: $s-4) { display: flex; flex-direction: column; - gap: $s-4; + gap: #{$gap}; } @mixin flexRow { diff --git a/frontend/shadow-cljs.edn b/frontend/shadow-cljs.edn index e08e48e9ed..c5eb316281 100644 --- a/frontend/shadow-cljs.edn +++ b/frontend/shadow-cljs.edn @@ -1,7 +1,6 @@ {:deps {:aliases [:dev]} :http {:port 3448} :nrepl {:port 3447 :host "0.0.0.0"} - :jvm-opts ["-Xmx700m" "-Xms100m" "-XX:+UseSerialGC" "-XX:-OmitStackTraceInFastThrow"] :dev-http {8888 "classpath:public"} :builds diff --git a/frontend/src/app/main/data/dashboard.cljs b/frontend/src/app/main/data/dashboard.cljs index fbe417428a..9e723ca20b 100644 --- a/frontend/src/app/main/data/dashboard.cljs +++ b/frontend/src/app/main/data/dashboard.cljs @@ -10,6 +10,7 @@ [app.common.data.macros :as dm] [app.common.features :as cfeat] [app.common.files.helpers :as 
cfh] + [app.common.logging :as log] [app.common.schema :as sm] [app.common.uri :as u] [app.common.uuid :as uuid] @@ -25,6 +26,7 @@ [app.util.dom :as dom] [app.util.i18n :as i18n :refer [tr]] [app.util.router :as rt] + [app.util.sse :as sse] [app.util.time :as dt] [app.util.timers :as tm] [app.util.webapi :as wapi] @@ -32,6 +34,8 @@ [clojure.set :as set] [potok.core :as ptk])) +(log/set-level! :warn) + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Initialization ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -949,7 +953,17 @@ (let [{:keys [on-success on-error] :or {on-success identity on-error rx/throw}} (meta params)] - (->> (rp/cmd! :clone-template {:project-id project-id :template-id template-id}) + (->> (rp/cmd! ::sse/clone-template {:project-id project-id + :template-id template-id}) + (rx/tap (fn [event] + (let [payload (sse/get-payload event) + type (sse/get-type event)] + (if (= type "event") + (log/dbg :hint "clone-template: progress" :section (:section payload) :name (:name payload)) + (log/dbg :hint "clone-template: end"))))) + + (rx/filter sse/end-of-stream?) + (rx/map sse/get-payload) (rx/tap on-success) (rx/catch on-error)))))) diff --git a/frontend/src/app/main/repo.cljs b/frontend/src/app/main/repo.cljs index e8b5f8f018..e99f9a4612 100644 --- a/frontend/src/app/main/repo.cljs +++ b/frontend/src/app/main/repo.cljs @@ -7,9 +7,11 @@ (ns app.main.repo (:require [app.common.data :as d] + [app.common.transit :as t] [app.common.uri :as u] [app.config :as cf] [app.util.http :as http] + [app.util.sse :as sse] [beicon.core :as rx] [cuerdas.core :as str])) @@ -56,8 +58,14 @@ {:query-params [:file-id :revn] :form-data? true} + ::sse/clone-template + {:response-type ::sse/stream} + + ::sse/import-binfile + {:response-type ::sse/stream + :form-data? true} + :export-binfile {:response-type :blob} - :import-binfile {:form-data? 
true} :retrieve-list-of-builtin-templates {:query-params :all} }) @@ -85,9 +93,9 @@ :else :post) request {:method method - :uri (u/join cf/public-uri "api/rpc/command/" (name id)) + :uri (u/join cf/public-uri "api/rpc/command/" nid) :credentials "include" - :headers {"accept" "application/transit+json"} + :headers {"accept" "application/transit+json,text/event-stream,*/*"} :body (when (= method :post) (if form-data? (http/form-data params) @@ -97,11 +105,21 @@ (if query-params (select-keys params query-params) nil)) - :response-type (or response-type :text)}] - (->> (http/send! request) - (rx/map decode-fn) - (rx/mapcat handle-response)))) + :response-type + (if (= response-type ::sse/stream) + :stream + (or response-type :text))} + + result (->> (http/send! request) + (rx/map decode-fn) + (rx/mapcat handle-response))] + + (cond->> result + (= ::sse/stream response-type) + (rx/mapcat (fn [body] + (-> (sse/create-stream body) + (sse/read-stream t/decode-str))))))) (defmulti cmd! (fn [id _] id)) diff --git a/frontend/src/app/main/ui/components/select.scss b/frontend/src/app/main/ui/components/select.scss index 291e239cfe..630619a70f 100644 --- a/frontend/src/app/main/ui/components/select.scss +++ b/frontend/src/app/main/ui/components/select.scss @@ -9,7 +9,8 @@ .custom-select { @include titleTipography; position: relative; - display: flex; + display: grid; + grid-template-columns: 1fr auto; align-items: center; height: $s-32; width: 100%; @@ -20,10 +21,7 @@ border: $s-1 solid var(--menu-background-color); color: var(--menu-foreground-color); cursor: pointer; - .current-label { - width: 100%; - flex-grow: 1; - } + .current-icon { @include flexCenter; height: $s-24; @@ -109,3 +107,7 @@ } } } + +.current-label { + @include textEllipsis; +} diff --git a/frontend/src/app/main/ui/dashboard/projects.cljs b/frontend/src/app/main/ui/dashboard/projects.cljs index b017c20204..545d9c0aa9 100644 --- a/frontend/src/app/main/ui/dashboard/projects.cljs +++ 
b/frontend/src/app/main/ui/dashboard/projects.cljs @@ -147,8 +147,10 @@ (mf/use-fn (mf/deps template default-project-id) (fn [] - (let [mdata {:on-success on-template-cloned-success :on-error on-template-cloned-error} - params {:project-id default-project-id :template-id (:id template)}] + (let [mdata {:on-success on-template-cloned-success + :on-error on-template-cloned-error} + params {:project-id default-project-id + :template-id (:id template)}] (swap! state #(assoc % :status :importing)) (st/emit! (with-meta (dd/clone-template (with-meta params mdata)) {::ev/origin "get-started-hero-block"})))))] diff --git a/frontend/src/app/main/ui/viewer/inspect/attributes/svg.cljs b/frontend/src/app/main/ui/viewer/inspect/attributes/svg.cljs index 4407688634..c6dc88e92b 100644 --- a/frontend/src/app/main/ui/viewer/inspect/attributes/svg.cljs +++ b/frontend/src/app/main/ui/viewer/inspect/attributes/svg.cljs @@ -58,7 +58,7 @@ [{:keys [shape]}] [:* (for [[attr-key attr-value] (:svg-attrs shape)] - [:& svg-attr {:attr attr-key :value attr-value}])] ) + [:& svg-attr {:attr attr-key :value attr-value}])]) (mf/defc svg-panel diff --git a/frontend/src/app/main/ui/workspace/shapes/frame.cljs b/frontend/src/app/main/ui/workspace/shapes/frame.cljs index 2adf9ba640..1be9a2a836 100644 --- a/frontend/src/app/main/ui/workspace/shapes/frame.cljs +++ b/frontend/src/app/main/ui/workspace/shapes/frame.cljs @@ -114,6 +114,7 @@ modifiers (mf/deref modifiers-ref) hidden? (true? (:hidden shape)) + content-visible? (or (not ^boolean thumbnail?) (not ^boolean thumbnail-uri)) tries-ref (mf/use-ref 0) imposter-ref (mf/use-ref nil) @@ -157,7 +158,7 @@ :opacity (when ^boolean hidden? 0)} ;; When there is no thumbnail, we generate a empty rect. - (when (and (not ^boolean thumbnail-uri) (not (mf/ref-val imposter-loaded-ref))) + (when (and (not ^boolean content-visible?) 
(not (mf/ref-val imposter-loaded-ref))) [:g.frame-placeholder [:rect {:x x :y y @@ -188,7 +189,7 @@ :stroke-width 2}])] ;; When thumbnail is disabled. - (when (or (not ^boolean thumbnail?) (not ^boolean thumbnail-uri)) + (when ^boolean content-visible? [:g.frame-content {:id (dm/str "frame-content-" frame-id) :ref container-ref} diff --git a/frontend/src/app/main/ui/workspace/sidebar/options/menus/interactions.scss b/frontend/src/app/main/ui/workspace/sidebar/options/menus/interactions.scss index 225392afbc..78b905d5e3 100644 --- a/frontend/src/app/main/ui/workspace/sidebar/options/menus/interactions.scss +++ b/frontend/src/app/main/ui/workspace/sidebar/options/menus/interactions.scss @@ -47,167 +47,6 @@ } } } - .groups { - @include flexColumn; - .element-set-options-group { - &.open { - @include flexColumn; - .extended-options { - @include flexColumn; - .property-row { - @extend .attr-row; - &.big-row { - height: 100%; - } - .interaction-name { - @include twoLineTextEllipsis; - @include titleTipography; - padding-left: $s-4; - width: $s-92; - margin: auto 0; - grid-area: name; - } - .select-wrapper { - display: flex; - align-items: center; - grid-area: content; - .easing-select { - width: $s-156; - padding: 0 $s-8; - .dropdown-upwards { - bottom: $s-36; - width: $s-156; - top: unset; - } - } - } - .input-element-wrapper { - @extend .input-element; - grid-area: content; - } - .checkbox-option { - @extend .input-checkbox; - grid-area: content; - } - .position-btns-wrapper { - grid-area: content; - display: grid; - grid-template-areas: - "topleft top topright" - "left center right" - "bottomleft bottom bottomright"; - grid-template-columns: repeat(3, 1fr); - grid-template-rows: repeat(3, 1fr); - width: $s-84; - height: $s-84; - border-radius: $br-8; - background-color: var(--color-background-tertiary); - .direction-btn { - @extend .button-tertiary; - height: $s-28; - width: $s-28; - .rectangle { - height: $s-8; - width: $s-8; - background-color: 
var(--color-background-quaternary); - } - &:hover { - .rectangle { - background-color: var(--color-accent-primary); - } - } - &.active { - background-color: var(--color-background-quaternary); - .rectangle { - background-color: var(--color-accent-primary); - } - } - } - .center-btn { - grid-area: center; - } - .top-left-btn { - grid-area: topleft; - } - .top-right-btn { - grid-area: topright; - } - .top-center-btn { - grid-area: top; - } - .bottom-left-btn { - grid-area: bottomleft; - } - .bottom-right-btn { - grid-area: bottomright; - } - .bottom-center-btn { - grid-area: bottom; - } - } - .buttons-wrapper { - grid-area: content; - .right svg { - transform: rotate(-90deg); - } - .left svg { - transform: rotate(90deg); - } - .up svg { - transform: rotate(180deg); - } - } - .inputs-wrapper { - grid-area: content; - @include flexRow; - .radio-btn { - @extend .input-checkbox; - } - } - } - } - } - - .interactions-summary { - @extend .asset-element; - height: $s-44; - padding: 0; - gap: $s-4; - .extend-btn { - @extend .button-tertiary; - height: 100%; - width: $s-28; - svg { - @extend .button-icon; - } - &.extended { - background-color: var(--button-radio-background-color-active); - svg { - stroke: var(--button-radio-foreground-color-active); - } - } - } - - .interactions-info { - flex-grow: 1; - .trigger-name { - color: white; - } - .action-summary { - color: var(--color-foreground-secondary); - } - } - .remove-btn { - @extend .button-tertiary; - height: $s-32; - width: $s-28; - svg { - @extend .button-icon-small; - } - } - } - } - } } .element-set { @include flexColumn; @@ -221,6 +60,174 @@ } } } + +.interactions-info { + flex-grow: 1; + display: grid; + + .trigger-name { + color: var(--color-foreground-primary); + } + .action-summary { + color: var(--color-foreground-secondary); + @include textEllipsis; + } +} + +.groups { + @include flexColumn($s-12); +} + +.element-set-options-group { + &.open { + @include flexColumn; + .extended-options { + @include flexColumn; + 
.property-row { + @extend .attr-row; + &.big-row { + height: 100%; + } + .interaction-name { + @include twoLineTextEllipsis; + @include titleTipography; + padding-left: $s-4; + width: $s-92; + margin: auto 0; + grid-area: name; + } + .select-wrapper { + display: flex; + align-items: center; + grid-area: content; + .easing-select { + width: $s-156; + padding: 0 $s-8; + .dropdown-upwards { + bottom: $s-36; + width: $s-156; + top: unset; + } + } + } + .input-element-wrapper { + @extend .input-element; + grid-area: content; + } + .checkbox-option { + @extend .input-checkbox; + grid-area: content; + } + .position-btns-wrapper { + grid-area: content; + display: grid; + grid-template-areas: + "topleft top topright" + "left center right" + "bottomleft bottom bottomright"; + grid-template-columns: repeat(3, 1fr); + grid-template-rows: repeat(3, 1fr); + width: $s-84; + height: $s-84; + border-radius: $br-8; + background-color: var(--color-background-tertiary); + .direction-btn { + @extend .button-tertiary; + height: $s-28; + width: $s-28; + .rectangle { + height: $s-8; + width: $s-8; + background-color: var(--color-background-quaternary); + } + &:hover { + .rectangle { + background-color: var(--color-accent-primary); + } + } + &.active { + background-color: var(--color-background-quaternary); + .rectangle { + background-color: var(--color-accent-primary); + } + } + } + .center-btn { + grid-area: center; + } + .top-left-btn { + grid-area: topleft; + } + .top-right-btn { + grid-area: topright; + } + .top-center-btn { + grid-area: top; + } + .bottom-left-btn { + grid-area: bottomleft; + } + .bottom-right-btn { + grid-area: bottomright; + } + .bottom-center-btn { + grid-area: bottom; + } + } + .buttons-wrapper { + grid-area: content; + .right svg { + transform: rotate(-90deg); + } + .left svg { + transform: rotate(90deg); + } + .up svg { + transform: rotate(180deg); + } + } + .inputs-wrapper { + grid-area: content; + @include flexRow; + .radio-btn { + @extend .input-checkbox; + 
} + } + } + } + } + + .interactions-summary { + @extend .asset-element; + height: $s-44; + padding: 0; + gap: $s-4; + .extend-btn { + @extend .button-tertiary; + height: 100%; + width: $s-28; + svg { + @extend .button-icon; + } + &.extended { + background-color: var(--button-radio-background-color-active); + svg { + stroke: var(--button-radio-foreground-color-active); + } + } + } + + .remove-btn { + @extend .button-tertiary; + height: $s-32; + width: $s-28; + svg { + @extend .button-icon-small; + } + } + } +} + .flow-element { @extend .asset-element; padding: 0; diff --git a/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.cljs b/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.cljs index 0399ea390f..792b83f943 100644 --- a/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.cljs +++ b/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.cljs @@ -46,18 +46,16 @@ [:button {:class (stl/css :attr-action-btn) :on-click handle-delete} i/remove-refactor]]] - [:div {:class (stl/css :attr-row)} - [:span {:class (stl/css :attr-title)} - (str (d/name (last attr)))] - - (for [[key value] value] - [:& attribute-value {:key key - :attr (conj attr key) - :value value - :on-change on-change - :on-delete on-delete}])])] - - + [:div {:class (stl/css :attr-nested-content)} + [:div {:class (stl/css :attr-title)} + (str (d/name (last attr)))] + (for [[key value] value] + [:div {:class (stl/css :attr-row) :key key} + [:& attribute-value {:key key + :attr (conj attr key) + :value value + :on-change on-change + :on-delete on-delete}]])])] [:div.element-set-content (if (string? value) [:div.row-flex.row-flex-removable @@ -122,13 +120,14 @@ :on-collapsed toggle-content :title (tr "workspace.sidebar.options.svg-attrs.title") :class (stl/css-case :title-spacing-svg-attrs (not has-attributes?))}]] - [:div {:class (stl/css :element-set-content)} + (when open? 
+ [:div {:class (stl/css :element-set-content)} (for [[attr-key attr-value] attrs] [:& attribute-value {:key attr-key :attr [attr-key] :value attr-value :on-change handle-change - :on-delete handle-delete}])]] + :on-delete handle-delete}])])] [:div.element-set [:div.element-set-title diff --git a/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.scss b/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.scss index 72897cfcee..8dc2d5bb67 100644 --- a/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.scss +++ b/frontend/src/app/main/ui/workspace/sidebar/options/menus/svg_attrs.scss @@ -12,14 +12,6 @@ } .element-set-content { @include flexColumn; - .attr-row { - display: flex; - gap: $s-4; - .attr-title { - @include tabTitleTipography; - border-bottom: $s-1 solid var(--color-foreground-secondary); - } - } .attr-content { display: flex; @@ -27,9 +19,10 @@ .attr-name { @include titleTipography; @include twoLineTextEllipsis; - width: $s-92; + width: $s-88; margin: auto $s-4; margin-right: 0; + display: inline-block; } .attr-input { @extend .input-element; @@ -50,3 +43,20 @@ } } } + +.attr-nested-content { + display: grid; + row-gap: $s-4; +} + +.attr-title { + @include titleTipography; + font-size: $fs-10; + text-transform: uppercase; + margin-inline-start: $s-4; +} + +.attr-row { + display: flex; + gap: $s-4; +} diff --git a/frontend/src/app/util/http.cljs b/frontend/src/app/util/http.cljs index 60fc675f6e..35c8ae485d 100644 --- a/frontend/src/app/util/http.cljs +++ b/frontend/src/app/util/http.cljs @@ -105,17 +105,22 @@ (defn send! 
[{:keys [response-type] :or {response-type :text} :as params}] - (letfn [(on-response [response] - (let [body (case response-type - :json (.json ^js response) - :text (.text ^js response) - :blob (.blob ^js response))] - (->> (rx/from body) - (rx/map (fn [body] - {::response response - :status (.-status ^js response) - :headers (parse-headers (.-headers ^js response)) - :body body})))))] + (letfn [(on-response [^js response] + (if (= :stream response-type) + (rx/of {:status (.-status response) + :headers (parse-headers (.-headers response)) + :body (.-body response) + ::response response}) + (let [body (case response-type + :json (.json ^js response) + :text (.text ^js response) + :blob (.blob ^js response))] + (->> (rx/from body) + (rx/map (fn [body] + {::response response + :status (.-status ^js response) + :headers (parse-headers (.-headers ^js response)) + :body body}))))))] (->> (fetch params) (rx/mapcat on-response)))) diff --git a/frontend/src/app/util/sse.cljs b/frontend/src/app/util/sse.cljs new file mode 100644 index 0000000000..222e1bda7f --- /dev/null +++ b/frontend/src/app/util/sse.cljs @@ -0,0 +1,54 @@ +;; This Source Code Form is subject to the terms of the Mozilla Public +;; License, v. 2.0. If a copy of the MPL was not distributed with this +;; file, You can obtain one at http://mozilla.org/MPL/2.0/. +;; +;; Copyright (c) KALEIDOS INC + +(ns app.util.sse + (:require + ["eventsource-parser/stream" :as sse] + [beicon.core :as rx])) + +(defn create-stream + [^js/ReadableStream stream] + (.. 
stream + (pipeThrough (js/TextDecoderStream.)) + (pipeThrough (sse/EventSourceParserStream.)))) + +(defn read-stream + [^js/ReadableStream stream decode-fn] + (letfn [(read-items [^js reader] + (->> (rx/from (.read reader)) + (rx/mapcat (fn [result] + (if (.-done result) + (rx/empty) + (rx/concat + (rx/of (.-value result)) + (read-items reader)))))))] + (->> (read-items (.getReader stream)) + (rx/mapcat (fn [^js event] + (let [type (.-event event) + data (.-data event) + data (decode-fn data)] + (if (= "error" type) + (rx/throw (ex-info "stream exception" data)) + (rx/of #js {:type type :data data})))))))) + +(defn get-type + [event] + (unchecked-get event "type")) + +(defn get-payload + [event] + (unchecked-get event "data")) + +(defn end-of-stream? + [event] + (= "end" (get-type event))) + +(defn event? + [event] + (= "event" (get-type event))) + + + diff --git a/frontend/src/app/worker/import.cljs b/frontend/src/app/worker/import.cljs index 9895aaf47d..bccb975ea0 100644 --- a/frontend/src/app/worker/import.cljs +++ b/frontend/src/app/worker/import.cljs @@ -22,6 +22,7 @@ [app.util.i18n :as i18n :refer [tr]] [app.util.import.parser :as cip] [app.util.json :as json] + [app.util.sse :as sse] [app.util.webapi :as wapi] [app.util.zip :as uz] [app.worker.impl :as impl] @@ -329,7 +330,7 @@ (map #(assoc % :type :fill))) stroke-images-data (->> (cip/get-stroke-images-data node) (map #(assoc % :type :stroke))) - + images-data (concat fill-images-data stroke-images-data @@ -709,15 +710,22 @@ :response-type :blob :method :get}) (rx/map :body) - (rx/mapcat #(rp/cmd! :import-binfile {:file % :project-id project-id})) - (rx/map (fn [_] - {:status :import-finish - :file-id (:file-id data)})) + (rx/mapcat (fn [file] + (->> (rp/cmd! 
::sse/import-binfile {:file file :project-id project-id}) + (rx/tap (fn [event] + (let [payload (sse/get-payload event) + type (sse/get-type event)] + (if (= type "event") + (log/dbg :hint "import-binfile: progress" :section (:section payload) :name (:name payload)) + (log/dbg :hint "import-binfile: end"))))) + (rx/filter sse/end-of-stream?) + (rx/map (fn [_] + {:status :import-finish + :file-id (:file-id data)}))))) (rx/catch (fn [cause] (log/error :hint "unexpected error on import process" :project-id project-id ::log/sync? true) - ;; TODO: consider do thi son logging directly ? (when (map? cause) (println "Error data:") diff --git a/frontend/test/frontend_tests/helpers/events.cljs b/frontend/test/frontend_tests/helpers/events.cljs index 4b59dca6c8..62c4b47f30 100644 --- a/frontend/test/frontend_tests/helpers/events.cljs +++ b/frontend/test/frontend_tests/helpers/events.cljs @@ -23,7 +23,7 @@ (js/console.log "STORE ERROR" (.-stack cause)) (when-let [data (some-> cause ex-data ::sm/explain)] - (pp/pprint (sm/humanize-data data)))) + (pp/pprint (sm/humanize-explain data)))) (defn prepare-store "Create a store with the given initial state. 
Wait until diff --git a/frontend/test/frontend_tests/setup_test.cljs b/frontend/test/frontend_tests/setup_test.cljs index 423357c815..047d8d2d06 100644 --- a/frontend/test/frontend_tests/setup_test.cljs +++ b/frontend/test/frontend_tests/setup_test.cljs @@ -14,7 +14,7 @@ (try (js/console.log "EE" (.-stack cause)) (when-let [data (some-> cause ex-data ::sm/explain)] - (pp/pprint (sm/humanize-data data))) + (pp/pprint (sm/humanize-explain data))) (finally (js/console.log "EXIT") (.exit js/process -1))))) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index fc9af1bd1a..f50dbb9699 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -7028,6 +7028,13 @@ __metadata: languageName: node linkType: hard +"eventsource-parser@npm:^1.1.1": + version: 1.1.1 + resolution: "eventsource-parser@npm:1.1.1" + checksum: 6eae5e8300dc5d4dcd29d09d037a43b9954077fbcc936904065d1abf5ec503f1fa56a942ad4f796b7dcc3bf64224440917ea635ef4a75f5522e9951df4b9aadf + languageName: node + linkType: hard + "evp_bytestokey@npm:^1.0.0, evp_bytestokey@npm:^1.0.3": version: 1.0.3 resolution: "evp_bytestokey@npm:1.0.3" @@ -7578,6 +7585,7 @@ __metadata: concurrently: "npm:^8.2.2" date-fns: "npm:^2.30.0" draft-js: "npm:^0.11.7" + eventsource-parser: "npm:^1.1.1" gettext-parser: "npm:^7.0.1" gulp: "npm:4.0.2" gulp-cached: "npm:^1.1.1" @@ -7613,7 +7621,7 @@ __metadata: rxjs: "npm:~7.8.1" sass: "npm:^1.66.1" sax: "npm:^1.2.4" - shadow-cljs: "npm:2.25.10" + shadow-cljs: "npm:2.26.2" source-map-support: "npm:^0.5.21" storybook: "npm:^7.5.3" tdigest: "npm:^0.1.2" @@ -12752,9 +12760,9 @@ __metadata: languageName: node linkType: hard -"shadow-cljs@npm:2.25.10": - version: 2.25.10 - resolution: "shadow-cljs@npm:2.25.10" +"shadow-cljs@npm:2.26.2": + version: 2.26.2 + resolution: "shadow-cljs@npm:2.26.2" dependencies: node-libs-browser: "npm:^2.2.1" readline-sync: "npm:^1.4.7" @@ -12764,7 +12772,7 @@ __metadata: ws: "npm:^7.4.6" bin: shadow-cljs: cli/runner.js - checksum: 
fc341928abae469b67904dec6ab1c666e7a376c89ac641a64661017ccec57ff7289da5508a79cb1f1c8a623f8468a45b3910268a60176ec70a9621ddd62440fa + checksum: d504969ea28bcf3d5fc879c8111cb630a8ae910ea692bbfb0d73097fb336e13e642116db9fcc91524686a6824e71d439ef0df31941eabb6331feb4aa4146e830 languageName: node linkType: hard