
Merge pull request #3959 from penpot/niwinz-staging-storage-improvements-2

 Improvements to components migration
This commit is contained in:
Alejandro 2024-01-04 15:55:31 +01:00 committed by GitHub
commit 65b3c62a87
40 changed files with 904 additions and 678 deletions

View file

@ -26,7 +26,7 @@
:git/url "https://github.com/funcool/yetti.git"
:exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.894"}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.909"}
metosin/reitit-core {:mvn/version "0.6.0"}
nrepl/nrepl {:mvn/version "1.1.0"}
cider/cider-nrepl {:mvn/version "0.43.1"}

View file

@ -8,6 +8,7 @@
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.files.helpers :as cfh]
[app.common.fressian :as fres]
[app.common.geom.matrix :as gmt]
[app.common.logging :as l]
@ -136,3 +137,12 @@
(add-tap #(locking debug-tap
(prn "tap debug:" %)))
1))
(defn calculate-frames
[{:keys [data]}]
(->> (vals (:pages-index data))
(mapcat (comp vals :objects))
(filter cfh/is-direct-child-of-root?)
(filter cfh/frame-shape?)
(count)))

View file

@ -207,6 +207,7 @@
(s/def ::telemetry-uri ::us/string)
(s/def ::telemetry-with-taiga ::us/boolean)
(s/def ::tenant ::us/string)
(s/def ::svgo-max-procs ::us/integer)
(s/def ::config
(s/keys :opt-un [::secret-key
@ -326,7 +327,9 @@
::telemetry-uri
::telemetry-referer
::telemetry-with-taiga
::tenant]))
::tenant
::svgo-max-procs]))
(def default-flags
[:enable-backend-api-doc

View file

@ -19,6 +19,7 @@
[app.util.json :as json]
[app.util.time :as dt]
[clojure.java.io :as io]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[next.jdbc :as jdbc]
@ -239,6 +240,10 @@
(ex/raise :type :internal
:code :unable-resolve-pool))))
(defn get-update-count
[result]
(:next.jdbc/update-count result))
(defn get-connection
[cfg-or-conn]
(if (connection? cfg-or-conn)
@ -265,48 +270,120 @@
:code :unable-resolve-connectable
:hint "expected conn, pool or system")))
(def ^:private params-mapping
{::return-keys? :return-keys
::return-keys :return-keys})
(defn rename-opts
[opts]
(set/rename-keys opts params-mapping))
(def ^:private default-insert-opts
{:builder-fn sql/as-kebab-maps
:return-keys true})
(def ^:private default-opts
{:builder-fn sql/as-kebab-maps})
(defn exec!
([ds sv]
(-> (get-connectable ds)
(jdbc/execute! sv default-opts)))
([ds sv] (exec! ds sv nil))
([ds sv opts]
(-> (get-connectable ds)
(jdbc/execute! sv (into default-opts (sql/adapt-opts opts))))))
(let [conn (get-connectable ds)
opts (if (empty? opts)
default-opts
(into default-opts (rename-opts opts)))]
(jdbc/execute! conn sv opts))))
(defn exec-one!
([ds sv]
(-> (get-connectable ds)
(jdbc/execute-one! sv default-opts)))
([ds sv] (exec-one! ds sv nil))
([ds sv opts]
(-> (get-connectable ds)
(jdbc/execute-one! sv (into default-opts (sql/adapt-opts opts))))))
(let [conn (get-connectable ds)
opts (if (empty? opts)
default-opts
(into default-opts (rename-opts opts)))]
(jdbc/execute-one! conn sv opts))))
(defn insert!
[ds table params & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
(-> (get-connectable ds)
(exec-one! (sql/insert table params opts)
(assoc opts ::return-keys? return-keys?))))
"A helper that builds an insert sql statement and executes it. By
default returns the inserted row with all the field; you can delimit
the returned columns with the `::columns` option."
[ds table params & {:as opts}]
(let [conn (get-connectable ds)
sql (sql/insert table params opts)
opts (if (empty? opts)
default-insert-opts
(into default-insert-opts (rename-opts opts)))]
(jdbc/execute-one! conn sql opts)))
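A minimal usage sketch of the reworked `insert!`, mirroring the call
sites updated later in this diff (`conn` and `params` are placeholders):
(comment
  ;; returns the complete inserted row by default
  (db/insert! conn :file-library-rel params)
  ;; skip returning the row when it is not needed
  (db/insert! conn :file-library-rel params ::db/return-keys false))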
(defn insert-multi!
[ds table cols rows & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
(-> (get-connectable ds)
(exec! (sql/insert-multi table cols rows opts)
(assoc opts ::return-keys? return-keys?))))
(defn insert-many!
"An optimized version of `insert!` that perform insertion of multiple
values at once.
This expands to a single SQL statement with placeholders for every
value being inserted. For large data sets, this may exceed the limit
of sql string size and/or number of parameters."
[ds table cols rows & {:as opts}]
(let [conn (get-connectable ds)
sql (sql/insert-many table cols rows opts)
opts (if (empty? opts)
default-insert-opts
(into default-insert-opts (rename-opts opts)))
opts (update opts :return-keys boolean)]
(jdbc/execute! conn sql opts)))
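A minimal usage sketch of `insert-many!`, based on the audit-log call
site updated later in this diff (column names and rows are illustrative):
(comment
  ;; one INSERT statement with placeholders for every row
  (db/insert-many! pool :audit-log
                   [:id :name :props]
                   [[id-1 "event-a" props-a]
                    [id-2 "event-b" props-b]]))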
(defn update!
[ds table params where & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
(-> (get-connectable ds)
(exec-one! (sql/update table params where opts)
(assoc opts ::return-keys? return-keys?))))
"A helper that build an UPDATE SQL statement and executes it.
Given a connectable object, a table name, a hash map of columns and
values to set, and either a hash map of columns and values to search
on or a vector of a SQL where clause and parameters, perform an
update on the table.
By default returns an object with the number of affected rows; a
complete row can be returned if you pass `::return-keys` with `true`
or with a vector of columns.
Also it can be combined with the `::many` option if you perform an
update to multiple rows and you want all the affected rows to be
returned."
[ds table params where & {:as opts}]
(let [conn (get-connectable ds)
sql (sql/update table params where opts)
opts (if (empty? opts)
default-opts
(into default-opts (rename-opts opts)))
opts (update opts :return-keys boolean)]
(if (::many opts)
(jdbc/execute! conn sql opts)
(jdbc/execute-one! conn sql opts))))
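A minimal usage sketch of the `::return-keys` and `::many` behavior
described above, mirroring call sites updated later in this diff
(table and column names are taken from those call sites):
(comment
  ;; by default only the update count is available
  (-> (db/update! conn :file {:has-media-trimmed true} {:id id})
      (db/get-update-count)
      (pos?))
  ;; return the complete updated row...
  (db/update! conn :file {:name name} {:id id} {::db/return-keys true})
  ;; ...or only a selection of its columns
  (db/update! conn :file {:deleted-at (dt/now)} {:id id}
              {::db/return-keys [:id :name :project-id]})
  ;; with ::many, a multi-row update returns all affected rows
  (db/update! conn :file {:deleted-at (dt/now)} {:project-id project-id}
              {::db/many true ::db/return-keys true}))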
(defn delete!
[ds table params & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
(-> (get-connectable ds)
(exec-one! (sql/delete table params opts)
(assoc opts ::return-keys? return-keys?))))
"A helper that builds an DELETE SQL statement and executes it.
Given a connectable object, a table name, and either a hash map of columns
and values to search on or a vector of a SQL where clause and parameters,
perform a delete on the table.
By default returns an object with the number of affected rows; a
complete row can be returned if you pass `::return-keys` with `true`
or with a vector of columns.
Also it can be combined with the `::many` option if you perform an
update to multiple rows and you want all the affected rows to be
returned."
[ds table params & {:as opts}]
(let [conn (get-connectable ds)
sql (sql/delete table params opts)
opts (if (empty? opts)
default-opts
(into default-opts (rename-opts opts)))]
(if (::many opts)
(jdbc/execute! conn sql opts)
(jdbc/execute-one! conn sql opts))))
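A minimal usage sketch of `delete!`, mirroring call sites updated later
in this diff:
(comment
  ;; returns the delete count by default
  (db/delete! conn :comment {:id id})
  ;; return the deleted row instead
  (db/delete! conn :team-invitation
              {:team-id team-id :email-to email}
              {::db/return-keys true}))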
(defn query
[ds table params & {:as opts}]
(exec! ds (sql/select table params opts) opts))
(defn is-row-deleted?
[{:keys [deleted-at]}]
@ -320,7 +397,7 @@
[ds table params & {:as opts}]
(let [rows (exec! ds (sql/select table params opts))
rows (cond->> rows
(::remove-deleted? opts true)
(::remove-deleted opts true)
(remove is-row-deleted?))]
(first rows)))
@ -329,7 +406,7 @@
filters. Raises :not-found exception if no object is found."
[ds table params & {:as opts}]
(let [row (get* ds table params opts)]
(when (and (not row) (::check-deleted? opts true))
(when (and (not row) (::check-deleted opts true))
(ex/raise :type :not-found
:code :object-not-found
:table table
@ -364,10 +441,6 @@
[ds table id & {:as opts}]
(get ds table {:id id} opts))
(defn query
[ds table params & {:as opts}]
(exec! ds (sql/select table params opts)))
(defn pgobject?
([v]
(instance? PGobject v))
@ -567,11 +640,6 @@
(.setType "jsonb")
(.setValue (json/encode-str data)))))
(defn get-update-count
[result]
(:next.jdbc/update-count result))
;; --- Locks
(def ^:private siphash-state

View file

@ -8,7 +8,6 @@
(:refer-clojure :exclude [update])
(:require
[app.db :as-alias db]
[clojure.set :as set]
[clojure.string :as str]
[next.jdbc.optional :as jdbc-opt]
[next.jdbc.sql.builder :as sql]))
@ -20,14 +19,6 @@
{:table-fn snake-case
:column-fn snake-case})
(def params-mapping
{::db/return-keys? :return-keys
::db/columns :columns})
(defn adapt-opts
[opts]
(set/rename-keys opts params-mapping))
(defn as-kebab-maps
[rs opts]
(jdbc-opt/as-unqualified-modified-maps rs (assoc opts :label-fn kebab-case)))
@ -42,7 +33,7 @@
(assoc :suffix "ON CONFLICT DO NOTHING"))]
(sql/for-insert table key-map opts))))
(defn insert-multi
(defn insert-many
[table cols rows opts]
(let [opts (merge default-opts opts)]
(sql/for-insert-multi table cols rows opts)))
@ -53,11 +44,9 @@
([table where-params opts]
(let [opts (merge default-opts opts)
opts (cond-> opts
(::db/columns opts) (assoc :columns (::db/columns opts))
(::db/for-update? opts) (assoc :suffix "FOR UPDATE")
(::db/for-share? opts) (assoc :suffix "FOR KEY SHARE")
(:for-update opts) (assoc :suffix "FOR UPDATE")
(:for-key-share opts) (assoc :suffix "FOR KEY SHARE"))]
(::columns opts) (assoc :columns (::columns opts))
(::for-update opts) (assoc :suffix "FOR UPDATE")
(::for-share opts) (assoc :suffix "FOR KEY SHARE"))]
(sql/for-query table where-params opts))))
(defn update
@ -65,11 +54,9 @@
(update table key-map where-params nil))
([table key-map where-params opts]
(let [opts (into default-opts opts)
opts (if-let [columns (::db/columns opts)]
(let [columns (if (seq columns)
(sql/as-cols columns opts)
"*")]
(assoc opts :suffix (str "RETURNING " columns)))
keys (::db/return-keys opts)
opts (if (vector? keys)
(assoc opts :suffix (str "RETURNING " (sql/as-cols keys opts)))
opts)]
(sql/for-update table key-map where-params opts))))
@ -77,5 +64,9 @@
([table where-params]
(delete table where-params nil))
([table where-params opts]
(let [opts (merge default-opts opts)]
(let [opts (merge default-opts opts)
keys (::db/return-keys opts)
opts (if (vector? keys)
(assoc opts :suffix (str "RETURNING " (sql/as-cols keys opts)))
opts)]
(sql/for-delete table where-params opts))))

View file

@ -39,27 +39,54 @@
[app.rpc.commands.media :as cmd.media]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.svgo :as svgo]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[buddy.core.codecs :as bc]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.exec :as px]
[promesa.exec.semaphore :as ps]
[promesa.util :as pu]))
[promesa.core :as p]))
(def ^:dynamic *system* nil)
(def ^:dynamic *stats* nil)
(def ^:dynamic *file-stats* nil)
(def ^:dynamic *team-stats* nil)
(def ^:dynamic *semaphore* nil)
(def ^:dynamic *skip-on-error* true)
(def ^:dynamic *stats*
"A dynamic var for setting up state for collect stats globally."
nil)
(def ^:dynamic *skip-on-error*
"A dynamic var for setting up the default error behavior."
true)
(def ^:dynamic ^:private *system*
"An internal var for making the current `system` available to all
internal functions without the need to explicitly pass it top down."
nil)
(def ^:dynamic ^:private *max-procs*
"A dynamic variable that can optionally indicates the maxumum number
of concurrent graphics migration processes."
nil)
(def ^:dynamic ^:private *file-stats*
"An internal dynamic var for collect stats by file."
nil)
(def ^:dynamic ^:private *team-stats*
"An internal dynamic var for collect stats by team."
nil)
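For orientation, a minimal sketch of how these vars are bound by the
callers shown later in this diff (the atom and option values are
illustrative):
(comment
  (binding [*stats*         (atom {})  ;; collect global stats here
            *skip-on-error* true]      ;; log and continue on per-object errors
    ;; migrate-file! itself binds *file-stats* and *max-procs*
    (migrate-file! system file-id :max-procs 2 :validate? true)))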
(def grid-gap 50)
(def frame-gap 200)
(def max-group-size 50)
(defn decode-row
[{:keys [features data] :as row}]
(cond-> row
(some? features)
(assoc :features (db/decode-pgarray features #{}))
(some? data)
(assoc :data (blob/decode data))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FILE PREPARATION BEFORE MIGRATION
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -220,19 +247,17 @@
(fn [file-data]
;; Transform component and copy heads to frames, and set the
;; frame-id of their children
(letfn [(fix-container
[container]
(letfn [(fix-container [container]
(update container :objects update-vals fix-shape))
(fix-shape
[shape]
(fix-shape [shape]
(if (or (nil? (:parent-id shape)) (ctk/instance-head? shape))
(assoc shape
:type :frame ; Old groups must be converted
:fills (or (:fills shape) []) ; to frames and conform to spec
:hide-in-viewer (or (:hide-in-viewer shape) true)
:rx (or (:rx shape) 0)
:ry (or (:ry shape) 0))
:type :frame ; Old groups must be converted
:fills (or (:fills shape) []) ; to frames and conform to spec
:hide-in-viewer (or (:hide-in-viewer shape) true)
:rx (or (:rx shape) 0)
:ry (or (:ry shape) 0))
shape))]
(-> file-data
(update :pages-index update-vals fix-container)
@ -310,10 +335,10 @@
(defn- get-asset-groups
[assets generic-name]
(let [; Group by first element of the path.
(let [;; Group by first element of the path.
groups (d/group-by #(first (cfh/split-path (:path %))) assets)
; Split large groups in chunks of max-group-size elements
;; Split large groups in chunks of max-group-size elements
groups (loop [groups (seq groups)
result {}]
(if (empty? groups)
@ -334,15 +359,14 @@
result
splits)))))))
; Sort assets in each group by path
;; Sort assets in each group by path
groups (update-vals groups (fn [assets]
(sort-by (fn [{:keys [path name]}]
(str/lower (cfh/merge-path-item path name)))
assets)))
assets)))]
; Sort groups by name
groups (into (sorted-map) groups)]
groups))
;; Sort groups by name
(into (sorted-map) groups)))
(defn- create-frame
[name position width height]
@ -612,14 +636,11 @@
(defn- create-shapes-for-svg
[{:keys [id] :as mobj} file-id objects frame-id position]
(let [svg-text (get-svg-content id)
optimizer (::csvg/optimizer *system*)
svg-text (csvg/optimize optimizer svg-text)
svg-data (-> (csvg/parse svg-text)
(assoc :name (:name mobj))
(collect-and-persist-images file-id))]
(let [svg-text (get-svg-content id)
svg-text (svgo/optimize *system* svg-text)
svg-data (-> (csvg/parse svg-text)
(assoc :name (:name mobj))
(collect-and-persist-images file-id))]
(sbuilder/create-svg-shapes svg-data position objects frame-id frame-id #{} false)))
@ -678,9 +699,7 @@
(defn- create-media-grid
[fdata page-id frame-id grid media-group]
(let [factory (px/thread-factory :virtual true)
executor (px/fixed-executor :parallelism 10 :factory factory)
process (fn [mobj position]
(let [process (fn [mobj position]
(let [position (gpt/add position (gpt/point grid-gap grid-gap))
tp1 (dt/tpoint)]
(try
@ -690,7 +709,6 @@
:file-id (str (:id fdata))
:id (str (:id mobj))
:cause cause)
(if-not *skip-on-error*
(throw cause)
nil))
@ -699,21 +717,24 @@
:file-id (str (:id fdata))
:media-id (str (:id mobj))
:elapsed (dt/format-duration (tp1)))))))]
(try
(->> (d/zip media-group grid)
(map (fn [[mobj position]]
(sse/tap {:type :migration-progress
:section :graphics
:name (:name mobj)})
(px/submit! executor (partial process mobj position))))
(reduce (fn [fdata promise]
(if-let [changes (deref promise)]
(-> (assoc-in fdata [:options :components-v2] true)
(cp/process-changes changes false))
fdata))
fdata))
(finally
(pu/close! executor)))))
(->> (d/zip media-group grid)
(partition-all (or *max-procs* 1))
(mapcat (fn [partition]
(->> partition
(map (fn [[mobj position]]
(sse/tap {:type :migration-progress
:section :graphics
:name (:name mobj)})
(p/vthread (process mobj position))))
(doall)
(map deref)
(doall))))
(filter some?)
(reduce (fn [fdata changes]
(-> (assoc-in fdata [:options :components-v2] true)
(cp/process-changes changes false)))
fdata))))
(defn- migrate-graphics
[fdata]
@ -759,6 +780,11 @@
(create-media-grid fdata page-id (:id frame) grid assets)
(gpt/add position (gpt/point 0 (+ height (* 2 grid-gap) frame-gap))))))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PRIVATE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- migrate-fdata
[fdata libs]
(let [migrated? (dm/get-in fdata [:options :components-v2])]
@ -771,11 +797,22 @@
(defn- get-file
[system id]
(binding [pmap/*load-fn* (partial fdata/load-pointer system id)]
(-> (files/get-file system id :migrate? false)
(-> (db/get system :file {:id id}
{::db/remove-deleted false
::db/check-deleted false})
(decode-row)
(update :data assoc :id id)
(update :data fdata/process-pointers deref)
(fmg/migrate-file))))
(defn- get-team
[system team-id]
(-> (db/get system :team {:id team-id}
{::db/remove-deleted false
::db/check-deleted false})
(decode-row)))
(defn- validate-file!
[file libs throw-on-validate?]
(try
@ -791,7 +828,8 @@
(let [file (get-file system id)
libs (->> (files/get-file-libraries conn id)
(into [file] (comp (map :id) (map (partial get-file system))))
(into [file] (comp (map :id)
(map (partial get-file system))))
(d/index-by :id))
file (-> file
@ -816,18 +854,39 @@
{:data (blob/encode (:data file))
:features (db/create-array conn "text" (:features file))
:revn (:revn file)}
{:id (:id file)}
{::db/return-keys? false})
{:id (:id file)})
(dissoc file :data)))
(def ^:private sql:get-and-lock-team-files
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
WHERE p.team_id = ?
FOR UPDATE")
(defn- get-and-lock-files
[conn team-id]
(->> (db/cursor conn [sql:get-and-lock-team-files team-id])
(map :id)))
(defn- update-team-features!
[conn team-id features]
(let [features (db/create-array conn "text" features)]
(db/update! conn :team
{:features features}
{:id team-id})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn migrate-file!
[system file-id & {:keys [validate? throw-on-validate?]}]
(let [tpoint (dt/tpoint)
file-id (if (string? file-id)
(parse-uuid file-id)
file-id)]
(binding [*file-stats* (atom {})]
[system file-id & {:keys [validate? throw-on-validate? max-procs]}]
(let [tpoint (dt/tpoint)]
(binding [*file-stats* (atom {})
*max-procs* max-procs]
(try
(l/dbg :hint "migrate:file:start" :file-id (str file-id))
@ -839,7 +898,6 @@
(process-file system file-id
:validate? validate?
:throw-on-validate? throw-on-validate?)))))
(finally
(let [elapsed (tpoint)
components (get @*file-stats* :processed/components 0)
@ -855,73 +913,51 @@
(some-> *team-stats* (swap! update :processed/files (fnil inc 0)))))))))
(defn migrate-team!
[system team-id & {:keys [validate? throw-on-validate?]}]
(let [tpoint (dt/tpoint)
team-id (if (string? team-id)
(parse-uuid team-id)
team-id)]
(l/dbg :hint "migrate:team:start" :team-id (dm/str team-id))
[system team-id & {:keys [validate? throw-on-validate? max-procs]}]
(l/dbg :hint "migrate:team:start"
:team-id (dm/str team-id))
(let [tpoint (dt/tpoint)
migrate-file
(fn [system file-id]
(migrate-file! system file-id
:max-procs max-procs
:validate? validate?
:throw-on-validate? throw-on-validate?))
migrate-team
(fn [{:keys [::db/conn] :as system} {:keys [id features] :as team}]
(let [features (-> features
(disj "ephimeral/v2-migration")
(conj "components/v2")
(conj "layout/grid")
(conj "styles/v2"))]
(run! (partial migrate-file system)
(get-and-lock-files conn id))
(update-team-features! conn id features)))]
(binding [*team-stats* (atom {})]
(try
;; We execute this out of transaction because we want this
;; change to be visible to all other sessions before starting
;; the migration
(let [sql (str "UPDATE team SET features = "
" array_append(features, 'ephimeral/v2-migration') "
" WHERE id = ?")]
(db/exec-one! system [sql team-id]))
(db/tx-run! system
(fn [{:keys [::db/conn] :as system}]
;; Lock the team
(db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
(let [{:keys [features] :as team} (-> (db/get conn :team {:id team-id})
(update :features db/decode-pgarray #{}))]
(if (contains? features "components/v2")
(l/dbg :hint "team already migrated")
(let [sql (str/concat
"SELECT f.id FROM file AS f "
" JOIN project AS p ON (p.id = f.project_id) "
"WHERE p.team_id = ? AND f.deleted_at IS NULL AND p.deleted_at IS NULL "
"FOR UPDATE")]
(doseq [file-id (->> (db/exec! conn [sql team-id])
(map :id))]
(migrate-file! system file-id
:validate? validate?
:throw-on-validate? throw-on-validate?))
(let [features (-> features
(disj "ephimeral/v2-migration")
(conj "components/v2")
(conj "layout/grid")
(conj "styles/v2"))]
(db/update! conn :team
{:features (db/create-array conn "text" features)}
{:id team-id})))))))
(db/tx-run! system (fn [system]
(db/exec-one! system ["SET idle_in_transaction_session_timeout = 0"])
(let [team (get-team system team-id)]
(if (contains? (:features team) "components/v2")
(l/inf :hint "team already migrated")
(migrate-team system team)))))
(finally
(some-> *semaphore* ps/release!)
(let [elapsed (tpoint)]
(let [elapsed (tpoint)
components (get @*team-stats* :processed/components 0)
graphics (get @*team-stats* :processed/graphics 0)
files (get @*team-stats* :processed/files 0)]
(some-> *stats* (swap! update :processed/teams (fnil inc 0)))
;; We execute this out of transaction because we want this
;; change to be visible to all other sessions before starting
;; the migration
(let [sql (str "UPDATE team SET features = "
" array_remove(features, 'ephimeral/v2-migration') "
" WHERE id = ?")]
(db/exec-one! system [sql team-id]))
(let [components (get @*team-stats* :processed/components 0)
graphics (get @*team-stats* :processed/graphics 0)
files (get @*team-stats* :processed/files 0)]
(l/dbg :hint "migrate:team:end"
:team-id (dm/str team-id)
:files files
:components components
:graphics graphics
:elapsed (dt/format-duration elapsed)))))))))
(l/dbg :hint "migrate:team:end"
:team-id (dm/str team-id)
:files files
:components components
:graphics graphics
:elapsed (dt/format-duration elapsed))))))))

View file

@ -11,6 +11,7 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.util.blob :as blob]
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]))
@ -38,8 +39,8 @@
[system file-id id]
(let [{:keys [content]} (db/get system :file-data-fragment
{:id id :file-id file-id}
{::db/columns [:content]
::db/check-deleted? false})]
{::sql/columns [:content]
::db/check-deleted false})]
(when-not content
(ex/raise :type :internal
:code :fragment-not-found

View file

@ -111,9 +111,11 @@
" where id=?")
err
(:id whook)]
res (db/exec-one! pool sql {::db/return-keys? true})]
res (db/exec-one! pool sql {::db/return-keys true})]
(when (>= (:error-count res) max-errors)
(db/update! pool :webhook {:is-active false} {:id (:id whook)})))
(db/update! pool :webhook
{:is-active false}
{:id (:id whook)})))
(db/update! pool :webhook
{:updated-at (dt/now)

View file

@ -10,7 +10,6 @@
[app.auth.oidc :as-alias oidc]
[app.auth.oidc.providers :as-alias oidc.providers]
[app.common.logging :as l]
[app.common.svg :as csvg]
[app.config :as cf]
[app.db :as-alias db]
[app.email :as-alias email]
@ -37,6 +36,7 @@
[app.storage.gc-deleted :as-alias sto.gc-deleted]
[app.storage.gc-touched :as-alias sto.gc-touched]
[app.storage.s3 :as-alias sto.s3]
[app.svgo :as-alias svgo]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[cider.nrepl :refer [cider-nrepl-handler]]
@ -316,7 +316,7 @@
::mtx/metrics (ig/ref ::mtx/metrics)
::mbus/msgbus (ig/ref ::mbus/msgbus)
::rds/redis (ig/ref ::rds/redis)
::csvg/optimizer (ig/ref ::csvg/optimizer)
::svgo/optimizer (ig/ref ::svgo/optimizer)
::rpc/climit (ig/ref ::rpc/climit)
::rpc/rlimit (ig/ref ::rpc/rlimit)
@ -409,8 +409,9 @@
;; module requires the migrations to run before initialize.
::migrations (ig/ref :app.migrations/migrations)}
::csvg/optimizer
{}
::svgo/optimizer
{::wrk/executor (ig/ref ::wrk/executor)
::svgo/max-procs (cf/get :svgo-max-procs)}
::audit.tasks/archive
{::props (ig/ref ::setup/props)

View file

@ -48,7 +48,7 @@
(map event->row))
events (sequence xform events)]
(when (seq events)
(db/insert-multi! pool :audit-log event-columns events))))
(db/insert-many! pool :audit-log event-columns events))))
(def schema:event
[:map {:title "Event"}

View file

@ -133,7 +133,8 @@
(update-password [conn profile-id]
(let [pwd (profile/derive-password cfg password)]
(db/update! conn :profile {:password pwd} {:id profile-id})))]
(db/update! conn :profile {:password pwd} {:id profile-id})
nil))]
(db/with-atomic [conn pool]
(->> (validate-token token)
@ -303,7 +304,8 @@
(-> (db/update! conn :profile
{:default-team-id (:id team)
:default-project-id (:default-project-id team)}
{:id id})
{:id id}
{::db/return-keys true})
(profile/decode-row))))

View file

@ -317,7 +317,7 @@
[cfg file-id]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(some-> (db/get* conn :file {:id file-id} {::db/remove-deleted? false})
(some-> (db/get* conn :file {:id file-id} {::db/remove-deleted false})
(files/decode-row)
(update :data feat.fdata/process-pointers deref))))))
@ -664,6 +664,7 @@
(case feature
"components/v2"
(feat.compv2/migrate-file! options file-id
:max-procs 2
:validate? validate?
:throw-on-validate? true)

View file

@ -12,6 +12,7 @@
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.db.sql :as sql]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
@ -62,8 +63,8 @@
(decode-row)))
(defn- get-comment
[conn comment-id & {:keys [for-update?]}]
(db/get-by-id conn :comment comment-id {:for-update for-update?}))
[conn comment-id & {:as opts}]
(db/get-by-id conn :comment comment-id opts))
(defn- get-next-seqn
[conn file-id]
@ -375,7 +376,7 @@
{::doc/added "1.15"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
(db/with-atomic [conn pool]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(upsert-comment-thread-status! conn profile-id id))))
@ -392,7 +393,7 @@
{::doc/added "1.15"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id is-resolved share-id] :as params}]
(db/with-atomic [conn pool]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(db/update! conn :comment-thread
{:is-resolved is-resolved}
@ -415,7 +416,7 @@
[cfg {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content]}]
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::sql/for-update true)
{:keys [team-id project-id page-name] :as file} (get-file cfg file-id page-id)]
(files/check-comment-permissions! conn profile-id (:id file) share-id)
@ -471,8 +472,8 @@
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::db/for-update? true)
{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)]
(let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::sql/for-update true)
{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::sql/for-update true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
@ -504,7 +505,7 @@
{::doc/added "1.15"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id]}]
(db/with-atomic [conn pool]
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(when-not (= owner-id profile-id)
(ex/raise :type :validation
@ -524,14 +525,14 @@
{::doc/added "1.15"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
(db/with-atomic [conn pool]
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id ::db/for-update? true)
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id ::sql/for-update true)
{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(when-not (= owner-id profile-id)
(ex/raise :type :validation
:code :not-allowed))
(db/delete! conn :comment {:id id}))))
(db/delete! conn :comment {:id id})
nil)))
;; --- COMMAND: Update comment thread position
@ -544,7 +545,7 @@
{::doc/added "1.15"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id position frame-id share-id]}]
(db/with-atomic [conn pool]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(db/update! conn :comment-thread
{:modified-at request-at
@ -564,7 +565,7 @@
{::doc/added "1.15"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id frame-id share-id]}]
(db/with-atomic [conn pool]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(db/update! conn :comment-thread
{:modified-at request-at

View file

@ -20,6 +20,7 @@
[app.common.types.file :as ctf]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
@ -238,8 +239,7 @@
(db/update! conn :file
{:data (blob/encode (:data file))
:features (db/create-array conn "text" (:features file))}
{:id id}
{::db/return-keys? false})
{:id id})
(when (contains? (:features file) "fdata/pointer-map")
(feat.fdata/persist-pointers! cfg id)))
@ -262,9 +262,9 @@
(when (some? project-id)
{:project-id project-id}))
file (-> (db/get conn :file params
{::db/check-deleted? (not include-deleted?)
::db/remove-deleted? (not include-deleted?)
::db/for-update? lock-for-update?})
{::db/check-deleted (not include-deleted?)
::db/remove-deleted (not include-deleted?)
::sql/for-update lock-for-update?})
(decode-row))]
(if migrate?
(migrate-file cfg file)
@ -733,7 +733,8 @@
(db/update! conn :file
{:name name
:modified-at (dt/now)}
{:id id}))
{:id id}
{::db/return-keys true}))
(sv/defmethod ::rename-file
{::doc/added "1.17"
@ -860,9 +861,7 @@
(let [file (assoc file :is-shared true)]
(db/update! conn :file
{:is-shared true}
{:id id}
::db/return-keys? false)
{:id id})
file)
:else
@ -899,7 +898,7 @@
(db/update! conn :file
{:deleted-at (dt/now)}
{:id file-id}
{::db/columns [:id :name :is-shared :project-id :created-at :modified-at]}))
{::db/return-keys [:id :name :is-shared :project-id :created-at :modified-at]}))
(def ^:private
schema:delete-file
@ -998,8 +997,8 @@
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(check-edition-permissions! conn profile-id file-id)
(unlink-file-from-library conn params)))
(unlink-file-from-library conn params)
nil))
;; --- MUTATION COMMAND: update-sync
@ -1008,7 +1007,8 @@
(db/update! conn :file-library-rel
{:synced-at (dt/now)}
{:file-id file-id
:library-file-id library-id}))
:library-file-id library-id}
{::db/return-keys true}))
(def ^:private schema:update-file-library-sync-status
[:map {:title "update-file-library-sync-status"}
@ -1031,7 +1031,8 @@
[conn {:keys [file-id date] :as params}]
(db/update! conn :file
{:ignore-sync-until date}
{:id file-id}))
{:id file-id}
{::db/return-keys true}))
(s/def ::ignore-file-library-sync-status
(s/keys :req [::rpc/profile-id]

View file

@ -17,6 +17,7 @@
[app.common.types.shape-tree :as ctt]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
@ -236,8 +237,8 @@
{:file-id file-id
:object-id object-id
:tag tag}
{::db/remove-deleted? false
::db/for-update? true})
{::db/remove-deleted false
::sql/for-update true})
path (:path media)
mtype (:mtype media)
@ -312,14 +313,13 @@
(when-let [{:keys [media-id tag]} (db/get* conn :file-tagged-object-thumbnail
{:file-id file-id
:object-id object-id}
{::db/for-update? true})]
{::sql/for-update true})]
(sto/touch-object! storage media-id)
(db/update! conn :file-tagged-object-thumbnail
{:deleted-at (dt/now)}
{:file-id file-id
:object-id object-id
:tag tag}
{::db/return-keys? false})))
:tag tag})))
(s/def ::delete-file-object-thumbnail
(s/keys :req [::rpc/profile-id]
@ -365,8 +365,8 @@
thumb (db/get* conn :file-thumbnail
{:file-id file-id
:revn revn}
{::db/remove-deleted? false
::db/for-update? true})]
{::db/remove-deleted false
::sql/for-update true})]
(if (some? thumb)
(do

View file

@ -250,7 +250,8 @@
:features (db/create-array conn "text" (:features file))
:data (when (take-snapshot? file)
(:data file))
:changes (blob/encode changes)})
:changes (blob/encode changes)}
{::db/return-keys false})
(db/update! conn :file
{:revn (:revn file)

View file

@ -11,6 +11,7 @@
[app.common.schema :as sm]
[app.common.uuid :as uuid]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.media :as media]
@ -179,8 +180,7 @@
(db/update! conn :team-font-variant
{:font-family name}
{:font-id id
:team-id team-id}
{::db/return-keys? false})
:team-id team-id})
(rph/with-meta (rph/wrap nil)
{::audit/replace-props {:id id
@ -201,7 +201,6 @@
::webhooks/event? true
::sm/params schema:delete-font}
[cfg {:keys [::rpc/profile-id id team-id]}]
(db/tx-run! cfg
(fn [{:keys [::db/conn ::sto/storage] :as cfg}]
(teams/check-edition-permissions! conn profile-id team-id)
@ -209,7 +208,7 @@
{:team-id team-id
:font-id id
:deleted-at nil}
{::db/for-update? true})
{::sql/for-update true})
storage (media/configure-assets-storage storage conn)
tnow (dt/now)]
@ -220,8 +219,7 @@
(doseq [font fonts]
(db/update! conn :team-font-variant
{:deleted-at tnow}
{:id (:id font)}
{::db/return-keys? false})
{:id (:id font)})
(some->> (:woff1-file-id font) (sto/touch-object! storage))
(some->> (:woff2-file-id font) (sto/touch-object! storage))
(some->> (:ttf-file-id font) (sto/touch-object! storage))
@ -250,13 +248,12 @@
(teams/check-edition-permissions! conn profile-id team-id)
(let [variant (db/get conn :team-font-variant
{:id id :team-id team-id}
{::db/for-update? true})
{::sql/for-update true})
storage (media/configure-assets-storage storage conn)]
(db/update! conn :team-font-variant
{:deleted-at (dt/now)}
{:id (:id variant)}
{::db/return-keys? false})
{:id (:id variant)})
(some->> (:woff1-file-id variant) (sto/touch-object! storage))
(some->> (:woff2-file-id variant) (sto/touch-object! storage))

View file

@ -215,7 +215,7 @@
(-> file
(update :features #(db/create-array conn "text" %))
(update :data blob/encode))
{::db/return-keys? false})
{::db/return-keys false})
;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less
@ -231,10 +231,10 @@
{::db/return-keys? false}))
(doseq [params flibs]
(db/insert! conn :file-library-rel params ::db/return-keys? false))
(db/insert! conn :file-library-rel params ::db/return-keys false))
(doseq [params fmeds]
(db/insert! conn :file-media-object params ::db/return-keys? false))
(db/insert! conn :file-media-object params ::db/return-keys false))
file))

View file

@ -157,8 +157,7 @@
(db/update! conn :file
{:modified-at (dt/now)
:has-media-trimmed false}
{:id file-id}
{::db/return-keys? false})
{:id file-id})
(db/exec-one! conn [sql:create-file-media-object
(or id (uuid/next))

View file

@ -13,6 +13,7 @@
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.email :as eml]
[app.http.session :as session]
[app.loggers.audit :as audit]
@ -99,7 +100,7 @@
;; NOTE: we need to retrieve the profile independently if we use
;; it or not for explicit locking and avoid concurrent updates of
;; the same row/object.
(let [profile (-> (db/get-by-id conn :profile profile-id ::db/for-update? true)
(let [profile (-> (db/get-by-id conn :profile profile-id ::sql/for-update true)
(decode-row))
;; Update the profile map with direct params
@ -164,7 +165,7 @@
(defn- validate-password!
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
(let [profile (db/get-by-id conn :profile profile-id ::db/for-update? true)]
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
(when (and (not= (:password profile) "!")
(not (:valid (verify-password cfg old-password (:password profile)))))
(ex/raise :type :validation
@ -176,7 +177,8 @@
(when-not (db/read-only? conn)
(db/update! conn :profile
{:password (auth/derive-password password)}
{:id id})))
{:id id})
nil))
;; --- MUTATION: Update Photo
@ -202,7 +204,7 @@
(defn update-profile-photo
[{:keys [::db/pool ::sto/storage] :as cfg} {:keys [profile-id file] :as params}]
(let [photo (upload-photo cfg params)
profile (db/get-by-id pool :profile profile-id ::db/for-update? true)]
profile (db/get-by-id pool :profile profile-id ::sql/for-update true)]
;; Schedule deletion of old photo
(when-let [id (:photo-id profile)]
@ -329,7 +331,7 @@
::sm/params schema:update-profile-props}
[{:keys [::db/pool]} {:keys [::rpc/profile-id props]}]
(db/with-atomic [conn pool]
(let [profile (get-profile conn profile-id ::db/for-update? true)
(let [profile (get-profile conn profile-id ::sql/for-update true)
props (reduce-kv (fn [props k v]
;; We don't accept namespaced keys
(if (simple-ident? k)

View file

@ -9,6 +9,7 @@
[app.common.data.macros :as dm]
[app.common.spec :as us]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as webhooks]
[app.rpc :as-alias rpc]
@ -233,7 +234,7 @@
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id name] :as params}]
(db/with-atomic [conn pool]
(check-edition-permissions! conn profile-id id)
(let [project (db/get-by-id conn :project id ::db/for-update? true)]
(let [project (db/get-by-id conn :project id ::sql/for-update true)]
(db/update! conn :project
{:name name}
{:id id})
@ -259,7 +260,8 @@
(check-edition-permissions! conn profile-id id)
(let [project (db/update! conn :project
{:deleted-at (dt/now)}
{:id id :is-default false})]
{:id id :is-default false}
{::db/return-keys true})]
(rph/with-meta (rph/wrap)
{::audit/props {:team-id (:team-id project)
:name (:name project)

View file

@ -963,5 +963,6 @@
(let [invitation (db/delete! conn :team-invitation
{:team-id team-id
:email-to (str/lower email)})]
:email-to (str/lower email)}
{::db/return-keys true})]
(rph/wrap nil {::audit/props {:invitation-id (:id invitation)}})))))

View file

@ -95,7 +95,8 @@
:mtype mtype
:error-code nil
:error-count 0}
{:id id})
{:id id}
{::db/return-keys true})
(decode-row)))
(sv/defmethod ::create-webhook

View file

@ -65,9 +65,8 @@
(let [res (db/update! conn :profile
params
{:email email
:deleted-at nil}
{::db/return-keys? false})]
(pos? (:next.jdbc/update-count res))))))))
:deleted-at nil})]
(pos? (db/get-update-count res))))))))
(defmethod exec-command :delete-profile
[{:keys [email soft]}]
@ -82,12 +81,10 @@
(let [res (if soft
(db/update! conn :profile
{:deleted-at (dt/now)}
{:email email :deleted-at nil}
{::db/return-keys? false})
{:email email :deleted-at nil})
(db/delete! conn :profile
{:email email}
{::db/return-keys? false}))]
(pos? (:next.jdbc/update-count res))))))
{:email email}))]
(pos? (db/get-update-count res))))))
(defmethod exec-command :search-profile
[{:keys [email]}]

View file

@ -6,8 +6,6 @@
(ns app.srepl.components-v2
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.db :as db]
@ -19,6 +17,13 @@
[promesa.exec.semaphore :as ps]
[promesa.util :as pu]))
(def ^:dynamic *scope* nil)
(def ^:dynamic *semaphore* nil)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PRIVATE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- print-stats!
[stats]
(->> stats
@ -87,210 +92,228 @@
res (db/exec-one! pool [sql])]
(:count res)))
(defn migrate-file!
[system file-id & {:keys [rollback?] :or {rollback? true}}]
(l/dbg :hint "migrate:start")
(let [tpoint (dt/tpoint)]
(try
(binding [feat/*stats* (atom {})]
(defn- mark-team-migration!
[{:keys [::db/pool]} team-id]
;; We execute this out of transaction because we want this
;; change to be visible to all other sessions before starting
;; the migration
(let [sql (str "UPDATE team SET features = "
" array_append(features, 'ephimeral/v2-migration') "
" WHERE id = ?")]
(db/exec-one! pool [sql team-id])))
(defn- unmark-team-migration!
[{:keys [::db/pool]} team-id]
;; We execute this out of transaction because we want this
;; change to be visible to all other sessions as soon as the
;; migration finishes
(let [sql (str "UPDATE team SET features = "
" array_remove(features, 'ephimeral/v2-migration') "
" WHERE id = ?")]
(db/exec-one! pool [sql team-id])))
(def ^:private sql:get-teams
"SELECT id, features
FROM team
WHERE deleted_at IS NULL
ORDER BY created_at ASC")
(defn- get-teams
[conn]
(->> (db/cursor conn sql:get-teams)
(map feat/decode-row)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn migrate-file!
[system file-id & {:keys [rollback? max-procs]
:or {rollback? true}}]
(l/dbg :hint "migrate:start" :rollback rollback?)
(let [tpoint (dt/tpoint)
file-id (if (string? file-id)
(parse-uuid file-id)
file-id)]
(binding [feat/*stats* (atom {})]
(try
(-> (assoc system ::db/rollback rollback?)
(feat/migrate-file! file-id))
(feat/migrate-file! file-id :max-procs max-procs))
(-> (deref feat/*stats*)
(assoc :elapsed (dt/format-duration (tpoint)))))
(assoc :elapsed (dt/format-duration (tpoint))))
(catch Throwable cause
(l/wrn :hint "migrate:error" :cause cause))
(catch Throwable cause
(l/wrn :hint "migrate:error" :cause cause))
(finally
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end" :elapsed elapsed))))))
(defn migrate-files!
[{:keys [::db/pool] :as system}
& {:keys [chunk-size max-jobs max-items start-at preset rollback? skip-on-error validate?]
:or {chunk-size 10
skip-on-error true
max-jobs 10
max-items Long/MAX_VALUE
preset :shutdown-on-failure
rollback? true
validate? false}}]
(letfn [(get-chunk [cursor]
(let [sql (str/concat
"SELECT id, created_at FROM file "
" WHERE created_at < ? AND deleted_at IS NULL "
" ORDER BY created_at desc LIMIT ?")
rows (db/exec! pool [sql cursor chunk-size])]
[(some->> rows peek :created-at) (seq rows)]))
(get-candidates []
(->> (d/iteration get-chunk
:vf second
:kf first
:initk (or start-at (dt/now)))
(take max-items)
(map :id)))]
(l/dbg :hint "migrate:start")
(let [fsem (ps/create :permits max-jobs)
total (get-total-files pool)
stats (atom {:files/total total})
tpoint (dt/tpoint)]
(add-watch stats :progress-report (report-progress-files tpoint))
(binding [feat/*stats* stats
feat/*semaphore* fsem
feat/*skip-on-error* skip-on-error]
(try
(pu/with-open [scope (px/structured-task-scope :preset preset :factory :virtual)]
(run! (fn [file-id]
(ps/acquire! feat/*semaphore*)
(px/submit! scope (fn []
(-> (assoc system ::db/rollback rollback?)
(feat/migrate-file! file-id
:validate? validate?
:throw-on-validate? (not skip-on-error))))))
(get-candidates))
(p/await! scope))
(-> (deref feat/*stats*)
(assoc :elapsed (dt/format-duration (tpoint))))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause))
(finally
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end" :elapsed elapsed))))))))
(finally
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end" :rollback rollback? :elapsed elapsed)))))))
(defn migrate-team!
[{:keys [::db/pool] :as system} team-id
& {:keys [rollback? skip-on-error validate?]
:or {rollback? true skip-on-error true validate? false}}]
(l/dbg :hint "migrate:start")
[{:keys [::db/pool] :as system} team-id & {:keys [rollback? skip-on-error validate? max-procs]
:or {rollback? true
skip-on-error true
validate? false
max-procs 1 }
:as opts}]
(let [total (get-total-files pool :team-id team-id)
stats (atom {:total/files total})
tpoint (dt/tpoint)]
(l/dbg :hint "migrate:start" :rollback rollback?)
(let [team-id (if (string? team-id)
(parse-uuid team-id)
team-id)
total (get-total-files pool :team-id team-id)
stats (atom {:total/files total})
tpoint (dt/tpoint)]
(add-watch stats :progress-report (report-progress-files tpoint))
(try
(binding [feat/*stats* stats
feat/*skip-on-error* skip-on-error]
(binding [feat/*stats* stats
feat/*skip-on-error* skip-on-error]
(try
(mark-team-migration! system team-id)
(-> (assoc system ::db/rollback rollback?)
(feat/migrate-team! team-id
:max-procs max-procs
:validate? validate?
:throw-on-validate? (not skip-on-error)))
(print-stats!
(-> (deref feat/*stats*)
(dissoc :total/files)
(assoc :elapsed (dt/format-duration (tpoint))))))
(assoc :elapsed (dt/format-duration (tpoint)))))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause))
(finally
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end" :elapsed elapsed))))))
(finally
(unmark-team-migration! system team-id)
(defn default-on-end
[stats]
(print-stats!
(-> stats
(update :elapsed/total dt/format-duration)
(dissoc :total/teams))))
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end" :rollback rollback? :elapsed elapsed)))))))
(defn migrate-teams!
[{:keys [::db/pool] :as system}
& {:keys [chunk-size max-jobs max-items start-at
rollback? validate? preset skip-on-error
max-time on-start on-progress on-error on-end]
:or {chunk-size 10000
validate? false
rollback? true
skip-on-error true
on-end default-on-end
preset :shutdown-on-failure
max-jobs Integer/MAX_VALUE
max-items Long/MAX_VALUE}}]
"A REPL helper for migrate all teams.
(letfn [(get-chunk [cursor]
(let [sql (str/concat
"SELECT id, created_at, features FROM team "
" WHERE created_at < ? AND deleted_at IS NULL "
" ORDER BY created_at desc LIMIT ?")
rows (db/exec! pool [sql cursor chunk-size])]
[(some->> rows peek :created-at) (seq rows)]))
This function starts multiple concurrent team migration processes
until the maximum number of jobs is reached, which defaults to `1`.
This is controlled with the `:max-jobs` option.
(get-candidates []
(->> (d/iteration get-chunk
:vf second
:kf first
:initk (or start-at (dt/now)))
(map #(update % :features db/decode-pgarray #{}))
(remove #(contains? (:features %) "ephimeral/v2-migration"))
(take max-items)
(map :id)))
Each team migration process can also start multiple procs for
graphics migration; the total number of those procs is controlled
with the `:max-procs` option.
(migrate-team [team-id]
(try
(-> (assoc system ::db/rollback rollback?)
(feat/migrate-team! team-id
:validate? validate?
:throw-on-validate? (not skip-on-error)))
(catch Throwable cause
(l/err :hint "unexpected error on processing team" :team-id (dm/str team-id) :cause cause))))
Internally, the graphics migration process uses the SVGO module,
which has a global limit on the number of concurrent operations;
make sure to set an appropriate value with the
PENPOT_SVGO_MAX_PROCS environment variable."
(process-team [scope tpoint mtime team-id]
(ps/acquire! feat/*semaphore*)
(let [ts (tpoint)]
(if (and mtime (neg? (compare mtime ts)))
(l/inf :hint "max time constraint reached" :elapsed (dt/format-duration ts))
(px/submit! scope (partial migrate-team team-id)))))]
[{:keys [::db/pool] :as system} & {:keys [max-jobs max-procs max-items
rollback? validate? preset
skip-on-error max-time
on-start on-progress on-error on-end]
:or {validate? false
rollback? true
skip-on-error true
preset :shutdown-on-failure
max-jobs 1
max-procs 10
max-items Long/MAX_VALUE}
:as opts}]
(l/dbg :hint "migrate:start")
(let [total (get-total-teams pool)
stats (atom {:total/teams (min total max-items)})
(let [sem (ps/create :permits max-jobs)
total (get-total-teams pool)
stats (atom {:total/teams (min total max-items)})
tpoint (dt/tpoint)
mtime (some-> max-time dt/duration)]
tpoint (dt/tpoint)
mtime (some-> max-time dt/duration)
(when (fn? on-start)
(on-start {:total total :rollback rollback?}))
scope (px/structured-task-scope :preset preset :factory :virtual)
sjobs (ps/create :permits max-jobs)
(add-watch stats :progress-report (report-progress-teams tpoint on-progress))
migrate-team
(fn [{:keys [id features] :as team}]
(ps/acquire! sjobs)
(let [ts (tpoint)]
(cond
(and mtime (neg? (compare mtime ts)))
(do
(l/inf :hint "max time constraint reached"
:team-id (str id)
:elapsed (dt/format-duration ts))
(ps/release! sjobs)
(reduced nil))
(binding [feat/*stats* stats
feat/*semaphore* sem
feat/*skip-on-error* skip-on-error]
(or (contains? features "ephimeral/v2-migration")
(contains? features "components/v2"))
(do
(l/dbg :hint "skip team" :team-id (str id))
(ps/release! sjobs))
:else
(px/submit! scope (fn []
(try
(mark-team-migration! system id)
(-> (assoc system ::db/rollback rollback?)
(feat/migrate-team! id
:max-procs max-procs
:validate? validate?
:throw-on-validate? (not skip-on-error)))
(catch Throwable cause
(l/err :hint "unexpected error on processing team"
:team-id (str id)
:cause cause))
(finally
(ps/release! sjobs)
(unmark-team-migration! system id))))))))]
(l/dbg :hint "migrate:start"
:rollback rollback?
:total total
:max-jobs max-jobs
:max-procs max-procs
:max-items max-items)
(add-watch stats :progress-report (report-progress-teams tpoint on-progress))
(binding [feat/*stats* stats
feat/*skip-on-error* skip-on-error]
(try
(when (fn? on-start)
(on-start {:total total :rollback rollback?}))
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(run! (partial migrate-team)
(->> (get-teams conn)
(take max-items)))))
(try
(pu/with-open [scope (px/structured-task-scope :preset preset
:factory :virtual)]
(loop [candidates (get-candidates)]
(when-let [team-id (first candidates)]
(when (process-team scope tpoint mtime team-id)
(recur (rest candidates)))))
(p/await! scope))
(when (fn? on-end)
(-> (deref stats)
(assoc :elapsed/total (tpoint))
(on-end)))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause)
(when (fn? on-error)
(on-error cause)))
(p/await! scope)
(finally
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end" :elapsed elapsed))))))))
(pu/close! scope)))
(if (fn? on-end)
(-> (deref stats)
(assoc :elapsed/total (tpoint))
(on-end))
(-> (deref stats)
(assoc :elapsed/total (tpoint))
(update :elapsed/total dt/format-duration)
(dissoc :total/teams)
(print-stats!)))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause)
(when (fn? on-error)
(on-error cause)))
(finally
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "migrate:end"
:rollback rollback?
:elapsed elapsed)))))))
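A hedged REPL sketch of the reworked `migrate-teams!` entry point,
using only the options documented in its docstring (values are
illustrative):
(comment
  ;; `system` is the running backend system map
  (migrate-teams! system
                  :rollback? false  ;; actually persist the migration
                  :max-jobs 2       ;; concurrent team migrations
                  :max-procs 10     ;; graphics migration procs per team
                  :validate? true))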

View file

@ -170,8 +170,7 @@
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
rs (db/update! pool-or-conn :storage-object
{:touched-at (dt/now)}
{:id id}
{::db/return-keys? false})]
{:id id})]
(pos? (db/get-update-count rs))))
(defn get-object-data
@ -220,8 +219,7 @@
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
res (db/update! pool-or-conn :storage-object
{:deleted-at (dt/now)}
{:id id}
{::db/return-keys? false})]
{:id id})]
(pos? (db/get-update-count res))))
(dm/export impl/resolve-backend)

View file

@ -47,8 +47,7 @@
[conn ids]
(let [ids (db/create-array conn "uuid" ids)]
(-> (db/exec-one! conn [sql:delete-sobjects ids])
(db/get-update-count))))
(db/get-update-count))))
(defn- delete-in-bulk!
[cfg backend-id ids]
@ -60,7 +59,6 @@
(fn [{:keys [::db/conn ::sto/storage]}]
(when-let [ids (lock-ids conn ids)]
(let [total (delete-sobjects! conn ids)]
(-> (impl/resolve-backend storage backend-id)
(impl/del-objects-in-bulk ids))
@ -68,7 +66,6 @@
(l/dbg :hint "permanently delete storage object"
:id (str id)
:backend (name backend-id)))
total))))
(catch Throwable cause
(l/err :hint "unexpected error on bulk deletion"

View file

@ -39,6 +39,7 @@
software.amazon.awssdk.core.async.AsyncRequestBody
software.amazon.awssdk.core.async.AsyncResponseTransformer
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
software.amazon.awssdk.regions.Region
@ -169,32 +170,34 @@
(defn- build-s3-client
[{:keys [::region ::endpoint ::io-threads]}]
(let [aconfig (-> (ClientAsyncConfiguration/builder)
(.build))
(let [executor (px/resolve-executor :virtual)
aconfig (-> (ClientAsyncConfiguration/builder)
(.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor)
(.build))
sconfig (-> (S3Configuration/builder)
(cond-> (some? endpoint) (.pathStyleAccessEnabled true))
(.build))
sconfig (-> (S3Configuration/builder)
(cond-> (some? endpoint) (.pathStyleAccessEnabled true))
(.build))
thr-num (or io-threads (min 16 (px/get-available-processors)))
hclient (-> (NettyNioAsyncHttpClient/builder)
(.eventLoopGroupBuilder (-> (SdkEventLoopGroup/builder)
(.numberOfThreads (int thr-num))))
(.connectionAcquisitionTimeout default-timeout)
(.connectionTimeout default-timeout)
(.readTimeout default-timeout)
(.writeTimeout default-timeout)
(.build))
thr-num (or io-threads (min 16 (px/get-available-processors)))
hclient (-> (NettyNioAsyncHttpClient/builder)
(.eventLoopGroupBuilder (-> (SdkEventLoopGroup/builder)
(.numberOfThreads (int thr-num))))
(.connectionAcquisitionTimeout default-timeout)
(.connectionTimeout default-timeout)
(.readTimeout default-timeout)
(.writeTimeout default-timeout)
(.build))
client (let [builder (S3AsyncClient/builder)
builder (.serviceConfiguration ^S3AsyncClientBuilder builder ^S3Configuration sconfig)
builder (.asyncConfiguration ^S3AsyncClientBuilder builder ^ClientAsyncConfiguration aconfig)
builder (.httpClient ^S3AsyncClientBuilder builder ^NettyNioAsyncHttpClient hclient)
builder (.region ^S3AsyncClientBuilder builder (lookup-region region))
builder (cond-> ^S3AsyncClientBuilder builder
(some? endpoint)
(.endpointOverride (URI. endpoint)))]
(.build ^S3AsyncClientBuilder builder))]
client (let [builder (S3AsyncClient/builder)
builder (.serviceConfiguration ^S3AsyncClientBuilder builder ^S3Configuration sconfig)
builder (.asyncConfiguration ^S3AsyncClientBuilder builder ^ClientAsyncConfiguration aconfig)
builder (.httpClient ^S3AsyncClientBuilder builder ^NettyNioAsyncHttpClient hclient)
builder (.region ^S3AsyncClientBuilder builder (lookup-region region))
builder (cond-> ^S3AsyncClientBuilder builder
(some? endpoint)
(.endpointOverride (URI. endpoint)))]
(.build ^S3AsyncClientBuilder builder))]
(reify
clojure.lang.IDeref

backend/src/app/svgo.clj (new file)
View file

@ -0,0 +1,65 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.svgo
"A SVG Optimizer service"
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.jsrt :as jsrt]
[app.common.logging :as l]
[app.common.spec :as us]
[app.worker :as-alias wrk]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.exec :as px]
[promesa.exec.bulkhead :as bh]
[promesa.exec.semaphore :as ps]
[promesa.util :as pu]))
(def ^:dynamic *semaphore*
"A dynamic variable that can optionally contain a traffic light to
appropriately delimit the use of resources, managed externally."
nil)
(defn optimize
[system data]
(dm/assert! "expect data to be a string" (string? data))
(letfn [(optimize-fn [pool]
(jsrt/run! pool
(fn [context]
(jsrt/set! context "svgData" data)
(jsrt/eval! context "penpotSvgo.optimize(svgData, {plugins: ['safeAndFastPreset']})"))))]
(try
(some-> *semaphore* ps/acquire!)
(let [{:keys [::jsrt/pool ::wrk/executor]} (::optimizer system)]
(dm/assert! "expect optimizer instance" (jsrt/pool? pool))
(px/invoke! executor (partial optimize-fn pool)))
(finally
(some-> *semaphore* ps/release!)))))
(s/def ::max-procs (s/nilable ::us/integer))
(defmethod ig/pre-init-spec ::optimizer [_]
(s/keys :req [::wrk/executor ::max-procs]))
(defmethod ig/prep-key ::optimizer
[_ cfg]
(merge {::max-procs 20} (d/without-nils cfg)))
(defmethod ig/init-key ::optimizer
[_ {:keys [::wrk/executor ::max-procs]}]
(l/inf :hint "initializing svg optimizer pool" :max-procs max-procs)
(let [init (jsrt/resource->source "app/common/svg/optimizer.js")
executor (bh/create :type :executor :executor executor :permits max-procs)]
{::jsrt/pool (jsrt/pool :init init)
::wrk/executor executor}))
(defmethod ig/halt-key! ::optimizer
[_ {:keys [::jsrt/pool]}]
(l/info :hint "stopping svg optimizer pool")
(pu/close! pool))
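A minimal sketch of how this service is wired and invoked, based on the
integrant configuration and the call site elsewhere in this diff (the
SVG string is illustrative):
(comment
  ;; main.clj wires the optimizer as:
  ;;   ::svgo/optimizer {::wrk/executor (ig/ref ::wrk/executor)
  ;;                     ::svgo/max-procs (cf/get :svgo-max-procs)}
  ;; `system` must contain the started optimizer entry
  (svgo/optimize system "<svg xmlns=\"http://www.w3.org/2000/svg\"><rect/></svg>"))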

View file

@ -249,8 +249,7 @@
(blob/encode))]
(db/update! conn :file
{:data data}
{:id file-id}
{::db/return-keys? false}))
{:id file-id}))
(count unused))))
@ -315,7 +314,6 @@
;; Mark file as trimmed
(db/update! conn :file
{:has-media-trimmed true}
{:id id}
{::db/return-keys? false})
{:id id})
(feat.fdata/persist-pointers! cfg id))))

View file

@ -89,9 +89,7 @@
;; CASCADE database triggers. This may leave orphan
;; teams, but there is a special task for deleting
;; orphaned teams.
(db/delete! conn :profile
{:id id}
{::db/return-keys? false})
(db/delete! conn :profile {:id id})
(inc total))
0)))
@ -118,20 +116,16 @@
(some->> photo-id (sto/touch-object! storage))
;; And finally, permanently delete the team.
(db/delete! conn :team
{:id id}
{::db/return-keys? false})
(db/delete! conn :team {:id id})
;; Mark for deletion in cascade
(db/update! conn :team-font-variant
{:deleted-at deleted-at}
{:team-id id}
{::db/return-keys? false})
{:team-id id})
(db/update! conn :project
{:deleted-at deleted-at}
{:team-id id}
{::db/return-keys? false})
{:team-id id})
(inc total))
0)))
@ -162,9 +156,7 @@
(some->> (:ttf-file-id font) (sto/touch-object! storage))
;; And finally, permanently delete the team font variant
(db/delete! conn :team-font-variant
{:id id}
{::db/return-keys? false})
(db/delete! conn :team-font-variant {:id id})
(inc total))
0)))
@ -187,16 +179,14 @@
:id (str id)
:team-id (str team-id)
:deleted-at (dt/format-instant deleted-at))
;; And finally, permanently delete the project.
(db/delete! conn :project
{:id id}
{::db/return-keys? false})
(db/delete! conn :project {:id id})
;; Mark files to be deleted
(db/update! conn :file
{:deleted-at deleted-at}
{:project-id id}
{::db/return-keys? false})
{:project-id id})
(inc total))
0)))
@ -221,25 +211,21 @@
:deleted-at (dt/format-instant deleted-at))
;; And finally, permanently delete the file.
(db/delete! conn :file
{:id id}
{::db/return-keys? false})
(db/delete! conn :file {:id id})
;; Mark file media objects to be deleted
(db/update! conn :file-media-object
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys? false})
{:file-id id})
;; Mark thumbnails to be deleted
(db/update! conn :file-thumbnail
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys? false})
{:file-id id})
(db/update! conn :file-tagged-object-thumbnail
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys? false})
{:file-id id})
(inc total))
0)))

View file

@ -54,7 +54,6 @@
(l/trc :hint "mark orphan team for deletion" :id (str team-id))
(db/update! conn :team
{:deleted-at (dt/now)}
{:id team-id}
{::db/return-keys? false})
{:id team-id})
(inc total))
0)))

View file

@ -42,8 +42,8 @@
(defmethod ig/init-key ::executor
[_ _]
(let [factory (px/thread-factory :prefix "penpot/default/")
executor (px/cached-executor :factory factory :keepalive 30000)]
(let [factory (px/thread-factory :prefix "penpot/default/")
executor (px/cached-executor :factory factory :keepalive 60000)]
(l/inf :hint "starting executor")
(reify
java.lang.AutoCloseable

View file

@ -140,7 +140,7 @@
(t/is (= 0 (:freeze res))))
;; check that the storage object still exists but is marked as deleted
(let [row (th/db-get :storage-object {:id (:media-id row1)} {::db/remove-deleted? false})]
(let [row (th/db-get :storage-object {:id (:media-id row1)} {::db/remove-deleted false})]
(t/is (some? (:deleted-at row))))
;; Run the storage gc deleted task, it should permanently delete
@ -152,7 +152,7 @@
(t/is (some? (sto/get-object storage (:media-id row2))))
;; check that the storage object still exists but is marked as deleted
(let [row (th/db-get :storage-object {:id (:media-id row1)} {::db/remove-deleted? false})]
(let [row (th/db-get :storage-object {:id (:media-id row1)} {::db/remove-deleted false})]
(t/is (nil? row))))))
(t/deftest create-file-thumbnail
@ -240,7 +240,7 @@
(t/is (nil? (sto/get-object storage (:media-id row1))))
(t/is (some? (sto/get-object storage (:media-id row2))))
(let [row (th/db-get :storage-object {:id (:media-id row1)} {::db/remove-deleted? false})]
(let [row (th/db-get :storage-object {:id (:media-id row1)} {::db/remove-deleted false})]
(t/is (some? (:deleted-at row))))
;; Run the storage gc deleted task, it should permanently delete
@ -320,6 +320,3 @@
(let [result (:result out)]
(t/is (contains? result "test-key-2"))))))

View file

@ -149,7 +149,7 @@
(let [row (th/db-get :team
{:id (:default-team-id prof)}
{::db/remove-deleted? false})]
{::db/remove-deleted false})]
(t/is (nil? (:deleted-at row))))
(let [result (th/run-task! :orphan-teams-gc {:min-age 0})]
@ -157,7 +157,7 @@
(let [row (th/db-get :team
{:id (:default-team-id prof)}
{::db/remove-deleted? false})]
{::db/remove-deleted false})]
(t/is (dt/instant? (:deleted-at row))))
;; query profile after delete

View file

@ -9,9 +9,6 @@
#?(:cljs ["./svg/optimizer.js" :as svgo])
#?(:clj [clojure.xml :as xml]
:cljs [tubax.core :as tubax])
#?(:clj [integrant.core :as ig])
#?(:clj [app.common.jsrt :as jsrt])
#?(:clj [app.common.logging :as l])
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.geom.matrix :as gmt]
@ -1053,21 +1050,6 @@
:height (d/parse-integer (:height attrs) 0)})))]
(reduce-nodes redfn [] svg-data )))
#?(:cljs
(defn optimize
([input] (optimize input nil))
([input options]
(svgo/optimize input (clj->js options))))
:clj
(defn optimize
[pool data]
(dm/assert! "expected a valid pool" (jsrt/pool? pool))
(dm/assert! "expect data to be a string" (string? data))
(jsrt/run! pool
(fn [context]
(jsrt/set! context "svgData" data)
(jsrt/eval! context "penpotSvgo.optimize(svgData, {})")))))
#?(:clj
(defn- secure-parser-factory
[^InputStream input ^XMLHandler handler]
@ -1091,15 +1073,9 @@
(dm/with-open [istream (IOUtils/toInputStream text "UTF-8")]
(xml/parse istream secure-parser-factory)))))
#?(:clj
(defmethod ig/init-key ::optimizer
[_ _]
(l/info :hint "initializing svg optimizer pool")
(let [init (jsrt/resource->source "app/common/svg/optimizer.js")]
(jsrt/pool :init init))))
#?(:clj
(defmethod ig/halt-key! ::optimizer
[_ pool]
(l/info :hint "stopping svg optimizer pool")
(.close ^java.lang.AutoCloseable pool)))
;; FIXME pass correct plugin set
#?(:cljs
(defn optimize
([input] (optimize input nil))
([input options]
(svgo/optimize input (clj->js options)))))
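
On the ClojureScript side only this thin wrapper remains; a hedged usage sketch follows (the namespace alias is an assumption, and per the FIXME above the final plugin set is still to be decided; the preset name mirrors the one the backend service passes).

;; Illustrative ClojureScript usage; `csvg` is assumed to alias the
;; namespace shown in this hunk. The safe-and-fast preset name matches
;; the backend call.
(require '[app.common.svg :as csvg])

(def optimized
  (csvg/optimize "<svg xmlns=\"http://www.w3.org/2000/svg\"><rect width=\"10\" height=\"10\"/></svg>"
                 {:plugins ["safeAndFastPreset"]}))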

View file

@ -29431,12 +29431,13 @@ const optimize = (input, config) => {
exports.optimize = optimize;
},{"./svgo/parser.js":322,"./svgo/plugins.js":324,"./svgo/stringifier.js":381,"./svgo/tools.js":383,"lodash/isPlainObject":308}],320:[function(require,module,exports){
},{"./svgo/parser.js":322,"./svgo/plugins.js":324,"./svgo/stringifier.js":382,"./svgo/tools.js":384,"lodash/isPlainObject":308}],320:[function(require,module,exports){
'use strict';
exports.builtin = [
require('./plugins/default.js'),
require('./plugins/safe.js'),
require('./plugins/safeAndFast.js'),
require('./plugins/addAttributesToSVGElement.js'),
require('./plugins/addClassesToSVGElement.js'),
require('./plugins/cleanupAttrs.js'),
@ -29489,7 +29490,7 @@ exports.builtin = [
require('./plugins/sortDefsChildren.js'),
];
},{"./plugins/addAttributesToSVGElement.js":328,"./plugins/addClassesToSVGElement.js":329,"./plugins/cleanupAttrs.js":331,"./plugins/cleanupEnableBackground.js":332,"./plugins/cleanupIds.js":333,"./plugins/cleanupListOfValues.js":334,"./plugins/cleanupNumericValues.js":335,"./plugins/collapseGroups.js":336,"./plugins/convertColors.js":337,"./plugins/convertEllipseToCircle.js":338,"./plugins/convertPathData.js":339,"./plugins/convertShapeToPath.js":340,"./plugins/convertStyleToAttrs.js":341,"./plugins/convertTransform.js":342,"./plugins/default.js":343,"./plugins/inlineStyles.js":344,"./plugins/mergePaths.js":345,"./plugins/mergeStyles.js":346,"./plugins/minifyStyles.js":347,"./plugins/moveElemsAttrsToGroup.js":348,"./plugins/moveGroupAttrsToElems.js":349,"./plugins/prefixIds.js":350,"./plugins/removeAttributesBySelector.js":351,"./plugins/removeAttrs.js":352,"./plugins/removeComments.js":353,"./plugins/removeDesc.js":354,"./plugins/removeDimensions.js":355,"./plugins/removeDoctype.js":356,"./plugins/removeEditorsNSData.js":357,"./plugins/removeElementsByAttr.js":358,"./plugins/removeEmptyAttrs.js":359,"./plugins/removeEmptyContainers.js":360,"./plugins/removeEmptyText.js":361,"./plugins/removeHiddenElems.js":362,"./plugins/removeMetadata.js":363,"./plugins/removeNonInheritableGroupAttrs.js":364,"./plugins/removeOffCanvasPaths.js":365,"./plugins/removeRasterImages.js":366,"./plugins/removeScriptElement.js":367,"./plugins/removeStyleElement.js":368,"./plugins/removeTitle.js":369,"./plugins/removeUnknownsAndDefaults.js":370,"./plugins/removeUnusedNS.js":371,"./plugins/removeUselessDefs.js":372,"./plugins/removeUselessStrokeAndFill.js":373,"./plugins/removeViewBox.js":374,"./plugins/removeXMLNS.js":375,"./plugins/removeXMLProcInst.js":376,"./plugins/reusePaths.js":377,"./plugins/safe.js":378,"./plugins/sortAttrs.js":379,"./plugins/sortDefsChildren.js":380}],321:[function(require,module,exports){
},{"./plugins/addAttributesToSVGElement.js":328,"./plugins/addClassesToSVGElement.js":329,"./plugins/cleanupAttrs.js":331,"./plugins/cleanupEnableBackground.js":332,"./plugins/cleanupIds.js":333,"./plugins/cleanupListOfValues.js":334,"./plugins/cleanupNumericValues.js":335,"./plugins/collapseGroups.js":336,"./plugins/convertColors.js":337,"./plugins/convertEllipseToCircle.js":338,"./plugins/convertPathData.js":339,"./plugins/convertShapeToPath.js":340,"./plugins/convertStyleToAttrs.js":341,"./plugins/convertTransform.js":342,"./plugins/default.js":343,"./plugins/inlineStyles.js":344,"./plugins/mergePaths.js":345,"./plugins/mergeStyles.js":346,"./plugins/minifyStyles.js":347,"./plugins/moveElemsAttrsToGroup.js":348,"./plugins/moveGroupAttrsToElems.js":349,"./plugins/prefixIds.js":350,"./plugins/removeAttributesBySelector.js":351,"./plugins/removeAttrs.js":352,"./plugins/removeComments.js":353,"./plugins/removeDesc.js":354,"./plugins/removeDimensions.js":355,"./plugins/removeDoctype.js":356,"./plugins/removeEditorsNSData.js":357,"./plugins/removeElementsByAttr.js":358,"./plugins/removeEmptyAttrs.js":359,"./plugins/removeEmptyContainers.js":360,"./plugins/removeEmptyText.js":361,"./plugins/removeHiddenElems.js":362,"./plugins/removeMetadata.js":363,"./plugins/removeNonInheritableGroupAttrs.js":364,"./plugins/removeOffCanvasPaths.js":365,"./plugins/removeRasterImages.js":366,"./plugins/removeScriptElement.js":367,"./plugins/removeStyleElement.js":368,"./plugins/removeTitle.js":369,"./plugins/removeUnknownsAndDefaults.js":370,"./plugins/removeUnusedNS.js":371,"./plugins/removeUselessDefs.js":372,"./plugins/removeUselessStrokeAndFill.js":373,"./plugins/removeViewBox.js":374,"./plugins/removeXMLNS.js":375,"./plugins/removeXMLProcInst.js":376,"./plugins/reusePaths.js":377,"./plugins/safe.js":378,"./plugins/safeAndFast.js":379,"./plugins/sortAttrs.js":380,"./plugins/sortDefsChildren.js":381}],321:[function(require,module,exports){
'use strict';
const isTag = (node) => {
@ -30102,7 +30103,7 @@ const stringifyPathData = ({ pathData, precision, disableSpaceAfterFlags }) => {
};
exports.stringifyPathData = stringifyPathData;
},{"./tools.js":383}],324:[function(require,module,exports){
},{"./tools.js":384}],324:[function(require,module,exports){
'use strict';
const { builtin } = require('./builtin.js');
@ -33826,7 +33827,7 @@ const applyMatrixToPathData = (pathData, matrix) => {
}
};
},{"../style.js":382,"../tools.js":383,"./_collections.js":325,"./_path.js":326,"./_transforms.js":327}],331:[function(require,module,exports){
},{"../style.js":383,"../tools.js":384,"./_collections.js":325,"./_path.js":326,"./_transforms.js":327}],331:[function(require,module,exports){
'use strict';
exports.name = 'cleanupAttrs';
@ -33948,7 +33949,7 @@ exports.fn = (root) => {
};
};
},{"../xast.js":384}],333:[function(require,module,exports){
},{"../xast.js":385}],333:[function(require,module,exports){
'use strict';
const { visitSkip } = require('../xast.js');
@ -34207,7 +34208,7 @@ exports.fn = (_root, params) => {
};
};
},{"../xast.js":384,"./_collections.js":325}],334:[function(require,module,exports){
},{"../xast.js":385,"./_collections.js":325}],334:[function(require,module,exports){
'use strict';
const { removeLeadingZero } = require('../tools.js');
@ -34345,7 +34346,7 @@ exports.fn = (_root, params) => {
};
};
},{"../tools.js":383}],335:[function(require,module,exports){
},{"../tools.js":384}],335:[function(require,module,exports){
'use strict';
const { removeLeadingZero } = require('../tools.js');
@ -34451,7 +34452,7 @@ exports.fn = (_root, params) => {
};
};
},{"../tools.js":383}],336:[function(require,module,exports){
},{"../tools.js":384}],336:[function(require,module,exports){
'use strict';
const { inheritableAttrs, elemsGroups } = require('./_collections.js');
@ -35838,7 +35839,7 @@ function data2Path(params, pathData) {
}, '');
}
},{"../style.js":382,"../tools.js":383,"../xast.js":384,"./_collections.js":325,"./_path.js":326,"./applyTransforms.js":330}],340:[function(require,module,exports){
},{"../style.js":383,"../tools.js":384,"../xast.js":385,"./_collections.js":325,"./_path.js":326,"./applyTransforms.js":330}],340:[function(require,module,exports){
'use strict';
const { stringifyPathData } = require('../path.js');
@ -36004,7 +36005,7 @@ exports.fn = (root, params) => {
};
};
},{"../path.js":323,"../xast.js":384}],341:[function(require,module,exports){
},{"../path.js":323,"../xast.js":385}],341:[function(require,module,exports){
'use strict';
const { attrsGroups } = require('./_collections');
@ -36506,7 +36507,7 @@ const smartRound = (precision, data) => {
return data;
};
},{"../tools.js":383,"./_transforms.js":327}],343:[function(require,module,exports){
},{"../tools.js":384,"./_transforms.js":327}],343:[function(require,module,exports){
'use strict';
const { createPreset } = require('../tools.js');
@ -36590,7 +36591,7 @@ const presetDefault = createPreset({
module.exports = presetDefault;
},{"../tools.js":383,"./cleanupAttrs.js":331,"./cleanupEnableBackground.js":332,"./cleanupIds.js":333,"./cleanupNumericValues.js":335,"./collapseGroups.js":336,"./convertColors.js":337,"./convertEllipseToCircle.js":338,"./convertPathData.js":339,"./convertShapeToPath.js":340,"./convertTransform.js":342,"./inlineStyles.js":344,"./mergePaths.js":345,"./mergeStyles.js":346,"./minifyStyles.js":347,"./moveElemsAttrsToGroup.js":348,"./moveGroupAttrsToElems.js":349,"./removeComments.js":353,"./removeDesc.js":354,"./removeDoctype.js":356,"./removeEditorsNSData.js":357,"./removeEmptyAttrs.js":359,"./removeEmptyContainers.js":360,"./removeEmptyText.js":361,"./removeHiddenElems.js":362,"./removeMetadata.js":363,"./removeNonInheritableGroupAttrs.js":364,"./removeTitle.js":369,"./removeUnknownsAndDefaults.js":370,"./removeUnusedNS.js":371,"./removeUselessDefs.js":372,"./removeUselessStrokeAndFill.js":373,"./removeViewBox.js":374,"./removeXMLProcInst.js":376,"./sortAttrs.js":379,"./sortDefsChildren.js":380}],344:[function(require,module,exports){
},{"../tools.js":384,"./cleanupAttrs.js":331,"./cleanupEnableBackground.js":332,"./cleanupIds.js":333,"./cleanupNumericValues.js":335,"./collapseGroups.js":336,"./convertColors.js":337,"./convertEllipseToCircle.js":338,"./convertPathData.js":339,"./convertShapeToPath.js":340,"./convertTransform.js":342,"./inlineStyles.js":344,"./mergePaths.js":345,"./mergeStyles.js":346,"./minifyStyles.js":347,"./moveElemsAttrsToGroup.js":348,"./moveGroupAttrsToElems.js":349,"./removeComments.js":353,"./removeDesc.js":354,"./removeDoctype.js":356,"./removeEditorsNSData.js":357,"./removeEmptyAttrs.js":359,"./removeEmptyContainers.js":360,"./removeEmptyText.js":361,"./removeHiddenElems.js":362,"./removeMetadata.js":363,"./removeNonInheritableGroupAttrs.js":364,"./removeTitle.js":369,"./removeUnknownsAndDefaults.js":370,"./removeUnusedNS.js":371,"./removeUselessDefs.js":372,"./removeUselessStrokeAndFill.js":373,"./removeViewBox.js":374,"./removeXMLProcInst.js":376,"./sortAttrs.js":380,"./sortDefsChildren.js":381}],344:[function(require,module,exports){
'use strict';
const csstree = require('css-tree');
@ -36937,7 +36938,7 @@ exports.fn = (root, params) => {
};
};
},{"../xast.js":384,"css-tree":25,"csso":138}],345:[function(require,module,exports){
},{"../xast.js":385,"css-tree":25,"csso":138}],345:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -37035,7 +37036,7 @@ exports.fn = (root, params) => {
};
};
},{"../style.js":382,"../xast.js":384,"./_path.js":326}],346:[function(require,module,exports){
},{"../style.js":383,"../xast.js":385,"./_path.js":326}],346:[function(require,module,exports){
'use strict';
const { visitSkip, detachNodeFromParent } = require('../xast.js');
@ -37119,7 +37120,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],347:[function(require,module,exports){
},{"../xast.js":385}],347:[function(require,module,exports){
'use strict';
const csso = require('csso');
@ -37364,7 +37365,7 @@ exports.fn = (root) => {
};
};
},{"../xast.js":384,"./_collections.js":325}],349:[function(require,module,exports){
},{"../xast.js":385,"./_collections.js":325}],349:[function(require,module,exports){
'use strict';
const { pathElems, referencesProps } = require('./_collections.js');
@ -37742,7 +37743,7 @@ exports.fn = (root, params) => {
return {};
};
},{"../xast.js":384}],352:[function(require,module,exports){
},{"../xast.js":385}],352:[function(require,module,exports){
'use strict';
exports.name = 'removeAttrs';
@ -37924,7 +37925,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],354:[function(require,module,exports){
},{"../xast.js":385}],354:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -37963,7 +37964,7 @@ exports.fn = (root, params) => {
};
};
},{"../xast.js":384}],355:[function(require,module,exports){
},{"../xast.js":385}],355:[function(require,module,exports){
'use strict';
exports.name = 'removeDimensions';
@ -38046,7 +38047,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],357:[function(require,module,exports){
},{"../xast.js":385}],357:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38110,7 +38111,7 @@ exports.fn = (_root, params) => {
};
};
},{"../xast.js":384,"./_collections.js":325}],358:[function(require,module,exports){
},{"../xast.js":385,"./_collections.js":325}],358:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38183,7 +38184,7 @@ exports.fn = (root, params) => {
};
};
},{"../xast.js":384}],359:[function(require,module,exports){
},{"../xast.js":385}],359:[function(require,module,exports){
'use strict';
const { attrsGroups } = require('./_collections.js');
@ -38270,7 +38271,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384,"./_collections.js":325}],361:[function(require,module,exports){
},{"../xast.js":385,"./_collections.js":325}],361:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38321,7 +38322,7 @@ exports.fn = (root, params) => {
};
};
},{"../xast.js":384}],362:[function(require,module,exports){
},{"../xast.js":385}],362:[function(require,module,exports){
'use strict';
const {
@ -38631,7 +38632,7 @@ exports.fn = (root, params) => {
};
};
},{"../path.js":323,"../style.js":382,"../xast.js":384}],363:[function(require,module,exports){
},{"../path.js":323,"../style.js":383,"../xast.js":385}],363:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38658,7 +38659,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],364:[function(require,module,exports){
},{"../xast.js":385}],364:[function(require,module,exports){
'use strict';
const {
@ -38815,7 +38816,7 @@ exports.fn = () => {
};
};
},{"../path.js":323,"../xast.js":384,"./_path.js":326}],366:[function(require,module,exports){
},{"../path.js":323,"../xast.js":385,"./_path.js":326}],366:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38846,7 +38847,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],367:[function(require,module,exports){
},{"../xast.js":385}],367:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38873,7 +38874,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],368:[function(require,module,exports){
},{"../xast.js":385}],368:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38900,7 +38901,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],369:[function(require,module,exports){
},{"../xast.js":385}],369:[function(require,module,exports){
'use strict';
const { detachNodeFromParent } = require('../xast.js');
@ -38927,7 +38928,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],370:[function(require,module,exports){
},{"../xast.js":385}],370:[function(require,module,exports){
'use strict';
const { visitSkip, detachNodeFromParent } = require('../xast.js');
@ -39135,7 +39136,7 @@ exports.fn = (root, params) => {
};
};
},{"../style.js":382,"../xast.js":384,"./_collections":325}],371:[function(require,module,exports){
},{"../style.js":383,"../xast.js":385,"./_collections":325}],371:[function(require,module,exports){
'use strict';
exports.name = 'removeUnusedNS';
@ -39252,7 +39253,7 @@ const collectUsefulNodes = (node, usefulNodes) => {
}
};
},{"../xast.js":384,"./_collections.js":325}],373:[function(require,module,exports){
},{"../xast.js":385,"./_collections.js":325}],373:[function(require,module,exports){
'use strict';
const { visit, visitSkip, detachNodeFromParent } = require('../xast.js');
@ -39390,7 +39391,7 @@ exports.fn = (root, params) => {
};
};
},{"../style.js":382,"../xast.js":384,"./_collections.js":325}],374:[function(require,module,exports){
},{"../style.js":383,"../xast.js":385,"./_collections.js":325}],374:[function(require,module,exports){
'use strict';
exports.name = 'removeViewBox';
@ -39497,7 +39498,7 @@ exports.fn = () => {
};
};
},{"../xast.js":384}],377:[function(require,module,exports){
},{"../xast.js":385}],377:[function(require,module,exports){
'use strict';
exports.name = 'reusePaths';
@ -39678,7 +39679,72 @@ const presetSafe = createPreset({
module.exports = presetSafe;
},{"../tools.js":383,"./cleanupAttrs.js":331,"./cleanupEnableBackground.js":332,"./cleanupIds.js":333,"./cleanupNumericValues.js":335,"./collapseGroups.js":336,"./convertColors.js":337,"./convertEllipseToCircle.js":338,"./convertPathData.js":339,"./convertTransform.js":342,"./inlineStyles.js":344,"./mergePaths.js":345,"./mergeStyles.js":346,"./minifyStyles.js":347,"./removeComments.js":353,"./removeDesc.js":354,"./removeDoctype.js":356,"./removeEditorsNSData.js":357,"./removeEmptyAttrs.js":359,"./removeEmptyContainers.js":360,"./removeEmptyText.js":361,"./removeHiddenElems.js":362,"./removeMetadata.js":363,"./removeNonInheritableGroupAttrs.js":364,"./removeTitle.js":369,"./removeUnknownsAndDefaults.js":370,"./removeUnusedNS.js":371,"./removeUselessDefs.js":372,"./removeUselessStrokeAndFill.js":373,"./removeViewBox.js":374,"./removeXMLProcInst.js":376,"./sortDefsChildren.js":380}],379:[function(require,module,exports){
},{"../tools.js":384,"./cleanupAttrs.js":331,"./cleanupEnableBackground.js":332,"./cleanupIds.js":333,"./cleanupNumericValues.js":335,"./collapseGroups.js":336,"./convertColors.js":337,"./convertEllipseToCircle.js":338,"./convertPathData.js":339,"./convertTransform.js":342,"./inlineStyles.js":344,"./mergePaths.js":345,"./mergeStyles.js":346,"./minifyStyles.js":347,"./removeComments.js":353,"./removeDesc.js":354,"./removeDoctype.js":356,"./removeEditorsNSData.js":357,"./removeEmptyAttrs.js":359,"./removeEmptyContainers.js":360,"./removeEmptyText.js":361,"./removeHiddenElems.js":362,"./removeMetadata.js":363,"./removeNonInheritableGroupAttrs.js":364,"./removeTitle.js":369,"./removeUnknownsAndDefaults.js":370,"./removeUnusedNS.js":371,"./removeUselessDefs.js":372,"./removeUselessStrokeAndFill.js":373,"./removeViewBox.js":374,"./removeXMLProcInst.js":376,"./sortDefsChildren.js":381}],379:[function(require,module,exports){
'use strict';
const { createPreset } = require('../tools.js');
const removeDoctype = require('./removeDoctype.js');
const removeXMLProcInst = require('./removeXMLProcInst.js');
const removeComments = require('./removeComments.js');
const removeMetadata = require('./removeMetadata.js');
const removeEditorsNSData = require('./removeEditorsNSData.js');
const cleanupAttrs = require('./cleanupAttrs.js');
const mergeStyles = require('./mergeStyles.js');
const minifyStyles = require('./minifyStyles.js');
const cleanupIds = require('./cleanupIds.js');
const removeUselessDefs = require('./removeUselessDefs.js');
const cleanupNumericValues = require('./cleanupNumericValues.js');
const convertColors = require('./convertColors.js');
const removeUnknownsAndDefaults = require('./removeUnknownsAndDefaults.js');
const removeNonInheritableGroupAttrs = require('./removeNonInheritableGroupAttrs.js');
const removeUselessStrokeAndFill = require('./removeUselessStrokeAndFill.js');
const removeViewBox = require('./removeViewBox.js');
const cleanupEnableBackground = require('./cleanupEnableBackground.js');
const removeHiddenElems = require('./removeHiddenElems.js');
const removeEmptyText = require('./removeEmptyText.js');
const collapseGroups = require('./collapseGroups.js');
const removeEmptyAttrs = require('./removeEmptyAttrs.js');
const removeEmptyContainers = require('./removeEmptyContainers.js');
const mergePaths = require('./mergePaths.js');
const removeUnusedNS = require('./removeUnusedNS.js');
const sortDefsChildren = require('./sortDefsChildren.js');
const removeTitle = require('./removeTitle.js');
const removeDesc = require('./removeDesc.js');
const presetSafe = createPreset({
name: 'safeAndFastPreset',
plugins: [
removeDoctype,
removeXMLProcInst,
removeComments,
removeMetadata,
removeEditorsNSData,
cleanupAttrs,
mergeStyles,
cleanupIds,
removeUselessDefs,
cleanupNumericValues,
convertColors,
removeUnknownsAndDefaults,
removeNonInheritableGroupAttrs,
removeUselessStrokeAndFill,
removeViewBox,
cleanupEnableBackground,
removeHiddenElems,
removeEmptyText,
collapseGroups,
removeEmptyAttrs,
removeEmptyContainers,
removeUnusedNS,
removeTitle,
removeDesc,
],
});
module.exports = presetSafe;
},{"../tools.js":384,"./cleanupAttrs.js":331,"./cleanupEnableBackground.js":332,"./cleanupIds.js":333,"./cleanupNumericValues.js":335,"./collapseGroups.js":336,"./convertColors.js":337,"./mergePaths.js":345,"./mergeStyles.js":346,"./minifyStyles.js":347,"./removeComments.js":353,"./removeDesc.js":354,"./removeDoctype.js":356,"./removeEditorsNSData.js":357,"./removeEmptyAttrs.js":359,"./removeEmptyContainers.js":360,"./removeEmptyText.js":361,"./removeHiddenElems.js":362,"./removeMetadata.js":363,"./removeNonInheritableGroupAttrs.js":364,"./removeTitle.js":369,"./removeUnknownsAndDefaults.js":370,"./removeUnusedNS.js":371,"./removeUselessDefs.js":372,"./removeUselessStrokeAndFill.js":373,"./removeViewBox.js":374,"./removeXMLProcInst.js":376,"./sortDefsChildren.js":381}],380:[function(require,module,exports){
'use strict';
exports.name = 'sortAttrs';
@ -39778,7 +39844,7 @@ exports.fn = (_root, params) => {
};
};
},{}],380:[function(require,module,exports){
},{}],381:[function(require,module,exports){
'use strict';
exports.name = 'sortDefsChildren';
@ -39836,7 +39902,7 @@ exports.fn = () => {
};
};
},{}],381:[function(require,module,exports){
},{}],382:[function(require,module,exports){
'use strict';
const { textElems } = require('./plugins/_collections.js');
@ -40070,7 +40136,7 @@ const stringifyText = (node, config, state) => {
);
};
},{"./plugins/_collections.js":325}],382:[function(require,module,exports){
},{"./plugins/_collections.js":325}],383:[function(require,module,exports){
'use strict';
const csstree = require('css-tree');
@ -40300,7 +40366,7 @@ const computeStyle = (stylesheet, node) => {
};
exports.computeStyle = computeStyle;
},{"./plugins/_collections.js":325,"./xast.js":384,"css-tree":25,"csso":138}],383:[function(require,module,exports){
},{"./plugins/_collections.js":325,"./xast.js":385,"css-tree":25,"csso":138}],384:[function(require,module,exports){
(function (Buffer){(function (){
'use strict';
@ -40455,7 +40521,7 @@ exports.invokePlugins = invokePlugins;
exports.createPreset = createPreset;
}).call(this)}).call(this,require("buffer").Buffer)
},{"./xast.js":384,"buffer":4}],384:[function(require,module,exports){
},{"./xast.js":385,"buffer":4}],385:[function(require,module,exports){
'use strict';
const { selectAll, selectOne, is } = require('css-select');

View file

@ -255,7 +255,11 @@
(dissoc :component-root)))
[new-root-shape new-shapes updated-shapes]
(ctst/clone-object shape nil objects update-new-shape update-original-shape)
(ctst/clone-shape shape
nil
objects
:update-new-shape update-new-shape
:update-original-shape update-original-shape)
;; If frame-id points to a shape inside the component, remap it to the
;; corresponding new frame shape. If not, set it to nil.
@ -339,15 +343,14 @@
(dissoc :component-root))))
[new-shape new-shapes _]
(ctst/clone-object component-shape
frame-id
(if components-v2 (:objects component-page) (:objects component))
update-new-shape
(fn [object _] object)
force-id
keep-ids?
frame-id)
(ctst/clone-shape component-shape
frame-id
(if components-v2 (:objects component-page) (:objects component))
:update-new-shape update-new-shape
:force-id force-id
:keep-ids? keep-ids?
:frame-id frame-id
:dest-objects (:objects container))
;; Fix empty parent-id and remap all grid cells to the new ids.
remap-ids

View file

@ -342,91 +342,89 @@
[frame]
(not (mth/almost-zero? (:rotation frame 0))))
(defn clone-object
"Gets a copy of the object and all its children, with new ids and with
(defn clone-shape
"Gets a copy of the shape and all its children, with new ids and with
the parent-children links correctly set. Accepts functions to apply
further transformations to the cloned objects and the original ones.
further transformations to the cloned shapes and the original ones.
Returns the cloned object, the list of all new objects (including
the cloned one), and possibly a list of original objects modified.
Returns the cloned shape, the list of all new shapes (including
the cloned one), and possibly a list of original shapes modified.
The list of objects are returned in tree traversal order, respecting
The lists of shapes are returned in tree traversal order, respecting
the order of the children of each parent."
([object parent-id objects]
(clone-object object parent-id objects (fn [object _] object) (fn [object _] object) nil false nil))
([object parent-id objects update-new-object]
(clone-object object parent-id objects update-new-object (fn [object _] object) nil false nil))
([object parent-id objects update-new-object update-original-object]
(clone-object object parent-id objects update-new-object update-original-object nil false nil))
([object parent-id objects update-new-object update-original-object force-id]
(clone-object object parent-id objects update-new-object update-original-object force-id false nil))
([object parent-id objects update-new-object update-original-object force-id keep-ids?]
(clone-object object parent-id objects update-new-object update-original-object force-id keep-ids? nil))
([object parent-id objects update-new-object update-original-object force-id keep-ids? frame-id]
(let [new-id (cond
(some? force-id) force-id
keep-ids? (:id object)
:else (uuid/next))
[shape parent-id objects & {:keys [update-new-shape update-original-shape force-id keep-ids? frame-id dest-objects]
:or {update-new-shape (fn [shape _] shape)
update-original-shape (fn [shape _] shape)
force-id nil
keep-ids? false
frame-id nil
dest-objects objects}}]
(let [new-id (cond
(some? force-id) force-id
keep-ids? (:id shape)
:else (uuid/next))
;; Assign the correct frame-id for the given parent. It's the parent-id (if parent is frame)
;; or the parent's frame-id otherwise. Only for the first cloned shapes. In recursive calls
;; this is not needed.
frame-id (cond
(and (nil? frame-id) (cfh/frame-shape? objects parent-id))
parent-id
frame-id (cond
(and (nil? frame-id) (cfh/frame-shape? dest-objects parent-id))
parent-id
(nil? frame-id)
(dm/get-in objects [parent-id :frame-id])
(nil? frame-id)
(dm/get-in dest-objects [parent-id :frame-id] uuid/zero)
:else
frame-id)]
:else
frame-id)]
(loop [child-ids (seq (:shapes object))
new-direct-children []
new-children []
updated-children []]
(loop [child-ids (seq (:shapes shape))
new-direct-children []
new-children []
updated-children []]
(if (empty? child-ids)
(let [new-object (cond-> object
:always
(assoc :id new-id
:parent-id parent-id
:frame-id frame-id)
(if (empty? child-ids)
(let [new-shape (cond-> shape
:always
(assoc :id new-id
:parent-id parent-id
:frame-id frame-id)
(some? (:shapes object))
(assoc :shapes (mapv :id new-direct-children)))
(some? (:shapes shape))
(assoc :shapes (mapv :id new-direct-children)))
new-object (update-new-object new-object object)
new-objects (into [new-object] new-children)
new-shape (update-new-shape new-shape shape)
new-shapes (into [new-shape] new-children)
updated-object (update-original-object object new-object)
updated-objects (if (identical? object updated-object)
updated-children
(into [updated-object] updated-children))]
updated-shape (update-original-shape shape new-shape)
updated-shapes (if (identical? shape updated-shape)
updated-children
(into [updated-shape] updated-children))]
[new-object new-objects updated-objects])
[new-shape new-shapes updated-shapes])
(let [child-id (first child-ids)
child (get objects child-id)
_ (dm/assert! (some? child))
frame-id-child (if (cfh/frame-shape? object)
new-id
frame-id)
(let [child-id (first child-ids)
child (get objects child-id)
_ (dm/assert! (some? child))
frame-id-child (if (cfh/frame-shape? shape)
new-id
frame-id)
[new-child new-child-objects updated-child-objects]
(clone-object child new-id objects update-new-object update-original-object nil keep-ids? frame-id-child)]
[new-child new-child-shapes updated-child-shapes]
(clone-shape child
new-id
objects
:update-new-shape update-new-shape
:update-original-shape update-original-shape
:force-id nil
:keep-ids? keep-ids?
:frame-id frame-id-child
:dest-objects dest-objects)]
(recur
(next child-ids)
(into new-direct-children [new-child])
(into new-children new-child-objects)
(into updated-children updated-child-objects))))))))
(recur
(next child-ids)
(into new-direct-children [new-child])
(into new-children new-child-shapes)
(into updated-children updated-child-shapes)))))))
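
Since callers now pass options by keyword, here is a hedged sketch of the new calling convention (the helper name and the name transform are illustrative; unspecified options keep the defaults declared above).

;; Illustrative only: clones `shape` under its current parent within
;; `objects`, customizing just the new shape; every other option falls
;; back to the clone-shape defaults.
(defn duplicate-shape
  [shape objects]
  (let [[new-shape new-shapes _updated-shapes]
        (clone-shape shape
                     (:parent-id shape)
                     objects
                     :update-new-shape (fn [shape' _original]
                                         (update shape' :name str " copy")))]
    {:root new-shape
     :shapes new-shapes}))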
(defn generate-shape-grid
"Generate a sequence of positions that lays out the list of

View file

@ -98,11 +98,11 @@
(gsh/move delta)))
[new-instance-shape new-instance-shapes _]
(ctst/clone-object main-instance-shape
(:parent-id main-instance-shape)
(:objects main-instance-page)
update-new-shape
update-original-shape)
(ctst/clone-shape main-instance-shape
(:parent-id main-instance-shape)
(:objects main-instance-page)
:update-new-shape update-new-shape
:update-original-shape update-original-shape)
remap-frame
(fn [shape]
@ -134,10 +134,9 @@
(let [component-root (d/seek #(nil? (:parent-id %)) (vals (:objects component)))
[new-component-shape new-component-shapes _]
(ctst/clone-object component-root
nil
(get component :objects)
identity)]
(ctst/clone-shape component-root
nil
(get component :objects))]
[new-component-shape new-component-shapes nil nil]))))
@ -976,11 +975,12 @@
original-shape)
[_ new-shapes _]
(ctst/clone-object component-shape
(ctst/clone-shape component-shape
(:id parent-shape)
(get component-page :objects)
update-new-shape
update-original-shape)
:update-new-shape update-new-shape
:update-original-shape update-original-shape
:dest-objects (get container :objects))
add-obj-change (fn [changes shape']
(update changes :redo-changes conj
@ -1038,11 +1038,11 @@
original-shape))
[_new-shape new-shapes updated-shapes]
(ctst/clone-object shape
(:id component-parent-shape)
(get page :objects)
update-new-shape
update-original-shape)
(ctst/clone-shape shape
(:id component-parent-shape)
(get page :objects)
:update-new-shape update-new-shape
:update-original-shape update-original-shape)
add-obj-change (fn [changes shape']
(update changes :redo-changes conj