0
Fork 0
mirror of https://github.com/penpot/penpot.git synced 2025-04-02 10:01:34 -05:00

Merge pull request #4077 from penpot/niwinz-staging-binfile-join

♻️ Unify binfile exportation code
This commit is contained in:
Alejandro 2024-01-30 16:43:17 +01:00 committed by GitHub
commit 86f09fa028
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 1793 additions and 2001 deletions

View file

@ -145,17 +145,6 @@ Debug Main Page
</small>
</div>
<div class="row">
<label>Ignore index errors?</label>
<input type="checkbox" name="ignore-index-errors" checked/>
<br />
<small>
Do not break on index lookup errors (remap operation).
Useful when importing a broken file that has broken
relations or missing pieces.
</small>
</div>
<div class="row">
<input type="submit" name="upload" value="Upload" />
</div>

View file

@ -0,0 +1,491 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.common
"A binfile related file processing common code, used for different
binfile format implementations and management rpc methods."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.files.defaults :as cfd]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as fval]
[app.common.logging :as l]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.features.components-v2 :as feat.compv2]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[clojure.walk :as walk]
[cuerdas.core :as str]))
(set! *warn-on-reflection* true)

;; Dynamic state shared across the binfile import/export pipeline;
;; bound to a volatile map (holding e.g. :index, :files, :media).
(def ^:dynamic *state* nil)

;; Dynamic options map for the current operation (e.g. ::overwrite).
(def ^:dynamic *options* nil)

;; Transducer that extracts the :id attribute from each item.
(def xf-map-id
  (map :id))

;; Transducer that extracts every media/storage related id from an item
;; (media, thumbnail and the font file variant ids), keeping only the
;; values that are actual uuids.
(def xf-map-media-id
  (comp
   (mapcat (juxt :media-id
                 :thumbnail-id
                 :woff1-file-id
                 :woff2-file-id
                 :ttf-file-id
                 :otf-file-id))
   (filter uuid?)))

;; Like `into`, but treats a nil destination as an empty vector.
(def into-vec
  (fnil into []))

;; Like `conj`, but treats a nil destination as an empty vector.
(def conj-vec
  (fnil conj []))
(defn collect-storage-objects
  "Accumulate every media/storage id found on `items` into the
  :storage-objects entry of the `state` map."
  [state items]
  (let [ids (into (:storage-objects state) xf-map-media-id items)]
    (assoc state :storage-objects ids)))

(defn collect-summary
  "Accumulate every media/storage id found on `items` under `key` of
  the `state` map."
  [state key items]
  (let [ids (into (get state key) xf-map-media-id items)]
    (assoc state key ids)))
(defn lookup-index
  "Resolve `id` against the remap index stored on *state*; returns the
  remapped id when an entry exists, the original `id` otherwise, or nil
  when `id` itself is nil."
  [id]
  (when id
    (let [val (get-in @*state* [:index id])]
      (l/trc :fn "lookup-index" :id (str id) :result (some-> val str) ::l/sync? true)
      ;; Fall back to the original id when no remap entry exists.
      (or val id))))

(defn remap-id
  "Remap the value stored under `key` of `item` through the *state*
  index, only when the key is actually present."
  [item key]
  (cond-> item
    (contains? item key)
    (update key lookup-index)))
(defn- index-object
  "Register remap entries on `index` for every id extracted from `obj`
  with the provided `attrs` accessor fns. Under the ::overwrite option
  ids are mapped to themselves; otherwise a fresh uuid is assigned."
  [index obj & attrs]
  (reduce (fn [index attr-fn]
            (let [old-id (attr-fn obj)
                  new-id (if (::overwrite *options*) old-id (uuid/next))]
              (assoc index old-id new-id)))
          index
          attrs))

(defn update-index
  "Populate `index` with remap entries for every item of `coll`, using
  `attr` (default: identity) to extract each item's id."
  ([index coll]
   (update-index index coll identity))
  ([index coll attr]
   (reduce #(index-object %1 %2 attr) index coll)))
(defn decode-row
  "A generic decode row helper: unpack the `features` pg-array into a
  set and the `data` blob into a Clojure value, when present."
  [{:keys [data features] :as row}]
  (as-> row $
    (if features (assoc $ :features (db/decode-pgarray features #{})) $)
    (if data (assoc $ :data (blob/decode data)) $)))
(defn get-file
  "Retrieve a file by id (soft-deleted ones included), decoding the row
  and fully resolving fdata pointers and objects into plain maps.
  Returns nil when the file does not exist."
  [cfg file-id]
  (db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
                 ;; Bind the pointer loader so deref of pointer-map
                 ;; entries can fetch them from the database.
                 (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
                   (when-let [file (db/get* conn :file {:id file-id}
                                            {::db/remove-deleted false})]
                     (-> file
                         (decode-row)
                         (update :data feat.fdata/process-pointers deref)
                         (update :data feat.fdata/process-objects (partial into {})))))))) 

(defn get-project
  "Retrieve a project row by id."
  [cfg project-id]
  (db/get cfg :project {:id project-id}))

(defn get-team
  "Retrieve a team row by id, with features/data decoded."
  [cfg team-id]
  (-> (db/get cfg :team {:id team-id})
      (decode-row)))

(defn get-fonts
  "Retrieve all non-deleted font variants of the given team."
  [cfg team-id]
  (db/query cfg :team-font-variant
            {:team-id team-id
             :deleted-at nil}))
(defn get-files-rels
  "Given a set of file-id's, return all matching relations with the libraries"
  [cfg ids]
  (dm/assert!
   "expected a set of uuids"
   (and (set? ids)
        (every? uuid? ids)))

  (db/run! cfg (fn [{:keys [::db/conn]}]
                 (let [ids (db/create-array conn "uuid" ids)
                       ;; Only keep relations whose library file still
                       ;; exists (not soft-deleted).
                       sql (str "SELECT flr.* FROM file_library_rel AS flr "
                                " JOIN file AS l ON (flr.library_file_id = l.id) "
                                " WHERE flr.file_id = ANY(?) AND l.deleted_at IS NULL")]
                   (db/exec! conn [sql ids])))))
;; NOTE: Will be used in future, commented for satisfy linter
;; Recursive CTE: starting from the libraries directly linked by the
;; given files, transitively collect every library dependency.
(def ^:private sql:get-libraries
  "WITH RECURSIVE libs AS (
     SELECT fl.id
       FROM file AS fl
       JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
      WHERE flr.file_id = ANY(?)
     UNION
     SELECT fl.id
       FROM file AS fl
       JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
       JOIN libs AS l ON (flr.file_id = l.id)
   )
   SELECT DISTINCT l.id
     FROM libs AS l")

(defn get-libraries
  "Get all libraries ids related to provided file ids"
  [cfg ids]
  (db/run! cfg (fn [{:keys [::db/conn]}]
                 (let [ids' (db/create-array conn "uuid" ids)]
                   (->> (db/exec! conn [sql:get-libraries ids'])
                        (into #{} xf-map-id))))))
(defn get-file-object-thumbnails
  "Return all file object thumbnails for a given file."
  [cfg file-id]
  (db/query cfg :file-tagged-object-thumbnail
            {:file-id file-id
             :deleted-at nil}))

(defn get-file-thumbnail
  "Return the thumbnail for the specified file-id"
  [cfg {:keys [id revn]}]
  ;; NOTE(review): the `:data nil` criteria presumably selects rows whose
  ;; inline data column is NULL (thumbnail stored in the storage
  ;; subsystem) — confirm against the schema.
  (db/get* cfg :file-thumbnail
           {:file-id id
            :revn revn
            :data nil}
           {::sql/columns [:media-id :file-id :revn]}))
;; Transducer over page/component containers: walk all their shape
;; objects and emit every media id referenced from fill images, shape
;; metadata, stroke images and rich-text content fills.
(def ^:private
  xform:collect-media-id
  (comp
   (map :objects)
   (mapcat vals)
   (mapcat (fn [obj]
             ;; NOTE: because of some bug, we ended with
             ;; many shape types having the ability to
             ;; have fill-image attribute (which initially
             ;; designed for :path shapes).
             (sequence
              (keep :id)
              (concat [(:fill-image obj)
                       (:metadata obj)]
                      (map :fill-image (:fills obj))
                      (map :stroke-image (:strokes obj))
                      (->> (:content obj)
                           (tree-seq map? :children)
                           (mapcat :fills)
                           (map :fill-image))))))))
(defn collect-used-media
  "Given a fdata (file data), returns all media references."
  [data]
  (let [pages      (vals (:pages-index data))
        components (vals (:components data))]
    (-> (into #{} xform:collect-media-id (concat pages components))
        (into (keys (:media data))))))
(defn get-file-media
  "Return all file-media-object rows referenced by the file's data,
  with :file-id forced to the file being processed."
  [cfg {:keys [data id] :as file}]
  (db/run! cfg (fn [{:keys [::db/conn]}]
                 (let [ids (collect-used-media data)
                       ids (db/create-array conn "uuid" ids)
                       sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]

                   ;; We assoc the file-id again to the file-media-object row
                   ;; because there are cases that used objects refer to other
                   ;; files and we need to ensure in the exportation process that
                   ;; all ids matches
                   (->> (db/exec! conn [sql ids])
                        (mapv #(assoc % :file-id id)))))))
;; NOTE(review): unlike the project/file queries below, this one does
;; not filter soft-deleted rows — confirm that is intentional.
(def ^:private sql:get-team-files
  "SELECT f.id FROM file AS f
     JOIN project AS p ON (p.id = f.project_id)
    WHERE p.team_id = ?")

(defn get-team-files
  "Get a set of file ids for the specified team-id"
  [{:keys [::db/conn]} team-id]
  (->> (db/exec! conn [sql:get-team-files team-id])
       (into #{} xf-map-id)))

(def ^:private sql:get-team-projects
  "SELECT p.id FROM project AS p
    WHERE p.team_id = ?
      AND p.deleted_at IS NULL")

(defn get-team-projects
  "Get a set of project ids for the team"
  [{:keys [::db/conn]} team-id]
  (->> (db/exec! conn [sql:get-team-projects team-id])
       (into #{} xf-map-id)))

(def ^:private sql:get-project-files
  "SELECT f.id FROM file AS f
    WHERE f.project_id = ?
      AND f.deleted_at IS NULL")

(defn get-project-files
  "Get a set of file ids for the project"
  [{:keys [::db/conn]} project-id]
  (->> (db/exec! conn [sql:get-project-files project-id])
       (into #{} xf-map-id)))
(defn- relink-shapes
  "A function responsible to analyze all file data and
  replace the old :component-file reference with the new
  ones, using the provided file-index."
  [data]
  (letfn [(process-map-form [form]
            (cond-> form
              ;; Relink image shapes
              (and (map? (:metadata form))
                   (= :image (:type form)))
              (update-in [:metadata :id] lookup-index)

              ;; Relink paths with fill image
              (map? (:fill-image form))
              (update-in [:fill-image :id] lookup-index)

              ;; This covers old shapes and the new :fills.
              (uuid? (:fill-color-ref-file form))
              (update :fill-color-ref-file lookup-index)

              ;; This covers the old shapes and the new :strokes
              ;; FIX: the guard previously checked the nonexistent
              ;; :storage-color-ref-file key, so stroke color refs were
              ;; never remapped.
              (uuid? (:stroke-color-ref-file form))
              (update :stroke-color-ref-file lookup-index)

              ;; This covers all text shapes that have typography referenced
              (uuid? (:typography-ref-file form))
              (update :typography-ref-file lookup-index)

              ;; This covers the component instance links
              (uuid? (:component-file form))
              (update :component-file lookup-index)

              ;; This covers the shadows and grids (they have directly
              ;; the :file-id prop)
              (uuid? (:file-id form))
              (update :file-id lookup-index)))

          (process-form [form]
            (if (map? form)
              (try
                (process-map-form form)
                (catch Throwable cause
                  ;; Log the offending form before rethrowing so broken
                  ;; data can be located on import failures.
                  (l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
                  (throw cause)))
              form))]

    (walk/postwalk process-form data)))
(defn- relink-media
  "A function responsible of process the :media attr of file data and
  remap the old ids with the new ones."
  [media]
  ;; reduce-kv iterates the ORIGINAL map while the accumulator receives
  ;; the remapped entries.
  ;; NOTE(review): when the remapped id equals the original key, the
  ;; dissoc removes the just-assoc'ed entry — confirm expected behavior
  ;; under the ::overwrite option where ids map to themselves.
  (reduce-kv (fn [res k v]
               (let [id (lookup-index k)]
                 (if (uuid? id)
                   (-> res
                       (assoc id (assoc v :id id))
                       (dissoc k))
                   res)))
             media
             media))
(defn- relink-colors
  "A function responsible of process the :colors attr of file data and
  remap the old ids with the new ones."
  [colors]
  (reduce (fn [res k]
            (if (:image (get colors k))
              (update-in res [k :image :id] lookup-index)
              res))
          colors
          (keys colors)))
(defn embed-assets
  "Embed into `data` (file data) every asset referenced from external
  libraries, rewriting the references so they point at `file-id` itself."
  [cfg data file-id]
  (letfn [;; Collect the external ref into `state` and repoint the form
          ;; to the current file.
          (walk-map-form [form state]
            (cond
              (uuid? (:fill-color-ref-file form))
              (do
                (vswap! state conj [(:fill-color-ref-file form) :colors (:fill-color-ref-id form)])
                (assoc form :fill-color-ref-file file-id))

              (uuid? (:stroke-color-ref-file form))
              (do
                (vswap! state conj [(:stroke-color-ref-file form) :colors (:stroke-color-ref-id form)])
                (assoc form :stroke-color-ref-file file-id))

              (uuid? (:typography-ref-file form))
              (do
                (vswap! state conj [(:typography-ref-file form) :typographies (:typography-ref-id form)])
                (assoc form :typography-ref-file file-id))

              (uuid? (:component-file form))
              (do
                (vswap! state conj [(:component-file form) :components (:component-id form)])
                (assoc form :component-file file-id))

              :else
              form))

          (process-group-of-assets [data [lib-id items]]
            ;; NOTE: there is a possibility that shape refers to an
            ;; non-existant file because the file was removed. In this
            ;; case we just ignore the asset.
            (if-let [lib (get-file cfg lib-id)]
              (reduce (partial process-asset lib) data items)
              data))

          ;; Copy one asset from the library into the matching bucket of
          ;; this file's data.
          (process-asset [lib data [bucket asset-id]]
            (let [asset (get-in lib [:data bucket asset-id])
                  ;; Add a special case for colors that need to have
                  ;; correctly set the :file-id prop (pending of the
                  ;; refactor that will remove it).
                  asset (cond-> asset
                          (= bucket :colors) (assoc :file-id file-id))]
              (update data bucket assoc asset-id asset)))]

    (let [assets (volatile! [])]
      (walk/postwalk #(cond-> % (map? %) (walk-map-form assets)) data)
      (->> (deref assets)
           ;; Keep only refs pointing to files other than this one.
           (filter #(as-> (first %) $ (and (uuid? $) (not= $ file-id))))
           (d/group-by first rest)
           (reduce (partial process-group-of-assets) data)))))
(defn process-file
  "Prepare an imported file for persistence: normalize the fdata
  version, run the file migrations and relink every internal reference
  through the *state* remap index."
  [{:keys [id] :as file}]
  (-> file
      (update :data (fn [fdata]
                      (-> fdata
                          (assoc :id id)
                          (dissoc :recent-colors)
                          ;; Clamp the version so migrations run from a
                          ;; known baseline.
                          (cond-> (> (:version fdata) cfd/version)
                            (assoc :version cfd/version))
                          ;; FIXME: We're temporarily activating all
                          ;; migrations because a problem in the
                          ;; environments messed up with the version
                          ;; numbers When this problem is fixed delete
                          ;; the following line
                          (cond-> (> (:version fdata) 22)
                            (assoc :version 22)))))
      (fmg/migrate-file)
      (update :data (fn [fdata]
                      (-> fdata
                          (update :pages-index relink-shapes)
                          (update :components relink-shapes)
                          (update :media relink-media)
                          (update :colors relink-colors)
                          (d/without-nils))))))
(defn- upsert-file!
  "Insert the file row, or — when a row with the same id already
  exists — update only its data blob (overwrite mode)."
  [conn file]
  ;; NOTE(review): on conflict only `data` is replaced; name, revn and
  ;; the timestamps keep their previous values — confirm intended.
  (let [sql (str "INSERT INTO file (id, project_id, name, revn, is_shared, data, created_at, modified_at) "
                 "VALUES (?, ?, ?, ?, ?, ?, ?, ?) "
                 "ON CONFLICT (id) DO UPDATE SET data=?")]
    (db/exec-one! conn [sql
                        (:id file)
                        (:project-id file)
                        (:name file)
                        (:revn file)
                        (:is-shared file)
                        (:data file)
                        (:created-at file)
                        (:modified-at file)
                        (:data file)])))
(defn persist-file!
  "Applies all the final validations and persists the file. Returns the
  file as it was before database encoding."
  [{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
  (dm/assert!
   "expected valid timestamp"
   (dt/instant? timestamp))

  (let [file   (-> file
                   (assoc :created-at timestamp)
                   (assoc :modified-at timestamp)
                   ;; Small grace period before library sync kicks in.
                   (assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
                   (update :features
                           (fn [features]
                             (let [features (cfeat/check-supported-features! features)]
                               ;; Merge in the team features configured on
                               ;; cfg, minus the frontend-only ones.
                               (-> (::features cfg #{})
                                   (set/difference cfeat/frontend-only-features)
                                   (set/union features))))))

        ;; Hard validation: raises on schema errors when the flag is set.
        _      (when (contains? cf/flags :file-schema-validation)
                 (fval/validate-file-schema! file))

        ;; Soft validation: only logs schema errors.
        _      (when (contains? cf/flags :soft-file-schema-validation)
                 (let [result (ex/try! (fval/validate-file-schema! file))]
                   (when (ex/exception? result)
                     (l/error :hint "file schema validation error" :cause result))))

        file   (if (contains? (:features file) "fdata/objects-map")
                 (feat.fdata/enable-objects-map file)
                 file)

        file   (if (contains? (:features file) "fdata/pointer-map")
                 (binding [pmap/*tracked* (pmap/create-tracked)]
                   (let [file (feat.fdata/enable-pointer-map file)]
                     (feat.fdata/persist-pointers! cfg id)
                     file))
                 file)

        params (-> file
                   (update :features db/encode-pgarray conn "text")
                   (update :data blob/encode))]

    (if (::overwrite cfg)
      (upsert-file! conn params)
      (db/insert! conn :file params ::db/return-keys false))

    file))
(defn apply-pending-migrations!
  "Apply already registered pending migrations to files collected on the
  *state* :pending-to-migrate entry. Raises an internal error for
  features without a defined migration."
  [cfg]
  (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
    (case feature
      "components/v2"
      (feat.compv2/migrate-file! cfg file-id :validate? (::validate cfg true))

      ;; This feature needs no explicit migration step.
      "fdata/shape-data-type"
      nil

      ;; FIX: error hint previously misspelled "migration".
      (ex/raise :type :internal
                :code :no-migration-defined
                :hint (str/ffmt "no migration for feature '%' on file importation" feature)
                :feature feature))))

View file

@ -0,0 +1,758 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.v1
"A custom, perfromance and efficiency focused binfile format impl"
(:refer-clojure :exclude [assert])
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.fressian :as fres]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.tasks.file-gc]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.java.io :as jio]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.util :as pu]
[yetti.adapter :as yt])
(:import
com.github.luben.zstd.ZstdInputStream
com.github.luben.zstd.ZstdOutputStream
java.io.DataInputStream
java.io.DataOutputStream
java.io.InputStream
java.io.OutputStream))
(set! *warn-on-reflection* true)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; Threshold in bytes (2 MiB) above which we switch from in-memory
;; byte-array's to temporal files when reading streams.
(def temp-file-threshold
  (* 1024 1024 2))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL STREAM IO API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; Default IO buffer size, taken from the yetti adapter defaults.
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))

;; Magic number written on the header to identify penpot binfiles.
(def ^:const penpot-magic-number 800099563638710213)

(def ^:const max-object-size (* 1024 1024 100)) ; Only allow 100MiB max file size.

;; Atom tracking the current byte position on the stream, used by the
;; trace logging on all IO helpers.
(def ^:dynamic *position* nil)
(defn get-mark
  "Return the numeric wire mark for the given mark keyword; raises a
  validation error for unknown ids."
  [id]
  (or (case id
        :header 1
        :stream 2
        :uuid   3
        :label  4
        :obj    5
        nil)
      (ex/raise :type :validation
                :code :invalid-mark-id
                :hint (format "invalid mark id %s" id))))
(defmacro assert
  "Raise a validation error with `hint` when `expr` is falsy."
  [expr hint]
  `(when-not ~expr
     (ex/raise :type :validation
               :code :unexpected-condition
               :hint ~hint)))

(defmacro assert-mark
  "Check that the byte `v` read from the stream equals the numeric mark
  for `type`; raises a validation error otherwise."
  [v type]
  `(let [expected# (get-mark ~type)
         val# (long ~v)]
     (when (not= val# expected#)
       (ex/raise :type :validation
                 :code :unexpected-mark
                 :hint (format "received mark %s, expected %s" val# expected#)))))

(defmacro assert-label
  "Check that the value of `expr` equals `label`; raises otherwise."
  [expr label]
  `(let [v# ~expr]
     (when (not= v# ~label)
       (ex/raise :type :assertion
                 :code :unexpected-label
                 :hint (format "received label %s, expected %s" v# ~label)))))
;; --- PRIMITIVE IO

;; All primitive readers/writers trace the operation and advance the
;; *position* atom by the number of bytes moved.

(defn write-byte!
  [^DataOutputStream output data]
  (l/trace :fn "write-byte!" :data data :position @*position* ::l/sync? true)
  (.writeByte output (byte data))
  (swap! *position* inc))

(defn read-byte!
  [^DataInputStream input]
  (let [v (.readByte input)]
    (l/trace :fn "read-byte!" :val v :position @*position* ::l/sync? true)
    (swap! *position* inc)
    v))

(defn write-long!
  [^DataOutputStream output data]
  (l/trace :fn "write-long!" :data data :position @*position* ::l/sync? true)
  (.writeLong output (long data))
  (swap! *position* + 8))

(defn read-long!
  [^DataInputStream input]
  (let [v (.readLong input)]
    (l/trace :fn "read-long!" :val v :position @*position* ::l/sync? true)
    (swap! *position* + 8)
    v))

(defn write-bytes!
  [^DataOutputStream output ^bytes data]
  (let [size (alength data)]
    (l/trace :fn "write-bytes!" :size size :position @*position* ::l/sync? true)
    (.write output data 0 size)
    (swap! *position* + size)))

(defn read-bytes!
  ;; Fills `buff` via readNBytes and returns the number of bytes
  ;; actually read (may be less than the buffer size at EOF).
  [^InputStream input ^bytes buff]
  (let [size (alength buff)
        readed (.readNBytes input buff 0 size)]
    (l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/sync? true)
    (swap! *position* + readed)
    readed))
;; --- COMPOSITE IO

(defn write-uuid!
  "Write a :uuid mark followed by the two 64-bit words of the uuid."
  [^DataOutputStream output id]
  (l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/sync? true)

  (doto output
    (write-byte! (get-mark :uuid))
    (write-long! (uuid/get-word-high id))
    (write-long! (uuid/get-word-low id))))

(defn read-uuid!
  "Read and validate a :uuid mark, rebuilding the uuid from two longs."
  [^DataInputStream input]
  (l/trace :fn "read-uuid!" :position @*position* ::l/sync? true)
  (let [m (read-byte! input)]
    (assert-mark m :uuid)
    (let [a (read-long! input)
          b (read-long! input)]
      (uuid/custom a b))))

(defn write-obj!
  "Write an arbitrary object as a fressian-encoded, length-prefixed blob."
  [^DataOutputStream output data]
  (l/trace :fn "write-obj!" :position @*position* ::l/sync? true)
  (let [^bytes data (fres/encode data)]
    (doto output
      (write-byte! (get-mark :obj))
      (write-long! (alength data))
      (write-bytes! data))))

(defn read-obj!
  "Read and decode a length-prefixed fressian-encoded object."
  [^DataInputStream input]
  (l/trace :fn "read-obj!" :position @*position* ::l/sync? true)
  (let [m (read-byte! input)]
    (assert-mark m :obj)
    (let [size (read-long! input)]
      (assert (pos? size) "incorrect header size found on reading header")
      (let [buff (byte-array size)]
        (read-bytes! input buff)
        (fres/decode buff)))))

(defn write-label!
  "Write a :label mark followed by the label object."
  [^DataOutputStream output label]
  (l/trace :fn "write-label!" :label label :position @*position* ::l/sync? true)
  (doto output
    (write-byte! (get-mark :label))
    (write-obj! label)))

(defn read-label!
  "Read and validate a :label mark, returning the label object."
  [^DataInputStream input]
  (l/trace :fn "read-label!" :position @*position* ::l/sync? true)
  (let [m (read-byte! input)]
    (assert-mark m :label)
    (read-obj! input)))
(defn write-header!
  "Write the binfile header: a :header mark, the penpot magic number and
  the numeric version (e.g. :v1 -> 1)."
  [^OutputStream output version]
  (l/trace :fn "write-header!"
           :version version
           :position @*position*
           ::l/sync? true)
  (let [vers (-> version name (subs 1) parse-long)
        output (io/data-output-stream output)]
    (doto output
      (write-byte! (get-mark :header))
      (write-long! penpot-magic-number)
      (write-long! vers))))

(defn read-header!
  "Read and validate the binfile header, returning the version keyword
  (e.g. :v1). Raises when the mark or the magic number do not match."
  [^InputStream input]
  (l/trace :fn "read-header!" :position @*position* ::l/sync? true)
  (let [input (io/data-input-stream input)
        mark  (read-byte! input)
        mnum  (read-long! input)
        vers  (read-long! input)]
    (when (or (not= mark (get-mark :header))
              (not= mnum penpot-magic-number))
      (ex/raise :type :validation
                :code :invalid-penpot-file
                :hint "invalid penpot file"))
    (keyword (str "v" vers))))
(defn copy-stream!
  "Copy `size` bytes from `input` to `output`, advancing *position*;
  returns the number of bytes actually written."
  [^OutputStream output ^InputStream input ^long size]
  (let [written (io/copy! input output :size size)]
    (l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
    (swap! *position* + written)
    written))

(defn write-stream!
  "Write a :stream mark, the payload size and then the payload itself."
  [^DataOutputStream output stream size]
  (l/trace :fn "write-stream!" :position @*position* ::l/sync? true :size size)
  (doto output
    (write-byte! (get-mark :stream))
    (write-long! size))

  (copy-stream! output stream size))

(defn read-stream!
  "Read a :stream mark and its payload. Returns a [size resource] pair
  where resource is a byte array for small payloads, or a temp file
  path when the payload exceeds `temp-file-threshold`."
  [^DataInputStream input]
  (l/trace :fn "read-stream!" :position @*position* ::l/sync? true)
  (let [m (read-byte! input)
        s (read-long! input)
        p (tmp/tempfile :prefix "penpot.binfile.")]
    (assert-mark m :stream)

    (when (> s max-object-size)
      (ex/raise :type :validation
                :code :max-file-size-reached
                :hint (str/ffmt "unable to import storage object with size % bytes" s)))

    (if (> s temp-file-threshold)
      (with-open [^OutputStream output (io/output-stream p)]
        (let [readed (io/copy! input output :offset 0 :size s)]
          (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
          (swap! *position* + readed)
          [s p]))
      [s (io/read-as-bytes input :size s)])))

(defmacro assert-read-label!
  "Read a label from `input` and raise a validation error when it does
  not match `expected-label`."
  [input expected-label]
  `(let [readed# (read-label! ~input)
         expected# ~expected-label]
     (when (not= readed# expected#)
       (ex/raise :type :validation
                 :code :unexpected-label
                 :hint (format "unexpected label found: %s, expected: %s" readed# expected#)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; --- HELPERS

(defn zstd-input-stream
  "Wrap `input` with a zstd decompression stream."
  ^InputStream
  [input]
  (ZstdInputStream. ^InputStream input))

(defn zstd-output-stream
  "Wrap `output` with a zstd compression stream at the given `level`."
  ^OutputStream
  [output & {:keys [level] :or {level 0}}]
  (ZstdOutputStream. ^OutputStream output (int level)))

(defn- get-files
  "Return a vector with the subset of `ids` that exist on the file
  table, or nil when none of them exist."
  [cfg ids]
  (db/run! cfg (fn [{:keys [::db/conn]}]
                 (let [sql (str "SELECT id FROM file "
                                " WHERE id = ANY(?) ")
                       ids (db/create-array conn "uuid" ids)]
                   (->> (db/exec! conn [sql ids])
                        (into [] (map :id))
                        (not-empty))))))
;; --- EXPORT WRITER

(defmulti write-export ::version)
(defmulti write-section ::section)

(defn write-export!
  "Entry point for writing an export; validates that the mutually
  exclusive options are not combined before dispatching on ::version."
  [{:keys [::include-libraries ::embed-assets] :as cfg}]
  (when (and include-libraries embed-assets)
    (throw (IllegalArgumentException.
            "the `include-libraries` and `embed-assets` are mutally excluding options")))
  (write-export cfg))

(defmethod write-export :default
  [{:keys [::output] :as options}]
  ;; The header goes uncompressed; the body is zstd-compressed and
  ;; contains the sections written in a fixed order.
  (write-header! output :v1)
  (pu/with-open [output (zstd-output-stream output :level 12)
                 output (io/data-output-stream output)]
    (binding [bfc/*state* (volatile! {})]
      (run! (fn [section]
              (l/dbg :hint "write section" :section section ::l/sync? true)
              (write-label! output section)
              (let [options (-> options
                                (assoc ::output output)
                                (assoc ::section section))]
                (binding [bfc/*options* options]
                  (write-section options))))
            [:v1/metadata :v1/files :v1/rels :v1/sobjects]))))
(defmethod write-section :v1/metadata
  [{:keys [::output ::ids ::include-libraries] :as cfg}]
  (if-let [fids (get-files cfg ids)]
    (let [lids (when include-libraries
                 (bfc/get-libraries cfg ids))
          ids  (into fids lids)]
      (write-obj! output {:version cf/version :files ids})
      ;; Remember the ordered list of exported file ids; the following
      ;; sections rely on it.
      (vswap! bfc/*state* assoc :files ids))
    (ex/raise :type :not-found
              :code :files-not-found
              :hint "unable to retrieve files for export")))
(defmethod write-section :v1/files
  [{:keys [::output ::embed-assets ::include-libraries] :as cfg}]

  ;; Initialize SIDS with empty vector
  (vswap! bfc/*state* assoc :sids [])

  (doseq [file-id (-> bfc/*state* deref :files)]
    (let [detach?    (and (not embed-assets) (not include-libraries))
          thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
                          (mapv #(dissoc % :file-id)))

          file       (cond-> (bfc/get-file cfg file-id)
                       ;; Neither including libraries nor embedding:
                       ;; strip all external references from the data.
                       detach?
                       (-> (ctf/detach-external-references file-id)
                           (dissoc :libraries))

                       embed-assets
                       (update :data #(bfc/embed-assets cfg % file-id))

                       :always
                       (assoc :thumbnails thumbnails))

          media      (bfc/get-file-media cfg file)]

      (l/dbg :hint "write penpot file"
             :id (str file-id)
             :name (:name file)
             :thumbnails (count thumbnails)
             :features (:features file)
             :media (count media)
             ::l/sync? true)

      (doseq [item media]
        (l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))

      (doseq [item thumbnails]
        (l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))

      (doto output
        (write-obj! file)
        (write-obj! media))

      ;; Collect the storage ids referenced by media and thumbnails for
      ;; the :v1/sobjects section.
      (vswap! bfc/*state* update :sids into bfc/xf-map-media-id media)
      (vswap! bfc/*state* update :sids into bfc/xf-map-media-id thumbnails))))
(defmethod write-section :v1/rels
  [{:keys [::output ::include-libraries] :as cfg}]
  ;; Library relations are only exported when libraries are included;
  ;; otherwise an empty (nil) rels object is written.
  (let [ids  (-> bfc/*state* deref :files set)
        rels (when include-libraries
               (bfc/get-files-rels cfg ids))]
    (l/dbg :hint "found rels" :total (count rels) ::l/sync? true)
    (write-obj! output rels)))

(defmethod write-section :v1/sobjects
  [{:keys [::sto/storage ::output]}]
  (let [sids    (-> bfc/*state* deref :sids)
        storage (media/configure-assets-storage storage)]
    (l/dbg :hint "found sobjects"
           :items (count sids)
           ::l/sync? true)

    ;; Write all collected storage objects
    (write-obj! output sids)

    (doseq [id sids]
      (let [{:keys [size] :as obj} (sto/get-object storage id)]
        (l/dbg :hint "write sobject" :id (str id) ::l/sync? true)
        (doto output
          (write-uuid! id)
          (write-obj! (meta obj)))

        (pu/with-open [stream (sto/get-object-data storage obj)]
          (let [written (write-stream! output stream size)]
            (when (not= written size)
              (ex/raise :type :validation
                        :code :mismatch-readed-size
                        :hint (str/ffmt "found unexpected object size; size=% written=%" size written)))))))))
;; --- EXPORT READER

(defmulti read-import ::version)
(defmulti read-section ::section)

(s/def ::profile-id ::us/uuid)
(s/def ::project-id ::us/uuid)
(s/def ::input io/input-stream?)
(s/def ::overwrite? (s/nilable ::us/boolean))
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))

;; FIXME: replace with schema
(s/def ::read-import-options
  (s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input]
          :opt [::overwrite? ::ignore-index-errors?]))

(defn read-import!
  "Do the importation of the specified resource in penpot custom binary
  format. There are some options for customize the importation
  behavior:

  `::bfc/overwrite`: if true, instead of creating new files and remapping id references,
  it reuses all ids and updates existing objects; defaults to `false`."
  [{:keys [::input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
  (dm/assert!
   "expected input stream"
   (io/input-stream? input))

  (dm/assert!
   "expected valid instant"
   (dt/instant? timestamp))

  ;; Dispatch on the version found on the stream header.
  (let [version (read-header! input)]
    (read-import (assoc options ::version version ::bfc/timestamp timestamp))))
(defn- read-import-v1
  ;; Process a v1 binfile stream inside the current transaction,
  ;; returning the set of created file ids.
  [{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
  ;; Imports can run long; disable the idle timeout and defer FK
  ;; constraint checks until commit.
  (db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
  (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])

  (pu/with-open [input (zstd-input-stream input)
                 input (io/data-input-stream input)]
    (binding [bfc/*state* (volatile! {:media [] :index {}})]
      (let [team     (teams/get-team conn
                                     :profile-id profile-id
                                     :project-id project-id)

            features (cfeat/get-team-enabled-features cf/flags team)]

        ;; Process all sections
        (run! (fn [section]
                (l/dbg :hint "reading section" :section section ::l/sync? true)
                (assert-read-label! input section)
                (let [options (-> cfg
                                  (assoc ::bfc/features features)
                                  (assoc ::section section)
                                  (assoc ::input input))]
                  (binding [bfc/*options* options]
                    (events/tap :progress {:op :import :section section})
                    (read-section options))))
              [:v1/metadata :v1/files :v1/rels :v1/sobjects])

        (bfc/apply-pending-migrations! cfg)

        ;; Knowing that the ids of the created files are in index,
        ;; just lookup them and return it as a set
        (let [files (-> bfc/*state* deref :files)]
          (into #{} (keep #(get-in @bfc/*state* [:index %])) files))))))

(defmethod read-import :v1
  [options]
  ;; Run the whole import inside a single database transaction.
  (db/tx-run! options read-import-v1))
(defmethod read-section :v1/metadata
  [{:keys [::input]}]
  (let [{:keys [version files]} (read-obj! input)]
    (l/dbg :hint "metadata readed"
           :version (:full version)
           :files (mapv str files)
           ::l/sync? true)
    ;; Seed the remap index with entries for every announced file id.
    (vswap! bfc/*state* update :index bfc/update-index files)
    (vswap! bfc/*state* assoc :version version :files files)))

(defn- remap-thumbnails
  "Reassign `thumbnails` to `file-id`, rewriting the file-id prefix of
  each :object-id accordingly."
  [thumbnails file-id]
  (mapv (fn [thumbnail]
          (-> thumbnail
              (assoc :file-id file-id)
              (update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/")))))
        thumbnails))
(defmethod read-section :v1/files
  [{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}]

  (doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
    (let [file       (read-obj! input)
          media      (read-obj! input)

          file-id    (:id file)
          file-id'   (bfc/lookup-index file-id)

          thumbnails (:thumbnails file)]

      ;; Files must arrive in the exact order announced on the metadata
      ;; section.
      (when (not= file-id expected-file-id)
        (ex/raise :type :validation
                  :code :inconsistent-penpot-file
                  :found-id file-id
                  :expected-id expected-file-id
                  :hint "the penpot file seems corrupt, found unexpected uuid (file-id)"))

      (l/dbg :hint "processing file"
             :id (str file-id)
             :features (:features file)
             :version (-> file :data :version)
             :media (count media)
             :thumbnails (count thumbnails)
             ::l/sync? true)

      (when (seq thumbnails)
        (let [thumbnails (remap-thumbnails thumbnails file-id')]
          (l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
          (vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))

      (when (seq media)
        ;; Update index with media
        (l/dbg :hint "update index with media" :total (count media) ::l/sync? true)
        (vswap! bfc/*state* update :index bfc/update-index (map :id media))

        ;; Store file media for later insertion
        (l/dbg :hint "update media references" ::l/sync? true)
        (vswap! bfc/*state* update :media into (map #(update % :id bfc/lookup-index)) media))

      (let [file (-> file
                     (assoc :id file-id')
                     ;; Only the first file takes the user-provided name.
                     (cond-> (and (= idx 0) (some? name))
                       (assoc :name name))
                     (assoc :project-id project-id)
                     (dissoc :thumbnails)
                     (bfc/process-file))]

        ;; All features that are enabled and requires explicit migration are
        ;; added to the state for a posterior migration step.
        (doseq [feature (-> (::bfc/features system)
                            (set/difference cfeat/no-migration-features)
                            (set/difference (:features file)))]
          (vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))

        (l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
        (bfc/persist-file! system file)

        ;; In overwrite mode, drop stale legacy file thumbnails.
        (when overwrite
          (db/delete! conn :file-thumbnail {:file-id file-id'}))

        file-id'))))
(defmethod read-section :v1/rels
  [{:keys [::db/conn ::input ::bfc/timestamp]}]
  (let [rels (read-obj! input)
        ids  (into #{} (-> bfc/*state* deref :files))]
    ;; Insert all file relations
    (doseq [{:keys [library-file-id] :as rel} rels]
      (let [rel (-> rel
                    (assoc :synced-at timestamp)
                    (update :file-id bfc/lookup-index)
                    (update :library-file-id bfc/lookup-index))]

        (if (contains? ids library-file-id)
          (do
            (l/dbg :hint "create file library link"
                   :file-id (:file-id rel)
                   :lib-id (:library-file-id rel)
                   ::l/sync? true)
            (db/insert! conn :file-library-rel rel))

          ;; The referenced library is not part of this import; skip it.
          (l/warn :hint "ignoring file library link"
                  :file-id (:file-id rel)
                  :lib-id (:library-file-id rel)
                  ::l/sync? true))))))
(defmethod read-section :v1/sobjects
  [{:keys [::sto/storage ::db/conn ::input ::bfc/overwrite ::bfc/timestamp]}]
  (let [storage (media/configure-assets-storage storage)
        ids     (read-obj! input)
        ;; Predicate: is this storage id a thumbnail media id?
        thumb?  (into #{} (map :media-id) (:thumbnails @bfc/*state*))]

    (doseq [expected-storage-id ids]
      (let [id    (read-uuid! input)
            mdata (read-obj! input)]

        ;; Objects must arrive in the order announced by the id list.
        (when (not= id expected-storage-id)
          (ex/raise :type :validation
                    :code :inconsistent-penpot-file
                    :hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"))

        (l/dbg :hint "readed storage object" :id (str id) ::l/sync? true)

        (let [[size resource] (read-stream! input)
              hash    (sto/calculate-hash resource)
              content (-> (sto/content resource size)
                          (sto/wrap-with-hash hash))

              params  (-> mdata
                          (assoc ::sto/content content)
                          (assoc ::sto/deduplicate? true)
                          (assoc ::sto/touched-at timestamp))

              ;; Bucket depends on whether the object backs a thumbnail
              ;; or a media object.
              params  (if (thumb? id)
                        (assoc params :bucket "file-object-thumbnail")
                        (assoc params :bucket "file-media-object"))

              sobject (sto/put-object! storage params)]

          (l/dbg :hint "persisted storage object"
                 :old-id (str id)
                 :new-id (str (:id sobject))
                 :is-thumbnail (boolean (thumb? id))
                 ::l/sync? true)

          ;; Record the old->new storage id mapping.
          (vswap! bfc/*state* update :index assoc id (:id sobject)))))

    (doseq [item (:media @bfc/*state*)]
      (l/dbg :hint "inserting file media object"
             :id (str (:id item))
             :file-id (str (:file-id item))
             ::l/sync? true)

      (let [file-id (bfc/lookup-index (:file-id item))]
        ;; NOTE(review): an unchanged id is treated as "file not part of
        ;; this import" and skipped; under ::overwrite ids map to
        ;; themselves — confirm this path behaves as intended there.
        (if (= file-id (:file-id item))
          (l/warn :hint "ignoring file media object" :file-id (str file-id) ::l/sync? true)
          (db/insert! conn :file-media-object
                      (-> item
                          (assoc :file-id file-id)
                          (d/update-when :media-id bfc/lookup-index)
                          (d/update-when :thumbnail-id bfc/lookup-index))
                      {::db/on-conflict-do-nothing? overwrite}))))

    (doseq [item (:thumbnails @bfc/*state*)]
      (let [item (update item :media-id bfc/lookup-index)]
        (l/dbg :hint "inserting file object thumbnail"
               :file-id (str (:file-id item))
               :media-id (str (:media-id item))
               :object-id (:object-id item)
               ::l/sync? true)
        (db/insert! conn :file-tagged-object-thumbnail item
                    {::db/on-conflict-do-nothing? overwrite})))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn export-files!
  "Do the exportation of a specified file in custom penpot binary
  format. There are some options available for customize the output:

  `::include-libraries`: additionally to the specified file, all the
  linked libraries also will be included (including transitive
  dependencies).

  `::embed-assets`: instead of including the libraries, embed in the
  same file library all assets used from external libraries."
  [{:keys [::ids] :as cfg} output]
  (dm/assert!
   "expected a set of uuid's for `::ids` parameter"
   (and (set? ids)
        (every? uuid? ids)))

  ;; NOTE(review): the assertion message says `input` but it validates
  ;; the `output` argument.
  (dm/assert!
   "expected instance of jio/IOFactory for `input`"
   (satisfies? jio/IOFactory output))

  (let [id (uuid/next)
        tp (dt/tpoint)
        ab (volatile! false) ;; aborted?
        cs (volatile! nil)]  ;; failure cause, when any
    (try
      (l/info :hint "start exportation" :export-id (str id))
      (pu/with-open [output (io/output-stream output)]
        (binding [*position* (atom 0)]
          (write-export! (assoc cfg ::output output))))

      (catch java.io.IOException _cause
        ;; Do nothing, EOF means client closes connection abruptly
        (vreset! ab true)
        nil)

      (catch Throwable cause
        (vreset! cs cause)
        (vreset! ab true)
        (throw cause))

      (finally
        (l/info :hint "exportation finished" :export-id (str id)
                :elapsed (str (inst-ms (tp)) "ms")
                :aborted @ab
                :cause @cs)))))
(defn import-files!
  "High level entry point for importing a binfile into the project
  referenced on `cfg`; returns whatever `read-import!` returns (the set
  of created file ids)."
  [cfg input]
  (dm/assert!
   "expected valid profile-id and project-id on `cfg`"
   (and (uuid? (::profile-id cfg))
        (uuid? (::project-id cfg))))

  (dm/assert!
   "expected instance of jio/IOFactory for `input`"
   (satisfies? jio/IOFactory input))

  (let [id (uuid/next)
        tp (dt/tpoint)
        cs (volatile! nil)] ;; failure cause, when any
    (l/info :hint "import: started" :id (str id))
    (try
      (binding [*position* (atom 0)]
        (pu/with-open [input (io/input-stream input)]
          (read-import! (assoc cfg ::input input))))

      (catch Throwable cause
        (vreset! cs cause)
        (throw cause))

      (finally
        (l/info :hint "import: terminated"
                :id (str id)
                :elapsed (dt/format-duration (tp))
                :error? (some? @cs))))))

View file

@ -9,31 +9,24 @@
of entire team (or multiple teams) at once."
(:refer-clojure :exclude [read])
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.files.defaults :as cfd]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as fval]
[app.common.logging :as l]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.features.fdata :as feat.fdata]
[app.http.sse :as sse]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.media :as media]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[clojure.walk :as walk]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.util :as pu])
@ -42,34 +35,10 @@
(set! *warn-on-reflection* true)
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- lookup-index
  "Resolve `id` through the remap index kept in `*state*`, returning
  the remapped id when an entry exists. NOTE(review): when no entry is
  present the `when-let` short-circuits and this returns nil rather
  than the original `id` — confirm that callers expect nil here."
  [id]
  (when-let [val (get-in @*state* [:index id])]
    (l/trc :fn "lookup-index" :id (some-> id str) :result (some-> val str) ::l/sync? true)
    (or val id)))
(defn- index-object
  "Extend `index` with a fresh uuid mapping for each id extracted from
  `obj` by the provided accessor functions in `attrs`."
  [index obj & attrs]
  (reduce (fn [acc attr-fn]
            (assoc acc (attr-fn obj) (uuid/next)))
          index
          attrs))
(defn- update-index
  "Extend `index` with fresh uuid mappings for every object in `coll`,
  keyed by `attr` (defaults to the object itself via `identity`)."
  ([index coll]
   (update-index index coll identity))
  ([index coll attr]
   (reduce (fn [acc obj]
             (index-object acc obj attr))
           index
           coll)))
(defn- create-database
([cfg]
(let [path (tmp/tempfile :prefix "penpot.binfile." :suffix ".sqlite")]
@ -92,12 +61,6 @@
"CREATE INDEX kvdata__tag_key__idx
ON kvdata (tag, key)")
(defn- decode-row
  "Decode the database-encoded columns of a row: `:features` (pg array)
  and `:data` (blob). All other entries are left untouched."
  [row]
  (let [{:keys [data features]} row]
    (cond-> row
      features (assoc :features (db/decode-pgarray features #{}))
      data     (assoc :data (blob/decode data)))))
(defn- setup-schema!
[{:keys [::db]}]
(db/exec-one! db [sql:create-kvdata-table])
@ -147,156 +110,65 @@
;; IMPORT/EXPORT IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Transducer: extract the :id of each map in a collection.
(def ^:private xf-map-id
  (map :id))

;; Transducer: extract every storage-object id referenced by a row
;; (media, thumbnail and the font file-id columns), keeping only
;; values that are actual uuids (absent columns yield nil).
(def ^:private xf-map-media-id
  (comp
   (mapcat (juxt :media-id
                 :thumbnail-id
                 :woff1-file-id
                 :woff2-file-id
                 :ttf-file-id
                 :otf-file-id))
   (filter uuid?)))
;; NOTE: Will be used in future, commented for satisfy linter
;; (def ^:private sql:get-libraries
;; "WITH RECURSIVE libs AS (
;; SELECT fl.id
;; FROM file AS fl
;; JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
;; WHERE flr.file_id = ANY(?)
;; UNION
;; SELECT fl.id
;; FROM file AS fl
;; JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
;; JOIN libs AS l ON (flr.file_id = l.id)
;; )
;; SELECT DISTINCT l.id
;; FROM libs AS l")
;;
;; (defn- get-libraries
;; "Get all libraries ids related to provided file ids"
;; [{:keys [::db/conn]} ids]
;; (let [ids' (db/create-array conn "uuid" ids)]
;; (->> (db/exec! conn [sql:get-libraries ids])
;; (into #{} xf-map-id))))
;;
;; (def ^:private sql:get-project-files
;; "SELECT f.id FROM file AS f
;; WHERE f.project_id = ?")
;; (defn- get-project-files
;; "Get a set of file ids for the project"
;; [{:keys [::db/conn]} project-id]
;; (->> (db/exec! conn [sql:get-project-files project-id])
;; (into #{} xf-map-id)))
;; SQL: ids of all files that belong (through their project) to a team.
(def ^:private sql:get-team-files
  "SELECT f.id FROM file AS f
     JOIN project AS p ON (p.id = f.project_id)
    WHERE p.team_id = ?")

(defn- get-team-files
  "Get a set of file ids for the specified team-id"
  [{:keys [::db/conn]} team-id]
  (->> (db/exec! conn [sql:get-team-files team-id])
       (into #{} xf-map-id)))
;; SQL: ids of all projects directly owned by a team.
(def ^:private sql:get-team-projects
  "SELECT p.id FROM project AS p
    WHERE p.team_id = ?")

(defn- get-team-projects
  "Get a set of project ids for the team"
  [{:keys [::db/conn]} team-id]
  (->> (db/exec! conn [sql:get-team-projects team-id])
       (into #{} xf-map-id)))
(declare ^:private write-project!)
(declare ^:private write-file!)
(defn- write-team!
[{:keys [::db/conn] :as cfg} team-id]
[cfg team-id]
(sse/tap {:type :export-progress
:section :write-team
:id team-id})
(let [team (bfc/get-team cfg team-id)
fonts (bfc/get-fonts cfg team-id)]
(let [team (db/get conn :team {:id team-id}
::db/remove-deleted false
::db/check-deleted false)
team (decode-row team)
fonts (db/query conn :team-font-variant
{:team-id team-id
:deleted-at nil}
{::sql/for-share true})]
(events/tap :progress
{:op :export
:section :write-team
:id team-id
:name (:name team)})
(l/trc :hint "write" :obj "team"
:id (str team-id)
:fonts (count fonts))
(vswap! *state* update :teams conj team-id)
(vswap! *state* update :storage-objects into xf-map-media-id fonts)
(vswap! bfc/*state* update :teams conj team-id)
(vswap! bfc/*state* bfc/collect-storage-objects fonts)
(write! cfg :team team-id team)
(doseq [{:keys [id] :as font} fonts]
(vswap! *state* update :team-font-variants conj id)
(vswap! bfc/*state* update :team-font-variants conj id)
(write! cfg :team-font-variant id font))))
(defn- write-project!
[{:keys [::db/conn] :as cfg} project-id]
(sse/tap {:type :export-progress
:section :write-project
:id project-id})
(let [project (db/get conn :project {:id project-id}
::db/remove-deleted false
::db/check-deleted false)]
[cfg project-id]
(let [project (bfc/get-project cfg project-id)]
(events/tap :progress
{:op :export
:section :write-project
:id project-id
:name (:name project)})
(l/trc :hint "write" :obj "project" :id (str project-id))
(write! cfg :project (str project-id) project)
(vswap! *state* update :projects conj project-id)))
(vswap! bfc/*state* update :projects conj project-id)))
(defn- write-file!
[{:keys [::db/conn] :as cfg} file-id]
[cfg file-id]
(let [file (bfc/get-file cfg file-id)
thumbs (bfc/get-file-object-thumbnails cfg file-id)
media (bfc/get-file-media cfg file)
rels (bfc/get-files-rels cfg #{file-id})]
(sse/tap {:type :export-progress
:section :write-file
:id file-id})
(events/tap :progress
{:op :export
:section :write-file
:id file-id
:name (:name file)})
(let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(-> (db/get conn :file {:id file-id}
::sql/for-share true
::db/remove-deleted false
::db/check-deleted false)
(decode-row)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))))
thumbs (db/query conn :file-tagged-object-thumbnail
{:file-id file-id
:deleted-at nil}
{::sql/for-share true})
media (db/query conn :file-media-object
{:file-id file-id
:deleted-at nil}
{::sql/for-share true})
rels (db/query conn :file-library-rel
{:file-id file-id})]
(vswap! *state* (fn [state]
(-> state
(update :files conj file-id)
(update :file-media-objects into (map :id) media)
(update :storage-objects into xf-map-media-id thumbs)
(update :storage-objects into xf-map-media-id media))))
(vswap! bfc/*state* (fn [state]
(-> state
(update :files conj file-id)
(update :file-media-objects into bfc/xf-map-id media)
(bfc/collect-storage-objects thumbs)
(bfc/collect-storage-objects media))))
(write! cfg :file file-id file)
(write! cfg :file-rels file-id rels)
@ -304,13 +176,8 @@
(run! (partial write! cfg :file-media-object file-id) media)
(run! (partial write! cfg :file-object-thumbnail file-id) thumbs)
(when-let [thumb (db/get* conn :file-thumbnail
{:file-id file-id
:revn (:revn file)
:data nil}
{::sql/for-share true
::sql/columns [:media-id :file-id :revn]})]
(vswap! *state* update :storage-objects into xf-map-media-id [thumb])
(when-let [thumb (bfc/get-file-thumbnail cfg file)]
(vswap! bfc/*state* bfc/collect-storage-objects [thumb])
(write! cfg :file-thumbnail file-id thumb))
(l/trc :hint "write" :obj "file"
@ -328,7 +195,7 @@
(write! cfg :storage-object id (meta sobj) data)))
(defn- read-storage-object!
[{:keys [::sto/storage ::timestamp] :as cfg} id]
[{:keys [::sto/storage ::bfc/timestamp] :as cfg} id]
(let [mdata (read-obj cfg :storage-object id)
data (read-blob cfg :storage-object id)
hash (sto/calculate-hash data)
@ -343,7 +210,7 @@
sobject (sto/put-object! storage params)]
(vswap! *state* update :index assoc id (:id sobject))
(vswap! bfc/*state* update :index assoc id (:id sobject))
(l/trc :hint "read" :obj "storage-object"
:id (str id)
@ -351,20 +218,22 @@
:size (:size sobject))))
(defn read-team!
[{:keys [::db/conn ::timestamp] :as cfg} team-id]
[{:keys [::db/conn ::bfc/timestamp] :as cfg} team-id]
(l/trc :hint "read" :obj "team" :id (str team-id))
(sse/tap {:type :import-progress
:section :read-team
:id team-id})
(let [team (read-obj cfg :team team-id)
team (-> team
(update :id lookup-index)
(update :photo-id lookup-index)
(update :id bfc/lookup-index)
(update :photo-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(events/tap :progress
{:op :import
:section :read-team
:id team-id
:name (:name team)})
(db/insert! conn :team
(update team :features db/encode-pgarray conn "text")
::db/return-keys false)
@ -372,12 +241,12 @@
(doseq [font (->> (read-seq cfg :team-font-variant)
(filter #(= team-id (:team-id %))))]
(let [font (-> font
(update :id lookup-index)
(update :team-id lookup-index)
(update :woff1-file-id lookup-index)
(update :woff2-file-id lookup-index)
(update :ttf-file-id lookup-index)
(update :otf-file-id lookup-index)
(update :id bfc/lookup-index)
(update :team-id bfc/lookup-index)
(update :woff1-file-id bfc/lookup-index)
(update :woff2-file-id bfc/lookup-index)
(update :ttf-file-id bfc/lookup-index)
(update :otf-file-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(db/insert! conn :team-font-variant font
@ -386,184 +255,53 @@
team))
(defn read-project!
[{:keys [::db/conn ::timestamp] :as cfg} project-id]
[{:keys [::db/conn ::bfc/timestamp] :as cfg} project-id]
(l/trc :hint "read" :obj "project" :id (str project-id))
(sse/tap {:type :import-progress
:section :read-project
:id project-id})
(let [project (read-obj cfg :project project-id)
project (-> project
(update :id lookup-index)
(update :team-id lookup-index)
(update :id bfc/lookup-index)
(update :team-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(events/tap :progress
{:op :import
:section :read-project
:id project-id
:name (:name project)})
(db/insert! conn :project project
::db/return-keys false)))
(defn- relink-shapes
"A function responsible to analyze all file data and
replace the old :component-file reference with the new
ones, using the provided file-index."
[data]
(letfn [(process-map-form [form]
(cond-> form
;; Relink image shapes
(and (map? (:metadata form))
(= :image (:type form)))
(update-in [:metadata :id] lookup-index)
;; Relink paths with fill image
(map? (:fill-image form))
(update-in [:fill-image :id] lookup-index)
;; This covers old shapes and the new :fills.
(uuid? (:fill-color-ref-file form))
(update :fill-color-ref-file lookup-index)
;; This covers the old shapes and the new :strokes
(uuid? (:storage-color-ref-file form))
(update :stroke-color-ref-file lookup-index)
;; This covers all text shapes that have typography referenced
(uuid? (:typography-ref-file form))
(update :typography-ref-file lookup-index)
;; This covers the component instance links
(uuid? (:component-file form))
(update :component-file lookup-index)
;; This covers the shadows and grids (they have directly
;; the :file-id prop)
(uuid? (:file-id form))
(update :file-id lookup-index)))]
(walk/postwalk (fn [form]
(if (map? form)
(try
(process-map-form form)
(catch Throwable cause
(l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
(throw cause)))
form))
data)))
(defn- relink-media
"A function responsible of process the :media attr of file data and
remap the old ids with the new ones."
[media]
(reduce-kv (fn [res k v]
(let [id (lookup-index k)]
(if (uuid? id)
(-> res
(assoc id (assoc v :id id))
(dissoc k))
res)))
media
media))
(defn- relink-colors
"A function responsible of process the :colors attr of file data and
remap the old ids with the new ones."
[colors]
(reduce-kv (fn [res k v]
(if (:image v)
(update-in res [k :image :id] lookup-index)
res))
colors
colors))
(defn- process-file
[{:keys [id] :as file}]
(-> file
(update :data (fn [fdata]
(-> fdata
(assoc :id id)
(dissoc :recent-colors)
(cond-> (> (:version fdata) cfd/version)
(assoc :version cfd/version))
;; FIXME: We're temporarily activating all
;; migrations because a problem in the
;; environments messed up with the version
;; numbers When this problem is fixed delete
;; the following line
(cond-> (> (:version fdata) 22)
(assoc :version 22)))))
(fmg/migrate-file)
(update :data (fn [fdata]
(-> fdata
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(update :colors relink-colors)
(d/without-nils))))))
(defn read-file!
[{:keys [::db/conn ::timestamp] :as cfg} file-id]
[{:keys [::db/conn ::bfc/timestamp] :as cfg} file-id]
(l/trc :hint "read" :obj "file" :id (str file-id))
(sse/tap {:type :import-progress
:section :read-file
:id file-id})
(let [file (-> (read-obj cfg :file file-id)
(update :id bfc/lookup-index)
(update :project-id bfc/lookup-index)
(bfc/process-file))]
(let [file (read-obj cfg :file file-id)
(events/tap :progress
{:op :import
:section :read-file
:id file-id
:name (:name file)})
file (-> file
(update :id lookup-index)
(process-file))
;; All features that are enabled and requires explicit migration are
;; added to the state for a posterior migration step.
(doseq [feature (-> (::bfc/features cfg)
(set/difference cfeat/no-migration-features)
(set/difference (:features file)))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
;; All features that are enabled and requires explicit migration are
;; added to the state for a posterior migration step.
_ (doseq [feature (-> (::features cfg)
(set/difference cfeat/no-migration-features)
(set/difference (:features file)))]
(vswap! *state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
file (-> file
(update :project-id lookup-index))
file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg)
(set/difference cfeat/frontend-only-features)
(set/union features))))))
_ (when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))
_ (when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg (:id file))
file))
file)]
(db/insert! conn :file
(-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))
{::db/return-keys false}))
(bfc/persist-file! cfg file))
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
(let [thumbnail (-> thumbnail
(update :file-id lookup-index)
(update :media-id lookup-index))
(update :file-id bfc/lookup-index)
(update :media-id bfc/lookup-index))
file-id (:file-id thumbnail)
thumbnail (update thumbnail :object-id
@ -574,20 +312,21 @@
(doseq [rel (read-obj cfg :file-rels file-id)]
(let [rel (-> rel
(update :file-id lookup-index)
(update :library-file-id lookup-index)
(update :file-id bfc/lookup-index)
(update :library-file-id bfc/lookup-index)
(assoc :synced-at timestamp))]
(db/insert! conn :file-library-rel rel
::db/return-keys false)))
(doseq [media (read-seq cfg :file-media-object file-id)]
(let [media (-> media
(update :id lookup-index)
(update :file-id lookup-index)
(update :media-id lookup-index)
(update :thumbnail-id lookup-index))]
(update :id bfc/lookup-index)
(update :file-id bfc/lookup-index)
(update :media-id bfc/lookup-index)
(update :thumbnail-id bfc/lookup-index))]
(db/insert! conn :file-media-object media
::db/return-keys false))))
::db/return-keys false
::sql/on-conflict-do-nothing true))))
(def ^:private empty-summary
{:teams #{}
@ -617,20 +356,20 @@
(try
(db/tx-run! cfg (fn [cfg]
(setup-schema! cfg)
(binding [*state* (volatile! empty-summary)]
(binding [bfc/*state* (volatile! empty-summary)]
(write-team! cfg team-id)
(run! (partial write-project! cfg)
(get-team-projects cfg team-id))
(bfc/get-team-projects cfg team-id))
(run! (partial write-file! cfg)
(get-team-files cfg team-id))
(bfc/get-team-files cfg team-id))
(run! (partial write-storage-object! cfg)
(-> *state* deref :storage-objects))
(-> bfc/*state* deref :storage-objects))
(write! cfg :manifest "team-id" team-id)
(write! cfg :manifest "objects" (deref *state*))
(write! cfg :manifest "objects" (deref bfc/*state*))
(::path cfg))))
(finally
@ -642,19 +381,6 @@
:id (str id)
:elapsed (dt/format-duration elapsed)))))))
;; NOTE: will be used in future, commented for satisfy linter
;; (defn- run-pending-migrations!
;; [cfg]
;; ;; Run all pending migrations
;; (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
;; (case feature
;; "components/v2"
;; (feat.compv2/migrate-file! cfg file-id :validate? (::validate cfg true))
;; (ex/raise :type :internal
;; :code :no-migration-defined
;; :hint (str/ffmt "no migation for feature '%' on file importation" feature)
;; :feature feature))))
(defn import-team!
[cfg path]
(let [id (uuid/next)
@ -662,7 +388,7 @@
cfg (-> (create-database cfg path)
(update ::sto/storage media/configure-assets-storage)
(assoc ::timestamp (dt/now)))]
(assoc ::bfc/timestamp (dt/now)))]
(l/inf :hint "start"
:operation "import"
@ -674,7 +400,7 @@
(db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(binding [*state* (volatile! {:index {}})]
(binding [bfc/*state* (volatile! {:index {}})]
(let [objects (read-obj cfg :manifest "objects")]
;; We first process all storage objects, they have
@ -683,19 +409,19 @@
(run! (partial read-storage-object! cfg) (:storage-objects objects))
;; Populate index with all the incoming objects
(vswap! *state* update :index
(vswap! bfc/*state* update :index
(fn [index]
(-> index
(update-index (:teams objects))
(update-index (:projects objects))
(update-index (:files objects))
(update-index (:file-media-objects objects))
(update-index (:team-font-variants objects)))))
(bfc/update-index (:teams objects))
(bfc/update-index (:projects objects))
(bfc/update-index (:files objects))
(bfc/update-index (:file-media-objects objects))
(bfc/update-index (:team-font-variants objects)))))
(let [team-id (read-obj cfg :manifest "team-id")
team (read-team! cfg team-id)
features (cfeat/get-team-enabled-features cf/flags team)
cfg (assoc cfg ::features features)]
cfg (assoc cfg ::bfc/features features)]
(run! (partial read-project! cfg) (:projects objects))
(run! (partial read-file! cfg) (:files objects))

View file

@ -30,6 +30,9 @@
(let [opts (merge default-opts opts)
opts (cond-> opts
(::db/on-conflict-do-nothing? opts)
(assoc :suffix "ON CONFLICT DO NOTHING")
(::on-conflict-do-nothing opts)
(assoc :suffix "ON CONFLICT DO NOTHING"))]
(sql/for-insert table key-map opts))))
@ -46,7 +49,7 @@
opts (cond-> opts
(::columns opts) (assoc :columns (::columns opts))
(::for-update opts) (assoc :suffix "FOR UPDATE")
(::for-share opts) (assoc :suffix "FOR KEY SHARE"))]
(::for-share opts) (assoc :suffix "FOR SHARE"))]
(sql/for-query table where-params opts))))
(defn update

View file

@ -41,7 +41,6 @@
[app.db :as db]
[app.db.sql :as sql]
[app.features.fdata :as fdata]
[app.http.sse :as sse]
[app.media :as media]
[app.rpc.commands.files :as files]
[app.rpc.commands.files-snapshot :as fsnap]
@ -51,6 +50,7 @@
[app.svgo :as svgo]
[app.util.blob :as blob]
[app.util.cache :as cache]
[app.util.events :as events]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[buddy.core.codecs :as bc]
@ -767,8 +767,6 @@
backup', generate main instances for all components there and remove
shapes from library components. Mark the file with the :components-v2 option."
[file-data libraries]
(sse/tap {:type :migration-progress
:section :components})
(let [file-data (prepare-file-data file-data libraries)
components (ctkl/components-seq file-data)]
(if (empty? components)
@ -843,9 +841,9 @@
add-instance-grid
(fn [fdata frame-id grid assets]
(reduce (fn [result [component position]]
(sse/tap {:type :migration-progress
:section :components
:name (:name component)})
(events/tap :progress {:op :migrate-component
:id (:id component)
:name (:name component)})
(add-main-instance result component frame-id (gpt/add position
(gpt/point grid-gap grid-gap))))
fdata
@ -881,9 +879,9 @@
(gpt/add position (gpt/point 0 (+ height (* 2 grid-gap) frame-gap)))))))))]
(let [total (count components)]
(some-> *stats* (swap! update :processed/components (fnil + 0) total))
(some-> *team-stats* (swap! update :processed/components (fnil + 0) total))
(some-> *file-stats* (swap! assoc :processed/components total)))
(some-> *stats* (swap! update :processed-components (fnil + 0) total))
(some-> *team-stats* (swap! update :processed-components (fnil + 0) total))
(some-> *file-stats* (swap! assoc :processed-components total)))
(add-instance-grids file-data)))))
@ -1143,16 +1141,14 @@
(->> (d/zip media-group grid)
(reduce (fn [fdata [mobj position]]
(sse/tap {:type :migration-progress
:section :graphics
:name (:name mobj)})
(events/tap :progress {:op :migrate-graphic
:id (:id mobj)
:name (:name mobj)})
(or (process fdata mobj position) fdata))
(assoc-in fdata [:options :components-v2] true)))))
(defn- migrate-graphics
[fdata]
(sse/tap {:type :migration-progress
:section :graphics})
(if (empty? (:media fdata))
fdata
(let [[fdata page-id start-pos]
@ -1167,9 +1163,9 @@
groups (get-asset-groups media "Graphics")]
(let [total (count media)]
(some-> *stats* (swap! update :processed/graphics (fnil + 0) total))
(some-> *team-stats* (swap! update :processed/graphics (fnil + 0) total))
(some-> *file-stats* (swap! assoc :processed/graphics total)))
(some-> *stats* (swap! update :processed-graphics (fnil + 0) total))
(some-> *team-stats* (swap! update :processed-graphics (fnil + 0) total))
(some-> *file-stats* (swap! assoc :processed-graphics total)))
(loop [groups (seq groups)
fdata fdata
@ -1236,10 +1232,8 @@
(cfv/validate-file-schema! file))
(defn- process-file
[{:keys [::db/conn] :as system} id & {:keys [validate?]}]
(let [file (get-file system id)
libs (->> (files/get-file-libraries conn id)
[{:keys [::db/conn] :as system} {:keys [id] :as file} & {:keys [validate?]}]
(let [libs (->> (files/get-file-libraries conn id)
(into [file] (comp (map :id)
(map (partial get-file system))))
(d/index-by :id))
@ -1314,7 +1308,13 @@
(when (string? label)
(fsnap/take-file-snapshot! system {:file-id file-id
:label (str "migration/" label)}))
(process-file system file-id :validate? validate?))
(let [file (get-file system file-id)]
(events/tap :progress
{:op :migrate-file
:name (:name file)
:id (:id file)})
(process-file system file :validate? validate?)))
(catch Throwable cause
(let [team-id *team-id*]
@ -1325,8 +1325,8 @@
(finally
(let [elapsed (tpoint)
components (get @*file-stats* :processed/components 0)
graphics (get @*file-stats* :processed/graphics 0)]
components (get @*file-stats* :processed-components 0)
graphics (get @*file-stats* :processed-graphics 0)]
(l/dbg :hint "migrate:file:end"
:file-id (str file-id)
@ -1335,8 +1335,8 @@
:validate validate?
:elapsed (dt/format-duration elapsed))
(some-> *stats* (swap! update :processed/files (fnil inc 0)))
(some-> *team-stats* (swap! update :processed/files (fnil inc 0)))))))))
(some-> *stats* (swap! update :processed-files (fnil inc 0)))
(some-> *team-stats* (swap! update :processed-files (fnil inc 0)))))))))
(defn migrate-team!
[system team-id & {:keys [validate? skip-on-graphic-error? label]}]
@ -1355,7 +1355,7 @@
:skip-on-graphic-error? skip-on-graphic-error?))
migrate-team
(fn [{:keys [::db/conn] :as system} team-id]
(let [{:keys [id features]} (get-team system team-id)]
(let [{:keys [id features name]} (get-team system team-id)]
(if (contains? features "components/v2")
(l/inf :hint "team already migrated")
(let [features (-> features
@ -1364,6 +1364,11 @@
(conj "layout/grid")
(conj "styles/v2"))]
(events/tap :progress
{:op :migrate-team
:name name
:id id})
(run! (partial migrate-file system)
(get-and-lock-files conn id))
@ -1380,11 +1385,12 @@
(finally
(let [elapsed (tpoint)
components (get @*team-stats* :processed/components 0)
graphics (get @*team-stats* :processed/graphics 0)
files (get @*team-stats* :processed/files 0)]
components (get @*team-stats* :processed-components 0)
graphics (get @*team-stats* :processed-graphics 0)
files (get @*team-stats* :processed-files 0)]
(some-> *stats* (swap! update :processed/teams (fnil inc 0)))
(when-not @err
(some-> *stats* (swap! update :processed-teams (fnil inc 0))))
(if (cache/cache? *cache*)
(let [cache-stats (cache/stats *cache*)]

View file

@ -7,6 +7,7 @@
(ns app.http.debug
(:refer-clojure :exclude [error-handler])
(:require
[app.binfile.v1 :as bf.v1]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
@ -17,11 +18,11 @@
[app.http.session :as session]
[app.main :as-alias main]
[app.rpc.commands.auth :as auth]
[app.rpc.commands.binfile :as binf]
[app.rpc.commands.files-create :refer [create-file]]
[app.rpc.commands.profile :as profile]
[app.srepl.helpers :as srepl]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp]
[app.util.blob :as blob]
[app.util.template :as tmpl]
[app.util.time :as dt]
@ -268,9 +269,10 @@
(defn export-handler
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
(let [file-ids (->> (:file-ids params)
(remove empty?)
(mapv parse-uuid))
(let [file-ids (into #{}
(comp (remove empty?)
(map parse-uuid))
(:file-ids params))
libs? (contains? params :includelibs)
clone? (contains? params :clone)
embed? (contains? params :embedassets)]
@ -279,22 +281,22 @@
(ex/raise :type :validation
:code :missing-arguments))
(let [path (-> cfg
(assoc ::binf/file-ids file-ids)
(assoc ::binf/embed-assets? embed?)
(assoc ::binf/include-libraries? libs?)
(binf/export-to-tmpfile!))]
(let [path (tmp/tempfile :prefix "penpot.export.")]
(with-open [output (io/output-stream path)]
(-> cfg
(assoc ::bf.v1/ids file-ids)
(assoc ::bf.v1/embed-assets embed?)
(assoc ::bf.v1/include-libraries libs?)
(bf.v1/export-files! output)))
(if clone?
(let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)]
(binf/import!
(assoc cfg
::binf/input path
::binf/overwrite? false
::binf/ignore-index-errors? true
::binf/profile-id profile-id
::binf/project-id project-id))
project-id (:default-project-id profile)
cfg (assoc cfg
::bf.v1/overwrite false
::bf.v1/profile-id profile-id
::bf.v1/project-id project-id)]
(bf.v1/import-files! cfg path)
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "OK CLONED"})
@ -305,7 +307,6 @@
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
(defn import-handler
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
(when-not (contains? params :file)
@ -316,26 +317,23 @@
(let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)
overwrite? (contains? params :overwrite)
migrate? (contains? params :migrate)
ignore-index-errors? (contains? params :ignore-index-errors)]
migrate? (contains? params :migrate)]
(when-not project-id
(ex/raise :type :validation
:code :missing-project
:hint "project not found"))
(binf/import!
(assoc cfg
::binf/input (-> params :file :path)
::binf/overwrite? overwrite?
::binf/migrate? migrate?
::binf/ignore-index-errors? ignore-index-errors?
::binf/profile-id profile-id
::binf/project-id project-id))
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "OK"}))
(let [path (-> params :file :path)
cfg (assoc cfg
::bf.v1/overwrite overwrite?
::bf.v1/migrate migrate?
::bf.v1/profile-id profile-id
::bf.v1/project-id project-id)]
(bf.v1/import-files! cfg path)
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "OK"})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ACTIONS

View file

@ -9,11 +9,10 @@
(:refer-clojure :exclude [tap])
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.transit :as t]
[app.http.errors :as errors]
[promesa.core :as p]
[app.util.events :as events]
[promesa.exec :as px]
[promesa.exec.csp :as sp]
[promesa.util :as pu]
@ -21,26 +20,12 @@
(:import
java.io.OutputStream))
(def ^:dynamic *channel* nil)
(defn- write!
[^OutputStream output ^bytes data]
[^OutputStream output ^bytes data]
(l/trc :hint "writting data" :data data :length (alength data))
(.write output data)
(.flush output))
(defn- create-writer-loop
[^OutputStream output]
(try
(loop []
(when-let [event (sp/take! *channel*)]
(let [result (ex/try! (write! output event))]
(if (ex/exception? result)
(l/wrn :hint "unexpected exception on sse writer" :cause result)
(recur)))))
(finally
(pu/close! output))))
(defn- encode
[[name data]]
(try
@ -61,13 +46,6 @@
"Cache-Control" "no-cache, no-store, max-age=0, must-revalidate"
"Pragma" "no-cache"})
(defn tap
([data] (tap "event" data))
([name data]
(when-let [channel *channel*]
(sp/put! channel [name data])
nil)))
(defn response
[handler & {:keys [buf] :or {buf 32} :as opts}]
(fn [request]
@ -75,15 +53,18 @@
::rres/status 200
::rres/body (reify rres/StreamableResponseBody
(-write-body-to-stream [_ _ output]
(binding [*channel* (sp/chan :buf buf :xf (keep encode))]
(let [writer (px/run! :virtual (partial create-writer-loop output))]
(binding [events/*channel* (sp/chan :buf buf :xf (keep encode))]
(let [listener (events/start-listener
(partial write! output)
(partial pu/close! output))]
(try
(tap "end" (handler))
(let [result (handler)]
(events/tap :end result))
(catch Throwable cause
(binding [l/*context* (errors/request->context request)]
(l/err :hint "unexpected error process streaming response"
:cause cause))
(tap "error" (errors/handle' cause request)))
(events/tap :error (errors/handle' cause request)))
(finally
(sp/close! *channel*)
(p/await! writer)))))))}))
(sp/close! events/*channel*)
(px/await! listener)))))))}))

File diff suppressed because it is too large Load diff

View file

@ -7,51 +7,83 @@
(ns app.rpc.commands.management
"A collection of RPC methods for manage the files, projects and team organization."
(:require
[app.common.data :as d]
[app.binfile.common :as bfc]
[app.binfile.v1 :as bf.v1]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.files.migrations :as pmg]
[app.common.schema :as sm]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.features.fdata :as feat.fdata]
[app.http.sse :as sse]
[app.loggers.webhooks :as-alias webhooks]
[app.rpc :as-alias rpc]
[app.rpc.commands.binfile :as binfile]
[app.rpc.commands.files :as files]
[app.rpc.commands.projects :as proj]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.setup :as-alias setup]
[app.setup.templates :as tmpl]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.walk :as walk]
[promesa.core :as p]
[promesa.exec :as px]))
(defn- index-row
[index obj]
(assoc index (:id obj) (uuid/next)))
(defn- lookup-index
[id index]
(get index id id))
(defn- remap-id
[item index key]
(cond-> item
(contains? item key)
(update key lookup-index index)))
;; --- COMMAND: Duplicate File
(declare duplicate-file)
(defn duplicate-file
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
(let [;; We don't touch the original file on duplication
file (bfc/get-file cfg file-id)
project-id (:project-id file)
file (-> file
(update :id bfc/lookup-index)
(update :project-id bfc/lookup-index)
(cond-> (string? name)
(assoc :name name))
(cond-> (true? reset-shared-flag)
(assoc :is-shared false)))
flibs (bfc/get-files-rels cfg #{file-id})
fmeds (bfc/get-file-media cfg file)]
(when (uuid? profile-id)
(proj/check-edition-permissions! conn profile-id project-id))
(vswap! bfc/*state* update :index bfc/update-index fmeds :id)
;; Process and persist file
(let [file (->> (bfc/process-file file)
(bfc/persist-file! cfg))]
;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(db/insert! conn :file-profile-rel
{:file-id (:id file)
:profile-id profile-id
:is-owner true
:is-admin true
:can-edit true}
{::db/return-keys? false}))
(doseq [params (sequence (comp
(map #(bfc/remap-id % :file-id))
(map #(bfc/remap-id % :library-file-id))
(map #(assoc % :synced-at timestamp))
(map #(assoc % :created-at timestamp)))
flibs)]
(db/insert! conn :file-library-rel params ::db/return-keys false))
(doseq [params (sequence (comp
(map #(bfc/remap-id % :id))
(map #(assoc % :created-at timestamp))
(map #(bfc/remap-id % :file-id)))
fmeds)]
(db/insert! conn :file-media-object params ::db/return-keys false))
file)))
(def ^:private
schema:duplicate-file
@ -69,178 +101,51 @@
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(let [params (-> params
(assoc :index {file-id (uuid/next)})
(assoc :profile-id profile-id)
(assoc ::reset-shared-flag? true))]
(duplicate-file cfg params)))))
(defn- process-file
[cfg index {:keys [id] :as file}]
(letfn [(process-form [form]
(cond-> form
;; Relink library items
(and (map? form)
(uuid? (:component-file form)))
(update :component-file #(get index % %))
(and (map? form)
(uuid? (:fill-color-ref-file form)))
(update :fill-color-ref-file #(get index % %))
(and (map? form)
(uuid? (:stroke-color-ref-file form)))
(update :stroke-color-ref-file #(get index % %))
(and (map? form)
(uuid? (:typography-ref-file form)))
(update :typography-ref-file #(get index % %))
;; Relink Image Shapes
(and (map? form)
(map? (:metadata form))
(= :image (:type form)))
(update-in [:metadata :id] #(get index % %))))
;; A function responsible to analyze all file data and
;; replace the old :component-file reference with the new
;; ones, using the provided file-index
(relink-shapes [data]
(walk/postwalk process-form data))
;; A function responsible of process the :media attr of file
;; data and remap the old ids with the new ones.
(relink-media [media]
(reduce-kv (fn [res k v]
(let [id (get index k)]
(if (uuid? id)
(-> res
(assoc id (assoc v :id id))
(dissoc k))
res)))
media
media))
(process-file [{:keys [id] :as file}]
(-> file
(update :data assoc :id id)
(pmg/migrate-file)
(update :data (fn [data]
(-> data
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(d/without-nils))))))]
(let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> file
(update :id lookup-index index)
(update :project-id lookup-index index)
(update :data feat.fdata/process-pointers deref)
(process-file)))
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg (:id file))
file))
file)]
file)))
(defn duplicate-file
[{:keys [::db/conn] :as cfg} {:keys [profile-id index file-id name ::reset-shared-flag?]}]
(let [;; We don't touch the original file on duplication
file (files/get-file cfg file-id :migrate? false)
;; We only check permissions if profile-id is present; it can
;; be omited when this function is called from SREPL helpers
_ (when (uuid? profile-id)
(proj/check-edition-permissions! conn profile-id (:project-id file)))
flibs (let [sql (str "SELECT flr.* "
" FROM file_library_rel AS flr "
" JOIN file AS l ON (flr.library_file_id = l.id) "
" WHERE flr.file_id = ? AND l.deleted_at is null")]
(db/exec! conn [sql file-id]))
fmeds (let [sql (str "SELECT fmo.* "
" FROM file_media_object AS fmo "
" JOIN storage_object AS so ON (fmo.media_id = so.id) "
" WHERE fmo.file_id = ? AND so.deleted_at is null")]
(db/exec! conn [sql file-id]))
;; memo uniform creation/modification date
now (dt/now)
ignore (dt/plus now (dt/duration {:seconds 5}))
;; add to the index all file media objects.
index (reduce index-row index fmeds)
flibs-xf (comp
(map #(remap-id % index :file-id))
(map #(remap-id % index :library-file-id))
(map #(assoc % :synced-at now))
(map #(assoc % :created-at now)))
;; remap all file-library-rel row
flibs (sequence flibs-xf flibs)
fmeds-xf (comp
(map #(assoc % :id (get index (:id %))))
(map #(assoc % :created-at now))
(map #(remap-id % index :file-id)))
;; remap all file-media-object rows
fmeds (sequence fmeds-xf fmeds)
file (cond-> file
(string? name)
(assoc :name name)
(true? reset-shared-flag?)
(assoc :is-shared false))
file (-> file
(assoc :created-at now)
(assoc :modified-at now)
(assoc :ignore-sync-until ignore))
file (process-file cfg index file)]
(db/insert! conn :file
(-> file
(update :features #(db/create-array conn "text" %))
(update :data blob/encode))
{::db/return-keys false})
;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(db/insert! conn :file-profile-rel
{:file-id (:id file)
:profile-id profile-id
:is-owner true
:is-admin true
:can-edit true}
{::db/return-keys? false}))
(doseq [params flibs]
(db/insert! conn :file-library-rel params ::db/return-keys false))
(doseq [params fmeds]
(db/insert! conn :file-media-object params ::db/return-keys false))
file))
(binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
(duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
(-> params
(assoc :profile-id profile-id)
(assoc :reset-shared-flag true)))))))
;; --- COMMAND: Duplicate Project
(declare duplicate-project)
(defn duplicate-project
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id project-id name] :as params}]
(binding [bfc/*state* (volatile! {:index {project-id (uuid/next)}})]
(let [project (-> (db/get-by-id conn :project project-id)
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :is-pinned false)
(update :id bfc/lookup-index)
(cond-> (string? name)
(assoc :name name)))
files (bfc/get-project-files cfg project-id)]
;; Update index with the project files and the project-id
(vswap! bfc/*state* update :index bfc/update-index files)
;; Check if the source team-id allow creating new project for current user
(teams/check-edition-permissions! conn profile-id (:team-id project))
;; create the duplicated project and assign the current profile as
;; a project owner
(let [project (teams/create-project conn project)]
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))
(doseq [file-id files]
(let [params (-> params
(dissoc :name)
(assoc :file-id file-id)
(assoc :reset-shared-flag false))]
(duplicate-file cfg params)))
project))))
(def ^:private
schema:duplicate-project
@ -256,54 +161,13 @@
::sm/params schema:duplicate-project}
[cfg {:keys [::rpc/profile-id] :as params}]
(db/tx-run! cfg (fn [cfg]
;; Defer all constraints
;; Defer all constraints
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
(duplicate-project cfg (assoc params :profile-id profile-id)))))
(defn duplicate-project
[{:keys [::db/conn] :as cfg} {:keys [profile-id project-id name] :as params}]
(let [project (-> (db/get-by-id conn :project project-id)
(assoc :is-pinned false))
files (db/query conn :file
{:project-id project-id
:deleted-at nil}
{:columns [:id]})
index (reduce index-row {project-id (uuid/next)} files)
project (cond-> project
(string? name)
(assoc :name name)
:always
(update :id lookup-index index))]
;; Check if the source team-id allow creating new project for current user
(teams/check-edition-permissions! conn profile-id (:team-id project))
;; create the duplicated project and assign the current profile as
;; a project owner
(teams/create-project conn project)
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))
(doseq [{:keys [id] :as file} files]
(let [params (-> params
(dissoc :name)
(assoc :file-id id)
(assoc :index index)
(assoc ::reset-shared-flag? false))]
(duplicate-file cfg params)))
project))
(-> (assoc cfg ::bfc/timestamp (dt/now))
(duplicate-project (assoc params :profile-id profile-id))))))
(defn duplicate-team
[{:keys [::db/conn] :as cfg} & {:keys [profile-id team-id name] :as params}]
[{:keys [::db/conn ::bfc/timestamp] :as cfg} & {:keys [profile-id team-id name] :as params}]
;; Check if the source team-id allowed to be read by the user if
;; profile-id is present; it can be ommited if this function is
@ -311,92 +175,79 @@
(when (uuid? profile-id)
(teams/check-read-permissions! conn profile-id team-id))
(let [projs (db/query conn :project
{:team-id team-id})
(binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
(let [projs (bfc/get-team-projects cfg team-id)
files (bfc/get-team-files cfg team-id)
frels (bfc/get-files-rels cfg files)
files (let [sql (str "SELECT f.id "
" FROM file AS f "
" JOIN project AS p ON (p.id = f.project_id) "
" WHERE p.team_id = ? "
" AND p.deleted_at IS NULL "
" AND f.deleted_at IS NULL")]
(db/exec! conn [sql team-id]))
team (-> (db/get-by-id conn :team team-id)
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(update :id bfc/lookup-index)
(cond-> (string? name)
(assoc :name name)))
trels (db/query conn :team-profile-rel
{:team-id team-id})
fonts (db/query conn :team-font-variant
{:team-id team-id})]
prels (let [sql (str "SELECT r.* FROM project_profile_rel AS r "
" JOIN project AS p ON (r.project_id = p.id) "
" WHERE p.team_id = ?")]
(db/exec! conn [sql team-id]))
(vswap! bfc/*state* update :index
(fn [index]
(-> index
(bfc/update-index projs)
(bfc/update-index files)
(bfc/update-index fonts :id))))
;; FIXME: disallow clone default team
;; Create the new team in the database
(db/insert! conn :team team)
fonts (db/query conn :team-font-variant
{:team-id team-id})
;; Duplicate team <-> profile relations
(doseq [params frels]
(let [params (-> params
(assoc :id (uuid/next))
(update :team-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(db/insert! conn :team-profile-rel params
{::db/return-keys false})))
index (as-> {team-id (uuid/next)} index
(reduce index-row index projs)
(reduce index-row index files)
(reduce index-row index fonts))
;; Duplicate team fonts
(doseq [font fonts]
(let [params (-> font
(update :id bfc/lookup-index)
(update :team-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(db/insert! conn :team-font-variant params
{::db/return-keys false})))
team (db/get-by-id conn :team team-id)
team (cond-> team
(string? name)
(assoc :name name)
;; Duplicate projects; We don't reuse the `duplicate-project`
;; here because we handle files duplication by whole team
;; instead of by project and we want to preserve some project
;; props which are reset on the `duplicate-project` impl
(doseq [project-id projs]
(let [project (db/get conn :project {:id project-id})
project (-> project
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(update :id bfc/lookup-index)
(update :team-id bfc/lookup-index))]
(teams/create-project conn project)
:always
(update :id lookup-index index))]
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))))
;; FIXME: disallow clone default team
(doseq [file-id files]
(let [params (-> params
(dissoc :name)
(assoc :file-id file-id)
(assoc :reset-shared-flag false))]
(duplicate-file cfg params)))
;; Create the new team in the database
(db/insert! conn :team team)
;; Duplicate team <-> profile relations
(doseq [params trels]
(let [params (-> params
(assoc :id (uuid/next))
(update :team-id lookup-index index))]
(db/insert! conn :team-profile-rel params
{::db/return-keys? false})))
;; Duplucate team fonts
(doseq [font fonts]
(let [params (-> font
(update :id lookup-index index)
(update :team-id lookup-index index))]
(db/insert! conn :team-font-variant params
{::db/return-keys? false})))
;; Create all the projects in the database
(doseq [project projs]
(let [project (-> project
(update :id lookup-index index)
(update :team-id lookup-index index))]
(teams/create-project conn project)
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))))
;; Duplicate project <-> profile relations
(doseq [params prels]
(let [params (-> params
(assoc :id (uuid/next))
(update :project-id lookup-index index))]
(db/insert! conn :project-profile-rel params)))
(doseq [file-id (map :id files)]
(let [params (-> params
(dissoc :name)
(assoc :index index)
(assoc :file-id file-id)
(assoc ::reset-shared-flag? false))]
(duplicate-file cfg params)))
team))
team)))
;; --- COMMAND: Move file
@ -545,6 +396,19 @@
;; --- COMMAND: Clone Template
(defn- clone-template
[{:keys [::wrk/executor ::bf.v1/project-id] :as cfg} template]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
;; NOTE: the importation process performs some operations that
;; are not very friendly with virtual threads, and for avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that operation to a dedicated executor.
(let [result (px/submit! executor (partial bf.v1/import-files! cfg template))]
(db/update! conn :project
{:modified-at (dt/now)}
{:id project-id})
(deref result)))))
(def ^:private
schema:clone-template
(sm/define
@ -552,8 +416,6 @@
[:project-id ::sm/uuid]
[:template-id ::sm/word-string]]))
(declare ^:private clone-template)
(sv/defmethod ::clone-template
"Clone into the specified project the template by its id."
{::doc/added "1.16"
@ -565,33 +427,14 @@
_ (teams/check-edition-permissions! pool profile-id (:team-id project))
template (tmpl/get-template-stream cfg template-id)
params (-> cfg
(assoc ::binfile/input template)
(assoc ::binfile/project-id (:id project))
(assoc ::binfile/profile-id profile-id)
(assoc ::binfile/ignore-index-errors? true)
(assoc ::binfile/migrate? true))]
(assoc ::bf.v1/project-id (:id project))
(assoc ::bf.v1/profile-id profile-id))]
(when-not template
(ex/raise :type :not-found
:code :template-not-found
:hint "template not found"))
(sse/response #(clone-template params))))
(defn- clone-template
[{:keys [::wrk/executor ::binfile/project-id] :as params}]
(db/tx-run! params
(fn [{:keys [::db/conn] :as params}]
;; NOTE: the importation process performs some operations that
;; are not very friendly with virtual threads, and for avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that operation to a dedicated executor.
(let [result (p/thread-call executor (partial binfile/import! params))]
(db/update! conn :project
{:modified-at (dt/now)}
{:id project-id})
(deref result)))))
(sse/response #(clone-template params template))))
;; --- COMMAND: Get list of builtin templates

View file

@ -416,14 +416,16 @@
;; namespace too.
(defn create-project
[conn {:keys [id team-id name is-default] :as params}]
[conn {:keys [id team-id name is-default created-at modified-at]}]
(let [id (or id (uuid/next))
is-default (if (boolean? is-default) is-default false)]
(db/insert! conn :project
{:id id
:name name
:team-id team-id
:is-default is-default})))
is-default (if (boolean? is-default) is-default false)
params {:id id
:name name
:team-id team-id
:is-default is-default
:created-at created-at
:modified-at modified-at}]
(db/insert! conn :project (d/without-nils params))))
(defn create-project-role
[conn profile-id project-id role]

View file

@ -13,7 +13,8 @@
[app.db :as db]
[app.main :as main]
[app.rpc.commands.auth :as cmd.auth]
[app.srepl.components-v2]
[app.srepl.components-v2 :refer [migrate-teams!]]
[app.util.events :as events]
[app.util.json :as json]
[app.util.time :as dt]
[cuerdas.core :as str]))
@ -106,25 +107,36 @@
(defmethod exec-command :migrate-v2
[_]
(letfn [(on-start [{:keys [total rollback]}]
(println
(str/ffmt "The components/v2 migration started (rollback:%, teams:%)"
(if rollback "on" "off")
total)))
(letfn [(on-progress-report [{:keys [elapsed completed errors]}]
(println (str/ffmt "-> Progress: completed: %, errors: %, elapsed: %"
completed errors elapsed)))
(on-progress [{:keys [op name]}]
(case op
:migrate-team
(println (str/ffmt "-> Migrating team: \"%\"" name))
:migrate-file
(println (str/ffmt "=> Migrating file: \"%\"" name))
nil))
(on-event [[type payload]]
(case type
:progress-report (on-progress-report payload)
:progress (on-progress payload)
:error (on-error payload)
nil))
(on-progress [{:keys [total elapsed progress completed]}]
(println (str/ffmt "Progress % (total: %, completed: %, elapsed: %)"
progress total completed elapsed)))
(on-error [cause]
(println "ERR:" (ex-message cause)))
(println "EE:" (ex-message cause)))]
(on-end [_]
(println "Migration finished"))]
(app.srepl.components-v2/migrate-teams! main/system
:on-start on-start
:on-error on-error
:on-progress on-progress
:on-end on-end)))
(println "The components/v2 migration started...")
(try
(let [result (-> (partial migrate-teams! main/system {:rollback? true})
(events/run-with! on-event))]
(println (str/ffmt "Migration process finished (elapsed: %)" (:elapsed result))))
(catch Throwable cause
(on-error cause)))))
(defmethod exec-command :default
[{:keys [::cmd]}]

View file

@ -8,13 +8,13 @@
(:require
[app.common.data :as d]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.uuid :as uuid]
[app.db :as db]
[app.features.components-v2 :as feat]
[app.main :as main]
[app.svgo :as svgo]
[app.util.cache :as cache]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[cuerdas.core :as str]
@ -29,32 +29,30 @@
;; PRIVATE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- print-stats!
[stats]
(->> stats
(into (sorted-map))
(pp/pprint)))
(defn- report-progress-files
[tpoint]
(fn [_ _ oldv newv]
(when (not= (:processed/files oldv)
(:processed/files newv))
(when (not= (:processed-files oldv)
(:processed-files newv))
(let [elapsed (tpoint)]
(l/dbg :hint "progress"
:completed (:processed/files newv)
:completed (:processed-files newv)
:elapsed (dt/format-duration elapsed))))))
(defn- report-progress-teams
[tpoint on-progress]
[tpoint]
(fn [_ _ oldv newv]
(when (not= (:processed/teams oldv)
(:processed/teams newv))
(let [completed (:processed/teams newv)
(when (or (not= (:processed-teams oldv)
(:processed-teams newv))
(not= (:errors oldv)
(:errors newv)))
(let [completed (:processed-teams newv 0)
errors (:errors newv 0)
elapsed (dt/format-duration (tpoint))]
(when (fn? on-progress)
(on-progress {:elapsed elapsed
:completed completed}))
(events/tap :progress-report
{:elapsed elapsed
:completed completed
:errors errors})
(l/dbg :hint "progress"
:completed completed
:elapsed elapsed)))))
@ -235,10 +233,10 @@
(feat/migrate-team! team-id
:label label
:validate? validate?
:skip-on-graphic-error? skip-on-graphic-error?))
(print-stats!
(-> (deref feat/*stats*)
(assoc :elapsed (dt/format-duration (tpoint)))))
:skip-on-graphics-error? skip-on-graphic-error?))
(-> (deref feat/*stats*)
(assoc :elapsed (dt/format-duration (tpoint))))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause))
@ -261,8 +259,8 @@
a correct `:label`. That label is also used for persist a file
snaphot before continue with the migration."
[& {:keys [max-jobs max-items max-time rollback? validate? query
pred max-procs cache on-start on-progress on-error on-end
skip-on-graphic-error? label partitions current-partition]
pred max-procs cache skip-on-graphic-error?
label partitions current-partition]
:or {validate? false
rollback? true
max-jobs 1
@ -310,6 +308,14 @@
(l/wrn :hint "unexpected error on processing team (skiping)"
:team-id (str team-id)
:cause cause)
(events/tap :error
(ex-info "unexpected error on processing team (skiping)"
{:team-id team-id}
cause))
(swap! stats update :errors (fnil inc 0))
(when (string? label)
(report! main/system team-id label (tpoint) (ex-message cause))))
@ -336,15 +342,12 @@
:max-jobs max-jobs
:max-items max-items)
(add-watch stats :progress-report (report-progress-teams tpoint on-progress))
(add-watch stats :progress-report (report-progress-teams tpoint))
(binding [feat/*stats* stats
feat/*cache* cache
svgo/*semaphore* sprocs]
(try
(when (fn? on-start)
(on-start {:rollback rollback?}))
(when (string? label)
(create-report-table! main/system)
(clean-reports! main/system label))
@ -367,20 +370,12 @@
;; Close and await tasks
(pu/close! executor)))
(if (fn? on-end)
(-> (deref stats)
(assoc :elapsed/total (tpoint))
(on-end))
(-> (deref stats)
(assoc :elapsed/total (tpoint))
(update :elapsed/total dt/format-duration)
(dissoc :total/teams)
(print-stats!)))
(-> (deref stats)
(assoc :elapsed (dt/format-duration (tpoint))))
(catch Throwable cause
(l/dbg :hint "migrate:error" :cause cause)
(when (fn? on-error)
(on-error cause)))
(events/tap :error cause))
(finally
(let [elapsed (dt/format-duration (tpoint))]

View file

@ -9,6 +9,7 @@
#_:clj-kondo/ignore
(:require
[app.auth :refer [derive-password]]
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.features :as cfeat]
@ -336,4 +337,5 @@
(db/tx-run! main/system
(fn [cfg]
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
(mgmt/duplicate-team cfg :team-id team-id :name name)))))
(-> (assoc cfg ::bfc/timestamp (dt/now))
(mgmt/duplicate-team :team-id team-id :name name))))))

View file

@ -10,6 +10,7 @@
file is eligible to be garbage collected after some period of
inactivity (the default threshold is 72h)."
(:require
[app.binfile.common :as bfc]
[app.common.files.migrations :as pmg]
[app.common.logging :as l]
[app.common.thumbnails :as thc]
@ -99,35 +100,6 @@
(->> (db/cursor conn [sql:get-candidates min-age] {:chunk-size 1})
(map #(update % :features db/decode-pgarray #{}))))))
(defn collect-used-media
"Given a fdata (file data), returns all media references."
[data]
(let [xform (comp
(map :objects)
(mapcat vals)
(mapcat (fn [obj]
;; NOTE: because of some bug, we ended with
;; many shape types having the ability to
;; have fill-image attribute (which initially
;; designed for :path shapes).
(sequence
(keep :id)
(concat [(:fill-image obj)
(:metadata obj)]
(map :fill-image (:fills obj))
(map :stroke-image (:strokes obj))
(->> (:content obj)
(tree-seq map? :children)
(mapcat :fills)
(map :fill-image)))))))
pages (concat
(vals (:pages-index data))
(vals (:components data)))]
(-> #{}
(into xform pages)
(into (keys (:media data))))))
(def ^:private sql:mark-file-media-object-deleted
"UPDATE file_media_object
SET deleted_at = now()
@ -137,7 +109,7 @@
(defn- clean-file-media!
"Performs the garbage collection of file media objects."
[conn file-id data]
(let [used (collect-used-media data)
(let [used (bfc/collect-used-media data)
ids (db/create-array conn "uuid" used)
unused (->> (db/exec! conn [sql:mark-file-media-object-deleted file-id ids])
(into #{} (map :id)))]

View file

@ -0,0 +1,64 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.util.events
"A generic asynchronous events notifications subsystem; used mainly
for mark event points in functions and be able to attach listeners
to them. Mainly used in http.sse for progress reporting."
(:refer-clojure :exclude [tap run!])
(:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[promesa.exec :as px]
[promesa.exec.csp :as sp]))
(def ^:dynamic *channel* nil)
(defn channel
[]
(sp/chan :buf 32))
(defn tap
[type data]
(when-let [channel *channel*]
(sp/put! channel [type data])
nil))
(defn start-listener
[on-event on-close]
(dm/assert!
"expected active events channel"
(sp/chan? *channel*))
(px/thread
{:virtual true}
(try
(loop []
(when-let [event (sp/take! *channel*)]
(let [result (ex/try! (on-event event))]
(if (ex/exception? result)
(do
(l/wrn :hint "unexpected exception" :cause result)
(sp/close! *channel*))
(recur)))))
(finally
(on-close)))))
(defn run-with!
"A high-level facility for to run a function in context of event
emiter."
[f on-event]
(binding [*channel* (sp/chan :buf 32)]
(let [listener (start-listener on-event (constantly nil))]
(try
(f)
(finally
(sp/close! *channel*)
(px/await! listener))))))

View file

@ -612,7 +612,7 @@
(t/is (fn? result))
(let [events (th/consume-sse result)]
(t/is (= 8 (count events)))
(t/is (= 6 (count events)))
(t/is (= :end (first (last events))))))))
(t/deftest get-list-of-buitin-templates

View file

@ -1013,7 +1013,7 @@
(rx/tap (fn [event]
(let [payload (sse/get-payload event)
type (sse/get-type event)]
(if (= type "event")
(if (= type "progress")
(log/dbg :hint "clone-template: progress" :section (:section payload) :name (:name payload))
(log/dbg :hint "clone-template: end")))))

View file

@ -414,8 +414,8 @@
(rx/mapcat
(fn [file]
(->> (rp/cmd! :export-binfile {:file-id (:id file)
:include-libraries? (= export-type :all)
:embed-assets? (= export-type :merge)})
:include-libraries (= export-type :all)
:embed-assets (= export-type :merge)})
(rx/map #(hash-map :type :finish
:file-id (:id file)
:filename (:name file)

View file

@ -735,7 +735,7 @@
(rx/tap (fn [event]
(let [payload (sse/get-payload event)
type (sse/get-type event)]
(if (= type "event")
(if (= type "progress")
(log/dbg :hint "import-binfile: progress" :section (:section payload) :name (:name payload))
(log/dbg :hint "import-binfile: end")))))
(rx/filter sse/end-of-stream?)