
Normalize file retrieving and persistence on binfile, file-gc and helpers

This commit is contained in:
Andrey Antukh 2025-01-23 19:10:53 +01:00
parent 74bdd72d2f
commit 541ed6282f
14 changed files with 222 additions and 278 deletions

View file

@ -25,6 +25,7 @@
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
@ -148,17 +149,30 @@
features (assoc :features (db/decode-pgarray features #{}))
data (assoc :data (blob/decode data))))
(defn decode-file
"A general purpose file decoding function that resolves all external
pointers, runs migrations and returns a plain vanilla file map"
[cfg {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> (feat.fdata/resolve-file-data cfg file)
(update :features db/decode-pgarray #{})
(update :data blob/decode)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(update :data assoc :id id)
(fmg/migrate-file))))
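For orientation, a minimal sketch (not part of the commit) of decoding a raw file row with the new helper; `cfg`, `conn` and `file-id` are assumed bindings carrying the usual system map, an open connection and a file uuid:
(comment
  (let [row  (db/get* conn :file {:id file-id} {::db/remove-deleted false})
        file (decode-file cfg row)]
    ;; :features is now a plain set and :data a fully decoded map with
    ;; pointers resolved, objects realized and migrations applied
    [(:features file) (keys (:data file))]))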
(defn get-file
[cfg file-id]
"Get file, resolve all features and apply migrations.
Useful when you plan to apply massive or non-surgical operations
on a file, because it removes the overhead of lazy fetching and
decoding."
[cfg file-id & {:as opts}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(when-let [file (db/get* conn :file {:id file-id}
{::db/remove-deleted false})]
(let [file (feat.fdata/resolve-file-data cfg file)]
(-> file
(decode-row)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {})))))))))
(some->> (db/get* conn :file {:id file-id}
(assoc opts ::db/remove-deleted false))
(decode-file cfg)))))
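A hedged usage sketch of the reworked get-file: a single call returns a fully decoded and migrated file map, ready for whole-file processing; `cfg` and `file-id` are assumed:
(comment
  ;; from a consumer namespace that requires [app.binfile.common :as bfc]
  (let [file (bfc/get-file cfg file-id)]
    ;; the returned :data map is plain (no lazy pointers), e.g.:
    (keys (:data file))))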
(defn clean-file-features
[file]
@ -306,19 +320,15 @@
file))
(defn get-file-media
[cfg {:keys [data id] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (cfh/collect-used-media data)
ids (db/create-array conn "uuid" ids)
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
(def sql:get-file-media
"SELECT * FROM file_media_object WHERE id = ANY(?)")
;; We assoc the file-id again to the file-media-object row
;; because there are cases where used objects refer to other
;; files, and we need to ensure in the exportation process that
;; all ids match
(->> (db/exec! conn [sql ids])
(mapv #(assoc % :file-id id)))))))
(defn get-file-media
[cfg {:keys [data] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [used (cfh/collect-used-media data)
used (db/create-array conn "uuid" used)]
(db/exec! conn [sql:get-file-media used])))))
(def ^:private sql:get-team-files-ids
"SELECT f.id FROM file AS f
@ -431,75 +441,96 @@
(update :colors relink-colors)
(d/without-nils))))))
(defn- upsert-file!
[conn file]
(let [sql (str "INSERT INTO file (id, project_id, name, revn, version, is_shared, data, created_at, modified_at) "
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) "
"ON CONFLICT (id) DO UPDATE SET data=?, version=?")]
(db/exec-one! conn [sql
(:id file)
(:project-id file)
(:name file)
(:revn file)
(:version file)
(:is-shared file)
(:data file)
(:created-at file)
(:modified-at file)
(:data file)
(:version file)])))
(defn- encode-file
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
(defn persist-file!
"Applies all the final validations and perist the file."
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)]
(-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))))
(defn- file->params
[file]
(let [params {:has-media-trimmed (:has-media-trimmed file)
:ignore-sync-until (:ignore-sync-until file)
:project-id (:project-id file)
:features (:features file)
:name (:name file)
:is-shared (:is-shared file)
:version (:version file)
:data (:data file)
:id (:id file)
:deleted-at (:deleted-at file)
:created-at (:created-at file)
:modified-at (:modified-at file)
:revn (:revn file)
:vern (:vern file)}]
(-> (d/without-nils params)
(assoc :data-backend nil)
(assoc :data-ref-id nil))))
(defn insert-file!
"Insert a new file into the database table"
[{:keys [::db/conn] :as cfg} file]
(let [params (-> (encode-file cfg file)
(file->params))]
(db/insert! conn :file params {::db/return-keys true})))
(defn update-file!
"Update an existing file on the database."
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file}]
(let [file (encode-file cfg file)
params (-> (file->params file)
(dissoc :id))]
;; If the file was already offloaded, we touch the underlying storage
;; object to properly trigger the storage-gc-touched task
(when (feat.fdata/offloaded? file)
(some->> (:data-ref-id file) (sto/touch-object! storage)))
(db/update! conn :file params {:id id} {::db/return-keys true})))
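Taken together, these helpers support a plain read-modify-write flow; a minimal sketch, assuming `cfg` carries the database pool and storage, that db/run!/db/tx-run! reuse the surrounding connection, and that the modification shown is purely illustrative:
(comment
  (db/tx-run! cfg
              (fn [cfg]
                (let [file (get-file cfg file-id)
                      ;; hypothetical in-place modification of the decoded data
                      file (update file :data assoc :modified-hint true)]
                  (update-file! cfg file)))))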
(defn save-file!
"Applies all the final validations and perist the file, binfile
specific, should not be used outside of binfile domain"
[{:keys [::timestamp] :as cfg} file]
(dm/assert!
"expected valid timestamp"
(dt/instant? timestamp))
(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg #{})
(set/union features)
;; We never want to store
;; frontend-only features on file
(set/difference cfeat/frontend-only-features))))))
(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg #{})
(set/union features)
;; We never want to store
;; frontend-only features on file
(set/difference cfeat/frontend-only-features))))))]
(when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))
_ (when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))
_ (when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)
params (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
(if (::overwrite cfg)
(upsert-file! conn params)
(db/insert! conn :file params ::db/return-keys false))
file))
(when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
(insert-file! cfg file)))
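An illustrative sketch of calling save-file! from a binfile consumer; the ::bfc/timestamp key (defaulted to dt/now elsewhere in the binfile code) is the one the function asserts on, and everything else here is an assumption:
(comment
  ;; in a namespace that requires [app.binfile.common :as bfc]
  (db/tx-run! cfg
              (fn [cfg]
                (-> cfg
                    (assoc ::bfc/timestamp (dt/now))
                    (bfc/save-file! file)))))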
(defn register-pending-migrations
"All features that are enabled and requires explicit migration are

View file

@ -424,21 +424,15 @@
(s/def ::bfc/profile-id ::us/uuid)
(s/def ::bfc/project-id ::us/uuid)
(s/def ::bfc/input io/input-stream?)
(s/def ::overwrite? (s/nilable ::us/boolean))
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))
;; FIXME: replace with schema
(s/def ::read-import-options
(s/keys :req [::db/pool ::sto/storage ::bfc/project-id ::bfc/profile-id ::bfc/input]
:opt [::overwrite? ::ignore-index-errors?]))
:opt [::ignore-index-errors?]))
(defn read-import!
"Do the importation of the specified resource in penpot custom binary
format. There are some options to customize the importation
behavior:
`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
it reuses all ids and updates existing objects; defaults to `false`."
format."
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
(dm/assert!
@ -509,8 +503,7 @@
thumbnails))
(defmethod read-section :v1/files
[{:keys [::db/conn ::bfc/input ::bfc/project-id ::bfc/overwrite ::bfc/name] :as system}]
[{:keys [::bfc/input ::bfc/project-id ::bfc/name] :as system}]
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
(let [file (read-obj! input)
media (read-obj! input)
@ -568,10 +561,7 @@
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
(bfc/persist-file! system file)
(when overwrite
(db/delete! conn :file-thumbnail {:file-id file-id'}))
(bfc/save-file! system file)
file-id'))))
@ -600,7 +590,7 @@
::l/sync? true))))))
(defmethod read-section :v1/sobjects
[{:keys [::db/conn ::bfc/input ::bfc/overwrite ::bfc/timestamp] :as cfg}]
[{:keys [::db/conn ::bfc/input ::bfc/timestamp] :as cfg}]
(let [storage (sto/resolve cfg)
ids (read-obj! input)
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
@ -653,8 +643,7 @@
(-> item
(assoc :file-id file-id)
(d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index))
{::db/on-conflict-do-nothing? overwrite}))))
(d/update-when :thumbnail-id bfc/lookup-index))))))
(doseq [item (:thumbnails @bfc/*state*)]
(let [item (update item :media-id bfc/lookup-index)]
@ -663,8 +652,7 @@
:media-id (str (:media-id item))
:object-id (:object-id item)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail item
{::db/on-conflict-do-nothing? overwrite})))))
(db/insert! conn :file-tagged-object-thumbnail item)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL API

View file

@ -297,7 +297,7 @@
(set/difference (:features file)))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
(bfc/persist-file! cfg file))
(bfc/save-file! cfg file))
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
(let [thumbnail (-> thumbnail

View file

@ -28,6 +28,7 @@
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.storage :as sto]
[app.storage.impl :as sto.impl]
[app.util.events :as events]
@ -212,18 +213,18 @@
(throw (IllegalArgumentException.
"the `include-libraries` and `embed-assets` are mutally excluding options")))
(let [detach? (and (not embed-assets) (not include-libraries))
file (bfc/get-file cfg file-id)]
(cond-> file
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
(let [detach? (and (not embed-assets) (not include-libraries))]
(db/tx-run! cfg (fn [cfg]
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
embed-assets
(update :data #(bfc/embed-assets cfg % file-id))
embed-assets
(update :data #(bfc/embed-assets cfg % file-id))
:always
(bfc/clean-file-features))))
:always
(bfc/clean-file-features))))))
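The opts map given to bfc/get-file above is forwarded to db/get*, so ::sql/for-update is assumed to lock the file row for the duration of the transaction; a minimal sketch of that pattern in isolation (`cfg` and `file-id` assumed):
(comment
  (db/tx-run! cfg
              (fn [cfg]
                ;; the row stays locked until the transaction commits or rolls back
                (bfc/get-file cfg file-id {::sql/for-update true}))))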
(defn- resolve-extension
[mtype]
@ -262,6 +263,7 @@
(defn- export-file
[{:keys [::file-id ::output] :as cfg}]
(let [file (get-file cfg file-id)
media (->> (bfc/get-file-media cfg file)
(map (fn [media]
(dissoc media :file-id))))
@ -684,7 +686,7 @@
:plugin-data plugin-data}))
(defn- import-file
[{:keys [::db/conn ::bfc/project-id ::file-id ::file-name] :as cfg}]
[{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
(let [file-id' (bfc/lookup-index file-id)
file (read-file cfg)
media (read-file-media cfg)
@ -734,10 +736,7 @@
(->> file
(bfc/register-pending-migrations cfg)
(bfc/persist-file! cfg))
(when (::bfc/overwrite cfg)
(db/delete! conn :file-thumbnail {:file-id file-id'}))
(bfc/save-file! cfg))
file-id')))
@ -832,8 +831,7 @@
:file-id (str (:file-id params))
::l/sync? true)
(db/insert! conn :file-media-object params
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
(db/insert! conn :file-media-object params))))
(defn- import-file-thumbnails
[{:keys [::db/conn] :as cfg}]
@ -853,8 +851,7 @@
:media-id (str media-id)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail params
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
(db/insert! conn :file-tagged-object-thumbnail params))))
(defn- import-files
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]

View file

@ -119,6 +119,7 @@
(sv/defmethod ::update-file
{::climit/id [[:update-file/by-profile ::rpc/profile-id]
[:update-file/global]]
::webhooks/event? true
::webhooks/batch-timeout (dt/duration "2m")
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)

View file

@ -57,7 +57,7 @@
;; Process and persist file
(let [file (->> (bfc/process-file file)
(bfc/persist-file! cfg))]
(bfc/save-file! cfg))]
;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less
@ -86,7 +86,7 @@
fmeds)]
(db/insert! conn :file-media-object params ::db/return-keys false))
file)))
(bfc/decode-file cfg file))))
(def ^:private
schema:duplicate-file

View file

@ -272,6 +272,7 @@
(reduce +)))
num-missing-slots (count-slots-data (:data file))]
(when (pos? num-missing-slots)
(l/trc :info (str "Shapes with children with the same swap slot: " num-missing-slots) :file-id (str (:id file))))
file))

View file

@ -8,17 +8,14 @@
"A main namespace for server repl."
(:refer-clojure :exclude [parse-uuid])
(:require
[app.binfile.common :refer [update-file! decode-file]]
[app.common.data :as d]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as cfv]
[app.db :as db]
[app.features.components-v2 :as feat.comp-v2]
[app.features.fdata :as feat.fdata]
[app.main :as main]
[app.rpc.commands.files :as files]
[app.rpc.commands.files-snapshot :as fsnap]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]))
[app.rpc.commands.files-snapshot :as fsnap]))
(def ^:dynamic *system* nil)
@ -35,49 +32,21 @@
(defn get-file
"Get the migrated data of one file."
([id] (get-file (or *system* main/system) id nil))
([system id & {:keys [raw?] :as opts}]
([id]
(get-file (or *system* main/system) id))
([system id]
(db/run! system
(fn [system]
(let [file (files/get-file system id :migrate? false)]
(if raw?
file
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
(-> file
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(fmg/migrate-file)))))))))
(->> (files/get-file system id :migrate? false)
(decode-file system))))))
(defn update-file!
[system {:keys [id] :as file}]
(let [conn (db/get-connection system)
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! system id)
file))
file)
file (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
(db/update! conn :file
{:revn (:revn file)
:data (:data file)
:version (:version file)
:features (:features file)
:deleted-at (:deleted-at file)
:created-at (:created-at file)
:modified-at (:modified-at file)
:data-backend nil
:data-ref-id nil
:has-media-trimmed false}
{:id (:id file)})))
(defn get-raw-file
"Get the migrated data of one file."
([id] (get-raw-file (or *system* main/system) id))
([system id]
(db/run! system
(fn [system]
(files/get-file system id :migrate? false)))))
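A hedged REPL sketch combining the two refactored helpers; it assumes a running system in main/system, a bound file-id, and that db/run! inside get-file reuses the transaction connection provided by db/tx-run!:
(comment
  (db/tx-run! (or *system* main/system)
              (fn [system]
                (let [file (get-file system file-id)]
                  ;; bump the revision and persist through the shared helper
                  (update-file! system (update file :revn inc))))))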
(defn update-team!
[system {:keys [id] :as team}]
@ -90,14 +59,6 @@
{:id id})
team))
(defn get-raw-file
"Get the migrated data of one file."
([id] (get-raw-file (or *system* main/system) id))
([system id]
(db/run! system
(fn [system]
(files/get-file system id :migrate? false)))))
(defn reset-file-data!
"Hardcode replace of the data of one file."
[system id data]
@ -167,7 +128,7 @@
(fsnap/create-file-snapshot! system nil file-id label))
(let [conn (db/get-connection system)
file (get-file system file-id opts)
file (get-file system file-id)
libs (when with-libraries?
(->> (files/get-file-libraries conn file-id)
(into [file] (map (fn [{:keys [id]}]
@ -180,7 +141,9 @@
(when (and (some? file')
(not (identical? file file')))
(when validate? (cfv/validate-file-schema! file'))
(when validate?
(cfv/validate-file-schema! file'))
(let [file' (update file' :revn inc)]
(update-file! system file')
true))))

View file

@ -10,8 +10,8 @@
file is eligible to be garbage collected after some period of
inactivity (the default threshold is 72h)."
(:require
[app.binfile.common :as bfc]
[app.common.files.helpers :as cfh]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as cfv]
[app.common.logging :as l]
[app.common.thumbnails :as thc]
@ -22,28 +22,25 @@
[app.db :as db]
[app.features.fdata :as feat.fdata]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[app.worker :as wrk]
[integrant.core :as ig]))
(declare ^:private get-file)
(declare ^:private decode-file)
(declare ^:private persist-file!)
(declare get-file)
(def ^:private sql:get-snapshots
"SELECT f.file_id AS id,
f.data,
f.revn,
f.version,
f.features,
f.data_backend,
f.data_ref_id
FROM file_change AS f
WHERE f.file_id = ?
AND f.data IS NOT NULL
ORDER BY f.created_at ASC")
(def sql:get-snapshots
"SELECT fc.file_id AS id,
fc.id AS snapshot_id,
fc.data,
fc.revn,
fc.version,
fc.features,
fc.data_backend,
fc.data_ref_id
FROM file_change AS fc
WHERE fc.file_id = ?
AND fc.data IS NOT NULL
ORDER BY fc.created_at ASC")
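For reference, a sketch of how this query is consumed a few lines below: snapshot rows are streamed with db/plan, decoded through the shared bfc/decode-file, and reduced into the set of used media ids (names as in the surrounding code; `conn`, `cfg` and `file-id` assumed):
(comment
  (->> (db/plan conn [sql:get-snapshots file-id])
       (transduce (comp (map (partial bfc/decode-file cfg))
                        xf:collect-used-media)
                  conj
                  #{})))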
(def ^:private sql:mark-file-media-object-deleted
"UPDATE file_media_object
@ -51,7 +48,7 @@
WHERE file_id = ? AND id != ALL(?::uuid[])
RETURNING id")
(def ^:private xf:collect-used-media
(def xf:collect-used-media
(comp
(map :data)
(mapcat cfh/collect-used-media)))
@ -60,7 +57,7 @@
"Performs the garbage collection of file media objects."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [xform (comp
(map (partial decode-file cfg))
(map (partial bfc/decode-file cfg))
xf:collect-used-media)
used (->> (db/plan conn [sql:get-snapshots id])
@ -149,8 +146,6 @@
AND f.deleted_at IS null
ORDER BY f.modified_at ASC")
(def ^:private xf:map-id (map :id))
(defn- get-used-components
"Given a file and a set of components marked for deletion, return a
filtered set of component ids that are still in use"
@ -170,14 +165,14 @@
used-remote
(->> (db/plan conn [sql:get-files-for-library file-id])
(transduce (comp (map (partial decode-file cfg)) xform) conj #{}))
(transduce (comp (map (partial bfc/decode-file cfg)) xform) conj #{}))
used-local
(into #{} xform [file])
unused
(transduce xf:map-id disj
(into #{} xf:map-id deleted-components)
(transduce bfc/xf-map-id disj
(into #{} bfc/xf-map-id deleted-components)
(concat used-remote used-local))
file
@ -204,31 +199,32 @@
(def ^:private xf:collect-pointers
(comp (map :data)
(map blob/decode)
(mapcat feat.fdata/get-used-pointer-ids)))
(defn- clean-data-fragments!
(defn- clean-fragments!
[{:keys [::db/conn]} {:keys [id] :as file}]
(let [used (into #{} xf:collect-pointers [file])
unused (let [ids (db/create-array conn "uuid" used)]
(->> (db/exec! conn [sql:mark-deleted-data-fragments id ids])
(into #{} (map :id))))]
unused (->> (db/exec! conn [sql:mark-deleted-data-fragments id
(db/create-array conn "uuid" used)])
(into #{} bfc/xf-map-id))]
(l/dbg :hint "clean" :rel "file-data-fragment" :file-id (str id) :total (count unused))
(doseq [id unused]
(l/trc :hint "mark deleted"
:rel "file-data-fragment"
:id (str id)
:file-id (str id)))))
:file-id (str id)))
file))
(defn- clean-media!
[cfg file]
(let [file (->> file
(clean-deleted-components! cfg)
(clean-file-media! cfg)
(clean-file-thumbnails! cfg)
(clean-file-object-thumbnails! cfg)
(clean-deleted-components! cfg))]
(clean-file-object-thumbnails! cfg))]
(cfv/validate-file-schema! file)
file))
@ -249,65 +245,28 @@
FOR UPDATE
SKIP LOCKED")
(defn- get-file
[{:keys [::db/conn ::min-age ::file-id]}]
(->> (db/exec! conn [sql:get-file min-age file-id])
(first)))
(defn get-file
[{:keys [::db/conn ::min-age]} file-id]
(let [min-age (if min-age
(db/interval min-age)
(db/interval 0))]
(->> (db/exec! conn [sql:get-file min-age file-id])
(first))))
(defn- decode-file
[cfg {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> (feat.fdata/resolve-file-data cfg file)
(update :features db/decode-pgarray #{})
(update :data blob/decode)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(update :data assoc :id id)
(fmg/migrate-file))))
(defn- persist-file!
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file}]
(let [file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)
file (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
;; If the file was already offloaded, we touch the underlying storage
;; object to properly trigger the storage-gc-touched task
(when (feat.fdata/offloaded? file)
(some->> (:data-ref-id file) (sto/touch-object! storage)))
(db/update! conn :file
{:has-media-trimmed true
:features (:features file)
:version (:version file)
:data (:data file)
:data-backend nil
:data-ref-id nil}
{:id id}
{::db/return-keys true})))
(defn- process-file!
[cfg]
(if-let [file (get-file cfg)]
(let [file (decode-file cfg file)
file (clean-media! cfg file)
file (persist-file! cfg file)]
(clean-data-fragments! cfg file)
[cfg file-id]
(if-let [file (get-file cfg file-id)]
(let [file (->> file
(bfc/decode-file cfg)
(clean-media! cfg)
(clean-fragments! cfg))
file (assoc file :has-media-trimmed true)]
(bfc/update-file! cfg file)
true)
(do
(l/dbg :hint "skip" :file-id (str (::file-id cfg)))
(l/dbg :hint "skip" :file-id (str file-id))
false)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -324,15 +283,15 @@
(fn [{:keys [props] :as task}]
(let [min-age (dt/duration (or (:min-age props)
(cf/get-deletion-delay)))
file-id (get props :file-id)
cfg (-> cfg
(assoc ::db/rollback (:rollback? props))
(assoc ::file-id (:file-id props))
(assoc ::min-age (db/interval min-age)))]
(assoc ::min-age min-age))]
(try
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(let [cfg (update cfg ::sto/storage sto/configure conn)
processed? (process-file! cfg)]
processed? (process-file! cfg file-id)]
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
(wrk/submit! (-> cfg
(assoc ::wrk/task :offload-file-data)
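From the handler above, the task props it reads are :file-id, :min-age and :rollback?; an illustrative props map for a single-file run (values are assumptions; :min-age falls back to cf/get-deletion-delay when omitted):
{:file-id file-id    ;; uuid of the file to garbage-collect
 :rollback? false}   ;; presumably a dry-run switch wired to ::db/rollback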

View file

@ -16,6 +16,7 @@
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.storage :as sto]
[app.util.time :as dt]
[backend-tests.helpers :as th]
@ -1827,5 +1828,3 @@
(t/is (= (:id file-2) (:file-id (get rows 0))))
(t/is (nil? (:deleted-at (get rows 0)))))))

View file

@ -6,6 +6,7 @@
(ns backend-tests.rpc-management-test
(:require
[app.binfile.common :as bfc]
[app.common.features :as cfeat]
[app.common.pprint :as pp]
[app.common.types.shape :as cts]
@ -82,7 +83,6 @@
;; Check that result is correct
(t/is (nil? (:error out)))
(let [result (:result out)]
;; Check that the returned result is a file but has different id
;; and different name.
(t/is (= "file 1 (copy)" (:name result)))

View file

@ -571,7 +571,8 @@
:code :schema-validation
:hint (str/ffmt "invalid file data structure found on file '%'" id)
:file-id id
::sm/explain (get-fdata-explain data))))
::sm/explain (get-fdata-explain data)))
file)
(defn validate-file!
"Validate full referential integrity and semantic coherence on file data.

View file

@ -202,7 +202,8 @@
position
components-v2
(cond-> {}
force-frame? (assoc :force-frame-id frame-id)))
force-frame?
(assoc :force-frame-id frame-id)))
first-shape
(cond-> (first new-shapes)

View file

@ -347,11 +347,14 @@
Clone the shapes of the component, generating new names and ids, and
linking each new shape to the corresponding one of the
component. Place the new instance coordinates in the given
position."
([container component library-data position components-v2]
(make-component-instance container component library-data position components-v2 {}))
position.
([container component library-data position components-v2
WARNING: This process does not remap media references (on fills, strokes, ...); that is
delegated to an async process on the backend side that checks unreferenced shapes and
automatically creates correct references."
([page component library-data position components-v2]
(make-component-instance page component library-data position components-v2 {}))
([page component library-data position components-v2
{:keys [main-instance? force-id force-frame-id keep-ids?]
:or {main-instance? false force-id nil force-frame-id nil keep-ids? false}}]
(let [component-page (when components-v2
@ -367,7 +370,7 @@
orig-pos (gpt/point (:x component-shape) (:y component-shape))
delta (gpt/subtract position orig-pos)
objects (:objects container)
objects (:objects page)
unames (volatile! (cfh/get-used-names objects))
component-children
@ -384,7 +387,7 @@
(nil? (get component-children (:id %)))
;; We must avoid that destiny frame is inside a copy
(not (ctk/in-component-copy? %)))}))
frame (get-shape container frame-id)
frame (get-shape page frame-id)
component-frame (get-component-shape objects frame {:allow-main? true})
ids-map (volatile! {})
@ -437,7 +440,7 @@
:force-id force-id
:keep-ids? keep-ids?
:frame-id frame-id
:dest-objects (:objects container))
:dest-objects (:objects page))
;; Fix empty parent-id and remap all grid cells to the new ids.
remap-ids