Mirror of https://github.com/penpot/penpot.git
Merge pull request #5665 from penpot/niwinz-instantiate-component-bug-5
🎉 Add srepl fix function for process media refs on a file
Commit d7477cd448
21 changed files with 366 additions and 407 deletions
CHANGES.md (27 changes)
@@ -6,6 +6,33 @@

### :boom: Breaking changes & Deprecations

Although this is not a breaking change, we believe it’s important to highlight it in this
section:

This release includes a fix for an internal bug in Penpot that caused incorrect handling
of media assets (e.g., fill images). The issue has been resolved since version 2.4.3, so
no new incorrect references will be generated. However, existing files may still contain
incorrect references.

To address this, we’ve provided a script to correct these references in existing files.

While having incorrect references generally doesn’t result in visible issues, there are
rare cases where it can cause problems. For example, if a component library (containing
images) is deleted, and that library is being used in other files, running the FileGC task
(responsible for freeing up space and performing logical deletions) could leave those
files with broken references to the images.

To execute the script:

```bash
docker exec -ti <container-name-or-id> ./run.sh app.migrations.media-refs '{:max-jobs 1}'
```

If you have a big database and many cores available, you can reduce the total processing
time by increasing parallelization: change the `max-jobs` value from 1 to N, where N is
the number of available cores.
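
For example, on a host with several spare cores you could raise the parallelism; a
hypothetical invocation (only the `max-jobs` value differs from the command above):

```bash
docker exec -ti <container-name-or-id> ./run.sh app.migrations.media-refs '{:max-jobs 4}'
```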


### :heart: Community contributions (Thank you!)

### :sparkles: New features

@@ -25,6 +25,7 @@
   [app.features.fdata :as feat.fdata]
   [app.loggers.audit :as-alias audit]
   [app.loggers.webhooks :as-alias webhooks]
   [app.storage :as sto]
   [app.util.blob :as blob]
   [app.util.pointer-map :as pmap]
   [app.util.time :as dt]
@@ -148,17 +149,30 @@
features (assoc :features (db/decode-pgarray features #{}))
data (assoc :data (blob/decode data))))

(defn decode-file
"A general purpose file decoding function that resolves all external
pointers, runs migrations and returns a plain vanilla file map"
[cfg {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> (feat.fdata/resolve-file-data cfg file)
(update :features db/decode-pgarray #{})
(update :data blob/decode)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(update :data assoc :id id)
(fmg/migrate-file))))

(defn get-file
[cfg file-id]
"Get file, resolve all features and apply migrations.

Useful when you plan to apply massive or non-surgical operations on a file,
because it removes the overhead of lazy fetching and decoding."
[cfg file-id & {:as opts}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(when-let [file (db/get* conn :file {:id file-id}
{::db/remove-deleted false})]
(let [file (feat.fdata/resolve-file-data cfg file)]
(-> file
(decode-row)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {})))))))))
(some->> (db/get* conn :file {:id file-id}
(assoc opts ::db/remove-deleted false))
(decode-file cfg)))))

(defn clean-file-features
[file]
@@ -306,19 +320,15 @@

file))

(defn get-file-media
[cfg {:keys [data id] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (cfh/collect-used-media data)
ids (db/create-array conn "uuid" ids)
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
(def sql:get-file-media
"SELECT * FROM file_media_object WHERE id = ANY(?)")

;; We assoc the file-id again to the file-media-object row
;; because there are cases where used objects refer to other
;; files and we need to ensure in the exportation process that
;; all ids match
(->> (db/exec! conn [sql ids])
(mapv #(assoc % :file-id id)))))))
(defn get-file-media
[cfg {:keys [data] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [used (cfh/collect-used-media data)
used (db/create-array conn "uuid" used)]
(db/exec! conn [sql:get-file-media used])))))

(def ^:private sql:get-team-files-ids
"SELECT f.id FROM file AS f
@@ -431,75 +441,96 @@
(update :colors relink-colors)
(d/without-nils))))))

(defn- upsert-file!
[conn file]
(let [sql (str "INSERT INTO file (id, project_id, name, revn, version, is_shared, data, created_at, modified_at) "
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) "
"ON CONFLICT (id) DO UPDATE SET data=?, version=?")]
(db/exec-one! conn [sql
(:id file)
(:project-id file)
(:name file)
(:revn file)
(:version file)
(:is-shared file)
(:data file)
(:created-at file)
(:modified-at file)
(:data file)
(:version file)])))
(defn- encode-file
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)

(defn persist-file!
"Applies all the final validations and persists the file."
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)]

(-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))))

(defn- file->params
[file]
(let [params {:has-media-trimmed (:has-media-trimmed file)
:ignore-sync-until (:ignore-sync-until file)
:project-id (:project-id file)
:features (:features file)
:name (:name file)
:is-shared (:is-shared file)
:version (:version file)
:data (:data file)
:id (:id file)
:deleted-at (:deleted-at file)
:created-at (:created-at file)
:modified-at (:modified-at file)
:revn (:revn file)
:vern (:vern file)}]

(-> (d/without-nils params)
(assoc :data-backend nil)
(assoc :data-ref-id nil))))

(defn insert-file!
"Insert a new file into the database table"
[{:keys [::db/conn] :as cfg} file]
(let [params (-> (encode-file cfg file)
(file->params))]
(db/insert! conn :file params {::db/return-keys true})))

(defn update-file!
"Update an existing file on the database."
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file}]
(let [file (encode-file cfg file)
params (-> (file->params file)
(dissoc :id))]

;; If the file was already offloaded, we touch the underlying storage
;; object to properly trigger the storage-gc-touched task
(when (feat.fdata/offloaded? file)
(some->> (:data-ref-id file) (sto/touch-object! storage)))

(db/update! conn :file params {:id id} {::db/return-keys true})))

(defn save-file!
"Applies all the final validations and persists the file; binfile
specific, should not be used outside of the binfile domain"
[{:keys [::timestamp] :as cfg} file]

(dm/assert!
"expected valid timestamp"
(dt/instant? timestamp))

(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg #{})
(set/union features)
;; We never want to store
;; frontend-only features on file
(set/difference cfeat/frontend-only-features))))))
(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg #{})
(set/union features)
;; We never want to store
;; frontend-only features on file
(set/difference cfeat/frontend-only-features))))))]

(when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))

_ (when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))

_ (when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))

file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)

file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)

params (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]

(if (::overwrite cfg)
(upsert-file! conn params)
(db/insert! conn :file params ::db/return-keys false))

file))
(when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))

(insert-file! cfg file)))

(defn register-pending-migrations
"All features that are enabled and require explicit migration are
@@ -424,21 +424,15 @@
(s/def ::bfc/profile-id ::us/uuid)
(s/def ::bfc/project-id ::us/uuid)
(s/def ::bfc/input io/input-stream?)
(s/def ::overwrite? (s/nilable ::us/boolean))
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))

;; FIXME: replace with schema
(s/def ::read-import-options
(s/keys :req [::db/pool ::sto/storage ::bfc/project-id ::bfc/profile-id ::bfc/input]
:opt [::overwrite? ::ignore-index-errors?]))
:opt [::ignore-index-errors?]))

(defn read-import!
"Do the importation of the specified resource in penpot custom binary
format. There are some options to customize the importation
behavior:

`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
it reuses all ids and updates existing objects; defaults to `false`."
format."
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]

(dm/assert!
@ -509,8 +503,7 @@
|
|||
thumbnails))
|
||||
|
||||
(defmethod read-section :v1/files
|
||||
[{:keys [::db/conn ::bfc/input ::bfc/project-id ::bfc/overwrite ::bfc/name] :as system}]
|
||||
|
||||
[{:keys [::bfc/input ::bfc/project-id ::bfc/name] :as system}]
|
||||
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
|
||||
(let [file (read-obj! input)
|
||||
media (read-obj! input)
|
||||
|
@ -568,10 +561,7 @@
|
|||
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
|
||||
|
||||
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
|
||||
(bfc/persist-file! system file)
|
||||
|
||||
(when overwrite
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id'}))
|
||||
(bfc/save-file! system file)
|
||||
|
||||
file-id'))))
|
||||
|
||||
|
@ -600,7 +590,7 @@
|
|||
::l/sync? true))))))
|
||||
|
||||
(defmethod read-section :v1/sobjects
|
||||
[{:keys [::db/conn ::bfc/input ::bfc/overwrite ::bfc/timestamp] :as cfg}]
|
||||
[{:keys [::db/conn ::bfc/input ::bfc/timestamp] :as cfg}]
|
||||
(let [storage (sto/resolve cfg)
|
||||
ids (read-obj! input)
|
||||
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
|
||||
|
@ -653,8 +643,7 @@
|
|||
(-> item
|
||||
(assoc :file-id file-id)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))
|
||||
{::db/on-conflict-do-nothing? overwrite}))))
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))))))
|
||||
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [item (update item :media-id bfc/lookup-index)]
|
||||
|
@ -663,8 +652,7 @@
|
|||
:media-id (str (:media-id item))
|
||||
:object-id (:object-id item)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-tagged-object-thumbnail item
|
||||
{::db/on-conflict-do-nothing? overwrite})))))
|
||||
(db/insert! conn :file-tagged-object-thumbnail item)))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HIGH LEVEL API
|
||||
|
|
|
@ -297,7 +297,7 @@
|
|||
(set/difference (:features file)))]
|
||||
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
|
||||
|
||||
(bfc/persist-file! cfg file))
|
||||
(bfc/save-file! cfg file))
|
||||
|
||||
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
|
||||
(let [thumbnail (-> thumbnail
|
||||
|
|
|
@ -28,6 +28,7 @@
|
|||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.storage :as sto]
|
||||
[app.storage.impl :as sto.impl]
|
||||
[app.util.events :as events]
|
||||
|
@ -212,18 +213,18 @@
|
|||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))
|
||||
file (bfc/get-file cfg file-id)]
|
||||
(cond-> file
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
embed-assets
|
||||
(update :data #(bfc/embed-assets cfg % file-id))
|
||||
embed-assets
|
||||
(update :data #(bfc/embed-assets cfg % file-id))
|
||||
|
||||
:always
|
||||
(bfc/clean-file-features))))
|
||||
:always
|
||||
(bfc/clean-file-features))))))
|
||||
|
||||
(defn- resolve-extension
|
||||
[mtype]
|
||||
|
@ -262,6 +263,7 @@
|
|||
(defn- export-file
|
||||
[{:keys [::file-id ::output] :as cfg}]
|
||||
(let [file (get-file cfg file-id)
|
||||
|
||||
media (->> (bfc/get-file-media cfg file)
|
||||
(map (fn [media]
|
||||
(dissoc media :file-id))))
|
||||
|
@ -684,7 +686,7 @@
|
|||
:plugin-data plugin-data}))
|
||||
|
||||
(defn- import-file
|
||||
[{:keys [::db/conn ::bfc/project-id ::file-id ::file-name] :as cfg}]
|
||||
[{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
|
||||
(let [file-id' (bfc/lookup-index file-id)
|
||||
file (read-file cfg)
|
||||
media (read-file-media cfg)
|
||||
|
@ -734,10 +736,7 @@
|
|||
|
||||
(->> file
|
||||
(bfc/register-pending-migrations cfg)
|
||||
(bfc/persist-file! cfg))
|
||||
|
||||
(when (::bfc/overwrite cfg)
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id'}))
|
||||
(bfc/save-file! cfg))
|
||||
|
||||
file-id')))
|
||||
|
||||
|
@ -832,8 +831,7 @@
|
|||
:file-id (str (:file-id params))
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-media-object params
|
||||
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
|
||||
(db/insert! conn :file-media-object params))))
|
||||
|
||||
(defn- import-file-thumbnails
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
|
@ -853,8 +851,7 @@
|
|||
:media-id (str media-id)
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail params
|
||||
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
|
||||
(db/insert! conn :file-tagged-object-thumbnail params))))
|
||||
|
||||
(defn- import-files
|
||||
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]
|
||||
|
|
|
@ -413,7 +413,7 @@
|
|||
|
||||
(def ^:private default-plan-opts
|
||||
(-> default-opts
|
||||
(assoc :fetch-size 1)
|
||||
(assoc :fetch-size 1000)
|
||||
(assoc :concurrency :read-only)
|
||||
(assoc :cursors :close)
|
||||
(assoc :result-type :forward-only)))
|
||||
|
|
|
@ -25,7 +25,6 @@
|
|||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.metrics :as-alias mtx]
|
||||
[app.metrics.definition :as-alias mdef]
|
||||
[app.migrations.v2 :as migrations.v2]
|
||||
[app.msgbus :as-alias mbus]
|
||||
[app.redis :as-alias rds]
|
||||
[app.rpc :as-alias rpc]
|
||||
|
@ -609,11 +608,6 @@
|
|||
(nrepl/start-server :bind "0.0.0.0" :port 6064 :handler cider-nrepl-handler))
|
||||
|
||||
(start)
|
||||
|
||||
(when (contains? cf/flags :v2-migration)
|
||||
(px/sleep 5000)
|
||||
(migrations.v2/migrate app.main/system))
|
||||
|
||||
(deref p))
|
||||
(catch Throwable cause
|
||||
(ex/print-throwable cause)
|
||||
|
|
backend/src/app/migrations/media_refs.clj (new file, 49 lines)

@@ -0,0 +1,49 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.migrations.media-refs
  "A media refs migration fixer script"
  (:require
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.pprint]
   [app.srepl.fixes.media-refs :refer [process-file]]
   [app.srepl.main :as srepl]
   [clojure.edn :as edn]))

(def ^:private required-services
  [:app.storage.s3/backend
   :app.storage.fs/backend
   :app.storage/storage
   :app.metrics/metrics
   :app.db/pool
   :app.worker/executor])

(defn -main
  [& [options]]
  (try
    (let [config-var (requiring-resolve 'app.main/system-config)
          start-var  (requiring-resolve 'app.main/start-custom)
          stop-var   (requiring-resolve 'app.main/stop)
          config     (select-keys @config-var required-services)]

      (start-var config)

      (let [options (if (string? options)
                      (ex/ignoring (edn/read-string options))
                      {})]

        (l/inf :hint "executing media-refs migration" :options options)
        (srepl/process-files! process-file options))

      (stop-var)
      (System/exit 0))
    (catch Throwable cause
      (ex/print-throwable cause)
      (flush)
      (System/exit -1))))

@ -1,103 +0,0 @@
|
|||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.migrations.v2
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat]
|
||||
[app.setup :as setup]
|
||||
[app.util.time :as dt]))
|
||||
|
||||
(def ^:private sql:get-teams
|
||||
"SELECT id, features,
|
||||
row_number() OVER (ORDER BY created_at DESC) AS rown
|
||||
FROM team
|
||||
WHERE deleted_at IS NULL
|
||||
AND (not (features @> '{components/v2}') OR features IS NULL)
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn- get-teams
|
||||
[conn]
|
||||
(->> (db/cursor conn [sql:get-teams] {:chunk-size 1})
|
||||
(map feat/decode-row)))
|
||||
|
||||
(defn- migrate-teams
|
||||
[{:keys [::db/conn] :as system}]
|
||||
;; Allow long running transaction for this connection
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
;; Do not allow other migration running in the same time
|
||||
(db/xact-lock! conn 0)
|
||||
|
||||
;; Run teams migration
|
||||
(run! (fn [{:keys [id rown]}]
|
||||
(try
|
||||
(-> (assoc system ::db/rollback false)
|
||||
(feat/migrate-team! id
|
||||
:rown rown
|
||||
:label "v2-migration"
|
||||
:validate? false
|
||||
:skip-on-graphics-error? true))
|
||||
(catch Throwable _
|
||||
(swap! feat/*stats* update :errors (fnil inc 0))
|
||||
(l/wrn :hint "error on migrating team (skiping)"))))
|
||||
(get-teams conn))
|
||||
|
||||
(setup/set-prop! system :v2-migrated true))
|
||||
|
||||
(defn migrate
|
||||
[system]
|
||||
(let [tpoint (dt/tpoint)
|
||||
stats (atom {})
|
||||
migrated? (setup/get-prop system :v2-migrated false)]
|
||||
|
||||
(when-not migrated?
|
||||
(l/inf :hint "v2 migration started")
|
||||
(try
|
||||
(binding [feat/*stats* stats]
|
||||
(db/tx-run! system migrate-teams))
|
||||
|
||||
(let [stats (deref stats)
|
||||
elapsed (dt/format-duration (tpoint))]
|
||||
(l/inf :hint "v2 migration finished"
|
||||
:files (:processed-files stats)
|
||||
:teams (:processed-teams stats)
|
||||
:errors (:errors stats)
|
||||
:elapsed elapsed))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/err :hint "error on aplying v2 migration" :cause cause))))))
|
||||
|
||||
(def ^:private required-services
|
||||
[[:app.main/assets :app.storage.s3/backend]
|
||||
[:app.main/assets :app.storage.fs/backend]
|
||||
:app.storage/storage
|
||||
:app.db/pool
|
||||
:app.setup/props
|
||||
:app.svgo/optimizer
|
||||
:app.metrics/metrics
|
||||
:app.migrations/migrations
|
||||
:app.http.client/client])
|
||||
|
||||
(defn -main
|
||||
[& _args]
|
||||
(try
|
||||
(let [config-var (requiring-resolve 'app.main/system-config)
|
||||
start-var (requiring-resolve 'app.main/start-custom)
|
||||
stop-var (requiring-resolve 'app.main/stop)
|
||||
system-var (requiring-resolve 'app.main/system)
|
||||
config (select-keys @config-var required-services)]
|
||||
|
||||
(start-var config)
|
||||
(migrate @system-var)
|
||||
(stop-var)
|
||||
(System/exit 0))
|
||||
(catch Throwable cause
|
||||
(ex/print-throwable cause)
|
||||
(flush)
|
||||
(System/exit -1))))
|
|
@ -119,6 +119,7 @@
|
|||
(sv/defmethod ::update-file
|
||||
{::climit/id [[:update-file/by-profile ::rpc/profile-id]
|
||||
[:update-file/global]]
|
||||
|
||||
::webhooks/event? true
|
||||
::webhooks/batch-timeout (dt/duration "2m")
|
||||
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
|
||||
|
|
|
@ -57,7 +57,7 @@
|
|||
|
||||
;; Process and persist file
|
||||
(let [file (->> (bfc/process-file file)
|
||||
(bfc/persist-file! cfg))]
|
||||
(bfc/save-file! cfg))]
|
||||
|
||||
;; The file profile creation is optional, so when no profile is
|
||||
;; present (when this function is called from profile less
|
||||
|
@ -86,7 +86,7 @@
|
|||
fmeds)]
|
||||
(db/insert! conn :file-media-object params ::db/return-keys false))
|
||||
|
||||
file)))
|
||||
(bfc/decode-file cfg file))))
|
||||
|
||||
(def ^:private
|
||||
schema:duplicate-file
|
||||
|
|
|
@ -272,6 +272,7 @@
|
|||
(reduce +)))
|
||||
|
||||
num-missing-slots (count-slots-data (:data file))]
|
||||
|
||||
(when (pos? num-missing-slots)
|
||||
(l/trc :info (str "Shapes with children with the same swap slot: " num-missing-slots) :file-id (str (:id file))))
|
||||
file))
|
||||
|
|
backend/src/app/srepl/fixes/media_refs.clj (new file, 43 lines)

@@ -0,0 +1,43 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.srepl.fixes.media-refs
  (:require
   [app.binfile.common :as bfc]
   [app.common.files.helpers :as cfh]
   [app.srepl.helpers :as h]))

(defn- collect-media-refs
  "Given a file data map, returns all media references used on pages of
  the file data; components and other parts of the file data are not analyzed"
  [data]
  (let [xform-collect
        (comp
         (map val)
         (mapcat (fn [object]
                   (->> (cfh/collect-shape-media-refs object)
                        (map (fn [id]
                               {:object-id (:id object)
                                :id id}))))))
        process-page
        (fn [result page-id container]
          (let [xform (comp xform-collect (map #(assoc % :page-id page-id)))]
            (into result xform (:objects container))))]

    (reduce-kv process-page [] (:pages-index data))))

(defn update-all-media-references
  "Check that all file media object references are in place and create
  new ones if some of them are missing; this prevents strange bugs caused by
  having the same media object associated with two different files."
  [cfg file]
  (let [media-refs (collect-media-refs (:data file))]
    (bfc/update-media-references! cfg file media-refs)))

(defn process-file
  [file _opts]
  (let [system (h/get-current-system)]
    (update-all-media-references system file)))

@ -8,17 +8,14 @@
|
|||
"A main namespace for server repl."
|
||||
(:refer-clojure :exclude [parse-uuid])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat.comp-v2]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.main :as main]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-snapshot :as fsnap]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]))
|
||||
[app.rpc.commands.files-snapshot :as fsnap]))
|
||||
|
||||
(def ^:dynamic *system* nil)
|
||||
|
||||
|
@ -27,6 +24,10 @@
|
|||
(locking println
|
||||
(apply println params)))
|
||||
|
||||
(defn get-current-system
|
||||
[]
|
||||
*system*)
|
||||
|
||||
(defn parse-uuid
|
||||
[v]
|
||||
(if (string? v)
|
||||
|
@ -35,49 +36,21 @@
|
|||
|
||||
(defn get-file
|
||||
"Get the migrated data of one file."
|
||||
([id] (get-file (or *system* main/system) id nil))
|
||||
([system id & {:keys [raw?] :as opts}]
|
||||
([id]
|
||||
(get-file (or *system* main/system) id))
|
||||
([system id]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(let [file (files/get-file system id :migrate? false)]
|
||||
(if raw?
|
||||
file
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file)))))))))
|
||||
(->> (bfc/get-file system id ::db/for-update true)
|
||||
(bfc/decode-file system))))))
|
||||
|
||||
(defn update-file!
|
||||
[system {:keys [id] :as file}]
|
||||
(let [conn (db/get-connection system)
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! system id)
|
||||
file))
|
||||
file)
|
||||
|
||||
file (-> file
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))]
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:deleted-at (:deleted-at file)
|
||||
:created-at (:created-at file)
|
||||
:modified-at (:modified-at file)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)})))
|
||||
(defn get-raw-file
|
||||
"Get the migrated data of one file."
|
||||
([id] (get-raw-file (or *system* main/system) id))
|
||||
([system id]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(files/get-file system id :migrate? false)))))
|
||||
|
||||
(defn update-team!
|
||||
[system {:keys [id] :as team}]
|
||||
|
@ -90,14 +63,6 @@
|
|||
{:id id})
|
||||
team))
|
||||
|
||||
(defn get-raw-file
|
||||
"Get the migrated data of one file."
|
||||
([id] (get-raw-file (or *system* main/system) id))
|
||||
([system id]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(files/get-file system id :migrate? false)))))
|
||||
|
||||
(defn reset-file-data!
|
||||
"Hardcode replace of the data of one file."
|
||||
[system id data]
|
||||
|
@ -162,16 +127,12 @@
|
|||
|
||||
(defn process-file!
|
||||
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
|
||||
|
||||
(when (string? label)
|
||||
(fsnap/create-file-snapshot! system nil file-id label))
|
||||
|
||||
(let [conn (db/get-connection system)
|
||||
file (get-file system file-id opts)
|
||||
file (bfc/get-file system file-id ::db/for-update true)
|
||||
libs (when with-libraries?
|
||||
(->> (files/get-file-libraries conn file-id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(get-file system id))))
|
||||
(bfc/get-file system id))))
|
||||
(d/index-by :id)))
|
||||
|
||||
file' (if with-libraries?
|
||||
|
@ -180,7 +141,12 @@
|
|||
|
||||
(when (and (some? file')
|
||||
(not (identical? file file')))
|
||||
(when validate? (cfv/validate-file-schema! file'))
|
||||
(when validate?
|
||||
(cfv/validate-file-schema! file'))
|
||||
|
||||
(when (string? label)
|
||||
(fsnap/create-file-snapshot! system nil file-id label))
|
||||
|
||||
(let [file' (update file' :revn inc)]
|
||||
(update-file! system file')
|
||||
(bfc/update-file! system file')
|
||||
true))))
|
||||
|
|
|
@ -10,8 +10,8 @@
|
|||
file is eligible to be garbage collected after some period of
|
||||
inactivity (the default threshold is 72h)."
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.common.logging :as l]
|
||||
[app.common.thumbnails :as thc]
|
||||
|
@ -22,28 +22,25 @@
|
|||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(declare ^:private get-file)
|
||||
(declare ^:private decode-file)
|
||||
(declare ^:private persist-file!)
|
||||
(declare get-file)
|
||||
|
||||
(def ^:private sql:get-snapshots
|
||||
"SELECT f.file_id AS id,
|
||||
f.data,
|
||||
f.revn,
|
||||
f.version,
|
||||
f.features,
|
||||
f.data_backend,
|
||||
f.data_ref_id
|
||||
FROM file_change AS f
|
||||
WHERE f.file_id = ?
|
||||
AND f.data IS NOT NULL
|
||||
ORDER BY f.created_at ASC")
|
||||
(def sql:get-snapshots
|
||||
"SELECT fc.file_id AS id,
|
||||
fc.id AS snapshot_id,
|
||||
fc.data,
|
||||
fc.revn,
|
||||
fc.version,
|
||||
fc.features,
|
||||
fc.data_backend,
|
||||
fc.data_ref_id
|
||||
FROM file_change AS fc
|
||||
WHERE fc.file_id = ?
|
||||
AND fc.data IS NOT NULL
|
||||
ORDER BY fc.created_at ASC")
|
||||
|
||||
(def ^:private sql:mark-file-media-object-deleted
|
||||
"UPDATE file_media_object
|
||||
|
@ -51,7 +48,7 @@
|
|||
WHERE file_id = ? AND id != ALL(?::uuid[])
|
||||
RETURNING id")
|
||||
|
||||
(def ^:private xf:collect-used-media
|
||||
(def xf:collect-used-media
|
||||
(comp
|
||||
(map :data)
|
||||
(mapcat cfh/collect-used-media)))
|
||||
|
@ -60,10 +57,10 @@
|
|||
"Performs the garbage collection of file media objects."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(let [xform (comp
|
||||
(map (partial decode-file cfg))
|
||||
(map (partial bfc/decode-file cfg))
|
||||
xf:collect-used-media)
|
||||
|
||||
used (->> (db/plan conn [sql:get-snapshots id])
|
||||
used (->> (db/plan conn [sql:get-snapshots id] {:fetch-size 1})
|
||||
(transduce xform conj #{}))
|
||||
used (into used xf:collect-used-media [file])
|
||||
|
||||
|
@ -149,8 +146,6 @@
|
|||
AND f.deleted_at IS null
|
||||
ORDER BY f.modified_at ASC")
|
||||
|
||||
(def ^:private xf:map-id (map :id))
|
||||
|
||||
(defn- get-used-components
|
||||
"Given a file and a set of components marked for deletion, return a
|
||||
filtered set of component ids that are still un use"
|
||||
|
@ -169,15 +164,15 @@
|
|||
(mapcat (partial get-used-components deleted-components file-id))
|
||||
|
||||
used-remote
|
||||
(->> (db/plan conn [sql:get-files-for-library file-id])
|
||||
(transduce (comp (map (partial decode-file cfg)) xform) conj #{}))
|
||||
(->> (db/plan conn [sql:get-files-for-library file-id] {:fetch-size 1})
|
||||
(transduce (comp (map (partial bfc/decode-file cfg)) xform) conj #{}))
|
||||
|
||||
used-local
|
||||
(into #{} xform [file])
|
||||
|
||||
unused
|
||||
(transduce xf:map-id disj
|
||||
(into #{} xf:map-id deleted-components)
|
||||
(transduce bfc/xf-map-id disj
|
||||
(into #{} bfc/xf-map-id deleted-components)
|
||||
(concat used-remote used-local))
|
||||
|
||||
file
|
||||
|
@ -204,31 +199,32 @@
|
|||
|
||||
(def ^:private xf:collect-pointers
|
||||
(comp (map :data)
|
||||
(map blob/decode)
|
||||
(mapcat feat.fdata/get-used-pointer-ids)))
|
||||
|
||||
(defn- clean-data-fragments!
|
||||
(defn- clean-fragments!
|
||||
[{:keys [::db/conn]} {:keys [id] :as file}]
|
||||
(let [used (into #{} xf:collect-pointers [file])
|
||||
|
||||
unused (let [ids (db/create-array conn "uuid" used)]
|
||||
(->> (db/exec! conn [sql:mark-deleted-data-fragments id ids])
|
||||
(into #{} (map :id))))]
|
||||
unused (->> (db/exec! conn [sql:mark-deleted-data-fragments id
|
||||
(db/create-array conn "uuid" used)])
|
||||
(into #{} bfc/xf-map-id))]
|
||||
|
||||
(l/dbg :hint "clean" :rel "file-data-fragment" :file-id (str id) :total (count unused))
|
||||
(doseq [id unused]
|
||||
(l/trc :hint "mark deleted"
|
||||
:rel "file-data-fragment"
|
||||
:id (str id)
|
||||
:file-id (str id)))))
|
||||
:file-id (str id)))
|
||||
|
||||
file))
|
||||
|
||||
(defn- clean-media!
|
||||
[cfg file]
|
||||
(let [file (->> file
|
||||
(clean-deleted-components! cfg)
|
||||
(clean-file-media! cfg)
|
||||
(clean-file-thumbnails! cfg)
|
||||
(clean-file-object-thumbnails! cfg)
|
||||
(clean-deleted-components! cfg))]
|
||||
(clean-file-object-thumbnails! cfg))]
|
||||
(cfv/validate-file-schema! file)
|
||||
file))
|
||||
|
||||
|
@ -249,65 +245,27 @@
|
|||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- get-file
|
||||
[{:keys [::db/conn ::min-age ::file-id]}]
|
||||
(->> (db/exec! conn [sql:get-file min-age file-id])
|
||||
(first)))
|
||||
|
||||
(defn- decode-file
|
||||
[cfg {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(-> (feat.fdata/resolve-file-data cfg file)
|
||||
(update :features db/decode-pgarray #{})
|
||||
(update :data blob/decode)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(update :data assoc :id id)
|
||||
(fmg/migrate-file))))
|
||||
|
||||
(defn- persist-file!
|
||||
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file}]
|
||||
(let [file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)
|
||||
|
||||
file (-> file
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))]
|
||||
|
||||
;; If file was already offloaded, we touch the underlying storage
|
||||
;; object for properly trigger storage-gc-touched task
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(some->> (:data-ref-id file) (sto/touch-object! storage)))
|
||||
|
||||
(db/update! conn :file
|
||||
{:has-media-trimmed true
|
||||
:features (:features file)
|
||||
:version (:version file)
|
||||
:data (:data file)
|
||||
:data-backend nil
|
||||
:data-ref-id nil}
|
||||
{:id id}
|
||||
{::db/return-keys true})))
|
||||
(defn get-file
|
||||
[{:keys [::db/conn ::min-age]} file-id]
|
||||
(let [min-age (if min-age
|
||||
(db/interval min-age)
|
||||
(db/interval 0))]
|
||||
(->> (db/exec! conn [sql:get-file min-age file-id])
|
||||
(first))))
|
||||
|
||||
(defn- process-file!
|
||||
[cfg]
|
||||
(if-let [file (get-file cfg)]
|
||||
(let [file (decode-file cfg file)
|
||||
file (clean-media! cfg file)
|
||||
file (persist-file! cfg file)]
|
||||
(clean-data-fragments! cfg file)
|
||||
[cfg file-id]
|
||||
(if-let [file (get-file cfg file-id)]
|
||||
(let [file (->> file
|
||||
(bfc/decode-file cfg)
|
||||
(clean-media! cfg)
|
||||
(clean-fragments! cfg))
|
||||
file (assoc file :has-media-trimmed true)]
|
||||
(bfc/update-file! cfg file)
|
||||
true)
|
||||
|
||||
(do
|
||||
(l/dbg :hint "skip" :file-id (str (::file-id cfg)))
|
||||
(l/dbg :hint "skip" :file-id (str file-id))
|
||||
false)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
@ -324,15 +282,15 @@
|
|||
(fn [{:keys [props] :as task}]
|
||||
(let [min-age (dt/duration (or (:min-age props)
|
||||
(cf/get-deletion-delay)))
|
||||
file-id (get props :file-id)
|
||||
cfg (-> cfg
|
||||
(assoc ::db/rollback (:rollback? props))
|
||||
(assoc ::file-id (:file-id props))
|
||||
(assoc ::min-age (db/interval min-age)))]
|
||||
(assoc ::min-age min-age))]
|
||||
|
||||
(try
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [cfg (update cfg ::sto/storage sto/configure conn)
|
||||
processed? (process-file! cfg)]
|
||||
processed? (process-file! cfg file-id)]
|
||||
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
|
||||
(wrk/submit! (-> cfg
|
||||
(assoc ::wrk/task :offload-file-data)
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
[app.db.sql :as sql]
|
||||
[app.http :as http]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.storage :as sto]
|
||||
[app.util.time :as dt]
|
||||
[backend-tests.helpers :as th]
|
||||
|
@ -1827,5 +1828,3 @@
|
|||
|
||||
(t/is (= (:id file-2) (:file-id (get rows 0))))
|
||||
(t/is (nil? (:deleted-at (get rows 0)))))))
|
||||
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
(ns backend-tests.rpc-management-test
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.types.shape :as cts]
|
||||
|
@ -82,7 +83,6 @@
|
|||
;; Check that result is correct
|
||||
(t/is (nil? (:error out)))
|
||||
(let [result (:result out)]
|
||||
|
||||
;; Check that the returned result is a file but has different id
|
||||
;; and different name.
|
||||
(t/is (= "file 1 (copy)" (:name result)))
|
||||
|
|
|
@ -571,7 +571,8 @@
|
|||
:code :schema-validation
|
||||
:hint (str/ffmt "invalid file data structure found on file '%'" id)
|
||||
:file-id id
|
||||
::sm/explain (get-fdata-explain data))))
|
||||
::sm/explain (get-fdata-explain data)))
|
||||
file)
|
||||
|
||||
(defn validate-file!
|
||||
"Validate full referential integrity and semantic coherence on file data.
|
||||
|
|
|
@ -202,7 +202,8 @@
|
|||
position
|
||||
components-v2
|
||||
(cond-> {}
|
||||
force-frame? (assoc :force-frame-id frame-id)))
|
||||
force-frame?
|
||||
(assoc :force-frame-id frame-id)))
|
||||
|
||||
first-shape
|
||||
(cond-> (first new-shapes)
|
||||
|
|
|
@@ -347,11 +347,14 @@
Clone the shapes of the component, generating new names and ids, and
linking each new shape to the corresponding one of the
component. Place the new instance coordinates in the given
position."
([container component library-data position components-v2]
(make-component-instance container component library-data position components-v2 {}))
position.

([container component library-data position components-v2
WARNING: This process does not remap media references (on fills, strokes, ...); that is
delegated to an async process on the backend side that checks unreferenced shapes and
automatically creates correct references."
([page component library-data position components-v2]
(make-component-instance page component library-data position components-v2 {}))
([page component library-data position components-v2
{:keys [main-instance? force-id force-frame-id keep-ids?]
:or {main-instance? false force-id nil force-frame-id nil keep-ids? false}}]
(let [component-page (when components-v2
@ -367,7 +370,7 @@
|
|||
orig-pos (gpt/point (:x component-shape) (:y component-shape))
|
||||
delta (gpt/subtract position orig-pos)
|
||||
|
||||
objects (:objects container)
|
||||
objects (:objects page)
|
||||
unames (volatile! (cfh/get-used-names objects))
|
||||
|
||||
component-children
|
||||
|
@ -384,7 +387,7 @@
|
|||
(nil? (get component-children (:id %)))
|
||||
;; We must avoid that destiny frame is inside a copy
|
||||
(not (ctk/in-component-copy? %)))}))
|
||||
frame (get-shape container frame-id)
|
||||
frame (get-shape page frame-id)
|
||||
component-frame (get-component-shape objects frame {:allow-main? true})
|
||||
|
||||
ids-map (volatile! {})
|
||||
|
@ -437,7 +440,7 @@
|
|||
:force-id force-id
|
||||
:keep-ids? keep-ids?
|
||||
:frame-id frame-id
|
||||
:dest-objects (:objects container))
|
||||
:dest-objects (:objects page))
|
||||
|
||||
;; Fix empty parent-id and remap all grid cells to the new ids.
|
||||
remap-ids
|
||||
|
|
|
@@ -233,23 +233,26 @@ This will fetch the latest images. When you do <code class="language-bash">docke

**Important: Upgrade from version 1.x to 2.0**

The migration to version 2.0, due to the incorporation of the new v2
components, includes an additional process that runs automatically as
soon as the application starts. If your on-premises Penpot instance
contains a significant amount of data (such as hundreds of penpot
files, especially those utilizing SVG components and assets
extensively), this process may take a few minutes.
The migration to version 2.0, due to the incorporation of the new v2 components, includes
an additional process that runs automatically as soon as the application starts. If your
on-premises Penpot instance contains a significant amount of data (such as hundreds of
penpot files, especially those utilizing SVG components and assets extensively), this
process may take a few minutes.

In some cases, such as when the script encounters an error, it may be
convenient to run the process manually. To do this, you can disable
the automatic migration process using the <code class="language-bash">disable-v2-migration</code> flag
in <code class="language-bash">PENPOT_FLAGS</code> environment variable. You can then execute the
In some cases, such as when the script encounters an error, it may be convenient to run
the process manually. To do this, you can disable the automatic migration process using
the <code class="language-bash">disable-v2-migration</code> flag in <code
class="language-bash">PENPOT_FLAGS</code> environment variable. You can then execute the
migration process manually with the following command:

```bash
docker exec -ti <container-name-or-id> ./run.sh app.migrations.v2
```
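
As noted above, the automatic migration is disabled through the <code class="language-bash">PENPOT_FLAGS</code> environment variable, which is a space-separated list of flags; a minimal sketch of appending the flag (assuming a shell-style environment assignment, kept next to whatever flags you already use):

```bash
PENPOT_FLAGS="$PENPOT_FLAGS disable-v2-migration"
```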

**IMPORTANT:** this script should be executed when upgrading from 1.19.x to 2.0.x. Executing
it on Penpot versions greater than or equal to 2.1 will not work correctly. Note that this
script has been removed since version 2.4.3.


### Backup Penpot
