🎉 Add binfile-v3 export/import file format
parent 4fb5d3fb20
commit 8618cb950f
35 changed files with 2031 additions and 599 deletions
@@ -37,6 +37,21 @@
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; Threshold in MiB when we pass from using
;; in-memory byte-array's to use temporal files.
(def temp-file-threshold
  (* 1024 1024 2))

;; A maximum (storage) object size allowed: 100MiB
(def ^:const max-object-size
  (* 1024 1024 100))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def xf-map-id
  (map :id))

@@ -56,6 +71,13 @@
(def conj-vec
  (fnil conj []))

(defn initial-state
  []
  {:storage-objects #{}
   :files #{}
   :teams #{}
   :projects #{}})

(defn collect-storage-objects
  [state items]
  (update state :storage-objects into xf-map-media-id items))

@@ -87,6 +109,8 @@
   attrs))

(defn update-index
  ([coll]
   (update-index {} coll identity))
  ([index coll]
   (update-index index coll identity))
  ([index coll attr]

@@ -114,6 +138,16 @@
  [cfg project-id]
  (db/get cfg :project {:id project-id}))

(def ^:private sql:get-teams
  "SELECT t.* FROM team WHERE id = ANY(?)")

(defn get-teams
  [cfg ids]
  (let [conn (db/get-connection cfg)
        ids  (db/create-array conn "uuid" ids)]
    (->> (db/exec! conn [sql:get-teams ids])
         (map decode-row))))

(defn get-team
  [cfg team-id]
  (-> (db/get cfg :team {:id team-id})

@@ -167,9 +201,10 @@
(defn get-file-object-thumbnails
  "Return all file object thumbnails for a given file."
  [cfg file-id]
  (db/query cfg :file-tagged-object-thumbnail
            {:file-id file-id
             :deleted-at nil}))
  (->> (db/query cfg :file-tagged-object-thumbnail
                 {:file-id file-id
                  :deleted-at nil})
       (not-empty)))

(defn get-file-thumbnail
  "Return the thumbnail for the specified file-id"

@@ -224,26 +259,26 @@
    (->> (db/exec! conn [sql ids])
         (mapv #(assoc % :file-id id)))))))

(def ^:private sql:get-team-files
(def ^:private sql:get-team-files-ids
  "SELECT f.id FROM file AS f
     JOIN project AS p ON (p.id = f.project_id)
    WHERE p.team_id = ?")

(defn get-team-files
(defn get-team-files-ids
  "Get a set of file ids for the specified team-id"
  [{:keys [::db/conn]} team-id]
  (->> (db/exec! conn [sql:get-team-files team-id])
  (->> (db/exec! conn [sql:get-team-files-ids team-id])
       (into #{} xf-map-id)))

(def ^:private sql:get-team-projects
  "SELECT p.id FROM project AS p
  "SELECT p.* FROM project AS p
    WHERE p.team_id = ?
      AND p.deleted_at IS NULL")

(defn get-team-projects
  "Get a set of project ids for the team"
  [{:keys [::db/conn]} team-id]
  (->> (db/exec! conn [sql:get-team-projects team-id])
  [cfg team-id]
  (->> (db/exec! cfg [sql:get-team-projects team-id])
       (into #{} xf-map-id)))

(def ^:private sql:get-project-files

@@ -257,6 +292,10 @@
  (->> (db/exec! conn [sql:get-project-files project-id])
       (into #{} xf-map-id)))

(defn remap-thumbnail-object-id
  [object-id file-id]
  (str/replace-first object-id #"^(.*?)/" (str file-id "/")))

(defn- relink-shapes
  "A function responsible to analyze all file data and
  replace the old :component-file reference with the new

@@ -339,6 +378,12 @@
       data
       library-ids)))

(defn disable-database-timeouts!
  [cfg]
  (let [conn (db/get-connection cfg)]
    (db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
    (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))

(defn- fix-version
  [file]
  (let [file (fmg/fix-version file)]

@@ -432,6 +477,20 @@

  file))

(defn register-pending-migrations
  "All features that are enabled and require explicit migration are
  added to the state for a posterior migration step."
  [cfg {:keys [id features] :as file}]
  (doseq [feature (-> (::features cfg)
                      (set/difference cfeat/no-migration-features)
                      (set/difference cfeat/backend-only-features)
                      (set/difference features))]
    (vswap! *state* update :pending-to-migrate (fnil conj []) [feature id]))

  file)

(defn apply-pending-migrations!
  "Apply already registered pending migrations to files"
  [cfg]
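The helpers in this hunk (initial-state, update-index, collect-storage-objects, register-pending-migrations) all revolve around the dynamic *state* volatile that accumulates an id-remapping index and pending work while a file is exported or imported. Below is a minimal, self-contained sketch of that remapping pattern in plain Clojure; it is not the code added by this commit, and the simplified bodies (a fresh UUID/randomUUID per unknown id, lookup-index falling back to the original id) are assumptions for illustration only.

;; --- illustrative sketch, not part of the commit ---
(ns example.binfile-index
  (:import java.util.UUID))

;; Holds the state for the duration of one import/export run.
(def ^:dynamic *state* nil)

(defn update-index
  "Assign a fresh uuid to every id in `coll` that is not indexed yet,
  so imported objects never collide with already existing rows."
  ([coll] (update-index {} coll))
  ([index coll]
   (reduce (fn [index id]
             (if (contains? index id)
               index
               (assoc index id (UUID/randomUUID))))
           index
           coll)))

(defn lookup-index
  "Translate an id found inside imported data to its newly assigned id."
  [id]
  (get-in @*state* [:index id] id))

(comment
  ;; One import run: seed the index with the ids declared in the manifest,
  ;; then resolve every reference through lookup-index.
  (binding [*state* (volatile! {:index (update-index [(UUID/randomUUID)])})]
    (lookup-index (UUID/randomUUID))))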
@@ -49,15 +49,6 @@

(set! *warn-on-reflection* true)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; Threshold in MiB when we pass from using
;; in-memory byte-array's to use temporal files.
(def temp-file-threshold
  (* 1024 1024 2))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL STREAM IO API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

@@ -65,11 +56,6 @@
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
(def ^:const penpot-magic-number 800099563638710213)

;; A maximum (storage) object size allowed: 100MiB
(def ^:const max-object-size
  (* 1024 1024 100))

(def ^:dynamic *position* nil)

(defn get-mark

@@ -258,12 +244,12 @@
        p (tmp/tempfile :prefix "penpot.binfile.")]
    (assert-mark m :stream)

    (when (> s max-object-size)
    (when (> s bfc/max-object-size)
      (ex/raise :type :validation
                :code :max-file-size-reached
                :hint (str/ffmt "unable to import storage object with size % bytes" s)))

    (if (> s temp-file-threshold)
    (if (> s bfc/temp-file-threshold)
      (with-open [^OutputStream output (io/output-stream p)]
        (let [readed (io/copy! input output :offset 0 :size s)]
          (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)

@@ -381,10 +367,12 @@
         ::l/sync? true)

  (doseq [item media]
    (l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
    (l/dbg :hint "write penpot file media object"
           :id (:id item) ::l/sync? true))

  (doseq [item thumbnails]
    (l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
    (l/dbg :hint "write penpot file object thumbnail"
           :media-id (str (:media-id item)) ::l/sync? true))

  (doto output
    (write-obj! file)

@@ -466,8 +454,8 @@

(defn- read-import-v1
  [{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
  (db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
  (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])

  (bfc/disable-database-timeouts! cfg)

  (pu/with-open [input (zstd-input-stream input)
                 input (io/data-input-stream input)]

@@ -559,7 +547,9 @@

    (when (seq thumbnails)
      (let [thumbnails (remap-thumbnails thumbnails file-id')]
        (l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
        (l/dbg :hint "updated index with thumbnails"
               :total (count thumbnails)
               ::l/sync? true)
        (vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))

    (when (seq media)

@@ -738,7 +728,7 @@
               :cause @cs)))))

(defn import-files!
  [cfg input]
  [{:keys [::input] :as cfg}]

  (dm/assert!
   "expected valid profile-id and project-id on `cfg`"
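The v1 reader/writer above is built around a hand-rolled binary framing: a fixed penpot-magic-number, per-object marks, and size-prefixed payloads that are spilled to a temporary file once they exceed bfc/temp-file-threshold. The sketch below shows the general shape of such framing with plain java.io data streams; it reuses the magic-number constant from the hunk above, but the frame layout (one mark byte, one int size) is an assumption for illustration, not the real v1 wire format.

;; --- illustrative sketch, not part of the commit ---
(ns example.binfile-stream
  (:import java.io.ByteArrayInputStream
           java.io.ByteArrayOutputStream
           java.io.DataInputStream
           java.io.DataOutputStream))

(def magic-number 800099563638710213) ;; same constant as above

(defn write-blob!
  "Write a magic number, a one-byte mark and a size-prefixed payload."
  [^DataOutputStream out mark ^bytes payload]
  (.writeLong out magic-number)
  (.writeByte out (int mark))
  (.writeInt out (alength payload))
  (.write out payload 0 (alength payload)))

(defn read-blob!
  "Read one frame back, validating the magic number first."
  [^DataInputStream in]
  (when (not= (.readLong in) magic-number)
    (throw (ex-info "invalid penpot file" {:code :invalid-magic})))
  (let [mark (.readByte in)
        size (.readInt in)
        data (byte-array size)]
    (.readFully in data)
    {:mark mark :size size :data data}))

(comment
  (let [buf (ByteArrayOutputStream.)]
    (with-open [out (DataOutputStream. buf)]
      (write-blob! out 1 (.getBytes "hello" "UTF-8")))
    (with-open [in (DataInputStream. (ByteArrayInputStream. (.toByteArray buf)))]
      (read-blob! in))))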
@@ -141,16 +141,15 @@
      (write! cfg :team-font-variant id font))))

(defn- write-project!
  [cfg project-id]
  (let [project (bfc/get-project cfg project-id)]
    (events/tap :progress
                {:op :export
                 :section :write-project
                 :id project-id
                 :name (:name project)})
    (l/trc :hint "write" :obj "project" :id (str project-id))
    (write! cfg :project (str project-id) project)
    (vswap! bfc/*state* update :projects conj project-id)))
  [cfg project]
  (events/tap :progress
              {:op :export
               :section :write-project
               :id (:id project)
               :name (:name project)})
  (l/trc :hint "write" :obj "project" :id (str (:id project)))
  (write! cfg :project (str (:id project)) project)
  (vswap! bfc/*state* update :projects conj (:id project)))

(defn- write-file!
  [cfg file-id]

@@ -363,7 +362,7 @@
        (bfc/get-team-projects cfg team-id))

  (run! (partial write-file! cfg)
        (bfc/get-team-files cfg team-id))
        (bfc/get-team-files-ids cfg team-id))

  (run! (partial write-storage-object! cfg)
        (-> bfc/*state* deref :storage-objects))
backend/src/app/binfile/v3.clj (new file, 957 lines)

@@ -0,0 +1,957 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.binfile.v3
  "A ZIP based binary file exportation"
  (:refer-clojure :exclude [read])
  (:require
   [app.binfile.common :as bfc]
   [app.common.data :as d]
   [app.common.data.macros :as dm]
   [app.common.exceptions :as ex]
   [app.common.json :as json]
   [app.common.logging :as l]
   [app.common.schema :as sm]
   [app.common.thumbnails :as cth]
   [app.common.types.color :as ctcl]
   [app.common.types.component :as ctc]
   [app.common.types.file :as ctf]
   [app.common.types.page :as ctp]
   [app.common.types.plugins :as ctpg]
   [app.common.types.shape :as cts]
   [app.common.types.typography :as cty]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.storage :as sto]
   [app.storage.impl :as sto.impl]
   [app.util.events :as events]
   [app.util.time :as dt]
   [clojure.java.io :as jio]
   [cuerdas.core :as str]
   [datoteka.fs :as fs]
   [datoteka.io :as io])
  (:import
   java.io.InputStream
   java.io.OutputStreamWriter
   java.util.zip.ZipEntry
   java.util.zip.ZipFile
   java.util.zip.ZipOutputStream))

;; --- SCHEMA

(def ^:private schema:manifest
  [:map {:title "Manifest"}
   [:version ::sm/int]
   [:type :string]

   [:generated-by {:optional true} :string]

   [:files
    [:vector
     [:map
      [:id ::sm/uuid]
      [:name :string]
      [:project-id ::sm/uuid]]]]

   [:relations {:optional true}
    [:vector
     [:tuple ::sm/uuid ::sm/uuid]]]])

(def ^:private schema:storage-object
  [:map {:title "StorageObject"}
   [:id ::sm/uuid]
   [:size ::sm/int]
   [:content-type :string]
   [:bucket [::sm/one-of {:format :string} sto/valid-buckets]]
   [:hash :string]])

(def ^:private schema:file-thumbnail
  [:map {:title "FileThumbnail"}
   [:file-id ::sm/uuid]
   [:page-id ::sm/uuid]
   [:frame-id ::sm/uuid]
   [:tag :string]
   [:media-id ::sm/uuid]])
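Together with the write paths later in this namespace, these schemas describe the on-disk layout of a v3 export: a plain ZIP archive with manifest.json at the root, one files/<file-id>.json per exported file plus nested pages/, components/, colors/, typographies/ and thumbnails/ entries, and objects/<id>.json (plus the binary payload) for storage objects. Here is a minimal, self-contained sketch of producing an archive with that shape using java.util.zip; it uses pr-str in place of Penpot's internal app.common.json encoder, and the demo manifest content is an assumption for illustration, not a specification.

;; --- illustrative sketch, not part of the commit ---
(ns example.binfile-zip
  (:require [clojure.java.io :as io])
  (:import java.io.OutputStreamWriter
           java.util.zip.ZipEntry
           java.util.zip.ZipOutputStream))

(defn write-entry!
  "Add one text entry to the archive under `path`."
  [^ZipOutputStream output ^String path data]
  (.putNextEntry output (ZipEntry. path))
  (let [writer (OutputStreamWriter. output "UTF-8")]
    (.write writer (pr-str data)) ;; stand-in for the JSON encoder
    ;; flush but do not close: closing the writer would close the zip stream
    (.flush writer))
  (.closeEntry output))

(defn export-demo!
  "Write a toy archive mimicking the binfile-v3 layout."
  [target file-id project-id]
  (with-open [output (ZipOutputStream. (io/output-stream target))]
    (write-entry! output "manifest.json"
                  {:version 1
                   :type "penpot/export-files"
                   :files [{:id file-id :name "demo" :project-id project-id}]})
    (write-entry! output (str "files/" file-id ".json")
                  {:id file-id :name "demo" :project-id project-id})))

(comment
  (export-demo! "demo.zip" (random-uuid) (random-uuid)))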
;; --- ENCODERS
|
||||
|
||||
(def encode-file
|
||||
(sm/encoder ::ctf/file sm/json-transformer))
|
||||
|
||||
(def encode-page
|
||||
(sm/encoder ::ctp/page sm/json-transformer))
|
||||
|
||||
(def encode-shape
|
||||
(sm/encoder ::cts/shape sm/json-transformer))
|
||||
|
||||
(def encode-media
|
||||
(sm/encoder ::ctf/media sm/json-transformer))
|
||||
|
||||
(def encode-component
|
||||
(sm/encoder ::ctc/component sm/json-transformer))
|
||||
|
||||
(def encode-color
|
||||
(sm/encoder ::ctcl/color sm/json-transformer))
|
||||
|
||||
(def encode-typography
|
||||
(sm/encoder ::cty/typography sm/json-transformer))
|
||||
|
||||
(def encode-plugin-data
|
||||
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
|
||||
|
||||
(def encode-storage-object
|
||||
(sm/encoder schema:storage-object sm/json-transformer))
|
||||
|
||||
(def encode-file-thumbnail
|
||||
(sm/encoder schema:file-thumbnail sm/json-transformer))
|
||||
|
||||
;; --- DECODERS
|
||||
|
||||
(def decode-manifest
|
||||
(sm/decoder schema:manifest sm/json-transformer))
|
||||
|
||||
(def decode-media
|
||||
(sm/decoder ::ctf/media sm/json-transformer))
|
||||
|
||||
(def decode-component
|
||||
(sm/decoder ::ctc/component sm/json-transformer))
|
||||
|
||||
(def decode-color
|
||||
(sm/decoder ::ctcl/color sm/json-transformer))
|
||||
|
||||
(def decode-file
|
||||
(sm/decoder ::ctf/file sm/json-transformer))
|
||||
|
||||
(def decode-page
|
||||
(sm/decoder ::ctp/page sm/json-transformer))
|
||||
|
||||
(def decode-shape
|
||||
(sm/decoder ::cts/shape sm/json-transformer))
|
||||
|
||||
(def decode-typography
|
||||
(sm/decoder ::cty/typography sm/json-transformer))
|
||||
|
||||
(def decode-plugin-data
|
||||
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
|
||||
|
||||
(def decode-storage-object
|
||||
(sm/decoder schema:storage-object sm/json-transformer))
|
||||
|
||||
(def decode-file-thumbnail
|
||||
(sm/decoder schema:file-thumbnail sm/json-transformer))
|
||||
|
||||
;; --- VALIDATORS
|
||||
|
||||
(def validate-manifest
|
||||
(sm/check-fn schema:manifest))
|
||||
|
||||
(def validate-file
|
||||
(sm/check-fn ::ctf/file))
|
||||
|
||||
(def validate-page
|
||||
(sm/check-fn ::ctp/page))
|
||||
|
||||
(def validate-shape
|
||||
(sm/check-fn ::cts/shape))
|
||||
|
||||
(def validate-media
|
||||
(sm/check-fn ::ctf/media))
|
||||
|
||||
(def validate-color
|
||||
(sm/check-fn ::ctcl/color))
|
||||
|
||||
(def validate-component
|
||||
(sm/check-fn ::ctc/component))
|
||||
|
||||
(def validate-typography
|
||||
(sm/check-fn ::cty/typography))
|
||||
|
||||
(def validate-plugin-data
|
||||
(sm/check-fn ::ctpg/plugin-data))
|
||||
|
||||
(def validate-storage-object
|
||||
(sm/check-fn schema:storage-object))
|
||||
|
||||
(def validate-file-thumbnail
|
||||
(sm/check-fn schema:file-thumbnail))
|
||||
|
||||
;; --- EXPORT IMPL
|
||||
|
||||
(defn- write-entry!
|
||||
[^ZipOutputStream output ^String path data]
|
||||
(.putNextEntry output (ZipEntry. path))
|
||||
(let [writer (OutputStreamWriter. output "UTF-8")]
|
||||
(json/write writer data :indent true :key-fn json/write-camel-key)
|
||||
(.flush writer))
|
||||
(.closeEntry output))
|
||||
|
||||
(defn- get-file
|
||||
[{:keys [::embed-assets ::include-libraries] :as cfg} file-id]
|
||||
|
||||
(when (and include-libraries embed-assets)
|
||||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(cond-> (bfc/get-file cfg file-id)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
embed-assets
|
||||
(update :data #(bfc/embed-assets cfg % file-id)))))
|
||||
|
||||
(defn- resolve-extension
|
||||
[mtype]
|
||||
(case mtype
|
||||
"image/png" ".png"
|
||||
"image/jpeg" ".jpg"
|
||||
"image/gif" ".gif"
|
||||
"image/svg+xml" ".svg"
|
||||
"image/webp" ".webp"
|
||||
"font/woff" ".woff"
|
||||
"font/woff2" ".woff2"
|
||||
"font/ttf" ".ttf"
|
||||
"font/otf" ".otf"
|
||||
"application/octet-stream" ".bin"))
|
||||
|
||||
(defn- export-storage-objects
|
||||
[{:keys [::output] :as cfg}]
|
||||
(let [storage (sto/resolve cfg)]
|
||||
(doseq [id (-> bfc/*state* deref :storage-objects not-empty)]
|
||||
(let [sobject (sto/get-object storage id)
|
||||
smeta (meta sobject)
|
||||
ext (resolve-extension (:content-type smeta))
|
||||
path (str "objects/" id ".json")
|
||||
params (-> (meta sobject)
|
||||
(assoc :id (:id sobject))
|
||||
(assoc :size (:size sobject))
|
||||
(encode-storage-object))]
|
||||
|
||||
(write-entry! output path params)
|
||||
|
||||
(with-open [input (sto/get-object-data storage sobject)]
|
||||
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
|
||||
(io/copy! input output (:size sobject))
|
||||
(.closeEntry output))))))
|
||||
|
||||
(defn- export-file
|
||||
[{:keys [::file-id ::output] :as cfg}]
|
||||
(let [file (get-file cfg file-id)
|
||||
media (->> (bfc/get-file-media cfg file)
|
||||
(map (fn [media]
|
||||
(dissoc media :file-id))))
|
||||
|
||||
data (:data file)
|
||||
typographies (:typographies data)
|
||||
plugins-data (:plugin-data data)
|
||||
components (:components data)
|
||||
colors (:colors data)
|
||||
|
||||
pages (:pages data)
|
||||
pages-index (:pages-index data)
|
||||
|
||||
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
|
||||
|
||||
(vswap! bfc/*state* update :files assoc file-id
|
||||
{:id file-id
|
||||
:project-id (:project-id file)
|
||||
:name (:name file)})
|
||||
|
||||
(let [file (cond-> (dissoc file :data)
|
||||
(:options data)
|
||||
(assoc :options (:options data))
|
||||
:always
|
||||
(encode-file))
|
||||
path (str "files/" file-id ".json")]
|
||||
(write-entry! output path file))
|
||||
|
||||
(doseq [[index page-id] (d/enumerate pages)]
|
||||
(let [path (str "files/" file-id "/pages/" page-id ".json")
|
||||
page (get pages-index page-id)
|
||||
objects (:objects page)
|
||||
page (-> page
|
||||
(dissoc :objects)
|
||||
(assoc :index index))
|
||||
page (encode-page page)]
|
||||
|
||||
(write-entry! output path page)
|
||||
|
||||
(doseq [[shape-id shape] objects]
|
||||
(let [path (str "files/" file-id "/pages/" page-id "/" shape-id ".json")
|
||||
shape (assoc shape :page-id page-id)
|
||||
shape (encode-shape shape)]
|
||||
(write-entry! output path shape)))))
|
||||
|
||||
(vswap! bfc/*state* bfc/collect-storage-objects media)
|
||||
(vswap! bfc/*state* bfc/collect-storage-objects thumbnails)
|
||||
|
||||
(doseq [{:keys [id] :as media} media]
|
||||
(let [path (str "files/" file-id "/media/" id ".json")
|
||||
media (encode-media media)]
|
||||
(write-entry! output path media)))
|
||||
|
||||
(doseq [thumbnail thumbnails]
|
||||
(let [data (cth/parse-object-id (:object-id thumbnail))
|
||||
path (str "files/" file-id "/thumbnails/" (:page-id data)
|
||||
"/" (:frame-id data) ".json")
|
||||
data (-> data
|
||||
(assoc :media-id (:media-id thumbnail))
|
||||
(encode-file-thumbnail))]
|
||||
(write-entry! output path data)))
|
||||
|
||||
(doseq [[id component] components]
|
||||
(let [path (str "files/" file-id "/components/" id ".json")
|
||||
component (encode-component component)]
|
||||
(write-entry! output path component)))
|
||||
|
||||
(doseq [[id color] colors]
|
||||
(let [path (str "files/" file-id "/colors/" id ".json")
|
||||
color (-> (encode-color color)
|
||||
(dissoc :file-id))
|
||||
color (cond-> color
|
||||
(and (contains? color :path)
|
||||
(str/empty? (:path color)))
|
||||
(dissoc :path))]
|
||||
(write-entry! output path color)))
|
||||
|
||||
(doseq [[id object] typographies]
|
||||
(let [path (str "files/" file-id "/typographies/" id ".json")
|
||||
color (encode-typography object)]
|
||||
(write-entry! output path color)))
|
||||
|
||||
(when-let [data (not-empty plugins-data)]
|
||||
(let [path (str "files/" file-id "/plugin-data.json")]
|
||||
(write-entry! output path data)))))
|
||||
|
||||
(defn- export-files
|
||||
[{:keys [::ids ::include-libraries ::output] :as cfg}]
|
||||
(let [ids (into ids (when include-libraries (bfc/get-libraries cfg ids)))
|
||||
rels (if include-libraries
|
||||
(->> (bfc/get-files-rels cfg ids)
|
||||
(mapv (juxt :file-id :library-file-id)))
|
||||
[])]
|
||||
|
||||
(vswap! bfc/*state* assoc :files (d/ordered-map))
|
||||
|
||||
;; Write all the exporting files
|
||||
(doseq [[index file-id] (d/enumerate ids)]
|
||||
(-> cfg
|
||||
(assoc ::file-id file-id)
|
||||
(assoc ::file-seqn index)
|
||||
(export-file)))
|
||||
|
||||
;; Write manifest file
|
||||
(let [files (:files @bfc/*state*)
|
||||
params {:type "penpot/export-files"
|
||||
:version 1
|
||||
:generated-by (str "penpot/" (:full cf/version))
|
||||
:files (vec (vals files))
|
||||
:relations rels}]
|
||||
(write-entry! output "manifest.json" params))))
|
||||
|
||||
;; --- IMPORT IMPL
|
||||
|
||||
(defn- read-zip-entries
|
||||
[^ZipFile input]
|
||||
(into #{} (iterator-seq (.entries input))))
|
||||
|
||||
(defn- get-zip-entry*
|
||||
[^ZipFile input ^String path]
|
||||
(.getEntry input path))
|
||||
|
||||
(defn- get-zip-entry
|
||||
[input path]
|
||||
(let [entry (get-zip-entry* input path)]
|
||||
(when-not entry
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, missing underlying zip entry"
|
||||
:path path))
|
||||
entry))
|
||||
|
||||
(defn- get-zip-entry-size
|
||||
[^ZipEntry entry]
|
||||
(.getSize entry))
|
||||
|
||||
(defn- zip-entry-name
|
||||
[^ZipEntry entry]
|
||||
(.getName entry))
|
||||
|
||||
(defn- zip-entry-stream
|
||||
^InputStream
|
||||
[^ZipFile input ^ZipEntry entry]
|
||||
(.getInputStream input entry))
|
||||
|
||||
(defn- zip-entry-reader
|
||||
[^ZipFile input ^ZipEntry entry]
|
||||
(-> (zip-entry-stream input entry)
|
||||
(jio/reader :encoding "UTF-8")))
|
||||
|
||||
(defn- zip-entry-storage-content
|
||||
"Wraps a ZipFile and ZipEntry into a penpot storage compatible
|
||||
object and avoid creating temporal objects"
|
||||
[input entry]
|
||||
(let [hash (delay (->> entry
|
||||
(zip-entry-stream input)
|
||||
(sto.impl/calculate-hash)))]
|
||||
(reify
|
||||
sto.impl/IContentObject
|
||||
(get-size [_]
|
||||
(get-zip-entry-size entry))
|
||||
|
||||
sto.impl/IContentHash
|
||||
(get-hash [_]
|
||||
(deref hash))
|
||||
|
||||
jio/IOFactory
|
||||
(make-reader [this opts]
|
||||
(jio/make-reader this opts))
|
||||
(make-writer [_ _]
|
||||
(throw (UnsupportedOperationException. "not implemented")))
|
||||
|
||||
(make-input-stream [_ _]
|
||||
(zip-entry-stream input entry))
|
||||
(make-output-stream [_ _]
|
||||
(throw (UnsupportedOperationException. "not implemented"))))))
|
||||
|
||||
(defn- read-manifest
|
||||
[^ZipFile input]
|
||||
(let [entry (get-zip-entry input "manifest.json")]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
|
||||
(decode-manifest manifest)))))
|
||||
|
||||
(defn- match-media-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/media/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-color-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/colors/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-component-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/components/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-typography-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/typographies/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-thumbnail-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/thumbnails/([^/]+)/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ page-id frame-id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:page-id (parse-uuid page-id)
|
||||
:frame-id (parse-uuid frame-id)
|
||||
:file-id file-id}))))
|
||||
|
||||
(defn- match-page-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/pages/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-shape-entry-fn
|
||||
[file-id page-id]
|
||||
(let [pattern (str "^files/" file-id "/pages/" page-id "/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:page-id page-id
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-storage-entry-fn
|
||||
[]
|
||||
(let [pattern (str "^objects/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- read-entry
|
||||
[^ZipFile input entry]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(json/read reader :key-fn json/read-kebab-key)))
|
||||
|
||||
(defn- read-file
|
||||
[{:keys [::input ::file-id]}]
|
||||
(let [path (str "files/" file-id ".json")
|
||||
entry (get-zip-entry input path)]
|
||||
(-> (read-entry input entry)
|
||||
(decode-file)
|
||||
(validate-file))))
|
||||
|
||||
(defn- read-file-plugin-data
|
||||
[{:keys [::input ::file-id]}]
|
||||
(let [path (str "files/" file-id "/plugin-data.json")
|
||||
entry (get-zip-entry* input path)]
|
||||
(some->> entry
|
||||
(read-entry input)
|
||||
(decode-plugin-data)
|
||||
(validate-plugin-data))))
|
||||
|
||||
(defn- read-file-media
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-media-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-media)
|
||||
(validate-media))
|
||||
object (assoc object :file-id file-id)]
|
||||
(if (= id (:id object))
|
||||
(conj result object)
|
||||
result)))
|
||||
[])
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-colors
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-color-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-color)
|
||||
(validate-color))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-components
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-component-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-component)
|
||||
(validate-component))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-typographies
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-typography-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-typography)
|
||||
(validate-typography))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-shapes
|
||||
[{:keys [::input ::file-id ::page-id ::entries] :as cfg}]
|
||||
(->> (keep (match-shape-entry-fn file-id page-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-shape)
|
||||
(validate-shape))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-pages
|
||||
[{:keys [::input ::file-id ::entries] :as cfg}]
|
||||
(->> (keep (match-page-entry-fn file-id) entries)
|
||||
(keep (fn [{:keys [id entry]}]
|
||||
(let [page (->> (read-entry input entry)
|
||||
(decode-page))
|
||||
page (dissoc page :options)]
|
||||
(when (= id (:id page))
|
||||
(let [objects (-> (assoc cfg ::page-id id)
|
||||
(read-file-shapes))]
|
||||
(assoc page :objects objects))))))
|
||||
(sort-by :index)
|
||||
(reduce (fn [result {:keys [id] :as page}]
|
||||
(assoc result id (dissoc page :index)))
|
||||
(d/ordered-map))))
|
||||
|
||||
(defn- read-file-thumbnails
|
||||
[{:keys [::input ::file-id ::entries] :as cfg}]
|
||||
(->> (keep (match-thumbnail-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [page-id frame-id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-file-thumbnail)
|
||||
(validate-file-thumbnail))]
|
||||
(if (and (= frame-id (:frame-id object))
|
||||
(= page-id (:page-id object)))
|
||||
(conj result object)
|
||||
result)))
|
||||
[])
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-data
|
||||
[{:keys [] :as cfg}]
|
||||
(let [colors (read-file-colors cfg)
|
||||
typographies (read-file-typographies cfg)
|
||||
components (read-file-components cfg)
|
||||
plugin-data (read-file-plugin-data cfg)
|
||||
pages (read-file-pages cfg)]
|
||||
|
||||
{:pages (-> pages keys vec)
|
||||
:pages-index (into {} pages)
|
||||
:colors colors
|
||||
:typographies typographies
|
||||
:components components
|
||||
:plugin-data plugin-data}))
|
||||
|
||||
(defn- import-file
|
||||
[{:keys [::db/conn ::project-id ::file-id ::file-name] :as cfg}]
|
||||
(let [file-id' (bfc/lookup-index file-id)
|
||||
file (read-file cfg)
|
||||
media (read-file-media cfg)
|
||||
thumbnails (read-file-thumbnails cfg)]
|
||||
|
||||
(l/dbg :hint "processing file"
|
||||
:id (str file-id')
|
||||
:prev-id (str file-id)
|
||||
:features (str/join "," (:features file))
|
||||
:version (:version file)
|
||||
::l/sync? true)
|
||||
|
||||
(events/tap :progress {:op :import :section :file :name file-name})
|
||||
|
||||
(when media
|
||||
;; Update index with media
|
||||
(l/dbg :hint "update media index"
|
||||
:file-id (str file-id')
|
||||
:total (count media)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||
(vswap! bfc/*state* update :media into media))
|
||||
|
||||
(when thumbnails
|
||||
(l/dbg :hint "update thumbnails index"
|
||||
:file-id (str file-id')
|
||||
:total (count thumbnails)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
|
||||
(vswap! bfc/*state* update :thumbnails into thumbnails))
|
||||
|
||||
(let [data (-> (read-file-data cfg)
|
||||
(d/without-nils)
|
||||
(assoc :id file-id')
|
||||
(cond-> (:options file)
|
||||
(assoc :options (:options file))))
|
||||
|
||||
file (-> file
|
||||
(assoc :id file-id')
|
||||
(assoc :data data)
|
||||
(assoc :name file-name)
|
||||
(assoc :project-id project-id)
|
||||
(dissoc :options)
|
||||
(bfc/process-file))]
|
||||
|
||||
(->> file
|
||||
(bfc/register-pending-migrations cfg)
|
||||
(bfc/persist-file! cfg))
|
||||
|
||||
(when (::bfc/overwrite cfg)
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id'}))
|
||||
|
||||
file-id')))
|
||||
|
||||
(defn- import-file-relations
|
||||
[{:keys [::db/conn ::manifest ::bfc/timestamp] :as cfg}]
|
||||
(events/tap :progress {:op :import :section :relations})
|
||||
|
||||
(doseq [[file-id libr-id] (:relations manifest)]
|
||||
|
||||
(let [file-id (bfc/lookup-index file-id)
|
||||
libr-id (bfc/lookup-index libr-id)]
|
||||
|
||||
(when (and file-id libr-id)
|
||||
(l/dbg :hint "create file library link"
|
||||
:file-id (str file-id)
|
||||
:lib-id (str libr-id)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-library-rel
|
||||
{:synced-at timestamp
|
||||
:file-id file-id
|
||||
:library-file-id libr-id})))))
|
||||
|
||||
(defn- import-storage-objects
|
||||
[{:keys [::input ::entries ::bfc/timestamp] :as cfg}]
|
||||
(events/tap :progress {:op :import :section :storage-objects})
|
||||
|
||||
(let [storage (sto/resolve cfg)
|
||||
entries (keep (match-storage-entry-fn) entries)]
|
||||
|
||||
(doseq [{:keys [id entry]} entries]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-storage-object)
|
||||
(validate-storage-object))]
|
||||
|
||||
(when (not= id (:id object))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
|
||||
:expected-id (str id)
|
||||
:found-id (str (:id object))))
|
||||
|
||||
(let [ext (resolve-extension (:content-type object))
|
||||
path (str "objects/" id ext)
|
||||
content (->> path
|
||||
(get-zip-entry input)
|
||||
(zip-entry-storage-content input))]
|
||||
|
||||
(when (not= (:size object) (sto/get-size content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "found corrupted storage object: size does not match"
|
||||
:path path
|
||||
:expected-size (:size object)
|
||||
:found-size (sto/get-size content)))
|
||||
|
||||
(when (not= (:hash object) (sto/get-hash content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "found corrupted storage object: hash does not match"
|
||||
:path path
|
||||
:expected-hash (:hash object)
|
||||
:found-hash (sto/get-hash content)))
|
||||
|
||||
(let [params (-> object
|
||||
(dissoc :id :size)
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(l/dbg :hint "persisted storage object"
|
||||
:id (str (:id sobject))
|
||||
:prev-id (str id)
|
||||
:bucket (:bucket params)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
|
||||
|
||||
(defn- import-file-media
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:op :import :section :media})
|
||||
|
||||
(doseq [item (:media @bfc/*state*)]
|
||||
(let [params (-> item
|
||||
(update :id bfc/lookup-index)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||
|
||||
(l/dbg :hint "inserting file media object"
|
||||
:id (str (:id params))
|
||||
:file-id (str (:file-id params))
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-media-object params
|
||||
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
|
||||
|
||||
(defn- import-file-thumbnails
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:op :import :section :thumbnails})
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [file-id (bfc/lookup-index (:file-id item))
|
||||
media-id (bfc/lookup-index (:media-id item))
|
||||
object-id (-> (assoc item :file-id file-id)
|
||||
(cth/fmt-object-id))
|
||||
|
||||
params {:file-id file-id
|
||||
:object-id object-id
|
||||
:tag (:tag item)
|
||||
:media-id media-id}]
|
||||
|
||||
(l/dbg :hint "inserting file object thumbnail"
|
||||
:file-id (str file-id)
|
||||
:media-id (str media-id)
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail params
|
||||
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
|
||||
|
||||
(defn- import-files
|
||||
[{:keys [::bfc/timestamp ::input ::name] :or {timestamp (dt/now)} :as cfg}]
|
||||
|
||||
(dm/assert!
|
||||
"expected zip file"
|
||||
(instance? ZipFile input))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [manifest (-> (read-manifest input)
|
||||
(validate-manifest))
|
||||
entries (read-zip-entries input)]
|
||||
|
||||
(when-not (= "penpot/export-files" (:type manifest))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-binfile-v3-manifest
|
||||
:hint "unexpected type on manifest"
|
||||
:manifest manifest))
|
||||
|
||||
;; Check if all files referenced on manifest are present
|
||||
(doseq [{file-id :id} (:files manifest)]
|
||||
(let [path (str "files/" file-id ".json")]
|
||||
(when-not (get-zip-entry input path)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-binfile-v3
|
||||
:hint "some files referenced on manifest not found"
|
||||
:path path
|
||||
:file-id file-id))))
|
||||
|
||||
(events/tap :progress {:op :import :section :manifest})
|
||||
|
||||
(let [index (bfc/update-index (map :id (:files manifest)))
|
||||
state {:media [] :index index}
|
||||
cfg (-> cfg
|
||||
(assoc ::entries entries)
|
||||
(assoc ::manifest manifest)
|
||||
(assoc ::bfc/timestamp timestamp))]
|
||||
|
||||
(binding [bfc/*state* (volatile! state)]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
(let [ids (->> (:files manifest)
|
||||
(reduce (fn [result {:keys [id] :as file}]
|
||||
(let [name' (get file :name)
|
||||
name' (if (map? name)
|
||||
(get name id)
|
||||
name')]
|
||||
(conj result (-> cfg
|
||||
(assoc ::file-id id)
|
||||
(assoc ::file-name name')
|
||||
(import-file)))))
|
||||
[]))]
|
||||
(import-file-relations cfg)
|
||||
(import-storage-objects cfg)
|
||||
(import-file-media cfg)
|
||||
(import-file-thumbnails cfg)
|
||||
|
||||
(bfc/apply-pending-migrations! cfg)
|
||||
|
||||
ids)))))))
|
||||
|
||||
;; --- PUBLIC API
|
||||
|
||||
(defn export-files!
|
||||
"Do the exportation of a specified file in custom penpot binary
|
||||
format. There are some options available for customize the output:
|
||||
|
||||
`::include-libraries`: additionally to the specified file, all the
|
||||
linked libraries also will be included (including transitive
|
||||
dependencies).
|
||||
|
||||
`::embed-assets`: instead of including the libraries, embed in the
|
||||
same file library all assets used from external libraries."
|
||||
|
||||
[{:keys [::ids] :as cfg} output]
|
||||
|
||||
(dm/assert!
|
||||
"expected a set of uuid's for `::ids` parameter"
|
||||
(and (set? ids)
|
||||
(every? uuid? ids)))
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
(l/info :hint "start exportation" :export-id (str id))
|
||||
(binding [bfc/*state* (volatile! (bfc/initial-state))]
|
||||
(with-open [output (io/output-stream output)]
|
||||
(with-open [output (ZipOutputStream. output)]
|
||||
(let [cfg (assoc cfg ::output output)]
|
||||
(export-files cfg)
|
||||
(export-storage-objects cfg)))))
|
||||
|
||||
(catch java.io.IOException _cause
|
||||
;; Do nothing, EOF means client closes connection abruptly
|
||||
(vreset! ab true)
|
||||
nil)
|
||||
|
||||
(catch Throwable cause
|
||||
(vreset! cs cause)
|
||||
(vreset! ab true)
|
||||
(throw cause))
|
||||
|
||||
(finally
|
||||
(l/info :hint "exportation finished" :export-id (str id)
|
||||
:elapsed (str (inst-ms (tp)) "ms")
|
||||
:aborted @ab
|
||||
:cause @cs)))))
|
||||
|
||||
|
||||
(defn import-files!
|
||||
[{:keys [::input] :as cfg}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid profile-id and project-id on `cfg`"
|
||||
(and (uuid? (::profile-id cfg))
|
||||
(uuid? (::project-id cfg))))
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory input))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
(try
|
||||
(with-open [input (ZipFile. (fs/file input))]
|
||||
(import-files (assoc cfg ::input input)))
|
||||
|
||||
(catch Throwable cause
|
||||
(vreset! cs cause)
|
||||
(throw cause))
|
||||
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:error? (some? @cs))))))
@@ -295,8 +295,9 @@
          cfg  (assoc cfg
                      ::bf.v1/overwrite false
                      ::bf.v1/profile-id profile-id
                      ::bf.v1/project-id project-id)]
      (bf.v1/import-files! cfg path)
                      ::bf.v1/project-id project-id
                      ::bf.v1/input path)]
      (bf.v1/import-files! cfg)
      {::rres/status 200
       ::rres/headers {"content-type" "text/plain"}
       ::rres/body "OK CLONED"})

@@ -329,8 +330,9 @@
                     ::bf.v1/overwrite overwrite?
                     ::bf.v1/migrate migrate?
                     ::bf.v1/profile-id profile-id
                     ::bf.v1/project-id project-id)]
      (bf.v1/import-files! cfg path)
                     ::bf.v1/project-id project-id
                     ::bf.v1/input path)]
      (bf.v1/import-files! cfg)
      {::rres/status 200
       ::rres/headers {"content-type" "text/plain"}
       ::rres/body "OK"})))
@@ -8,6 +8,7 @@
  (:refer-clojure :exclude [assert])
  (:require
   [app.binfile.v1 :as bf.v1]
   [app.binfile.v3 :as bf.v3]
   [app.common.logging :as l]
   [app.common.schema :as sm]
   [app.db :as db]

@@ -35,51 +36,103 @@
  [:map {:title "export-binfile"}
   [:name [:string {:max 250}]]
   [:file-id ::sm/uuid]
   [:include-libraries :boolean]
   [:embed-assets :boolean]])
   [:version {:optional true} ::sm/int]
   [:include-libraries ::sm/boolean]
   [:embed-assets ::sm/boolean]])

(defn stream-export-v1
  [cfg {:keys [file-id include-libraries embed-assets] :as params}]
  (reify rres/StreamableResponseBody
    (-write-body-to-stream [_ _ output-stream]
      (try
        (-> cfg
            (assoc ::bf.v1/ids #{file-id})
            (assoc ::bf.v1/embed-assets embed-assets)
            (assoc ::bf.v1/include-libraries include-libraries)
            (bf.v1/export-files! output-stream))
        (catch Throwable cause
          (l/err :hint "exception on exporting file"
                 :file-id (str file-id)
                 :cause cause))))))

(defn stream-export-v3
  [cfg {:keys [file-id include-libraries embed-assets] :as params}]
  (reify rres/StreamableResponseBody
    (-write-body-to-stream [_ _ output-stream]
      (try
        (-> cfg
            (assoc ::bf.v3/ids #{file-id})
            (assoc ::bf.v3/embed-assets embed-assets)
            (assoc ::bf.v3/include-libraries include-libraries)
            (bf.v3/export-files! output-stream))
        (catch Throwable cause
          (l/err :hint "exception on exporting file"
                 :file-id (str file-id)
                 :cause cause))))))

(sv/defmethod ::export-binfile
  "Export a penpot file in a binary format."
  {::doc/added "1.15"
   ::webhooks/event? true
   ::sm/result schema:export-binfile}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries embed-assets] :as params}]
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
  (files/check-read-permissions! pool profile-id file-id)
  (fn [_]
    {::rres/status 200
     ::rres/headers {"content-type" "application/octet-stream"}
     ::rres/body (reify rres/StreamableResponseBody
                   (-write-body-to-stream [_ _ output-stream]
                     (try
                       (-> cfg
                           (assoc ::bf.v1/ids #{file-id})
                           (assoc ::bf.v1/embed-assets embed-assets)
                           (assoc ::bf.v1/include-libraries include-libraries)
                           (bf.v1/export-files! output-stream))
                       (catch Throwable cause
                         (l/err :hint "exception on exporting file"
                                :file-id (str file-id)
                                :cause cause)))))}))
    (let [version (or version 1)
          body    (case (int version)
                    1 (stream-export-v1 cfg params)
                    2 (throw (ex-info "not-implemented" {}))
                    3 (stream-export-v3 cfg params))]

      {::rres/status 200
       ::rres/headers {"content-type" "application/octet-stream"}
       ::rres/body body})))

;; --- Command: import-binfile

(defn- import-binfile-v1
  [{:keys [::wrk/executor] :as cfg} {:keys [project-id profile-id name file]}]
  (let [cfg (-> cfg
                (assoc ::bf.v1/project-id project-id)
                (assoc ::bf.v1/profile-id profile-id)
                (assoc ::bf.v1/name name)
                (assoc ::bf.v1/input (:path file)))]

    ;; NOTE: the importation process performs some operations that are
    ;; not very friendly with virtual threads, and for avoid
    ;; unexpected blocking of other concurrent operations we dispatch
    ;; that operation to a dedicated executor.
    (px/invoke! executor (partial bf.v1/import-files! cfg))))

(defn- import-binfile-v3
  [{:keys [::wrk/executor] :as cfg} {:keys [project-id profile-id name file]}]
  (let [cfg (-> cfg
                (assoc ::bf.v3/project-id project-id)
                (assoc ::bf.v3/profile-id profile-id)
                (assoc ::bf.v3/name name)
                (assoc ::bf.v3/input (:path file)))]
    ;; NOTE: the importation process performs some operations that are
    ;; not very friendly with virtual threads, and for avoid
    ;; unexpected blocking of other concurrent operations we dispatch
    ;; that operation to a dedicated executor.
    (px/invoke! executor (partial bf.v3/import-files! cfg))))

(defn- import-binfile
  [{:keys [::wrk/executor ::bf.v1/project-id ::db/pool] :as cfg} input]
  ;; NOTE: the importation process performs some operations that
  ;; are not very friendly with virtual threads, and for avoid
  ;; unexpected blocking of other concurrent operations we
  ;; dispatch that operation to a dedicated executor.
  (let [result (px/invoke! executor (partial bf.v1/import-files! cfg input))]
  [{:keys [::db/pool] :as cfg} {:keys [project-id version] :as params}]
  (let [result (case (int version)
                 1 (import-binfile-v1 cfg params)
                 3 (import-binfile-v3 cfg params))]
    (db/update! pool :project
                {:modified-at (dt/now)}
                {:id project-id})
    result))

(def ^:private
  schema:import-binfile
(def ^:private schema:import-binfile
  [:map {:title "import-binfile"}
   [:name [:string {:max 250}]]
   [:name [:or [:string {:max 250}]
              [:map-of ::sm/uuid [:string {:max 250}]]]]
   [:project-id ::sm/uuid]
   [:version {:optional true} ::sm/int]
   [:file ::media/upload]])

(sv/defmethod ::import-binfile

@@ -88,12 +141,11 @@
   ::webhooks/event? true
   ::sse/stream? true
   ::sm/params schema:import-binfile}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id name project-id file] :as params}]
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version] :as params}]
  (projects/check-edition-permissions! pool profile-id project-id)
  (let [cfg (-> cfg
                (assoc ::bf.v1/project-id project-id)
                (assoc ::bf.v1/profile-id profile-id)
                (assoc ::bf.v1/name name))]
  (let [params (-> params
                   (assoc :profile-id profile-id)
                   (assoc :version (or version 1)))]
    (with-meta
      (sse/response #(import-binfile cfg (:path file)))
      (sse/response (partial import-binfile cfg params))
      {::audit/props {:file nil}})))
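The net effect of this hunk is that export-binfile and import-binfile both gain an optional version parameter, defaulting to 1, that selects the serializer (1 = the original binary stream, 3 = the new ZIP format, 2 reserved/not implemented). A tiny standalone sketch of that dispatch shape follows; export-v1 and export-v3 are hypothetical stand-ins, not the real functions.

;; --- illustrative sketch, not part of the commit ---
(defn export-v1 [params] {:format :binfile-v1 :params params})
(defn export-v3 [params] {:format :binfile-v3 :params params})

(defn export
  "Pick the exporter by the optional :version param, defaulting to v1."
  [{:keys [version] :or {version 1} :as params}]
  (case (int version)
    1 (export-v1 params)
    2 (throw (ex-info "not-implemented" {:version version}))
    3 (export-v3 params)))

(comment
  (export {:file-id (random-uuid)})             ;; => binfile-v1
  (export {:file-id (random-uuid) :version 3})) ;; => binfile-v3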
@@ -176,7 +176,7 @@

  (binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
    (let [projs (bfc/get-team-projects cfg team-id)
          files (bfc/get-team-files cfg team-id)
          files (bfc/get-team-files-ids cfg team-id)
          frels (bfc/get-files-rels cfg files)

          team  (-> (db/get-by-id conn :team team-id)

@@ -396,14 +396,15 @@
(defn clone-template
  [cfg {:keys [project-id profile-id] :as params} template]
  (db/tx-run! cfg (fn [{:keys [::db/conn ::wrk/executor] :as cfg}]
                    ;; NOTE: the importation process performs some operations that
                    ;; are not very friendly with virtual threads, and for avoid
                    ;; unexpected blocking of other concurrent operations we
                    ;; dispatch that operation to a dedicated executor.
                    ;; NOTE: the importation process performs some operations
                    ;; that are not very friendly with virtual threads, and for
                    ;; avoid unexpected blocking of other concurrent operations
                    ;; we dispatch that operation to a dedicated executor.
                    (let [cfg (-> cfg
                                  (assoc ::bf.v1/project-id project-id)
                                  (assoc ::bf.v1/profile-id profile-id))
                          result (px/invoke! executor (partial bf.v1/import-files! cfg template))]
                                  (assoc ::bf.v1/profile-id profile-id)
                                  (assoc ::bf.v1/input template))
                          result (px/invoke! executor (partial bf.v1/import-files! cfg))]

                      (db/update! conn :project
                                  {:modified-at (dt/now)}
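Several call sites in this commit repeat the same note: the import performs blocking work that is not virtual-thread friendly, so it is handed off to a dedicated executor via px/invoke!. A rough, self-contained illustration of that pattern with plain java.util.concurrent follows; the pool size and names here are assumptions, and Penpot itself goes through its ::wrk/executor rather than this toy pool.

;; --- illustrative sketch, not part of the commit ---
(ns example.dedicated-executor
  (:import java.util.concurrent.Callable
           java.util.concurrent.ExecutorService
           java.util.concurrent.Executors))

(def ^ExecutorService import-executor
  ;; small pool of platform threads reserved for heavy import work
  (Executors/newFixedThreadPool 2))

(defn run-import!
  "Run `import-fn` on the dedicated executor and block until it finishes;
  the heavy work itself never runs on the caller's (virtual) thread."
  [import-fn]
  (.get (.submit import-executor ^Callable import-fn)))

(comment
  (run-import! (fn [] (Thread/sleep 100) :imported)))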
104
backend/test/backend_tests/binfile_test.clj
Normal file
104
backend/test/backend_tests/binfile_test.clj
Normal file
|
@ -0,0 +1,104 @@
|
|||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns backend-tests.binfile-test
|
||||
"Internal binfile test, no RPC involved"
|
||||
(:require
|
||||
[app.binfile.v3 :as v3]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.thumbnails :as thc]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.http :as http]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.time :as dt]
|
||||
[backend-tests.helpers :as th]
|
||||
[clojure.test :as t]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io]))
|
||||
|
||||
(t/use-fixtures :once th/state-init)
|
||||
(t/use-fixtures :each th/database-reset)
|
||||
|
||||
(defn- update-file!
|
||||
[& {:keys [profile-id file-id changes revn] :or {revn 0}}]
|
||||
(let [params {::th/type :update-file
|
||||
::rpc/profile-id profile-id
|
||||
:id file-id
|
||||
:session-id (uuid/random)
|
||||
:revn revn
|
||||
:features cfeat/supported-features
|
||||
:changes changes}
|
||||
out (th/command! params)]
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
(:result out)))
|
||||
|
||||
(defn- prepare-simple-file
|
||||
[profile]
|
||||
(let [page-id-1 (uuid/custom 1 1)
|
||||
page-id-2 (uuid/custom 1 2)
|
||||
shape-id (uuid/custom 2 1)
|
||||
file (th/create-file* 1 {:profile-id (:id profile)
|
||||
:project-id (:default-project-id profile)
|
||||
:is-shared false})]
|
||||
(update-file!
|
||||
:file-id (:id file)
|
||||
:profile-id (:id profile)
|
||||
:revn 0
|
||||
:changes
|
||||
[{:type :add-page
|
||||
:name "test 1"
|
||||
:id page-id-1}
|
||||
{:type :add-page
|
||||
:name "test 2"
|
||||
:id page-id-2}])
|
||||
|
||||
(update-file!
|
||||
:file-id (:id file)
|
||||
:profile-id (:id profile)
|
||||
:revn 0
|
||||
:changes
|
||||
[{:type :add-obj
|
||||
:page-id page-id-1
|
||||
:id shape-id
|
||||
:parent-id uuid/zero
|
||||
:frame-id uuid/zero
|
||||
:components-v2 true
|
||||
:obj (cts/setup-shape
|
||||
{:id shape-id
|
||||
:name "image"
|
||||
:frame-id uuid/zero
|
||||
:parent-id uuid/zero
|
||||
:type :rect})}])
|
||||
|
||||
(dissoc file :data)))
|
||||
|
||||
(t/deftest export-binfile-v3
|
||||
(let [profile (th/create-profile* 1)
|
||||
file (prepare-simple-file profile)
|
||||
output (tmp/tempfile :suffix ".zip")]
|
||||
|
||||
(v3/export-files!
|
||||
(-> th/*system*
|
||||
(assoc ::v3/ids #{(:id file)})
|
||||
(assoc ::v3/embed-assets false)
|
||||
(assoc ::v3/include-libraries false))
|
||||
(io/output-stream output))
|
||||
|
||||
(let [result (-> th/*system*
|
||||
(assoc ::v3/project-id (:default-project-id profile))
|
||||
(assoc ::v3/profile-id (:id profile))
|
||||
(assoc ::v3/input output)
|
||||
(v3/import-files!))]
|
||||
(t/is (= (count result) 1))
|
||||
(t/is (every? uuid? result)))))
|
|
@ -557,6 +557,7 @@
|
|||
(into []
|
||||
(map (fn [event]
|
||||
(let [[item1 item2] (re-seq #"(.*): (.*)\n?" event)]
|
||||
|
||||
[(keyword (nth item1 2))
|
||||
(tr/decode-str (nth item2 2))])))
|
||||
(-> (slurp' input)
|
||||
|
|
|
@ -6,7 +6,9 @@
|
|||
|
||||
(ns backend-tests.rpc-management-test
|
||||
(:require
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.http :as http]
|
||||
|
@ -21,6 +23,20 @@
|
|||
(t/use-fixtures :once th/state-init)
|
||||
(t/use-fixtures :each th/database-reset)
|
||||
|
||||
(defn- update-file!
|
||||
[& {:keys [profile-id file-id changes revn] :or {revn 0}}]
|
||||
(let [params {::th/type :update-file
|
||||
::rpc/profile-id profile-id
|
||||
:id file-id
|
||||
:session-id (uuid/random)
|
||||
:revn revn
|
||||
:features cfeat/supported-features
|
||||
:changes changes}
|
||||
out (th/command! params)]
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
(:result out)))
|
||||
|
||||
;; TODO: migrate to commands
|
||||
|
||||
(t/deftest duplicate-file
|
||||
|
@ -45,11 +61,13 @@
|
|||
mobj (th/create-file-media-object* {:file-id (:id file1)
|
||||
:is-local false
|
||||
:media-id (:id sobject)})]
|
||||
(th/update-file*
|
||||
{:file-id (:id file1)
|
||||
:profile-id (:id profile)
|
||||
:changes [{:type :add-media
|
||||
:object (select-keys mobj [:id :width :height :mtype :name])}]})
|
||||
(update-file!
|
||||
:file-id (:id file1)
|
||||
:profile-id (:id profile)
|
||||
:revn 0
|
||||
:changes
|
||||
[{:type :add-media
|
||||
:object mobj}])
|
||||
|
||||
(let [data {::th/type :duplicate-file
|
||||
::rpc/profile-id (:id profile)
|
||||
|
@ -173,13 +191,13 @@
|
|||
:is-local false
|
||||
:media-id (:id sobject)})]
|
||||
|
||||
|
||||
(th/update-file*
|
||||
{:file-id (:id file1)
|
||||
:profile-id (:id profile)
|
||||
:changes [{:type :add-media
|
||||
:object (select-keys mobj [:id :width :height :mtype :name])}]})
|
||||
|
||||
(update-file!
|
||||
:file-id (:id file1)
|
||||
:profile-id (:id profile)
|
||||
:revn 0
|
||||
:changes
|
||||
[{:type :add-media
|
||||
:object mobj}])
|
||||
|
||||
(let [data {::th/type :duplicate-project
|
||||
::rpc/profile-id (:id profile)
|
||||
|
|
|
@ -10,8 +10,6 @@
|
|||
[app.common.data.macros :as dm]
|
||||
[app.common.geom.shapes.common :as gco]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.types.components-list :as ctkl]
|
||||
[app.common.types.pages-list :as ctpl]
|
||||
[app.common.uuid :as uuid]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]))
|
||||
|
@ -369,17 +367,6 @@
|
|||
[container]
|
||||
(= (:type container) :component))
|
||||
|
||||
(defn get-container
|
||||
[file type id]
|
||||
(dm/assert! (map? file))
|
||||
(dm/assert! (keyword? type))
|
||||
(dm/assert! (uuid? id))
|
||||
|
||||
(-> (if (= type :page)
|
||||
(ctpl/get-page file id)
|
||||
(ctkl/get-component file id))
|
||||
(assoc :type type)))
|
||||
|
||||
(defn component-touched?
|
||||
"Check if any shape in the component is touched"
|
||||
[objects root-id]
|
||||
|
|
|
@ -1831,7 +1831,7 @@
|
|||
"Generate changes for remove all references to components in the shape,
|
||||
with the given id and all its children, at the current page."
|
||||
[changes id file page-id libraries]
|
||||
(let [container (cfh/get-container file :page page-id)]
|
||||
(let [container (ctn/get-container file :page page-id)]
|
||||
(-> changes
|
||||
(pcb/with-container container)
|
||||
(pcb/with-objects (:objects container))
|
||||
|
|
|
@ -194,7 +194,7 @@
|
|||
|
||||
(defn humanize-explain
|
||||
"Returns a string representation of the explain data structure"
|
||||
[{:keys [schema errors value]} & {:keys [length level]}]
|
||||
[{:keys [errors value]} & {:keys [length level]}]
|
||||
(let [errors (mapv #(update % :schema form) errors)]
|
||||
(with-out-str
|
||||
(println "Errors:")
|
||||
|
|
|
@ -5,14 +5,29 @@
(defn fmt-object-id
  "Returns ids formatted as a string (object-id)"
  [file-id page-id frame-id tag]
  (str/ffmt "%/%/%/%" file-id page-id frame-id tag))
  ([object]
   (fmt-object-id (:file-id object)
                  (:page-id object)
                  (:frame-id object)
                  (:tag object)))
  ([file-id page-id frame-id tag]
   (str/ffmt "%/%/%/%" file-id page-id frame-id tag)))

;; FIXME: rename to a proper name

(defn file-id?
  "Checks if the given object-id belongs to the given file-id"
  [object-id file-id]
  (str/starts-with? object-id (str/concat file-id "/")))

(defn parse-object-id
  [object-id]
  (let [[file-id page-id frame-id tag] (str/split object-id "/")]
    {:file-id (parse-uuid file-id)
     :page-id (parse-uuid page-id)
     :frame-id (parse-uuid frame-id)
     :tag tag}))

(defn get-file-id
  [object-id]
  (uuid/uuid (str/slice object-id 0 (str/index-of object-id "/"))))
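;; A minimal usage sketch of the helpers above (uuid values and the "frame"
;; tag are placeholders):
(comment
  (let [file-id   (uuid/uuid "2d7d6e28-0000-8000-8000-000000000000")
        page-id   (uuid/uuid "2d7d6e28-0000-8000-8000-000000000001")
        frame-id  (uuid/uuid "2d7d6e28-0000-8000-8000-000000000002")
        object-id (fmt-object-id file-id page-id frame-id "frame")]
    (file-id? object-id file-id)   ;; => true
    (get-file-id object-id)        ;; => file-id
    (parse-object-id object-id)))  ;; => map with :file-id, :page-id, :frame-id and :tag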
|
|
@ -7,9 +7,36 @@
|
|||
(ns app.common.types.component
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.types.page :as ctp]
|
||||
[app.common.types.plugins :as ctpg]
|
||||
[app.common.uuid :as uuid]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SCHEMA
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def schema:component
|
||||
[:map
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]
|
||||
[:path {:optional true} [:maybe :string]]
|
||||
[:modified-at {:optional true} ::sm/inst]
|
||||
[:objects {:gen/max 10 :optional true} ::ctp/objects]
|
||||
[:main-instance-id ::sm/uuid]
|
||||
[:main-instance-page ::sm/uuid]
|
||||
[:plugin-data {:optional true} ::ctpg/plugin-data]])
|
||||
|
||||
(sm/register! ::component schema:component)
|
||||
|
||||
(def check-component!
|
||||
(sm/check-fn schema:component))
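;; A minimal sketch of a component map that schema:component above is
;; expected to accept (uuid values are placeholders):
(comment
  (check-component!
   {:id                 (uuid/next)
    :name               "Button"
    :main-instance-id   (uuid/next)
    :main-instance-page (uuid/next)}))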
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INIT & HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Attributes that may be synced in components, and the group they belong to.
|
||||
;; When one attribute is modified in a shape inside a component, the corresponding
|
||||
;; group is marked as :touched. Then, if the shape is synced with the remote shape
|
||||
|
@ -303,4 +330,4 @@
|
|||
(and (swap-slot? group)
|
||||
(some? (group->swap-slot group))))
|
||||
(catch #?(:clj Throwable :cljs :default) _
|
||||
false)))
|
||||
false)))
|
||||
@ -35,37 +35,63 @@
|
|||
;; SCHEMA
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(sm/register! ::media-object
|
||||
(def schema:media
|
||||
"A schema that represents the file media object"
|
||||
[:map {:title "FileMediaObject"}
|
||||
[:id ::sm/uuid]
|
||||
[:created-at ::sm/inst]
|
||||
[:deleted-at {:optional true} ::sm/inst]
|
||||
[:name :string]
|
||||
[:width ::sm/safe-int]
|
||||
[:height ::sm/safe-int]
|
||||
[:mtype :string]
|
||||
[:path {:optional true} [:maybe :string]]])
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:media-id ::sm/uuid]
|
||||
[:thumbnail-id {:optional true} ::sm/uuid]
|
||||
[:is-local :boolean]])
|
||||
|
||||
(sm/register! ::data
|
||||
(def schema:colors
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::ctc/color])
|
||||
|
||||
(def schema:components
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::ctn/container])
|
||||
|
||||
(def schema:typographies
|
||||
[:map-of {:gen/max 2} ::sm/uuid ::cty/typography])
|
||||
|
||||
(def schema:pages-index
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::ctp/page])
|
||||
|
||||
(def schema:data
|
||||
[:map {:title "FileData"}
|
||||
[:pages [:vector ::sm/uuid]]
|
||||
[:pages-index
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::ctp/page]]
|
||||
[:colors {:optional true}
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::ctc/color]]
|
||||
[:components {:optional true}
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::ctn/container]]
|
||||
[:recent-colors {:optional true}
|
||||
[:vector {:gen/max 3} ::ctc/recent-color]]
|
||||
[:typographies {:optional true}
|
||||
[:map-of {:gen/max 2} ::sm/uuid ::cty/typography]]
|
||||
[:media {:optional true}
|
||||
[:map-of {:gen/max 5} ::sm/uuid ::media-object]]
|
||||
[:pages-index schema:pages-index]
|
||||
[:colors {:optional true} schema:colors]
|
||||
[:components {:optional true} schema:components]
|
||||
[:typographies {:optional true} schema:typographies]
|
||||
[:plugin-data {:optional true} ::ctpg/plugin-data]])
|
||||
|
||||
(def schema:file
|
||||
"A schema for validate a file data structure; data is optional
|
||||
because sometimes we want to validate file without the data."
|
||||
[:map {:title "file"}
|
||||
[:id ::sm/uuid]
|
||||
[:data {:optional true} schema:data]
|
||||
[:features ::cfeat/features]])
|
||||
|
||||
(sm/register! ::data schema:data)
|
||||
(sm/register! ::file schema:file)
|
||||
(sm/register! ::media schema:media)
|
||||
(sm/register! ::colors schema:colors)
|
||||
(sm/register! ::typographies schema:typographies)
|
||||
|
||||
(sm/register! ::media-object schema:media)
|
||||
|
||||
(def check-file-data!
|
||||
(sm/check-fn ::data))
|
||||
|
||||
(def check-media-object!
|
||||
(sm/check-fn ::media-object))
|
||||
(sm/check-fn schema:media))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INITIALIZATION
|
||||
@ -33,8 +33,7 @@
|
|||
[:id ::sm/uuid]
|
||||
[:axis [::sm/one-of #{:x :y}]]
|
||||
[:position ::sm/safe-number]
|
||||
;; FIXME: remove maybe?
|
||||
[:frame-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
[:frame-id {:optional true} ::sm/uuid]])
|
||||
|
||||
(def schema:guides
|
||||
[:map-of {:gen/max 2} ::sm/uuid schema:guide])
|
||||
|
@ -51,6 +50,7 @@
|
|||
[:map {:title "FilePage"}
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]
|
||||
[:index {:optional true} ::sm/int]
|
||||
[:objects schema:objects]
|
||||
[:default-grids {:optional true} ::ctg/default-grids]
|
||||
[:flows {:optional true} schema:flows]
|
||||
|
@ -59,12 +59,9 @@
|
|||
[:background {:optional true} ::ctc/rgb-color]
|
||||
|
||||
[:comment-thread-positions {:optional true}
|
||||
[:map-of ::sm/uuid schema:comment-thread-position]]
|
||||
|
||||
[:options
|
||||
;; DEPRECATED: remove after 2.3 release
|
||||
[:map {:title "PageOptions"}]]])
|
||||
[:map-of ::sm/uuid schema:comment-thread-position]]])
|
||||
|
||||
(sm/register! ::objects schema:objects)
|
||||
(sm/register! ::page schema:page)
|
||||
(sm/register! ::guide schema:guide)
|
||||
(sm/register! ::flow schema:flow)
|
||||
|
@ -72,7 +69,6 @@
|
|||
(def valid-guide?
|
||||
(sm/lazy-validator schema:guide))
|
||||
|
||||
;; FIXME: convert to validator
|
||||
(def check-page!
|
||||
(sm/check-fn schema:page))
|
||||
|
||||
|
|
|
@ -150,6 +150,7 @@
|
|||
;; FIXME: rename to shape-generic-attrs
|
||||
(def schema:shape-attrs
|
||||
[:map {:title "ShapeAttrs"}
|
||||
[:page-id {:optional true} ::sm/uuid]
|
||||
[:component-id {:optional true} ::sm/uuid]
|
||||
[:component-file {:optional true} ::sm/uuid]
|
||||
[:component-root {:optional true} :boolean]
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
;; SCHEMA
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(sm/register! ::typography
|
||||
(def schema:typography
|
||||
[:map {:title "Typography"}
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]
|
||||
|
@ -33,6 +33,8 @@
|
|||
[:path {:optional true} [:maybe :string]]
|
||||
[:plugin-data {:optional true} ::ctpg/plugin-data]])
|
||||
|
||||
(sm/register! ::typography schema:typography)
|
||||
|
||||
(def check-typography!
|
||||
(sm/check-fn ::typography))
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
"A general purpose events."
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.types.components-list :as ctkl]
|
||||
[app.common.types.team :as tt]
|
||||
[app.config :as cf]
|
||||
|
@ -136,9 +137,31 @@
|
|||
;; Exportations
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private schema:export-files
  [:sequential {:title "Files"}
   [:map {:title "FileParam"}
    [:id ::sm/uuid]
    [:name :string]
    [:project-id ::sm/uuid]
    [:is-shared ::sm/boolean]]])

(def check-export-files!
  (sm/check-fn schema:export-files))

(def valid-export-formats
  #{:binfile-v1 :binfile-v3 :legacy-zip})
|
||||
|
||||
(defn export-files
|
||||
[files binary?]
|
||||
(ptk/reify ::request-file-export
|
||||
[files format]
|
||||
(dm/assert!
|
||||
"expected valid files param"
|
||||
(check-export-files! files))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid format"
|
||||
(contains? valid-export-formats format))
|
||||
|
||||
(ptk/reify ::export-files
|
||||
ptk/WatchEvent
|
||||
(watch [_ state _]
|
||||
(let [features (features/get-team-enabled-features state)
|
||||
|
@ -147,16 +170,15 @@
|
|||
(rx/mapcat
|
||||
(fn [file]
|
||||
(->> (rp/cmd! :has-file-libraries {:file-id (:id file)})
|
||||
(rx/map #(assoc file :has-libraries? %)))))
|
||||
(rx/map #(assoc file :has-libraries %)))))
|
||||
(rx/reduce conj [])
|
||||
(rx/map (fn [files]
|
||||
(modal/show
|
||||
{:type :export
|
||||
:features features
|
||||
:team-id team-id
|
||||
:has-libraries? (->> files (some :has-libraries?))
|
||||
:files files
|
||||
:binary? binary?}))))))))
|
||||
:format format}))))))))
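;; Usage sketch (aliases as used by the callers, e.g. the dashboard
;; file-menu; `files` is a placeholder vector matching schema:export-files):
;; the event is dispatched with one of valid-export-formats.
(comment
  (st/emit! (dcm/export-files files :binfile-v3)))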
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Team Request
|
||||
@ -753,7 +753,7 @@
|
|||
libraries (wsh/get-libraries state)
|
||||
|
||||
page-id (:current-page-id state)
|
||||
container (cfh/get-container file :page page-id)
|
||||
container (ctn/get-container file :page page-id)
|
||||
|
||||
components-v2
|
||||
(features/active-feature? state "components/v2")
|
||||
|
@ -806,7 +806,7 @@
|
|||
(let [page-id (get state :current-page-id)
|
||||
local-file (wsh/get-local-file state)
|
||||
full-file (wsh/get-local-file-full state)
|
||||
container (cfh/get-container local-file :page page-id)
|
||||
container (ctn/get-container local-file :page page-id)
|
||||
shape (ctn/get-shape container id)
|
||||
components-v2 (features/active-feature? state "components/v2")]
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
(ns app.main.repo
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
|
@ -17,7 +18,7 @@
|
|||
[cuerdas.core :as str]))
|
||||
|
||||
(defn handle-response
|
||||
[{:keys [status body headers] :as response}]
|
||||
[{:keys [status body headers uri] :as response}]
|
||||
(cond
|
||||
(= 204 status)
|
||||
;; We need to send "something" so the streams listening downstream can act
|
||||
|
@ -52,8 +53,10 @@
|
|||
|
||||
:else
|
||||
(rx/throw
|
||||
(ex-info "http error"
|
||||
{:type :unexpected-error
|
||||
(ex-info "repository requet error"
|
||||
{:type :internal
|
||||
:code :repository-access-error
|
||||
:uri uri
|
||||
:status status
|
||||
:headers headers
|
||||
:data body}))))
|
||||
|
@ -71,20 +74,19 @@
|
|||
:form-data? true}
|
||||
|
||||
::sse/clone-template
|
||||
{:response-type ::sse/stream}
|
||||
{:stream? true}
|
||||
|
||||
::sse/import-binfile
|
||||
{:response-type ::sse/stream
|
||||
{:stream? true
|
||||
:form-data? true}
|
||||
|
||||
:export-binfile {:response-type :blob}
|
||||
:retrieve-list-of-builtin-templates {:query-params :all}})
|
||||
|
||||
(defn- send!
|
||||
"A simple helper for a common case of sending and receiving transit
|
||||
data to the penpot mutation api."
|
||||
[id params options]
|
||||
(let [{:keys [response-type
|
||||
stream?
|
||||
form-data?
|
||||
raw-transit?
|
||||
query-params
|
||||
|
@ -92,46 +94,61 @@
|
|||
(-> (get default-options id)
|
||||
(merge options))
|
||||
|
||||
decode-fn (if raw-transit?
|
||||
http/conditional-error-decode-transit
|
||||
http/conditional-decode-transit)
|
||||
decode-fn
|
||||
(if raw-transit?
|
||||
http/conditional-error-decode-transit
|
||||
http/conditional-decode-transit)
|
||||
|
||||
id (or rename-to id)
|
||||
nid (name id)
|
||||
method (cond
|
||||
(= query-params :all) :get
|
||||
(str/starts-with? nid "get-") :get
|
||||
:else :post)
|
||||
request {:method method
|
||||
:uri (u/join cf/public-uri "api/rpc/command/" nid)
|
||||
:credentials "include"
|
||||
:headers {"accept" "application/transit+json,text/event-stream,*/*"
|
||||
"x-external-session-id" (cf/external-session-id)
|
||||
"x-event-origin" (::ev/origin (meta params))}
|
||||
:body (when (= method :post)
|
||||
(if form-data?
|
||||
(http/form-data params)
|
||||
(http/transit-data params)))
|
||||
:query (if (= method :get)
|
||||
params
|
||||
(if query-params
|
||||
(select-keys params query-params)
|
||||
nil))
|
||||
id (or rename-to id)
|
||||
nid (name id)
|
||||
method (cond
|
||||
(= query-params :all) :get
|
||||
(str/starts-with? nid "get-") :get
|
||||
:else :post)
|
||||
|
||||
:response-type
|
||||
(if (= response-type ::sse/stream)
|
||||
:stream
|
||||
(or response-type :text))}
|
||||
response-type
|
||||
(d/nilv response-type :text)
|
||||
|
||||
result (->> (http/send! request)
|
||||
(rx/map decode-fn)
|
||||
(rx/mapcat handle-response))]
|
||||
request
|
||||
{:method method
|
||||
:uri (u/join cf/public-uri "api/rpc/command/" nid)
|
||||
:credentials "include"
|
||||
:headers {"accept" "application/transit+json,text/event-stream,*/*"
|
||||
"x-external-session-id" (cf/external-session-id)
|
||||
"x-event-origin" (::ev/origin (meta params))}
|
||||
:body (when (= method :post)
|
||||
(if form-data?
|
||||
(http/form-data params)
|
||||
(http/transit-data params)))
|
||||
:query (if (= method :get)
|
||||
params
|
||||
(if query-params
|
||||
(select-keys params query-params)
|
||||
nil))
|
||||
:response-type
|
||||
(if stream? nil response-type)}]
|
||||
|
||||
(cond->> result
|
||||
(= ::sse/stream response-type)
|
||||
(rx/mapcat (fn [body]
|
||||
(-> (sse/create-stream body)
|
||||
(sse/read-stream t/decode-str)))))))
|
||||
(->> (http/fetch request)
|
||||
(rx/map http/response->map)
|
||||
(rx/mapcat (fn [{:keys [headers body] :as response}]
|
||||
(let [ctype (get headers "content-type")
|
||||
response-stream? (str/starts-with? ctype "text/event-stream")]
|
||||
|
||||
(when (and response-stream? (not stream?))
|
||||
(ex/raise :type :internal
|
||||
:code :invalid-response-processing
|
||||
:hint "expected normal response, received sse stream"
|
||||
:response-uri (:uri response)
|
||||
:response-status (:status response)))
|
||||
|
||||
(if response-stream?
|
||||
(-> (sse/create-stream body)
|
||||
(sse/read-stream t/decode-str))
|
||||
|
||||
(->> response
|
||||
(http/process-response-type response-type)
|
||||
(rx/map decode-fn)
|
||||
(rx/mapcat handle-response)))))))))
|
||||
|
||||
(defmulti cmd! (fn [id _] id))
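;; Usage sketch: command ids that start with "get-" go over GET, the rest
;; over POST; ids declared with :stream? true in default-options (like
;; ::sse/clone-template and ::sse/import-binfile) are delivered as parsed
;; server-sent events instead of a single decoded transit value.
(comment
  (->> (cmd! :has-file-libraries {:file-id file-id}) ;; file-id is a placeholder uuid
       (rx/subs! (fn [result] (js/console.log "has-libraries:" result)))))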
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
(ns app.main.ui.dashboard.file-menu
|
||||
(:require
|
||||
[app.config :as cf]
|
||||
[app.main.data.common :as dcm]
|
||||
[app.main.data.dashboard :as dd]
|
||||
[app.main.data.events :as ev]
|
||||
|
@ -189,24 +190,30 @@
|
|||
on-export-files
|
||||
(mf/use-fn
|
||||
(mf/deps files)
|
||||
(fn [binary?]
|
||||
(let [evname (if binary?
|
||||
"export-binary-files"
|
||||
"export-standard-files")]
|
||||
(fn [format]
|
||||
(let [evname (if (= format :legacy-zip)
|
||||
"export-standard-files"
|
||||
"export-binary-files")]
|
||||
(st/emit! (ptk/event ::ev/event {::ev/name evname
|
||||
::ev/origin "dashboard"
|
||||
:format format
|
||||
:num-files (count files)})
|
||||
(dcm/export-files files binary?)))))
|
||||
(dcm/export-files files format)))))
|
||||
|
||||
on-export-binary-files
|
||||
(mf/use-fn
|
||||
(mf/deps on-export-files)
|
||||
(partial on-export-files true))
|
||||
(partial on-export-files :binfile-v1))
|
||||
|
||||
on-export-binary-files-v3
|
||||
(mf/use-fn
|
||||
(mf/deps on-export-files)
|
||||
(partial on-export-files :binfile-v3))
|
||||
|
||||
on-export-standard-files
|
||||
(mf/use-fn
|
||||
(mf/deps on-export-files)
|
||||
(partial on-export-files false))
|
||||
(partial on-export-files :legacy-zip))
|
||||
|
||||
;; NOTE: this is used to detect if the component is still mounted
|
||||
mounted-ref (mf/use-ref true)]
|
||||
|
@ -256,9 +263,14 @@
|
|||
:options sub-options})
|
||||
|
||||
{:name (tr "dashboard.export-binary-multi" file-count)
|
||||
:id "file-binari-export-multi"
|
||||
:id "file-binary-export-multi"
|
||||
:handler on-export-binary-files}
|
||||
|
||||
(when (contains? cf/flags :export-file-v3)
|
||||
{:name (tr "dashboard.export-binary-multi-v3" file-count)
|
||||
:id "file-binary-export-multi-v3"
|
||||
:handler on-export-binary-files-v3})
|
||||
|
||||
{:name (tr "dashboard.export-standard-multi" file-count)
|
||||
:id "file-standard-export-multi"
|
||||
:handler on-export-standard-files}
|
||||
|
@ -315,6 +327,11 @@
|
|||
:id "download-binary-file"
|
||||
:handler on-export-binary-files}
|
||||
|
||||
(when (contains? cf/flags :export-file-v3)
|
||||
{:name (tr "dashboard.download-binary-file-v3")
|
||||
:id "download-binary-file-v3"
|
||||
:handler on-export-binary-files-v3})
|
||||
|
||||
{:name (tr "dashboard.download-standard-file")
|
||||
:id "download-standard-file"
|
||||
:handler on-export-standard-files}
|
||||
|
|
|
@ -33,7 +33,7 @@
|
|||
|
||||
(log/set-level! :debug)
|
||||
|
||||
(def ^:const emit-delay 1000)
|
||||
(def ^:const emit-delay 200)
|
||||
|
||||
(defn use-import-file
|
||||
[project-id on-finish-import]
|
||||
|
@ -82,51 +82,35 @@
|
|||
(assoc :deleted true)))
|
||||
entries))
|
||||
|
||||
(defn- update-with-analyze-error
|
||||
[entries uri error]
|
||||
(->> entries
|
||||
(mapv (fn [entry]
|
||||
(cond-> entry
|
||||
(= uri (:uri entry))
|
||||
(-> (assoc :status :analyze-error)
|
||||
(assoc :error error)))))))
|
||||
|
||||
(defn- update-with-analyze-result
|
||||
[entries uri type result]
|
||||
(let [existing-entries? (into #{} (keep :file-id) entries)
|
||||
replace-entry
|
||||
(fn [entry]
|
||||
(if (and (= uri (:uri entry))
|
||||
(= (:status entry) :analyzing))
|
||||
(->> (:files result)
|
||||
(remove (comp existing-entries? first))
|
||||
(map (fn [[file-id file-data]]
|
||||
(-> file-data
|
||||
(assoc :file-id file-id)
|
||||
(assoc :status :ready)
|
||||
(assoc :uri uri)
|
||||
(assoc :type type)))))
|
||||
[entry]))]
|
||||
(into [] (mapcat replace-entry) entries)))
|
||||
|
||||
(defn- mark-entries-importing
|
||||
[entries]
|
||||
(->> entries
|
||||
(filter #(= :ready (:status %)))
|
||||
(mapv #(assoc % :status :importing))))
|
||||
[entries {:keys [file-id status] :as updated}]
|
||||
(let [entries (filterv (comp uuid? :file-id) entries)
|
||||
status (case status
|
||||
:success :import-ready
|
||||
:error :analyze-error)
|
||||
updated (assoc updated :status status)]
|
||||
(if (some #(= file-id (:file-id %)) entries)
|
||||
(mapv (fn [entry]
|
||||
(if (= (:file-id entry) file-id)
|
||||
(merge entry updated)
|
||||
entry))
|
||||
entries)
|
||||
(conj entries updated))))
|
||||
|
||||
(defn- update-entry-status
|
||||
[entries file-id status progress errors]
|
||||
[entries message]
|
||||
(mapv (fn [entry]
|
||||
(cond-> entry
|
||||
(and (= file-id (:file-id entry)) (not= status :import-progress))
|
||||
(assoc :status status)
|
||||
|
||||
(and (= file-id (:file-id entry)) (= status :import-progress))
|
||||
(assoc :progress progress)
|
||||
|
||||
(= file-id (:file-id entry))
|
||||
(assoc :errors errors)))
|
||||
(if (= (:file-id entry) (:file-id message))
|
||||
(let [status (case (:status message)
|
||||
:progress :import-progress
|
||||
:finish :import-success
|
||||
:error :import-error)]
|
||||
(-> entry
|
||||
(assoc :progress (:progress message))
|
||||
(assoc :status status)
|
||||
(assoc :error (:error message))
|
||||
(d/without-nils)))
|
||||
entry))
|
||||
entries))
|
||||
|
||||
(defn- parse-progress-message
|
||||
|
@ -153,33 +137,27 @@
|
|||
:process-components
|
||||
(tr "dashboard.import.progress.process-components")
|
||||
|
||||
(str message)))
|
||||
:process-deleted-components
|
||||
(tr "dashboard.import.progress.process-components")
|
||||
|
||||
(defn- has-status-importing?
|
||||
[item]
|
||||
(= (:status item) :importing))
|
||||
""))
|
||||
|
||||
(defn- has-status-analyzing?
|
||||
(defn- has-status-analyze?
|
||||
[item]
|
||||
(= (:status item) :analyzing))
|
||||
(= (:status item) :analyze))
|
||||
|
||||
(defn- has-status-analyze-error?
|
||||
(defn- has-status-import-success?
|
||||
[item]
|
||||
(= (:status item) :analyzing))
|
||||
|
||||
(defn- has-status-success?
|
||||
[item]
|
||||
(and (= (:status item) :import-finish)
|
||||
(empty? (:errors item))))
|
||||
(= (:status item) :import-success))
|
||||
|
||||
(defn- has-status-error?
|
||||
[item]
|
||||
(and (= (:status item) :import-finish)
|
||||
(d/not-empty? (:errors item))))
|
||||
(or (= (:status item) :import-error)
|
||||
(= (:status item) :analyze-error)))
|
||||
|
||||
(defn- has-status-ready?
|
||||
[item]
|
||||
(and (= :ready (:status item))
|
||||
(and (= :import-ready (:status item))
|
||||
(not (:deleted item))))
|
||||
|
||||
(defn- analyze-entries
|
||||
|
@ -191,12 +169,10 @@
|
|||
(rx/mapcat #(rx/delay emit-delay (rx/of %)))
|
||||
(rx/filter some?)
|
||||
(rx/subs!
|
||||
(fn [{:keys [uri data error type] :as msg}]
|
||||
(if (some? error)
|
||||
(swap! state update-with-analyze-error uri error)
|
||||
(swap! state update-with-analyze-result uri type data))))))
|
||||
(fn [message]
|
||||
(swap! state update-with-analyze-result message)))))
|
||||
|
||||
(defn- import-files!
|
||||
(defn- import-files
|
||||
[state project-id entries]
|
||||
(st/emit! (ptk/data-event ::ev/event {::ev/name "import-files"
|
||||
:num-files (count entries)}))
|
||||
|
@ -205,28 +181,36 @@
|
|||
:project-id project-id
|
||||
:files entries
|
||||
:features @features/features-ref})
|
||||
(rx/filter (comp uuid? :file-id))
|
||||
(rx/subs!
|
||||
(fn [{:keys [file-id status message errors] :as msg}]
|
||||
(swap! state update-entry-status file-id status message errors)))))
|
||||
(fn [message]
|
||||
(swap! state update-entry-status message)))))
|
||||
|
||||
(mf/defc import-entry
|
||||
(mf/defc import-entry*
|
||||
{::mf/props :obj
|
||||
::mf/memo true
|
||||
::mf/private true}
|
||||
[{:keys [entries entry edition can-be-deleted on-edit on-change on-delete]}]
|
||||
(let [status (:status entry)
|
||||
loading? (or (= :analyzing status)
|
||||
(= :importing status))
|
||||
analyze-error? (= :analyze-error status)
|
||||
import-finish? (= :import-finish status)
|
||||
import-error? (= :import-error status)
|
||||
import-warn? (d/not-empty? (:errors entry))
|
||||
ready? (= :ready status)
|
||||
is-shared? (:shared entry)
|
||||
progress (:progress entry)
|
||||
(let [status (:status entry)
|
||||
;; FIXME: rename to format
|
||||
format (:type entry)
|
||||
|
||||
file-id (:file-id entry)
|
||||
editing? (and (some? file-id) (= edition file-id))
|
||||
loading? (or (= :analyze status)
|
||||
(= :import-progress status))
|
||||
analyze-error? (= :analyze-error status)
|
||||
import-success? (= :import-success status)
|
||||
import-error? (= :import-error status)
|
||||
import-ready? (= :import-ready status)
|
||||
|
||||
is-shared? (:shared entry)
|
||||
progress (:progress entry)
|
||||
|
||||
file-id (:file-id entry)
|
||||
editing? (and (some? file-id) (= edition file-id))
|
||||
|
||||
editable? (and (or (= :binfile-v3 format)
|
||||
(= :legacy-zip format))
|
||||
(= status :import-ready))
|
||||
|
||||
on-edit-key-press
|
||||
(mf/use-fn
|
||||
|
@ -261,23 +245,21 @@
|
|||
[:div {:class (stl/css-case
|
||||
:file-entry true
|
||||
:loading loading?
|
||||
:success (and import-finish? (not import-warn?) (not import-error?))
|
||||
:warning (and import-finish? import-warn? (not import-error?))
|
||||
:success import-success?
|
||||
:error (or import-error? analyze-error?)
|
||||
:editable (and ready? (not editing?)))}
|
||||
:editable (and import-ready? (not editing?)))}
|
||||
|
||||
[:div {:class (stl/css :file-name)}
|
||||
(if loading?
|
||||
[:> loader* {:width 16
|
||||
:title (tr "labels.loading")}]
|
||||
[:div {:class (stl/css-case :file-icon true
|
||||
:icon-fill ready?)}
|
||||
(cond ready? i/logo-icon
|
||||
import-warn? i/msg-warning
|
||||
import-error? i/close
|
||||
import-finish? i/tick
|
||||
analyze-error? i/close)])
|
||||
|
||||
[:> loader* {:width 16 :title (tr "labels.loading")}]
|
||||
[:div {:class (stl/css-case
|
||||
:file-icon true
|
||||
:icon-fill import-ready?)}
|
||||
(cond
|
||||
import-ready? i/logo-icon
|
||||
import-error? i/close
|
||||
import-success? i/tick
|
||||
analyze-error? i/close)])
|
||||
|
||||
(if editing?
|
||||
[:div {:class (stl/css :file-name-edit)}
|
||||
|
@ -294,10 +276,9 @@
|
|||
i/library])])
|
||||
|
||||
[:div {:class (stl/css :edit-entry-buttons)}
|
||||
(when (and (= "application/zip" (:type entry))
|
||||
(= status :ready))
|
||||
(when ^boolean editable?
|
||||
[:button {:on-click on-edit'} i/curve])
|
||||
(when can-be-deleted
|
||||
(when ^boolean can-be-deleted
|
||||
[:button {:on-click on-delete'} i/delete])]]
|
||||
|
||||
(cond
|
||||
|
@ -311,9 +292,10 @@
|
|||
[:div {:class (stl/css :error-message)}
|
||||
(tr "dashboard.import.import-error")]
|
||||
|
||||
(and (not import-finish?) (some? progress))
|
||||
(and (not import-success?) (some? progress))
|
||||
[:div {:class (stl/css :progress-message)} (parse-progress-message progress)])
|
||||
|
||||
;; This is legacy code, will be removed when legacy-zip format is removed
|
||||
[:div {:class (stl/css :linked-libraries)}
|
||||
(for [library-id (:libraries entry)]
|
||||
(let [library-data (d/seek #(= library-id (:file-id %)) entries)
|
||||
|
@ -328,6 +310,11 @@
|
|||
:error error?)}
|
||||
i/detach]])))]]))
|
||||
|
||||
(defn initialize-state
|
||||
[entries]
|
||||
(fn []
|
||||
(mapv #(assoc % :status :analyze) entries)))
|
||||
|
||||
(mf/defc import-dialog
|
||||
{::mf/register modal/components
|
||||
::mf/register-as :import
|
||||
|
@ -336,74 +323,66 @@
|
|||
[{:keys [project-id entries template on-finish-import]}]
|
||||
|
||||
(mf/with-effect []
|
||||
;; dispose uris when the component is umount
|
||||
;; Revoke all URIs on component unmount
|
||||
(fn [] (run! wapi/revoke-uri (map :uri entries))))
|
||||
|
||||
(let [entries* (mf/use-state
|
||||
(fn [] (mapv #(assoc % :status :analyzing) entries)))
|
||||
entries (deref entries*)
|
||||
(let [state* (mf/use-state (initialize-state entries))
|
||||
entries (deref state*)
|
||||
|
||||
status* (mf/use-state :analyzing)
|
||||
status* (mf/use-state :analyze)
|
||||
status (deref status*)
|
||||
|
||||
edition* (mf/use-state nil)
|
||||
edition (deref edition*)
|
||||
|
||||
template-finished* (mf/use-state nil)
|
||||
template-finished (deref template-finished*)
|
||||
|
||||
on-template-cloned-success
|
||||
(mf/use-fn
|
||||
(fn []
|
||||
(reset! status* :importing)
|
||||
(reset! template-finished* true)
|
||||
(st/emit! (dd/fetch-recent-files))))
|
||||
|
||||
on-template-cloned-error
|
||||
(mf/use-fn
|
||||
(fn [cause]
|
||||
(reset! status* :error)
|
||||
(reset! template-finished* true)
|
||||
(errors/print-error! cause)
|
||||
(rx/of (modal/hide)
|
||||
(ntf/error (tr "dashboard.libraries-and-templates.import-error")))))
|
||||
|
||||
continue-entries
|
||||
(mf/use-fn
|
||||
(mf/deps entries)
|
||||
(fn []
|
||||
(let [entries (filterv has-status-ready? entries)]
|
||||
(swap! status* (constantly :importing))
|
||||
(swap! entries* mark-entries-importing)
|
||||
(import-files! entries* project-id entries))))
|
||||
(reset! status* :import-progress)
|
||||
(import-files state* project-id entries))))
|
||||
|
||||
continue-template
|
||||
(mf/use-fn
|
||||
(mf/deps on-template-cloned-success
|
||||
on-template-cloned-error
|
||||
template)
|
||||
(fn []
|
||||
(let [mdata {:on-success on-template-cloned-success
|
||||
:on-error on-template-cloned-error}
|
||||
params {:project-id project-id :template-id (:id template)}]
|
||||
(swap! status* (constantly :importing))
|
||||
(st/emit! (dd/clone-template (with-meta params mdata))))))
|
||||
(fn [template]
|
||||
(let [on-success
|
||||
(fn [_event]
|
||||
(reset! status* :import-success)
|
||||
(st/emit! (dd/fetch-recent-files)))
|
||||
|
||||
on-error
|
||||
(fn [cause]
|
||||
(reset! status* :error)
|
||||
(errors/print-error! cause)
|
||||
(rx/of (modal/hide)
|
||||
(ntf/error (tr "dashboard.libraries-and-templates.import-error"))))
|
||||
|
||||
params
|
||||
{:project-id project-id
|
||||
:template-id (:id template)}]
|
||||
|
||||
(reset! status* :import-progress)
|
||||
(st/emit! (dd/clone-template
|
||||
(with-meta params
|
||||
{:on-success on-success
|
||||
:on-error on-error}))))))
|
||||
|
||||
on-edit
|
||||
(mf/use-fn
|
||||
(fn [file-id _event]
|
||||
(swap! edition* (constantly file-id))))
|
||||
(reset! edition* file-id)))
|
||||
|
||||
on-entry-change
|
||||
(mf/use-fn
|
||||
(fn [file-id value]
|
||||
(swap! edition* (constantly nil))
|
||||
(swap! entries* update-entry-name file-id value)))
|
||||
(swap! state* update-entry-name file-id value)))
|
||||
|
||||
on-entry-delete
|
||||
(mf/use-fn
|
||||
(fn [file-id]
|
||||
(swap! entries* remove-entry file-id)))
|
||||
(swap! state* remove-entry file-id)))
|
||||
|
||||
on-cancel
|
||||
(mf/use-fn
|
||||
|
@ -415,13 +394,12 @@
|
|||
|
||||
on-continue
|
||||
(mf/use-fn
|
||||
(mf/deps template
|
||||
continue-template
|
||||
(mf/deps continue-template
|
||||
continue-entries)
|
||||
(fn [event]
|
||||
(dom/prevent-default event)
|
||||
(if (some? template)
|
||||
(continue-template)
|
||||
(continue-template template)
|
||||
(continue-entries))))
|
||||
|
||||
on-accept
|
||||
|
@ -433,41 +411,40 @@
|
|||
(when (fn? on-finish-import)
|
||||
(on-finish-import))))
|
||||
|
||||
entries (filterv (comp not :deleted) entries)
|
||||
num-importing (+ (count (filterv has-status-importing? entries))
|
||||
(if (some? template) 1 0))
|
||||
entries
|
||||
(mf/with-memo [entries]
|
||||
(filterv (complement :deleted) entries))
|
||||
|
||||
success-num (if (some? template)
|
||||
1
|
||||
(count (filterv has-status-success? entries)))
|
||||
import-success-total
|
||||
(if (some? template)
|
||||
1
|
||||
(count (filterv has-status-import-success? entries)))
|
||||
|
||||
errors? (if (some? template)
|
||||
(= status :error)
|
||||
(or (some has-status-error? entries)
|
||||
(zero? (count entries))))
|
||||
errors?
|
||||
(if (some? template)
|
||||
(= status :error)
|
||||
(or (some has-status-error? entries)
|
||||
(zero? (count entries))))
|
||||
|
||||
pending-analysis? (some has-status-analyzing? entries)
|
||||
pending-import? (and (or (nil? template)
|
||||
(not template-finished))
|
||||
(pos? num-importing))
|
||||
pending-analysis?
|
||||
(some has-status-analyze? entries)]
|
||||
|
||||
valid-all-entries? (or (some? template)
|
||||
(not (some has-status-analyze-error? entries)))
|
||||
(mf/with-effect [entries]
|
||||
(cond
|
||||
(some? template)
|
||||
(reset! status* :import-ready)
|
||||
|
||||
template-status
|
||||
(cond
|
||||
(and (= :importing status) pending-import?)
|
||||
:importing
|
||||
(and (seq entries)
|
||||
(every? #(= :import-ready (:status %)) entries))
|
||||
(reset! status* :import-ready)
|
||||
|
||||
(and (= :importing status) (not ^boolean pending-import?))
|
||||
:import-finish
|
||||
|
||||
:else
|
||||
:ready)]
|
||||
(and (seq entries)
|
||||
(every? #(= :import-success (:status %)) entries))
|
||||
(reset! status* :import-success)))
|
||||
|
||||
;; Run analyze operation on component mount
|
||||
(mf/with-effect []
|
||||
(let [sub (analyze-entries entries* entries)]
|
||||
(let [sub (analyze-entries state* entries)]
|
||||
(partial rx/dispose! sub)))
|
||||
|
||||
[:div {:class (stl/css :modal-overlay)}
|
||||
|
@ -479,55 +456,51 @@
|
|||
:on-click on-cancel} i/close]]
|
||||
|
||||
[:div {:class (stl/css :modal-content)}
|
||||
(when (and (= :analyzing status) errors?)
|
||||
(when (and (= :analyze status) errors?)
|
||||
[:& context-notification
|
||||
{:level :warning
|
||||
:content (tr "dashboard.import.import-warning")}])
|
||||
|
||||
(when (and (= :importing status) (not ^boolean pending-import?))
|
||||
(cond
|
||||
errors?
|
||||
[:& context-notification
|
||||
{:level :warning
|
||||
:content (tr "dashboard.import.import-warning")}]
|
||||
|
||||
:else
|
||||
[:& context-notification
|
||||
{:level (if (zero? success-num) :warning :success)
|
||||
:content (tr "dashboard.import.import-message" (i18n/c success-num))}]))
|
||||
(when (= :import-success status)
|
||||
[:& context-notification
|
||||
{:level (if (zero? import-success-total) :warning :success)
|
||||
:content (tr "dashboard.import.import-message" (i18n/c import-success-total))}])
|
||||
|
||||
(for [entry entries]
|
||||
[:& import-entry {:edition edition
|
||||
:key (dm/str (:uri entry))
|
||||
:entry entry
|
||||
:entries entries
|
||||
:on-edit on-edit
|
||||
:on-change on-entry-change
|
||||
:on-delete on-entry-delete
|
||||
:can-be-deleted (> (count entries) 1)}])
|
||||
[:> import-entry* {:edition edition
|
||||
:key (dm/str (:uri entry) "/" (:file-id entry))
|
||||
:entry entry
|
||||
:entries entries
|
||||
:on-edit on-edit
|
||||
:on-change on-entry-change
|
||||
:on-delete on-entry-delete
|
||||
:can-be-deleted (> (count entries) 1)}])
|
||||
|
||||
(when (some? template)
|
||||
[:& import-entry {:entry (assoc template :status template-status)
|
||||
:can-be-deleted false}])]
|
||||
[:> import-entry* {:entry (assoc template :status status)
|
||||
:can-be-deleted false}])]
|
||||
|
||||
;; (prn "import-dialog" status)
|
||||
|
||||
[:div {:class (stl/css :modal-footer)}
|
||||
[:div {:class (stl/css :action-buttons)}
|
||||
(when (= :analyzing status)
|
||||
(when (= :analyze status)
|
||||
[:input {:class (stl/css :cancel-button)
|
||||
:type "button"
|
||||
:value (tr "labels.cancel")
|
||||
:on-click on-cancel}])
|
||||
|
||||
(when (and (= :analyzing status) (not errors?))
|
||||
(when (= status :import-ready)
|
||||
[:input {:class (stl/css :accept-btn)
|
||||
:type "button"
|
||||
:value (tr "labels.continue")
|
||||
:disabled (or pending-analysis? (not valid-all-entries?))
|
||||
:disabled pending-analysis?
|
||||
:on-click on-continue}])
|
||||
|
||||
(when (and (= :importing status) (not errors?))
|
||||
(when (or (= :import-success status)
|
||||
(= :import-progress status))
|
||||
[:input {:class (stl/css :accept-btn)
|
||||
:type "button"
|
||||
:value (tr "labels.accept")
|
||||
:disabled (or pending-import? (not valid-all-entries?))
|
||||
:disabled (= :import-progress status)
|
||||
:on-click on-accept}])]]]]))
|
||||
|
|
|
@ -66,7 +66,6 @@
|
|||
.file-entry {
|
||||
.file-name {
|
||||
@include flexRow;
|
||||
margin-bottom: $s-8;
|
||||
.file-icon {
|
||||
@include flexCenter;
|
||||
height: $s-24;
|
||||
|
|
|
@ -314,18 +314,16 @@
|
|||
:stroke-dashoffset (- 280 pwidth)
|
||||
:style {:transition "stroke-dashoffset 1s ease-in-out"}}]]])])]))
|
||||
|
||||
(def ^:const options [:all :merge :detach])
|
||||
|
||||
(mf/defc export-entry
|
||||
{::mf/wrap-props false}
|
||||
[{:keys [file]}]
|
||||
[:div {:class (stl/css-case :file-entry true
|
||||
:loading (:loading? file)
|
||||
:loading (:loading file)
|
||||
:success (:export-success? file)
|
||||
:error (:export-error? file))}
|
||||
|
||||
[:div {:class (stl/css :file-name)}
|
||||
(if (:loading? file)
|
||||
(if (:loading file)
|
||||
[:> loader* {:width 16
|
||||
:title (tr "labels.loading")}]
|
||||
[:span {:class (stl/css :file-icon)}
|
||||
|
@ -340,7 +338,7 @@
|
|||
(mapv #(cond-> %
|
||||
(= file-id (:id %))
|
||||
(assoc :export-error? true
|
||||
:loading? false))
|
||||
:loading false))
|
||||
files))
|
||||
|
||||
(defn- mark-file-success
|
||||
|
@ -348,30 +346,38 @@
|
|||
(mapv #(cond-> %
|
||||
(= file-id (:id %))
|
||||
(assoc :export-success? true
|
||||
:loading? false))
|
||||
:loading false))
|
||||
files))
|
||||
|
||||
(def export-types
  [:all :merge :detach])
(defn- initialize-state
  "Initialize export dialog state"
  [files]
  (let [files (mapv (fn [file] (assoc file :loading true)) files)]
    {:status :prepare
     :selected :all
     :files files}))

(def default-export-types
  (d/ordered-set :all :merge :detach))
|
||||
|
||||
(mf/defc export-dialog
|
||||
{::mf/register modal/components
|
||||
::mf/register-as :export
|
||||
::mf/wrap-props false}
|
||||
[{:keys [team-id files has-libraries? binary? features]}]
|
||||
(let [state* (mf/use-state
|
||||
#(let [files (mapv (fn [file] (assoc file :loading? true)) files)]
|
||||
{:status :prepare
|
||||
:selected :all
|
||||
:files files}))
|
||||
[{:keys [team-id files features format]}]
|
||||
(let [state* (mf/use-state (partial initialize-state files))
|
||||
has-libs? (some :has-libraries files)
|
||||
|
||||
state (deref state*)
|
||||
selected (:selected state)
|
||||
status (:status state)
|
||||
|
||||
;; We've deprecated the merge option on non-binary files because it wasn't working
|
||||
;; and we're planning to remove this export in future releases.
|
||||
export-types (if binary? export-types [:all :detach])
|
||||
binary? (not= format :legacy-zip)
|
||||
|
||||
;; We've deprecated the merge option on non-binary files
|
||||
;; because it wasn't working and we're planning to remove this
|
||||
;; export in future releases.
|
||||
export-types (if binary? default-export-types [:all :detach])
|
||||
|
||||
start-export
|
||||
(mf/use-fn
|
||||
|
@ -379,10 +385,11 @@
|
|||
(fn []
|
||||
(swap! state* assoc :status :exporting)
|
||||
(->> (uw/ask-many!
|
||||
{:cmd (if binary? :export-binary-file :export-standard-file)
|
||||
{:cmd :export-files
|
||||
:format format
|
||||
:team-id team-id
|
||||
:features features
|
||||
:export-type selected
|
||||
:type selected
|
||||
:files files})
|
||||
(rx/mapcat #(->> (rx/of %)
|
||||
(rx/delay 1000)))
|
||||
|
@ -418,9 +425,9 @@
|
|||
(keyword))]
|
||||
(swap! state* assoc :selected type))))]
|
||||
|
||||
(mf/with-effect [has-libraries?]
|
||||
(mf/with-effect [has-libs?]
|
||||
;; Start download automatically when no libraries
|
||||
(when-not has-libraries?
|
||||
(when-not has-libs?
|
||||
(start-export)))
|
||||
|
||||
[:div {:class (stl/css :modal-overlay)}
|
||||
|
@ -443,13 +450,13 @@
|
|||
:key (name type)}
|
||||
[:label {:for (str "export-" type)
|
||||
:class (stl/css-case :global/checked (= selected type))}
|
||||
;; Execution time translation strings:
|
||||
;; (tr "dashboard.export.options.all.message")
|
||||
;; (tr "dashboard.export.options.all.title")
|
||||
;; (tr "dashboard.export.options.detach.message")
|
||||
;; (tr "dashboard.export.options.detach.title")
|
||||
;; (tr "dashboard.export.options.merge.message")
|
||||
;; (tr "dashboard.export.options.merge.title")
|
||||
;; Execution time translation strings:
|
||||
;; (tr "dashboard.export.options.all.message")
|
||||
;; (tr "dashboard.export.options.all.title")
|
||||
;; (tr "dashboard.export.options.detach.message")
|
||||
;; (tr "dashboard.export.options.detach.title")
|
||||
;; (tr "dashboard.export.options.merge.message")
|
||||
;; (tr "dashboard.export.options.merge.title")
|
||||
[:span {:class (stl/css-case :global/checked (= selected type))}
|
||||
(when (= selected type)
|
||||
i/status-tick)]
|
||||
|
@ -488,5 +495,5 @@
|
|||
[:input {:class (stl/css :accept-btn)
|
||||
:type "button"
|
||||
:value (tr "labels.close")
|
||||
:disabled (->> state :files (some :loading?))
|
||||
:disabled (->> state :files (some :loading))
|
||||
:on-click on-cancel}]]]])]]))
|
||||
|
|
|
@ -526,15 +526,17 @@
|
|||
(mf/deps file)
|
||||
(fn [event]
|
||||
(let [target (dom/get-current-target event)
|
||||
binary? (= (dom/get-data target "binary") "true")
|
||||
evname (if binary?
|
||||
"export-binary-files"
|
||||
"export-standard-files")]
|
||||
format (-> (dom/get-data target "format")
|
||||
(keyword))
|
||||
evname (if (= format :legacy-zip)
|
||||
"export-standard-files"
|
||||
"export-binary-files")]
|
||||
(st/emit!
|
||||
(ptk/event ::ev/event {::ev/name evname
|
||||
::ev/origin "workspace"
|
||||
:format format
|
||||
:num-files 1})
|
||||
(dcm/export-files [file] binary?)))))
|
||||
(dcm/export-files [file] format)))))
|
||||
|
||||
on-export-file-key-down
|
||||
(mf/use-fn
|
||||
|
@ -587,15 +589,24 @@
|
|||
[:> dropdown-menu-item* {:class (stl/css :submenu-item)
|
||||
:on-click on-export-file
|
||||
:on-key-down on-export-file-key-down
|
||||
:data-binary true
|
||||
:data-format "binfile-v1"
|
||||
:id "file-menu-binary-file"}
|
||||
[:span {:class (stl/css :item-name)}
|
||||
(tr "dashboard.download-binary-file")]]
|
||||
|
||||
(when (contains? cf/flags :export-file-v3)
|
||||
[:> dropdown-menu-item* {:class (stl/css :submenu-item)
|
||||
:on-click on-export-file
|
||||
:on-key-down on-export-file-key-down
|
||||
:data-format "binfile-v3"
|
||||
:id "file-menu-binary-file"}
|
||||
[:span {:class (stl/css :item-name)}
|
||||
(tr "dashboard.download-binary-file-v3")]])
|
||||
|
||||
[:> dropdown-menu-item* {:class (stl/css :submenu-item)
|
||||
:on-click on-export-file
|
||||
:on-key-down on-export-file-key-down
|
||||
:data-binary false
|
||||
:data-format "legacy-zip"
|
||||
:id "file-menu-standard-file"}
|
||||
[:span {:class (stl/css :item-name)}
|
||||
(tr "dashboard.download-standard-file")]]
|
||||
|
|
|
@ -70,6 +70,7 @@
|
|||
[{:keys [component renaming listing-thumbs? selected
|
||||
file-id on-asset-click on-context-menu on-drag-start do-rename
|
||||
cancel-rename selected-full selected-paths local]}]
|
||||
|
||||
(let [item-ref (mf/use-ref)
|
||||
|
||||
dragging* (mf/use-state false)
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
[app.common.data.macros :as dm]
|
||||
[app.common.record :as crc]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.main.data.workspace :as dw]
|
||||
[app.main.features :as features]
|
||||
[app.main.store :as st]
|
||||
|
@ -114,29 +115,33 @@
|
|||
(page/page-proxy $plugin $id page-id))))
|
||||
|
||||
(export
|
||||
[self type export-type]
|
||||
(let [export-type (or (parser/parse-keyword export-type) :all)]
|
||||
[self format type]
|
||||
(let [type (or (parser/parse-keyword type) :all)]
|
||||
(cond
|
||||
(not (contains? #{"penpot" "zip"} type))
|
||||
(u/display-not-valid :export-type type)
|
||||
(not (contains? #{"penpot" "zip"} format))
|
||||
(u/display-not-valid :format type)
|
||||
|
||||
(not (contains? (set mue/export-types) export-type))
|
||||
(u/display-not-valid :export-exportType export-type)
|
||||
(not (contains? (set mue/default-export-types) type))
|
||||
(u/display-not-valid :type type)
|
||||
|
||||
:else
|
||||
(let [export-cmd (if (= type "penpot") :export-binary-file :export-standard-file)
|
||||
file (u/proxy->file self)
|
||||
features (features/get-team-enabled-features @st/state)
|
||||
team-id (:current-team-id @st/state)]
|
||||
(let [file (u/proxy->file self)
|
||||
features (features/get-team-enabled-features @st/state)
|
||||
team-id (:current-team-id @st/state)
|
||||
format (case format
|
||||
"penpot" (if (contains? cf/flags :export-file-v3)
|
||||
:binfile-v3
|
||||
:binfile-v1)
|
||||
"zip" :legacy-zip)]
|
||||
(p/create
|
||||
(fn [resolve reject]
|
||||
(->> (uw/ask-many!
|
||||
{:cmd export-cmd
|
||||
{:cmd :export-files
|
||||
:format format
|
||||
:type type
|
||||
:team-id team-id
|
||||
:features features
|
||||
:export-type export-type
|
||||
:files [file]})
|
||||
(rx/mapcat #(->> (rx/of %) (rx/delay 1000)))
|
||||
(rx/mapcat
|
||||
(fn [msg]
|
||||
(case (:type msg)
|
||||
|
@ -147,9 +152,11 @@
|
|||
(rx/empty)
|
||||
|
||||
:finish
|
||||
(http/send! {:method :get :uri (:uri msg) :mode :no-cors :response-type :blob}))))
|
||||
(rx/first)
|
||||
(rx/mapcat (fn [{:keys [body]}] (.arrayBuffer ^js body)))
|
||||
(http/send! {:method :get
|
||||
:uri (:uri msg)
|
||||
:mode :no-cors
|
||||
:response-type :buffer}))))
|
||||
(rx/take 1)
|
||||
(rx/map (fn [data] (js/Uint8Array. data)))
|
||||
(rx/subs! resolve reject)))))))))
|
||||
|
||||
|
|
|
@ -103,26 +103,31 @@
|
|||
(when @abortable?
|
||||
(.abort ^js controller)))))))
|
||||
|
||||
(defn response->map
  [response]
  {:status (.-status ^js response)
   :uri (.-url ^js response)
   :headers (parse-headers (.-headers ^js response))
   :body (.-body ^js response)
   ::response response})

(defn process-response-type
  [response-type response]
  (let [native-response (::response response)
        body (case response-type
               :buffer (.arrayBuffer ^js native-response)
               :json (.json ^js native-response)
               :text (.text ^js native-response)
               :blob (.blob ^js native-response))]
    (->> (rx/from body)
         (rx/map (fn [body]
                   (assoc response :body body))))))
|
||||
|
||||
(defn send!
|
||||
[{:keys [response-type] :or {response-type :text} :as params}]
|
||||
(letfn [(on-response [^js response]
|
||||
(if (= :stream response-type)
|
||||
(rx/of {:status (.-status response)
|
||||
:headers (parse-headers (.-headers response))
|
||||
:body (.-body response)
|
||||
::response response})
|
||||
(let [body (case response-type
|
||||
:json (.json ^js response)
|
||||
:text (.text ^js response)
|
||||
:blob (.blob ^js response))]
|
||||
(->> (rx/from body)
|
||||
(rx/map (fn [body]
|
||||
{::response response
|
||||
:status (.-status ^js response)
|
||||
:headers (parse-headers (.-headers ^js response))
|
||||
:body body}))))))]
|
||||
(->> (fetch params)
|
||||
(rx/mapcat on-response))))
|
||||
(->> (fetch params)
|
||||
(rx/map response->map)
|
||||
(rx/mapcat (partial process-response-type response-type))))
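;; Usage sketch: with response->map and process-response-type above, a binary
;; download can be consumed as an ArrayBuffer (the plugin export code in this
;; commit requests :response-type :buffer the same way; uri is a placeholder):
(comment
  (->> (send! {:method :get :uri uri :response-type :buffer})
       (rx/map :body)
       (rx/subs! (fn [data] (js/Uint8Array. data)))))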
|
||||
|
||||
(defn form-data
|
||||
[data]
|
||||
|
|
|
@ -33,16 +33,24 @@
|
|||
|
||||
(defn- process-file
|
||||
[entry path type]
|
||||
;; (js/console.log "zip:process-file" entry path type)
|
||||
(cond
|
||||
(nil? entry)
|
||||
(p/rejected (str "File not found: " path))
|
||||
|
||||
(.-dir entry)
|
||||
(.-dir ^js entry)
|
||||
(p/resolved {:dir path})
|
||||
|
||||
:else
|
||||
(-> (.async entry type)
|
||||
(p/then #(hash-map :path path :content %)))))
|
||||
(->> (.async ^js entry type)
|
||||
(p/fmap (fn [content]
|
||||
;; (js/console.log "zip:process-file" 2 content)
|
||||
{:path path
|
||||
:content content})))))
|
||||
|
||||
(defn load
|
||||
[data]
|
||||
(rx/from (zip/loadAsync data)))
|
||||
|
||||
(defn get-file
|
||||
"Gets a single file from the zip archive"
|
||||
|
|
|
@ -64,8 +64,9 @@
|
|||
|
||||
(reply-completed
|
||||
([] (reply-completed nil))
|
||||
([msg] (post {:payload msg
|
||||
:completed true})))]
|
||||
([msg]
|
||||
(post {:payload msg
|
||||
:completed true})))]
|
||||
|
||||
(try
|
||||
(let [result (impl/handler payload transfer)
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
(ns app.worker.export
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.json :as json]
|
||||
[app.common.media :as cm]
|
||||
[app.common.text :as ct]
|
||||
|
@ -396,46 +397,55 @@
|
|||
(->> (uz/compress-files data)
|
||||
(rx/map #(vector (get files file-id) %)))))))))
|
||||
|
||||
(defmethod impl/handler :export-binary-file
|
||||
[{:keys [files export-type] :as message}]
|
||||
(->> (rx/from files)
|
||||
(rx/mapcat
|
||||
(fn [file]
|
||||
(->> (rp/cmd! :export-binfile {:file-id (:id file)
|
||||
:include-libraries (= export-type :all)
|
||||
:embed-assets (= export-type :merge)})
|
||||
(rx/map #(hash-map :type :finish
|
||||
:file-id (:id file)
|
||||
:filename (:name file)
|
||||
:mtype "application/penpot"
|
||||
:description "Penpot export (*.penpot)"
|
||||
:uri (wapi/create-uri (wapi/create-blob %))))
|
||||
(rx/catch
|
||||
(fn [err]
|
||||
(rx/of {:type :error
|
||||
:error (str err)
|
||||
:file-id (:id file)}))))))))
|
||||
(defmethod impl/handler :export-files
|
||||
[{:keys [team-id files type format features] :as message}]
|
||||
(cond
|
||||
(or (= format :binfile-v1)
|
||||
(= format :binfile-v3))
|
||||
(->> (rx/from files)
|
||||
(rx/mapcat
|
||||
(fn [file]
|
||||
(->> (rp/cmd! :export-binfile {:file-id (:id file)
|
||||
:version (if (= format :binfile-v3) 3 1)
|
||||
:include-libraries (= type :all)
|
||||
:embed-assets (= type :merge)})
|
||||
(rx/map wapi/create-blob)
|
||||
(rx/map wapi/create-uri)
|
||||
(rx/map (fn [uri]
|
||||
{:type :finish
|
||||
:file-id (:id file)
|
||||
:filename (:name file)
|
||||
:mtype (if (= format :binfile-v3)
|
||||
"application/zip"
|
||||
"application/penpot")
|
||||
:uri uri}))
|
||||
(rx/catch
|
||||
(fn [cause]
|
||||
(rx/of (ex/raise :type :internal
|
||||
:code :export-error
|
||||
:hint "unexpected error on exporting file"
|
||||
:file-id (:id file)
|
||||
:cause cause))))))))
|
||||
|
||||
(defmethod impl/handler :export-standard-file
|
||||
[{:keys [team-id files export-type features] :as message}]
|
||||
|
||||
(->> (rx/from files)
|
||||
(rx/mapcat
|
||||
(fn [file]
|
||||
(->> (export-file team-id (:id file) export-type features)
|
||||
(rx/map
|
||||
(fn [value]
|
||||
(if (contains? value :type)
|
||||
value
|
||||
(let [[file export-blob] value]
|
||||
{:type :finish
|
||||
:file-id (:id file)
|
||||
:filename (:name file)
|
||||
:mtype "application/zip"
|
||||
:description "Penpot export (*.zip)"
|
||||
:uri (wapi/create-uri export-blob)}))))
|
||||
(rx/catch (fn [err]
|
||||
(js/console.error err)
|
||||
(rx/of {:type :error
|
||||
:error (str err)
|
||||
:file-id (:id file)}))))))))
|
||||
(= format :legacy-zip)
|
||||
(->> (rx/from files)
|
||||
(rx/mapcat
|
||||
(fn [file]
|
||||
(->> (export-file team-id (:id file) type features)
|
||||
(rx/map
|
||||
(fn [value]
|
||||
(if (contains? value :type)
|
||||
value
|
||||
(let [[file export-blob] value]
|
||||
{:type :finish
|
||||
:file-id (:id file)
|
||||
:filename (:name file)
|
||||
:mtype "application/zip"
|
||||
:uri (wapi/create-uri export-blob)}))))
|
||||
(rx/catch
|
||||
(fn [cause]
|
||||
(rx/of (ex/raise :type :internal
|
||||
:code :export-error
|
||||
:hint "unexpected error on exporting file"
|
||||
:file-id (:id file)
|
||||
:cause cause))))))))))
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
(ns app.worker.import
|
||||
(:refer-clojure :exclude [resolve])
|
||||
(:require
|
||||
["jszip" :as zip]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.builder :as fb]
|
||||
|
@ -16,7 +15,6 @@
|
|||
[app.common.json :as json]
|
||||
[app.common.logging :as log]
|
||||
[app.common.media :as cm]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.text :as ct]
|
||||
[app.common.time :as tm]
|
||||
|
@ -25,7 +23,6 @@
|
|||
[app.util.http :as http]
|
||||
[app.util.i18n :as i18n :refer [tr]]
|
||||
[app.util.sse :as sse]
|
||||
[app.util.webapi :as wapi]
|
||||
[app.util.zip :as uz]
|
||||
[app.worker.impl :as impl]
|
||||
[app.worker.import.parser :as parser]
|
||||
|
@ -64,7 +61,8 @@
|
|||
m))
|
||||
|
||||
(defn get-file
|
||||
"Resolves the file inside the context given its id and the data"
|
||||
"Resolves the file inside the context given its id and the
|
||||
data. LEGACY"
|
||||
([context type]
|
||||
(get-file context type nil nil))
|
||||
|
||||
|
@ -105,6 +103,12 @@
|
|||
:else
|
||||
stream)))))
|
||||
|
||||
(defn- read-zip-manifest
  [zipfile]
  (->> (uz/get-file zipfile "manifest.json")
       (rx/map :content)
       (rx/map json/decode)))
|
||||
|
||||
(defn progress!
|
||||
([context type]
|
||||
(assert (keyword? type))
|
||||
|
@ -123,14 +127,14 @@
|
|||
|
||||
([context type file current total]
|
||||
(when (and context (contains? context :progress))
|
||||
(let [msg {:type type
|
||||
:file file
|
||||
:current current
|
||||
:total total}]
|
||||
(log/debug :status :import-progress :message msg)
|
||||
(let [progress {:type type
|
||||
:file file
|
||||
:current current
|
||||
:total total}]
|
||||
(log/debug :status :progress :progress progress)
|
||||
(rx/push! (:progress context) {:file-id (:file-id context)
|
||||
:status :import-progress
|
||||
:message msg})))))
|
||||
:status :progress
|
||||
:progress progress})))))
|
||||
|
||||
(defn resolve-factory
|
||||
"Creates a wrapper around the atom to remap ids to new ids and keep
|
||||
|
@ -162,7 +166,7 @@
|
|||
(rp/cmd! :create-temp-file
|
||||
{:id file-id
|
||||
:name (:name context)
|
||||
:is-shared (:shared context)
|
||||
:is-shared (:is-shared context)
|
||||
:project-id (:project-id context)
|
||||
:create-page false
|
||||
|
||||
|
@ -212,6 +216,15 @@
|
|||
;; We use merge to keep some information not stored in back-end
|
||||
(rx/map #(merge file %))))))
|
||||
|
||||
(defn slurp-uri
  ([uri] (slurp-uri uri :text))
  ([uri response-type]
   (->> (http/send!
         {:uri uri
          :response-type response-type
          :method :get})
        (rx/map :body))))
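;; Usage sketch: analyze-file below reads the whole file as a buffer and
;; sniffs the mime type from its first bytes (uri is a placeholder):
(comment
  (->> (slurp-uri uri :buffer)
       (rx/map parse-mtype)))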
|
||||
|
||||
(defn upload-media-files
|
||||
"Upload a image to the backend and returns its id"
|
||||
[context file-id name data-uri]
|
||||
|
@ -312,8 +325,6 @@
|
|||
(let [frame-id (:current-frame-id file)
|
||||
frame (when (and (some? frame-id) (not= frame-id uuid/zero))
|
||||
(fb/lookup-shape file frame-id))]
|
||||
|
||||
(js/console.log " translate-frame" (clj->js frame))
|
||||
(if (some? frame)
|
||||
(-> data
|
||||
(d/update-when :x + (:x frame))
|
||||
|
@ -716,7 +727,6 @@
|
|||
|
||||
(defn create-files
|
||||
[{:keys [system-features] :as context} files]
|
||||
|
||||
(let [data (group-by :file-id files)]
|
||||
(rx/concat
|
||||
(->> (rx/from files)
|
||||
|
@ -738,68 +748,124 @@
|
|||
"1 13 32 206" "application/octet-stream"
|
||||
"other")))
|
||||
|
||||
(defn- analyze-file-legacy-zip-entry
|
||||
[features entry]
|
||||
;; NOTE: LEGACY manifest reading mechanism, we can't
|
||||
;; reuse the new read-zip-manifest function here
|
||||
(->> (rx/from (uz/load (:body entry)))
|
||||
(rx/merge-map #(get-file {:zip %} :manifest))
|
||||
(rx/mapcat
|
||||
(fn [manifest]
|
||||
;; Checks if the file is exported with
|
||||
;; components v2 and the current team
|
||||
;; only supports components v1
|
||||
(let [has-file-v2?
|
||||
(->> (:files manifest)
|
||||
(d/seek (fn [[_ file]] (contains? (set (:features file)) "components/v2"))))]
|
||||
|
||||
(if (and has-file-v2? (not (contains? features "components/v2")))
|
||||
(rx/of (-> entry
|
||||
(assoc :error "dashboard.import.analyze-error.components-v2")
|
||||
(dissoc :body)))
|
||||
(->> (rx/from (:files manifest))
|
||||
(rx/map (fn [[file-id data]]
|
||||
(-> entry
|
||||
(dissoc :body)
|
||||
(merge data)
|
||||
(dissoc :shared)
|
||||
(assoc :is-shared (:shared data))
|
||||
(assoc :file-id file-id)
|
||||
(assoc :status :success)))))))))))
|
||||
|
||||
;; NOTE: this is a limited subset schema for the manifest file of the
;; binfile-v3 format; it is used to partially parse it and read the
;; files referenced inside the exported file
|
||||
|
||||
(def ^:private schema:manifest
  [:map {:title "Manifest"}
   [:type :string]
   [:files
    [:vector
     [:map
      [:id ::sm/uuid]
      [:name :string]]]]])

(def ^:private decode-manifest
  (sm/decoder schema:manifest sm/json-transformer))
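;; Sketch of the manifest.json subset described by schema:manifest (values
;; are placeholders); decode-manifest is expected to coerce the JSON-decoded
;; map, turning the id strings into uuids:
(comment
  (decode-manifest
   {:type "penpot/export-files"
    :files [{:id "2d7d6e28-0000-8000-8000-000000000000"
             :name "My file"}]}))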
|
||||
|
||||
(defn analyze-file
  [features {:keys [uri] :as file}]
  (let [stream (->> (slurp-uri uri :buffer)
                    (rx/merge-map
                     (fn [body]
                       (let [mtype (parse-mtype body)]
                         (if (= "application/zip" mtype)
                           (->> (uz/load body)
                                (rx/merge-map read-zip-manifest)
                                (rx/map
                                 (fn [manifest]
                                   (if (= (:type manifest) "penpot/export-files")
                                     (let [manifest (decode-manifest manifest)]
                                       (assoc file :type :binfile-v3 :files (:files manifest)))
                                     (assoc file :type :legacy-zip :body body)))))
                           (rx/of (assoc file :type :binfile-v1))))))
                    (rx/share))]

    (->> (rx/merge
          (->> stream
               (rx/filter (fn [entry] (= :legacy-zip (:type entry))))
               (rx/merge-map (partial analyze-file-legacy-zip-entry features)))

          (->> stream
               (rx/filter (fn [entry] (= :binfile-v1 (:type entry))))
               (rx/map (fn [entry]
                         (let [file-id (uuid/next)]
                           (-> entry
                               (assoc :file-id file-id)
                               (assoc :name (:name file))
                               (assoc :status :success))))))

          (->> stream
               (rx/filter (fn [entry] (= :binfile-v3 (:type entry))))
               (rx/merge-map (fn [{:keys [files] :as entry}]
                               (->> (rx/from files)
                                    (rx/map (fn [file]
                                              (-> entry
                                                  (dissoc :files)
                                                  (assoc :name (:name file))
                                                  (assoc :file-id (:id file))
                                                  (assoc :status :success))))))))

          (->> stream
               (rx/filter (fn [data] (= "other" (:type data))))
               (rx/map (fn [_]
                         {:uri (:uri file)
                          :error (tr "dashboard.import.analyze-error")}))))

         (rx/catch (fn [cause]
                     (let [error (or (ex-message cause) (tr "dashboard.import.analyze-error"))]
                       (rx/of (assoc file :error error :status :error))))))))
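To summarize what the pipeline above emits (a sketch inferred from the assoc calls, not an exhaustive contract): every analyzed URI turns into one or more plain maps tagged with the detected format, plus an error map for unrecognized or failed inputs. The sample values below are illustrative:

(comment
  ;; one entry per file referenced by a binfile-v3 (.zip) export
  {:type :binfile-v3 :status :success :name "Landing page"
   :file-id #uuid "00000000-0000-0000-0000-000000000001"}
  ;; a single entry for a binfile-v1 (.penpot) export; its :file-id is freshly
  ;; generated with uuid/next
  {:type :binfile-v1 :status :success :name "file.penpot"
   :file-id #uuid "00000000-0000-0000-0000-000000000002"}
  ;; unrecognized content; :error holds the translated analyze-error message
  {:uri "blob:example" :error "dashboard.import.analyze-error"})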
(defmethod impl/handler :analyze-import
  [{:keys [files features]}]
  (->> (rx/from files)
       (rx/merge-map
        (fn [file]
          (let [st (->> (http/send!
                         {:uri (:uri file)
                          :response-type :blob
                          :method :get})
                        (rx/map :body)
                        (rx/mapcat wapi/read-file-as-array-buffer)
                        (rx/map (fn [data]
                                  {:type (parse-mtype data)
                                   :uri (:uri file)
                                   :body data})))]
            (->> (rx/merge
                  (->> st
                       (rx/filter (fn [data] (= "application/zip" (:type data))))
                       (rx/merge-map #(zip/loadAsync (:body %)))
                       (rx/merge-map #(get-file {:zip %} :manifest))
                       (rx/map
                        (fn [data]
                          ;; Checks if the file is exported with components v2 and the current team only
                          ;; supports components v1
                          (let [has-file-v2?
                                (->> (:files data)
                                     (d/seek (fn [[_ file]] (contains? (set (:features file)) "components/v2"))))]
                            (if (and has-file-v2? (not (contains? features "components/v2")))
                              {:uri (:uri file) :error "dashboard.import.analyze-error.components-v2"}
                              (hash-map :uri (:uri file) :data data :type "application/zip"))))))
                  (->> st
                       (rx/filter (fn [data] (= "application/octet-stream" (:type data))))
                       (rx/map (fn [_]
                                 (let [file-id (uuid/next)]
                                   {:uri (:uri file)
                                    :data {:name (:name file)
                                           :file-id file-id
                                           :files {file-id {:name (:name file)}}
                                           :status :ready}
                                    :type "application/octet-stream"}))))
                  (->> st
                       (rx/filter (fn [data] (= "other" (:type data))))
                       (rx/map (fn [_]
                                 {:uri (:uri file)
                                  :error (tr "dashboard.import.analyze-error")}))))
                 (rx/catch (fn [data]
                             (let [error (or (.-message data) (tr "dashboard.import.analyze-error"))]
                               (rx/of {:uri (:uri file) :error error}))))))))))

       (rx/merge-map (partial analyze-file features))))
(defmethod impl/handler :import-files
  [{:keys [project-id files features]}]
  (let [context {:project-id project-id
                 :resolve (resolve-factory)
                 :system-features features}

  (let [context {:project-id project-id
                 :resolve (resolve-factory)
                 :system-features features}
        zip-files (filter #(= "application/zip" (:type %)) files)
        binary-files (filter #(= "application/octet-stream" (:type %)) files)]
        legacy-zip (filter #(= :legacy-zip (:type %)) files)
        binfile-v1 (filter #(= :binfile-v1 (:type %)) files)
        binfile-v3 (filter #(= :binfile-v3 (:type %)) files)]

    (rx/merge
     (->> (create-files context zip-files)

     ;; NOTE: LEGACY, will be removed so no new development should be
     ;; done for this part
     (->> (create-files context legacy-zip)
          (rx/merge-map
           (fn [[file data]]
             (->> (uz/load-from-url (:uri data))
@ -813,9 +879,12 @@
              (->> file-stream
                   (rx/map
                    (fn [file]
                      {:status :import-finish
                       :errors (:errors file)
                       :file-id (:file-id data)})))))))
                      (if-let [errors (not-empty (:errors file))]
                        {:status :error
                         :error (first errors)
                         :file-id (:file-id data)}
                        {:status :finish
                         :file-id (:file-id data)}))))))))
              (rx/catch (fn [cause]
                          (let [data (ex-data cause)]
                            (log/error :hint (ex-message cause)
@ -823,12 +892,11 @@
                            (when-let [explain (:explain data)]
                              (js/console.log explain)))

                          (rx/of {:status :import-error
                          (rx/of {:status :error
                                  :file-id (:file-id data)
                                  :error (ex-message cause)
                                  :error-data (ex-data cause)})))))))
                                  :error (ex-message cause)})))))))

     (->> (rx/from binary-files)
     (->> (rx/from binfile-v1)
          (rx/merge-map
           (fn [data]
             (->> (http/send!
@ -836,32 +904,74 @@
                  :response-type :blob
                  :method :get})
                 (rx/map :body)
                 (rx/mapcat (fn [file]
                 (rx/mapcat
                  (fn [file]
                    (->> (rp/cmd! ::sse/import-binfile
                                  {:name (str/replace (:name data) #".penpot$" "")
                                   :file file
                                   :project-id project-id})
                         (rx/tap (fn [event]
                                   (let [payload (sse/get-payload event)
                                         type (sse/get-type event)]
                                     (if (= type "progress")
                                       (log/dbg :hint "import-binfile: progress"
                                                :section (:section payload)
                                                :name (:name payload))
                                       (log/dbg :hint "import-binfile: end")))))
                         (rx/filter sse/end-of-stream?)
                         (rx/map (fn [_]
                                   {:status :finish
                                    :file-id (:file-id data)})))))

                 (rx/catch
                   (fn [cause]
                     (log/error :hint "unexpected error on import process"
                                :project-id project-id
                                :cause cause)
                     (rx/of {:status :error
                             :error (ex-message cause)
                             :file-id (:file-id data)})))))))
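The import command above streams its result over server-sent events: "progress" events carry a payload describing the section currently being written, and a final end-of-stream event completes the request. A small sketch of classifying such an event into plain data; the helper name is hypothetical, while sse/get-type, sse/get-payload and sse/end-of-stream? are the helpers already used in the diff:

;; Hypothetical helper mirroring the checks done in the rx/tap above.
(defn- import-event->data*
  [event]
  (cond
    (sse/end-of-stream? event)
    {:status :end}

    (= "progress" (sse/get-type event))
    (let [payload (sse/get-payload event)]
      {:status :progress :section (:section payload) :name (:name payload)})

    :else
    {:status :unknown}))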
     (->> (rx/from binfile-v3)
          (rx/reduce (fn [result file]
                       (update result (:uri file) (fnil conj []) file))
                     {})
          (rx/mapcat identity)
          (rx/merge-map
           (fn [[uri entries]]
             (->> (slurp-uri uri :blob)
                  (rx/mapcat (fn [content]
                               ;; FIXME: implement the naming and filtering
                               (->> (rp/cmd! ::sse/import-binfile
                                             {:name (str/replace (:name data) #".penpot$" "")
                                              :file file
                                             {:name (-> entries first :name)
                                              :file content
                                              :version 3
                                              :project-id project-id})
                                    (rx/tap (fn [event]
                                              (let [payload (sse/get-payload event)
                                                    type (sse/get-type event)]
                                                (if (= type "progress")
                                                  (log/dbg :hint "import-binfile: progress" :section (:section payload) :name (:name payload))
                                                  (log/dbg :hint "import-binfile: progress"
                                                           :section (:section payload)
                                                           :name (:name payload))
                                                  (log/dbg :hint "import-binfile: end")))))
                                    (rx/filter sse/end-of-stream?)
                                    (rx/map (fn [_]
                                              {:status :import-finish
                                               :file-id (:file-id data)})))))
                  (rx/catch (fn [cause]
                              (log/error :hint "unexpected error on import process"
                                         :project-id project-id
                                         ::log/sync? true)
                              (let [edata (if (map? cause) cause (ex-data cause))]
                                (println "Error data:")
                                (pp/pprint (dissoc edata :explain) {:level 3 :length 10})
                  (rx/mapcat (fn [_]
                               (->> (rx/from entries)
                                    (rx/map (fn [entry]
                                              {:status :finish
                                               :file-id (:file-id entry)}))))))))

                                (when (string? (:explain edata))
                                  (js/console.log (:explain edata)))
                  (rx/catch
                    (fn [cause]
                      (log/error :hint "unexpected error on import process"
                                 :project-id project-id
                                 ::log/sync? true
                                 :cause cause)
                      (->> (rx/from entries)
                           (rx/map (fn [entry]
                                     {:status :error
                                      :error (ex-message cause)
                                      :file-id (:file-id entry)}))))))))))))

                                (rx/of {:status :import-error
                                        :file-id (:file-id data)})))))))))))
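One detail of the binfile-v3 branch that is easy to miss: a single exported .zip can contain several files, so the analyzed entries are first grouped back by their source :uri and the archive is uploaded once per URI rather than once per file. A data-level sketch of that grouping step, using the same reducing function as the rx/reduce above (entry values are illustrative):

(comment
  (reduce (fn [result file]
            (update result (:uri file) (fnil conj []) file))
          {}
          [{:uri "blob:a" :file-id 1 :name "File A"}
           {:uri "blob:a" :file-id 2 :name "File B"}
           {:uri "blob:b" :file-id 3 :name "File C"}])
  ;; => {"blob:a" [{:file-id 1 :name "File A" ...} {:file-id 2 :name "File B" ...}]
  ;;     "blob:b" [{:file-id 3 :name "File C" ...}]}
  )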
@ -420,6 +420,9 @@ msgstr "Delete team"
msgid "dashboard.download-binary-file"
msgstr "Download Penpot file (.penpot)"

msgid "dashboard.download-binary-file-v3"
msgstr "Download Penpot file (.zip) (BETA)"

#: src/app/main/ui/dashboard/file_menu.cljs:300, src/app/main/ui/workspace/main_menu.cljs:597
msgid "dashboard.download-standard-file"
msgstr "Download standard file (.svg + .json)"

@ -485,6 +488,10 @@ msgstr "Once a project member creates a file, it will be displayed here."
msgid "dashboard.export-binary-multi"
msgstr "Download %s Penpot files (.penpot)"

#: src/app/main/ui/dashboard/file_menu.cljs:249
msgid "dashboard.export-binary-multi-v3"
msgstr "Download %s Penpot files (.zip) (BETA)"

#: src/app/main/ui/workspace/main_menu.cljs:605
msgid "dashboard.export-frames"
msgstr "Export boards as PDF"