Merge pull request #2016 from penpot/niwinz-experiments-custom-export-import

Experimental support for a binary file format for exporting and importing penpot files

Commit 4d19ceff8d (34 changed files with 1640 additions and 448 deletions)
@@ -28,7 +28,8 @@
metosin/reitit-core {:mvn/version "0.5.18"}
org.postgresql/postgresql {:mvn/version "42.4.0"}
com.zaxxer/HikariCP {:mvn/version "5.0.1"}
funcool/datoteka {:mvn/version "2.0.0"}
funcool/datoteka {:mvn/version "3.0.64"}
buddy/buddy-hashers {:mvn/version "1.8.158"}
buddy/buddy-sign {:mvn/version "3.4.333"}
@@ -20,6 +20,33 @@
  </header>

  <section class="rpc-doc-content">
    <h2>RPC COMMAND METHODS:</h2>
    <ul class="rpc-items">
      {% for item in command-methods %}
      <li class="rpc-item">
        <div class="rpc-row-info">
          {# <div class="type">{{item.type}}</div> #}
          <div class="name">{{item.name}}</div>
          <div class="tags">
            <span class="tag">
              <span>Auth:</span>
              <span>{% if item.auth %}YES{% else %}NO{% endif %}</span>
            </span>
          </div>
        </div>
        <div class="rpc-row-detail hidden">
          {% if item.docs %}
          <h3>DOCSTRING:</h3>
          <p>{{item.docs}}</p>
          {% endif %}

          <h3>SPEC EXPLAIN:</h3>
          <pre>{{item.spec}}</pre>
        </div>
      </li>
      {% endfor %}
    </ul>

    <h2>RPC QUERY METHODS:</h2>
    <ul class="rpc-items">
      {% for item in query-methods %}
@@ -25,6 +25,11 @@
  <Logger name="org.eclipse.jetty" level="error" />
  <Logger name="org.postgresql" level="error" />

  <Logger name="app.msgbus" level="error" />
  <Logger name="app.http.websocket" level="error" />
  <Logger name="app.rpc.commands.binfile" level="debug" />
  <Logger name="app.storage.tmp" level="debug" />

  <Logger name="app.cli" level="debug" additivity="false">
    <AppenderRef ref="console"/>
  </Logger>

@@ -38,11 +43,6 @@
    <AppenderRef ref="zmq" level="debug" />
  </Logger>

  <Logger name="penpot" level="debug" additivity="false">
    <AppenderRef ref="main" level="debug" />
    <AppenderRef ref="zmq" level="debug" />
  </Logger>

  <Logger name="user" level="trace" additivity="false">
    <AppenderRef ref="main" level="trace" />
  </Logger>
@@ -10,11 +10,12 @@
  <Logger name="com.zaxxer.hikari" level="error" />
  <Logger name="org.eclipse.jetty" level="error" />

  <Logger name="app" level="debug" additivity="false">
    <AppenderRef ref="console" />
  </Logger>
  <Logger name="app.msgbus" level="error" />
  <Logger name="app.http.websocket" level="error" />
  <Logger name="app.rpc.commands.binfile" level="debug" />
  <Logger name="app.storage.tmp" level="debug" />

  <Logger name="penpot" level="fatal" additivity="false">
  <Logger name="app" level="debug" additivity="false">
    <AppenderRef ref="console" />
  </Logger>
@@ -10,23 +10,110 @@ Debug Main Page
    <div>[<a href="/dbg/error">ERRORS</a>]</div>
  </nav>
  <main class="index">
    <section>
      <h2>Download file data:</h2>
      <desc>Given a FILE-ID, downloads the file data as a file. The file data is encoded using transit.</desc>
      <form method="get" action="/dbg/file/data">
        <input type="text" style="width:300px" name="file-id" placeholder="file-id" />
        <input type="hidden" name="download" value="1" />
        <input type="submit" value="Download" />
      </form>
    <section class="widget">
      <fieldset>
        <legend>Download file data:</legend>
        <desc>Given a FILE-ID, downloads the file data as a file. The file data is encoded using transit.</desc>
        <form method="get" action="/dbg/file/data">
          <div class="row">
            <input type="text" style="width:300px" name="file-id" placeholder="file-id" />
          </div>
          <div class="row">
            <input type="submit" name="download" value="Download" />
            <input type="submit" name="clone" value="Clone" />
          </div>
        </form>
      </fieldset>

      <fieldset>
        <legend>Upload File Data:</legend>
        <desc>Create a new file in your draft projects using the file downloaded from the previous section.</desc>
        <form method="post" enctype="multipart/form-data" action="/dbg/file/data">
          <div class="row">
            <input type="file" name="file" value="" />
          </div>
          <div class="row">
            <label>Import with same id?</label>
            <input type="checkbox" name="reuseid" />
          </div>

          <input type="submit" value="Upload" />
        </form>
      </fieldset>
    </section>

    <section>
      <h2>Upload File Data:</h2>
      <desc>Create a new file in your draft projects using the file downloaded from the previous section.</desc>
      <form method="post" enctype="multipart/form-data" action="/dbg/file/data">
        <input type="file" name="file" value="" />
        <input type="submit" value="Upload" />
      </form>
    <section class="widget">
      <fieldset>
        <legend>Export binfile:</legend>
        <desc>Given a FILE-ID, downloads the file and optionally all
        the related libraries in a single custom formatted binary
        file.</desc>

        <form method="get" action="/dbg/file/export">
          <div class="row">
            <input type="text" style="width:300px" name="file-id" placeholder="file-id" />
          </div>

          <div class="row">
            <label>Include libraries?</label>
            <input type="checkbox" name="includelibs" checked/>
          </div>

          <div class="row">
            <input type="submit" name="download" value="Download" />
            <input type="submit" name="clone" value="Clone" />
          </div>
        </form>
      </fieldset>
      <fieldset>
        <legend>Import binfile:</legend>
        <desc>Import a penpot file in binary
        format. If <strong>overwrite</strong> is checked, all files will
        be overwritten using the same ids found in the file instead of
        generating new ones.</desc>

        <form method="post" enctype="multipart/form-data" action="/dbg/file/import">
          <div class="row">
            <input type="file" name="file" value="" />
          </div>

          <div class="row">
            <label>Overwrite?</label>
            <input type="checkbox" name="overwrite" />
            <br />
            <small>
              Instead of creating a new file with all relations remapped,
              reuses all ids and updates/overwrites the objects that
              already exist in the database.
              <strong>Warning: this operation should be used with caution.</strong>
            </small>
          </div>

          <div class="row">
            <label>Migrate?</label>
            <input type="checkbox" name="migrate" />
            <br />
            <small>
              Applies the file migrations during the import process.
            </small>
          </div>

          <div class="row">
            <label>Ignore index errors?</label>
            <input type="checkbox" name="ignore-index-errors" />
            <br />
            <small>
              Do not break on index lookup errors (remap operation).
              Useful when importing a broken file that has broken
              relations or missing pieces.
            </small>
          </div>

          <div class="row">
            <input type="submit" name="upload" value="Upload" />
          </div>
        </form>
      </fieldset>
    </section>
  </main>
{% endblock %}
@@ -14,7 +14,6 @@ pre {
}

desc {
  display: flex;
  margin-bottom: 10px;
  font-size: 10px;
  color: #666;

@@ -28,6 +27,15 @@ main {
  margin: 20px;
}

small {
  font-size: 9px;
  color: #888;
}

small > strong {
  font-size: 9px;
}

nav {
  position: fixed;
  width: 100vw;

@@ -95,17 +103,25 @@ nav > div:not(:last-child) {

.index {
  margin-top: 40px;
  display: flex;
}

.index > section {
  padding: 10px;
  background-color: #e3e3e3;
  max-width: 400px;
  margin: 5px;
  height: fit-content;
}

.index > section:not(:last-child) {
  margin-bottom: 10px;
.index fieldset:not(:first-child) {
  margin-top: 15px;
}

/* .index > section:not(:last-child) { */
/*   margin-bottom: 10px; */
/* } */

.index > section > h2 {
  margin-top: 0px;

@@ -148,3 +164,7 @@ nav > div:not(:last-child) {
  color: inherit;
}

form .row {
  padding: 5px 0;
}
@@ -120,16 +120,17 @@
(s/def ::feedback fn?)
(s/def ::ws fn?)
(s/def ::audit-handler fn?)
(s/def ::debug map?)
(s/def ::awsns-handler fn?)
(s/def ::session map?)
(s/def ::debug-routes vector?)

(defmethod ig/pre-init-spec ::router [_]
  (s/keys :req-un [::rpc ::mtx/metrics ::ws ::oauth ::storage ::assets
                   ::session ::feedback ::awsns-handler ::debug ::audit-handler]))
                   ::session ::feedback ::awsns-handler ::debug-routes
                   ::audit-handler]))

(defmethod ig/init-key ::router
  [_ {:keys [ws session rpc oauth metrics assets feedback debug] :as cfg}]
  [_ {:keys [ws session rpc oauth metrics assets feedback debug-routes] :as cfg}]
  (rr/router
   [["" {:middleware [[middleware/server-timing]
                      [middleware/format-response]

@@ -137,20 +138,14 @@
                      [middleware/parse-request]
                      [middleware/errors errors/handle]
                      [middleware/restrict-methods]]}

     ["/metrics" {:handler (:handler metrics)}]
     ["/assets" {:middleware [(:middleware session)]}
      ["/by-id/:id" {:handler (:objects-handler assets)}]
      ["/by-file-media-id/:id" {:handler (:file-objects-handler assets)}]
      ["/by-file-media-id/:id/thumbnail" {:handler (:file-thumbnails-handler assets)}]]

     ["/dbg" {:middleware [(:middleware session)]}
      ["" {:handler (:index debug)}]
      ["/changelog" {:handler (:changelog debug)}]
      ["/error-by-id/:id" {:handler (:retrieve-error debug)}]
      ["/error/:id" {:handler (:retrieve-error debug)}]
      ["/error" {:handler (:retrieve-error-list debug)}]
      ["/file/data" {:handler (:file-data debug)}]
      ["/file/changes" {:handler (:retrieve-file-changes debug)}]]
     debug-routes

     ["/webhooks"
      ["/sns" {:handler (:awsns-handler cfg)

@@ -162,7 +157,6 @@

     ["/api" {:middleware [[middleware/cors]
                           (:middleware session)]}
      ["/health" {:handler (:health-check debug)}]
      ["/_doc" {:handler (doc/handler rpc)
                :allowed-methods #{:get}}]
      ["/feedback" {:handler feedback

@@ -177,6 +171,7 @@
                    :allowed-methods #{:post}}]

      ["/rpc"
       ["/command/:command" {:handler (:command-handler rpc)}]
       ["/query/:type" {:handler (:query-handler rpc)}]
       ["/mutation/:type" {:handler (:mutation-handler rpc)
                           :allowed-methods #{:post}}]]]]]))
@@ -5,36 +5,39 @@
;; Copyright (c) UXBOX Labs SL

(ns app.http.debug
  (:refer-clojure :exclude [error-handler])
  (:require
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.pprint :as pp]
   [app.common.spec :as us]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.db.sql :as sql]
   [app.rpc.mutations.files :as m.files]
   [app.http.middleware :as mw]
   [app.rpc.commands.binfile :as binf]
   [app.rpc.mutations.files :refer [create-file]]
   [app.rpc.queries.profile :as profile]
   [app.util.blob :as blob]
   [app.util.bytes :as bs]
   [app.util.template :as tmpl]
   [app.util.time :as dt]
   [app.worker :as wrk]
   [clojure.java.io :as io]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [datoteka.core :as fs]
   [emoji.core :as emj]
   [fipp.edn :as fpp]
   [integrant.core :as ig]
   [markdown.core :as md]
   [markdown.transformers :as mdt]
   [promesa.core :as p]
   [promesa.exec :as px]
   [yetti.request :as yrq]
   [yetti.response :as yrs]))

;; (selmer.parser/cache-off!)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn authorized?
  [pool {:keys [profile-id]}]
  (or (= "devenv" (cf/get :host))
@@ -42,7 +45,22 @@
        admins  (or (cf/get :admins) #{})]
    (contains? admins (:email profile)))))

(defn index
(defn prepare-response
  [body]
  (let [headers {"content-type" "application/transit+json"}]
    (yrs/response :status 200 :body body :headers headers)))

(defn prepare-download-response
  [body filename]
  (let [headers {"content-disposition" (str "attachment; filename=" filename)
                 "content-type" "application/octet-stream"}]
    (yrs/response :status 200 :body body :headers headers)))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; INDEX
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn index-handler
  [{:keys [pool]} request]
  (when-not (authorized? pool request)
    (ex/raise :type :authentication
@@ -52,6 +70,9 @@
                :body (-> (io/resource "templates/debug.tmpl")
                          (tmpl/render {}))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FILE CHANGES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def sql:retrieve-range-of-changes
  "select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn")
@@ -59,28 +80,16 @@
(def sql:retrieve-single-change
  "select revn, changes, data from file_change where file_id=? and revn = ?")

(defn prepare-response
  [{:keys [params] :as request} body filename]
  (when-not body
    (ex/raise :type :not-found
              :code :empty-data
              :hint "empty response"))

  (cond-> (yrs/response :status 200
                        :body body
                        :headers {"content-type" "application/transit+json"})
    (contains? params :download)
    (update :headers assoc "content-disposition" (str "attachment; filename=" filename))))

(defn- retrieve-file-data
  [{:keys [pool]} {:keys [params] :as request}]
  [{:keys [pool]} {:keys [params profile-id] :as request}]
  (when-not (authorized? pool request)
    (ex/raise :type :authentication
              :code :only-admins-allowed))

  (let [file-id (some-> (get-in request [:params :file-id]) uuid/uuid)
        revn    (some-> (get-in request [:params :revn]) d/parse-integer)
  (let [file-id  (some-> params :file-id parse-uuid)
        revn     (some-> params :revn parse-long)
        filename (str file-id)]

    (when-not file-id
      (ex/raise :type :validation
                :code :missing-arguments))
@@ -88,35 +97,63 @@
    (let [data (if (integer? revn)
                 (some-> (db/exec-one! pool [sql:retrieve-single-change file-id revn]) :data)
                 (some-> (db/get-by-id pool :file file-id) :data))]
      (if (contains? params :download)
        (-> (prepare-response request data filename)
            (update :headers assoc "content-type" "application/octet-stream"))
        (prepare-response request (some-> data blob/decode) filename)))))

      (when-not data
        (ex/raise :type :not-found
                  :code :empty-data
                  :hint "empty response"))
      (cond
        (contains? params :download)
        (prepare-download-response data filename)

        (contains? params :clone)
        (let [project-id (some-> (profile/retrieve-additional-data pool profile-id) :default-project-id)
              data       (some-> data blob/decode)]
          (create-file pool {:id (uuid/next)
                             :name (str "Cloned file: " filename)
                             :project-id project-id
                             :profile-id profile-id
                             :data data})
          (yrs/response 201 "OK CREATED"))

        :else
        (prepare-response (some-> data blob/decode))))))

(defn- is-file-exists?
  [pool id]
  (let [sql "select exists (select 1 from file where id=?) as exists;"]
    (-> (db/exec-one! pool [sql id]) :exists)))

(defn- upload-file-data
  [{:keys [pool]} {:keys [profile-id params] :as request}]
  (let [project-id (some-> (profile/retrieve-additional-data pool profile-id) :default-project-id)
        data       (some-> params :file :path fs/slurp-bytes blob/decode)]
        data       (some-> params :file :path bs/read-as-bytes blob/decode)]

    (if (and data project-id)
      (let [fname   (str "imported-file-" (dt/now))
            file-id (try
                      (uuid/uuid (-> params :file :filename))
                      (catch Exception _ (uuid/next)))
            file    (db/exec-one! pool (sql/select :file {:id file-id}))]
        (if file
          (db/update! pool :file
                      {:data (blob/encode data)}
                      {:id file-id})
          (m.files/create-file pool {:id file-id
                                     :name fname
                                     :project-id project-id
                                     :profile-id profile-id
                                     :data data}))
        (yrs/response 200 "OK"))
      (let [fname      (str "Imported file *: " (dt/now))
            overwrite? (contains? params :overwrite?)
            file-id    (or (and overwrite? (ex/ignoring (-> params :file :filename parse-uuid)))
                           (uuid/next))]

        (if (and overwrite? file-id
                 (is-file-exists? pool file-id))
          (do
            (db/update! pool :file
                        {:data (blob/encode data)}
                        {:id file-id})
            (yrs/response 200 "OK UPDATED"))

          (do
            (create-file pool {:id file-id
                               :name fname
                               :project-id project-id
                               :profile-id profile-id
                               :data data})
            (yrs/response 201 "OK CREATED"))))

      (yrs/response 500 "ERROR"))))

(defn file-data
(defn file-data-handler
  [cfg request]
  (case (yrq/method request)
    :get (retrieve-file-data cfg request)
@@ -124,43 +161,47 @@
    (ex/raise :type :http
              :code :method-not-found)))

(defn retrieve-file-changes
  [{:keys [pool]} request]
(defn file-changes-handler
  [{:keys [pool]} {:keys [params] :as request}]
  (when-not (authorized? pool request)
    (ex/raise :type :authentication
              :code :only-admins-allowed))

  (let [file-id  (some-> (get-in request [:params :id]) uuid/uuid)
        revn     (or (get-in request [:params :revn]) "latest")
        filename (str file-id)]
  (letfn [(retrieve-changes [file-id revn]
            (if (str/includes? revn ":")
              (let [[start end] (->> (str/split revn #":")
                                     (map str/trim)
                                     (map parse-long))]
                (some->> (db/exec! pool [sql:retrieve-range-of-changes file-id start end])
                         (map :changes)
                         (map blob/decode)
                         (mapcat identity)
                         (vec)))

    (when (or (not file-id) (not revn))
      (ex/raise :type :validation
                :code :invalid-arguments
                :hint "missing arguments"))
              (if-let [revn (parse-long revn)]
                (let [item (db/exec-one! pool [sql:retrieve-single-change file-id revn])]
                  (some-> item :changes blob/decode vec))
                (ex/raise :type :validation :code :invalid-arguments))))]

    (cond
      (d/num-string? revn)
      (let [item (db/exec-one! pool [sql:retrieve-single-change file-id (d/parse-integer revn)])]
        (prepare-response request (some-> item :changes blob/decode vec) filename))
    (let [file-id  (some-> params :id parse-uuid)
          revn     (or (some-> params :revn parse-long) "latest")
          filename (str file-id)]

      (str/includes? revn ":")
      (let [[start end] (->> (str/split revn #":")
                             (map str/trim)
                             (map d/parse-integer))
            items (db/exec! pool [sql:retrieve-range-of-changes file-id start end])]
        (prepare-response request
                          (some->> items
                                   (map :changes)
                                   (map blob/decode)
                                   (mapcat identity)
                                   (vec))
                          filename))
      :else
      (ex/raise :type :validation :code :invalid-arguments))))
      (when (or (not file-id) (not revn))
        (ex/raise :type :validation
                  :code :invalid-arguments
                  :hint "missing arguments"))

      (let [data (retrieve-changes file-id revn)]
        (if (contains? params :download)
          (prepare-download-response data filename)
          (prepare-response data))))))

(defn retrieve-error
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ERROR BROWSER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn error-handler
  [{:keys [pool]} request]
  (letfn [(parse-id [request]
            (let [id (get-in request [:path-params :id])
@@ -176,9 +217,8 @@
            (let [context (dissoc report
                                  :trace :cause :params :data :spec-problems
                                  :spec-explain :spec-value :error :explain :hint)
                  params  {:context (with-out-str
                                      (fpp/pprint context {:width 200}))
                           :hint (:hint report)
                  params  {:context (pp/pprint-str context :width 200)
                           :hint (:hint report)
                           :spec-explain (:spec-explain report)
                           :spec-problems (:spec-problems report)
                           :spec-value (:spec-value report)
@@ -206,7 +246,7 @@
(def sql:error-reports
  "select id, created_at from server_error_report order by created_at desc limit 100")

(defn retrieve-error-list
(defn error-list-handler
  [{:keys [pool]} request]
  (when-not (authorized? pool request)
    (ex/raise :type :authentication
@@ -219,14 +259,88 @@
              :headers {"content-type" "text/html; charset=utf-8"
                        "x-robots-tag" "noindex"})))

(defn health-check
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; EXPORT/IMPORT
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn export-handler
  [{:keys [pool] :as cfg} {:keys [params profile-id] :as request}]
  (let [file-id (some-> params :file-id parse-uuid)
        libs?   (contains? params :includelibs)
        clone?  (contains? params :clone)]

    (when-not file-id
      (ex/raise :type :validation
                :code :missing-arguments))

    (let [path (-> cfg
                   (assoc ::binf/file-id file-id)
                   (assoc ::binf/include-libraries? libs?)
                   (binf/export!))]
      (if clone?
        (let [project-id (some-> (profile/retrieve-additional-data pool profile-id) :default-project-id)]
          (binf/import!
           (assoc cfg
                  ::binf/input path
                  ::binf/overwrite? false
                  ::binf/profile-id profile-id
                  ::binf/project-id project-id))

          (yrs/response
           :status  200
           :headers {"content-type" "text/plain"}
           :body    "OK CLONED"))

        (yrs/response
         :status  200
         :headers {"content-type" "application/octet-stream"
                   "content-disposition" (str "attachment; filename=" file-id ".penpot")}
         :body    (io/input-stream path))))))


(defn import-handler
  [{:keys [pool] :as cfg} {:keys [params profile-id] :as request}]
  (when-not (contains? params :file)
    (ex/raise :type :validation
              :code :missing-upload-file
              :hint "missing upload file"))

  (let [project-id           (some-> (profile/retrieve-additional-data pool profile-id) :default-project-id)
        overwrite?           (contains? params :overwrite)
        migrate?             (contains? params :migrate)
        ignore-index-errors? (contains? params :ignore-index-errors)]

    (when-not project-id
      (ex/raise :type :validation
                :code :missing-project
                :hint "project not found"))

    (binf/import!
     (assoc cfg
            ::binf/input (-> params :file :path)
            ::binf/overwrite? overwrite?
            ::binf/migrate? migrate?
            ::binf/ignore-index-errors? ignore-index-errors?
            ::binf/profile-id profile-id
            ::binf/project-id project-id))

    (yrs/response
     :status  200
     :headers {"content-type" "text/plain"}
     :body    "OK")))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OTHER SMALL VIEWS/HANDLERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn health-handler
  "Mainly a task that performs a health check."
  [{:keys [pool]} _]
  (db/with-atomic [conn pool]
    (db/exec-one! conn ["select count(*) as count from server_prop;"])
    (yrs/response 200 "OK")))

(defn changelog
(defn changelog-handler
  [_ _]
  (letfn [(transform-emoji [text state]
            [(emj/emojify text) state])
@@ -238,22 +352,39 @@
              :body (-> clog slurp md->html))
    (yrs/response :status 404 :body "NOT FOUND"))))

(defn- wrap-async
  [{:keys [executor] :as cfg} f]
  (fn [request respond raise]
    (-> (px/submit! executor #(f cfg request))
        (p/then respond)
        (p/catch raise))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; INIT
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defmethod ig/pre-init-spec ::handlers [_]
  (s/keys :req-un [::db/pool ::wrk/executor]))
(def with-authorization
  {:compile
   (fn [& _]
     (fn [handler pool]
       (fn [request respond raise]
         (if (authorized? pool request)
           (handler request respond raise)
           (raise (ex/error :type :authentication
                            :code :only-admins-allowed))))))})

(defmethod ig/init-key ::handlers
  [_ cfg]
  {:index (wrap-async cfg index)
   :health-check (wrap-async cfg health-check)
   :retrieve-file-changes (wrap-async cfg retrieve-file-changes)
   :retrieve-error (wrap-async cfg retrieve-error)
   :retrieve-error-list (wrap-async cfg retrieve-error-list)
   :file-data (wrap-async cfg file-data)
   :changelog (wrap-async cfg changelog)})

(s/def ::session map?)

(defmethod ig/pre-init-spec ::routes [_]
  (s/keys :req-un [::db/pool ::wrk/executor ::session]))

(defmethod ig/init-key ::routes
  [_ {:keys [session pool executor] :as cfg}]
  ["/dbg" {:middleware [[(:middleware session)]
                        [with-authorization pool]
                        [mw/with-promise-async executor]
                        [mw/with-config cfg]]}
   ["" {:handler index-handler}]
   ["/health" {:handler health-handler}]
   ["/changelog" {:handler changelog-handler}]
   ;; ["/error-by-id/:id" {:handler error-handler}]
   ["/error/:id" {:handler error-handler}]
   ["/error" {:handler error-list-handler}]
   ["/file/export" {:handler export-handler}]
   ["/file/import" {:handler import-handler}]
   ["/file/data" {:handler file-data-handler}]
   ["/file/changes" {:handler file-changes-handler}]])
@@ -34,7 +34,12 @@
                  :auth (:auth mdata true)
                  :docs (::sv/docs mdata)
                  :spec (get-spec-str (::sv/spec mdata))}))]
    {:query-methods

    {:command-methods
     (into []
           (map (partial gen-doc :command))
           (->> rpc :methods :command (sort-by first)))
     :query-methods
     (into []
           (map (partial gen-doc :query))
           (->> rpc :methods :query (sort-by first)))
@@ -12,6 +12,8 @@
   [app.config :as cf]
   [app.util.json :as json]
   [cuerdas.core :as str]
   [promesa.core :as p]
   [promesa.exec :as px]
   [yetti.adapter :as yt]
   [yetti.middleware :as ymw]
   [yetti.request :as yrq]

@@ -192,3 +194,20 @@
(def restrict-methods
  {:name ::restrict-methods
   :compile compile-restrict-methods})

(def with-promise-async
  {:compile
   (fn [& _]
     (fn [handler executor]
       (fn [request respond raise]
         (-> (px/submit! executor #(handler request))
             (p/then respond)
             (p/catch raise)))))})

(def with-config
  {:compile
   (fn [& _]
     (fn [handler config]
       (fn
         ([request] (handler config request))
         ([request respond raise] (handler config request respond raise)))))})
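
A minimal sketch of how these two compile-time middlewares combine, assuming hypothetical route data (the `executor`, `cfg`, and `yrs` names come from the surrounding system, as in the /dbg routes above): `with-config` partially applies the config so a handler is a plain two-argument function, and `with-promise-async` runs it on the executor and bridges the promise to the ring async callbacks.

(def example-routes
  ;; illustrative reitit route data, not part of this commit
  ["/example" {:middleware [[with-promise-async executor]
                            [with-config cfg]]
               :handler (fn [cfg request]
                          ;; blocking work is safe here; it runs on `executor`
                          (yrs/response 200 "OK"))}])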
@@ -162,21 +162,22 @@

(defn- make-middleware
  [{:keys [::events-ch store] :as cfg}]
  {:name :session-middleware
   :wrap (fn [handler]
           (fn [request respond raise]
             (try
               (-> (retrieve-session store request)
                   (p/then' #(merge request %))
                   (p/finally (fn [request cause]
                                (if cause
                                  (raise cause)
                                  (do
  {:name :session
   :compile (fn [& _]
              (fn [handler]
                (fn [request respond raise]
                  (try
                    (-> (retrieve-session store request)
                        (p/then' #(merge request %))
                        (p/finally (fn [request cause]
                                     (if cause
                                       (raise cause)
                                       (do
                                         (when-let [session-id (:session-id request)]
                                           (a/offer! events-ch session-id))
                                         (handler request respond raise))))))
               (catch Throwable cause
                 (raise cause)))))})
                    (catch Throwable cause
                      (raise cause))))))})

;; --- STATE INIT: SESSION
@@ -71,6 +71,10 @@
   :app.tokens/tokens
   {:keys (ig/ref :app.setup/keys)}

   :app.storage.tmp/cleaner
   {:executor  (ig/ref [::worker :app.worker/executor])
    :scheduler (ig/ref :app.worker/scheduler)}

   :app.storage/gc-deleted-task
   {:pool    (ig/ref :app.db/pool)
    :storage (ig/ref :app.storage/storage)

@@ -125,7 +129,7 @@
    :session (ig/ref :app.http/session)
    :awsns-handler (ig/ref :app.http.awsns/handler)
    :oauth (ig/ref :app.http.oauth/handler)
    :debug (ig/ref :app.http.debug/handlers)
    :debug-routes (ig/ref :app.http.debug/routes)
    :ws (ig/ref :app.http.websocket/handler)
    :metrics (ig/ref :app.metrics/metrics)
    :public-uri (cf/get :public-uri)

@@ -135,9 +139,11 @@
    :rpc (ig/ref :app.rpc/rpc)
    :executor (ig/ref [::default :app.worker/executor])}

   :app.http.debug/handlers
   {:pool (ig/ref :app.db/pool)
    :executor (ig/ref [::worker :app.worker/executor])}
   :app.http.debug/routes
   {:pool (ig/ref :app.db/pool)
    :executor (ig/ref [::worker :app.worker/executor])
    :storage (ig/ref :app.storage/storage)
    :session (ig/ref :app.http/session)}

   :app.http.websocket/handler
   {:pool (ig/ref :app.db/pool)

@@ -336,23 +342,12 @@

    :backends
    {:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
     :assets-db (ig/ref [::assets :app.storage.db/backend])
     :assets-fs (ig/ref [::assets :app.storage.fs/backend])

     :tmp      (ig/ref [::tmp :app.storage.fs/backend])
     :fdata-s3 (ig/ref [::fdata :app.storage.s3/backend])

     ;; keep this for backward compatibility
     :s3 (ig/ref [::assets :app.storage.s3/backend])
     :fs (ig/ref [::assets :app.storage.fs/backend])}}

   [::fdata :app.storage.s3/backend]
   {:region   (cf/get :storage-fdata-s3-region)
    :bucket   (cf/get :storage-fdata-s3-bucket)
    :endpoint (cf/get :storage-fdata-s3-endpoint)
    :prefix   (cf/get :storage-fdata-s3-prefix)
    :executor (ig/ref [::default :app.worker/executor])}

   [::assets :app.storage.s3/backend]
   {:region   (cf/get :storage-assets-s3-region)
    :endpoint (cf/get :storage-assets-s3-endpoint)

@@ -361,12 +356,7 @@

   [::assets :app.storage.fs/backend]
   {:directory (cf/get :storage-assets-fs-directory)}

   [::tmp :app.storage.fs/backend]
   {:directory "/tmp/penpot"}

   [::assets :app.storage.db/backend]
   {:pool (ig/ref :app.db/pool)}})
  })

(def system nil)
@@ -12,18 +12,16 @@
   [app.common.media :as cm]
   [app.common.spec :as us]
   [app.config :as cf]
   [app.storage.tmp :as tmp]
   [app.util.bytes :as bs]
   [app.util.svg :as svg]
   [buddy.core.bytes :as bb]
   [buddy.core.codecs :as bc]
   [clojure.java.io :as io]
   [clojure.java.shell :as sh]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [datoteka.core :as fs])
  (:import
   java.io.ByteArrayInputStream
   java.io.OutputStream
   org.apache.commons.io.IOUtils
   org.im4java.core.ConvertCmd
   org.im4java.core.IMOperation
   org.im4java.core.Info))

@@ -93,18 +91,16 @@
  (let [{:keys [path mtype]} input
        format (or (cm/mtype->format mtype) format)
        ext    (cm/format->extension format)
        tmp    (fs/create-tempfile :suffix ext)]
        tmp    (tmp/tempfile :prefix "penpot.media." :suffix ext)]

    (doto (ConvertCmd.)
      (.run operation (into-array (map str [path tmp]))))

    (let [thumbnail-data (fs/slurp-bytes tmp)]
      (fs/delete tmp)
      (assoc params
             :format format
             :mtype (cm/format->mtype format)
             :size (alength ^bytes thumbnail-data)
             :data (ByteArrayInputStream. thumbnail-data)))))
    (assoc params
           :format format
           :mtype (cm/format->mtype format)
           :size (fs/size tmp)
           :data tmp)))

(defmethod process :generic-thumbnail
  [{:keys [quality width height] :as params}]

@@ -201,59 +197,54 @@
(defmethod process :generate-fonts
  [{:keys [input] :as params}]
  (letfn [(ttf->otf [data]
            (let [input-file  (fs/create-tempfile :prefix "penpot")
                  output-file (fs/path (str input-file ".otf"))
                  _   (with-open [out (io/output-stream input-file)]
                        (IOUtils/writeChunked ^bytes data ^OutputStream out)
                        (.flush ^OutputStream out))
                  res (sh/sh "fontforge" "-lang=ff" "-c"
                             (str/fmt "Open('%s'); Generate('%s')"
                                      (str input-file)
                                      (str output-file)))]
            (let [finput  (tmp/tempfile :prefix "penpot.font." :suffix "")
                  foutput (fs/path (str finput ".otf"))
                  _   (bs/write-to-file! data finput)
                  res (sh/sh "fontforge" "-lang=ff" "-c"
                             (str/fmt "Open('%s'); Generate('%s')"
                                      (str finput)
                                      (str foutput)))]
              (when (zero? (:exit res))
                (fs/slurp-bytes output-file))))

                foutput)))

          (otf->ttf [data]
            (let [input-file  (fs/create-tempfile :prefix "penpot")
                  output-file (fs/path (str input-file ".ttf"))
                  _   (with-open [out (io/output-stream input-file)]
                        (IOUtils/writeChunked ^bytes data ^OutputStream out)
                        (.flush ^OutputStream out))
                  res (sh/sh "fontforge" "-lang=ff" "-c"
                             (str/fmt "Open('%s'); Generate('%s')"
                                      (str input-file)
                                      (str output-file)))]
            (let [finput  (tmp/tempfile :prefix "penpot.font." :suffix "")
                  foutput (fs/path (str finput ".ttf"))
                  _   (bs/write-to-file! data finput)
                  res (sh/sh "fontforge" "-lang=ff" "-c"
                             (str/fmt "Open('%s'); Generate('%s')"
                                      (str finput)
                                      (str foutput)))]
              (when (zero? (:exit res))
                (fs/slurp-bytes output-file))))
                foutput)))

          (ttf-or-otf->woff [data]
            (let [input-file  (fs/create-tempfile :prefix "penpot" :suffix "")
                  output-file (fs/path (str input-file ".woff"))
                  _   (with-open [out (io/output-stream input-file)]
                        (IOUtils/writeChunked ^bytes data ^OutputStream out)
                        (.flush ^OutputStream out))
                  res (sh/sh "sfnt2woff" (str input-file))]
            ;; NOTE: foutput is not used directly; it represents the
            ;; default output of the execution of the underlying
            ;; command.
            (let [finput  (tmp/tempfile :prefix "penpot.font." :suffix "")
                  foutput (fs/path (str finput ".woff"))
                  _   (bs/write-to-file! data finput)
                  res (sh/sh "sfnt2woff" (str finput))]
              (when (zero? (:exit res))
                (fs/slurp-bytes output-file))))
                foutput)))

          (ttf-or-otf->woff2 [data]
            (let [input-file  (fs/create-tempfile :prefix "penpot" :suffix "")
                  output-file (fs/path (str input-file ".woff2"))
                  _   (with-open [out (io/output-stream input-file)]
                        (IOUtils/writeChunked ^bytes data ^OutputStream out)
                        (.flush ^OutputStream out))
                  res (sh/sh "woff2_compress" (str input-file))]
            ;; NOTE: foutput is not used directly; it represents the
            ;; default output of the execution of the underlying
            ;; command.
            (let [finput  (tmp/tempfile :prefix "penpot.font." :suffix ".tmp")
                  foutput (fs/path (str (fs/base finput) ".woff2"))
                  _   (bs/write-to-file! data finput)
                  res (sh/sh "woff2_compress" (str finput))]
              (when (zero? (:exit res))
                (fs/slurp-bytes output-file))))
                foutput)))

          (woff->sfnt [data]
            (let [input-file (fs/create-tempfile :prefix "penpot" :suffix "")
                  _   (with-open [out (io/output-stream input-file)]
                        (IOUtils/writeChunked ^bytes data ^OutputStream out)
                        (.flush ^OutputStream out))
                  res (sh/sh "woff2sfnt" (str input-file)
                             :out-enc :bytes)]
            (let [finput (tmp/tempfile :prefix "penpot" :suffix "")
                  _   (bs/write-to-file! data finput)
                  res (sh/sh "woff2sfnt" (str finput)
                             :out-enc :bytes)]
              (when (zero? (:exit res))
                (:out res))))
@@ -226,6 +226,12 @@

   {:name "0072-mod-file-object-thumbnail-table"
    :fn (mg/resource "app/migrations/sql/0072-mod-file-object-thumbnail-table.sql")}

   {:name "0073-mod-file-media-object-constraints"
    :fn (mg/resource "app/migrations/sql/0073-mod-file-media-object-constraints.sql")}

   {:name "0074-mod-file-library-rel-constraints"
    :fn (mg/resource "app/migrations/sql/0074-mod-file-library-rel-constraints.sql")}
   ])
@@ -0,0 +1,11 @@
ALTER TABLE file_media_object
  ALTER CONSTRAINT file_media_object_media_id_fkey DEFERRABLE INITIALLY IMMEDIATE;

ALTER TABLE file_media_object
  ALTER CONSTRAINT file_media_object_thumbnail_id_fkey DEFERRABLE INITIALLY IMMEDIATE;

ALTER TABLE file_media_object
  RENAME CONSTRAINT media_object_file_id_fkey TO file_media_object_file_id_fkey;

ALTER TABLE file_media_object
  ALTER CONSTRAINT file_media_object_file_id_fkey DEFERRABLE INITIALLY IMMEDIATE;
@@ -0,0 +1,5 @@
ALTER TABLE file_library_rel
  ALTER CONSTRAINT file_library_rel_file_id_fkey DEFERRABLE INITIALLY IMMEDIATE;

ALTER TABLE file_library_rel
  ALTER CONSTRAINT file_library_rel_library_file_id_fkey DEFERRABLE INITIALLY IMMEDIATE;
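
These migrations make the foreign keys deferrable so that an import transaction can insert interdependent rows in any order and have the checks run at commit. A minimal sketch of how an importer could exploit that inside one transaction (illustration only, assuming a `pool` and the `db` helpers shown elsewhere in this diff):

(db/with-atomic [conn pool]
  ;; postpone FK checks until commit while ids are being remapped
  (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
  ;; ... insert file, file_media_object and file_library_rel rows here ...
  )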
@@ -86,6 +86,30 @@
          (let [context {:profile-id profile-id}]
            (raise (ex/wrap-with-context cause context)))))))))

(defn- rpc-command-handler
  "Ring handler that dispatches command requests and converts the
  internal async flow into the ring async flow."
  [methods {:keys [profile-id session-id params] :as request} respond raise]
  (letfn [(handle-response [result]
            (let [mdata (meta result)]
              (p/-> (yrs/response 200 result)
                    (handle-response-transformation request mdata)
                    (handle-before-comple-hook mdata))))]

    (let [cmd    (keyword (:command params))
          data   (into {::request request} params)
          data   (if profile-id
                   (assoc data :profile-id profile-id ::session-id session-id)
                   (dissoc data :profile-id))

          method (get methods cmd default-handler)]
      (-> (method data)
          (p/then handle-response)
          (p/then respond)
          (p/catch (fn [cause]
                     (let [context {:profile-id profile-id}]
                       (raise (ex/wrap-with-context cause context)))))))))

(defn- wrap-metrics
  "Wrap service method with metrics measurement."
  [{:keys [metrics ::metrics-id]} f mdata]

@@ -214,6 +238,13 @@
         (map (partial process-method cfg))
         (into {}))))

(defn- resolve-command-methods
  [cfg]
  (let [cfg (assoc cfg ::type "command" ::metrics-id :rpc-command-timing)]
    (->> (sv/scan-ns 'app.rpc.commands.binfile)
         (map (partial process-method cfg))
         (into {}))))
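
A hedged sketch of what `sv/scan-ns` picks up: methods declared with `sv/defmethod` in the scanned namespace become dispatchable as POST /api/rpc/command/&lt;name&gt; through the router change above. The name below is purely illustrative, not one of the commands this commit registers:

(sv/defmethod ::ping
  "A hypothetical command, shown only for the registration shape."
  [_cfg _params]
  {:pong true})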
(s/def ::storage some?)
(s/def ::session map?)
(s/def ::tokens fn?)

@@ -227,7 +258,9 @@
(defmethod ig/init-key ::rpc
  [_ cfg]
  (let [mq (resolve-query-methods cfg)
        mm (resolve-mutation-methods cfg)]
    {:methods {:query mq :mutation mm}
        mm (resolve-mutation-methods cfg)
        cm (resolve-command-methods cfg)]
    {:methods {:query mq :mutation mm :command cm}
     :command-handler (partial rpc-command-handler cm)
     :query-handler (partial rpc-query-handler mq)
     :mutation-handler (partial rpc-mutation-handler mm)}))
backend/src/app/rpc/commands/binfile.clj (new file, 716 lines)
@@ -0,0 +1,716 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.rpc.commands.binfile
  (:refer-clojure :exclude [assert])
  (:require
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.pages.migrations :as pmg]
   [app.common.spec :as us]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.media :as media]
   [app.rpc.queries.files :refer [decode-row]]
   [app.storage :as sto]
   [app.storage.tmp :as tmp]
   [app.tasks.file-gc]
   [app.util.blob :as blob]
   [app.util.bytes :as bs]
   [app.util.fressian :as fres]
   [app.util.services :as sv]
   [app.util.time :as dt]
   [clojure.java.io :as io]
   [clojure.spec.alpha :as s]
   [clojure.walk :as walk]
   [cuerdas.core :as str]
   [yetti.adapter :as yt])
  (:import
   java.io.DataInputStream
   java.io.DataOutputStream
   java.io.InputStream
   java.io.OutputStream
   java.lang.AutoCloseable))

(set! *warn-on-reflection* true)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL STREAM IO
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
(def ^:const penpot-magic-number 800099563638710213)
(def ^:const max-object-size (* 1024 1024 100)) ; Only allow 100MiB max file size.

(def ^:dynamic *position* nil)

(defn get-mark
  [id]
  (case id
    :header 1
    :stream 2
    :uuid   3
    :label  4
    :obj    5
    (ex/raise :type :validation
              :code :invalid-mark-id
              :hint (format "invalid mark id %s" id))))

;; (defn buffered-output-stream
;;   "Returns a buffered output stream that ignores flush calls. This is
;;   needed because transit-java calls flush very aggressively on each
;;   object write."
;;   [^java.io.OutputStream os ^long chunk-size]
;;   (proxy [java.io.BufferedOutputStream] [os (int chunk-size)]
;;     ;; Explicitly do not forward flush
;;     (flush [])
;;     (close []
;;       (proxy-super flush)
;;       (proxy-super close)))

(defmacro assert
  [expr hint]
  `(when-not ~expr
     (ex/raise :type :validation
               :code :unexpected-condition
               :hint ~hint)))

(defmacro assert-mark
  [v type]
  `(let [expected# (get-mark ~type)
         val#      (long ~v)]
     (when (not= val# expected#)
       (ex/raise :type :validation
                 :code :unexpected-mark
                 :hint (format "received mark %s, expected %s" val# expected#)))))

(defmacro assert-label
  [expr label]
  `(let [v# ~expr]
     (when (not= v# ~label)
       (ex/raise :type :assertion
                 :code :unexpected-label
                 :hint (format "received label %s, expected %s" v# ~label)))))

;; --- PRIMITIVE

(defn write-byte!
  [^DataOutputStream output data]
  (l/trace :fn "write-byte!" :data data :position @*position* ::l/async false)
  (.writeByte output (byte data))
  (swap! *position* inc))

(defn read-byte!
  [^DataInputStream input]
  (let [v (.readByte input)]
    (l/trace :fn "read-byte!" :val v :position @*position* ::l/async false)
    (swap! *position* inc)
    v))

(defn write-long!
  [^DataOutputStream output data]
  (l/trace :fn "write-long!" :data data :position @*position* ::l/async false)
  (.writeLong output (long data))
  (swap! *position* + 8))

(defn read-long!
  [^DataInputStream input]
  (let [v (.readLong input)]
    (l/trace :fn "read-long!" :val v :position @*position* ::l/async false)
    (swap! *position* + 8)
    v))

(defn write-bytes!
  [^DataOutputStream output ^bytes data]
  (let [size (alength data)]
    (l/trace :fn "write-bytes!" :size size :position @*position* ::l/async false)
    (.write output data 0 size)
    (swap! *position* + size)))

(defn read-bytes!
  [^InputStream input ^bytes buff]
  (let [size   (alength buff)
        readed (.readNBytes input buff 0 size)]
    (l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/async false)
    (swap! *position* + readed)
    readed))

;; --- COMPOSITE

(defn write-uuid!
  [^DataOutputStream output id]
  (l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/async false)

  (doto output
    (write-byte! (get-mark :uuid))
    (write-long! (uuid/get-word-high id))
    (write-long! (uuid/get-word-low id))))

(defn read-uuid!
  [^DataInputStream input]
  (l/trace :fn "read-uuid!" :position @*position* ::l/async false)
  (let [m (read-byte! input)]
    (assert-mark m :uuid)
    (let [a (read-long! input)
          b (read-long! input)]
      (uuid/custom a b))))

(defn write-obj!
  [^DataOutputStream output data]
  (l/trace :fn "write-obj!" :position @*position* ::l/async false)
  (let [^bytes data (fres/encode data)]
    (doto output
      (write-byte! (get-mark :obj))
      (write-long! (alength data))
      (write-bytes! data))))

(defn read-obj!
  [^DataInputStream input]
  (l/trace :fn "read-obj!" :position @*position* ::l/async false)
  (let [m (read-byte! input)]
    (assert-mark m :obj)
    (let [size (read-long! input)]
      (assert (pos? size) "incorrect object size found on reading")
      (let [buff (byte-array size)]
        (read-bytes! input buff)
        (fres/decode buff)))))

(defn write-label!
  [^DataOutputStream output label]
  (l/trace :fn "write-label!" :label label :position @*position* ::l/async false)
  (doto output
    (write-byte! (get-mark :label))
    (write-obj! label)))

(defn read-label!
  [^DataInputStream input]
  (l/trace :fn "read-label!" :position @*position* ::l/async false)
  (let [m (read-byte! input)]
    (assert-mark m :label)
    (read-obj! input)))

(defn write-header!
  [^DataOutputStream output & {:keys [version metadata]}]
  (l/trace :fn "write-header!"
           :version version
           :metadata metadata
           :position @*position*
           ::l/async false)

  (doto output
    (write-byte! (get-mark :header))
    (write-long! penpot-magic-number)
    (write-long! version)
    (write-obj! metadata)))

(defn read-header!
  [^DataInputStream input]
  (l/trace :fn "read-header!" :position @*position* ::l/async false)
  (let [mark (read-byte! input)
        mnum (read-long! input)
        vers (read-long! input)]

    (when (or (not= mark (get-mark :header))
              (not= mnum penpot-magic-number))
      (ex/raise :type :validation
                :code :invalid-penpot-file))

    (-> (read-obj! input)
        (assoc ::version vers))))
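
A minimal round-trip sketch of this framing, assuming only the helpers above and a bound *position* (every IO helper updates it): the header is one mark byte, the magic-number long, the version long, and a fressian-encoded metadata object.

(let [baos (java.io.ByteArrayOutputStream.)]
  (binding [*position* (atom 0)]
    ;; write: :header mark, magic number, version, metadata object
    (write-header! (DataOutputStream. baos) :version 1 :metadata {:files []})
    ;; read it back; the mark and magic number are validated on the way in
    (read-header! (DataInputStream.
                   (java.io.ByteArrayInputStream. (.toByteArray baos))))))
;; => {:files [], ::version 1}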
(defn copy-stream!
  [^OutputStream output ^InputStream input ^long size]
  (let [written (bs/copy! input output :size size)]
    (l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/async false)
    (swap! *position* + written)
    written))

(defn write-stream!
  [^DataOutputStream output stream size]
  (l/trace :fn "write-stream!" :position @*position* ::l/async false :size size)
  (doto output
    (write-byte! (get-mark :stream))
    (write-long! size))

  (copy-stream! output stream size))

(def size-2mib
  (* 1024 1024 2))

(defn read-stream!
  [^DataInputStream input]
  (l/trace :fn "read-stream!" :position @*position* ::l/async false)
  (let [m (read-byte! input)
        s (read-long! input)
        p (tmp/tempfile :prefix "penpot.binfile.")]
    (assert-mark m :stream)

    (when (> s max-object-size)
      (ex/raise :type :validation
                :code :max-file-size-reached
                :hint (str/ffmt "unable to import storage object with size % bytes" s)))

    (if (> s size-2mib)
      ;; If size is more than 2MiB, use a temporary file.
      (with-open [^OutputStream output (io/output-stream p)]
        (let [readed (bs/copy! input output :offset 0 :size s)]
          (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/async false)
          (swap! *position* + readed)
          [s p]))

      ;; If not, use an in-memory byte-array.
      [s (bs/read-as-bytes input :size s)])))
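
A consumer sketch, assuming an open `input` DataInputStream and a bound *position*: the returned pair is the declared size plus either a temp-file path (large streams) or a byte array (small ones), and clojure.java.io coerces both to an input stream, just as export-handler does above with `(io/input-stream path)`.

(let [[size data] (read-stream! input)]
  ;; drain the object into an in-memory sink; real callers would
  ;; hand `data` to the storage layer instead
  (with-open [in  (io/input-stream data)
              out (java.io.ByteArrayOutputStream.)]
    (bs/copy! in out :size size)))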
(defmacro assert-read-label!
  [input expected-label]
  `(let [readed#   (read-label! ~input)
         expected# ~expected-label]
     (when (not= readed# expected#)
       (ex/raise :type :validation
                 :code :unexpected-label
                 :hint (format "unexpected label found: %s, expected: %s" readed# expected#)))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn- retrieve-file
  [pool file-id]
  (->> (db/query pool :file {:id file-id})
       (map decode-row)
       (first)))

(def ^:private sql:file-media-objects
  "SELECT * FROM file_media_object WHERE id = ANY(?)")

(defn- retrieve-file-media
  [pool {:keys [data] :as file}]
  (with-open [^AutoCloseable conn (db/open pool)]
    (let [ids (app.tasks.file-gc/collect-used-media data)
          ids (db/create-array conn "uuid" ids)]
      (db/exec! conn [sql:file-media-objects ids]))))

(def ^:private storage-object-id-xf
  (comp
   (mapcat (juxt :media-id :thumbnail-id))
   (filter uuid?)))

(def ^:private sql:file-libraries
  "WITH RECURSIVE libs AS (
     SELECT fl.id, fl.deleted_at
       FROM file AS fl
       JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
      WHERE flr.file_id = ?::uuid
     UNION
     SELECT fl.id, fl.deleted_at
       FROM file AS fl
       JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
       JOIN libs AS l ON (flr.file_id = l.id)
   )
   SELECT DISTINCT l.id
     FROM libs AS l
    WHERE l.deleted_at IS NULL OR l.deleted_at > now();")

(defn- retrieve-libraries
  [pool file-id]
  (map :id (db/exec! pool [sql:file-libraries file-id])))

(def ^:private sql:file-library-rels
  "SELECT * FROM file_library_rel
    WHERE file_id = ANY(?)")

(defn- retrieve-library-relations
  [pool ids]
  (with-open [^AutoCloseable conn (db/open pool)]
    (db/exec! conn [sql:file-library-rels (db/create-array conn "uuid" ids)])))

(defn write-export!
  "Export the specified file in the custom penpot binary format. Some
  options are available to customize the output:

  `::include-libraries?`: in addition to the specified file, all the
  linked libraries will also be included (including transitive
  dependencies)."

  [{:keys [pool storage ::output ::file-id ::include-libraries?]}]
  (let [libs  (when include-libraries?
                (retrieve-libraries pool file-id))
        rels  (when include-libraries?
                (retrieve-library-relations pool (cons file-id libs)))
        files (into [file-id] libs)
        sids  (atom #{})]

    ;; Write header with metadata
    (l/debug :hint "exportation summary"
             :files (count files)
             :rels (count rels)
             :include-libs? include-libraries?
             ::l/async false)

    (let [sections [:v1/files :v1/rels :v1/sobjects]
          mdata    {:penpot-version (:full cf/version)
                    :sections sections
                    :files files}]
      (write-header! output :version 1 :metadata mdata))

    (l/debug :hint "write section" :section :v1/files :total (count files) ::l/async false)
    (write-label! output :v1/files)
    (doseq [file-id files]
      (let [file  (retrieve-file pool file-id)
            media (retrieve-file-media pool file)]

        ;; Collect all storage ids so they can later be written under
        ;; the specific storage objects section.
        (swap! sids into (sequence storage-object-id-xf media))

        (l/trace :hint "write penpot file"
                 :id file-id
                 :media (count media)
                 ::l/async false)

        (doto output
          (write-obj! file)
          (write-obj! media))))

    (l/debug :hint "write section" :section :v1/rels :total (count rels) ::l/async false)
    (doto output
      (write-label! :v1/rels)
      (write-obj! rels))

    (let [sids (into [] @sids)]
      (l/debug :hint "write section"
               :section :v1/sobjects
               :items (count sids)
               ::l/async false)

      ;; Write all collected storage objects
      (doto output
        (write-label! :v1/sobjects)
        (write-obj! sids))

      (let [storage (media/configure-assets-storage storage)]
        (doseq [id sids]
          (let [{:keys [size] :as obj} @(sto/get-object storage id)]
            (l/trace :hint "write sobject" :id id ::l/async false)

            (doto output
              (write-uuid! id)
              (write-obj! (meta obj)))

            (with-open [^InputStream stream @(sto/get-object-data storage obj)]
              (let [written (write-stream! output stream size)]
                (when (not= written size)
                  (ex/raise :type :validation
                            :code :mismatch-readed-size
                            :hint (str/ffmt "found unexpected object size; size=% written=%" size written)))))))))))
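
For orientation, a usage sketch from outside this namespace, mirroring how the /dbg/file/export handler shown earlier drives this machinery through the higher-level binf/export! entry point (the `cfg` and `file-id` names stand in for values supplied by the caller):

(let [path (-> cfg
               (assoc ::binf/file-id file-id)
               (assoc ::binf/include-libraries? true)
               (binf/export!))]
  ;; export! yields a path to the generated .penpot binary
  (io/input-stream path))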
|
||||
|
||||
|
||||
;; Dynamic variables for importation process.
|
||||
|
||||
(def ^:dynamic *files*)
|
||||
(def ^:dynamic *media*)
|
||||
(def ^:dynamic *index*)
|
||||
(def ^:dynamic *conn*)
|
||||
|
||||
(defn read-import!
|
||||
"Do the importation of the specified resource in penpot custom binary
|
||||
format. There are some options for customize the importation
|
||||
behavior:
|
||||
|
||||
`::overwrite?`: if true, instead of creating new files and remaping id references,
|
||||
it reuses all ids and updates existing objects; defaults to `false`.
|
||||
|
||||
`::migrate?`: if true, applies the migration before persisting the
|
||||
file data; defaults to `false`.
|
||||
|
||||
`::ignore-index-errors?`: if true, do not fail on index lookup errors, can
|
||||
happen with broken files; defaults to: `false`.
|
||||
"
|
||||
|
||||
[{:keys [pool storage ::project-id ::ts ::input ::overwrite? ::migrate? ::ignore-index-errors?]
|
||||
:or {overwrite? false migrate? false ts (dt/now)}
|
||||
:as cfg}]

  (letfn [(lookup-index [id]
            (if ignore-index-errors?
              (or (get @*index* id) id)
              (let [val (get @*index* id)]
                (l/trace :fn "lookup-index" :id id :val val ::l/async false)
                (when-not val
                  (ex/raise :type :validation
                            :code :incomplete-index
                            :hint "looks like index has missing data"))
                val)))

          (update-index [index coll]
            (loop [items (seq coll)
                   index index]
              (if-let [id (first items)]
                (let [new-id (if overwrite? id (uuid/next))]
                  (l/trace :fn "update-index" :id id :new-id new-id ::l/async false)
                  (recur (rest items)
                         (assoc index id new-id)))
                index)))

          (process-map-form [form]
            (cond-> form
              ;; Relink image shapes
              (and (map? (:metadata form))
                   (= :image (:type form)))
              (update-in [:metadata :id] lookup-index)

              ;; This covers old shapes and the new :fills.
              (uuid? (:fill-color-ref-file form))
              (update :fill-color-ref-file lookup-index)

              ;; This covers old shapes and the new :strokes.
              (uuid? (:stroke-color-ref-file form))
              (update :stroke-color-ref-file lookup-index)

              ;; This covers all text shapes that reference a typography.
              (uuid? (:typography-ref-file form))
              (update :typography-ref-file lookup-index)

              ;; This covers the shadows and grids (they have the
              ;; :file-id prop directly).
              (uuid? (:file-id form))
              (update :file-id lookup-index)))

          ;; A function responsible for analyzing all file data and
          ;; replacing the old :component-file references with the
          ;; new ones, using the provided file index.
          (relink-shapes [data]
            (walk/postwalk (fn [form]
                             (if (map? form)
                               (try
                                 (process-map-form form)
                                 (catch Throwable cause
                                   (l/trace :hint "failed form" :form (pr-str form) ::l/async false)
                                   (throw cause)))
                               form))
                           data))

          ;; A function responsible for processing the :media attr of
          ;; the file data and remapping the old ids to the new ones.
          (relink-media [media]
            (reduce-kv (fn [res k v]
                         (let [id (lookup-index k)]
                           (if (uuid? id)
                             (-> res
                                 (assoc id (assoc v :id id))
                                 (dissoc k))
                             res)))
                       media
                       media))

          (create-or-update-file [params]
            (let [sql (str "INSERT INTO file (id, project_id, name, revn, is_shared, data, created_at, modified_at) "
                           "VALUES (?, ?, ?, ?, ?, ?, ?, ?) "
                           "ON CONFLICT (id) DO UPDATE SET data=?")]
              (db/exec-one! *conn* [sql
                                    (:id params)
                                    (:project-id params)
                                    (:name params)
                                    (:revn params)
                                    (:is-shared params)
                                    (:data params)
                                    (:created-at params)
                                    (:modified-at params)
                                    (:data params)])))

          (read-files-section! [input]
            (l/debug :hint "reading section" :section :v1/files ::l/async false)
            (assert-read-label! input :v1/files)

            ;; Process/read all files
            (doseq [expected-file-id *files*]
              (let [file    (read-obj! input)
                    media'  (read-obj! input)
                    file-id (:id file)]

                (when (not= file-id expected-file-id)
                  (ex/raise :type :validation
                            :code :inconsistent-penpot-file
                            :hint "the penpot file seems corrupt, found unexpected uuid (file-id)"))

                ;; Update the index with the media ids
                (l/trace :hint "update index with media" ::l/async false)
                (vswap! *index* update-index (map :id media'))

                ;; Store file media for later insertion
                (l/trace :hint "update media references" ::l/async false)
                (vswap! *media* into (map #(update % :id lookup-index)) media')

                (l/trace :hint "processing file" :file-id file-id ::l/async false)

                (let [file-id' (lookup-index file-id)
                      data     (-> (:data file)
                                   (assoc :id file-id')
                                   (cond-> migrate? (pmg/migrate-data))
                                   (update :pages-index relink-shapes)
                                   (update :components relink-shapes)
                                   (update :media relink-media))

                      params   {:id file-id'
                                :project-id project-id
                                :name (str "Imported: " (:name file))
                                :revn (:revn file)
                                :is-shared (:is-shared file)
                                :data (blob/encode data)
                                :created-at ts
                                :modified-at ts}]

                  (l/trace :hint "create file" :id file-id' ::l/async false)

                  (if overwrite?
                    (create-or-update-file params)
                    (db/insert! *conn* :file params))

                  (when overwrite?
                    (db/delete! *conn* :file-thumbnail {:file-id file-id'}))))))

          (read-rels-section! [input]
            (l/debug :hint "reading section" :section :v1/rels ::l/async false)
            (assert-read-label! input :v1/rels)

            (let [rels (read-obj! input)]
              ;; Insert all file relations
              (doseq [rel rels]
                (let [rel (-> rel
                              (assoc :synced-at ts)
                              (update :file-id lookup-index)
                              (update :library-file-id lookup-index))]
                  (l/trace :hint "create file library link"
                           :file-id (:file-id rel)
                           :lib-id (:library-file-id rel)
                           ::l/async false)
                  (db/insert! *conn* :file-library-rel rel)))))

          (read-sobjects-section! [input]
            (l/debug :hint "reading section" :section :v1/sobjects ::l/async false)
            (assert-read-label! input :v1/sobjects)

            (let [storage (media/configure-assets-storage storage)
                  ids     (read-obj! input)]

              ;; Step 1: process all storage objects
              (doseq [expected-storage-id ids]
                (let [id    (read-uuid! input)
                      mdata (read-obj! input)]

                  (when (not= id expected-storage-id)
                    (ex/raise :type :validation
                              :code :inconsistent-penpot-file
                              :hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"))

                  (l/trace :hint "read storage object" :id id ::l/async false)

                  (let [[size resource] (read-stream! input)
                        hash            (sto/calculate-hash resource)
                        content         (-> (sto/content resource size)
                                            (sto/wrap-with-hash hash))
                        params          (-> mdata
                                            (assoc ::sto/deduplicate? true)
                                            (assoc ::sto/content content)
                                            (assoc ::sto/touched-at (dt/now)))
                        sobject         @(sto/put-object! storage params)]
                    (l/trace :hint "persisted storage object" :id id :new-id (:id sobject) ::l/async false)
                    (vswap! *index* assoc id (:id sobject)))))

              ;; Step 2: insert all file-media-object rows with the
              ;; correct storage-id references.
              (doseq [item @*media*]
                (l/trace :hint "inserting file media objects" :id (:id item) ::l/async false)
                (db/insert! *conn* :file-media-object
                            (-> item
                                (update :file-id lookup-index)
                                (d/update-when :media-id lookup-index)
                                (d/update-when :thumbnail-id lookup-index))
                            {:on-conflict-do-nothing overwrite?}))))

          (read-section! [section input]
            (case section
              :v1/rels     (read-rels-section! input)
              :v1/files    (read-files-section! input)
              :v1/sobjects (read-sobjects-section! input)))]

    (with-open [input (bs/zstd-input-stream input)]
      (with-open [input (bs/data-input-stream input)]
        (db/with-atomic [conn pool]
          (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED;"])

          ;; Verify that we received a proper .penpot file
          (let [{:keys [sections files]} (read-header! input)]
            (l/debug :hint "import verified" :files files :overwrite? overwrite?)
            (binding [*index* (volatile! (update-index {} files))
                      *media* (volatile! [])
                      *files* files
                      *conn*  conn]
              (run! #(read-section! % input) sections))))))))


(defn export!
  [cfg]
  (let [path (tmp/tempfile :prefix "penpot.export.")
        id   (uuid/next)
        ts   (dt/now)
        cs   (volatile! nil)]
    (try
      (l/info :hint "start exportation" :export-id id)
      (with-open [output (io/output-stream path)]
        (with-open [output (bs/zstd-output-stream output :level 12)]
          (with-open [output (bs/data-output-stream output)]
            (binding [*position* (atom 0)]
              (write-export! (assoc cfg ::output output))
              path))))

      (catch Throwable cause
        (vreset! cs cause)
        (throw cause))

      (finally
        (l/info :hint "exportation finished" :export-id id
                :elapsed (str (inst-ms (dt/diff ts (dt/now))) "ms")
                :cause @cs)))))

(defn import!
  [{:keys [::input] :as cfg}]
  (let [id (uuid/next)
        ts (dt/now)
        cs (volatile! nil)]
    (try
      (l/info :hint "start importation" :import-id id)
      (binding [*position* (atom 0)]
        (with-open [input (io/input-stream input)]
          (read-import! (assoc cfg ::input input))))

      (catch Throwable cause
        (vreset! cs cause)
        (throw cause))

      (finally
        (l/info :hint "importation finished" :import-id id
                :elapsed (str (inst-ms (dt/diff ts (dt/now))) "ms")
                :error? (some? @cs)
                :cause @cs)))))
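A minimal usage sketch of the two entry points, assuming an already-initialized system providing :pool and :storage (the cfg shape follows the destructuring above; pool, storage and the ids are placeholders):

(comment
  ;; Export a file (with its linked libraries) to a temporary .penpot binary:
  (def path (export! {:pool pool
                      :storage storage
                      ::file-id file-id
                      ::include-libraries? true}))

  ;; Import it into a project, remapping all ids to fresh uuids:
  (import! {:pool pool
            :storage storage
            ::project-id project-id
            ::input path
            ::migrate? true}))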

;; --- Command: export-binfile

(s/def ::file-id ::us/uuid)
(s/def ::profile-id ::us/uuid)

(s/def ::export-binfile
  (s/keys :req-un [::profile-id ::file-id]))

#_:clj-kondo/ignore
(sv/defmethod ::export-binfile
  "Export a penpot file in a binary format."
  [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
  {:hello "world"})

@ -6,6 +6,7 @@
(ns app.rpc.mutations.files
  (:require
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.pages :as cp]
   [app.common.pages.migrations :as pmg]

@ -63,21 +64,23 @@
      (db/insert! conn :file-profile-rel))))

(defn create-file
  [conn {:keys [id name project-id is-shared data deleted-at revn]
         :or {is-shared false
              revn 0
              deleted-at nil}
  [conn {:keys [id name project-id is-shared data revn
                modified-at deleted-at ignore-sync-until]
         :or {is-shared false revn 0}
         :as params}]
  (let [id   (or id (:id data) (uuid/next))
        data (or data (cp/make-file-data id))
        file (db/insert! conn :file
                         {:id id
                          :project-id project-id
                          :name name
                          :revn revn
                          :is-shared is-shared
                          :data (blob/encode data)
                          :deleted-at deleted-at})]
                         (d/without-nils
                          {:id id
                           :project-id project-id
                           :name name
                           :revn revn
                           :is-shared is-shared
                           :data (blob/encode data)
                           :ignore-sync-until ignore-sync-until
                           :modified-at modified-at
                           :deleted-at deleted-at}))]

    (->> (assoc params :file-id id :role :owner)
         (create-file-role conn))


@ -71,9 +71,9 @@
        data)

      (persist-font-object [data mtype]
        (when-let [fdata (get data mtype)]
          (p/let [hash    (calculate-hash fdata)
                  content (-> (sto/content fdata)
        (when-let [resource (get data mtype)]
          (p/let [hash    (calculate-hash resource)
                  content (-> (sto/content resource)
                              (sto/wrap-with-hash hash))]
            (sto/put-object! storage {::sto/content content
                                      ::sto/touched-at (dt/now)


@ -17,6 +17,8 @@
   [app.rpc.queries.teams :as teams]
   [app.rpc.rlimit :as rlimit]
   [app.storage :as sto]
   [app.storage.tmp :as tmp]
   [app.util.bytes :as bs]
   [app.util.services :as sv]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]

@ -179,11 +181,12 @@
     (* 1024 1024 100)) ; 100MiB

(defn- create-file-media-object-from-url
  [{:keys [storage http-client] :as cfg} {:keys [url name] :as params}]
  [{:keys [http-client] :as cfg} {:keys [url name] :as params}]
  (letfn [(parse-and-validate-size [headers]
            (let [size   (some-> (get headers "content-length") d/parse-integer)
                  mtype  (get headers "content-type")
                  format (cm/mtype->format mtype)]

              (when-not size
                (ex/raise :type :validation
                          :code :unknown-size

@ -203,24 +206,24 @@
               :mtype mtype
               :format format}))

          (get-upload-object [sobj]
            (p/let [path  (sto/get-object-path storage sobj)
                    mdata (meta sobj)]
              {:filename "tempfile"
               :size (:size sobj)
               :path path
               :mtype (:content-type mdata)}))

          (download-media [uri]
            (p/let [{:keys [body headers]} (http-client {:method :get :uri uri} {:response-type :input-stream})
                    {:keys [size mtype]} (parse-and-validate-size headers)]
            (-> (http-client {:method :get :uri uri} {:response-type :input-stream})
                (p/then process-response)))

              (-> (assoc storage :backend :tmp)
                  (sto/put-object! {::sto/content (sto/content body size)
                                    ::sto/expired-at (dt/in-future {:minutes 30})
                                    :content-type mtype
                                    :bucket "file-media-object"})
                  (p/then get-upload-object))))]
          (process-response [{:keys [body headers] :as response}]
            (let [{:keys [size mtype]} (parse-and-validate-size headers)
                  path    (tmp/tempfile :prefix "penpot.media.download.")
                  written (bs/write-to-file! body path :size size)]

              (when (not= written size)
                (ex/raise :type :internal
                          :code :mismatch-write-size
                          :hint "unexpected state: unable to write to file"))

              {:filename "tempfile"
               :size size
               :path path
               :mtype mtype}))]

    (p/let [content (download-media url)]
      (->> (merge params {:content content :name (or name (:filename content))})


@ -14,7 +14,6 @@
   [app.common.spec :as us]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.storage.db :as sdb]
   [app.storage.fs :as sfs]
   [app.storage.impl :as impl]
   [app.storage.s3 :as ss3]

@ -32,14 +31,12 @@

(s/def ::s3 ::ss3/backend)
(s/def ::fs ::sfs/backend)
(s/def ::db ::sdb/backend)

(s/def ::backends
  (s/map-of ::us/keyword
            (s/nilable
             (s/or :s3 ::ss3/backend
                   :fs ::sfs/backend
                   :db ::sdb/backend))))
                   :fs ::sfs/backend))))

(defmethod ig/pre-init-spec ::storage [_]
  (s/keys :req-un [::db/pool ::wrk/executor ::backends]))

@ -84,13 +81,14 @@
                 "  and backend = ?"
                 "  and deleted_at is null"
                 " limit 1")]
    (db/exec-one! conn [sql hash bucket (name backend)])))
    (some-> (db/exec-one! conn [sql hash bucket (name backend)])
            (update :metadata db/decode-transit-pgobject))))

(defn- create-database-object
  [{:keys [conn backend executor]} {:keys [::content ::expired-at ::touched-at] :as params}]
  (us/assert ::storage-content content)
  (px/with-dispatch executor
    (let [id (uuid/random)
    (let [id (uuid/next)

          mdata (cond-> (get-metadata params)
                  (satisfies? impl/IContentHash content)

@ -106,13 +104,15 @@
                   (get-database-object-by-hash conn backend (:bucket mdata) (:hash mdata)))

          result (or result
                     (db/insert! conn :storage-object
                                 {:id id
                                  :size (count content)
                                  :backend (name backend)
                                  :metadata (db/tjson mdata)
                                  :deleted-at expired-at
                                  :touched-at touched-at}))]
                     (-> (db/insert! conn :storage-object
                                     {:id id
                                      :size (impl/get-size content)
                                      :backend (name backend)
                                      :metadata (db/tjson mdata)
                                      :deleted-at expired-at
                                      :touched-at touched-at})
                         (update :metadata db/decode-transit-pgobject)
                         (update :metadata assoc ::created? true)))]

      (StorageObject. (:id result)
                      (:size result)

@ -120,7 +120,7 @@
                      (:deleted-at result)
                      (:touched-at result)
                      backend
                      mdata
                      (:metadata result)
                      nil))))

(def ^:private sql:retrieve-storage-object

@ -173,9 +173,10 @@
  (p/let [storage (assoc storage :conn (or conn pool))
          object  (create-database-object storage params)]

    ;; Finally, store the data on the underlying storage subsystem.
    (-> (impl/resolve-backend storage backend)
        (impl/put-object object content))
    (when (::created? (meta object))
      ;; Finally, store the data on the underlying storage subsystem.
      (-> (impl/resolve-backend storage backend)
          (impl/put-object object content)))

    object))


@ -259,7 +260,8 @@
;; A task responsible for permanently deleting storage files already
;; marked as deleted. The storage objects are practically never marked
;; for deletion directly by an api call. The touched-gc is responsible
;; for collecting the usage of the object and marking it as deleted.
;; for collecting the usage of the object and marking it as deleted. Only
;; the TMP files are created with an expiration date in the future.

(declare sql:retrieve-deleted-objects-chunk)


@ -1,67 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.storage.db
  (:require
   [app.common.spec :as us]
   [app.db :as db]
   [app.storage.impl :as impl]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]
   [promesa.exec :as px])
  (:import
   java.io.ByteArrayInputStream))

;; --- BACKEND INIT

(defmethod ig/pre-init-spec ::backend [_]
  (s/keys :opt-un [::db/pool]))

(defmethod ig/init-key ::backend
  [_ cfg]
  (assoc cfg :type :db))

(s/def ::type ::us/keyword)
(s/def ::backend
  (s/keys :req-un [::type ::db/pool]))

;; --- API IMPL

(defmethod impl/put-object :db
  [{:keys [conn executor] :as storage} {:keys [id] :as object} content]
  (px/with-dispatch executor
    (let [data (impl/slurp-bytes content)]
      (db/insert! conn :storage-data {:id id :data data})
      object)))

(defmethod impl/get-object-data :db
  [{:keys [conn executor] :as backend} {:keys [id] :as object}]
  (px/with-dispatch executor
    (let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
      (ByteArrayInputStream. (:data result)))))

(defmethod impl/get-object-bytes :db
  [{:keys [conn executor] :as backend} {:keys [id] :as object}]
  (px/with-dispatch executor
    (let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
      (:data result))))

(defmethod impl/get-object-url :db
  [_ _]
  (throw (UnsupportedOperationException. "not supported")))

(defmethod impl/del-object :db
  [_ _]
  ;; NOOP: deleting the row already deletes the file data from
  ;; the database.
  nil)

(defmethod impl/del-objects-in-bulk :db
  [_ _]
  ;; NOOP: deleting the row already deletes the file data from
  ;; the database.
  nil)


@ -10,11 +10,13 @@
   [app.common.spec :as us]
   [app.common.uri :as u]
   [app.storage.impl :as impl]
   [app.util.bytes :as bs]
   [clojure.java.io :as io]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [datoteka.core :as fs]
   [integrant.core :as ig]
   [promesa.core :as p]
   [promesa.exec :as px])
  (:import
   java.io.InputStream

@ -72,9 +74,10 @@
      (io/input-stream full))))

(defmethod impl/get-object-bytes :fs
  [{:keys [executor] :as backend} object]
  (px/with-dispatch executor
    (fs/slurp-bytes (impl/get-object-data backend object))))
  [backend object]
  (p/let [input (impl/get-object-data backend object)]
    (ex/with-always (bs/close! input)
      (bs/read-as-bytes input))))

(defmethod impl/get-object-url :fs
  [{:keys [uri executor] :as backend} {:keys [id] :as object} _]


@ -9,18 +9,15 @@
  (:require
   [app.common.data.macros :as dm]
   [app.common.exceptions :as ex]
   [app.common.uuid :as uuid]
   [app.util.bytes :as bs]
   [buddy.core.codecs :as bc]
   [buddy.core.hash :as bh]
   [clojure.java.io :as io])
  (:import
   java.nio.ByteBuffer
   java.util.UUID
   java.io.ByteArrayInputStream
   java.io.InputStream
   java.nio.file.Files
   org.apache.commons.io.input.BoundedInputStream))
   java.nio.file.Path
   java.util.UUID))

;; --- API Definition


@ -95,23 +92,23 @@
(defn coerce-id
  [id]
  (cond
    (string? id) (uuid/uuid id)
    (uuid? id)   id
    :else        (ex/raise :type :internal
                           :code :invalid-id-type
                           :hint "id should be string or uuid")))
    (string? id) (parse-uuid id)
    (uuid? id)   id
    :else        (ex/raise :type :internal
                           :code :invalid-id-type
                           :hint "id should be string or uuid")))

(defprotocol IContentObject
  (size [_] "get object size"))
  (get-size [_] "get object size"))

(defprotocol IContentHash
  (get-hash [_] "get precalculated hash"))

(defn- make-content
  [^InputStream is ^long size]
(defn- path->content
  [^Path path ^long size]
  (reify
    IContentObject
    (size [_] size)
    (get-size [_] size)

    io/IOFactory
    (make-reader [this opts]

@ -119,47 +116,53 @@
    (make-writer [_ _]
      (throw (UnsupportedOperationException. "not implemented")))
    (make-input-stream [_ _]
      (doto (BoundedInputStream. is size)
        (.setPropagateClose false)))
      (-> (io/input-stream path)
          (bs/bounded-input-stream size)))
    (make-output-stream [_ _]
      (throw (UnsupportedOperationException. "not implemented")))))

(defn- bytes->content
  [^bytes data ^long size]
  (reify
    IContentObject
    (get-size [_] size)

    io/IOFactory
    (make-reader [this opts]
      (io/make-reader this opts))
    (make-writer [_ _]
      (throw (UnsupportedOperationException. "not implemented")))

    clojure.lang.Counted
    (count [_] size)

    java.lang.AutoCloseable
    (close [_]
      (.close is))))
    (make-input-stream [_ _]
      (-> (bs/bytes-input-stream data)
          (bs/bounded-input-stream size)))
    (make-output-stream [_ _]
      (throw (UnsupportedOperationException. "not implemented")))))

(defn content
  ([data] (content data nil))
  ([data size]
   (cond
     (instance? java.nio.file.Path data)
     (make-content (io/input-stream data)
                   (Files/size data))
     (path->content data (or size (Files/size data)))

     (instance? java.io.File data)
     (content (.toPath ^java.io.File data) nil)
     (content (.toPath ^java.io.File data) size)

     (instance? String data)
     (let [data (.getBytes data "UTF-8")
           bais (ByteArrayInputStream. ^bytes data)]
       (make-content bais (alength data)))
     (let [data (.getBytes data "UTF-8")]
       (bytes->content data (alength data)))

     (bytes? data)
     (let [size (alength ^bytes data)
           bais (ByteArrayInputStream. ^bytes data)]
       (make-content bais size))
     (bytes->content data (or size (alength ^bytes data)))

     (instance? InputStream data)
     (do
       (when-not size
         (throw (UnsupportedOperationException. "size should be provided on InputStream")))
       (make-content data size))
     ;; (instance? InputStream data)
     ;; (do
     ;;   (when-not size
     ;;     (throw (UnsupportedOperationException. "size should be provided on InputStream")))
     ;;   (make-content data size))

     :else
     (throw (UnsupportedOperationException. "type not supported")))))
     (throw (IllegalArgumentException. "invalid argument type")))))
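A brief sketch of the input types the content constructor accepts after this change (the literals are illustrative; the InputStream branch is now commented out above):

(comment
  (content "hello world")                       ;; string -> its UTF-8 bytes
  (content (byte-array [1 2 3]))                ;; byte array, size inferred
  (content (java.io.File. "/tmp/sample.bin")))  ;; file/path, size taken from the filesystem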

(defn wrap-with-hash
  [content ^String hash]

@ -171,7 +174,7 @@

  (reify
    IContentObject
    (size [_] (size content))
    (get-size [_] (get-size content))

    IContentHash
    (get-hash [_] hash)

@ -184,43 +187,17 @@
    (make-input-stream [_ opts]
      (io/make-input-stream content opts))
    (make-output-stream [_ opts]
      (io/make-output-stream content opts))

    clojure.lang.Counted
    (count [_] (count content))

    java.lang.AutoCloseable
    (close [_]
      (.close ^java.lang.AutoCloseable content))))
      (io/make-output-stream content opts))))

(defn content?
  [v]
  (satisfies? IContentObject v))

(defn slurp-bytes
  [content]
  (with-open [input  (io/input-stream content)
              output (java.io.ByteArrayOutputStream. (count content))]
    (io/copy input output)
    (.toByteArray output)))

(defn calculate-hash
  [path-or-stream]
  (let [result (cond
                 (instance? InputStream path-or-stream)
                 (let [result (-> (bh/blake2b-256 path-or-stream)
                                  (bc/bytes->hex))]
                   (.reset path-or-stream)
                   result)

                 (string? path-or-stream)
                 (-> (bh/blake2b-256 path-or-stream)
                     (bc/bytes->hex))

                 :else
                 (with-open [is (io/input-stream path-or-stream)]
                   (-> (bh/blake2b-256 is)
                       (bc/bytes->hex))))]
  [resource]
  (let [result (with-open [input (io/input-stream resource)]
                 (-> (bh/blake2b-256 input)
                     (bc/bytes->hex)))]
    (str "blake2b:" result)))

(defn resolve-backend


@ -12,14 +12,17 @@
   [app.common.spec :as us]
   [app.common.uri :as u]
   [app.storage.impl :as impl]
   [app.storage.tmp :as tmp]
   [app.util.time :as dt]
   [app.worker :as wrk]
   [clojure.java.io :as io]
   [clojure.spec.alpha :as s]
   [datoteka.core :as fs]
   [integrant.core :as ig]
   [promesa.core :as p]
   [promesa.exec :as px])
  (:import
   java.io.FilterInputStream
   java.io.InputStream
   java.nio.ByteBuffer
   java.time.Duration

@ -30,6 +33,7 @@
   org.reactivestreams.Subscription
   software.amazon.awssdk.core.ResponseBytes
   software.amazon.awssdk.core.async.AsyncRequestBody
   software.amazon.awssdk.core.async.AsyncResponseTransformer
   software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
   software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
   software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient

@ -107,7 +111,16 @@

(defmethod impl/get-object-data :s3
  [backend object]
  (get-object-data backend object))
  (letfn [(no-such-key? [cause]
            (instance? software.amazon.awssdk.services.s3.model.NoSuchKeyException cause))
          (handle-not-found [cause]
            (ex/raise :type :not-found
                      :code :object-not-found
                      :hint "s3 object not found"
                      :cause cause))]

    (-> (get-object-data backend object)
        (p/catch no-such-key? handle-not-found))))

(defmethod impl/get-object-bytes :s3
  [backend object]

@ -204,7 +217,7 @@
  (reify
    AsyncRequestBody
    (contentLength [_]
      (Optional/of (long (count content))))
      (Optional/of (long (impl/get-size content))))

    (^void subscribe [_ ^Subscriber s]
      (let [thread (Thread. #(writer-fn s))]

@ -216,7 +229,6 @@
        (cancel [_]
          (.interrupt thread)
          (.release sem 1))

        (request [_ n]
          (.release sem (int n))))))))))

@ -238,16 +250,31 @@
        ^AsyncRequestBody content))))

(defn get-object-data
  [{:keys [client bucket prefix]} {:keys [id]}]
  (p/let [gor (.. (GetObjectRequest/builder)
                  (bucket bucket)
                  (key (str prefix (impl/id->path id)))
                  (build))
          obj (.getObject ^S3AsyncClient client ^GetObjectRequest gor)
          ;; rsp (.response ^ResponseInputStream obj)
          ;; len (.contentLength ^GetObjectResponse rsp)
          ]
    (io/input-stream obj)))
  [{:keys [client bucket prefix]} {:keys [id size]}]
  (let [gor (.. (GetObjectRequest/builder)
                (bucket bucket)
                (key (str prefix (impl/id->path id)))
                (build))]

    ;; If the file size is greater than 2MiB, stream the content to
    ;; the filesystem and then read it with a buffered input stream;
    ;; if not, read the content into memory using byte arrays.
    (if (> size (* 1024 1024 2))
      (p/let [path (tmp/tempfile :prefix "penpot.storage.s3.")
              rxf  (AsyncResponseTransformer/toFile path)
              _    (.getObject ^S3AsyncClient client
                               ^GetObjectRequest gor
                               ^AsyncResponseTransformer rxf)]
        (proxy [FilterInputStream] [(io/input-stream path)]
          (close []
            (fs/delete path)
            (proxy-super close))))

      (p/let [rxf (AsyncResponseTransformer/toBytes)
              obj (.getObject ^S3AsyncClient client
                              ^GetObjectRequest gor
                              ^AsyncResponseTransformer rxf)]
        (.asInputStream ^ResponseBytes obj)))))

(defn get-object-bytes
  [{:keys [client bucket prefix]} {:keys [id]}]

@ -255,7 +282,10 @@
                  (bucket bucket)
                  (key (str prefix (impl/id->path id)))
                  (build))
          obj (.getObjectAsBytes ^S3AsyncClient client ^GetObjectRequest gor)]
          rxf (AsyncResponseTransformer/toBytes)
          obj (.getObjectAsBytes ^S3AsyncClient client
                                 ^GetObjectRequest gor
                                 ^AsyncResponseTransformer rxf)]
    (.asByteArray ^ResponseBytes obj)))

(def default-max-age


backend/src/app/storage/tmp.clj (new file, 83 lines)
@ -0,0 +1,83 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.storage.tmp
  "Temporary files service. Every created file is scheduled for
  deletion once its min-age elapses (30 minutes by default). This is
  best effort: if this process fails, the operating system cleanup
  task should be responsible for permanently deleting these files
  (look at systemd-tmpfiles)."
  (:require
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.util.time :as dt]
   [app.worker :as wrk]
   [clojure.core.async :as a]
   [clojure.spec.alpha :as s]
   [datoteka.core :as fs]
   [integrant.core :as ig]
   [promesa.exec :as px]))

(declare remove-temp-file)
(defonce queue (a/chan 128))

(s/def ::min-age ::dt/duration)

(defmethod ig/pre-init-spec ::cleaner [_]
  (s/keys :req-un [::min-age ::wrk/scheduler ::wrk/executor]))

(defmethod ig/prep-key ::cleaner
  [_ cfg]
  (merge {:min-age (dt/duration {:minutes 30})}
         (d/without-nils cfg)))

(defmethod ig/init-key ::cleaner
  [_ {:keys [scheduler executor min-age] :as cfg}]
  (l/info :hint "starting tempfile cleaner service")
  (let [cch (a/chan)]
    (a/go-loop []
      (let [[path port] (a/alts! [queue cch])]
        (when (not= port cch)
          (l/trace :hint "schedule tempfile deletion" :path path
                   :expires-at (dt/plus (dt/now) min-age))
          (px/schedule! scheduler
                        (inst-ms min-age)
                        (partial remove-temp-file executor path))
          (recur))))
    cch))

(defmethod ig/halt-key! ::cleaner
  [_ close-ch]
  (l/info :hint "stopping tempfile cleaner service")
  (some-> close-ch a/close!))

(defn- remove-temp-file
  "Permanently delete a tempfile."
  [executor path]
  (px/with-dispatch executor
    (l/trace :hint "permanently delete tempfile" :path path)
    (when (fs/exists? path)
      (fs/delete path))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn tempfile
  "Returns a tmpfile candidate (without creating it)."
  [& {:keys [suffix prefix]
      :or {prefix "penpot." suffix ".tmp"}}]
  (let [candidate (fs/tempfile :suffix suffix :prefix prefix)]
    (a/offer! queue candidate)
    candidate))

(defn create-tempfile
  [& {:keys [suffix prefix]
      :or {prefix "penpot." suffix ".tmp"}}]
  (let [path (fs/create-tempfile :suffix suffix :prefix prefix)]
    (a/offer! queue path)
    path))
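A short usage sketch of the two entry points above:

(comment
  ;; Reserve a candidate path without creating the file; it is queued
  ;; for best-effort deletion once min-age elapses:
  (def path (tempfile :prefix "penpot.export." :suffix ".tmp"))

  ;; Or create the file on disk immediately:
  (def path' (create-tempfile :prefix "penpot.media." :suffix ".bin")))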

@ -82,7 +82,7 @@
             :kf first
             :initk (dt/now)))))

(defn- collect-used-media
(defn collect-used-media
  [data]
  (let [xform (comp
               (map :objects)


backend/src/app/util/bytes.clj (new file, 110 lines)
@ -0,0 +1,110 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.util.bytes
  "Bytes & Byte Streams helpers"
  (:require
   [clojure.java.io :as io]
   [datoteka.core :as fs]
   [yetti.adapter :as yt])
  (:import
   com.github.luben.zstd.ZstdInputStream
   com.github.luben.zstd.ZstdOutputStream
   java.io.ByteArrayInputStream
   java.io.ByteArrayOutputStream
   java.io.DataInputStream
   java.io.DataOutputStream
   java.io.OutputStream
   java.io.InputStream
   java.lang.AutoCloseable
   org.apache.commons.io.IOUtils
   org.apache.commons.io.input.BoundedInputStream))

(set! *warn-on-reflection* true)

(def ^:const default-buffer-size
  (:xnio/buffer-size yt/defaults))

(defn copy!
  [src dst & {:keys [offset size buffer-size]
              :or {offset 0 buffer-size default-buffer-size}}]
  (let [^bytes buff (byte-array buffer-size)]
    (if size
      (IOUtils/copyLarge ^InputStream src ^OutputStream dst (long offset) (long size) buff)
      (IOUtils/copyLarge ^InputStream src ^OutputStream dst buff))))

(defn write-to-file!
  [src dst & {:keys [size]}]
  (with-open [^OutputStream output (io/output-stream dst)]
    (cond
      (bytes? src)
      (if size
        (with-open [^InputStream input (ByteArrayInputStream. ^bytes src)]
          (with-open [^InputStream input (BoundedInputStream. input (or size (alength ^bytes src)))]
            (copy! input output :size size)))

        (do
          (IOUtils/writeChunked ^bytes src output)
          (.flush ^OutputStream output)
          (alength ^bytes src)))

      (instance? InputStream src)
      (copy! src output :size size)

      :else
      (throw (IllegalArgumentException. "invalid arguments")))))

(defn read-as-bytes
  "Read input stream as byte array."
  [input & {:keys [size]}]
  (cond
    (instance? InputStream input)
    (with-open [output (ByteArrayOutputStream. (or size (.available ^InputStream input)))]
      (copy! input output :size size)
      (.toByteArray output))

    (fs/path? input)
    (with-open [input  (io/input-stream input)
                output (ByteArrayOutputStream. (or size (.available input)))]
      (copy! input output :size size)
      (.toByteArray output))

    :else
    (throw (IllegalArgumentException. "invalid arguments"))))

(defn bytes-input-stream
  "Creates an instance of ByteArrayInputStream."
  [^bytes data]
  (ByteArrayInputStream. data))

(defn bounded-input-stream
  [input size & {:keys [close?] :or {close? true}}]
  (doto (BoundedInputStream. ^InputStream input ^long size)
    (.setPropagateClose close?)))

(defn zstd-input-stream
  ^InputStream
  [input]
  (ZstdInputStream. ^InputStream input))

(defn zstd-output-stream
  ^OutputStream
  [output & {:keys [level] :or {level 0}}]
  (ZstdOutputStream. ^OutputStream output (int level)))

(defn data-input-stream
  ^DataInputStream
  [input]
  (DataInputStream. ^InputStream input))

(defn data-output-stream
  ^DataOutputStream
  [output]
  (DataOutputStream. ^OutputStream output))

(defn close!
  [^AutoCloseable stream]
  (.close stream))
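A sketch of how these helpers compose into the layered streams used by the binfile code above (some-file is a placeholder path):

(comment
  ;; Write: file output -> zstd compression -> typed data writes
  (with-open [out (-> (io/output-stream some-file)
                      (zstd-output-stream :level 12)
                      (data-output-stream))]
    (.writeLong out 42))

  ;; Read back with the mirrored layering:
  (with-open [in (-> (io/input-stream some-file)
                     (zstd-input-stream)
                     (data-input-stream))]
    (.readLong in)))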

@ -11,6 +11,7 @@
   [app.http :as http]
   [app.storage :as sto]
   [app.test-helpers :as th]
   [app.util.bytes :as bs]
   [clojure.java.io :as io]
   [clojure.test :as t]
   [datoteka.core :as fs]))

@ -25,7 +26,8 @@
        font-id (uuid/custom 10 1)

        ttfdata (-> (io/resource "app/test_files/font-1.ttf")
                    (fs/slurp-bytes))
                    io/input-stream
                    bs/read-as-bytes)

        params {::th/type :create-font-variant
                :profile-id (:id prof)

@ -60,7 +62,8 @@
        font-id (uuid/custom 10 1)

        data (-> (io/resource "app/test_files/font-1.woff")
                 (fs/slurp-bytes))
                 io/input-stream
                 bs/read-as-bytes)

        params {::th/type :create-font-variant
                :profile-id (:id prof)


@ -12,6 +12,7 @@
   [app.storage :as sto]
   [app.test-helpers :as th]
   [app.util.time :as dt]
   [app.util.bytes :as bs]
   [clojure.java.io :as io]
   [clojure.test :as t]
   [cuerdas.core :as str]

@ -27,11 +28,11 @@
  "Given storage map, returns a storage configured with the appropriate
  backend for assets."
  ([storage]
   (assoc storage :backend :tmp))
   (assoc storage :backend :assets-fs))
  ([storage conn]
   (-> storage
       (assoc :conn conn)
       (assoc :backend :tmp))))
       (assoc :backend :assets-fs))))

(t/deftest put-and-retrieve-object
  (let [storage (-> (:app.storage/storage th/*system*)

@ -43,7 +44,7 @@
    (t/is (sto/storage-object? object))
    (t/is (fs/path? @(sto/get-object-path storage object)))
    (t/is (nil? (:expired-at object)))
    (t/is (= :tmp (:backend object)))
    (t/is (= :assets-fs (:backend object)))
    (t/is (= "data" (:other (meta object))))
    (t/is (= "text/plain" (:content-type (meta object))))
    (t/is (= "content" (slurp @(sto/get-object-data storage object))))

@ -197,7 +198,8 @@
        :is-shared false})

        ttfdata (-> (io/resource "app/test_files/font-1.ttf")
                    (fs/slurp-bytes))
                    io/input-stream
                    bs/read-as-bytes)

        mfile {:filename "sample.jpg"
               :path (th/tempfile "app/test_files/sample.jpg")


@ -50,6 +50,12 @@
  [& exprs]
  `(try* (^:once fn* [] ~@exprs) identity))

(defmacro with-always
  "A helper that always evaluates an expression, regardless of whether
  the body raises an exception or not."
  [always-expr & body]
  `(try ~@body (finally ~always-expr)))
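Usage mirrors the fs storage backend change above: the cleanup expression runs whether or not the body throws.

(comment
  ;; Always close the input stream, even if reading fails:
  (ex/with-always (bs/close! input)
    (bs/read-as-bytes input)))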

(defn ex-info?
  [v]
  (instance? #?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo) v))


@ -7,21 +7,16 @@
(ns app.common.pprint
  (:refer-clojure :exclude [prn])
  (:require
   [cuerdas.core :as str]
   [fipp.edn :as fpp]))

(defn pprint-str
  [expr]
  (binding [*print-level* 8
            *print-length* 25]
  [expr & {:keys [width level length]
           :or {width 110 level 8 length 25}}]
  (binding [*print-level* level
            *print-length* length]
    (with-out-str
      (fpp/pprint expr {:width 110}))))
      (fpp/pprint expr {:width width}))))

(defn pprint
  ([expr]
   (println (pprint-str expr)))
  ([label expr]
   (println (str/concat "============ " label "============"))
   (pprint expr)))

  [expr & {:as opts}]
  (println (pprint-str expr opts)))
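A usage sketch of the reworked arity (the defaults follow the :or map above; data is a placeholder):

(comment
  (pprint data)                        ;; width 110, level 8, length 25
  (pprint data :level 4 :length 10))   ;; tighter output for large structures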


@ -48,3 +48,6 @@

#?(:clj
   (dm/export impl/get-word-high))

#?(:clj
   (dm/export impl/get-word-low))