0
Fork 0
mirror of https://github.com/penpot/penpot.git synced 2025-04-05 03:21:26 -05:00

♻️ Refactor backend to be more async friendly

This commit is contained in:
Andrey Antukh 2022-02-28 17:15:58 +01:00 committed by Alonso Torres
parent 087d896569
commit 9e4a50fb15
49 changed files with 1503 additions and 1378 deletions

View file

@ -19,8 +19,8 @@
io.lettuce/lettuce-core {:mvn/version "6.1.6.RELEASE"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
funcool/yetti {:git/tag "v4.0" :git/sha "59ed2a7"
:git/url "https://github.com/funcool/yetti.git"
funcool/yetti {:git/tag "v5.0" :git/sha "f7d61e2"
:git/url "https://github.com/funcool/yetti"
:exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc {:mvn/version "1.2.772"}

View file

@ -1,129 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL
(ns app.cli.migrate-media
(:require
[app.common.logging :as l]
[app.common.media :as cm]
[app.config :as cf]
[app.db :as db]
[app.main :as main]
[app.storage :as sto]
[cuerdas.core :as str]
[datoteka.core :as fs]
[integrant.core :as ig]))
(declare migrate-profiles)
(declare migrate-teams)
(declare migrate-file-media)

(defn run-in-system
  "Execute all media migrations within a single atomic transaction
  taken from the system's database pool. Returns the system map."
  [system]
  (db/with-atomic [conn (:app.db/pool system)]
    (let [system' (assoc system ::conn conn)]
      ;; Run every migration against the connection-enriched system.
      (doseq [migrate! [migrate-profiles migrate-teams migrate-file-media]]
        (migrate! system'))
      system)))
(defn run
  "CLI entry point: initializes the minimal integrant sub-system needed
  for the media migration, runs it, and halts the system afterwards.
  Any exception is logged rather than propagated."
  []
  (let [config (select-keys main/system-config
                            [:app.db/pool
                             :app.migrations/migrations
                             :app.metrics/metrics
                             :app.storage.s3/backend
                             :app.storage.db/backend
                             :app.storage.fs/backend
                             :app.storage/storage])]
    (ig/load-namespaces config)
    (try
      (-> config
          ig/prep
          ig/init
          run-in-system
          ig/halt!)
      (catch Exception e
        (l/error :hint "unhandled exception" :cause e)))))
;; --- IMPL
(defn migrate-profiles
  "Move legacy fs-stored profile photos into the storage abstraction,
  updating each profile row with the new `photo-id`."
  [{:keys [::conn] :as system}]
  (let [base     (fs/path (cf/get :storage-fs-old-directory))
        storage  (assoc (:app.storage/storage system) :conn conn)
        ;; Only profiles that actually have a photo are migrated.
        profiles (->> (db/exec! conn ["select * from profile"])
                      (remove #(str/empty? (:photo %))))]
    (doseq [profile profiles]
      (let [path  (fs/path (:photo profile))
            full  (fs/normalize (fs/join base path))
            mtype (cm/format->mtype (keyword (fs/ext path)))
            obj   (sto/put-object storage {:content (sto/content full)
                                           :content-type mtype})]
        (db/update! conn :profile
                    {:photo-id (:id obj)}
                    {:id (:id profile)})))))
(defn migrate-teams
  "Move legacy fs-stored team photos into the storage abstraction,
  updating each team row with the new `photo-id`."
  [{:keys [::conn] :as system}]
  (let [base    (fs/path (cf/get :storage-fs-old-directory))
        storage (assoc (:app.storage/storage system) :conn conn)
        ;; Only teams that actually have a photo are migrated.
        teams   (->> (db/exec! conn ["select * from team"])
                     (remove #(str/empty? (:photo %))))]
    (doseq [team teams]
      (let [path  (fs/path (:photo team))
            full  (fs/normalize (fs/join base path))
            mtype (cm/format->mtype (keyword (fs/ext path)))
            obj   (sto/put-object storage {:content (sto/content full)
                                           :content-type mtype})]
        (db/update! conn :team
                    {:photo-id (:id obj)}
                    {:id (:id team)})))))
(defn migrate-file-media
  "Migrate file media objects and their thumbnails from the legacy
  fs storage into the storage abstraction.

  Every `file_media_object` row joined with its thumbnail row has both
  the image and the thumbnail re-inserted via `sto/put-object`, after
  which the row is updated with the resulting `media-id` and
  `thumbnail-id`."
  [{:keys [::conn] :as system}]
  (letfn [(retrieve-media-objects [conn]
            ;; Join each media object with its thumbnail so both paths
            ;; are available in a single row.
            (->> (db/exec! conn ["select fmo.id, fmo.path, fth.path as thumbnail_path
from file_media_object as fmo
join file_media_thumbnail as fth on (fth.media_object_id = fmo.id)"])
                 (seq)))]
    (let [base (fs/path (cf/get :storage-fs-old-directory))
          storage (-> (:app.storage/storage system)
                      (assoc :conn conn))]
      (doseq [mobj (retrieve-media-objects conn)]
        (let [img-path (fs/path (:path mobj))
              thm-path (fs/path (:thumbnail-path mobj))
              ;; Resolve both relative paths against the legacy base dir.
              img-path (-> (fs/join base img-path)
                           (fs/normalize))
              thm-path (-> (fs/join base thm-path)
                           (fs/normalize))
              img-ext (fs/ext img-path)
              thm-ext (fs/ext thm-path)
              ;; Media type is derived from the file extension.
              img-mtype (cm/format->mtype (keyword img-ext))
              thm-mtype (cm/format->mtype (keyword thm-ext))
              img-obj (sto/put-object storage {:content (sto/content img-path)
                                               :content-type img-mtype})
              thm-obj (sto/put-object storage {:content (sto/content thm-path)
                                               :content-type thm-mtype})]
          (db/update! conn :file-media-object
                      {:media-id (:id img-obj)
                       :thumbnail-id (:id thm-obj)}
                      {:id (:id mobj)}))))))

View file

@ -47,7 +47,7 @@
:database-username "penpot"
:database-password "penpot"
:default-blob-version 3
:default-blob-version 4
:loggers-zmq-uri "tcp://localhost:45556"
:file-change-snapshot-every 5
@ -65,8 +65,8 @@
:assets-path "/internal/assets/"
:rlimit-password 10
:rlimit-image 2
:rlimit-font 5
:rlimit-image 10
:rlimit-font 10
:smtp-default-reply-to "Penpot <no-reply@example.com>"
:smtp-default-from "Penpot <no-reply@example.com>"

View file

@ -247,7 +247,7 @@
([ds table params where opts]
(exec-one! ds
(sql/update table params where opts)
(assoc opts :return-keys true))))
(merge {:return-keys true} opts))))
(defn delete!
([ds table params] (delete! ds table params nil))

View file

@ -29,6 +29,7 @@
;; HTTP SERVER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::session map?)
(s/def ::handler fn?)
(s/def ::router some?)
(s/def ::port ::us/integer)
@ -47,7 +48,7 @@
(d/without-nils cfg)))
(defmethod ig/pre-init-spec ::server [_]
(s/keys :req-un [::port ::host ::name ::min-threads ::max-threads]
(s/keys :req-un [::port ::host ::name ::min-threads ::max-threads ::session]
:opt-un [::mtx/metrics ::router ::handler]))
(defn- instrument-metrics
@ -59,37 +60,39 @@
server))
(defmethod ig/init-key ::server
[_ {:keys [handler router port name metrics host] :as opts}]
[_ {:keys [handler router port name metrics host] :as cfg}]
(l/info :hint "starting http server"
:port port :host host :name name
:min-threads (:min-threads opts)
:max-threads (:max-threads opts))
:min-threads (:min-threads cfg)
:max-threads (:max-threads cfg))
(let [options {:http/port port
:http/host host
:thread-pool/max-threads (:max-threads opts)
:thread-pool/min-threads (:min-threads opts)
:thread-pool/max-threads (:max-threads cfg)
:thread-pool/min-threads (:min-threads cfg)
:ring/async true}
handler (cond
(fn? handler) handler
(some? router) (wrap-router router)
(some? router) (wrap-router cfg router)
:else (ex/raise :type :internal
:code :invalid-argument
:hint "Missing `handler` or `router` option."))
server (-> (yt/server handler (d/without-nils options))
(cond-> metrics (instrument-metrics metrics)))]
(assoc opts :server (yt/start! server))))
(assoc cfg :server (yt/start! server))))
(defmethod ig/halt-key! ::server
[_ {:keys [server name port] :as opts}]
[_ {:keys [server name port] :as cfg}]
(l/info :msg "stoping http server" :name name :port port)
(yt/stop! server))
(defn- wrap-router
[router]
[{:keys [session] :as cfg} router]
(let [default (rr/routes
(rr/create-resource-handler {:path "/"})
(rr/create-default-handler))
options {:middleware [middleware/wrap-server-timing]
options {:middleware [[middleware/wrap-server-timing]
[middleware/cookies]
[(:middleware session)]]
:inject-match? false
:inject-router? false}
handler (rr/ring-handler router default options)]
@ -106,28 +109,25 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::rpc map?)
(s/def ::session map?)
(s/def ::oauth map?)
(s/def ::storage map?)
(s/def ::assets map?)
(s/def ::feedback fn?)
(s/def ::ws fn?)
(s/def ::audit-http-handler fn?)
(s/def ::audit-handler fn?)
(s/def ::debug map?)
(s/def ::awsns-handler fn?)
(defmethod ig/pre-init-spec ::router [_]
(s/keys :req-un [::rpc ::session ::mtx/metrics ::ws
::oauth ::storage ::assets ::feedback
::debug ::audit-http-handler]))
(s/keys :req-un [::rpc ::mtx/metrics ::ws ::oauth ::storage ::assets
::feedback ::awsns-handler ::debug ::audit-handler]))
(defmethod ig/init-key ::router
[_ {:keys [ws session rpc oauth metrics assets feedback debug] :as cfg}]
(rr/router
[["/metrics" {:get (:handler metrics)}]
["/assets" {:middleware [[middleware/format-response-body]
[middleware/errors errors/handle]
[middleware/cookies]
(:middleware session)]}
[middleware/errors errors/handle]]}
["/by-id/:id" {:get (:objects-handler assets)}]
["/by-file-media-id/:id" {:get (:file-objects-handler assets)}]
["/by-file-media-id/:id/thumbnail" {:get (:file-thumbnails-handler assets)}]]
@ -136,9 +136,7 @@
[middleware/params]
[middleware/keyword-params]
[middleware/format-response-body]
[middleware/errors errors/handle]
[middleware/cookies]
[(:middleware session)]]}
[middleware/errors errors/handle]]}
["" {:get (:index debug)}]
["/error-by-id/:id" {:get (:retrieve-error debug)}]
["/error/:id" {:get (:retrieve-error debug)}]
@ -148,15 +146,13 @@
["/file/changes" {:get (:retrieve-file-changes debug)}]]
["/webhooks"
["/sns" {:post (:sns-webhook cfg)}]]
["/sns" {:post (:awsns-handler cfg)}]]
["/ws/notifications"
{:middleware [[middleware/params]
[middleware/keyword-params]
[middleware/format-response-body]
[middleware/errors errors/handle]
[middleware/cookies]
[(:middleware session)]]
[middleware/errors errors/handle]]
:get ws}]
["/api" {:middleware [[middleware/cors]
@ -165,8 +161,7 @@
[middleware/keyword-params]
[middleware/format-response-body]
[middleware/parse-request-body]
[middleware/errors errors/handle]
[middleware/cookies]]}
[middleware/errors errors/handle]]}
["/health" {:get (:health-check debug)}]
["/_doc" {:get (doc/handler rpc)}]
@ -175,10 +170,9 @@
["/auth/oauth/:provider" {:post (:handler oauth)}]
["/auth/oauth/:provider/callback" {:get (:callback-handler oauth)}]
["/audit/events" {:middleware [(:middleware session)]
:post (:audit-http-handler cfg)}]
["/audit/events" {:post (:audit-handler cfg)}]
["/rpc" {:middleware [(:middleware session)]}
["/rpc"
["/query/:type" {:get (:query-handler rpc)
:post (:query-handler rpc)}]
["/mutation/:type" {:post (:mutation-handler rpc)}]]]]))

View file

@ -13,12 +13,12 @@
[app.db :as db]
[app.metrics :as mtx]
[app.storage :as sto]
[app.util.async :as async]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.core :as p]))
[promesa.core :as p]
[promesa.exec :as px]))
(def ^:private cache-max-age
(dt/duration {:hours 24}))
@ -35,27 +35,31 @@
res))
(defn- get-file-media-object
[{:keys [pool] :as storage} id]
(let [id (coerce-id id)
mobj (db/exec-one! pool ["select * from file_media_object where id=?" id])]
(when-not mobj
(ex/raise :type :not-found
:hint "object does not found"))
mobj))
[{:keys [pool executor] :as storage} id]
(px/with-dispatch executor
(let [id (coerce-id id)
mobj (db/exec-one! pool ["select * from file_media_object where id=?" id])]
(when-not mobj
(ex/raise :type :not-found
:hint "object does not found"))
mobj)))
(defn- serve-object
"Helper function that returns the appropriate response depending on
the storage object backend type."
[{:keys [storage] :as cfg} obj]
(let [mdata (meta obj)
backend (sto/resolve-backend storage (:backend obj))]
(case (:type backend)
:db
{:status 200
:headers {"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}
:body (sto/get-object-bytes storage obj)}
(p/let [body (sto/get-object-bytes storage obj)]
{:status 200
:headers {"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}
:body body})
:s3
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
(p/let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
{:status 307
:headers {"location" (str url)
"x-host" (cond-> host port (str ":" port))
@ -63,43 +67,49 @@
:body ""})
:fs
(let [purl (u/uri (:assets-path cfg))
purl (u/join purl (sto/object->relative-path obj))]
(p/let [purl (u/uri (:assets-path cfg))
purl (u/join purl (sto/object->relative-path obj))]
{:status 204
:headers {"x-accel-redirect" (:path purl)
"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}
:body ""}))))
(defn- generic-handler
[{:keys [storage executor] :as cfg} request kf]
(async/with-dispatch executor
(let [id (get-in request [:path-params :id])
mobj (get-file-media-object storage id)
obj (sto/get-object storage (kf mobj))]
(if obj
(serve-object cfg obj)
{:status 404 :body ""}))))
(defn objects-handler
"Handler that servers storage objects by id."
[{:keys [storage executor] :as cfg} request respond raise]
(-> (async/with-dispatch executor
(let [id (get-in request [:path-params :id])
id (coerce-id id)
obj (sto/get-object storage id)]
(-> (px/with-dispatch executor
(p/let [id (get-in request [:path-params :id])
id (coerce-id id)
obj (sto/get-object storage id)]
(if obj
(serve-object cfg obj)
{:status 404 :body ""})))
(p/then respond)
(p/bind p/wrap)
(p/then' respond)
(p/catch raise)))
(defn- generic-handler
"A generic handler helper/common code for file-media based handlers."
[{:keys [storage] :as cfg} request kf]
(p/let [id (get-in request [:path-params :id])
mobj (get-file-media-object storage id)
obj (sto/get-object storage (kf mobj))]
(if obj
(serve-object cfg obj)
{:status 404 :body ""})))
(defn file-objects-handler
"Handler that serves storage objects by file media id."
[cfg request respond raise]
(-> (generic-handler cfg request :media-id)
(p/then respond)
(p/catch raise)))
(defn file-thumbnails-handler
"Handler that serves storage objects by thumbnail-id and quick
fallback to file-media-id if no thumbnail is available."
[cfg request respond raise]
(-> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %)))
(p/then respond)

View file

@ -11,45 +11,53 @@
[app.common.logging :as l]
[app.db :as db]
[app.db.sql :as sql]
[app.util.http :as http]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[jsonista.core :as j]))
[jsonista.core :as j]
[promesa.exec :as px]))
(declare parse-json)
(declare handle-request)
(declare parse-notification)
(declare process-report)
(s/def ::http-client fn?)
(defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool]))
(s/keys :req-un [::db/pool ::http-client]))
(defmethod ig/init-key ::handler
[_ cfg]
[_ {:keys [executor] :as cfg}]
(fn [request respond _]
(try
(let [body (parse-json (slurp (:body request)))
mtype (get body "Type")]
(cond
(= mtype "SubscriptionConfirmation")
(let [surl (get body "SubscribeURL")
stopic (get body "TopicArn")]
(l/info :action "subscription received" :topic stopic :url surl)
(http/send! {:uri surl :method :post :timeout 10000}))
(let [data (slurp (:body request))]
(px/run! executor #(handle-request cfg data))
(respond {:status 200 :body ""}))))
(= mtype "Notification")
(when-let [message (parse-json (get body "Message"))]
(let [notification (parse-notification cfg message)]
(process-report cfg notification)))
(defn handle-request
[{:keys [http-client] :as cfg} data]
(try
(let [body (parse-json data)
mtype (get body "Type")]
(cond
(= mtype "SubscriptionConfirmation")
(let [surl (get body "SubscribeURL")
stopic (get body "TopicArn")]
(l/info :action "subscription received" :topic stopic :url surl)
(http-client {:uri surl :method :post :timeout 10000} {:sync? true}))
:else
(l/warn :hint "unexpected data received"
:report (pr-str body))))
(catch Throwable cause
(l/error :hint "unexpected exception on awsns handler"
:cause cause)))
(= mtype "Notification")
(when-let [message (parse-json (get body "Message"))]
(let [notification (parse-notification cfg message)]
(process-report cfg notification)))
(respond {:status 200 :body ""})))
:else
(l/warn :hint "unexpected data received"
:report (pr-str body))))
(catch Throwable cause
(l/error :hint "unexpected exception on awsns"
:cause cause))))
(defn- parse-bounce
[data]

View file

@ -0,0 +1,30 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL
(ns app.http.client
"Http client abstraction layer."
(:require
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[java-http-clj.core :as http]))
;; The http client component only requires a worker executor to be
;; supplied by the system configuration.
(defmethod ig/pre-init-spec :app.http/client [_]
  (s/keys :req-un [::wrk/executor]))
(defmethod ig/init-key :app.http/client
  [_ {:keys [executor] :as cfg}]
  ;; Build a single JDK-based http client backed by the worker executor.
  ;; The component value is a function, so callers just invoke it with a
  ;; request map (plus an optional options map).
  (let [client (http/build-client {:executor executor
                                   :connect-timeout 30000 ;; 30s
                                   :follow-redirects :always})]
    (with-meta
      (fn send
        ;; 1-arity: default options (async send, string response body).
        ([req] (send req {}))
        ;; 2-arity: `response-type` selects response body coercion
        ;; (:string by default); `sync?` selects a blocking send vs an
        ;; async send returning a future-like value.
        ([req {:keys [response-type sync?] :or {response-type :string sync? false}}]
         (if sync?
           (http/send req {:client client :as response-type})
           (http/send-async req {:client client :as response-type}))))
      ;; Expose the underlying client instance through metadata for
      ;; introspection/debugging.
      {::client client})))

View file

@ -14,7 +14,6 @@
[app.db :as db]
[app.rpc.mutations.files :as m.files]
[app.rpc.queries.profile :as profile]
[app.util.async :as async]
[app.util.blob :as blob]
[app.util.template :as tmpl]
[app.util.time :as dt]
@ -25,7 +24,8 @@
[datoteka.core :as fs]
[fipp.edn :as fpp]
[integrant.core :as ig]
[promesa.core :as p]))
[promesa.core :as p]
[promesa.exec :as px]))
;; (selmer.parser/cache-off!)
@ -208,8 +208,7 @@
(defn- wrap-async
[{:keys [executor] :as cfg} f]
(fn [request respond raise]
(-> (async/with-dispatch executor
(f cfg request))
(-> (px/submit! executor #(f cfg request))
(p/then respond)
(p/catch raise))))

View file

@ -15,17 +15,15 @@
[app.db :as db]
[app.loggers.audit :as audit]
[app.rpc.queries.profile :as profile]
[app.util.http :as http]
[app.util.json :as json]
[app.util.time :as dt]
[clojure.data.json :as json]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px]))
;; TODO: make it fully async (?)
(defn- build-redirect-uri
[{:keys [provider] :as cfg}]
(let [public (u/uri (:public-uri cfg))]
@ -43,27 +41,6 @@
(assoc :query query)
(str))))
(defn retrieve-access-token
[{:keys [provider] :as cfg} code]
(try
(let [params {:client_id (:client-id provider)
:client_secret (:client-secret provider)
:code code
:grant_type "authorization_code"
:redirect_uri (build-redirect-uri cfg)}
req {:method :post
:headers {"content-type" "application/x-www-form-urlencoded"}
:uri (:token-uri provider)
:body (u/map->query-string params)}
res (http/send! req)]
(when (= 200 (:status res))
(let [data (json/read-str (:body res))]
{:token (get data "access_token")
:type (get data "token_type")})))
(catch Exception e
(l/warn :hint "unexpected error on retrieve-access-token" :cause e)
nil)))
(defn- qualify-props
[provider props]
(reduce-kv (fn [result k v]
@ -71,25 +48,56 @@
{}
props))
(defn- retrieve-user-info
[{:keys [provider] :as cfg} tdata]
(try
(let [req {:uri (:user-uri provider)
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
:timeout 6000
:method :get}
res (http/send! req)]
(defn retrieve-access-token
[{:keys [provider http-client] :as cfg} code]
(let [params {:client_id (:client-id provider)
:client_secret (:client-secret provider)
:code code
:grant_type "authorization_code"
:redirect_uri (build-redirect-uri cfg)}
req {:method :post
:headers {"content-type" "application/x-www-form-urlencoded"}
:uri (:token-uri provider)
:body (u/map->query-string params)}]
(p/then
(http-client req)
(fn [{:keys [status body] :as res}]
(if (= status 200)
(let [data (json/read body)]
{:token (get data :access_token)
:type (get data :token_type)})
(ex/raise :type :internal
:code :unable-to-retrieve-token
::http-status status
::http-body body))))))
(when (= 200 (:status res))
(let [info (json/read-str (:body res) :key-fn keyword)]
{:backend (:name provider)
:email (:email info)
:fullname (:name info)
:props (->> (dissoc info :name :email)
(qualify-props provider))})))
(catch Exception e
(l/warn :hint "unexpected exception on retrieve-user-info" :cause e)
nil)))
(defn- retrieve-user-info
[{:keys [provider http-client] :as cfg} tdata]
(p/then
(http-client {:uri (:user-uri provider)
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
:timeout 6000
:method :get})
(fn [{:keys [status body] :as res}]
(if (= 200 status)
(let [info (json/read body)
info {:backend (:name provider)
:email (get info :email)
:fullname (get info :name)
:props (->> (dissoc info :name :email)
(qualify-props provider))}]
(when-not (s/valid? ::info info)
(l/warn :hint "received incomplete profile info object (please set correct scopes)"
:info (pr-str info))
(ex/raise :type :internal
:code :unable-to-auth
:hint "no user info"))
info)
(ex/raise :type :internal
:code :unable-to-retrieve-user-info
::http-status status
::http-body body)))))
(s/def ::backend ::us/not-empty-string)
(s/def ::email ::us/not-empty-string)
@ -104,45 +112,44 @@
(defn retrieve-info
[{:keys [tokens provider] :as cfg} request]
(let [state (get-in request [:params :state])
state (tokens :verify {:token state :iss :oauth})
info (some->> (get-in request [:params :code])
(retrieve-access-token cfg)
(retrieve-user-info cfg))]
(letfn [(validate-oidc [info]
;; If the provider is OIDC, we can proceed to check
;; roles if they are defined.
(when (and (= "oidc" (:name provider))
(seq (:roles provider)))
(let [provider-roles (into #{} (:roles provider))
profile-roles (let [attr (cf/get :oidc-roles-attr :roles)
roles (get info attr)]
(cond
(string? roles) (into #{} (str/words roles))
(vector? roles) (into #{} roles)
:else #{}))]
(when-not (s/valid? ::info info)
(l/warn :hint "received incomplete profile info object (please set correct scopes)"
:info (pr-str info))
(ex/raise :type :internal
:code :unable-to-auth
:hint "no user info"))
;; check if profile has a configured set of roles
(when-not (set/subset? provider-roles profile-roles)
(ex/raise :type :internal
:code :unable-to-auth
:hint "not enough permissions"))))
info)
;; If the provider is OIDC, we can proceed to check
;; roles if they are defined.
(when (and (= "oidc" (:name provider))
(seq (:roles provider)))
(let [provider-roles (into #{} (:roles provider))
profile-roles (let [attr (cf/get :oidc-roles-attr :roles)
roles (get info attr)]
(cond
(string? roles) (into #{} (str/words roles))
(vector? roles) (into #{} roles)
:else #{}))]
(post-process [state info]
(cond-> info
(some? (:invitation-token state))
(assoc :invitation-token (:invitation-token state))
;; check if profile has a configured set of roles
(when-not (set/subset? provider-roles profile-roles)
(ex/raise :type :internal
:code :unable-to-auth
:hint "not enough permissions"))))
;; If state token comes with props, merge them. The state token
;; props can contain pm_ and utm_ prefixed query params.
(map? (:props state))
(update :props merge (:props state))))]
(cond-> info
(some? (:invitation-token state))
(assoc :invitation-token (:invitation-token state))
;; If state token comes with props, merge them. The state token
;; props can contain pm_ and utm_ prefixed query params.
(map? (:props state))
(update :props merge (:props state)))))
(let [state (get-in request [:params :state])
state (tokens :verify {:token state :iss :oauth})
code (get-in request [:params :code])]
(-> (p/resolved code)
(p/then #(retrieve-access-token cfg %))
(p/then #(retrieve-user-info cfg %))
(p/then' validate-oidc)
(p/then' (partial post-process state))))))
;; --- HTTP HANDLERS
@ -158,12 +165,13 @@
params))
(defn- retrieve-profile
[{:keys [pool] :as cfg} info]
(with-open [conn (db/open pool)]
(some->> (:email info)
(profile/retrieve-profile-data-by-email conn)
(profile/populate-additional-data conn)
(profile/decode-profile-row))))
[{:keys [pool executor] :as cfg} info]
(px/with-dispatch executor
(with-open [conn (db/open pool)]
(some->> (:email info)
(profile/retrieve-profile-data-by-email conn)
(profile/populate-additional-data conn)
(profile/decode-profile-row)))))
(defn- redirect-response
[uri]
@ -202,6 +210,7 @@
(->> (redirect-response uri)
(sxf request)))
(let [info (assoc info
:iss :prepared-register
:is-active true
@ -216,35 +225,30 @@
(redirect-response uri))))
(defn- auth-handler
[{:keys [tokens executor] :as cfg} {:keys [params] :as request} respond _]
(px/run!
executor
(fn []
(let [invitation (:invitation-token params)
props (extract-utm-props params)
state (tokens :generate
{:iss :oauth
:invitation-token invitation
:props props
:exp (dt/in-future "15m")})
uri (build-auth-uri cfg state)]
(respond
{:status 200
:body {:redirect-uri uri}})))))
[{:keys [tokens] :as cfg} {:keys [params] :as request} respond _]
(let [props (extract-utm-props params)
state (tokens :generate
{:iss :oauth
:invitation-token (:invitation-token params)
:props props
:exp (dt/in-future "15m")})
uri (build-auth-uri cfg state)]
(respond {:status 200 :body {:redirect-uri uri}})))
(defn- callback-handler
[{:keys [executor] :as cfg} request respond _]
(px/run!
executor
(fn []
(try
(let [info (retrieve-info cfg request)
profile (retrieve-profile cfg info)]
(respond (generate-redirect cfg request info profile)))
(catch Exception cause
(l/warn :hint "error on oauth process" :cause cause)
(respond (generate-error-redirect cfg cause)))))))
[cfg request respond _]
(letfn [(process-request []
(p/let [info (retrieve-info cfg request)
profile (retrieve-profile cfg info)]
(generate-redirect cfg request info profile)))
(handle-error [cause]
(l/warn :hint "error on oauth process" :cause cause)
(respond (generate-error-redirect cfg cause)))]
(-> (process-request)
(p/then respond)
(p/catch handle-error))))
;; --- INIT
@ -281,10 +285,10 @@
:callback-handler (wrap-handler cfg callback-handler)}))
(defn- discover-oidc-config
[{:keys [base-uri] :as opts}]
[{:keys [http-client]} {:keys [base-uri] :as opts}]
(let [discovery-uri (u/join base-uri ".well-known/openid-configuration")
response (ex/try (http/send! {:method :get :uri (str discovery-uri)}))]
response (ex/try (http-client {:method :get :uri (str discovery-uri)} {:sync? true}))]
(cond
(ex/exception? response)
(do
@ -294,10 +298,10 @@
nil)
(= 200 (:status response))
(let [data (json/read-str (:body response))]
{:token-uri (get data "token_endpoint")
:auth-uri (get data "authorization_endpoint")
:user-uri (get data "userinfo_endpoint")})
(let [data (json/read (:body response))]
{:token-uri (get data :token_endpoint)
:auth-uri (get data :authorization_endpoint)
:user-uri (get data :userinfo_endpoint)})
:else
(do
@ -325,6 +329,7 @@
:roles-attr (cf/get :oidc-roles-attr)
:roles (cf/get :oidc-roles)
:name "oidc"}]
(if (and (string? (:base-uri opts))
(string? (:client-id opts))
(string? (:client-secret opts)))
@ -339,7 +344,7 @@
(assoc-in cfg [:providers "oidc"] opts))
(do
(l/debug :hint "trying to discover oidc provider configuration using BASE_URI")
(if-let [opts' (discover-oidc-config opts)]
(if-let [opts' (discover-oidc-config cfg opts)]
(do
(l/debug :hint "discovered opts" :additional-opts opts')
(assoc-in cfg [:providers "oidc"] (merge opts opts')))

View file

@ -89,16 +89,6 @@
(when-let [token (get-in cookies [token-cookie-name :value])]
(rss/delete-session store token)))
(defn- retrieve-session
[store token]
(when token
(rss/read-session store token)))
(defn- retrieve-from-request
[store {:keys [cookies] :as request}]
(->> (get-in cookies [token-cookie-name :value])
(retrieve-session store)))
(defn- add-cookies
[response token]
(let [cors? (contains? cfg/flags :cors)
@ -132,40 +122,55 @@
:value ""
:max-age -1}})))
;; NOTE: for now the session middleware is synchronous and is
;; processed on jetty threads. This is because of probably a bug on
;; jetty that causes NPE on upgrading connection to websocket from
;; thread not managed by jetty. We probably can fix it running
;; websocket server in different port as standalone service.
(defn- middleware
[events-ch store handler]
(fn [request respond raise]
(if-let [{:keys [id profile-id] :as session} (retrieve-from-request store request)]
(do
(a/>!! events-ch id)
(l/set-context! {:profile-id profile-id})
(handler (assoc request :profile-id profile-id :session-id id) respond raise))
(handler request respond raise))))
[{:keys [::events-ch ::store] :as cfg} handler]
(letfn [(get-session [{:keys [cookies] :as request}]
(if-let [token (get-in cookies [token-cookie-name :value])]
(if-let [{:keys [id profile-id]} (rss/read-session store token)]
(assoc request :session-id id :profile-id profile-id)
request)
request))]
(fn [request respond raise]
(try
(let [{:keys [session-id profile-id] :as request} (get-session request)]
(when (and session-id profile-id)
(a/offer! events-ch session-id))
(handler request respond raise))
(catch Throwable cause
(raise cause))))))
;; --- STATE INIT: SESSION
(s/def ::tokens fn?)
(defmethod ig/pre-init-spec ::session [_]
(s/keys :req-un [::db/pool ::tokens]))
(defmethod ig/pre-init-spec :app.http/session [_]
(s/keys :req-un [::db/pool ::tokens ::wrk/executor]))
(defmethod ig/prep-key ::session
(defmethod ig/prep-key :app.http/session
[_ cfg]
(d/merge {:buffer-size 128}
(d/without-nils cfg)))
(defmethod ig/init-key ::session
(defmethod ig/init-key :app.http/session
[_ {:keys [pool tokens] :as cfg}]
(let [events-ch (a/chan (a/dropping-buffer (:buffer-size cfg)))
store (if (db/read-only? pool)
(->MemoryStore (atom {}) tokens)
(->DatabaseStore pool tokens))]
(->DatabaseStore pool tokens))
cfg (assoc cfg ::store store ::events-ch events-ch)]
(when (db/read-only? pool)
(l/warn :hint "sessions module initialized with in-memory store"))
(-> cfg
(assoc ::events-ch events-ch)
(assoc :middleware (partial middleware events-ch store))
(assoc :middleware (partial middleware cfg))
(assoc :create (fn [profile-id]
(fn [request response]
(let [token (create-session store request profile-id)]
@ -177,11 +182,10 @@
(assoc :body "")
(clear-cookies)))))))
(defmethod ig/halt-key! ::session
(defmethod ig/halt-key! :app.http/session
[_ data]
(a/close! (::events-ch data)))
;; --- STATE INIT: SESSION UPDATER
(declare update-sessions)
@ -192,8 +196,7 @@
(defmethod ig/pre-init-spec ::updater [_]
(s/keys :req-un [::db/pool ::wrk/executor ::mtx/metrics ::session]
:opt-un [::max-batch-age
::max-batch-size]))
:opt-un [::max-batch-age ::max-batch-size]))
(defmethod ig/prep-key ::updater
[_ cfg]

View file

@ -16,7 +16,6 @@
[app.config :as cf]
[app.db :as db]
[app.util.async :as aa]
[app.util.http :as http]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.core.async :as a]
@ -221,11 +220,12 @@
(declare archive-events)
(s/def ::http-client fn?)
(s/def ::uri ::us/string)
(s/def ::tokens fn?)
(defmethod ig/pre-init-spec ::archive-task [_]
(s/keys :req-un [::db/pool ::tokens]
(s/keys :req-un [::db/pool ::tokens ::http-client]
:opt-un [::uri]))
(defmethod ig/init-key ::archive-task
@ -257,7 +257,7 @@
for update skip locked;")
(defn archive-events
[{:keys [pool uri tokens] :as cfg}]
[{:keys [pool uri tokens http-client] :as cfg}]
(letfn [(decode-row [{:keys [props ip-addr context] :as row}]
(cond-> row
(db/pgobject? props)
@ -293,7 +293,7 @@
:method :post
:headers headers
:body body}
resp (http/send! params)]
resp (http-client params {:sync? true})]
(if (= (:status resp) 204)
true
(do

View file

@ -11,7 +11,6 @@
[app.common.spec :as us]
[app.config :as cfg]
[app.util.async :as aa]
[app.util.http :as http]
[app.util.json :as json]
[app.worker :as wrk]
[clojure.core.async :as a]
@ -62,13 +61,14 @@
(str "\n" (:trace error))))]]}]}))
(defn- send-log
[uri payload i]
[{:keys [http-client uri]} payload i]
(try
(let [response (http/send! {:uri uri
:timeout 6000
:method :post
:headers {"content-type" "application/json"}
:body (json/write payload)})]
(let [response (http-client {:uri uri
:timeout 6000
:method :post
:headers {"content-type" "application/json"}
:body (json/write payload)}
{:sync? true})]
(cond
(= (:status response) 204)
true
@ -89,11 +89,11 @@
false)))
(defn- handle-event
[{:keys [executor uri]} event]
[{:keys [executor] :as cfg} event]
(aa/with-thread executor
(let [payload (prepare-payload event)]
(loop [i 1]
(when (and (not (send-log uri payload i)) (< i 20))
(when (and (not (send-log cfg payload i)) (< i 20))
(Thread/sleep (* i 2000))
(recur (inc i)))))))

View file

@ -9,52 +9,47 @@
(:require
[app.common.logging :as l]
[app.config :as cf]
[app.db :as db]
[app.loggers.database :as ldb]
[app.util.async :as aa]
[app.util.http :as http]
[app.util.json :as json]
[app.worker :as wrk]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[integrant.core :as ig]))
[integrant.core :as ig]
[promesa.core :as p]))
(defonce enabled (atom true))
(defn- send-mattermost-notification!
[cfg {:keys [host id public-uri] :as event}]
(try
(let [uri (:uri cfg)
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))
rsp (http/send! {:uri uri
:method :post
:headers {"content-type" "application/json"}
:body (json/write-str {:text text})})]
(when (not= (:status rsp) 200)
(l/error :hint "error on sending data to mattermost"
:response (pr-str rsp))))
(catch Exception e
(l/error :hint "unexpected exception on error reporter"
:cause e))))
[{:keys [http-client] :as cfg} {:keys [host id public-uri] :as event}]
(let [uri (:uri cfg)
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))]
(p/then
(http-client {:uri uri
:method :post
:headers {"content-type" "application/json"}
:body (json/write-str {:text text})})
(fn [{:keys [status] :as rsp}]
(when (not= status 200)
(l/warn :hint "error on sending data to mattermost"
:response (pr-str rsp)))))))
(defn handle-event
[{:keys [executor] :as cfg} event]
(aa/with-thread executor
(try
(let [event (ldb/parse-event event)]
(when @enabled
(send-mattermost-notification! cfg event)))
(catch Exception e
(l/warn :hint "unexpected exception on error reporter" :cause e)))))
[cfg event]
(let [ch (a/chan)]
(-> (p/let [event (ldb/parse-event event)]
(send-mattermost-notification! cfg event))
(p/finally (fn [_ cause]
(when cause
(l/warn :hint "unexpected exception on error reporter" :cause cause))
(a/close! ch))))
ch))
(s/def ::http-client fn?)
(s/def ::uri ::cf/error-report-webhook)
(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req-un [::wrk/executor ::db/pool ::receiver]
(s/keys :req-un [::http-client ::receiver]
:opt-un [::uri]))
(defmethod ig/init-key ::reporter

View file

@ -37,7 +37,11 @@
(keep prepare)))
mult (a/mult output)]
(when endpoint
(a/thread (start-rcv-loop {:out buffer :endpoint endpoint})))
(let [thread (Thread. #(start-rcv-loop {:out buffer :endpoint endpoint}))]
(.setDaemon thread false)
(.setName thread "penpot/zmq-logger-receiver")
(.start thread)))
(a/pipe buffer output)
(with-meta
(fn [cmd ch]
@ -62,7 +66,7 @@
([] (start-rcv-loop nil))
([{:keys [out endpoint] :or {endpoint "tcp://localhost:5556"}}]
(let [out (or out (a/chan 1))
zctx (ZContext.)
zctx (ZContext. 1)
socket (.. zctx (createSocket SocketType/SUB))]
(.. socket (connect ^String endpoint))
(.. socket (subscribe ""))
@ -75,7 +79,7 @@
(recur)
(do
(.close ^java.lang.AutoCloseable socket)
(.close ^java.lang.AutoCloseable zctx))))))))
(.destroy ^ZContext zctx))))))))
(s/def ::logger-name string?)
(s/def ::level string?)

View file

@ -29,10 +29,10 @@
{:parallelism (cf/get :default-executor-parallelism 60)
:prefix :default}
;; Constrained thread pool. Should only be used from high demand
;; RPC methods.
;; Constrained thread pool. Should only be used from high resources
;; demanding operations.
[::blocking :app.worker/executor]
{:parallelism (cf/get :blocking-executor-parallelism 20)
{:parallelism (cf/get :blocking-executor-parallelism 10)
:prefix :blocking}
;; Dedicated thread pool for backround tasks execution.
@ -40,6 +40,10 @@
{:parallelism (cf/get :worker-executor-parallelism 10)
:prefix :worker}
:app.worker/scheduler
{:parallelism 1
:prefix :scheduler}
:app.worker/executors
{:default (ig/ref [::default :app.worker/executor])
:worker (ig/ref [::worker :app.worker/executor])
@ -47,6 +51,7 @@
:app.worker/executors-monitor
{:metrics (ig/ref :app.metrics/metrics)
:scheduler (ig/ref :app.worker/scheduler)
:executors (ig/ref :app.worker/executors)}
:app.migrations/migrations
@ -68,14 +73,19 @@
:app.storage/gc-deleted-task
{:pool (ig/ref :app.db/pool)
:storage (ig/ref :app.storage/storage)
:executor (ig/ref [::worker :app.worker/executor])
:min-age (dt/duration {:hours 2})}
:app.storage/gc-touched-task
{:pool (ig/ref :app.db/pool)}
{:pool (ig/ref :app.db/pool)}
:app.http.session/session
:app.http/client
{:executor (ig/ref [::default :app.worker/executor])}
:app.http/session
{:pool (ig/ref :app.db/pool)
:tokens (ig/ref :app.tokens/tokens)}
:tokens (ig/ref :app.tokens/tokens)
:executor (ig/ref [::default :app.worker/executor])}
:app.http.session/gc-task
{:pool (ig/ref :app.db/pool)
@ -85,41 +95,44 @@
{:pool (ig/ref :app.db/pool)
:metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::worker :app.worker/executor])
:session (ig/ref :app.http.session/session)
:session (ig/ref :app.http/session)
:max-batch-age (cf/get :http-session-updater-batch-max-age)
:max-batch-size (cf/get :http-session-updater-batch-max-size)}
:app.http.awsns/handler
{:tokens (ig/ref :app.tokens/tokens)
:pool (ig/ref :app.db/pool)}
{:tokens (ig/ref :app.tokens/tokens)
:pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)
:executor (ig/ref [::worker :app.worker/executor])}
:app.http/server
{:port (cf/get :http-server-port)
:host (cf/get :http-server-host)
:router (ig/ref :app.http/router)
:metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::default :app.worker/executor])
:session (ig/ref :app.http/session)
:max-threads (cf/get :http-server-max-threads)
:min-threads (cf/get :http-server-min-threads)}
:app.http/router
{:assets (ig/ref :app.http.assets/handlers)
:feedback (ig/ref :app.http.feedback/handler)
:session (ig/ref :app.http.session/session)
:sns-webhook (ig/ref :app.http.awsns/handler)
:oauth (ig/ref :app.http.oauth/handler)
:debug (ig/ref :app.http.debug/handlers)
:ws (ig/ref :app.http.websocket/handler)
:metrics (ig/ref :app.metrics/metrics)
:public-uri (cf/get :public-uri)
:storage (ig/ref :app.storage/storage)
:tokens (ig/ref :app.tokens/tokens)
:audit-http-handler (ig/ref :app.loggers.audit/http-handler)
:rpc (ig/ref :app.rpc/rpc)}
{:assets (ig/ref :app.http.assets/handlers)
:feedback (ig/ref :app.http.feedback/handler)
:awsns-handler (ig/ref :app.http.awsns/handler)
:oauth (ig/ref :app.http.oauth/handler)
:debug (ig/ref :app.http.debug/handlers)
:ws (ig/ref :app.http.websocket/handler)
:metrics (ig/ref :app.metrics/metrics)
:public-uri (cf/get :public-uri)
:storage (ig/ref :app.storage/storage)
:tokens (ig/ref :app.tokens/tokens)
:audit-handler (ig/ref :app.loggers.audit/http-handler)
:rpc (ig/ref :app.rpc/rpc)
:executor (ig/ref [::default :app.worker/executor])}
:app.http.debug/handlers
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])}
:executor (ig/ref [::worker :app.worker/executor])}
:app.http.websocket/handler
{:pool (ig/ref :app.db/pool)
@ -139,24 +152,26 @@
:executor (ig/ref [::default :app.worker/executor])}
:app.http.oauth/handler
{:rpc (ig/ref :app.rpc/rpc)
:session (ig/ref :app.http.session/session)
:pool (ig/ref :app.db/pool)
:tokens (ig/ref :app.tokens/tokens)
:audit (ig/ref :app.loggers.audit/collector)
:executor (ig/ref [::default :app.worker/executor])
:public-uri (cf/get :public-uri)}
{:rpc (ig/ref :app.rpc/rpc)
:session (ig/ref :app.http/session)
:pool (ig/ref :app.db/pool)
:tokens (ig/ref :app.tokens/tokens)
:audit (ig/ref :app.loggers.audit/collector)
:executor (ig/ref [::default :app.worker/executor])
:http-client (ig/ref :app.http/client)
:public-uri (cf/get :public-uri)}
:app.rpc/rpc
{:pool (ig/ref :app.db/pool)
:session (ig/ref :app.http.session/session)
:tokens (ig/ref :app.tokens/tokens)
:metrics (ig/ref :app.metrics/metrics)
:storage (ig/ref :app.storage/storage)
:msgbus (ig/ref :app.msgbus/msgbus)
:public-uri (cf/get :public-uri)
:audit (ig/ref :app.loggers.audit/collector)
:executors (ig/ref :app.worker/executors)}
{:pool (ig/ref :app.db/pool)
:session (ig/ref :app.http/session)
:tokens (ig/ref :app.tokens/tokens)
:metrics (ig/ref :app.metrics/metrics)
:storage (ig/ref :app.storage/storage)
:msgbus (ig/ref :app.msgbus/msgbus)
:public-uri (cf/get :public-uri)
:audit (ig/ref :app.loggers.audit/collector)
:http-client (ig/ref :app.http/client)
:executors (ig/ref :app.worker/executors)}
:app.worker/worker
{:executor (ig/ref [::worker :app.worker/executor])
@ -164,11 +179,12 @@
:metrics (ig/ref :app.metrics/metrics)
:pool (ig/ref :app.db/pool)}
:app.worker/scheduler
:app.worker/cron
{:executor (ig/ref [::worker :app.worker/executor])
:scheduler (ig/ref :app.worker/scheduler)
:tasks (ig/ref :app.worker/registry)
:pool (ig/ref :app.db/pool)
:schedule
:entries
[{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :file-media-gc}
@ -261,7 +277,8 @@
{:pool (ig/ref :app.db/pool)
:version (:full cf/version)
:uri (cf/get :telemetry-uri)
:sprops (ig/ref :app.setup/props)}
:sprops (ig/ref :app.setup/props)
:http-client (ig/ref :app.http/client)}
:app.srepl/server
{:port (cf/get :srepl-port)
@ -279,16 +296,17 @@
:app.loggers.audit/http-handler
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])}
:executor (ig/ref [::default :app.worker/executor])}
:app.loggers.audit/collector
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])}
:app.loggers.audit/archive-task
{:uri (cf/get :audit-log-archive-uri)
:tokens (ig/ref :app.tokens/tokens)
:pool (ig/ref :app.db/pool)}
{:uri (cf/get :audit-log-archive-uri)
:tokens (ig/ref :app.tokens/tokens)
:pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)}
:app.loggers.audit/gc-task
{:max-age (cf/get :audit-log-gc-max-age cf/deletion-delay)
@ -300,10 +318,9 @@
:executor (ig/ref [::worker :app.worker/executor])}
:app.loggers.mattermost/reporter
{:uri (cf/get :error-report-webhook)
:receiver (ig/ref :app.loggers.zmq/receiver)
:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])}
{:uri (cf/get :error-report-webhook)
:receiver (ig/ref :app.loggers.zmq/receiver)
:http-client (ig/ref :app.http/client)}
:app.loggers.database/reporter
{:receiver (ig/ref :app.loggers.zmq/receiver)
@ -312,6 +329,8 @@
:app.storage/storage
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])
:backends
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
:assets-db (ig/ref [::assets :app.storage.db/backend])
@ -328,12 +347,14 @@
{:region (cf/get :storage-fdata-s3-region)
:bucket (cf/get :storage-fdata-s3-bucket)
:endpoint (cf/get :storage-fdata-s3-endpoint)
:prefix (cf/get :storage-fdata-s3-prefix)}
:prefix (cf/get :storage-fdata-s3-prefix)
:executor (ig/ref [::default :app.worker/executor])}
[::assets :app.storage.s3/backend]
{:region (cf/get :storage-assets-s3-region)
:endpoint (cf/get :storage-assets-s3-endpoint)
:bucket (cf/get :storage-assets-s3-bucket)}
:bucket (cf/get :storage-assets-s3-bucket)
:executor (ig/ref [::default :app.worker/executor])}
[::assets :app.storage.fs/backend]
{:directory (cf/get :storage-assets-fs-directory)}

View file

@ -42,13 +42,14 @@
:internal.http.upload/tempfile
:internal.http.upload/content-type]))
(defn validate-media-type
([mtype] (validate-media-type mtype cm/valid-image-types))
(defn validate-media-type!
([mtype] (validate-media-type! mtype cm/valid-image-types))
([mtype allowed]
(when-not (contains? allowed mtype)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "Seems like you are uploading an invalid media object"))))
:hint "Seems like you are uploading an invalid media object"))
mtype))
(defmulti process :cmd)
(defmulti process-error class)

View file

@ -211,6 +211,9 @@
{:name "0067-add-team-invitation-table"
:fn (mg/resource "app/migrations/sql/0067-add-team-invitation-table.sql")}
{:name "0068-mod-storage-object-table"
:fn (mg/resource "app/migrations/sql/0068-mod-storage-object-table.sql")}
])

View file

@ -9,7 +9,6 @@ CREATE TABLE team_invitation (
PRIMARY KEY(team_id, email_to)
);
ALTER TABLE team_invitation
ALTER COLUMN email_to SET STORAGE external,
ALTER COLUMN role SET STORAGE external;
ALTER COLUMN role SET STORAGE external;

View file

@ -0,0 +1,3 @@
CREATE INDEX storage_object__hash_backend_bucket__idx
ON storage_object ((metadata->>'~:hash'), (metadata->>'~:bucket'), backend)
WHERE deleted_at IS NULL;

View file

@ -110,11 +110,11 @@
"Wraps service method into async flow, with the ability to dispatching
it to a preconfigured executor service."
[{:keys [executors] :as cfg} f mdata]
(let [dname (::async/dispatch mdata :none)]
(let [dname (::async/dispatch mdata :default)]
(if (= :none dname)
(with-meta
(fn [cfg params]
(p/do! (f cfg params)))
(p/do (f cfg params)))
mdata)
(let [executor (get executors dname)]

View file

@ -17,12 +17,13 @@
[app.rpc.permissions :as perms]
[app.rpc.queries.files :as files]
[app.rpc.queries.projects :as proj]
[app.rpc.rlimit :as rlimit]
[app.storage.impl :as simpl]
[app.util.async :as async]
[app.util.blob :as blob]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]))
[clojure.spec.alpha :as s]
[promesa.core :as p]))
(declare create-file)
@ -126,7 +127,6 @@
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(mark-file-deleted conn params)))
(defn mark-file-deleted
@ -273,7 +273,7 @@
(contains? o :changes-with-metadata)))))
(sv/defmethod ::update-file
{::async/dispatch :blocking}
{::rlimit/permits 20}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(db/xact-lock! conn id)
@ -295,8 +295,9 @@
(defn- delete-from-storage
[{:keys [storage] :as cfg} file]
(when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/del-object backend file)))
(p/do
(when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/del-object backend file))))
(defn- update-file
[{:keys [conn metrics] :as cfg} {:keys [file changes changes-with-metadata session-id profile-id] :as params}]
@ -353,7 +354,7 @@
;; We need to delete the data from external storage backend
(when-not (nil? (:data-backend file))
(delete-from-storage cfg file))
@(delete-from-storage cfg file))
(db/update! conn :project
{:modified-at ts}

View file

@ -6,6 +6,7 @@
(ns app.rpc.mutations.fonts
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.spec :as us]
[app.common.uuid :as uuid]
@ -15,7 +16,9 @@
[app.storage :as sto]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]))
[clojure.spec.alpha :as s]
[promesa.core :as p]
[promesa.exec :as px]))
(declare create-font-variant)
@ -38,56 +41,74 @@
(sv/defmethod ::create-font-variant
[{:keys [pool] :as cfg} {:keys [team-id profile-id] :as params}]
(teams/check-edition-permissions! pool profile-id team-id)
(create-font-variant cfg params))
(let [cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id team-id)
(create-font-variant cfg params)))
(defn create-font-variant
[{:keys [storage pool] :as cfg} {:keys [data] :as params}]
(let [data (media/run {:cmd :generate-fonts :input data})
storage (media/configure-assets-storage storage)]
[{:keys [storage pool executors] :as cfg} {:keys [data] :as params}]
(letfn [(generate-fonts [data]
(px/with-dispatch (:blocking executors)
(media/run {:cmd :generate-fonts :input data})))
(when (and (not (contains? data "font/otf"))
(not (contains? data "font/ttf"))
(not (contains? data "font/woff"))
(not (contains? data "font/woff2")))
(ex/raise :type :validation
:code :invalid-font-upload))
;; Function responsible of calculating cryptographyc hash of
;; the provided data. Even though it uses the hight
;; performance BLAKE2b algorithm, we prefer to schedule it
;; to be executed on the blocking executor.
(calculate-hash [data]
(px/with-dispatch (:blocking executors)
(sto/calculate-hash data)))
(let [otf (when-let [fdata (get data "font/otf")]
(sto/put-object storage {:content (sto/content fdata)
:content-type "font/otf"
:reference :team-font-variant
:touched-at (dt/now)}))
(validate-data [data]
(when (and (not (contains? data "font/otf"))
(not (contains? data "font/ttf"))
(not (contains? data "font/woff"))
(not (contains? data "font/woff2")))
(ex/raise :type :validation
:code :invalid-font-upload))
data)
ttf (when-let [fdata (get data "font/ttf")]
(sto/put-object storage {:content (sto/content fdata)
:content-type "font/ttf"
:touched-at (dt/now)
:reference :team-font-variant}))
(persist-font-object [data mtype]
(when-let [fdata (get data mtype)]
(p/let [hash (calculate-hash fdata)
content (-> (sto/content fdata)
(sto/wrap-with-hash hash))]
(sto/put-object! storage {::sto/content content
::sto/touched-at (dt/now)
::sto/deduplicate? true
:content-type mtype
:bucket "team-font-variant"}))))
woff1 (when-let [fdata (get data "font/woff")]
(sto/put-object storage {:content (sto/content fdata)
:content-type "font/woff"
:touched-at (dt/now)
:reference :team-font-variant}))
(persist-fonts [data]
(p/let [otf (persist-font-object data "font/otf")
ttf (persist-font-object data "font/ttf")
woff1 (persist-font-object data "font/woff")
woff2 (persist-font-object data "font/woff2")]
woff2 (when-let [fdata (get data "font/woff2")]
(sto/put-object storage {:content (sto/content fdata)
:content-type "font/woff2"
:touched-at (dt/now)
:reference :team-font-variant}))]
(d/without-nils
{:otf otf
:ttf ttf
:woff1 woff1
:woff2 woff2})))
(db/insert! pool :team-font-variant
{:id (uuid/next)
:team-id (:team-id params)
:font-id (:font-id params)
:font-family (:font-family params)
:font-weight (:font-weight params)
:font-style (:font-style params)
:woff1-file-id (:id woff1)
:woff2-file-id (:id woff2)
:otf-file-id (:id otf)
:ttf-file-id (:id ttf)}))))
(insert-into-db [{:keys [woff1 woff2 otf ttf]}]
(db/insert! pool :team-font-variant
{:id (uuid/next)
:team-id (:team-id params)
:font-id (:font-id params)
:font-family (:font-family params)
:font-weight (:font-weight params)
:font-style (:font-style params)
:woff1-file-id (:id woff1)
:woff2-file-id (:id woff2)
:otf-file-id (:id otf)
:ttf-file-id (:id ttf)}))
]
(-> (generate-fonts data)
(p/then validate-data)
(p/then persist-fonts (:default executors))
(p/then insert-into-db (:default executors)))))
;; --- UPDATE FONT FAMILY

View file

@ -6,6 +6,7 @@
(ns app.rpc.mutations.media
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.spec :as us]
@ -16,12 +17,12 @@
[app.rpc.queries.teams :as teams]
[app.rpc.rlimit :as rlimit]
[app.storage :as sto]
[app.util.async :as async]
[app.util.http :as http]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[datoteka.core :as fs]))
[datoteka.core :as fs]
[promesa.core :as p]
[promesa.exec :as px]))
(def thumbnail-options
{:width 100
@ -50,10 +51,10 @@
:opt-un [::id]))
(sv/defmethod ::upload-file-media-object
{::rlimit/permits (cf/get :rlimit-image)
::async/dispatch :default}
{::rlimit/permits (cf/get :rlimit-image)}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(let [file (select-file pool file-id)]
(let [file (select-file pool file-id)
cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id (:team-id file))
(create-file-media-object cfg params)))
@ -68,34 +69,6 @@
[info]
(= (:mtype info) "image/svg+xml"))
(defn- fetch-url
[url]
(try
(http/get! url {:as :byte-array})
(catch Exception e
(ex/raise :type :validation
:code :unable-to-access-to-url
:cause e))))
;; TODO: we need to check the size before fetch resource, if not we
;; can start downloading very big object and cause OOM errors.
(defn- download-media
[{:keys [storage] :as cfg} url]
(let [result (fetch-url url)
data (:body result)
mtype (get (:headers result) "content-type")
format (cm/mtype->format mtype)]
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "Seems like the url points to an invalid media object."))
(-> (assoc storage :backend :tmp)
(sto/put-object {:content (sto/content data)
:content-type mtype
:reference :file-media-object
:expired-at (dt/in-future {:minutes 30})}))))
;; NOTE: we use the `on conflict do update` instead of `do nothing`
;; because postgresql does not returns anything if no update is
;; performed, the `do update` does the trick.
@ -121,67 +94,137 @@
;; inverse, soft referential integrity).
(defn create-file-media-object
[{:keys [storage pool] :as cfg} {:keys [id file-id is-local name content] :as params}]
(media/validate-media-type (:content-type content))
(let [source-path (fs/path (:tempfile content))
source-mtype (:content-type content)
source-info (media/run {:cmd :info :input {:path source-path :mtype source-mtype}})
storage (media/configure-assets-storage storage)
[{:keys [storage pool executors] :as cfg} {:keys [id file-id is-local name content] :as params}]
(media/validate-media-type! (:content-type content))
thumb (when (and (not (svg-image? source-info))
(big-enough-for-thumbnail? source-info))
(media/run (assoc thumbnail-options
:cmd :generic-thumbnail
:input {:mtype (:mtype source-info)
:path source-path})))
(letfn [;; Function responsible to retrieve the file information, as
;; it is synchronous operation it should be wrapped into
;; with-dispatch macro.
(get-info [path mtype]
(px/with-dispatch (:blocking executors)
(media/run {:cmd :info :input {:path path :mtype mtype}})))
image (if (= (:mtype source-info) "image/svg+xml")
(let [data (slurp source-path)]
(sto/put-object storage
{:content (sto/content data)
:content-type (:mtype source-info)
:reference :file-media-object
:touched-at (dt/now)}))
(sto/put-object storage
{:content (sto/content source-path)
:content-type (:mtype source-info)
:reference :file-media-object
:touched-at (dt/now)}))
;; Function responsible of calculating cryptographyc hash of
;; the provided data. Even though it uses the hight
;; performance BLAKE2b algorithm, we prefer to schedule it
;; to be executed on the blocking executor.
(calculate-hash [data]
(px/with-dispatch (:blocking executors)
(sto/calculate-hash data)))
thumb (when thumb
(sto/put-object storage
{:content (sto/content (:data thumb) (:size thumb))
:content-type (:mtype thumb)
:reference :file-media-object
:touched-at (dt/now)}))]
;; Function responsible of generating thumnail. As it is synchronous
;; opetation, it should be wrapped into with-dispatch macro
(generate-thumbnail [info path]
(px/with-dispatch (:blocking executors)
(media/run (assoc thumbnail-options
:cmd :generic-thumbnail
:input {:mtype (:mtype info) :path path}))))
(db/exec-one! pool [sql:create-file-media-object
(or id (uuid/next))
file-id is-local name
(:id image)
(:id thumb)
(:width source-info)
(:height source-info)
source-mtype])))
(create-thumbnail [info path]
(when (and (not (svg-image? info))
(big-enough-for-thumbnail? info))
(p/let [thumb (generate-thumbnail info path)
hash (calculate-hash (:data thumb))
content (-> (sto/content (:data thumb) (:size thumb))
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype thumb)
:bucket "file-media-object"}))))
(create-image [info path]
(p/let [data (cond-> path (= (:mtype info) "image/svg+xml") slurp)
hash (calculate-hash data)
content (-> (sto/content data)
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype info)
:bucket "file-media-object"})))
(insert-into-database [info image thumb]
(px/with-dispatch (:default executors)
(db/exec-one! pool [sql:create-file-media-object
(or id (uuid/next))
file-id is-local name
(:id image)
(:id thumb)
(:width info)
(:height info)
(:mtype info)])))]
(p/let [path (fs/path (:tempfile content))
info (get-info path (:content-type content))
thumb (create-thumbnail info path)
image (create-image info path)]
(insert-into-database info image thumb))))
;; --- Create File Media Object (from URL)
(declare ^:private create-file-media-object-from-url)
(s/def ::create-file-media-object-from-url
(s/keys :req-un [::profile-id ::file-id ::is-local ::url]
:opt-un [::id ::name]))
(sv/defmethod ::create-file-media-object-from-url
{::rlimit/permits (cf/get :rlimit-image)
::async/dispatch :default}
[{:keys [pool storage] :as cfg} {:keys [profile-id file-id url name] :as params}]
(let [file (select-file pool file-id)]
{::rlimit/permits (cf/get :rlimit-image)}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(let [file (select-file pool file-id)
cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id (:team-id file))
(let [mobj (download-media cfg url)
content {:filename "tempfile"
:size (:size mobj)
:tempfile (sto/get-object-path storage mobj)
:content-type (:content-type (meta mobj))}]
(create-file-media-object-from-url cfg params)))
(def max-download-file-size
(* 1024 1024 100)) ; 100MiB
(defn- create-file-media-object-from-url
[{:keys [storage http-client] :as cfg} {:keys [url name] :as params}]
(letfn [(parse-and-validate-size [headers]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "Seems like the url points to resource with unknown size"))
(when (> size max-download-file-size)
(ex/raise :type :validation
:code :file-too-large
:hint "Seems like the url points to resource with size greater than 100MiB"))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "Seems like the url points to an invalid media object"))
{:size size
:mtype mtype
:format format}))
(download-media [uri]
(p/let [{:keys [body headers]} (http-client {:method :get :uri uri} {:response-type :input-stream})
{:keys [size mtype]} (parse-and-validate-size headers)]
(-> (assoc storage :backend :tmp)
(sto/put-object! {::sto/content (sto/content body size)
::sto/expired-at (dt/in-future {:minutes 30})
:content-type mtype
:bucket "file-media-object"})
(p/then (fn [sobj]
(p/let [path (sto/get-object-path storage sobj)]
{:filename "tempfile"
:size (:size sobj)
:tempfile path
:content-type (:content-type (meta sobj))}))))))]
(p/let [content (download-media url)]
(->> (merge params {:content content :name (or name (:filename content))})
(create-file-media-object cfg)))))

View file

@ -24,7 +24,8 @@
[app.util.time :as dt]
[buddy.hashers :as hashers]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]))
[cuerdas.core :as str]
[promesa.exec :as px]))
;; --- Helpers & Specs
@ -345,6 +346,7 @@
(profile/decode-profile-row)
(profile/strip-private-attrs))))
(s/def ::update-profile
(s/keys :req-un [::id ::fullname]
:opt-un [::lang ::theme]))
@ -410,32 +412,33 @@
(s/def ::update-profile-photo
(s/keys :req-un [::profile-id ::file]))
;; TODO: properly handle resource usage, transactions and storage
(sv/defmethod ::update-profile-photo
{::rlimit/permits (cf/get :rlimit-image)}
[{:keys [pool storage] :as cfg} {:keys [profile-id file] :as params}]
(db/with-atomic [conn pool]
(media/validate-media-type (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
(media/run {:cmd :info :input {:path (:tempfile file)
:mtype (:content-type file)}})
[{:keys [pool storage executors] :as cfg} {:keys [profile-id file] :as params}]
;; Validate incoming mime type
(media/validate-media-type! (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
;; Perform file validation
@(px/with-dispatch (:blocking executors)
(media/run {:cmd :info :input {:path (:tempfile file) :mtype (:content-type file)}}))
(db/with-atomic [conn pool]
(let [profile (db/get-by-id conn :profile profile-id)
storage (media/configure-assets-storage storage conn)
cfg (assoc cfg :storage storage)
photo (teams/upload-photo cfg params)]
cfg (update cfg :storage media/configure-assets-storage conn)
photo @(teams/upload-photo cfg params)]
;; Schedule deletion of old photo
(when-let [id (:photo-id profile)]
(sto/del-object storage id))
@(sto/touch-object! storage id))
;; Save new photo
(update-profile-photo conn profile-id photo))))
(defn- update-profile-photo
[conn profile-id sobj]
(db/update! conn :profile
{:photo-id (:id sobj)}
{:id profile-id})
nil)
(db/update! conn :profile
{:photo-id (:id photo)}
{:id profile-id})
nil)))
;; --- MUTATION: Request Email Change

View file

@ -24,7 +24,9 @@
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.core :as fs]))
[datoteka.core :as fs]
[promesa.core :as p]
[promesa.exec :as px]))
;; --- Helpers & Specs
@ -276,7 +278,6 @@
nil)))
;; --- Mutation: Update Team Photo
(declare upload-photo)
@ -289,21 +290,25 @@
(sv/defmethod ::update-team-photo
{::rlimit/permits (cf/get :rlimit-image)}
[{:keys [pool storage] :as cfg} {:keys [profile-id file team-id] :as params}]
[{:keys [pool storage executors] :as cfg} {:keys [profile-id file team-id] :as params}]
;; Validate incoming mime type
(media/validate-media-type! (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
;; Perform file validation
@(px/with-dispatch (:blocking executors)
(media/run {:cmd :info :input {:path (:tempfile file) :mtype (:content-type file)}}))
(db/with-atomic [conn pool]
(teams/check-edition-permissions! conn profile-id team-id)
(media/validate-media-type (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
(media/run {:cmd :info :input {:path (:tempfile file)
:mtype (:content-type file)}})
(let [team (teams/retrieve-team conn profile-id team-id)
storage (media/configure-assets-storage storage conn)
cfg (assoc cfg :storage storage)
photo (upload-photo cfg params)]
cfg (update cfg :storage media/configure-assets-storage conn)
photo @(upload-photo cfg params)]
;; Schedule deletion of old photo
;; Mark object as touched for make it ellegible for tentative
;; garbage collection.
(when-let [id (:photo-id team)]
(sto/del-object storage id))
@(sto/touch-object! storage id))
;; Save new photo
(db/update! conn :team
@ -313,17 +318,33 @@
(assoc team :photo-id (:id photo)))))
(defn upload-photo
[{:keys [storage] :as cfg} {:keys [file]}]
(let [thumb (media/run {:cmd :profile-thumbnail
[{:keys [storage executors] :as cfg} {:keys [file]}]
(letfn [(generate-thumbnail [path mtype]
(px/with-dispatch (:blocking executors)
(media/run {:cmd :profile-thumbnail
:format :jpeg
:quality 85
:width 256
:height 256
:input {:path (fs/path (:tempfile file))
:mtype (:content-type file)}})]
(sto/put-object storage
{:content (sto/content (:data thumb) (:size thumb))
:content-type (:mtype thumb)})))
:input {:path path :mtype mtype}})))
;; Function responsible of calculating cryptographyc hash of
;; the provided data. Even though it uses the hight
;; performance BLAKE2b algorithm, we prefer to schedule it
;; to be executed on the blocking executor.
(calculate-hash [data]
(px/with-dispatch (:blocking executors)
(sto/calculate-hash data)))]
(p/let [thumb (generate-thumbnail (fs/path (:tempfile file))
(:content-type file))
hash (calculate-hash (:data thumb))
content (-> (sto/content (:data thumb) (:size thumb))
(sto/wrap-with-hash hash))]
(sto/put-object! storage {::sto/content content
::sto/deduplicate? true
:bucket "profile"
:content-type (:mtype thumb)}))))
;; --- Mutation: Invite Member

View file

@ -19,7 +19,8 @@
[app.storage.impl :as simpl]
[app.util.blob :as blob]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
[clojure.spec.alpha :as s]
[promesa.core :as p]))
(declare decode-row)
(declare decode-row-xf)
@ -35,7 +36,6 @@
(s/def ::team-id ::us/uuid)
(s/def ::search-term ::us/string)
;; --- Query: File Permissions
(def ^:private sql:file-permissions
@ -188,21 +188,23 @@
(defn- retrieve-data*
[{:keys [storage] :as cfg} file]
(when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/get-object-bytes backend file)))
(p/do
(when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/get-object-bytes backend file))))
(defn retrieve-data
[cfg file]
(if (bytes? (:data file))
file
(assoc file :data (retrieve-data* cfg file))))
(p/->> (retrieve-data* cfg file)
(assoc file :data))))
(defn retrieve-file
[{:keys [conn] :as cfg} id]
(->> (db/get-by-id conn :file id)
(retrieve-data cfg)
(decode-row)
(pmg/migrate-file)))
[{:keys [pool] :as cfg} id]
(p/->> (db/get-by-id pool :file id)
(retrieve-data cfg)
(decode-row)
(pmg/migrate-file)))
(s/def ::file
(s/keys :req-un [::profile-id ::id]))
@ -210,13 +212,10 @@
(sv/defmethod ::file
"Retrieve a file by its ID. Only authenticated users."
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
(db/with-atomic [conn pool]
(let [cfg (assoc cfg :conn conn)
perms (get-permissions conn profile-id id)]
(check-read-permissions! perms)
(some-> (retrieve-file cfg id)
(assoc :permissions perms)))))
(let [perms (get-permissions pool profile-id id)]
(check-read-permissions! perms)
(p/-> (retrieve-file cfg id)
(assoc :permissions perms))))
(declare trim-file-data)
@ -232,13 +231,11 @@
need force download all shapes when only a small subset is
necesseary."
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
(db/with-atomic [conn pool]
(let [cfg (assoc cfg :conn conn)
perms (get-permissions conn profile-id id)]
(check-read-permissions! perms)
(some-> (retrieve-file cfg id)
(trim-file-data params)
(assoc :permissions perms)))))
(let [perms (get-permissions pool profile-id id)]
(check-read-permissions! perms)
(p/-> (retrieve-file cfg id)
(trim-file-data params)
(assoc :permissions perms))))
(defn- trim-file-data
[file {:keys [page-id object-id]}]
@ -263,15 +260,12 @@
"Retrieves the first page of the file. Used mainly for render
thumbnails on dashboard."
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as props}]
(db/with-atomic [conn pool]
(check-read-permissions! conn profile-id file-id)
(let [cfg (assoc cfg :conn conn)
file (retrieve-file cfg file-id)
(check-read-permissions! pool profile-id file-id)
(p/let [file (retrieve-file cfg file-id)
page-id (get-in file [:data :pages 0])]
(cond-> (get-in file [:data :pages-index page-id])
(true? (:strip-frames-with-thumbnails props))
(strip-frames-with-thumbnails)))))
(cond-> (get-in file [:data :pages-index page-id])
(true? (:strip-frames-with-thumbnails props))
(strip-frames-with-thumbnails))))
(defn strip-frames-with-thumbnails
"Remove unnecesary shapes from frames that have thumbnail."
@ -354,22 +348,20 @@
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
(defn retrieve-file-libraries
[{:keys [conn] :as cfg} is-indirect file-id]
[{:keys [pool] :as cfg} is-indirect file-id]
(let [xform (comp
(map #(assoc % :is-indirect is-indirect))
(map #(retrieve-data cfg %))
(map decode-row))]
(into #{} xform (db/exec! conn [sql:file-libraries file-id]))))
(into #{} xform (db/exec! pool [sql:file-libraries file-id]))))
(s/def ::file-libraries
(s/keys :req-un [::profile-id ::file-id]))
(sv/defmethod ::file-libraries
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(let [cfg (assoc cfg :conn conn)]
(check-read-permissions! conn profile-id file-id)
(retrieve-file-libraries cfg false file-id))))
(check-read-permissions! pool profile-id file-id)
(retrieve-file-libraries cfg false file-id))
;; --- QUERY: team-recent-files
@ -399,9 +391,8 @@
(sv/defmethod ::team-recent-files
[{:keys [pool] :as cfg} {:keys [profile-id team-id]}]
(with-open [conn (db/open pool)]
(teams/check-read-permissions! conn profile-id team-id)
(db/exec! conn [sql:team-recent-files team-id])))
(teams/check-read-permissions! pool profile-id team-id)
(db/exec! pool [sql:team-recent-files team-id]))
;; --- QUERY: get the thumbnail for an frame
@ -417,10 +408,8 @@
(sv/defmethod ::file-frame-thumbnail
[{:keys [pool]} {:keys [profile-id file-id frame-id]}]
(with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id file-id)
(db/exec-one! conn [sql:file-frame-thumbnail file-id frame-id])))
(check-read-permissions! pool profile-id file-id)
(db/exec-one! pool [sql:file-frame-thumbnail file-id frame-id]))
;; --- Helpers

View file

@ -13,27 +13,28 @@
[app.rpc.queries.share-link :as slnk]
[app.rpc.queries.teams :as teams]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
[clojure.spec.alpha :as s]
[promesa.core :as p]))
;; --- Query: View Only Bundle
(defn- retrieve-project
[conn id]
(db/get-by-id conn :project id {:columns [:id :name :team-id]}))
[pool id]
(db/get-by-id pool :project id {:columns [:id :name :team-id]}))
(defn- retrieve-bundle
[{:keys [conn] :as cfg} file-id]
(let [file (files/retrieve-file cfg file-id)
project (retrieve-project conn (:project-id file))
libs (files/retrieve-file-libraries cfg false file-id)
users (teams/retrieve-users conn (:team-id project))
[{:keys [pool] :as cfg} file-id]
(p/let [file (files/retrieve-file cfg file-id)
project (retrieve-project pool (:project-id file))
libs (files/retrieve-file-libraries cfg false file-id)
users (teams/retrieve-users pool (:team-id project))
links (->> (db/query conn :share-link {:file-id file-id})
(mapv slnk/decode-share-link-row))
links (->> (db/query pool :share-link {:file-id file-id})
(mapv slnk/decode-share-link-row))
fonts (db/query conn :team-font-variant
{:team-id (:team-id project)
:deleted-at nil})]
fonts (db/query pool :team-font-variant
{:team-id (:team-id project)
:deleted-at nil})]
{:file file
:users users
:fonts fonts
@ -50,34 +51,31 @@
(sv/defmethod ::view-only-bundle {:auth false}
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
(db/with-atomic [conn pool]
(let [cfg (assoc cfg :conn conn)
slink (slnk/retrieve-share-link conn file-id share-id)
perms (files/get-permissions conn profile-id file-id share-id)
(p/let [slink (slnk/retrieve-share-link pool file-id share-id)
perms (files/get-permissions pool profile-id file-id share-id)
bundle (p/-> (retrieve-bundle cfg file-id)
(assoc :permissions perms))]
bundle (some-> (retrieve-bundle cfg file-id)
(assoc :permissions perms))]
;; When we have neither profile nor share, we just return a not
;; found response to the user.
(when (and (not profile-id)
(not slink))
(ex/raise :type :not-found
:code :object-not-found))
;; When we have neither profile nor share, we just return a not
;; found response to the user.
(when (and (not profile-id)
(not slink))
(ex/raise :type :not-found
:code :object-not-found))
;; When we have only profile, we need to check read permissions
;; on file.
(when (and profile-id (not slink))
(files/check-read-permissions! pool profile-id file-id))
;; When we have only profile, we need to check read permissions
;; on file.
(when (and profile-id (not slink))
(files/check-read-permissions! conn profile-id file-id))
(cond-> bundle
(some? slink)
(assoc :share slink)
(cond-> bundle
(some? slink)
(assoc :share slink)
(and (some? slink)
(not (contains? (:flags slink) "view-all-pages")))
(update-in [:file :data] (fn [data]
(let [allowed-pages (:pages slink)]
(-> data
(update :pages (fn [pages] (filterv #(contains? allowed-pages %) pages)))
(update :pages-index (fn [index] (select-keys index allowed-pages)))))))))))
(and (some? slink)
(not (contains? (:flags slink) "view-all-pages")))
(update-in [:file :data] (fn [data]
(let [allowed-pages (:pages slink)]
(-> data
(update :pages (fn [pages] (filterv #(contains? allowed-pages %) pages)))
(update :pages-index (fn [index] (select-keys index allowed-pages))))))))))

View file

@ -52,7 +52,7 @@
))))
(defn wrap-rlimit
[{:keys [metrics] :as cfg} f mdata]
[{:keys [metrics executors] :as cfg} f mdata]
(if-let [permits (::permits mdata)]
(let [sem (semaphore {:permits permits
:metrics metrics
@ -60,7 +60,7 @@
(l/debug :hint "wrapping rlimit" :handler (::sv/name mdata) :permits permits)
(fn [cfg params]
(-> (acquire! sem)
(p/then (fn [_] (f cfg params)))
(p/then (fn [_] (f cfg params)) (:default executors))
(p/finally (fn [_ _] (release! sem))))))
f))

View file

@ -19,9 +19,12 @@
[app.storage.impl :as impl]
[app.storage.s3 :as ss3]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[datoteka.core :as fs]
[integrant.core :as ig]))
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Storage Module State
@ -39,7 +42,7 @@
:db ::sdb/backend))))
(defmethod ig/pre-init-spec ::storage [_]
(s/keys :req-un [::db/pool ::backends]))
(s/keys :req-un [::db/pool ::wrk/executor ::backends]))
(defmethod ig/prep-key ::storage
[_ {:keys [backends] :as cfg}]
@ -67,48 +70,58 @@
(s/def ::storage-object storage-object?)
(s/def ::storage-content impl/content?)
(defn get-metadata
[params]
(into {}
(remove (fn [[k _]] (qualified-keyword? k)))
params))
(defn- clone-database-object
;; If we in this condition branch, this means we come from the
;; clone-object, so we just need to clone it with a new backend.
[{:keys [conn backend]} object]
(let [id (uuid/random)
mdata (meta object)
result (db/insert! conn :storage-object
{:id id
:size (:size object)
:backend (name backend)
:metadata (db/tjson mdata)
:deleted-at (:expired-at object)
:touched-at (:touched-at object)})]
(assoc object
:id (:id result)
:backend backend
:created-at (:created-at result)
:touched-at (:touched-at result))))
(defn- get-database-object-by-hash
[conn backend bucket hash]
(let [sql (str "select * from storage_object "
" where (metadata->>'~:hash') = ? "
" and (metadata->>'~:bucket') = ? "
" and backend = ?"
" and deleted_at is null"
" limit 1")]
(db/exec-one! conn [sql hash bucket (name backend)])))
(defn- create-database-object
[{:keys [conn backend]} {:keys [content] :as object}]
[{:keys [conn backend executor]} {:keys [::content ::expired-at ::touched-at] :as params}]
(us/assert ::storage-content content)
(let [id (uuid/random)
mdata (dissoc object :content :expired-at :touched-at)
(px/with-dispatch executor
(let [id (uuid/random)
result (db/insert! conn :storage-object
{:id id
:size (count content)
:backend (name backend)
:metadata (db/tjson mdata)
:deleted-at (:expired-at object)
:touched-at (:touched-at object)})]
mdata (cond-> (get-metadata params)
(satisfies? impl/IContentHash content)
(assoc :hash (impl/get-hash content)))
(StorageObject. (:id result)
(:size result)
(:created-at result)
(:deleted-at result)
(:touched-at result)
backend
mdata
nil)))
;; NOTE: for now we don't reuse the deleted objects, but in
;; futute we can consider reusing deleted objects if we
;; found a duplicated one and is marked for deletion but
;; still not deleted.
result (when (and (::deduplicate? params)
(:hash mdata)
(:bucket mdata))
(get-database-object-by-hash conn backend (:bucket mdata) (:hash mdata)))
result (or result
(db/insert! conn :storage-object
{:id id
:size (count content)
:backend (name backend)
:metadata (db/tjson mdata)
:deleted-at expired-at
:touched-at touched-at}))]
(StorageObject. (:id result)
(:size result)
(:created-at result)
(:deleted-at result)
(:touched-at result)
backend
mdata
nil))))
(def ^:private sql:retrieve-storage-object
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
@ -129,14 +142,6 @@
(when-let [res (db/exec-one! conn [sql:retrieve-storage-object id])]
(row->storage-object res)))
(def sql:delete-storage-object
"update storage_object set deleted_at=now() where id=?")
(defn- delete-database-object
[{:keys [conn] :as storage} id]
(let [result (db/exec-one! conn [sql:delete-storage-object id])]
(pos? (:next.jdbc/update-count result))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -149,24 +154,24 @@
[url]
(fs/path (java.net.URI. (str url))))
(defn content
([data] (impl/content data nil))
([data size] (impl/content data size)))
(dm/export impl/content)
(dm/export impl/wrap-with-hash)
(defn get-object
[{:keys [conn pool] :as storage} id]
(us/assert ::storage storage)
(-> (assoc storage :conn (or conn pool))
(retrieve-database-object id)))
(p/do
(-> (assoc storage :conn (or conn pool))
(retrieve-database-object id))))
(defn put-object
(defn put-object!
"Creates a new object with the provided content."
[{:keys [pool conn backend] :as storage} {:keys [content] :as params}]
[{:keys [pool conn backend] :as storage} {:keys [::content] :as params}]
(us/assert ::storage storage)
(us/assert ::storage-content content)
(us/assert ::us/keyword backend)
(let [storage (assoc storage :conn (or conn pool))
object (create-database-object storage params)]
(p/let [storage (assoc storage :conn (or conn pool))
object (create-database-object storage params)]
;; Store the data finally on the underlying storage subsystem.
(-> (impl/resolve-backend storage backend)
@ -174,82 +179,78 @@
object))
(defn clone-object
"Creates a clone of the provided object using backend based efficient
method. Always clones objects to the configured default."
[{:keys [pool conn backend] :as storage} object]
(us/assert ::storage storage)
(us/assert ::storage-object object)
(us/assert ::us/keyword backend)
(let [storage (assoc storage :conn (or conn pool))
object* (clone-database-object storage object)]
(if (= (:backend object) (:backend storage))
;; if the source and destination backends are the same, we
;; proceed to use the fast path with specific copy
;; implementation on backend.
(-> (impl/resolve-backend storage (:backend storage))
(impl/copy-object object object*))
;; if the source and destination backends are different, we just
;; need to obtain the streams and proceed full copy of the data
(with-open [is (-> (impl/resolve-backend storage (:backend object))
(impl/get-object-data object))]
(-> (impl/resolve-backend storage (:backend storage))
(impl/put-object object* (impl/content is (:size object))))))
object*))
(defn touch-object!
"Mark object as touched."
[{:keys [pool conn] :as storage} object-or-id]
(p/do
(let [id (if (storage-object? object-or-id) (:id object-or-id) object-or-id)
res (db/update! (or conn pool) :storage-object
{:touched-at (dt/now)}
{:id id}
{:return-keys false})]
(pos? (:next.jdbc/update-count res)))))
(defn get-object-data
"Return an input stream instance of the object content."
[{:keys [pool conn] :as storage} object]
(us/assert ::storage storage)
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object))
(impl/get-object-data object))))
(p/do
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object))
(impl/get-object-data object)))))
(defn get-object-bytes
"Returns a byte array of object content."
[{:keys [pool conn] :as storage} object]
(us/assert ::storage storage)
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object))
(impl/get-object-bytes object))))
(p/do
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object))
(impl/get-object-bytes object)))))
(defn get-object-url
([storage object]
(get-object-url storage object nil))
([{:keys [conn pool] :as storage} object options]
(us/assert ::storage storage)
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object))
(impl/get-object-url object options)))))
(p/do
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object))
(impl/get-object-url object options))))))
(defn get-object-path
"Get the Path to the object. Only works with `:fs` type of
storages."
[storage object]
(let [backend (impl/resolve-backend storage (:backend object))]
(when (not= :fs (:type backend))
(ex/raise :type :internal
:code :operation-not-allowed
:hint "get-object-path only works with fs type backends"))
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (impl/get-object-url backend object nil)
(file-url->path)))))
(p/do
(let [backend (impl/resolve-backend storage (:backend object))]
(when (not= :fs (:type backend))
(ex/raise :type :internal
:code :operation-not-allowed
:hint "get-object-path only works with fs type backends"))
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now)))
(p/-> (impl/get-object-url backend object nil) file-url->path)))))
(defn del-object
[{:keys [conn pool] :as storage} id-or-obj]
(defn del-object!
[{:keys [conn pool] :as storage} object-or-id]
(us/assert ::storage storage)
(-> (assoc storage :conn (or conn pool))
(delete-database-object (if (uuid? id-or-obj) id-or-obj (:id id-or-obj)))))
(p/do
(let [id (if (storage-object? object-or-id) (:id object-or-id) object-or-id)
res (db/update! (or conn pool) :storage-object
{:deleted-at (dt/now)}
{:id id}
{:return-keys false})]
(pos? (:next.jdbc/update-count res)))))
(dm/export impl/resolve-backend)
(dm/export impl/calculate-hash)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Garbage Collection: Permanently delete objects
@ -263,7 +264,7 @@
(s/def ::min-age ::dt/duration)
(defmethod ig/pre-init-spec ::gc-deleted-task [_]
(s/keys :req-un [::storage ::db/pool ::min-age]))
(s/keys :req-un [::storage ::db/pool ::min-age ::wrk/executor]))
(defmethod ig/init-key ::gc-deleted-task
[_ {:keys [pool storage min-age] :as cfg}]
@ -284,7 +285,7 @@
(delete-in-bulk [conn backend ids]
(let [backend (impl/resolve-backend storage backend)
backend (assoc backend :conn conn)]
(impl/del-objects-in-bulk backend ids)))]
@(impl/del-objects-in-bulk backend ids)))]
(fn [_]
(db/with-atomic [conn pool]
@ -317,18 +318,23 @@
;; Garbage Collection: Analyze touched objects
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; This task is part of the garbage collection of storage objects and is responsible on analyzing the touched
;; objects and mark them for deletion if corresponds.
;; This task is part of the garbage collection of storage objects and
;; is responsible on analyzing the touched objects and mark them for
;; deletion if corresponds.
;;
;; For example: when file_media_object is deleted, the depending storage_object are marked as touched. This
;; means that some files that depend on a concrete storage_object are no longer exists and maybe this
;; storage_object is no longer necessary and can be eligible for elimination. This task periodically analyzes
;; touched objects and mark them as freeze (means that has other references and the object is still valid) or
;; deleted (no more references to this object so is ready to be deleted).
;; For example: when file_media_object is deleted, the depending
;; storage_object are marked as touched. This means that some files
;; that depend on a concrete storage_object are no longer exists and
;; maybe this storage_object is no longer necessary and can be
;; eligible for elimination. This task periodically analyzes touched
;; objects and mark them as freeze (means that has other references
;; and the object is still valid) or deleted (no more references to
;; this object so is ready to be deleted).
(declare sql:retrieve-touched-objects-chunk)
(declare sql:retrieve-file-media-object-nrefs)
(declare sql:retrieve-team-font-variant-nrefs)
(declare sql:retrieve-profile-nrefs)
(defmethod ig/pre-init-spec ::gc-touched-task [_]
(s/keys :req-un [::db/pool]))
@ -341,6 +347,9 @@
(has-file-media-object-nrefs? [conn id]
(-> (db/exec-one! conn [sql:retrieve-file-media-object-nrefs id id]) :nrefs pos?))
(has-profile-nrefs? [conn id]
(-> (db/exec-one! conn [sql:retrieve-profile-nrefs id id]) :nrefs pos?))
(mark-freeze-in-bulk [conn ids]
(db/exec-one! conn ["update storage_object set touched_at=null where id = ANY(?)"
(db/create-array conn "uuid" ids)]))
@ -349,17 +358,30 @@
(db/exec-one! conn ["update storage_object set deleted_at=now(), touched_at=null where id = ANY(?)"
(db/create-array conn "uuid" ids)]))
;; NOTE: A getter that retrieves the key witch will be used
;; for group ids; previoulsy we have no value, then we
;; introduced the `:reference` prop, and then it is renamed
;; to `:bucket` and now is string instead. This is
;; implemented in this way for backward comaptibilty.
;; NOTE: we use the "file-media-object" as default value for
;; backward compatibility because when we deploy it we can
;; have old backend instances running in the same time as
;; the new one and we can still have storage-objects created
;; without bucket value. And we know that if it does not
;; have value, it means :file-media-object.
(get-bucket [{:keys [metadata]}]
(or (some-> metadata :bucket)
(some-> metadata :reference d/name)
"file-media-object"))
(retrieve-touched-chunk [conn cursor]
(let [rows (->> (db/exec! conn [sql:retrieve-touched-objects-chunk cursor])
(mapv #(d/update-when % :metadata db/decode-transit-pgobject)))
kw (fn [o] (if (keyword? o) o (keyword o)))]
(mapv #(d/update-when % :metadata db/decode-transit-pgobject)))]
(when (seq rows)
[(-> rows peek :created-at)
;; NOTE: we use the :file-media-object as default value for backward compatibility because when we
;; deploy it we can have old backend instances running in the same time as the new one and we can
;; still have storage-objects created without reference value. And we know that if it does not
;; have value, it means :file-media-object.
(d/group-by' #(or (some-> % :metadata :reference kw) :file-media-object) :id rows)])))
(d/group-by' get-bucket :id rows)])))
(retrieve-touched [conn]
(->> (d/iteration (fn [cursor]
@ -389,13 +411,14 @@
(loop [to-freeze 0
to-delete 0
groups (retrieve-touched conn)]
(if-let [[reference ids] (first groups)]
(let [[f d] (case reference
:file-media-object (process-objects! conn has-file-media-object-nrefs? ids)
:team-font-variant (process-objects! conn has-team-font-variant-nrefs? ids)
(if-let [[bucket ids] (first groups)]
(let [[f d] (case bucket
"file-media-object" (process-objects! conn has-file-media-object-nrefs? ids)
"team-font-variant" (process-objects! conn has-team-font-variant-nrefs? ids)
"profile" (process-objects! conn has-profile-nrefs? ids)
(ex/raise :type :internal
:code :unexpected-unknown-reference
:hint (format "unknown reference %s" (pr-str reference))))]
:hint (dm/fmt "unknown reference %" bucket)))]
(recur (+ to-freeze f)
(+ to-delete d)
(rest groups)))
@ -419,3 +442,7 @@
(select count(*) from team_font_variant where woff2_file_id = ?) +
(select count(*) from team_font_variant where otf_file_id = ?) +
(select count(*) from team_font_variant where ttf_file_id = ?)) as nrefs")
(def sql:retrieve-profile-nrefs
"select ((select count(*) from profile where photo_id = ?) +
(select count(*) from team where photo_id = ?)) as nrefs")

View file

@ -10,7 +10,8 @@
[app.db :as db]
[app.storage.impl :as impl]
[clojure.spec.alpha :as s]
[integrant.core :as ig])
[integrant.core :as ig]
[promesa.exec :as px])
(:import
java.io.ByteArrayInputStream))
@ -30,26 +31,23 @@
;; --- API IMPL
(defmethod impl/put-object :db
[{:keys [conn] :as storage} {:keys [id] :as object} content]
(let [data (impl/slurp-bytes content)]
(db/insert! conn :storage-data {:id id :data data})
object))
(defmethod impl/copy-object :db
[{:keys [conn] :as storage} src-object dst-object]
(db/exec-one! conn ["insert into storage_data (id, data) select ? as id, data from storage_data where id=?"
(:id dst-object)
(:id src-object)]))
[{:keys [conn executor] :as storage} {:keys [id] :as object} content]
(px/with-dispatch executor
(let [data (impl/slurp-bytes content)]
(db/insert! conn :storage-data {:id id :data data})
object)))
(defmethod impl/get-object-data :db
[{:keys [conn] :as backend} {:keys [id] :as object}]
(let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
(ByteArrayInputStream. (:data result))))
[{:keys [conn executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
(ByteArrayInputStream. (:data result)))))
(defmethod impl/get-object-bytes :db
[{:keys [conn] :as backend} {:keys [id] :as object}]
(let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
(:data result)))
[{:keys [conn executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
(:data result))))
(defmethod impl/get-object-url :db
[_ _]

View file

@ -14,7 +14,8 @@
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.core :as fs]
[integrant.core :as ig])
[integrant.core :as ig]
[promesa.exec :as px])
(:import
java.io.InputStream
java.io.OutputStream
@ -47,62 +48,57 @@
;; --- API IMPL
(defmethod impl/put-object :fs
[backend {:keys [id] :as object} content]
(let [base (fs/path (:directory backend))
path (fs/path (impl/id->path id))
full (fs/normalize (fs/join base path))]
(when-not (fs/exists? (fs/parent full))
(fs/create-dir (fs/parent full)))
(with-open [^InputStream src (io/input-stream content)
^OutputStream dst (io/output-stream full)]
(io/copy src dst))))
(defmethod impl/copy-object :fs
[backend src-object dst-object]
(let [base (fs/path (:directory backend))
path (fs/path (impl/id->path (:id dst-object)))
full (fs/normalize (fs/join base path))]
(when-not (fs/exists? (fs/parent full))
(fs/create-dir (fs/parent full)))
(with-open [^InputStream src (impl/get-object-data backend src-object)
^OutputStream dst (io/output-stream full)]
(io/copy src dst))))
[{:keys [executor] :as backend} {:keys [id] :as object} content]
(px/with-dispatch executor
(let [base (fs/path (:directory backend))
path (fs/path (impl/id->path id))
full (fs/normalize (fs/join base path))]
(when-not (fs/exists? (fs/parent full))
(fs/create-dir (fs/parent full)))
(with-open [^InputStream src (io/input-stream content)
^OutputStream dst (io/output-stream full)]
(io/copy src dst)))))
(defmethod impl/get-object-data :fs
[backend {:keys [id] :as object}]
(let [^Path base (fs/path (:directory backend))
^Path path (fs/path (impl/id->path id))
^Path full (fs/normalize (fs/join base path))]
(when-not (fs/exists? full)
(ex/raise :type :internal
:code :filesystem-object-does-not-exists
:path (str full)))
(io/input-stream full)))
[{:keys [executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [^Path base (fs/path (:directory backend))
^Path path (fs/path (impl/id->path id))
^Path full (fs/normalize (fs/join base path))]
(when-not (fs/exists? full)
(ex/raise :type :internal
:code :filesystem-object-does-not-exists
:path (str full)))
(io/input-stream full))))
(defmethod impl/get-object-bytes :fs
[backend object]
(fs/slurp-bytes (impl/get-object-data backend object)))
[{:keys [executor] :as backend} object]
(px/with-dispatch executor
(fs/slurp-bytes (impl/get-object-data backend object))))
(defmethod impl/get-object-url :fs
[{:keys [uri] :as backend} {:keys [id] :as object} _]
(update uri :path
(fn [existing]
(if (str/ends-with? existing "/")
(str existing (impl/id->path id))
(str existing "/" (impl/id->path id))))))
[{:keys [uri executor] :as backend} {:keys [id] :as object} _]
(px/with-dispatch executor
(update uri :path
(fn [existing]
(if (str/ends-with? existing "/")
(str existing (impl/id->path id))
(str existing "/" (impl/id->path id)))))))
(defmethod impl/del-object :fs
[backend {:keys [id] :as object}]
(let [base (fs/path (:directory backend))
path (fs/path (impl/id->path id))
path (fs/join base path)]
(Files/deleteIfExists ^Path path)))
[{:keys [executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [base (fs/path (:directory backend))
path (fs/path (impl/id->path id))
path (fs/join base path)]
(Files/deleteIfExists ^Path path))))
(defmethod impl/del-objects-in-bulk :fs
[backend ids]
(let [base (fs/path (:directory backend))]
(doseq [id ids]
(let [path (fs/path (impl/id->path id))
path (fs/join base path)]
(Files/deleteIfExists ^Path path)))))
[{:keys [executor] :as backend} ids]
(px/with-dispatch executor
(let [base (fs/path (:directory backend))]
(doseq [id ids]
(let [path (fs/path (impl/id->path id))
path (fs/join base path)]
(Files/deleteIfExists ^Path path))))))

View file

@ -7,17 +7,20 @@
(ns app.storage.impl
"Storage backends abstraction layer."
(:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.uuid :as uuid]
[buddy.core.codecs :as bc]
[clojure.java.io :as io]
[cuerdas.core :as str])
[buddy.core.hash :as bh]
[clojure.java.io :as io])
(:import
java.nio.ByteBuffer
java.util.UUID
java.io.ByteArrayInputStream
java.io.InputStream
java.nio.file.Files))
java.nio.file.Files
org.apache.commons.io.input.BoundedInputStream
))
;; --- API Definition
@ -29,14 +32,6 @@
:code :invalid-storage-backend
:context cfg))
(defmulti copy-object (fn [cfg _ _] (:type cfg)))
(defmethod copy-object :default
[cfg _ _]
(ex/raise :type :internal
:code :invalid-storage-backend
:context cfg))
(defmulti get-object-data (fn [cfg _] (:type cfg)))
(defmethod get-object-data :default
@ -106,63 +101,26 @@
:code :invalid-id-type
:hint "id should be string or uuid")))
(defprotocol IContentObject
(size [_] "get object size"))
(defprotocol IContentObject)
(defprotocol IContentHash
(get-hash [_] "get precalculated hash"))
(defn- path->content
[path]
(let [size (Files/size path)]
(reify
IContentObject
io/IOFactory
(make-reader [_ opts]
(io/make-reader path opts))
(make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ opts]
(io/make-input-stream path opts))
(make-output-stream [_ _]
(throw (UnsupportedOperationException. "not implemented")))
clojure.lang.Counted
(count [_] size)
java.lang.AutoCloseable
(close [_]))))
(defn string->content
[^String v]
(let [data (.getBytes v "UTF-8")
bais (ByteArrayInputStream. ^bytes data)]
(reify
IContentObject
io/IOFactory
(make-reader [_ opts]
(io/make-reader bais opts))
(make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ opts]
(io/make-input-stream bais opts))
(make-output-stream [_ _]
(throw (UnsupportedOperationException. "not implemented")))
clojure.lang.Counted
(count [_]
(alength data))
java.lang.AutoCloseable
(close [_]))))
(defn- input-stream->content
[^InputStream is size]
(defn- make-content
[^InputStream is ^long size]
(reify
IContentObject
(size [_] size)
io/IOFactory
(make-reader [_ opts]
(io/make-reader is opts))
(make-reader [this opts]
(io/make-reader this opts))
(make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ opts]
(io/make-input-stream is opts))
(make-input-stream [_ _]
(doto (BoundedInputStream. is size)
(.setPropagateClose false)))
(make-output-stream [_ _]
(throw (UnsupportedOperationException. "not implemented")))
@ -178,26 +136,63 @@
([data size]
(cond
(instance? java.nio.file.Path data)
(path->content data)
(make-content (io/input-stream data)
(Files/size data))
(instance? java.io.File data)
(path->content (.toPath ^java.io.File data))
(content (.toPath ^java.io.File data) nil)
(instance? String data)
(string->content data)
(let [data (.getBytes data "UTF-8")
bais (ByteArrayInputStream. ^bytes data)]
(make-content bais (alength data)))
(bytes? data)
(input-stream->content (ByteArrayInputStream. ^bytes data) (alength ^bytes data))
(let [size (alength ^bytes data)
bais (ByteArrayInputStream. ^bytes data)]
(make-content bais size))
(instance? InputStream data)
(do
(when-not size
(throw (UnsupportedOperationException. "size should be provided on InputStream")))
(input-stream->content data size))
(make-content data size))
:else
(throw (UnsupportedOperationException. "type not supported")))))
(defn wrap-with-hash
  "Decorate an existing content object with a precomputed `hash`
  (exposed via IContentHash/get-hash). All other operations — size,
  count, io, close — delegate to the wrapped `content` unchanged.
  Throws if `content` does not satisfy the required protocols."
  [content ^String hash]
  (when-not (satisfies? IContentObject content)
    (throw (UnsupportedOperationException. "`content` should be an instance of IContentObject")))
  (when-not (satisfies? io/IOFactory content)
    (throw (UnsupportedOperationException. "`content` should be an instance of IOFactory")))
  (reify
    IContentObject
    (size [_] (size content))
    IContentHash
    (get-hash [_] hash)
    io/IOFactory
    (make-reader [_ opts]
      (io/make-reader content opts))
    (make-writer [_ opts]
      (io/make-writer content opts))
    (make-input-stream [_ opts]
      (io/make-input-stream content opts))
    (make-output-stream [_ opts]
      (io/make-output-stream content opts))
    clojure.lang.Counted
    (count [_] (count content))
    java.lang.AutoCloseable
    ;; Closing the wrapper closes the underlying content object.
    (close [_]
      (.close ^java.lang.AutoCloseable content))))
(defn content?
  "Predicate: true when `v` is a content object (satisfies
  IContentObject), as produced by `content` and related constructors."
  [v]
  (satisfies? IContentObject v))
@ -209,15 +204,29 @@
(io/copy input output)
(.toByteArray output)))
(defn resolve-backend
  "Look up `backend-id` in the storage `:backends` registry and return
  the backend config enriched with a db connection (`:conn`, taken from
  the explicit conn or falling back to the pool) and its own `:id`.
  Returns nil when `backend-id` is nil; raises when the id is non-nil
  but not configured."
  [{:keys [conn pool] :as storage} backend-id]
  (when backend-id
    (let [backend (get-in storage [:backends backend-id])]
      (when-not backend
        (ex/raise :type :internal
                  :code :backend-not-configured
                  :hint (str/fmt "backend '%s' not configured" backend-id)))
      (assoc backend
             :conn (or conn pool)
             :id backend-id))))
(defn calculate-hash
  "Compute a blake2b-256 digest of `path-or-stream` and return it as a
  hex string prefixed with \"blake2b:\" (the storage hash format).
  Accepts an InputStream directly, or anything coercible by
  clojure.java.io/input-stream (path, file, bytes, ...)."
  [path-or-stream]
  (let [result (cond
                 (instance? InputStream path-or-stream)
                 ;; NOTE(review): the stream is reset after hashing so the
                 ;; caller can re-read it — this assumes mark/reset support
                 ;; (e.g. ByteArrayInputStream/BufferedInputStream); a plain
                 ;; stream would throw here. TODO confirm with callers.
                 (let [result (-> (bh/blake2b-256 path-or-stream)
                                  (bc/bytes->hex))]
                   (.reset path-or-stream)
                   result)

                 :else
                 ;; Non-stream inputs: open, hash and close our own stream.
                 (with-open [is (io/input-stream path-or-stream)]
                   (-> (bh/blake2b-256 is)
                       (bc/bytes->hex))))]
    (str "blake2b:" result)))
(defn resolve-backend
  "Look up `backend-id` in the storage `:backends` registry and return
  the backend config enriched with the shared `:executor`, a db
  connection (`:conn`, explicit conn or pool fallback) and its `:id`.
  Raises :backend-not-configured when the id is unknown."
  [{:keys [conn pool executor] :as storage} backend-id]
  (let [backend (get-in storage [:backends backend-id])]
    (when-not backend
      (ex/raise :type :internal
                :code :backend-not-configured
                :hint (dm/fmt "backend '%' not configured" backend-id)))
    (assoc backend
           :executor executor
           :conn (or conn pool)
           :id backend-id)))

View file

@ -13,36 +13,42 @@
[app.common.uri :as u]
[app.storage.impl :as impl]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[integrant.core :as ig])
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px])
(:import
java.time.Duration
java.io.InputStream
java.nio.ByteBuffer
java.time.Duration
java.util.Collection
software.amazon.awssdk.core.sync.RequestBody
java.util.Optional
java.util.concurrent.Semaphore
org.reactivestreams.Subscriber
org.reactivestreams.Subscription
software.amazon.awssdk.core.ResponseBytes
;; software.amazon.awssdk.core.ResponseInputStream
software.amazon.awssdk.core.async.AsyncRequestBody
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
software.amazon.awssdk.regions.Region
software.amazon.awssdk.services.s3.S3Client
software.amazon.awssdk.services.s3.S3AsyncClient
software.amazon.awssdk.services.s3.model.Delete
software.amazon.awssdk.services.s3.model.CopyObjectRequest
software.amazon.awssdk.services.s3.model.DeleteObjectRequest
software.amazon.awssdk.services.s3.model.DeleteObjectsRequest
software.amazon.awssdk.services.s3.model.DeleteObjectsResponse
software.amazon.awssdk.services.s3.model.DeleteObjectRequest
software.amazon.awssdk.services.s3.model.GetObjectRequest
software.amazon.awssdk.services.s3.model.ObjectIdentifier
software.amazon.awssdk.services.s3.model.PutObjectRequest
software.amazon.awssdk.services.s3.model.S3Error
;; software.amazon.awssdk.services.s3.model.GetObjectResponse
software.amazon.awssdk.services.s3.presigner.S3Presigner
software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest
software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest
))
software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest))
(declare put-object)
(declare copy-object)
(declare get-object-bytes)
(declare get-object-data)
(declare get-object-url)
@ -59,7 +65,7 @@
(s/def ::endpoint ::us/string)
(defmethod ig/pre-init-spec ::backend [_]
(s/keys :opt-un [::region ::bucket ::prefix ::endpoint]))
(s/keys :opt-un [::region ::bucket ::prefix ::endpoint ::wrk/executor]))
(defmethod ig/prep-key ::backend
[_ {:keys [prefix] :as cfg}]
@ -75,12 +81,18 @@
(let [client (build-s3-client cfg)
presigner (build-s3-presigner cfg)]
(assoc cfg
:client client
:client @client
:presigner presigner
:type :s3))))
:type :s3
::close-fn #(.close ^java.lang.AutoCloseable client)))))
(defmethod ig/halt-key! ::backend
[_ {:keys [::close-fn]}]
(when (fn? close-fn)
(px/run! close-fn)))
(s/def ::type ::us/keyword)
(s/def ::client #(instance? S3Client %))
(s/def ::client #(instance? S3AsyncClient %))
(s/def ::presigner #(instance? S3Presigner %))
(s/def ::backend
(s/keys :req-un [::region ::bucket ::client ::type ::presigner]
@ -92,10 +104,6 @@
[backend object content]
(put-object backend object content))
(defmethod impl/copy-object :s3
[backend src-object dst-object]
(copy-object backend src-object dst-object))
(defmethod impl/get-object-data :s3
[backend object]
(get-object-data backend object))
@ -118,21 +126,44 @@
;; --- HELPERS
;; Number of Netty event-loop threads used by the async S3 http client.
(def default-eventloop-threads 4)

;; Default timeout applied to connection acquisition/connect/read/write
;; on the S3 http client.
(def default-timeout
  (dt/duration {:seconds 30}))

;; Coerce a keyword/string region identifier into the AWS SDK Region type.
(defn- ^Region lookup-region
  [region]
  (Region/of (name region)))
(defn build-s3-client
[{:keys [region endpoint]}]
(if (string? endpoint)
(let [uri (java.net.URI. endpoint)]
(.. (S3Client/builder)
(endpointOverride uri)
(region (lookup-region region))
(build)))
(.. (S3Client/builder)
(region (lookup-region region))
(build))))
[{:keys [region endpoint executor]}]
(let [hclient (.. (NettyNioAsyncHttpClient/builder)
(eventLoopGroupBuilder (.. (SdkEventLoopGroup/builder)
(numberOfThreads (int default-eventloop-threads))))
(connectionAcquisitionTimeout default-timeout)
(connectionTimeout default-timeout)
(readTimeout default-timeout)
(writeTimeout default-timeout)
(build))
client (.. (S3AsyncClient/builder)
(asyncConfiguration (.. (ClientAsyncConfiguration/builder)
(advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR
executor)
(build)))
(httpClient hclient)
(region (lookup-region region)))]
(when-let [uri (some-> endpoint (java.net.URI.))]
(.endpointOverride client uri))
(let [client (.build client)]
(reify
clojure.lang.IDeref
(deref [_] client)
java.lang.AutoCloseable
(close [_]
(.close hclient)
(.close client))))))
(defn build-s3-presigner
[{:keys [region endpoint]}]
@ -146,58 +177,83 @@
(region (lookup-region region))
(build))))
(defn- make-request-body
  "Adapt `content` (anything accepted by clojure.java.io/input-stream
  that is also clojure.lang.Counted) into an AWS AsyncRequestBody.

  The stream is consumed on a dedicated daemon thread; the semaphore
  converts subscriber demand (`request`) into read permits — one 64KiB
  buffer emitted per permit. On cancel the writer thread is interrupted
  and one permit is released so it can observe the interruption."
  [content]
  (let [^InputStream is (io/input-stream content)
        buff-size (* 1024 64)
        sem (Semaphore. 0)
        writer-fn (fn [s]
                    (try
                      (loop []
                        ;; Block until the subscriber has requested data.
                        (.acquire sem 1)
                        (let [buffer (byte-array buff-size)
                              ;; Use readNBytes instead of a bare `.read`:
                              ;; `.read` may legally return fewer bytes than
                              ;; requested before EOF (common on network
                              ;; streams), which would terminate this loop
                              ;; early and truncate the upload. readNBytes
                              ;; only returns a short count at end of stream.
                              readed (.readNBytes is buffer 0 buff-size)]
                          (when (pos? readed)
                            (.onNext ^Subscriber s (ByteBuffer/wrap buffer 0 readed)))
                          ;; A completely filled buffer means more data may
                          ;; remain; a short count means EOF was reached.
                          (when (= readed buff-size)
                            (recur))))
                      (.onComplete ^Subscriber s)
                      (catch Throwable cause
                        (.onError ^Subscriber s cause))
                      (finally
                        (.close ^InputStream is))))]
    (reify
      AsyncRequestBody
      (contentLength [_]
        ;; `content` is Counted, so this reports the exact payload size.
        (Optional/of (long (count content))))
      (^void subscribe [_ ^Subscriber s]
        (let [thread (Thread. #(writer-fn s))]
          (.setDaemon thread true)
          (.setName thread "penpot/storage:s3")
          (.start thread)
          (.onSubscribe s (reify Subscription
                            (cancel [_]
                              (.interrupt thread)
                              (.release sem 1))
                            (request [_ n]
                              (.release sem (int n))))))))))
(defn put-object
[{:keys [client bucket prefix]} {:keys [id] :as object} content]
(let [path (str prefix (impl/id->path id))
mdata (meta object)
mtype (:content-type mdata "application/octet-stream")
request (.. (PutObjectRequest/builder)
(bucket bucket)
(contentType mtype)
(key path)
(build))]
(p/let [path (str prefix (impl/id->path id))
mdata (meta object)
mtype (:content-type mdata "application/octet-stream")
request (.. (PutObjectRequest/builder)
(bucket bucket)
(contentType mtype)
(key path)
(build))]
(with-open [^InputStream is (io/input-stream content)]
(let [content (RequestBody/fromInputStream is (count content))]
(.putObject ^S3Client client
^PutObjectRequest request
^RequestBody content)))))
(defn copy-object
[{:keys [client bucket prefix]} src-object dst-object]
(let [source-path (str prefix (impl/id->path (:id src-object)))
source-mdata (meta src-object)
source-mtype (:content-type source-mdata "application/octet-stream")
dest-path (str prefix (impl/id->path (:id dst-object)))
request (.. (CopyObjectRequest/builder)
(copySource (u/query-encode (str bucket "/" source-path)))
(destinationBucket bucket)
(destinationKey dest-path)
(contentType source-mtype)
(build))]
(.copyObject ^S3Client client ^CopyObjectRequest request)))
(let [content (make-request-body content)]
(.putObject ^S3AsyncClient client
^PutObjectRequest request
^AsyncRequestBody content))))
(defn get-object-data
[{:keys [client bucket prefix]} {:keys [id]}]
(let [gor (.. (GetObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))
obj (.getObject ^S3Client client ^GetObjectRequest gor)
;; rsp (.response ^ResponseInputStream obj)
;; len (.contentLength ^GetObjectResponse rsp)
]
(p/let [gor (.. (GetObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))
obj (.getObject ^S3AsyncClient client ^GetObjectRequest gor)
;; rsp (.response ^ResponseInputStream obj)
;; len (.contentLength ^GetObjectResponse rsp)
]
(io/input-stream obj)))
(defn get-object-bytes
[{:keys [client bucket prefix]} {:keys [id]}]
(let [gor (.. (GetObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))
obj (.getObjectAsBytes ^S3Client client ^GetObjectRequest gor)]
(p/let [gor (.. (GetObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))
obj (.getObjectAsBytes ^S3AsyncClient client ^GetObjectRequest gor)]
(.asByteArray ^ResponseBytes obj)))
(def default-max-age
@ -206,42 +262,43 @@
(defn get-object-url
[{:keys [presigner bucket prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}]
(us/assert dt/duration? max-age)
(let [gor (.. (GetObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))
gopr (.. (GetObjectPresignRequest/builder)
(signatureDuration ^Duration max-age)
(getObjectRequest ^GetObjectRequest gor)
(build))
pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)]
(u/uri (str (.url ^PresignedGetObjectRequest pgor)))))
(p/do
(let [gor (.. (GetObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))
gopr (.. (GetObjectPresignRequest/builder)
(signatureDuration ^Duration max-age)
(getObjectRequest ^GetObjectRequest gor)
(build))
pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)]
(u/uri (str (.url ^PresignedGetObjectRequest pgor))))))
(defn del-object
[{:keys [bucket client prefix]} {:keys [id] :as obj}]
(let [dor (.. (DeleteObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))]
(.deleteObject ^S3Client client
(p/let [dor (.. (DeleteObjectRequest/builder)
(bucket bucket)
(key (str prefix (impl/id->path id)))
(build))]
(.deleteObject ^S3AsyncClient client
^DeleteObjectRequest dor)))
(defn del-object-in-bulk
[{:keys [bucket client prefix]} ids]
(let [oids (map (fn [id]
(.. (ObjectIdentifier/builder)
(key (str prefix (impl/id->path id)))
(build)))
ids)
delc (.. (Delete/builder)
(objects ^Collection oids)
(build))
dor (.. (DeleteObjectsRequest/builder)
(bucket bucket)
(delete ^Delete delc)
(build))
dres (.deleteObjects ^S3Client client
^DeleteObjectsRequest dor)]
(p/let [oids (map (fn [id]
(.. (ObjectIdentifier/builder)
(key (str prefix (impl/id->path id)))
(build)))
ids)
delc (.. (Delete/builder)
(objects ^Collection oids)
(build))
dor (.. (DeleteObjectsRequest/builder)
(bucket bucket)
(delete ^Delete delc)
(build))
dres (.deleteObjects ^S3AsyncClient client
^DeleteObjectsRequest dor)]
(when (.hasErrors ^DeleteObjectsResponse dres)
(let [errors (seq (.errors ^DeleteObjectsResponse dres))]
(ex/raise :type :internal

View file

@ -9,10 +9,9 @@
of deleted objects."
(:require
[app.common.logging :as l]
[app.config :as cf]
[app.db :as db]
[app.media :as media]
[app.storage :as sto]
[app.storage.impl :as simpl]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
@ -56,16 +55,12 @@
;; --- IMPL: file deletion
(defmethod delete-objects "file"
[{:keys [conn max-age table storage] :as cfg}]
(let [sql (str/fmt sql:delete-objects
{:table table :limit 50})
result (db/exec! conn [sql max-age])
backend (simpl/resolve-backend storage (cf/get :fdata-storage-backend))]
[{:keys [conn max-age table] :as cfg}]
(let [sql (str/fmt sql:delete-objects {:table table :limit 50})
result (db/exec! conn [sql max-age])]
(doseq [{:keys [id] :as item} result]
(l/trace :hint "delete object" :table table :id id)
(when backend
(simpl/del-object backend item)))
(l/trace :hint "delete object" :table table :id id))
(count result)))
@ -76,13 +71,13 @@
(let [sql (str/fmt sql:delete-objects
{:table table :limit 50})
fonts (db/exec! conn [sql max-age])
storage (assoc storage :conn conn)]
storage (media/configure-assets-storage storage conn)]
(doseq [{:keys [id] :as font} fonts]
(l/trace :hint "delete object" :table table :id id)
(some->> (:woff1-file-id font) (sto/del-object storage))
(some->> (:woff2-file-id font) (sto/del-object storage))
(some->> (:otf-file-id font) (sto/del-object storage))
(some->> (:ttf-file-id font) (sto/del-object storage)))
(some->> (:woff1-file-id font) (sto/touch-object! storage) deref)
(some->> (:woff2-file-id font) (sto/touch-object! storage) deref)
(some->> (:otf-file-id font) (sto/touch-object! storage) deref)
(some->> (:ttf-file-id font) (sto/touch-object! storage) deref))
(count fonts)))
;; --- IMPL: team deletion
@ -96,7 +91,7 @@
(doseq [{:keys [id] :as team} teams]
(l/trace :hint "delete object" :table table :id id)
(some->> (:photo-id team) (sto/del-object storage)))
(some->> (:photo-id team) (sto/touch-object! storage) deref))
(count teams)))
@ -135,7 +130,7 @@
;; Mark as deleted the storage object related with the photo-id
;; field.
(some->> (:photo-id profile) (sto/del-object storage))
(some->> (:photo-id profile) (sto/touch-object! storage) deref)
;; And finally, permanently delete the profile.
(db/delete! conn :profile {:id id}))

View file

@ -15,7 +15,6 @@
[app.config :as cfg]
[app.db :as db]
[app.util.async :refer [thread-sleep]]
[app.util.http :as http]
[app.util.json :as json]
[clojure.spec.alpha :as s]
[integrant.core :as ig]))
@ -27,6 +26,7 @@
(declare get-stats)
(declare send!)
(s/def ::http-client fn?)
(s/def ::version ::us/string)
(s/def ::uri ::us/string)
(s/def ::instance-id ::us/uuid)
@ -34,7 +34,7 @@
(s/keys :req-un [::instance-id]))
(defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool ::version ::uri ::sprops]))
(s/keys :req-un [::db/pool ::http-client ::version ::uri ::sprops]))
(defmethod ig/init-key ::handler
[_ {:keys [pool sprops version] :as cfg}]
@ -47,7 +47,8 @@
stats (-> (get-stats pool version)
(assoc :instance-id instance-id))]
(when send?
(send! stats cfg))
(send! cfg stats))
stats)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -55,11 +56,12 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- send!
[data cfg]
(let [response (http/send! {:method :post
:uri (:uri cfg)
:headers {"content-type" "application/json"}
:body (json/write-str data)})]
[{:keys [http-client uri] :as cfg} data]
(let [response (http-client {:method :post
:uri uri
:headers {"content-type" "application/json"}
:body (json/write-str data)}
{:sync? true})]
(when (> (:status response) 206)
(ex/raise :type :internal
:code :invalid-response

View file

@ -7,8 +7,7 @@
(ns app.util.async
(:require
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[promesa.exec :as px])
[clojure.spec.alpha :as s])
(:import
java.util.concurrent.Executor))
@ -61,10 +60,6 @@
`(a/thread-call (^:once fn* [] (try ~@body (catch Exception e# e#))))
`(thread-call ~executor (^:once fn* [] ~@body))))
(defmacro with-dispatch
[executor & body]
`(px/submit! ~executor (^:once fn* [] ~@body)))
(defn batch
[in {:keys [max-batch-size
max-batch-age

View file

@ -1,27 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL
(ns app.util.http
"Http client abstraction layer."
(:require
[java-http-clj.core :as http]
[promesa.exec :as px]))
;; Lazily-built shared HTTP client (java-http-clj over java.net.http):
;; 10s connect timeout, follows redirects, runs callbacks on the
;; default promesa executor. Delayed so no client is built unless used.
(def default-client
  (delay (http/build-client {:executor @px/default-executor
                             :connect-timeout 10000 ;; 10s
                             :follow-redirects :always})))

;; Perform a GET of `url`; `opts` may override the client or the
;; response coercion (defaults: shared client, body as string).
(defn get!
  [url opts]
  (let [opts' (merge {:client @default-client :as :string} opts)]
    (http/get url nil opts')))

;; Send an arbitrary request map; optional `opts` are merged over the
;; defaults (shared client, body decoded as string).
(defn send!
  ([req]
   (http/send req {:client @default-client :as :string}))
  ([req opts]
   (http/send req (merge {:client @default-client :as :string} opts))))

View file

@ -18,7 +18,6 @@
java.nio.ByteBuffer
org.eclipse.jetty.io.EofException))
(declare decode-beat)
(declare encode-beat)
(declare process-heartbeat)

View file

@ -23,47 +23,77 @@
[promesa.exec :as px])
(:import
java.util.concurrent.ExecutorService
java.util.concurrent.Executors
java.util.concurrent.ForkJoinPool
java.util.concurrent.ForkJoinWorkerThread
java.util.concurrent.Future
java.util.concurrent.ForkJoinPool$ForkJoinWorkerThreadFactory
java.util.concurrent.atomic.AtomicLong
java.util.concurrent.Executors))
java.util.concurrent.ForkJoinWorkerThread
java.util.concurrent.ScheduledExecutorService
java.util.concurrent.ThreadFactory
java.util.concurrent.atomic.AtomicLong))
(set! *warn-on-reflection* true)
(s/def ::executor #(instance? ExecutorService %))
(s/def ::scheduler #(instance? ScheduledExecutorService %))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Executor
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private get-fj-thread-factory)
(declare ^:private get-thread-factory)
(s/def ::prefix keyword?)
(s/def ::parallelism ::us/integer)
(s/def ::min-threads ::us/integer)
(s/def ::max-threads ::us/integer)
(s/def ::idle-timeout ::us/integer)
(defmethod ig/pre-init-spec ::executor [_]
(s/keys :req-un [::prefix ::parallelism]))
(s/keys :req-un [::prefix]
:opt-un [::parallelism]))
(defn- get-thread-factory
;; Build the main executor: a bounded ForkJoinPool when `parallelism`
;; is given, otherwise an unbounded cached thread pool. Threads are
;; named with the configured prefix via the factory helpers.
(defmethod ig/init-key ::executor
  [_ {:keys [parallelism prefix]}]
  (let [counter (AtomicLong. 0)]
    (if parallelism
      (ForkJoinPool. (int parallelism) (get-fj-thread-factory prefix counter) nil false)
      (Executors/newCachedThreadPool (get-thread-factory prefix counter)))))

;; Graceful shutdown: previously submitted tasks are allowed to finish.
(defmethod ig/halt-key! ::executor
  [_ instance]
  (.shutdown ^ExecutorService instance))

(defmethod ig/pre-init-spec ::scheduler [_]
  (s/keys :req-un [::prefix]
          :opt-un [::parallelism]))

;; Build the scheduled executor used for delayed/periodic tasks;
;; defaults to a single thread.
(defmethod ig/init-key ::scheduler
  [_ {:keys [parallelism prefix] :or {parallelism 1}}]
  (let [counter (AtomicLong. 0)]
    (px/scheduled-pool parallelism (get-thread-factory prefix counter))))

;; Graceful shutdown of the scheduler pool.
(defmethod ig/halt-key! ::scheduler
  [_ instance]
  (.shutdown ^ExecutorService instance))
(defn- get-fj-thread-factory
^ForkJoinPool$ForkJoinWorkerThreadFactory
[prefix counter]
(reify ForkJoinPool$ForkJoinWorkerThreadFactory
(newThread [_ pool]
(let [^ForkJoinWorkerThread thread (.newThread ForkJoinPool/defaultForkJoinWorkerThreadFactory pool)
^String thread-name (str (name prefix) "-" (.getAndIncrement ^AtomicLong counter))]
^String thread-name (str "penpot/" (name prefix) "-" (.getAndIncrement ^AtomicLong counter))]
(.setName thread thread-name)
thread))))
(defmethod ig/init-key ::executor
[_ {:keys [parallelism prefix]}]
(let [counter (AtomicLong. 0)]
(ForkJoinPool. (int parallelism) (get-thread-factory prefix counter) nil false)))
(defmethod ig/halt-key! ::executor
[_ instance]
(.shutdown ^ForkJoinPool instance))
;; ThreadFactory producing daemon threads named
;; "penpot/<prefix>-<n>", where <n> comes from the shared counter.
;; Daemon threads do not block JVM shutdown.
(defn- get-thread-factory
  ^ThreadFactory
  [prefix counter]
  (reify ThreadFactory
    (newThread [_ runnable]
      (doto (Thread. runnable)
        (.setDaemon true)
        (.setName (str "penpot/" (name prefix) "-" (.getAndIncrement ^AtomicLong counter)))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Executor Monitor
@ -72,11 +102,11 @@
(s/def ::executors (s/map-of keyword? ::executor))
(defmethod ig/pre-init-spec ::executors-monitor [_]
(s/keys :req-un [::executors ::mtx/metrics]))
(s/keys :req-un [::executors ::scheduler ::mtx/metrics]))
(defmethod ig/init-key ::executors-monitor
[_ {:keys [executors metrics interval] :or {interval 3000}}]
(letfn [(log-stats [scheduler state]
[_ {:keys [executors metrics interval scheduler] :or {interval 3000}}]
(letfn [(log-stats [state]
(doseq [[key ^ForkJoinPool executor] executors]
(let [labels (into-array String [(name key)])
running (.getRunningThreadCount executor)
@ -97,18 +127,17 @@
:queued queued
:steals steals)))
(when-not (.isShutdown scheduler)
(px/schedule! scheduler interval (partial log-stats scheduler state))))]
(when (and (not (.isShutdown scheduler))
(not (:shutdown @state)))
(px/schedule! scheduler interval (partial log-stats state))))]
(let [scheduler (px/scheduled-pool 1)
state (atom {})]
(px/schedule! scheduler interval (partial log-stats scheduler state))
{::scheduler scheduler
::state state})))
(let [state (atom {})]
(px/schedule! scheduler interval (partial log-stats state))
{:state state})))
(defmethod ig/halt-key! ::executors-monitor
[_ {:keys [::scheduler]}]
(.shutdown ^ExecutorService scheduler))
[_ {:keys [state]}]
(swap! state assoc :shutdown true))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Worker
@ -252,7 +281,6 @@
(db/exec-one! conn [sql:insert-new-task id (d/name task) props (d/name queue) priority max-retries interval])
id))
;; --- RUNNER
(def ^:private
@ -392,13 +420,12 @@
[{:keys [executor] :as cfg}]
(aa/thread-call executor #(event-loop-fn* cfg)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Scheduler
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare schedule-task)
(declare synchronize-schedule)
(declare schedule-cron-task)
(declare synchronize-cron-entries)
(s/def ::fn (s/or :var var? :fn fn?))
(s/def ::id keyword?)
@ -406,79 +433,85 @@
(s/def ::props (s/nilable map?))
(s/def ::task keyword?)
(s/def ::scheduled-task
(s/def ::cron-task
(s/keys :req-un [::cron ::task]
:opt-un [::props ::id]))
(s/def ::schedule (s/coll-of (s/nilable ::scheduled-task)))
(s/def ::entries (s/coll-of (s/nilable ::cron-task)))
(defmethod ig/pre-init-spec ::scheduler [_]
(s/keys :req-un [::executor ::db/pool ::schedule ::tasks]))
(defmethod ig/pre-init-spec ::cron [_]
(s/keys :req-un [::executor ::scheduler ::db/pool ::entries ::tasks]))
(defmethod ig/init-key ::scheduler
[_ {:keys [schedule tasks pool] :as cfg}]
(let [scheduler (Executors/newScheduledThreadPool (int 1))]
(if (db/read-only? pool)
(l/warn :hint "scheduler not started, db is read-only")
(let [schedule (->> schedule
(filter some?)
;; If id is not defined, use the task as id.
(map (fn [{:keys [id task] :as item}]
(if (some? id)
(assoc item :id (d/name id))
(assoc item :id (d/name task)))))
(map (fn [{:keys [task] :as item}]
(let [f (get tasks task)]
(when-not f
(ex/raise :type :internal
:code :task-not-found
:hint (str/fmt "task %s not configured" task)))
(-> item
(dissoc :task)
(assoc :fn f))))))
cfg (assoc cfg
:scheduler scheduler
:schedule schedule)]
(l/info :hint "scheduler started"
:registred-tasks (count schedule))
(defmethod ig/init-key ::cron
[_ {:keys [entries tasks pool] :as cfg}]
(if (db/read-only? pool)
(l/warn :hint "scheduler not started, db is read-only")
(let [running (atom #{})
entries (->> entries
(filter some?)
;; If id is not defined, use the task as id.
(map (fn [{:keys [id task] :as item}]
(if (some? id)
(assoc item :id (d/name id))
(assoc item :id (d/name task)))))
(map (fn [{:keys [task] :as item}]
(let [f (get tasks task)]
(when-not f
(ex/raise :type :internal
:code :task-not-found
:hint (str/fmt "task %s not configured" task)))
(-> item
(dissoc :task)
(assoc :fn f))))))
(synchronize-schedule cfg)
(run! (partial schedule-task cfg)
(filter some? schedule))))
cfg (assoc cfg :entries entries :running running)]
(reify
java.lang.AutoCloseable
(close [_]
(.shutdownNow ^ExecutorService scheduler)))))
(l/info :hint "cron started" :registred-tasks (count entries))
(synchronize-cron-entries cfg)
(defmethod ig/halt-key! ::scheduler
(->> (filter some? entries)
(run! (partial schedule-cron-task cfg)))
(reify
clojure.lang.IDeref
(deref [_] @running)
java.lang.AutoCloseable
(close [_]
(doseq [item @running]
(when-not (.isDone ^Future item)
(.cancel ^Future item true))))))))
(defmethod ig/halt-key! ::cron
[_ instance]
(.close ^java.lang.AutoCloseable instance))
(when instance
(.close ^java.lang.AutoCloseable instance)))
(def sql:upsert-scheduled-task
(def sql:upsert-cron-task
"insert into scheduled_task (id, cron_expr)
values (?, ?)
on conflict (id)
do update set cron_expr=?")
(defn- synchronize-schedule-item
(defn- synchronize-cron-item
[conn {:keys [id cron]}]
(let [cron (str cron)]
(l/debug :action "initialize scheduled task" :id id :cron cron)
(db/exec-one! conn [sql:upsert-scheduled-task id cron cron])))
(db/exec-one! conn [sql:upsert-cron-task id cron cron])))
(defn- synchronize-schedule
(defn- synchronize-cron-entries
  "Upsert the cron expression of every configured cron entry into the
  scheduled_task table, inside a single transaction.

  NOTE: the cfg map produced by the ::cron init-key carries the entries
  under `:entries` (see the `(assoc cfg :entries entries ...)` above);
  destructuring `schedule` here yielded nil and silently synchronized
  nothing."
  [{:keys [pool entries]}]
  (db/with-atomic [conn pool]
    (run! (partial synchronize-cron-item conn) entries)))
(def sql:lock-scheduled-task
(def sql:lock-cron-task
"select id from scheduled_task where id=? for update skip locked")
(defn- execute-scheduled-task
(defn- execute-cron-task
[{:keys [executor pool] :as cfg} {:keys [id] :as task}]
(letfn [(run-task [conn]
(when (db/exec-one! conn [sql:lock-scheduled-task (d/name id)])
(when (db/exec-one! conn [sql:lock-cron-task (d/name id)])
(l/debug :action "execute scheduled task" :id id)
((:fn task) task)))
@ -491,10 +524,10 @@
::l/context (get-error-context cause task)
:task-id id
:cause cause))))]
(try
(px/run! executor handle-task)
(finally
(schedule-task cfg task)))))
(px/run! executor #(schedule-cron-task cfg task))
nil))
(defn- ms-until-valid
[cron]
@ -503,10 +536,16 @@
next (dt/next-valid-instant-from cron now)]
(inst-ms (dt/diff now next))))
(defn- schedule-task
[{:keys [scheduler] :as cfg} {:keys [cron] :as task}]
(let [ms (ms-until-valid cron)]
(px/schedule! scheduler ms (partial execute-scheduled-task cfg task))))
;; Transducer that drops futures which have already completed; used to
;; keep the `running` set from growing without bound.
(def ^:private
  xf-without-done
  (remove #(.isDone ^Future %)))

;; Schedule the next execution of a cron `task` at its next valid
;; instant, and track the resulting future in the `running` atom
;; (pruning finished futures at the same time) so it can be cancelled
;; on shutdown.
(defn- schedule-cron-task
  [{:keys [scheduler running] :as cfg} {:keys [cron] :as task}]
  (let [ft (px/schedule! scheduler
                         (ms-until-valid cron)
                         (partial execute-cron-task cfg task))]
    (swap! running #(into #{ft} xf-without-done %))))
;; --- INSTRUMENTATION

View file

@ -174,12 +174,18 @@
:type :image
:metadata {:id (:id fmo1)}}}]})]
;; Check that the referenced storage objects on the file-media-objects
;; are the same because of the deduplication feature.
(t/is (= (:media-id fmo1) (:media-id fmo2)))
(t/is (= (:thumbnail-id fmo1) (:thumbnail-id fmo2)))
;; If we launch gc-touched-task, we should have 4 items to freeze.
;; If we launch gc-touched-task, we should have 2 items to
;; freeze because of the deduplication (we have uploaded 2 times
;; 2 two same files).
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 4 (:freeze res)))
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
;; run the task immediately
@ -205,27 +211,26 @@
(t/is (= 1 (count rows))))
;; The underlying storage objects are still available.
(t/is (some? (sto/get-object storage (:media-id fmo2))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? (sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1))))
(t/is (some? @(sto/get-object storage (:media-id fmo2))))
(t/is (some? @(sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? @(sto/get-object storage (:media-id fmo1))))
(t/is (some? @(sto/get-object storage (:thumbnail-id fmo1))))
;; now, we have deleted the unused file-media-object, if we
;; execute the touched-gc task, we should see that two of them
;; are marked to be deleted.
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
;; Finally, check that some of the objects that are marked as
;; deleted we are unable to retrieve them using standard storage
;; public api.
(t/is (nil? (sto/get-object storage (:media-id fmo2))))
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? (sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1))))
(t/is (some? @(sto/get-object storage (:media-id fmo2))))
(t/is (some? @(sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? @(sto/get-object storage (:media-id fmo1))))
(t/is (some? @(sto/get-object storage (:thumbnail-id fmo1))))
)))

View file

@ -23,9 +23,9 @@
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain"
:other "data"})
profile (th/create-profile* 1 {:is-active true})
project (th/create-project* 1 {:team-id (:default-team-id profile)
:profile-id (:id profile)})
@ -92,15 +92,17 @@
))))
(t/deftest duplicate-file-with-deleted-rels
(t/deftest duplicate-file-with-deleted-relations
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain"
:other "data"})
profile (th/create-profile* 1 {:is-active true})
project (th/create-project* 1 {:team-id (:default-team-id profile)
:profile-id (:id profile)})
file1 (th/create-file* 1 {:profile-id (:id profile)
:project-id (:id project)})
file2 (th/create-file* 2 {:profile-id (:id profile)
@ -112,16 +114,10 @@
mobj (th/create-file-media-object* {:file-id (:id file1)
:is-local false
:media-id (:id sobject)})
:media-id (:id sobject)})]
_ (th/mark-file-deleted* {:id (:id file2)})
_ (sto/del-object storage (:id sobject))]
(th/update-file*
{:file-id (:id file1)
:profile-id (:id profile)
:changes [{:type :add-media
:object (select-keys mobj [:id :width :height :mtype :name])}]})
(th/mark-file-deleted* {:id (:id file2)})
@(sto/del-object! storage sobject)
(let [data {::th/type :duplicate-file
:profile-id (:id profile)
@ -140,7 +136,7 @@
(t/is (= "file 1 (copy)" (:name result)))
(t/is (not= (:id file1) (:id result)))
;; Check that the deleted library is not duplicated
;; Check that there are no relation to a deleted library
(let [[item :as rows] (db/query th/*pool* :file-library-rel {:file-id (:id result)})]
(t/is (= 0 (count rows))))
@ -158,9 +154,10 @@
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain"
:other "data"})
profile (th/create-profile* 1 {:is-active true})
project (th/create-project* 1 {:team-id (:default-team-id profile)
:profile-id (:id profile)})
@ -176,6 +173,7 @@
:is-local false
:media-id (:id sobject)})]
(th/update-file*
{:file-id (:id file1)
:profile-id (:id profile)
@ -229,9 +227,9 @@
(t/deftest duplicate-project-with-deleted-files
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain"
:other "data"})
profile (th/create-profile* 1 {:is-active true})
project (th/create-project* 1 {:team-id (:default-team-id profile)
:profile-id (:id profile)})
@ -247,12 +245,6 @@
:is-local false
:media-id (:id sobject)})]
(th/update-file*
{:file-id (:id file1)
:profile-id (:id profile)
:changes [{:type :add-media
:object (select-keys mobj [:id :width :height :mtype :name])}]})
(th/mark-file-deleted* {:id (:id file1)})
(let [data {::th/type :duplicate-project
@ -432,7 +424,7 @@
;; project1 now should have 2 file
(let [[item1 item2 :as rows] (db/query th/*pool* :file {:project-id (:id project1)}
{:order-by [:created-at]})]
{:order-by [:created-at]})]
;; (clojure.pprint/pprint rows)
(t/is (= 2 (count rows)))
(t/is (= (:id item1) (:id file2))))
@ -610,6 +602,3 @@
(t/is (= (:library-file-id item1) (:id file2))))
)))

View file

@ -41,8 +41,8 @@
(t/is (uuid? media-id))
(t/is (uuid? thumbnail-id))
(let [storage (:app.storage/storage th/*system*)
mobj1 (sto/get-object storage media-id)
mobj2 (sto/get-object storage thumbnail-id)]
mobj1 @(sto/get-object storage media-id)
mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1))
(t/is (sto/storage-object? mobj2))
(t/is (= 122785 (:size mobj1)))
@ -79,8 +79,8 @@
(t/is (uuid? media-id))
(t/is (uuid? thumbnail-id))
(let [storage (:app.storage/storage th/*system*)
mobj1 (sto/get-object storage media-id)
mobj2 (sto/get-object storage thumbnail-id)]
mobj1 @(sto/get-object storage media-id)
mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1))
(t/is (sto/storage-object? mobj2))
(t/is (= 312043 (:size mobj1)))

View file

@ -37,69 +37,74 @@
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"
:other "data"})]
object @(sto/put-object! storage {::sto/content content
:content-type "text/plain"
:other "data"})]
(t/is (sto/storage-object? object))
(t/is (fs/path? (sto/get-object-path storage object)))
(t/is (fs/path? @(sto/get-object-path storage object)))
(t/is (nil? (:expired-at object)))
(t/is (= :tmp (:backend object)))
(t/is (= "data" (:other (meta object))))
(t/is (= "text/plain" (:content-type (meta object))))
(t/is (= "content" (slurp (sto/get-object-data storage object))))
(t/is (= "content" (slurp (sto/get-object-path storage object))))
(t/is (= "content" (slurp @(sto/get-object-data storage object))))
(t/is (= "content" (slurp @(sto/get-object-path storage object))))
))
(t/deftest put-and-retrieve-expired-object
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"
:expired-at (dt/in-future {:seconds 1})})]
object @(sto/put-object! storage {::sto/content content
::sto/expired-at (dt/in-future {:seconds 1})
:content-type "text/plain"
})]
(t/is (sto/storage-object? object))
(t/is (dt/instant? (:expired-at object)))
(t/is (dt/is-after? (:expired-at object) (dt/now)))
(t/is (= object (sto/get-object storage (:id object))))
(t/is (= object @(sto/get-object storage (:id object))))
(th/sleep 1000)
(t/is (nil? (sto/get-object storage (:id object))))
(t/is (nil? (sto/get-object-data storage object)))
(t/is (nil? (sto/get-object-url storage object)))
(t/is (nil? (sto/get-object-path storage object)))
(t/is (nil? @(sto/get-object storage (:id object))))
(t/is (nil? @(sto/get-object-data storage object)))
(t/is (nil? @(sto/get-object-url storage object)))
(t/is (nil? @(sto/get-object-path storage object)))
))
(t/deftest put-and-delete-object
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"
:expired-at (dt/in-future {:seconds 1})})]
object @(sto/put-object! storage {::sto/content content
:content-type "text/plain"
:expired-at (dt/in-future {:seconds 1})})]
(t/is (sto/storage-object? object))
(t/is (true? (sto/del-object storage object)))
(t/is (true? @(sto/del-object! storage object)))
;; retrieving the same object should be not nil because the
;; deletion is not immediate
(t/is (some? (sto/get-object-data storage object)))
(t/is (some? (sto/get-object-url storage object)))
(t/is (some? (sto/get-object-path storage object)))
(t/is (some? @(sto/get-object-data storage object)))
(t/is (some? @(sto/get-object-url storage object)))
(t/is (some? @(sto/get-object-path storage object)))
;; But you can't retrieve the object again because in database is
;; marked as deleted/expired.
(t/is (nil? (sto/get-object storage (:id object))))
(t/is (nil? @(sto/get-object storage (:id object))))
))
(t/deftest test-deleted-gc-task
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object1 (sto/put-object storage {:content content
:content-type "text/plain"
:expired-at (dt/now)})
object2 (sto/put-object storage {:content content
:content-type "text/plain"
:expired-at (dt/in-past {:hours 2})})]
content1 (sto/content "content1")
content2 (sto/content "content2")
object1 @(sto/put-object! storage {::sto/content content1
::sto/expired-at (dt/now)
:content-type "text/plain"
})
object2 @(sto/put-object! storage {::sto/content content2
::sto/expired-at (dt/in-past {:hours 2})
:content-type "text/plain"
})]
(th/sleep 200)
(let [task (:app.storage/gc-deleted-task th/*system*)
@ -147,22 +152,24 @@
(t/is (uuid? (:media-id result-1)))
(t/is (uuid? (:media-id result-2)))
(t/is (= (:media-id result-1) (:media-id result-2)))
;; now we proceed to manually delete one file-media-object
(db/exec-one! th/*pool* ["delete from file_media_object where id = ?" (:id result-1)])
;; check that we still have all the storage objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object"])]
(t/is (= 4 (:count res))))
(t/is (= 2 (:count res))))
;; now check if the storage objects are touched
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])]
(t/is (= 4 (:count res))))
(t/is (= 2 (:count res))))
;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 2 (:freeze res)))
(t/is (= 2 (:delete res))))
(t/is (= 0 (:delete res))))
;; now check that there are no touched objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])]
@ -170,8 +177,8 @@
;; now check that all objects are marked to be deleted
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 2 (:count res))))
)))
(t/is (= 0 (:count res))))
)))
(t/deftest test-touched-gc-task-2
@ -249,7 +256,7 @@
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 4 (:count res))))))))
(t/deftest test-touched-gc-task-without-delete
(t/deftest test-touched-gc-task-3
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
prof (th/create-profile* 1)
@ -285,9 +292,23 @@
;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 4 (:freeze res)))
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
;; check that we have all object in the db
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is null"])]
(t/is (= 4 (:count res)))))))
(t/is (= 2 (:count res)))))
;; now we proceed to manually delete all team_font_variant
(db/exec-one! th/*pool* ["delete from file_media_object"])
;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
;; check that we have all no objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is null"])]
(t/is (= 0 (:count res))))))

View file

@ -27,7 +27,7 @@
(task-fn nil)
(t/is (:called? @mock))
(let [[data] (-> @mock :call-args)]
(let [[_ data] (-> @mock :call-args)]
(t/is (contains? data :total-fonts))
(t/is (contains? data :total-users))
(t/is (contains? data :total-projects))

View file

@ -61,7 +61,7 @@
:app.http.oauth/gitlab
:app.http.oauth/github
:app.http.oauth/all
:app.worker/scheduler
:app.worker/cron
:app.worker/worker)
(d/deep-merge
{:app.tasks.file-media-gc/handler {:max-age (dt/duration 300)}}))

View file

@ -79,12 +79,6 @@
(stringify-data val)])))
data)))
#?(:clj
(defn set-context!
[data]
(ThreadContext/putAll (data->context-map data))
nil))
#?(:clj
(defmacro with-context
[data & body]
@ -173,12 +167,11 @@
~level-sym (get-level ~level)]
(when (enabled? ~logger-sym ~level-sym)
~(if async
`(->> (ThreadContext/getImmutableContext)
(send-off logging-agent
(fn [_# cdata#]
(with-context (-> {:id (uuid/next)} (into cdata#) (into ~context))
(->> (or ~raw (build-map-message ~props))
(write-log! ~logger-sym ~level-sym ~cause))))))
`(send-off logging-agent
(fn [_#]
(with-context (into {:id (uuid/next)} ~context)
(->> (or ~raw (build-map-message ~props))
(write-log! ~logger-sym ~level-sym ~cause)))))
`(let [message# (or ~raw (build-map-message ~props))]
(write-log! ~logger-sym ~level-sym ~cause message#))))))))

View file

@ -131,8 +131,7 @@
(defn resolve-file-media
([media]
(resolve-file-media media false))
([{:keys [id]} thumbnail?]
([{:keys [id] :as media} thumbnail?]
(str (cond-> (u/join public-uri "assets/by-file-media-id/")
(true? thumbnail?) (u/join (str id "/thumbnail"))
(false? thumbnail?) (u/join (str id))))))

View file

@ -7,18 +7,18 @@
(ns app.main.errors
"Generic error handling"
(:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.config :as cf]
[app.main.data.messages :as dm]
[app.main.data.messages :as msg]
[app.main.data.users :as du]
[app.main.sentry :as sentry]
[app.main.store :as st]
[app.util.i18n :refer [tr]]
[app.util.router :as rt]
[app.util.timers :as ts]
[cljs.pprint :refer [pprint]]
[cuerdas.core :as str]
[expound.alpha :as expound]
[fipp.edn :as fpp]
[potok.core :as ptk]))
(defn on-error
@ -33,7 +33,7 @@
:else
(let [hint (ex-message error)
msg (str "Internal Error: " hint)]
msg (dm/str "Internal Error: " hint)]
(sentry/capture-exception error)
(ts/schedule (st/emitf (rt/assign-exception error)))
@ -51,7 +51,7 @@
[_]
(let [msg (tr "errors.auth.unable-to-login")]
(st/emit! (du/logout {:capture-redirect true}))
(ts/schedule 500 (st/emitf (dm/warn msg)))))
(ts/schedule 500 (st/emitf (msg/warn msg)))))
;; That are special case server-errors that should be treated
@ -73,7 +73,7 @@
[error]
(ts/schedule
(st/emitf
(dm/show {:content "Unexpected validation error."
(msg/show {:content "Unexpected validation error."
:type :error
:timeout 3000})))
@ -81,7 +81,7 @@
(js/console.group "Validation Error:")
(ex/ignoring
(js/console.info
(with-out-str (pprint (dissoc error :explain)))))
(with-out-str (fpp/pprint (dissoc error :explain)))))
(when-let [explain (:explain error)]
(js/console.group "Spec explain:")
@ -96,7 +96,7 @@
[_]
(ts/schedule
(st/emitf
(dm/show {:content "SVG is invalid or malformed"
(msg/show {:content "SVG is invalid or malformed"
:type :error
:timeout 3000}))))
@ -104,7 +104,7 @@
[_]
(ts/schedule
(st/emitf
(dm/show {:content "There was an error with the comment"
(msg/show {:content "There was an error with the comment"
:type :error
:timeout 3000}))))
@ -114,15 +114,15 @@
(defmethod ptk/handle-error :assertion
[{:keys [message hint] :as error}]
(let [message (or message hint)
message (str "Internal Assertion Error: " message)
context (str/fmt "ns: '%s'\nname: '%s'\nfile: '%s:%s'"
(:ns error)
(:name error)
(str cf/public-uri "js/cljs-runtime/" (:file error))
(:line error))]
message (dm/str "Internal Assertion Error: " message)
context (dm/fmt "ns: '%'\nname: '%'\nfile: '%:%'"
(:ns error)
(:name error)
(dm/str cf/public-uri "js/cljs-runtime/" (:file error))
(:line error))]
(ts/schedule
(st/emitf
(dm/show {:content "Internal error: assertion."
(msg/show {:content "Internal error: assertion."
:type :error
:timeout 3000})))
@ -138,17 +138,23 @@
(defmethod ptk/handle-error :server-error
[{:keys [data hint] :as error}]
(let [hint (or hint (:hint data) (:message data))
info (with-out-str (pprint data))
msg (str "Internal Server Error: " hint)]
info (with-out-str (fpp/pprint (dissoc data :explain)))
msg (dm/str "Internal Server Error: " hint)]
(ts/schedule
(st/emitf
(dm/show {:content "Something wrong has happened (on backend)."
:type :error
:timeout 3000})))
#(st/emit!
(msg/show {:content "Something wrong has happened (on backend)."
:type :error
:timeout 3000})))
(js/console.group msg)
(js/console.info info)
(when-let [explain (:explain data)]
(js/console.group "Spec explain:")
(js/console.log explain)
(js/console.groupEnd "Spec explain:"))
(js/console.groupEnd msg)))
(defn on-unhandled-error
@ -156,7 +162,7 @@
(if (instance? ExceptionInfo error)
(-> error sentry/capture-exception ex-data ptk/handle-error)
(let [hint (ex-message error)
msg (str "Unhandled Internal Error: " hint)]
msg (dm/str "Unhandled Internal Error: " hint)]
(sentry/capture-exception error)
(ts/schedule (st/emitf (rt/assign-exception error)))
(js/console.group msg)