Mirror of https://github.com/penpot/penpot.git (synced 2025-03-27 23:21:47 -05:00)

Merge pull request #3001 from penpot/niwinz-experiments-2
♻️ Refactor concurrency model (start using JDK19 virtual threads on RPC and WebSockets)

Commit afb09919ed, 82 changed files with 2461 additions and 2500 deletions
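The hunks below migrate RPC, WebSocket, and HTTP handler code from promesa promise chains onto plain blocking calls executed on JDK 19 virtual threads. As a minimal sketch of that general idea (illustrative only, not code from this diff; the names below are hypothetical):

;; Illustrative sketch only (not from this diff): the shape of moving from an
;; async promise chain to direct blocking code on a JDK 19 virtual thread.
(ns example.vthreads)

(defn- fetch-profile
  "Hypothetical stand-in for a blocking database or HTTP call."
  [id]
  (Thread/sleep 50)
  {:id id :name "demo"})

(defn handle-on-vthread
  "Runs the blocking work on a new virtual thread and passes the result to
  `respond`. Thread/startVirtualThread is a JDK 19 preview API, so the JVM
  needs --enable-preview (see the devenv options later in this diff)."
  [id respond]
  (Thread/startVirtualThread
   (fn []
     (respond (fetch-profile id)))))

(comment
  (handle-on-vthread 1 println))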
@@ -2,6 +2,7 @@
{promesa.core/let clojure.core/let
promesa.core/->> clojure.core/->>
promesa.core/-> clojure.core/->
promesa.exec.csp/go-loop clojure.core/loop
rumext.v2/defc clojure.core/defn
rumext.v2/fnc clojure.core/fn
app.common.data/export clojure.core/def
@@ -1,4 +1,7 @@
{:deps
{:mvn/repos
{"sonatype" {:url "https://oss.sonatype.org/content/repositories/snapshots/"}}
:deps
{penpot/common {:local/root "../common"}
org.clojure/clojure {:mvn/version "1.11.1"}
org.clojure/core.async {:mvn/version "1.6.673"}
@@ -19,14 +22,16 @@
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
funcool/yetti
{:git/tag "v9.12"
:git/sha "51646d8"
{:git/tag "v9.15"
:git/sha "aa9b967"
:git/url "https://github.com/funcool/yetti.git"
:exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.847"}
metosin/reitit-core {:mvn/version "0.5.18"}
org.postgresql/postgresql {:mvn/version "42.5.2"}
org.postgresql/postgresql {:mvn/version "42.6.0-SNAPSHOT"}
com.zaxxer/HikariCP {:mvn/version "5.0.1"}
io.whitfin/siphash {:mvn/version "2.0.0"}
@@ -34,7 +39,7 @@
buddy/buddy-hashers {:mvn/version "1.8.158"}
buddy/buddy-sign {:mvn/version "3.4.333"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.2"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.5"}
org.jsoup/jsoup {:mvn/version "1.15.3"}
org.im4java/im4java
@@ -1,9 +1,14 @@
;; Example climit.edn file
;; Required: concurrency
;; Optional: queue-size, omitted means Integer/MAX_VALUE
{:update-file {:concurrency 1 :queue-size 3}
:auth {:concurrency 128}
:process-font {:concurrency 4 :queue-size 32}
:process-image {:concurrency 8 :queue-size 32}
:push-audit-events
{:concurrency 1 :queue-size 3}}
;; Required: permits
;; Optional: queue, omitted means Integer/MAX_VALUE
;; Optional: timeout, omitted means no timeout
;; Note: queue and timeout are mutually exclusive
{:update-file-by-id {:permits 1 :queue 3}
:update-file {:permits 20}
:derive-password {:permits 8}
:process-font {:permits 4 :queue 32}
:process-image {:permits 8 :queue 32}
:submit-audit-events-by-profile
{:permits 1 :queue 3}}
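The new format keys each limiter by :permits plus an optional bounded :queue and :timeout. A minimal sketch of how a permits/queue limiter can behave, assuming a plain java.util.concurrent.Semaphore (this is only an illustration, not Penpot's climit implementation):

;; Illustration only: permits limit concurrent executions, queue bounds how
;; many callers may wait for a permit before being rejected.
(ns example.climit
  (:import java.util.concurrent.Semaphore))

(defn make-limiter
  [{:keys [permits queue] :or {queue Integer/MAX_VALUE}}]
  {:sem   (Semaphore. (int permits) true)
   :queue queue})

(defn run-limited!
  "Runs `f` while holding a permit; rejects when too many callers are queued."
  [{:keys [sem queue]} f]
  (let [^Semaphore sem sem]
    (if (>= (.getQueueLength sem) queue)
      (throw (ex-info "too many requests waiting" {:type :concurrency-limit}))
      (do (.acquire sem)
          (try (f)
               (finally (.release sem)))))))

(comment
  ;; roughly the shape of the :update-file-by-id entry above
  (def update-file-limiter (make-limiter {:permits 1 :queue 3}))
  (run-limited! update-file-limiter #(println "updating file")))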
@@ -3,12 +3,12 @@
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
alwaysWriteExceptions="false" />
alwaysWriteExceptions="true" />
</Console>
<RollingFile name="main" fileName="logs/main.log" filePattern="logs/main-%i.log">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
alwaysWriteExceptions="false" />
alwaysWriteExceptions="true" />
<Policies>
<SizeBasedTriggeringPolicy size="50M"/>
</Policies>
@@ -3,8 +3,9 @@
{:default
[[:default :window "200000/h"]]
#{:command/get-teams}
[[:burst :bucket "5/1/5s"]]
;; #{:command/get-teams}
;; [[:burst :bucket "5/5/5s"]]
#{:command/get-profile}
[[:burst :bucket "60/60/1m"]]}
;; #{:command/get-profile}
;; [[:burst :bucket "60/60/1m"]]
}
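The :bucket entries above carry specs such as "5/1/5s" and "60/60/1m", whose exact semantics are not spelled out in this diff. Assuming a capacity/refill-amount/interval reading of those strings, a token-bucket check looks roughly like the following sketch (illustrative only, not Penpot's rlimit implementation):

;; Illustration only: a tiny token bucket under the assumed
;; capacity/refill-amount/interval reading of the bucket spec.
(ns example.rlimit)

(defn make-bucket
  [{:keys [capacity refill-amount interval-ms]}]
  (atom {:tokens capacity
         :capacity capacity
         :refill-amount refill-amount
         :interval-ms interval-ms
         :last-ts (System/currentTimeMillis)}))

(defn- refill
  [{:keys [tokens capacity refill-amount interval-ms last-ts] :as bucket} now]
  (let [intervals (quot (- now last-ts) interval-ms)]
    (if (pos? intervals)
      (assoc bucket
             :tokens (min capacity (+ tokens (* intervals refill-amount)))
             :last-ts (+ last-ts (* intervals interval-ms)))
      bucket)))

(defn try-acquire!
  "Returns true when a token was taken, false when the caller is rate limited."
  [bucket]
  (let [now (System/currentTimeMillis)]
    (:ok (swap! bucket (fn [state]
                         (let [state (refill state now)]
                           (if (pos? (:tokens state))
                             (-> state (update :tokens dec) (assoc :ok true))
                             (assoc state :ok false))))))))

(comment
  ;; roughly the :command/get-profile "60/60/1m" entry above
  (def profile-bucket (make-bucket {:capacity 60 :refill-amount 60 :interval-ms 60000}))
  (try-acquire! profile-bucket))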
@@ -42,19 +42,40 @@ export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot

#-J-Djdk.virtualThreadScheduler.parallelism=16

export OPTIONS="
-A:jmx-remote -A:dev \
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-J-Djdk.attach.allowAttachSelf \
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
-J-Xms50m \
-J-Xmx1024m \
-J-XX:+UseZGC \
-J-XX:-OmitStackTraceInFastThrow \
-J-XX:+UnlockDiagnosticVMOptions \
-J-XX:+DebugNonSafepoints";
-J-XX:+DebugNonSafepoints \
-J-Djdk.tracePinnedThreads=full \
-J--enable-preview";

# Uncomment for use the ImageMagick v7.x
# Setup HEAP
export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"

# Increase virtual thread pool size
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"

# Disable C2 Compiler
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"

# Disable all compilers
# export OPTIONS="$OPTIONS -J-Xint"

# Setup GC
export OPTIONS="$OPTIONS -J-XX:+UseG1GC"

# Setup GC
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"

# Enable ImageMagick v7.x support
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";

export OPTIONS_EVAL="nil"
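The new devenv options also enable -Djdk.tracePinnedThreads=full, which makes the runtime report virtual threads that get pinned to their carrier thread. A small illustrative snippet (not from this diff) that would trigger such a report, since blocking while holding a monitor pins the virtual thread:

;; Illustration only: blocking inside a synchronized section pins a virtual
;; thread, which -Djdk.tracePinnedThreads=full will print a stack trace for.
(ns example.pinning)

(def lock (Object.))

(defn pinned-example
  []
  (Thread/startVirtualThread
   (fn []
     (locking lock
       ;; sleeping (or any blocking I/O) while holding the monitor pins
       ;; this virtual thread to its carrier thread
       (Thread/sleep 100)))))

(comment
  ;; requires JDK 19 with --enable-preview, as configured above
  (pinned-example))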
@@ -6,15 +6,18 @@

(ns app.auth
(:require
[buddy.hashers :as hashers]))
[buddy.hashers :as hashers]
[promesa.exec :as px]))

(def default-params
{:alg :argon2id
:memory (* 32768 2)
:iterations 5
:parallelism (px/get-available-processors)})

(defn derive-password
[password]
(hashers/derive password
{:alg :argon2id
:memory 16384
:iterations 20
:parallelism 2}))
(hashers/derive password default-params))

(defn verify-password
[attempt password]
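With this change the Argon2id parameters live in default-params, and :parallelism follows the machine's CPU count instead of a hard-coded value. A quick usage sketch (illustrative only):

(comment
  ;; REPL usage of the refactored helpers; the stored hash string is opaque
  (require '[app.auth :as auth])
  (def hashed (auth/derive-password "s3cret"))
  (auth/verify-password "s3cret" hashed))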
@ -17,7 +17,6 @@
|
|||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.http.middleware :as hmw]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
|
@ -25,14 +24,11 @@
|
|||
[app.tokens :as tokens]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.response :as yrs]))
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HELPERS
|
||||
|
@ -166,20 +162,22 @@
|
|||
|
||||
(defn- retrieve-github-email
|
||||
[cfg tdata info]
|
||||
(or (some-> info :email p/resolved)
|
||||
(->> (http/req! cfg
|
||||
{:uri "https://api.github.com/user/emails"
|
||||
:headers {"Authorization" (dm/str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get})
|
||||
(p/map (fn [{:keys [status body] :as response}]
|
||||
(when-not (s/int-in-range? 200 300 status)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-github-emails
|
||||
:hint "unable to retrieve github emails"
|
||||
:http-status status
|
||||
:http-body body))
|
||||
(->> response :body json/decode (filter :primary) first :email))))))
|
||||
(or (some-> info :email)
|
||||
(let [params {:uri "https://api.github.com/user/emails"
|
||||
:headers {"Authorization" (dm/str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get}
|
||||
|
||||
{:keys [status body]} (http/req! cfg params {:sync? true})]
|
||||
|
||||
(when-not (s/int-in-range? 200 300 status)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-github-emails
|
||||
:hint "unable to retrieve github emails"
|
||||
:http-status status
|
||||
:http-body body))
|
||||
|
||||
(->> body json/decode (filter :primary) first :email))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::providers/github [_]
|
||||
(s/keys :req [::http/client]))
|
||||
|
@ -290,80 +288,74 @@
|
|||
:grant-type (:grant_type params)
|
||||
:redirect-uri (:redirect_uri params))
|
||||
|
||||
(->> (http/req! cfg req)
|
||||
(p/map (fn [{:keys [status body] :as res}]
|
||||
(l/trace :hint "access token response"
|
||||
:status status
|
||||
:body body)
|
||||
(if (= status 200)
|
||||
(let [data (json/decode body)]
|
||||
{:token (get data :access_token)
|
||||
:type (get data :token_type)})
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-token
|
||||
:http-status status
|
||||
:http-body body)))))))
|
||||
(let [{:keys [status body]} (http/req! cfg req {:sync? true})]
|
||||
(l/trace :hint "access token response" :status status :body body)
|
||||
(if (= status 200)
|
||||
(let [data (json/decode body)]
|
||||
{:token (get data :access_token)
|
||||
:type (get data :token_type)})
|
||||
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-token
|
||||
:hint "unable to retrieve token"
|
||||
:http-status status
|
||||
:http-body body)))))
|
||||
|
||||
(defn- retrieve-user-info
|
||||
[{:keys [provider] :as cfg} tdata]
|
||||
(letfn [(retrieve []
|
||||
(l/trace :hint "request user info"
|
||||
:uri (:user-uri provider)
|
||||
:token (obfuscate-string (:token tdata))
|
||||
:token-type (:type tdata))
|
||||
(http/req! cfg
|
||||
{:uri (:user-uri provider)
|
||||
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get}))
|
||||
(validate-response [response]
|
||||
(l/trace :hint "user info response"
|
||||
:status (:status response)
|
||||
:body (:body response))
|
||||
(when-not (s/int-in-range? 200 300 (:status response))
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-user-info
|
||||
:hint "unable to retrieve user info"
|
||||
:http-status (:status response)
|
||||
:http-body (:body response)))
|
||||
response)
|
||||
|
||||
(get-email [info]
|
||||
(letfn [(get-email [info]
|
||||
;; Allow providers hook into this for custom email
|
||||
;; retrieval method.
|
||||
(if-let [get-email-fn (:get-email-fn provider)]
|
||||
(get-email-fn tdata info)
|
||||
(let [attr-kw (cf/get :oidc-email-attr :email)]
|
||||
(p/resolved (get info attr-kw)))))
|
||||
(get info attr-kw))))
|
||||
|
||||
(get-name [info]
|
||||
(let [attr-kw (cf/get :oidc-name-attr :name)]
|
||||
(get info attr-kw)))
|
||||
|
||||
(process-response [response]
|
||||
(p/let [info (-> response :body json/decode)
|
||||
email (get-email info)]
|
||||
(let [info (-> response :body json/decode)
|
||||
email (get-email info)]
|
||||
{:backend (:name provider)
|
||||
:email email
|
||||
:fullname (or (get-name info) email)
|
||||
:props (->> (dissoc info :name :email)
|
||||
(qualify-props provider))}))
|
||||
(qualify-props provider))}))]
|
||||
|
||||
(validate-info [info]
|
||||
(l/trace :hint "authentication info" :info info)
|
||||
(when-not (s/valid? ::info info)
|
||||
(l/warn :hint "received incomplete profile info object (please set correct scopes)"
|
||||
:info (pr-str info))
|
||||
(ex/raise :type :internal
|
||||
:code :incomplete-user-info
|
||||
:hint "incomplete user info"
|
||||
:info info))
|
||||
info)]
|
||||
(l/trace :hint "request user info"
|
||||
:uri (:user-uri provider)
|
||||
:token (obfuscate-string (:token tdata))
|
||||
:token-type (:type tdata))
|
||||
|
||||
(->> (retrieve)
|
||||
(p/fmap validate-response)
|
||||
(p/mcat process-response)
|
||||
(p/fmap validate-info))))
|
||||
(let [request {:uri (:user-uri provider)
|
||||
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get}
|
||||
response (http/req! cfg request {:sync? true})]
|
||||
|
||||
(l/trace :hint "user info response"
|
||||
:status (:status response)
|
||||
:body (:body response))
|
||||
|
||||
(when-not (s/int-in-range? 200 300 (:status response))
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-user-info
|
||||
:hint "unable to retrieve user info"
|
||||
:http-status (:status response)
|
||||
:http-body (:body response)))
|
||||
|
||||
(let [info (process-response response)]
|
||||
(l/trace :hint "authentication info" :info info)
|
||||
|
||||
(when-not (s/valid? ::info info)
|
||||
(l/warn :hint "received incomplete profile info object (please set correct scopes)" :info info)
|
||||
(ex/raise :type :internal
|
||||
:code :incomplete-user-info
|
||||
:hint "incomplete user info"
|
||||
:info info))
|
||||
info))))
|
||||
|
||||
(s/def ::backend ::us/not-empty-string)
|
||||
(s/def ::email ::us/not-empty-string)
|
||||
|
@ -377,61 +369,55 @@
|
|||
|
||||
(defn get-info
|
||||
[{:keys [provider] :as cfg} {:keys [params] :as request}]
|
||||
(letfn [(validate-oidc [info]
|
||||
;; If the provider is OIDC, we can proceed to check
|
||||
;; roles if they are defined.
|
||||
(when (and (= "oidc" (:name provider))
|
||||
(seq (:roles provider)))
|
||||
(let [provider-roles (into #{} (:roles provider))
|
||||
profile-roles (let [attr (cf/get :oidc-roles-attr :roles)
|
||||
roles (get info attr)]
|
||||
(cond
|
||||
(string? roles) (into #{} (str/words roles))
|
||||
(vector? roles) (into #{} roles)
|
||||
:else #{}))]
|
||||
(when-let [error (get params :error)]
|
||||
(ex/raise :type :internal
|
||||
:code :error-on-retrieving-code
|
||||
:error-id error
|
||||
:error-desc (get params :error_description)))
|
||||
|
||||
;; check if profile has a configured set of roles
|
||||
(when-not (set/subset? provider-roles profile-roles)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-auth
|
||||
:hint "not enough permissions"))))
|
||||
info)
|
||||
(let [state (get params :state)
|
||||
code (get params :code)
|
||||
state (tokens/verify (::main/props cfg) {:token state :iss :oauth})
|
||||
token (retrieve-access-token cfg code)
|
||||
info (retrieve-user-info cfg token)]
|
||||
|
||||
(post-process [state info]
|
||||
(cond-> info
|
||||
(some? (:invitation-token state))
|
||||
(assoc :invitation-token (:invitation-token state))
|
||||
;; If the provider is OIDC, we can proceed to check
|
||||
;; roles if they are defined.
|
||||
(when (and (= "oidc" (:name provider))
|
||||
(seq (:roles provider)))
|
||||
(let [provider-roles (into #{} (:roles provider))
|
||||
profile-roles (let [attr (cf/get :oidc-roles-attr :roles)
|
||||
roles (get info attr)]
|
||||
(cond
|
||||
(string? roles) (into #{} (str/words roles))
|
||||
(vector? roles) (into #{} roles)
|
||||
:else #{}))]
|
||||
|
||||
;; If state token comes with props, merge them. The state token
|
||||
;; props can contain pm_ and utm_ prefixed query params.
|
||||
(map? (:props state))
|
||||
(update :props merge (:props state))))]
|
||||
;; check if profile has a configured set of roles
|
||||
(when-not (set/subset? provider-roles profile-roles)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-auth
|
||||
:hint "not enough permissions"))))
|
||||
|
||||
(when-let [error (get params :error)]
|
||||
(ex/raise :type :internal
|
||||
:code :error-on-retrieving-code
|
||||
:error-id error
|
||||
:error-desc (get params :error_description)))
|
||||
(cond-> info
|
||||
(some? (:invitation-token state))
|
||||
(assoc :invitation-token (:invitation-token state))
|
||||
|
||||
(let [state (get params :state)
|
||||
code (get params :code)
|
||||
state (tokens/verify (::main/props cfg) {:token state :iss :oauth})]
|
||||
(-> (p/resolved code)
|
||||
(p/then #(retrieve-access-token cfg %))
|
||||
(p/then #(retrieve-user-info cfg %))
|
||||
(p/then' validate-oidc)
|
||||
(p/then' (partial post-process state))))))
|
||||
;; If state token comes with props, merge them. The state token
|
||||
;; props can contain pm_ and utm_ prefixed query params.
|
||||
(map? (:props state))
|
||||
(update :props merge (:props state)))))
|
||||
|
||||
(defn- get-profile
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} info]
|
||||
(px/with-dispatch executor
|
||||
(with-open [conn (db/open pool)]
|
||||
(some->> (:email info)
|
||||
(profile/get-profile-by-email conn)))))
|
||||
[{:keys [::db/pool] :as cfg} info]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(some->> (:email info)
|
||||
(profile/get-profile-by-email conn))))
|
||||
|
||||
(defn- redirect-response
|
||||
[uri]
|
||||
(yrs/response :status 302 :headers {"location" (str uri)}))
|
||||
{::yrs/status 302
|
||||
::yrs/headers {"location" (str uri)}})
|
||||
|
||||
(defn- generate-error-redirect
|
||||
[_ error]
|
||||
|
@ -458,11 +444,11 @@
|
|||
(ex/raise :type :restriction
|
||||
:code :profile-blocked))
|
||||
|
||||
(audit/submit! cfg {:type "command"
|
||||
:name "login-with-password"
|
||||
:profile-id (:id profile)
|
||||
:ip-addr (audit/parse-client-ip request)
|
||||
:props (audit/profile->props profile)})
|
||||
(audit/submit! cfg {::audit/type "command"
|
||||
::audit/name "login-with-oidc"
|
||||
::audit/profile-id (:id profile)
|
||||
::audit/ip-addr (audit/parse-client-ip request)
|
||||
::audit/props (audit/profile->props profile)})
|
||||
|
||||
(->> (redirect-response uri)
|
||||
(sxf request)))
|
||||
|
@ -478,6 +464,7 @@
|
|||
uri (-> (u/uri (cf/get :public-uri))
|
||||
(assoc :path "/#/auth/register/validate")
|
||||
(assoc :query (u/map->query-string params)))]
|
||||
|
||||
(redirect-response uri))))
|
||||
|
||||
(defn- auth-handler
|
||||
|
@ -489,27 +476,24 @@
|
|||
:props props
|
||||
:exp (dt/in-future "4h")})
|
||||
uri (build-auth-uri cfg state)]
|
||||
(yrs/response 200 {:redirect-uri uri})))
|
||||
{::yrs/status 200
|
||||
::yrs/body {:redirect-uri uri}}))
|
||||
|
||||
(defn- callback-handler
|
||||
[cfg request]
|
||||
(letfn [(process-request []
|
||||
(p/let [info (get-info cfg request)
|
||||
profile (get-profile cfg info)]
|
||||
(generate-redirect cfg request info profile)))
|
||||
|
||||
(handle-error [cause]
|
||||
(l/error :hint "error on oauth process" :cause cause)
|
||||
(generate-error-redirect cfg cause))]
|
||||
|
||||
(-> (process-request)
|
||||
(p/catch handle-error))))
|
||||
(try
|
||||
(let [info (get-info cfg request)
|
||||
profile (get-profile cfg info)]
|
||||
(generate-redirect cfg request info profile))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "error on oauth process" :cause cause)
|
||||
(generate-error-redirect cfg cause))))
|
||||
|
||||
(def provider-lookup
|
||||
{:compile
|
||||
(fn [& _]
|
||||
(fn [handler]
|
||||
(fn [{:keys [::providers] :as cfg} request]
|
||||
(fn [handler {:keys [::providers] :as cfg}]
|
||||
(fn [request]
|
||||
(let [provider (some-> request :path-params :provider keyword)]
|
||||
(if-let [provider (get providers provider)]
|
||||
(handler (assoc cfg :provider provider) request)
|
||||
|
@ -553,18 +537,15 @@
|
|||
[_]
|
||||
(s/keys :req [::session/manager
|
||||
::http/client
|
||||
::wrk/executor
|
||||
::main/props
|
||||
::db/pool
|
||||
::providers]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::wrk/executor] :as cfg}]
|
||||
[_ cfg]
|
||||
(let [cfg (update cfg :provider d/without-nils)]
|
||||
["" {:middleware [[session/authz cfg]
|
||||
[hmw/with-dispatch executor]
|
||||
[hmw/with-config cfg]
|
||||
[provider-lookup]]}
|
||||
[provider-lookup cfg]]}
|
||||
["/auth/oauth"
|
||||
["/:provider"
|
||||
{:handler auth-handler
|
||||
|
|
|
@@ -37,6 +37,7 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn- parse-address
^"[Ljakarta.mail.internet.InternetAddress;"
[v]
(InternetAddress/parse ^String v))
@@ -149,6 +150,7 @@
"mail.smtp.connectiontimeout" timeout}))

(defn- create-smtp-session
^Session
[cfg]
(let [props (opts->props cfg)]
(Session/getInstance props)))
@ -19,19 +19,21 @@
|
|||
[app.http.middleware :as mw]
|
||||
[app.http.session :as session]
|
||||
[app.http.websocket :as-alias ws]
|
||||
[app.main :as-alias main]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias rpc.doc]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[reitit.core :as r]
|
||||
[reitit.middleware :as rr]
|
||||
[yetti.adapter :as yt]
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs]))
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
(declare wrap-router)
|
||||
(declare router-handler)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP SERVER
|
||||
|
@ -71,13 +73,16 @@
|
|||
:http/host host
|
||||
:http/max-body-size (::max-body-size cfg)
|
||||
:http/max-multipart-body-size (::max-multipart-body-size cfg)
|
||||
:xnio/io-threads (::io-threads cfg)
|
||||
:xnio/dispatch (::wrk/executor cfg)
|
||||
:xnio/io-threads (or (::io-threads cfg)
|
||||
(max 3 (px/get-available-processors)))
|
||||
:xnio/worker-threads (or (::worker-threads cfg)
|
||||
(max 6 (px/get-available-processors)))
|
||||
:xnio/dispatch true
|
||||
:ring/async true}
|
||||
|
||||
handler (cond
|
||||
(some? router)
|
||||
(wrap-router router)
|
||||
(router-handler router)
|
||||
|
||||
(some? handler)
|
||||
handler
|
||||
|
@ -97,32 +102,35 @@
|
|||
|
||||
(defn- not-found-handler
|
||||
[_ respond _]
|
||||
(respond (yrs/response 404)))
|
||||
(respond {::yrs/status 404}))
|
||||
|
||||
(defn- wrap-router
|
||||
(defn- router-handler
|
||||
[router]
|
||||
(letfn [(handler [request respond raise]
|
||||
(letfn [(resolve-handler [request]
|
||||
(if-let [match (r/match-by-path router (yrq/path request))]
|
||||
(let [params (:path-params match)
|
||||
result (:result match)
|
||||
handler (or (:handler result) not-found-handler)
|
||||
request (assoc request :path-params params)]
|
||||
(handler request respond raise))
|
||||
(not-found-handler request respond raise)))
|
||||
(partial handler request))
|
||||
(partial not-found-handler request)))
|
||||
|
||||
(on-error [cause request respond]
|
||||
(on-error [cause request]
|
||||
(let [{:keys [body] :as response} (errors/handle cause request)]
|
||||
(respond
|
||||
(cond-> response
|
||||
(map? body)
|
||||
(-> (update :headers assoc "content-type" "application/transit+json")
|
||||
(assoc :body (t/encode-str body {:type :json-verbose})))))))]
|
||||
(cond-> response
|
||||
(map? body)
|
||||
(-> (update ::yrs/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::yrs/body (t/encode-str body {:type :json-verbose}))))))]
|
||||
|
||||
(fn [request respond _]
|
||||
(try
|
||||
(handler request respond #(on-error % request respond))
|
||||
(catch Throwable cause
|
||||
(on-error cause request respond))))))
|
||||
(let [handler (resolve-handler request)
|
||||
exchange (yrq/exchange request)]
|
||||
(handler
|
||||
(fn [response]
|
||||
(yt/dispatch! exchange (partial respond response)))
|
||||
(fn [cause]
|
||||
(let [response (on-error cause request)]
|
||||
(yt/dispatch! exchange (partial respond response)))))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP ROUTER
|
||||
|
@ -130,11 +138,11 @@
|
|||
|
||||
(defmethod ig/pre-init-spec ::router [_]
|
||||
(s/keys :req [::session/manager
|
||||
::actoken/manager
|
||||
::ws/routes
|
||||
::rpc/routes
|
||||
::rpc.doc/routes
|
||||
::oidc/routes
|
||||
::main/props
|
||||
::assets/routes
|
||||
::debug/routes
|
||||
::db/pool
|
||||
|
@@ -151,7 +159,8 @@
[session/soft-auth cfg]
[actoken/soft-auth cfg]
[mw/errors errors/handle]
[mw/restrict-methods]]}
[mw/restrict-methods]
[mw/with-dispatch :vthread]]}

(::mtx/routes cfg)
(::assets/routes cfg)
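With [mw/with-dispatch :vthread] mounted at the router level, downstream handlers can run as plain blocking functions and return the namespaced response maps seen throughout this diff. An illustrative handler in that style (example.handlers and find-thing are hypothetical, not part of the diff):

;; Illustration only: the blocking handler style plus ::yrs/* response keys
;; that the refactored routes expect.
(ns example.handlers
  (:require [yetti.response :as-alias yrs]))

(defn- find-thing
  "Hypothetical blocking lookup; stands in for a database query."
  [_cfg request]
  (get-in request [:path-params :id]))

(defn thing-handler
  "Receives the config and the request, blocks as needed, and returns a
  response map with namespaced yetti keys."
  [cfg request]
  (if-let [thing (find-thing cfg request)]
    {::yrs/status 200
     ::yrs/headers {"content-type" "application/transit+json"}
     ::yrs/body thing}
    {::yrs/status 404}))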
@ -7,26 +7,12 @@
|
|||
(ns app.http.access-token
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.main :as-alias main]
|
||||
[app.tokens :as tokens]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]))
|
||||
|
||||
|
||||
(s/def ::manager
|
||||
(s/keys :req [::db/pool ::wrk/executor ::main/props]))
|
||||
|
||||
(defmethod ig/pre-init-spec ::manager [_] ::manager)
|
||||
(defmethod ig/init-key ::manager [_ cfg] cfg)
|
||||
(defmethod ig/halt-key! ::manager [_ _])
|
||||
|
||||
(def header-re #"^Token\s+(.*)")
|
||||
|
||||
(defn- get-token
|
||||
|
@ -48,40 +34,32 @@
|
|||
(db/decode-pgarray #{})))))
|
||||
|
||||
(defn- wrap-soft-auth
|
||||
[handler {:keys [::manager]}]
|
||||
(us/assert! ::manager manager)
|
||||
"Soft Authentication, will be executed synchronously on the undertow
|
||||
worker thread."
|
||||
[handler {:keys [::main/props]}]
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
(let [token (get-token request)
|
||||
claims (decode-token props token)]
|
||||
(cond-> request
|
||||
(map? claims)
|
||||
(assoc ::id (:tid claims))))
|
||||
(catch Throwable cause
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause)
|
||||
request)))]
|
||||
|
||||
(let [{:keys [::wrk/executor ::main/props]} manager]
|
||||
(fn [request respond raise]
|
||||
(let [token (get-token request)]
|
||||
(->> (px/submit! executor (partial decode-token props token))
|
||||
(p/fnly (fn [claims cause]
|
||||
(when cause
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause))
|
||||
(let [request (cond-> request
|
||||
(map? claims)
|
||||
(assoc ::id (:tid claims)))]
|
||||
(handler request respond raise)))))))))
|
||||
(let [request (handle-request request)]
|
||||
(handler request respond raise)))))
|
||||
|
||||
(defn- wrap-authz
|
||||
[handler {:keys [::manager]}]
|
||||
(us/assert! ::manager manager)
|
||||
(let [{:keys [::wrk/executor ::db/pool]} manager]
|
||||
(fn [request respond raise]
|
||||
(if-let [token-id (::id request)]
|
||||
(->> (px/submit! executor (partial get-token-perms pool token-id))
|
||||
(p/fnly (fn [perms cause]
|
||||
(cond
|
||||
(some? cause)
|
||||
(raise cause)
|
||||
|
||||
(nil? perms)
|
||||
(handler request respond raise)
|
||||
|
||||
:else
|
||||
(let [request (assoc request ::perms perms)]
|
||||
(handler request respond raise))))))
|
||||
(handler request respond raise)))))
|
||||
"Authorization middleware, will be executed synchronously on vthread."
|
||||
[handler {:keys [::db/pool]}]
|
||||
(fn [request]
|
||||
(let [perms (some->> (::id request) (get-token-perms pool))]
|
||||
(handler (cond-> request
|
||||
(some? perms)
|
||||
(assoc ::perms perms))))))
|
||||
|
||||
(def soft-auth
|
||||
{:name ::soft-auth
|
||||
|
|
|
@ -14,11 +14,9 @@
|
|||
[app.db :as db]
|
||||
[app.storage :as sto]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[yetti.response :as yrs]))
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
(def ^:private cache-max-age
|
||||
(dt/duration {:hours 24}))
|
||||
|
@ -28,10 +26,9 @@
|
|||
|
||||
(defn get-id
|
||||
[{:keys [path-params]}]
|
||||
(if-let [id (some-> path-params :id d/parse-uuid)]
|
||||
(p/resolved id)
|
||||
(p/rejected (ex/error :type :not-found
|
||||
:hint "object not found"))))
|
||||
(or (some-> path-params :id d/parse-uuid)
|
||||
(ex/raise :type :not-found
|
||||
:hint "object not found")))
|
||||
|
||||
(defn- get-file-media-object
|
||||
[pool id]
|
||||
|
@ -39,16 +36,12 @@
|
|||
|
||||
(defn- serve-object-from-s3
|
||||
[{:keys [::sto/storage] :as cfg} obj]
|
||||
(let [mdata (meta obj)]
|
||||
(->> (sto/get-object-url storage obj {:max-age signature-max-age})
|
||||
(p/fmap (fn [{:keys [host port] :as url}]
|
||||
(let [headers {"location" (str url)
|
||||
"x-host" (cond-> host port (str ":" port))
|
||||
"x-mtype" (:content-type mdata)
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}]
|
||||
(yrs/response
|
||||
:status 307
|
||||
:headers headers)))))))
|
||||
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
|
||||
{::yrs/status 307
|
||||
::yrs/headers {"location" (str url)
|
||||
"x-host" (cond-> host port (str ":" port))
|
||||
"x-mtype" (-> obj meta :content-type)
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}}))
|
||||
|
||||
(defn- serve-object-from-fs
|
||||
[{:keys [::path]} obj]
|
||||
|
@ -58,8 +51,8 @@
|
|||
headers {"x-accel-redirect" (:path purl)
|
||||
"content-type" (:content-type mdata)
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}]
|
||||
(p/resolved
|
||||
(yrs/response :status 204 :headers headers))))
|
||||
{::yrs/status 204
|
||||
::yrs/headers headers}))
|
||||
|
||||
(defn- serve-object
|
||||
"Helper function that returns the appropriate response depending on
|
||||
|
@ -72,42 +65,34 @@
|
|||
|
||||
(defn objects-handler
|
||||
"Handler that servers storage objects by id."
|
||||
[{:keys [::sto/storage ::wrk/executor] :as cfg} request respond raise]
|
||||
(->> (get-id request)
|
||||
(p/mcat executor (fn [id] (sto/get-object storage id)))
|
||||
(p/mcat executor (fn [obj]
|
||||
(if (some? obj)
|
||||
(serve-object cfg obj)
|
||||
(p/resolved (yrs/response 404)))))
|
||||
(p/fnly executor (fn [result cause]
|
||||
(if cause (raise cause) (respond result))))))
|
||||
[{:keys [::sto/storage] :as cfg} request]
|
||||
(let [id (get-id request)
|
||||
obj (sto/get-object storage id)]
|
||||
(if obj
|
||||
(serve-object cfg obj)
|
||||
{::yrs/status 404})))
|
||||
|
||||
(defn- generic-handler
|
||||
"A generic handler helper/common code for file-media based handlers."
|
||||
[{:keys [::sto/storage ::wrk/executor] :as cfg} request kf]
|
||||
(let [pool (::db/pool storage)]
|
||||
(->> (get-id request)
|
||||
(p/fmap executor (fn [id] (get-file-media-object pool id)))
|
||||
(p/mcat executor (fn [mobj] (sto/get-object storage (kf mobj))))
|
||||
(p/mcat executor (fn [sobj]
|
||||
(if sobj
|
||||
(serve-object cfg sobj)
|
||||
(p/resolved (yrs/response 404))))))))
|
||||
[{:keys [::sto/storage] :as cfg} request kf]
|
||||
(let [pool (::db/pool storage)
|
||||
id (get-id request)
|
||||
mobj (get-file-media-object pool id)
|
||||
sobj (sto/get-object storage (kf mobj))]
|
||||
(if sobj
|
||||
(serve-object cfg sobj)
|
||||
{::yrs/status 404})))
|
||||
|
||||
(defn file-objects-handler
|
||||
"Handler that serves storage objects by file media id."
|
||||
[cfg request respond raise]
|
||||
(->> (generic-handler cfg request :media-id)
|
||||
(p/fnly (fn [result cause]
|
||||
(if cause (raise cause) (respond result))))))
|
||||
[cfg request]
|
||||
(generic-handler cfg request :media-id))
|
||||
|
||||
(defn file-thumbnails-handler
|
||||
"Handler that serves storage objects by thumbnail-id and quick
|
||||
fallback to file-media-id if no thumbnail is available."
|
||||
[cfg request respond raise]
|
||||
(->> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %)))
|
||||
(p/fnly (fn [result cause]
|
||||
(if cause (raise cause) (respond result))))))
|
||||
[cfg request]
|
||||
(generic-handler cfg request #(or (:thumbnail-id %) (:media-id %))))
|
||||
|
||||
;; --- Initialization
|
||||
|
||||
|
@ -115,7 +100,7 @@
|
|||
(s/def ::routes vector?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::sto/storage ::wrk/executor ::path]))
|
||||
(s/keys :req [::sto/storage ::path]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ cfg]
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
[jsonista.core :as j]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs]))
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
(declare parse-json)
|
||||
(declare handle-request)
|
||||
|
@ -39,7 +39,7 @@
|
|||
(letfn [(handler [request respond _]
|
||||
(let [data (-> request yrq/body slurp)]
|
||||
(px/run! executor #(handle-request cfg data)))
|
||||
(respond (yrs/response 200)))]
|
||||
(respond {::yrs/status 200}))]
|
||||
["/sns" {:handler handler
|
||||
:allowed-methods #{:post}}]))
|
||||
|
||||
|
|
|
@@ -40,12 +40,25 @@
(catch Throwable cause
(p/rejected cause))))))

(defn- resolve-client
[params]
(cond
(instance? HttpClient params)
params

(map? params)
(resolve-client (::client params))

:else
(throw (UnsupportedOperationException. "invalid arguments"))))

(defn req!
"A convenience toplevel function for gradual migration to a new API
convention."
([{:keys [::client]} request]
(us/assert! ::client client)
(send! client request {}))
([{:keys [::client]} request options]
(us/assert! ::client client)
(send! client request options)))
([cfg-or-client request]
(let [client (resolve-client cfg-or-client)]
(send! client request {})))
([cfg-or-client request options]
(let [client (resolve-client cfg-or-client)]
(send! client request options))))
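Call sites elsewhere in this diff now pass the third options map with {:sync? true} and destructure the response directly instead of mapping over a promise. A usage sketch of that convention, based on the GitHub emails call shown earlier (`cfg` is assumed to carry the ::http/client key):

(comment
  ;; synchronous request/response, no promise chain
  (let [{:keys [status body]} (http/req! cfg
                                         {:method :get
                                          :uri "https://api.github.com/user/emails"
                                          :timeout 6000}
                                         {:sync? true})]
    (when (= 200 status)
      body)))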
@ -13,7 +13,6 @@
|
|||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.middleware :as mw]
|
||||
[app.http.session :as session]
|
||||
[app.rpc.commands.binfile :as binf]
|
||||
[app.rpc.commands.files-create :refer [create-file]]
|
||||
|
@ -21,7 +20,6 @@
|
|||
[app.util.blob :as blob]
|
||||
[app.util.template :as tmpl]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
|
@ -48,13 +46,17 @@
|
|||
(defn prepare-response
|
||||
[body]
|
||||
(let [headers {"content-type" "application/transit+json"}]
|
||||
(yrs/response :status 200 :body body :headers headers)))
|
||||
{::yrs/status 200
|
||||
::yrs/body body
|
||||
::yrs/headers headers}))
|
||||
|
||||
(defn prepare-download-response
|
||||
[body filename]
|
||||
(let [headers {"content-disposition" (str "attachment; filename=" filename)
|
||||
"content-type" "application/octet-stream"}]
|
||||
(yrs/response :status 200 :body body :headers headers)))
|
||||
{::yrs/status 200
|
||||
::yrs/body body
|
||||
::yrs/headers headers}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INDEX
|
||||
|
@ -65,10 +67,10 @@
|
|||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
(yrs/response :status 200
|
||||
:headers {"content-type" "text/html"}
|
||||
:body (-> (io/resource "app/templates/debug.tmpl")
|
||||
(tmpl/render {}))))
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/html"}
|
||||
::yrs/body (-> (io/resource "app/templates/debug.tmpl")
|
||||
(tmpl/render {}))})
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FILE CHANGES
|
||||
|
@ -115,7 +117,8 @@
|
|||
:project-id project-id
|
||||
:profile-id profile-id
|
||||
:data data})
|
||||
(yrs/response 201 "OK CREATED"))
|
||||
{::yrs/status 201
|
||||
::yrs/body "OK CREATED"})
|
||||
|
||||
:else
|
||||
(prepare-response (blob/decode data))))))
|
||||
|
@ -143,7 +146,8 @@
|
|||
(db/update! pool :file
|
||||
{:data (blob/encode data)}
|
||||
{:id file-id})
|
||||
(yrs/response 200 "OK UPDATED"))
|
||||
{::yrs/status 200
|
||||
::yrs/body "OK UPDATED"})
|
||||
|
||||
(do
|
||||
(create-file pool {:id file-id
|
||||
|
@ -151,9 +155,11 @@
|
|||
:project-id project-id
|
||||
:profile-id profile-id
|
||||
:data data})
|
||||
(yrs/response 201 "OK CREATED"))))
|
||||
{::yrs/status 201
|
||||
::yrs/body "OK CREATED"})))
|
||||
|
||||
(yrs/response 500 "ERROR"))))
|
||||
{::yrs/status 500
|
||||
::yrs/body "ERROR"})))
|
||||
|
||||
(defn file-data-handler
|
||||
[cfg request]
|
||||
|
@ -241,11 +247,12 @@
|
|||
(let [result (if (= 1 (:version report))
|
||||
(render-template-v1 report)
|
||||
(render-template-v2 report))]
|
||||
(yrs/response :status 200
|
||||
:body result
|
||||
:headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}))
|
||||
(yrs/response 404 "not found"))))
|
||||
{::yrs/status 200
|
||||
::yrs/body result
|
||||
::yrs/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}})
|
||||
{::yrs/status 404
|
||||
::yrs/body "not found"})))
|
||||
|
||||
(def sql:error-reports
|
||||
"SELECT id, created_at,
|
||||
|
@ -261,11 +268,11 @@
|
|||
:code :only-admins-allowed))
|
||||
(let [items (->> (db/exec! pool [sql:error-reports])
|
||||
(map #(update % :created-at dt/format-instant :rfc1123)))]
|
||||
(yrs/response :status 200
|
||||
:body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))
|
||||
:headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"})))
|
||||
{::yrs/status 200
|
||||
::yrs/body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))
|
||||
::yrs/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; EXPORT/IMPORT
|
||||
|
@ -301,16 +308,15 @@
|
|||
::binf/profile-id profile-id
|
||||
::binf/project-id project-id))
|
||||
|
||||
(yrs/response
|
||||
:status 200
|
||||
:headers {"content-type" "text/plain"}
|
||||
:body "OK CLONED"))
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/plain"}
|
||||
::yrs/body "OK CLONED"})
|
||||
|
||||
{::yrs/status 200
|
||||
::yrs/body (io/input-stream path)
|
||||
::yrs/headers {"content-type" "application/octet-stream"
|
||||
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
|
||||
|
||||
(yrs/response
|
||||
:status 200
|
||||
:headers {"content-type" "application/octet-stream"
|
||||
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}
|
||||
:body (io/input-stream path))))))
|
||||
|
||||
|
||||
(defn import-handler
|
||||
|
@ -340,10 +346,9 @@
|
|||
::binf/profile-id profile-id
|
||||
::binf/project-id project-id))
|
||||
|
||||
(yrs/response
|
||||
:status 200
|
||||
:headers {"content-type" "text/plain"}
|
||||
:body "OK")))
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/plain"}
|
||||
::yrs/body "OK"}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OTHER SMALL VIEWS/HANDLERS
|
||||
|
@ -354,11 +359,13 @@
|
|||
[{:keys [::db/pool]} _]
|
||||
(try
|
||||
(db/exec-one! pool ["select count(*) as count from server_prop;"])
|
||||
(yrs/response 200 "OK")
|
||||
{::yrs/status 200
|
||||
::yrs/body "OK"}
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unable to execute query on health handler"
|
||||
:cause cause)
|
||||
(yrs/response 503 "KO"))))
|
||||
{::yrs/status 503
|
||||
::yrs/body "KO"})))
|
||||
|
||||
(defn changelog-handler
|
||||
[_ _]
|
||||
|
@ -367,10 +374,11 @@
|
|||
(md->html [text]
|
||||
(md/md-to-html-string text :replacement-transformers (into [transform-emoji] mdt/transformer-vector)))]
|
||||
(if-let [clog (io/resource "changelog.md")]
|
||||
(yrs/response :status 200
|
||||
:headers {"content-type" "text/html; charset=utf-8"}
|
||||
:body (-> clog slurp md->html))
|
||||
(yrs/response :status 404 :body "NOT FOUND"))))
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/html; charset=utf-8"}
|
||||
::yrs/body (-> clog slurp md->html)}
|
||||
{::yrs/status 404
|
||||
::yrs/body "NOT FOUND"})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INIT
|
||||
|
@ -380,33 +388,26 @@
|
|||
{:compile
|
||||
(fn [& _]
|
||||
(fn [handler pool]
|
||||
(fn [request respond raise]
|
||||
(fn [request]
|
||||
(if (authorized? pool request)
|
||||
(handler request respond raise)
|
||||
(raise (ex/error :type :authentication
|
||||
:code :only-admins-allowed))))))})
|
||||
(handler request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed)))))})
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::db/pool
|
||||
::wrk/executor
|
||||
::session/manager]))
|
||||
(s/keys :req [::db/pool ::session/manager]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::db/pool ::wrk/executor] :as cfg}]
|
||||
[["/readyz" {:middleware [[mw/with-dispatch executor]
|
||||
[mw/with-config cfg]]
|
||||
:handler health-handler}]
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
[["/readyz" {:handler (partial health-handler cfg)}]
|
||||
["/dbg" {:middleware [[session/authz cfg]
|
||||
[with-authorization pool]
|
||||
[mw/with-dispatch executor]
|
||||
[mw/with-config cfg]]}
|
||||
["" {:handler index-handler}]
|
||||
["/health" {:handler health-handler}]
|
||||
["/changelog" {:handler changelog-handler}]
|
||||
;; ["/error-by-id/:id" {:handler error-handler}]
|
||||
["/error/:id" {:handler error-handler}]
|
||||
["/error" {:handler error-list-handler}]
|
||||
["/file/export" {:handler export-handler}]
|
||||
["/file/import" {:handler import-handler}]
|
||||
["/file/data" {:handler file-data-handler}]
|
||||
["/file/changes" {:handler file-changes-handler}]]])
|
||||
[with-authorization pool]]}
|
||||
["" {:handler (partial index-handler cfg)}]
|
||||
["/health" {:handler (partial health-handler cfg)}]
|
||||
["/changelog" {:handler (partial changelog-handler cfg)}]
|
||||
["/error/:id" {:handler (partial error-handler cfg)}]
|
||||
["/error" {:handler (partial error-list-handler cfg)}]
|
||||
["/file/export" {:handler (partial export-handler cfg)}]
|
||||
["/file/import" {:handler (partial import-handler cfg)}]
|
||||
["/file/data" {:handler (partial file-data-handler cfg)}]
|
||||
["/file/changes" {:handler (partial file-changes-handler cfg)}]]])
|
||||
|
|
|
@ -46,20 +46,30 @@
|
|||
|
||||
(defmethod handle-exception :authentication
|
||||
[err _]
|
||||
(yrs/response 401 (ex-data err)))
|
||||
{::yrs/status 401
|
||||
::yrs/body (ex-data err)})
|
||||
|
||||
(defmethod handle-exception :authorization
|
||||
[err _]
|
||||
(yrs/response 403 (ex-data err)))
|
||||
{::yrs/status 403
|
||||
::yrs/body (ex-data err)})
|
||||
|
||||
(defmethod handle-exception :restriction
|
||||
[err _]
|
||||
(yrs/response 400 (ex-data err)))
|
||||
{::yrs/status 400
|
||||
::yrs/body (ex-data err)})
|
||||
|
||||
(defmethod handle-exception :rate-limit
|
||||
[err _]
|
||||
(let [headers (-> err ex-data ::http/headers)]
|
||||
(yrs/response :status 429 :body "" :headers headers)))
|
||||
{::yrs/status 429
|
||||
::yrs/headers headers}))
|
||||
|
||||
(defmethod handle-exception :concurrency-limit
|
||||
[err _]
|
||||
(let [headers (-> err ex-data ::http/headers)]
|
||||
{::yrs/status 429
|
||||
::yrs/headers headers}))
|
||||
|
||||
(defmethod handle-exception :validation
|
||||
[err _]
|
||||
|
@ -67,16 +77,16 @@
|
|||
(cond
|
||||
(= code :spec-validation)
|
||||
(let [explain (ex/explain data)]
|
||||
(yrs/response :status 400
|
||||
:body (-> data
|
||||
(dissoc ::s/problems ::s/value)
|
||||
(cond-> explain (assoc :explain explain)))))
|
||||
{::yrs/status 400
|
||||
::yrs/body (-> data
|
||||
(dissoc ::s/problems ::s/value)
|
||||
(cond-> explain (assoc :explain explain)))})
|
||||
|
||||
(= code :request-body-too-large)
|
||||
(yrs/response :status 413 :body data)
|
||||
{::yrs/status 413 ::yrs/body data}
|
||||
|
||||
:else
|
||||
(yrs/response :status 400 :body data))))
|
||||
{::yrs/status 400 ::yrs/body data})))
|
||||
|
||||
(defmethod handle-exception :assertion
|
||||
[error request]
|
||||
|
@ -84,31 +94,27 @@
|
|||
explain (ex/explain edata)]
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "Assertion error" :message (ex-message error) :cause error)
|
||||
(yrs/response :status 500
|
||||
:body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> edata
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain)))}))))
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> edata
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain)))}})))
|
||||
|
||||
(defmethod handle-exception :not-found
|
||||
[err _]
|
||||
(yrs/response 404 (ex-data err)))
|
||||
{::yrs/status 404
|
||||
::yrs/body (ex-data err)})
|
||||
|
||||
(defmethod handle-exception :internal
|
||||
[error request]
|
||||
(let [{:keys [code] :as edata} (ex-data error)]
|
||||
(cond
|
||||
(= :concurrency-limit-reached code)
|
||||
(yrs/response 429)
|
||||
|
||||
:else
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "Internal error" :message (ex-message error) :cause error)
|
||||
(yrs/response 500 {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data edata})))))
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "Internal error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data (ex-data error)}}))
|
||||
|
||||
(defmethod handle-exception org.postgresql.util.PSQLException
|
||||
[error request]
|
||||
|
@ -117,20 +123,23 @@
|
|||
(l/error :hint "PSQL error" :message (ex-message error) :cause error)
|
||||
(cond
|
||||
(= state "57014")
|
||||
(yrs/response 504 {:type :server-error
|
||||
:code :statement-timeout
|
||||
:hint (ex-message error)})
|
||||
{::yrs/status 504
|
||||
::yrs/body {:type :server-error
|
||||
:code :statement-timeout
|
||||
:hint (ex-message error)}}
|
||||
|
||||
(= state "25P03")
|
||||
(yrs/response 504 {:type :server-error
|
||||
:code :idle-in-transaction-timeout
|
||||
:hint (ex-message error)})
|
||||
{::yrs/status 504
|
||||
::yrs/body {:type :server-error
|
||||
:code :idle-in-transaction-timeout
|
||||
:hint (ex-message error)}}
|
||||
|
||||
:else
|
||||
(yrs/response 500 {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)
|
||||
:state state})))))
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)
|
||||
:state state}}))))
|
||||
|
||||
(defmethod handle-exception :default
|
||||
[error request]
|
||||
|
@ -140,9 +149,10 @@
|
|||
(nil? edata)
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "Unexpected error" :message (ex-message error) :cause error)
|
||||
(yrs/response 500 {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)}))
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)}})
|
||||
|
||||
;; This is a special case for the idle-in-transaction error;
|
||||
;; when it happens, the connection is automatically closed and
|
||||
|
@ -156,10 +166,11 @@
|
|||
:else
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "Unhandled error" :message (ex-message error) :cause error)
|
||||
(yrs/response 500 {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data edata})))))
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data edata}}))))
|
||||
|
||||
(defn handle
|
||||
[cause request]
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
[cuerdas.core :as str]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.util :as pu]
|
||||
[yetti.adapter :as yt]
|
||||
[yetti.middleware :as ymw]
|
||||
[yetti.request :as yrq]
|
||||
|
@ -22,7 +23,10 @@
|
|||
com.fasterxml.jackson.core.JsonParseException
|
||||
com.fasterxml.jackson.core.io.JsonEOFException
|
||||
io.undertow.server.RequestTooBigException
|
||||
java.io.OutputStream))
|
||||
java.io.OutputStream
|
||||
java.io.InputStream))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(def server-timing
|
||||
{:name ::server-timing
|
||||
|
@ -44,14 +48,14 @@
|
|||
(let [header (yrq/get-header request "content-type")]
|
||||
(cond
|
||||
(str/starts-with? header "application/transit+json")
|
||||
(with-open [is (yrq/body request)]
|
||||
(with-open [^InputStream is (yrq/body request)]
|
||||
(let [params (t/read! (t/reader is))]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
(update :params merge params))))
|
||||
|
||||
(str/starts-with? header "application/json")
|
||||
(with-open [is (yrq/body request)]
|
||||
(with-open [^InputStream is (yrq/body request)]
|
||||
(let [params (json/decode is json-mapper)]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
|
@ -62,6 +66,11 @@
|
|||
|
||||
(handle-error [raise cause]
|
||||
(cond
|
||||
(instance? RuntimeException cause)
|
||||
(if-let [cause (ex-cause cause)]
|
||||
(handle-error raise cause)
|
||||
(raise cause))
|
||||
|
||||
(instance? RequestTooBigException cause)
|
||||
(raise (ex/error :type :validation
|
||||
:code :request-body-too-large
|
||||
|
@ -78,12 +87,12 @@
|
|||
(raise cause)))]
|
||||
|
||||
(fn [request respond raise]
|
||||
(let [request (ex/try! (process-request request))]
|
||||
(if (ex/exception? request)
|
||||
(if (ex/runtime-exception? request)
|
||||
(handle-error raise (or (ex-cause request) request))
|
||||
(handle-error raise request))
|
||||
(handler request respond raise))))))
|
||||
(if (= (yrq/method request) :post)
|
||||
(let [request (ex/try! (process-request request))]
|
||||
(if (ex/exception? request)
|
||||
(handle-error raise request)
|
||||
(handler request respond raise)))
|
||||
(handler request respond raise)))))
|
||||
|
||||
(def parse-request
|
||||
{:name ::parse-request
|
||||
|
@ -94,12 +103,7 @@
|
|||
needed because transit-java calls flush very aggressively on each
|
||||
object write."
|
||||
[^java.io.OutputStream os ^long chunk-size]
|
||||
(proxy [java.io.BufferedOutputStream] [os (int chunk-size)]
|
||||
;; Explicitly do not forward flush
|
||||
(flush [])
|
||||
(close []
|
||||
(proxy-super flush)
|
||||
(proxy-super close))))
|
||||
(yetti.util.BufferedOutputStream. os (int chunk-size)))
|
||||
|
||||
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
|
||||
|
||||
|
@ -109,13 +113,10 @@
|
|||
(reify yrs/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
(with-open [bos (buffered-output-stream output-stream buffer-size)]
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
(let [tw (t/writer bos opts)]
|
||||
(t/write! tw data)))
|
||||
|
||||
(catch java.io.IOException _cause
|
||||
;; Do nothing, EOF means client closes connection abruptly
|
||||
nil)
|
||||
(catch java.io.IOException _)
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unexpected error on encoding response"
|
||||
:cause cause))
|
||||
|
@ -126,13 +127,10 @@
|
|||
(reify yrs/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
|
||||
(with-open [bos (buffered-output-stream output-stream buffer-size)]
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
(json/write! bos data json-mapper))
|
||||
|
||||
(catch java.io.IOException _cause
|
||||
;; Do nothing, EOF means client closes connection abruptly
|
||||
nil)
|
||||
(catch java.io.IOException _)
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unexpected error on encoding response"
|
||||
:cause cause))
|
||||
|
@ -140,15 +138,15 @@
|
|||
(.close ^OutputStream output-stream))))))
|
||||
|
||||
(format-response-with-json [response _]
|
||||
(let [body (yrs/body response)]
|
||||
(let [body (::yrs/body response)]
|
||||
(if (or (boolean? body) (coll? body))
|
||||
(-> response
|
||||
(update :headers assoc "content-type" "application/json")
|
||||
(assoc :body (json-streamable-body body)))
|
||||
(update ::yrs/headers assoc "content-type" "application/json")
|
||||
(assoc ::yrs/body (json-streamable-body body)))
|
||||
response)))
|
||||
|
||||
(format-response-with-transit [response request]
|
||||
(let [body (yrs/body response)]
|
||||
(let [body (::yrs/body response)]
|
||||
(if (or (boolean? body) (coll? body))
|
||||
(let [qs (yrq/query request)
|
||||
opts (if (or (contains? cf/flags :transit-readable-response)
|
||||
|
@ -156,8 +154,8 @@
|
|||
{:type :json-verbose}
|
||||
{:type :json})]
|
||||
(-> response
|
||||
(update :headers assoc "content-type" "application/transit+json")
|
||||
(assoc :body (transit-streamable-body body opts))))
|
||||
(update ::yrs/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::yrs/body (transit-streamable-body body opts))))
|
||||
response)))
|
||||
|
||||
(format-response [response request]
|
||||
|
@ -181,8 +179,7 @@
|
|||
(fn [request respond raise]
|
||||
(handler request
|
||||
(fn [response]
|
||||
(let [response (process-response response request)]
|
||||
(respond response)))
|
||||
(respond (process-response response request)))
|
||||
raise))))
|
||||
|
||||
(def format-response
|
||||
|
@ -191,74 +188,59 @@
|
|||
|
||||
(defn wrap-errors
|
||||
[handler on-error]
|
||||
(fn [request respond _]
|
||||
(fn [request respond raise]
|
||||
(handler request respond (fn [cause]
|
||||
(-> cause (on-error request) respond)))))
|
||||
(try
|
||||
(respond (on-error cause request))
|
||||
(catch Throwable cause
|
||||
(raise cause)))))))
|
||||
|
||||
(def errors
|
||||
{:name ::errors
|
||||
:compile (constantly wrap-errors)})
|
||||
|
||||
(defn- with-cors-headers
|
||||
[headers origin]
|
||||
(-> headers
|
||||
(assoc "access-control-allow-origin" origin)
|
||||
(assoc "access-control-allow-methods" "GET,POST,DELETE,OPTIONS,PUT,HEAD,PATCH")
|
||||
(assoc "access-control-allow-credentials" "true")
|
||||
(assoc "access-control-expose-headers" "x-requested-with, content-type, cookie")
|
||||
(assoc "access-control-allow-headers" "x-frontend-version, content-type, accept, x-requested-width")))
|
||||
|
||||
(defn wrap-cors
|
||||
[handler]
|
||||
(if-not (contains? cf/flags :cors)
|
||||
handler
|
||||
(letfn [(add-headers [headers request]
|
||||
(let [origin (yrq/get-header request "origin")]
|
||||
(-> headers
|
||||
(assoc "access-control-allow-origin" origin)
|
||||
(assoc "access-control-allow-methods" "GET,POST,DELETE,OPTIONS,PUT,HEAD,PATCH")
|
||||
(assoc "access-control-allow-credentials" "true")
|
||||
(assoc "access-control-expose-headers" "x-requested-with, content-type, cookie")
|
||||
(assoc "access-control-allow-headers" "x-frontend-version, content-type, accept, x-requested-width"))))
|
||||
|
||||
(update-response [response request]
|
||||
(update response :headers add-headers request))]
|
||||
|
||||
(fn [request respond raise]
|
||||
(if (= (yrq/method request) :options)
|
||||
(-> (yrs/response 200)
|
||||
(update-response request)
|
||||
(respond))
|
||||
(handler request
|
||||
(fn [response]
|
||||
(respond (update-response response request)))
|
||||
raise))))))
|
||||
(fn [request]
|
||||
(let [response (if (= (yrq/method request) :options)
|
||||
{::yrs/status 200}
|
||||
(handler request))
|
||||
origin (yrq/get-header request "origin")]
|
||||
(update response ::yrs/headers with-cors-headers origin))))
|
||||
|
||||
(def cors
|
||||
{:name ::cors
|
||||
:compile (constantly wrap-cors)})
|
||||
|
||||
(defn compile-restrict-methods
|
||||
[data _]
|
||||
(when-let [allowed (:allowed-methods data)]
|
||||
(fn [handler]
|
||||
(fn [request respond raise]
|
||||
(let [method (yrq/method request)]
|
||||
(if (contains? allowed method)
|
||||
(handler request respond raise)
|
||||
(respond (yrs/response 405))))))))
|
||||
:compile (fn [& _]
|
||||
(when (contains? cf/flags :cors)
|
||||
wrap-cors))})
|
||||
|
||||
(def restrict-methods
|
||||
{:name ::restrict-methods
|
||||
:compile compile-restrict-methods})
|
||||
:compile
|
||||
(fn [data _]
|
||||
(when-let [allowed (:allowed-methods data)]
|
||||
(fn [handler]
|
||||
(fn [request respond raise]
|
||||
(let [method (yrq/method request)]
|
||||
(if (contains? allowed method)
|
||||
(handler request respond raise)
|
||||
(respond {::yrs/status 405})))))))})
|
||||
|
||||
(def with-dispatch
|
||||
{:name ::with-dispatch
|
||||
:compile
|
||||
(fn [& _]
|
||||
(fn [handler executor]
|
||||
(fn [request respond raise]
|
||||
(-> (px/submit! executor #(handler request))
|
||||
(p/bind p/wrap)
|
||||
(p/then respond)
|
||||
(p/catch raise)))))})
|
||||
|
||||
(def with-config
|
||||
{:name ::with-config
|
||||
:compile
|
||||
(fn [& _]
|
||||
(fn [handler config]
|
||||
(fn
|
||||
([request] (handler config request))
|
||||
([request respond raise] (handler config request respond raise)))))})
|
||||
(let [executor (px/resolve-executor executor)]
|
||||
(fn [request respond raise]
|
||||
(->> (px/submit! executor (partial handler request))
|
||||
(p/fnly (pu/handler respond raise)))))))})
|
||||
|
|
|
@ -8,7 +8,6 @@
|
|||
(:refer-clojure :exclude [read])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
|
@ -18,12 +17,9 @@
|
|||
[app.main :as-alias main]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
@ -76,69 +72,56 @@
|
|||
:id key})
|
||||
|
||||
(defn- database-manager
|
||||
[{:keys [::db/pool ::wrk/executor ::main/props]}]
|
||||
^{::wrk/executor executor
|
||||
::db/pool pool
|
||||
::main/props props}
|
||||
[pool]
|
||||
(reify ISessionManager
|
||||
(read [_ token]
|
||||
(px/with-dispatch executor
|
||||
(db/exec-one! pool (sql/select :http-session {:id token}))))
|
||||
(db/exec-one! pool (sql/select :http-session {:id token})))
|
||||
|
||||
(write! [_ key params]
|
||||
(px/with-dispatch executor
|
||||
(let [params (prepare-session-params key params)]
|
||||
(db/insert! pool :http-session params)
|
||||
params)))
|
||||
(let [params (prepare-session-params key params)]
|
||||
(db/insert! pool :http-session params)
|
||||
params))
|
||||
|
||||
(update! [_ params]
|
||||
(let [updated-at (dt/now)]
|
||||
(px/with-dispatch executor
|
||||
(db/update! pool :http-session
|
||||
{:updated-at updated-at}
|
||||
{:id (:id params)})
|
||||
(assoc params :updated-at updated-at))))
|
||||
(db/update! pool :http-session
|
||||
{:updated-at updated-at}
|
||||
{:id (:id params)})
|
||||
(assoc params :updated-at updated-at)))
|
||||
|
||||
(delete! [_ token]
|
||||
(px/with-dispatch executor
|
||||
(db/delete! pool :http-session {:id token})
|
||||
nil))))
|
||||
(db/delete! pool :http-session {:id token})
|
||||
nil)))
|
||||
|
||||
(defn inmemory-manager
|
||||
[{:keys [::db/pool ::wrk/executor ::main/props]}]
|
||||
[]
|
||||
(let [cache (atom {})]
|
||||
^{::main/props props
|
||||
::wrk/executor executor
|
||||
::db/pool pool}
|
||||
(reify ISessionManager
|
||||
(read [_ token]
|
||||
(p/do (get @cache token)))
|
||||
(get @cache token))
|
||||
|
||||
(write! [_ key params]
|
||||
(p/do
|
||||
(let [params (prepare-session-params key params)]
|
||||
(swap! cache assoc key params)
|
||||
params)))
|
||||
(let [params (prepare-session-params key params)]
|
||||
(swap! cache assoc key params)
|
||||
params))
|
||||
|
||||
(update! [_ params]
|
||||
(p/do
|
||||
(let [updated-at (dt/now)]
|
||||
(swap! cache update (:id params) assoc :updated-at updated-at)
|
||||
(assoc params :updated-at updated-at))))
|
||||
(let [updated-at (dt/now)]
|
||||
(swap! cache update (:id params) assoc :updated-at updated-at)
|
||||
(assoc params :updated-at updated-at)))
|
||||
|
||||
(delete! [_ token]
|
||||
(p/do
|
||||
(swap! cache dissoc token)
|
||||
nil)))))
|
||||
(swap! cache dissoc token)
|
||||
nil))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::manager [_]
|
||||
(s/keys :req [::db/pool ::wrk/executor ::main/props]))
|
||||
(s/keys :req [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::manager
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
[_ {:keys [::db/pool]}]
|
||||
(if (db/read-only? pool)
|
||||
(inmemory-manager cfg)
|
||||
(database-manager cfg)))
|
||||
(inmemory-manager)
|
||||
(database-manager pool)))
|
||||
|
||||
(defmethod ig/halt-key! ::manager
|
||||
[_ _])
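
Since the session manager protocol is now synchronous, a minimal usage sketch (manager, token and profile-id are hypothetical; write!, read and delete! are the protocol functions defined above) looks like this:

;; illustrative only: plain values come back, no p/let or px/with-dispatch required
(let [params  (write! manager token {:profile-id profile-id
                                     :user-agent "example-agent"
                                     :created-at (dt/now)})
      session (read manager token)]
  (delete! manager token)
  [params session])
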
|
||||
|
@ -154,40 +137,35 @@
|
|||
(declare ^:private gen-token)
|
||||
|
||||
(defn create-fn
|
||||
[{:keys [::manager]} profile-id]
|
||||
[{:keys [::manager ::main/props]} profile-id]
|
||||
(us/assert! ::manager manager)
|
||||
(us/assert! ::us/uuid profile-id)
|
||||
|
||||
(let [props (-> manager meta ::main/props)]
|
||||
(fn [request response]
|
||||
(let [uagent (yrq/get-header request "user-agent")
|
||||
params {:profile-id profile-id
|
||||
:user-agent uagent
|
||||
:created-at (dt/now)}
|
||||
token (gen-token props params)]
|
||||
(fn [request response]
|
||||
(let [uagent (yrq/get-header request "user-agent")
|
||||
params {:profile-id profile-id
|
||||
:user-agent uagent
|
||||
:created-at (dt/now)}
|
||||
token (gen-token props params)
|
||||
session (write! manager token params)]
|
||||
(l/trace :hint "create" :profile-id (str profile-id))
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)))))
|
||||
|
||||
(->> (write! manager token params)
|
||||
(p/fmap (fn [session]
|
||||
(l/trace :hint "create" :profile-id (str profile-id))
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)))))))))
|
||||
(defn delete-fn
|
||||
[{:keys [::manager]}]
|
||||
(us/assert! ::manager manager)
|
||||
(letfn [(delete [{:keys [profile-id] :as request}]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
cookie (yrq/get-cookie request cname)]
|
||||
(l/trace :hint "delete" :profile-id profile-id)
|
||||
(some->> (:value cookie) (delete! manager))))]
|
||||
(fn [request response]
|
||||
(p/do
|
||||
(delete request)
|
||||
(-> response
|
||||
(assoc :status 204)
|
||||
(assoc :body nil)
|
||||
(clear-auth-token-cookie)
|
||||
(clear-authenticated-cookie))))))
|
||||
(fn [request response]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
cookie (yrq/get-cookie request cname)]
|
||||
(l/trace :hint "delete" :profile-id (:profile-id request))
|
||||
(some->> (:value cookie) (delete! manager))
|
||||
(-> response
|
||||
(assoc :status 204)
|
||||
(assoc :body nil)
|
||||
(clear-auth-token-cookie)
|
||||
(clear-authenticated-cookie)))))
|
||||
|
||||
(defn- gen-token
|
||||
[props {:keys [profile-id created-at]}]
|
||||
|
@ -216,58 +194,39 @@
|
|||
(let [elapsed (dt/diff updated-at (dt/now))]
|
||||
(neg? (compare default-renewal-max-age elapsed)))))
|
||||
|
||||
(defn- wrap-reneval
|
||||
[respond manager session]
|
||||
(fn [response]
|
||||
(p/let [session (update! manager session)]
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)
|
||||
(respond)))))
|
||||
|
||||
(defn- wrap-soft-auth
|
||||
[handler {:keys [::manager]}]
|
||||
[handler {:keys [::manager ::main/props]}]
|
||||
(us/assert! ::manager manager)
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
(let [token (get-token request)
|
||||
claims (decode-token props token)]
|
||||
(cond-> request
|
||||
(map? claims)
|
||||
(-> (assoc ::token-claims claims)
|
||||
(assoc ::token token))))
|
||||
(catch Throwable cause
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause)
|
||||
request)))]
|
||||
|
||||
(let [{:keys [::wrk/executor ::main/props]} (meta manager)]
|
||||
(fn [request respond raise]
|
||||
(let [token (ex/try! (get-token request))]
|
||||
(if (ex/exception? token)
|
||||
(raise token)
|
||||
(->> (px/submit! executor (partial decode-token props token))
|
||||
(p/fnly (fn [claims cause]
|
||||
(when cause
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause))
|
||||
(let [request (cond-> request
|
||||
(map? claims)
|
||||
(-> (assoc ::token-claims claims)
|
||||
(assoc ::token token)))]
|
||||
(handler request respond raise))))))))))
|
||||
(let [request (handle-request request)]
|
||||
(handler request respond raise)))))
|
||||
|
||||
(defn- wrap-authz
|
||||
[handler {:keys [::manager]}]
|
||||
(us/assert! ::manager manager)
|
||||
(fn [request respond raise]
|
||||
(if-let [token (::token request)]
|
||||
(->> (get-session manager token)
|
||||
(p/fnly (fn [session cause]
|
||||
(cond
|
||||
(some? cause)
|
||||
(raise cause)
|
||||
(fn [request]
|
||||
(let [session (get-session manager (::token request))
|
||||
request (cond-> request
|
||||
(some? session)
|
||||
(assoc ::profile-id (:profile-id session)
|
||||
::id (:id session)))]
|
||||
|
||||
(nil? session)
|
||||
(handler request respond raise)
|
||||
|
||||
:else
|
||||
(let [request (-> request
|
||||
(assoc ::profile-id (:profile-id session))
|
||||
(assoc ::id (:id session)))
|
||||
respond (cond-> respond
|
||||
(renew-session? session)
|
||||
(wrap-reneval manager session))]
|
||||
(handler request respond raise))))))
|
||||
|
||||
(handler request respond raise))))
|
||||
(cond-> (handler request)
|
||||
(renew-session? session)
|
||||
(-> (assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session))))))
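
A schematic comparison (not taken from the codebase; do-work is a placeholder) of the handler shape this refactor moves away from and the shape it moves to, which is what makes the synchronous wrap-authz above possible:

;; before: continuation-passing style, three arguments threaded through every wrapper
(fn [request respond raise]
  (try
    (respond (do-work request))
    (catch Throwable cause
      (raise cause))))

;; after: plain one-arity handlers; blocking is fine because each request
;; runs on its own JDK19 virtual thread
(fn [request]
  (do-work request))
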
|
||||
|
||||
(def soft-auth
|
||||
{:name ::soft-auth
|
||||
|
|
|
@ -17,9 +17,9 @@
|
|||
[app.msgbus :as mbus]
|
||||
[app.util.time :as dt]
|
||||
[app.util.websocket :as ws]
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec.csp :as sp]
|
||||
[yetti.websocket :as yws]))
|
||||
|
||||
(def recv-labels
|
||||
|
@ -34,70 +34,38 @@
|
|||
|
||||
(def state (atom {}))
|
||||
|
||||
(defn- on-connect
|
||||
[{:keys [::mtx/metrics]} wsp]
|
||||
(let [created-at (dt/now)]
|
||||
(swap! state assoc (::ws/id @wsp) wsp)
|
||||
(mtx/run! metrics
|
||||
:id :websocket-active-connections
|
||||
:inc 1)
|
||||
(fn []
|
||||
(swap! state dissoc (::ws/id @wsp))
|
||||
(mtx/run! metrics :id :websocket-active-connections :dec 1)
|
||||
(mtx/run! metrics
|
||||
:id :websocket-session-timing
|
||||
:val (/ (inst-ms (dt/diff created-at (dt/now))) 1000.0)))))
|
||||
|
||||
(defn- on-rcv-message
|
||||
[{:keys [::mtx/metrics]} _ message]
|
||||
(mtx/run! metrics
|
||||
:id :websocket-messages-total
|
||||
:labels recv-labels
|
||||
:inc 1)
|
||||
message)
|
||||
|
||||
(defn- on-snd-message
|
||||
[{:keys [::mtx/metrics]} _ message]
|
||||
(mtx/run! metrics
|
||||
:id :websocket-messages-total
|
||||
:labels send-labels
|
||||
:inc 1)
|
||||
message)
|
||||
|
||||
;; REPL HELPERS
|
||||
|
||||
(defn repl-get-connections-for-file
|
||||
[file-id]
|
||||
(->> (vals @state)
|
||||
(filter #(= file-id (-> % deref ::file-subscription :file-id)))
|
||||
(map deref)
|
||||
(map ::ws/id)))
|
||||
|
||||
(defn repl-get-connections-for-team
|
||||
[team-id]
|
||||
(->> (vals @state)
|
||||
(filter #(= team-id (-> % deref ::team-subscription :team-id)))
|
||||
(map deref)
|
||||
(map ::ws/id)))
|
||||
|
||||
(defn repl-close-connection
|
||||
[id]
|
||||
(when-let [wsp (get @state id)]
|
||||
(a/>!! (::ws/close-ch @wsp) [8899 "closed from server"])
|
||||
(a/close! (::ws/close-ch @wsp))))
|
||||
(when-let [{:keys [::ws/close-ch] :as wsp} (get @state id)]
|
||||
(sp/put! close-ch [8899 "closed from server"])
|
||||
(sp/close! close-ch)))
|
||||
|
||||
(defn repl-get-connection-info
|
||||
[id]
|
||||
(when-let [wsp (get @state id)]
|
||||
{:id id
|
||||
:created-at (::created-at @wsp)
|
||||
:profile-id (::profile-id @wsp)
|
||||
:session-id (::session-id @wsp)
|
||||
:user-agent (::ws/user-agent @wsp)
|
||||
:ip-addr (::ws/remote-addr @wsp)
|
||||
:last-activity-at (::ws/last-activity-at @wsp)
|
||||
:subscribed-file (-> wsp deref ::file-subscription :file-id)
|
||||
:subscribed-team (-> wsp deref ::team-subscription :team-id)}))
|
||||
:created-at (::created-at wsp)
|
||||
:profile-id (::profile-id wsp)
|
||||
:session-id (::session-id wsp)
|
||||
:user-agent (::ws/user-agent wsp)
|
||||
:ip-addr (::ws/remote-addr wsp)
|
||||
:last-activity-at (::ws/last-activity-at wsp)
|
||||
:subscribed-file (-> wsp ::file-subscription :file-id)
|
||||
:subscribed-team (-> wsp ::team-subscription :team-id)}))
|
||||
|
||||
(defn repl-print-connection-info
|
||||
[id]
|
||||
|
@ -117,223 +85,215 @@
|
|||
(fn [_ _ message]
|
||||
(:type message)))
|
||||
|
||||
(defmethod handle-message :connect
|
||||
[cfg wsp _]
|
||||
(defmethod handle-message :open
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::ws/output-ch ::ws/state ::profile-id ::session-id] :as wsp} _]
|
||||
(l/trace :fn "handle-message" :event "open" :conn-id id)
|
||||
(let [ch (sp/chan :buf (sp/dropping-buffer 16)
|
||||
:xf (remove #(= (:session-id %) session-id)))]
|
||||
|
||||
(let [msgbus (::mbus/msgbus cfg)
|
||||
conn-id (::ws/id @wsp)
|
||||
profile-id (::profile-id @wsp)
|
||||
session-id (::session-id @wsp)
|
||||
output-ch (::ws/output-ch @wsp)
|
||||
;; Subscribe to the profile channel and forward all messages to websocket output
|
||||
;; channel (send them to the client).
|
||||
(swap! state assoc ::profile-subscription {:channel ch})
|
||||
|
||||
xform (remove #(= (:session-id %) session-id))
|
||||
channel (a/chan (a/dropping-buffer 16) xform)]
|
||||
;; Forward the subscription messages directly to the websocket output channel
|
||||
(sp/pipe ch output-ch false)
|
||||
|
||||
(l/trace :fn "handle-message" :event "connect" :conn-id conn-id)
|
||||
;; Subscribe to the profile topic on msgbus/redis
|
||||
(mbus/sub! msgbus :topic profile-id :chan ch)))
|
||||
|
||||
;; Subscribe to the profile channel and forward all messages to
|
||||
;; websocket output channel (send them to the client).
|
||||
(swap! wsp assoc ::profile-subscription channel)
|
||||
(a/pipe channel output-ch false)
|
||||
(mbus/sub! msgbus :topic profile-id :chan channel)))
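
For readers coming from core.async, a self-contained sketch (all values invented) of the promesa.exec.csp primitives used in the new :open handler: a channel with a dropping buffer and a transducer, piped into an output channel:

(let [out (sp/chan :buf 16)
      ch  (sp/chan :buf (sp/dropping-buffer 16)
                   :xf (remove #(= (:session-id %) "self")))]
  (sp/pipe ch out false)
  ;; the first put is dropped by the transducer, the second one is forwarded
  (sp/put! ch {:session-id "self" :type :noise})
  (sp/put! ch {:session-id "other" :type :pointer-update})
  (let [result (sp/take! out)]
    (sp/close! ch)
    result))
;; => {:session-id "other" :type :pointer-update}
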
|
||||
(defmethod handle-message :close
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::ws/state ::profile-id ::session-id]} _]
|
||||
(l/trace :fn "handle-message" :event "close" :conn-id id)
|
||||
(let [psub (::profile-subscription @state)
|
||||
fsub (::file-subscription @state)
|
||||
tsub (::team-subscription @state)
|
||||
msg {:type :disconnect
|
||||
:subs-id profile-id
|
||||
:profile-id profile-id
|
||||
:session-id session-id}]
|
||||
|
||||
(defmethod handle-message :disconnect
|
||||
[cfg wsp _]
|
||||
(let [msgbus (::mbus/msgbus cfg)
|
||||
conn-id (::ws/id @wsp)
|
||||
profile-id (::profile-id @wsp)
|
||||
session-id (::session-id @wsp)
|
||||
profile-ch (::profile-subscription @wsp)
|
||||
fsub (::file-subscription @wsp)
|
||||
tsub (::team-subscription @wsp)
|
||||
;; Close profile subscription if exists
|
||||
(when-let [ch (:channel psub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
|
||||
message {:type :disconnect
|
||||
:subs-id profile-id
|
||||
:profile-id profile-id
|
||||
:session-id session-id}]
|
||||
|
||||
(l/trace :fn "handle-message"
|
||||
:event :disconnect
|
||||
:conn-id conn-id)
|
||||
|
||||
(a/go
|
||||
;; Close the main profile subscription
|
||||
(a/close! profile-ch)
|
||||
(a/<! (mbus/purge! msgbus [profile-ch]))
|
||||
|
||||
;; Close tram subscription if exists
|
||||
(when-let [channel (:channel tsub)]
|
||||
(a/close! channel)
|
||||
(a/<! (mbus/purge! msgbus channel)))
|
||||
;; Close team subscription if exists
|
||||
(when-let [ch (:channel tsub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
|
||||
;; Close file subscription if exists
|
||||
(when-let [{:keys [topic channel]} fsub]
|
||||
(a/close! channel)
|
||||
(a/<! (mbus/purge! msgbus channel))
|
||||
(a/<! (mbus/pub! msgbus :topic topic :message message))))))
|
||||
(sp/close! channel)
|
||||
(mbus/purge! msgbus [channel])
|
||||
(mbus/pub! msgbus :topic topic :message msg))))
|
||||
|
||||
(defmethod handle-message :subscribe-team
|
||||
[cfg wsp {:keys [team-id] :as params}]
|
||||
(let [msgbus (::mbus/msgbus cfg)
|
||||
conn-id (::ws/id @wsp)
|
||||
session-id (::session-id @wsp)
|
||||
output-ch (::ws/output-ch @wsp)
|
||||
prev-subs (get @wsp ::team-subscription)
|
||||
xform (comp
|
||||
(remove #(= (:session-id %) session-id))
|
||||
(map #(assoc % :subs-id team-id)))
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::ws/state ::ws/output-ch ::session-id]} {:keys [team-id] :as params}]
|
||||
(l/trace :fn "handle-message" :event "subscribe-team" :team-id team-id :conn-id id)
|
||||
(let [prev-subs (get @state ::team-subscription)
|
||||
channel (sp/chan :buf (sp/dropping-buffer 64)
|
||||
:xf (comp
|
||||
(remove #(= (:session-id %) session-id))
|
||||
(map #(assoc % :subs-id team-id))))]
|
||||
|
||||
channel (a/chan (a/dropping-buffer 64) xform)]
|
||||
(sp/pipe channel output-ch false)
|
||||
(mbus/sub! msgbus :topic team-id :chan channel)
|
||||
|
||||
(l/trace :fn "handle-message"
|
||||
:event :subscribe-team
|
||||
:team-id team-id
|
||||
:conn-id conn-id)
|
||||
(let [subs {:team-id team-id :channel channel :topic team-id}]
|
||||
(swap! state assoc ::team-subscription subs))
|
||||
|
||||
(a/pipe channel output-ch false)
|
||||
;; Close previous subscription if exists
|
||||
(when-let [ch (:channel prev-subs)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))))
|
||||
|
||||
(let [state {:team-id team-id :channel channel :topic team-id}]
|
||||
(swap! wsp assoc ::team-subscription state))
|
||||
|
||||
(a/go
|
||||
;; Close previous subscription if exists
|
||||
(when-let [channel (:channel prev-subs)]
|
||||
(a/close! channel)
|
||||
(a/<! (mbus/purge! msgbus channel))))
|
||||
|
||||
(a/go
|
||||
(a/<! (mbus/sub! msgbus :topic team-id :chan channel)))))
|
||||
|
||||
(defmethod handle-message :subscribe-file
|
||||
[cfg wsp {:keys [file-id] :as params}]
|
||||
(let [msgbus (::mbus/msgbus cfg)
|
||||
conn-id (::ws/id @wsp)
|
||||
profile-id (::profile-id @wsp)
|
||||
session-id (::session-id @wsp)
|
||||
output-ch (::ws/output-ch @wsp)
|
||||
prev-subs (::file-subscription @wsp)
|
||||
xform (comp (remove #(= (:session-id %) session-id))
|
||||
(map #(assoc % :subs-id file-id)))
|
||||
channel (a/chan (a/dropping-buffer 64) xform)]
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::ws/state ::ws/output-ch ::session-id ::profile-id]} {:keys [file-id] :as params}]
|
||||
(l/trace :fn "handle-message" :event "subscribe-file" :file-id file-id :conn-id id)
|
||||
(let [psub (::file-subscription @state)
|
||||
fch (sp/chan :buf (sp/dropping-buffer 64)
|
||||
:xf (comp (remove #(= (:session-id %) session-id))
|
||||
(map #(assoc % :subs-id file-id))))]
|
||||
|
||||
(l/trace :fn "handle-message"
|
||||
:event :subscribe-file
|
||||
:file-id file-id
|
||||
:conn-id conn-id)
|
||||
(let [subs {:file-id file-id :channel fch :topic file-id}]
|
||||
(swap! state assoc ::file-subscription subs))
|
||||
|
||||
(let [state {:file-id file-id :channel channel :topic file-id}]
|
||||
(swap! wsp assoc ::file-subscription state))
|
||||
;; Close previous subscription if exists
|
||||
(when-let [ch (:channel psub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
|
||||
(a/go
|
||||
;; Close previous subscription if exists
|
||||
(when-let [channel (:channel prev-subs)]
|
||||
(a/close! channel)
|
||||
(a/<! (mbus/purge! msgbus channel))))
|
||||
|
||||
;; Message forwarding
|
||||
(a/go
|
||||
(loop []
|
||||
(when-let [{:keys [type] :as message} (a/<! channel)]
|
||||
(when (or (= :join-file type)
|
||||
(= :leave-file type)
|
||||
(= :disconnect type))
|
||||
(let [message {:type :presence
|
||||
:file-id file-id
|
||||
:session-id session-id
|
||||
(sp/go-loop []
|
||||
(when-let [{:keys [type] :as message} (sp/take! fch)]
|
||||
(sp/put! output-ch message)
|
||||
(when (or (= :join-file type)
|
||||
(= :leave-file type)
|
||||
(= :disconnect type))
|
||||
(let [message {:type :presence
|
||||
:file-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
(a/<! (mbus/pub! msgbus :topic file-id :message message))))
|
||||
(a/>! output-ch message)
|
||||
(recur))))
|
||||
(mbus/pub! msgbus
|
||||
:topic file-id
|
||||
:message message)))
|
||||
(recur)))
|
||||
|
||||
(a/go
|
||||
;; Subscribe to file topic
|
||||
(a/<! (mbus/sub! msgbus :topic file-id :chan channel))
|
||||
;; Subscribe to file topic
|
||||
(mbus/sub! msgbus :topic file-id :chan fch)
|
||||
|
||||
;; Notify the rest of the participants of the new connection.
|
||||
(let [message {:type :join-file
|
||||
:file-id file-id
|
||||
:subs-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
(a/<! (mbus/pub! msgbus :topic file-id :message message))))))
|
||||
;; Notify the rest of the participants of the new connection.
|
||||
(let [message {:type :join-file
|
||||
:file-id file-id
|
||||
:subs-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
(mbus/pub! msgbus :topic file-id :message message))))
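
A minimal sketch (channel names and message invented) of the sp/go-loop pattern used above: consume a csp channel until it closes, forwarding every message and reacting to selected message types:

(let [in  (sp/chan :buf 8)
      out (sp/chan :buf 8)]
  (sp/go-loop []
    (when-let [{:keys [type] :as message} (sp/take! in)]
      (sp/put! out message)
      (when (= :join-file type)
        (l/trace :hint "participant joined"))
      (recur)))
  (sp/put! in {:type :join-file})
  (let [result (sp/take! out)]
    (sp/close! in)
    result))
;; => {:type :join-file}
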
|
||||
|
||||
(defmethod handle-message :unsubscribe-file
|
||||
[cfg wsp {:keys [file-id] :as params}]
|
||||
(let [msgbus (::mbus/msgbus cfg)
|
||||
conn-id (::ws/id @wsp)
|
||||
session-id (::session-id @wsp)
|
||||
profile-id (::profile-id @wsp)
|
||||
subs (::file-subscription @wsp)
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::ws/state ::session-id ::profile-id]} {:keys [file-id] :as params}]
|
||||
(l/trace :fn "handle-message" :event "unsubscribe-file" :file-id file-id :conn-id id)
|
||||
|
||||
message {:type :leave-file
|
||||
:file-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
(let [subs (::file-subscription @state)
|
||||
message {:type :leave-file
|
||||
:file-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
|
||||
(l/trace :fn "handle-message"
|
||||
:event :unsubscribe-file
|
||||
:file-id file-id
|
||||
:conn-id conn-id)
|
||||
|
||||
(a/go
|
||||
(when (= (:file-id subs) file-id)
|
||||
(let [channel (:channel subs)]
|
||||
(a/close! channel)
|
||||
(a/<! (mbus/purge! msgbus channel))
|
||||
(a/<! (mbus/pub! msgbus :topic file-id :message message)))))))
|
||||
(when (= (:file-id subs) file-id)
|
||||
(mbus/pub! msgbus :topic file-id :message message)
|
||||
(let [ch (:channel subs)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch])))))
|
||||
|
||||
(defmethod handle-message :keepalive
|
||||
[_ _ _]
|
||||
(l/trace :fn "handle-message" :event :keepalive)
|
||||
(a/go :nothing))
|
||||
(l/trace :fn "handle-message" :event :keepalive))
|
||||
|
||||
(defmethod handle-message :broadcast
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::session-id ::profile-id]} message]
|
||||
(l/trace :fn "handle-message" :event "broadcast" :conn-id id)
|
||||
(let [message (-> message
|
||||
(assoc :subs-id profile-id)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :session-id session-id))]
|
||||
(mbus/pub! msgbus :topic profile-id :message message)))
|
||||
|
||||
(defmethod handle-message :pointer-update
|
||||
[cfg wsp {:keys [file-id] :as message}]
|
||||
(let [msgbus (::mbus/msgbus cfg)
|
||||
profile-id (::profile-id @wsp)
|
||||
session-id (::session-id @wsp)
|
||||
subs (::file-subscription @wsp)
|
||||
message (-> message
|
||||
(assoc :subs-id file-id)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :session-id session-id))]
|
||||
(a/go
|
||||
;; Only allow receive pointer updates when active subscription
|
||||
(when subs
|
||||
(a/<! (mbus/pub! msgbus :topic file-id :message message))))))
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/state ::session-id ::profile-id]} {:keys [file-id] :as message}]
|
||||
(when (::file-subscription @state)
|
||||
(let [message (-> message
|
||||
(assoc :subs-id file-id)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :session-id session-id))]
|
||||
(mbus/pub! msgbus :topic file-id :message message))))
|
||||
|
||||
(defmethod handle-message :default
|
||||
[_ wsp message]
|
||||
(let [conn-id (::ws/id @wsp)]
|
||||
(l/warn :hint "received unexpected message"
|
||||
:message message
|
||||
:conn-id conn-id)
|
||||
(a/go :none)))
|
||||
[_ {:keys [::ws/id]} message]
|
||||
(l/warn :hint "received unexpected message"
|
||||
:message message
|
||||
:conn-id id))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP HANDLER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- on-connect
|
||||
[{:keys [::mtx/metrics]} {:keys [::ws/id] :as wsp}]
|
||||
(let [created-at (dt/now)]
|
||||
(l/trace :fn "on-connect" :conn-id id)
|
||||
(swap! state assoc id wsp)
|
||||
(mtx/run! metrics
|
||||
:id :websocket-active-connections
|
||||
:inc 1)
|
||||
|
||||
(assoc wsp ::ws/on-disconnect
|
||||
(fn []
|
||||
(l/trace :fn "on-disconnect" :conn-id id)
|
||||
(swap! state dissoc id)
|
||||
(mtx/run! metrics :id :websocket-active-connections :dec 1)
|
||||
(mtx/run! metrics
|
||||
:id :websocket-session-timing
|
||||
:val (/ (inst-ms (dt/diff created-at (dt/now))) 1000.0))))))
|
||||
|
||||
(defn- on-rcv-message
|
||||
[{:keys [::mtx/metrics ::profile-id ::session-id]} message]
|
||||
(mtx/run! metrics
|
||||
:id :websocket-messages-total
|
||||
:labels recv-labels
|
||||
:inc 1)
|
||||
(assoc message :profile-id profile-id :session-id session-id))
|
||||
|
||||
(defn- on-snd-message
|
||||
[{:keys [::mtx/metrics]} message]
|
||||
(mtx/run! metrics
|
||||
:id :websocket-messages-total
|
||||
:labels send-labels
|
||||
:inc 1)
|
||||
message)
|
||||
|
||||
|
||||
(s/def ::session-id ::us/uuid)
|
||||
(s/def ::handler-params
|
||||
(s/keys :req-un [::session-id]))
|
||||
|
||||
(defn- http-handler
|
||||
[cfg {:keys [params ::session/profile-id] :as request} respond raise]
|
||||
[cfg {:keys [params ::session/profile-id] :as request}]
|
||||
(let [{:keys [session-id]} (us/conform ::handler-params params)]
|
||||
(cond
|
||||
(not profile-id)
|
||||
(raise (ex/error :type :authentication
|
||||
:hint "Authentication required."))
|
||||
(ex/raise :type :authentication
|
||||
:hint "Authentication required.")
|
||||
|
||||
(not (yws/upgrade-request? request))
|
||||
(raise (ex/error :type :validation
|
||||
:code :websocket-request-expected
|
||||
:hint "this endpoint only accepts websocket connections"))
|
||||
(ex/raise :type :validation
|
||||
:code :websocket-request-expected
|
||||
:hint "this endpoint only accepts websocket connections")
|
||||
|
||||
:else
|
||||
(do
|
||||
(l/trace :hint "websocket request" :profile-id profile-id :session-id session-id)
|
||||
|
||||
(->> (ws/handler
|
||||
::ws/on-rcv-message (partial on-rcv-message cfg)
|
||||
::ws/on-snd-message (partial on-snd-message cfg)
|
||||
|
@ -341,8 +301,7 @@
|
|||
::ws/handler (partial handle-message cfg)
|
||||
::profile-id profile-id
|
||||
::session-id session-id)
|
||||
(yws/upgrade request)
|
||||
(respond))))))
|
||||
(yws/upgrade request))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::mbus/msgbus
|
||||
|
|
|
@ -16,13 +16,15 @@
|
|||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.http :as-alias http]
|
||||
[app.http.client :as http.client]
|
||||
[app.loggers.audit.tasks :as-alias tasks]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.retry :as rtry]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
|
@ -92,6 +94,15 @@
|
|||
|
||||
;; --- SPECS
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; COLLECTOR
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Defines a service that collects the audit/activity log using
|
||||
;; internal database. Later this audit log can be transferred to
|
||||
;; an external storage and data cleared.
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::type ::us/string)
|
||||
|
@ -104,20 +115,13 @@
|
|||
(s/or :fn fn? :str string? :kw keyword?))
|
||||
|
||||
(s/def ::event
|
||||
(s/keys :req-un [::type ::name ::profile-id]
|
||||
:opt-un [::ip-addr ::props]
|
||||
:opt [::webhooks/event?
|
||||
(s/keys :req [::type ::name ::profile-id]
|
||||
:opt [::ip-addr
|
||||
::props
|
||||
::webhooks/event?
|
||||
::webhooks/batch-timeout
|
||||
::webhooks/batch-key]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; COLLECTOR
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Defines a service that collects the audit/activity log using
|
||||
;; internal database. Later this audit log can be transferred to
|
||||
;; an external storage and data cleared.
|
||||
|
||||
(s/def ::collector
|
||||
(s/keys :req [::wrk/executor ::db/pool]))
|
||||
|
||||
|
@ -133,15 +137,58 @@
|
|||
:else
|
||||
cfg))
|
||||
|
||||
(defn prepare-event
|
||||
[cfg mdata params result]
|
||||
(let [resultm (meta result)
|
||||
request (::http/request params)
|
||||
profile-id (or (::profile-id resultm)
|
||||
(:profile-id result)
|
||||
(::rpc/profile-id params)
|
||||
uuid/zero)
|
||||
|
||||
props (-> (or (::replace-props resultm)
|
||||
(-> params
|
||||
(merge (::props resultm))
|
||||
(dissoc :profile-id)
|
||||
(dissoc :type)))
|
||||
|
||||
(clean-props))]
|
||||
|
||||
{::type (or (::type resultm)
|
||||
(::rpc/type cfg))
|
||||
::name (or (::name resultm)
|
||||
(::sv/name mdata))
|
||||
::profile-id profile-id
|
||||
::ip-addr (some-> request parse-client-ip)
|
||||
::props props
|
||||
|
||||
;; NOTE: for batch-key lookup we need the params as-is
|
||||
;; because the rpc api does not need to know the
|
||||
;; audit/webhook specific object layout.
|
||||
::rpc/params (dissoc params ::http/request)
|
||||
|
||||
::webhooks/batch-key
|
||||
(or (::webhooks/batch-key mdata)
|
||||
(::webhooks/batch-key resultm))
|
||||
|
||||
::webhooks/batch-timeout
|
||||
(or (::webhooks/batch-timeout mdata)
|
||||
(::webhooks/batch-timeout resultm))
|
||||
|
||||
::webhooks/event?
|
||||
(or (::webhooks/event? mdata)
|
||||
(::webhooks/event? resultm)
|
||||
false)}))
|
||||
|
||||
(defn- handle-event!
|
||||
[conn-or-pool event]
|
||||
(us/verify! ::event event)
|
||||
(let [params {:id (uuid/next)
|
||||
:name (:name event)
|
||||
:type (:type event)
|
||||
:profile-id (:profile-id event)
|
||||
:ip-addr (:ip-addr event)
|
||||
:props (:props event)}]
|
||||
:name (::name event)
|
||||
:type (::type event)
|
||||
:profile-id (::profile-id event)
|
||||
:ip-addr (::ip-addr event)
|
||||
:props (::props event)}]
|
||||
|
||||
(when (contains? cf/flags :audit-log)
|
||||
;; NOTE: this operation may cause primary key conflicts on inserts
|
||||
|
@ -207,7 +254,7 @@
|
|||
(s/def ::tasks/uri ::us/string)
|
||||
|
||||
(defmethod ig/pre-init-spec ::tasks/archive-task [_]
|
||||
(s/keys :req [::db/pool ::main/props ::http/client]))
|
||||
(s/keys :req [::db/pool ::main/props ::http.client/client]))
|
||||
|
||||
(defmethod ig/init-key ::tasks/archive
|
||||
[_ cfg]
|
||||
|
@ -231,7 +278,7 @@
|
|||
(if n
|
||||
(do
|
||||
(px/sleep 100)
|
||||
(recur (+ total n)))
|
||||
(recur (+ total ^long n)))
|
||||
(when (pos? total)
|
||||
(l/debug :hint "events archived" :total total)))))))))
|
||||
|
||||
|
@ -281,7 +328,7 @@
|
|||
:method :post
|
||||
:headers headers
|
||||
:body body}
|
||||
resp (http/req! cfg params {:sync? true})]
|
||||
resp (http.client/req! cfg params {:sync? true})]
|
||||
(if (= (:status resp) 204)
|
||||
true
|
||||
(do
|
||||
|
|
|
@ -62,6 +62,11 @@
|
|||
(dissoc ::s/problems ::s/value ::s/spec :hint)
|
||||
(pp/pprint-str :width 200))})))
|
||||
|
||||
(defn error-record?
|
||||
[{:keys [::l/level ::l/cause]}]
|
||||
(and (= :error level)
|
||||
(ex/exception? cause)))
|
||||
|
||||
(defn- handle-event
|
||||
[{:keys [::db/pool]} {:keys [::l/id] :as record}]
|
||||
(try
|
||||
|
@ -74,20 +79,16 @@
|
|||
(catch Throwable cause
|
||||
(l/warn :hint "unexpected exception on database error logger" :cause cause))))
|
||||
|
||||
(defn error-record?
|
||||
[{:keys [::l/level ::l/cause]}]
|
||||
(and (= :error level)
|
||||
(ex/exception? cause)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::reporter [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::reporter
|
||||
[_ cfg]
|
||||
(let [input (sp/chan (sp/sliding-buffer 32) (filter error-record?))]
|
||||
(let [input (sp/chan :buf (sp/sliding-buffer 32)
|
||||
:xf (filter error-record?))]
|
||||
(add-watch l/log-record ::reporter #(sp/put! input %4))
|
||||
(px/thread
|
||||
{:name "penpot/database-reporter" :virtual true}
|
||||
|
||||
(px/thread {:name "penpot/database-reporter" :virtual true}
|
||||
(l/info :hint "initializing database error persistence")
|
||||
(try
|
||||
(loop []
|
||||
|
|
|
@ -77,7 +77,8 @@
|
|||
{:name "penpot/mattermost-reporter"
|
||||
:virtual true}
|
||||
(l/info :hint "initializing error reporter" :uri uri)
|
||||
(let [input (sp/chan (sp/sliding-buffer 128) (filter ldb/error-record?))]
|
||||
(let [input (sp/chan :buf (sp/sliding-buffer 128)
|
||||
:xf (filter ldb/error-record?))]
|
||||
(add-watch l/log-record ::reporter #(sp/put! input %4))
|
||||
(try
|
||||
(loop []
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
[app.db :as-alias db]
|
||||
[app.email :as-alias email]
|
||||
[app.http :as-alias http]
|
||||
[app.http.access-token :as-alias actoken]
|
||||
[app.http.assets :as-alias http.assets]
|
||||
[app.http.awsns :as http.awsns]
|
||||
[app.http.client :as-alias http.client]
|
||||
|
@ -37,7 +36,8 @@
|
|||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig])
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px])
|
||||
(:gen-class))
|
||||
|
||||
(def default-metrics
|
||||
|
@ -102,15 +102,15 @@
|
|||
::mdef/labels ["name"]
|
||||
::mdef/type :summary}
|
||||
|
||||
:rpc-climit-queue-size
|
||||
{::mdef/name "penpot_rpc_climit_queue_size"
|
||||
::mdef/help "Current number of queued submissions on the CLIMIT."
|
||||
:rpc-climit-queue
|
||||
{::mdef/name "penpot_rpc_climit_queue"
|
||||
::mdef/help "Current number of queued submissions."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
:rpc-climit-concurrency
|
||||
{::mdef/name "penpot_rpc_climit_concurrency"
|
||||
::mdef/help "Current number of used concurrency capacity on the CLIMIT"
|
||||
:rpc-climit-permits
|
||||
{::mdef/name "penpot_rpc_climit_permits"
|
||||
::mdef/help "Current number of available permits"
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
|
@ -174,10 +174,8 @@
|
|||
|
||||
;; Default thread pool for IO operations
|
||||
::wrk/executor
|
||||
{::wrk/parallelism (cf/get :default-executor-parallelism 100)}
|
||||
|
||||
::wrk/scheduled-executor
|
||||
{::wrk/parallelism (cf/get :scheduled-executor-parallelism 20)}
|
||||
{::wrk/parallelism (cf/get :default-executor-parallelism
|
||||
(+ 3 (* (px/get-available-processors) 3)))}
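
Worked example (illustrative): on a machine reporting 8 available processors the new default resolves to a parallelism of 27, unless :default-executor-parallelism is set explicitly:

(+ 3 (* 8 3)) ;; => 27
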
|
||||
|
||||
::wrk/monitor
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
|
@ -194,17 +192,16 @@
|
|||
{::mtx/metrics (ig/ref ::mtx/metrics)}
|
||||
|
||||
::rds/redis
|
||||
{::rds/uri (cf/get :redis-uri)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)}
|
||||
{::rds/uri (cf/get :redis-uri)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::mbus/msgbus
|
||||
{:backend (cf/get :msgbus-backend :redis)
|
||||
:executor (ig/ref ::wrk/executor)
|
||||
:redis (ig/ref ::rds/redis)}
|
||||
{::wrk/executor (ig/ref ::wrk/executor)
|
||||
::rds/redis (ig/ref ::rds/redis)}
|
||||
|
||||
:app.storage.tmp/cleaner
|
||||
{::wrk/executor (ig/ref ::wrk/executor)
|
||||
::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)}
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::sto/gc-deleted-task
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
|
@ -217,14 +214,7 @@
|
|||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::session/manager
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::props (ig/ref :app.setup/props)}
|
||||
|
||||
::actoken/manager
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::props (ig/ref :app.setup/props)}
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::session.tasks/gc
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
@ -239,8 +229,7 @@
|
|||
{::http/port (cf/get :http-server-port)
|
||||
::http/host (cf/get :http-server-host)
|
||||
::http/router (ig/ref ::http/router)
|
||||
::http/metrics (ig/ref ::mtx/metrics)
|
||||
::http/executor (ig/ref ::wrk/executor)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::http/io-threads (cf/get :http-server-io-threads)
|
||||
::http/max-body-size (cf/get :http-server-max-body-size)
|
||||
::http/max-multipart-body-size (cf/get :http-server-max-multipart-body-size)}
|
||||
|
@ -275,7 +264,6 @@
|
|||
{::http.client/client (ig/ref ::http.client/client)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::props (ig/ref :app.setup/props)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::oidc/providers {:google (ig/ref ::oidc.providers/google)
|
||||
:github (ig/ref ::oidc.providers/github)
|
||||
:gitlab (ig/ref ::oidc.providers/gitlab)
|
||||
|
@ -284,8 +272,6 @@
|
|||
|
||||
:app.http/router
|
||||
{::session/manager (ig/ref ::session/manager)
|
||||
::actoken/manager (ig/ref ::actoken/manager)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::rpc/routes (ig/ref ::rpc/routes)
|
||||
::rpc.doc/routes (ig/ref ::rpc.doc/routes)
|
||||
|
@ -302,10 +288,10 @@
|
|||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::session/manager (ig/ref ::session/manager)}
|
||||
|
||||
:app.http.websocket/routes
|
||||
::http.ws/routes
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::mbus/msgbus (ig/ref :app.msgbus/msgbus)
|
||||
::mbus/msgbus (ig/ref ::mbus/msgbus)
|
||||
::session/manager (ig/ref ::session/manager)}
|
||||
|
||||
:app.http.assets/routes
|
||||
|
@ -320,8 +306,7 @@
|
|||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
:app.rpc/rlimit
|
||||
{::wrk/executor (ig/ref ::wrk/executor)
|
||||
::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)}
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
:app.rpc/methods
|
||||
{::http.client/client (ig/ref ::http.client/client)
|
||||
|
@ -351,7 +336,6 @@
|
|||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::session/manager (ig/ref ::session/manager)
|
||||
::actoken/manager (ig/ref ::actoken/manager)
|
||||
::props (ig/ref :app.setup/props)}
|
||||
|
||||
::wrk/registry
|
||||
|
@ -467,8 +451,7 @@
|
|||
|
||||
(def worker-config
|
||||
{::wrk/cron
|
||||
{::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)
|
||||
::wrk/registry (ig/ref ::wrk/registry)
|
||||
{::wrk/registry (ig/ref ::wrk/registry)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/entries
|
||||
[{:cron #app/cron "0 0 * * * ?" ;; hourly
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
[app.storage :as-alias sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.svg :as svg]
|
||||
[app.util.time :as dt]
|
||||
[buddy.core.bytes :as bb]
|
||||
[buddy.core.codecs :as bc]
|
||||
[clojure.java.shell :as sh]
|
||||
|
@ -168,7 +169,7 @@
|
|||
(ex/raise :type :validation
|
||||
:code :invalid-svg-file
|
||||
:hint "uploaded svg does not provides dimensions"))
|
||||
(merge input info))
|
||||
(merge input info {:ts (dt/now)}))
|
||||
|
||||
(let [instance (Info. (str path))
|
||||
mtype' (.getProperty instance "Mime type")]
|
||||
|
@ -183,7 +184,8 @@
|
|||
;; any frame.
|
||||
(assoc input
|
||||
:width (.getPageWidth instance)
|
||||
:height (.getPageHeight instance))))))
|
||||
:height (.getPageHeight instance)
|
||||
:ts (dt/now))))))
|
||||
|
||||
(defmethod process-error org.im4java.core.InfoException
|
||||
[error]
|
||||
|
|
|
@ -8,20 +8,18 @@
|
|||
"The msgbus abstraction implemented using redis as underlying backend."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.transit :as t]
|
||||
[app.config :as cfg]
|
||||
[app.redis :as redis]
|
||||
[app.util.async :as aa]
|
||||
[app.redis :as rds]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
|
@ -34,132 +32,116 @@
|
|||
(def ^:private xform-prefix-topic
|
||||
(map (fn [obj] (update obj :topic prefix-topic))))
|
||||
|
||||
(declare ^:private redis-connect)
|
||||
(declare ^:private redis-disconnect)
|
||||
(declare ^:private redis-pub)
|
||||
(declare ^:private redis-sub)
|
||||
(declare ^:private redis-unsub)
|
||||
(declare ^:private redis-pub!)
|
||||
(declare ^:private redis-sub!)
|
||||
(declare ^:private redis-unsub!)
|
||||
(declare ^:private start-io-loop!)
|
||||
(declare ^:private subscribe-to-topics)
|
||||
(declare ^:private unsubscribe-channels)
|
||||
|
||||
(defmethod ig/prep-key ::msgbus
|
||||
[_ cfg]
|
||||
(merge {:buffer-size 128
|
||||
:timeout (dt/duration {:seconds 30})}
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(s/def ::cmd-ch ::aa/channel)
|
||||
(s/def ::rcv-ch ::aa/channel)
|
||||
(s/def ::pub-ch ::aa/channel)
|
||||
(s/def ::cmd-ch sp/chan?)
|
||||
(s/def ::rcv-ch sp/chan?)
|
||||
(s/def ::pub-ch sp/chan?)
|
||||
(s/def ::state ::us/agent)
|
||||
(s/def ::pconn ::redis/connection-holder)
|
||||
(s/def ::sconn ::redis/connection-holder)
|
||||
(s/def ::pconn ::rds/connection-holder)
|
||||
(s/def ::sconn ::rds/connection-holder)
|
||||
(s/def ::msgbus
|
||||
(s/keys :req [::cmd-ch ::rcv-ch ::pub-ch ::state ::pconn ::sconn ::wrk/executor]))
|
||||
|
||||
(s/def ::buffer-size ::us/integer)
|
||||
|
||||
(defmethod ig/pre-init-spec ::msgbus [_]
|
||||
(s/keys :req-un [::buffer-size ::redis/timeout ::redis/redis ::wrk/executor]))
|
||||
(s/keys :req [::rds/redis ::wrk/executor]))
|
||||
|
||||
(defmethod ig/prep-key ::msgbus
|
||||
[_ cfg]
|
||||
(-> cfg
|
||||
(assoc ::buffer-size 128)
|
||||
(assoc ::timeout (dt/duration {:seconds 30}))))
|
||||
|
||||
(defmethod ig/init-key ::msgbus
|
||||
[_ {:keys [buffer-size executor] :as cfg}]
|
||||
[_ {:keys [::buffer-size ::wrk/executor ::timeout ::rds/redis] :as cfg}]
|
||||
(l/info :hint "initialize msgbus" :buffer-size buffer-size)
|
||||
(let [cmd-ch (a/chan buffer-size)
|
||||
rcv-ch (a/chan (a/dropping-buffer buffer-size))
|
||||
pub-ch (a/chan (a/dropping-buffer buffer-size) xform-prefix-topic)
|
||||
(let [cmd-ch (sp/chan :buf buffer-size)
|
||||
rcv-ch (sp/chan :buf (sp/dropping-buffer buffer-size))
|
||||
pub-ch (sp/chan :buf (sp/dropping-buffer buffer-size)
|
||||
:xf xform-prefix-topic)
|
||||
state (agent {})
|
||||
msgbus (-> (redis-connect cfg)
|
||||
|
||||
pconn (rds/connect redis :timeout timeout)
|
||||
sconn (rds/connect redis :type :pubsub :timeout timeout)
|
||||
msgbus (-> cfg
|
||||
(assoc ::pconn pconn)
|
||||
(assoc ::sconn sconn)
|
||||
(assoc ::cmd-ch cmd-ch)
|
||||
(assoc ::rcv-ch rcv-ch)
|
||||
(assoc ::pub-ch pub-ch)
|
||||
(assoc ::state state)
|
||||
(assoc ::wrk/executor executor))]
|
||||
|
||||
(us/verify! ::msgbus msgbus)
|
||||
|
||||
(set-error-handler! state #(l/error :cause % :hint "unexpected error on agent" ::l/sync? true))
|
||||
(set-error-mode! state :continue)
|
||||
(start-io-loop! msgbus)
|
||||
|
||||
msgbus))
|
||||
|
||||
(defn sub!
|
||||
[{:keys [::state ::wrk/executor] :as cfg} & {:keys [topic topics chan]}]
|
||||
(let [done-ch (a/chan)
|
||||
topics (into [] (map prefix-topic) (if topic [topic] topics))]
|
||||
(l/debug :hint "subscribe" :topics topics)
|
||||
(send-via executor state subscribe-to-topics cfg topics chan done-ch)
|
||||
done-ch))
|
||||
|
||||
(defn pub!
|
||||
[{::keys [pub-ch]} & {:as params}]
|
||||
(a/go
|
||||
(a/>! pub-ch params)))
|
||||
|
||||
(defn purge!
|
||||
[{:keys [::state ::wrk/executor] :as msgbus} chans]
|
||||
(l/trace :hint "purge" :chans (count chans))
|
||||
(let [done-ch (a/chan)]
|
||||
(send-via executor state unsubscribe-channels msgbus chans done-ch)
|
||||
done-ch))
|
||||
(assoc msgbus ::io-thr (start-io-loop! msgbus))))
|
||||
|
||||
(defmethod ig/halt-key! ::msgbus
|
||||
[_ msgbus]
|
||||
(redis-disconnect msgbus)
|
||||
(a/close! (::cmd-ch msgbus))
|
||||
(a/close! (::rcv-ch msgbus))
|
||||
(a/close! (::pub-ch msgbus)))
|
||||
(px/interrupt! (::io-thr msgbus))
|
||||
(sp/close! (::cmd-ch msgbus))
|
||||
(sp/close! (::rcv-ch msgbus))
|
||||
(sp/close! (::pub-ch msgbus))
|
||||
(d/close! (::pconn msgbus))
|
||||
(d/close! (::sconn msgbus)))
|
||||
|
||||
(defn sub!
|
||||
[{:keys [::state ::wrk/executor] :as cfg} & {:keys [topic topics chan]}]
|
||||
(let [topics (into [] (map prefix-topic) (if topic [topic] topics))]
|
||||
(l/debug :hint "subscribe" :topics topics :chan (hash chan))
|
||||
(send-via executor state subscribe-to-topics cfg topics chan)
|
||||
nil))
|
||||
|
||||
(defn pub!
|
||||
[{::keys [pub-ch]} & {:as params}]
|
||||
(sp/put! pub-ch params))
|
||||
|
||||
(defn purge!
|
||||
[{:keys [::state ::wrk/executor] :as msgbus} chans]
|
||||
(l/debug :hint "purge" :chans (count chans))
|
||||
(send-via executor state unsubscribe-channels msgbus chans)
|
||||
nil)
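
Illustrative usage of the refactored msgbus API (the msgbus value, topic and message are placeholders): sub! and purge! now just dispatch work to the state agent and return nil, while pub! is a plain blocking put on the publish channel instead of a go block:

(let [ch (sp/chan :buf (sp/dropping-buffer 64))]
  (sub! msgbus :topic "some-team-id" :chan ch)               ;; => nil
  (pub! msgbus :topic "some-team-id" :message {:type :ping})
  ;; once the consumer goes away:
  (purge! msgbus [ch]))
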
|
||||
|
||||
;; --- IMPL
|
||||
|
||||
(defn- redis-connect
|
||||
[{:keys [timeout redis] :as cfg}]
|
||||
(let [pconn (redis/connect redis :timeout timeout)
|
||||
sconn (redis/connect redis :type :pubsub :timeout timeout)]
|
||||
{::pconn pconn
|
||||
::sconn sconn}))
|
||||
|
||||
(defn- redis-disconnect
|
||||
[{:keys [::pconn ::sconn] :as cfg}]
|
||||
(d/close! pconn)
|
||||
(d/close! sconn))
|
||||
|
||||
(defn- conj-subscription
|
||||
"A low level function that is responsible to create on-demand
|
||||
subscriptions on redis. It reuses the same subscription if it is
|
||||
already established. Intended to be executed in agent."
|
||||
already established."
|
||||
[nsubs cfg topic chan]
|
||||
(let [nsubs (if (nil? nsubs) #{chan} (conj nsubs chan))]
|
||||
(when (= 1 (count nsubs))
|
||||
(l/trace :hint "open subscription" :topic topic ::l/sync? true)
|
||||
(redis-sub cfg topic))
|
||||
(redis-sub! cfg topic))
|
||||
nsubs))
|
||||
|
||||
(defn- disj-subscription
|
||||
"A low level function responsible on removing subscriptions. The
|
||||
subscription is truly removed from redis once no single local
|
||||
subscription is look for it. Intended to be executed in agent."
|
||||
subscription looks for it."
|
||||
[nsubs cfg topic chan]
|
||||
(let [nsubs (disj nsubs chan)]
|
||||
(when (empty? nsubs)
|
||||
(l/trace :hint "close subscription" :topic topic ::l/sync? true)
|
||||
(redis-unsub cfg topic))
|
||||
(redis-unsub! cfg topic))
|
||||
nsubs))
|
||||
|
||||
(defn- subscribe-to-topics
|
||||
"Function responsible to attach local subscription to the
|
||||
state. Intended to be used in agent."
|
||||
[state cfg topics chan done-ch]
|
||||
(aa/with-closing done-ch
|
||||
(let [state (update state :chans assoc chan topics)]
|
||||
(reduce (fn [state topic]
|
||||
(update-in state [:topics topic] conj-subscription cfg topic chan))
|
||||
state
|
||||
topics))))
|
||||
"Function responsible to attach local subscription to the state."
|
||||
[state cfg topics chan]
|
||||
(let [state (update state :chans assoc chan topics)]
|
||||
(reduce (fn [state topic]
|
||||
(update-in state [:topics topic] conj-subscription cfg topic chan))
|
||||
state
|
||||
topics)))
|
||||
|
||||
(defn- unsubscribe-single-channel
|
||||
(defn- unsubscribe-channel
|
||||
"Auxiliary function responsible on removing a single local
|
||||
subscription from the state."
|
||||
[state cfg chan]
|
||||
|
@ -174,87 +156,113 @@
|
|||
"Function responsible from detach from state a seq of channels,
|
||||
useful when a client disconnects or for in-bulk unsubscribe
|
||||
operations. Intended to be executed in agent."
|
||||
[state cfg channels done-ch]
|
||||
(aa/with-closing done-ch
|
||||
(reduce #(unsubscribe-single-channel %1 cfg %2) state channels)))
|
||||
[state cfg channels]
|
||||
(reduce #(unsubscribe-channel %1 cfg %2) state channels))
|
||||
|
||||
(defn- create-listener
|
||||
[rcv-ch]
|
||||
(redis/pubsub-listener
|
||||
(rds/pubsub-listener
|
||||
:on-message (fn [_ topic message]
|
||||
;; There is no back pressure, so we use a sliding
|
||||
;; buffer for cases when the pubsub broker sends
|
||||
;; more messages than we can process.
|
||||
(let [val {:topic topic :message (t/decode message)}]
|
||||
(when-not (a/offer! rcv-ch val)
|
||||
(when-not (sp/offer! rcv-ch val)
|
||||
(l/warn :msg "dropping message on subscription loop"))))))
|
||||
|
||||
(defn- process-input!
|
||||
[{:keys [::state ::wrk/executor] :as cfg} topic message]
|
||||
(let [chans (get-in @state [:topics topic])]
|
||||
(when-let [closed (loop [chans (seq chans)
|
||||
closed #{}]
|
||||
(if-let [ch (first chans)]
|
||||
(if (sp/put! ch message)
|
||||
(recur (rest chans) closed)
|
||||
(recur (rest chans) (conj closed ch)))
|
||||
(seq closed)))]
|
||||
(send-via executor state unsubscribe-channels cfg closed))))
|
||||
|
||||
|
||||
(defn start-io-loop!
|
||||
[{:keys [::sconn ::rcv-ch ::pub-ch ::state ::wrk/executor] :as cfg}]
|
||||
(redis/add-listener! sconn (create-listener rcv-ch))
|
||||
(letfn [(send-to-topic [topic message]
|
||||
(a/go-loop [chans (seq (get-in @state [:topics topic]))
|
||||
closed #{}]
|
||||
(if-let [ch (first chans)]
|
||||
(if (a/>! ch message)
|
||||
(recur (rest chans) closed)
|
||||
(recur (rest chans) (conj closed ch)))
|
||||
(seq closed))))
|
||||
(rds/add-listener! sconn (create-listener rcv-ch))
|
||||
|
||||
(process-incoming [{:keys [topic message]}]
|
||||
(a/go
|
||||
(when-let [closed (a/<! (send-to-topic topic message))]
|
||||
(send-via executor state unsubscribe-channels cfg closed nil))))
|
||||
]
|
||||
(px/thread
|
||||
{:name "penpot/msgbus-io-loop"}
|
||||
(px/thread
|
||||
{:name "penpot/msgbus/io-loop"
|
||||
:virtual true}
|
||||
(try
|
||||
(loop []
|
||||
(let [[val port] (a/alts!! [pub-ch rcv-ch])]
|
||||
(let [timeout-ch (sp/timeout-chan 1000)
|
||||
[val port] (sp/alts! [timeout-ch pub-ch rcv-ch])]
|
||||
(cond
|
||||
(nil? val)
|
||||
(do
|
||||
(l/trace :hint "stopping io-loop, nil received")
|
||||
(send-via executor state (fn [state]
|
||||
(->> (vals state)
|
||||
(mapcat identity)
|
||||
(filter some?)
|
||||
(run! a/close!))
|
||||
nil)))
|
||||
|
||||
(= port rcv-ch)
|
||||
(do
|
||||
(a/<!! (process-incoming val))
|
||||
(identical? port timeout-ch)
|
||||
(let [closed (->> (:chans @state)
|
||||
(map key)
|
||||
(filter sp/closed?))]
|
||||
(when (seq closed)
|
||||
(send-via executor state unsubscribe-channels cfg closed)
|
||||
(l/debug :hint "proactively purge channels" :count (count closed)))
|
||||
(recur))
|
||||
|
||||
(= port pub-ch)
|
||||
(let [result (a/<!! (redis-pub cfg val))]
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "unexpected error on publishing"
|
||||
:message val
|
||||
:cause result))
|
||||
(recur))))))))
|
||||
(nil? val)
|
||||
(throw (InterruptedException. "internally interrupted"))
|
||||
|
||||
(defn- redis-pub
|
||||
(identical? port rcv-ch)
|
||||
(let [{:keys [topic message]} val]
|
||||
(process-input! cfg topic message)
|
||||
(recur))
|
||||
|
||||
(identical? port pub-ch)
|
||||
(do
|
||||
(redis-pub! cfg val)
|
||||
(recur)))))
|
||||
|
||||
(catch InterruptedException _
|
||||
(l/trace :hint "io-loop thread interrumpted"))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/error :hint "unexpected exception on io-loop thread"
|
||||
:cause cause))
|
||||
(finally
|
||||
(l/trace :hint "clearing io-loop state")
|
||||
(when-let [chans (:chans @state)]
|
||||
(run! sp/close! (keys chans)))
|
||||
|
||||
(l/debug :hint "io-loop thread terminated")))))
|
||||
|
||||
|
||||
(defn- redis-pub!
|
||||
"Publish a message to the redis server. Asynchronous operation,
|
||||
intended to be called from the io-loop thread."
|
||||
[{:keys [::pconn] :as cfg} {:keys [topic message]}]
|
||||
(let [message (t/encode message)
|
||||
res (a/chan 1)]
|
||||
(-> (redis/publish! pconn topic message)
|
||||
(p/finally (fn [_ cause]
|
||||
(when (and cause (redis/open? pconn))
|
||||
(a/offer! res cause))
|
||||
(a/close! res))))
|
||||
res))
|
||||
(try
|
||||
(p/await! (rds/publish! pconn topic (t/encode message)))
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "unexpected error on publishing"
|
||||
:message message
|
||||
:cause cause))))
|
||||
|
||||
(defn redis-sub
|
||||
(defn- redis-sub!
|
||||
"Create redis subscription. Blocking operation, intended to be used
|
||||
inside an agent."
|
||||
[{:keys [::sconn] :as cfg} topic]
|
||||
(redis/subscribe! sconn topic))
|
||||
(try
|
||||
(rds/subscribe! sconn topic)
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
(l/trace :hint "exception on subscribing" :topic topic :cause cause))))
|
||||
|
||||
(defn redis-unsub
|
||||
(defn- redis-unsub!
|
||||
"Removes redis subscription. Blocking operation, intended to be used
|
||||
inside an agent."
|
||||
[{:keys [::sconn] :as cfg} topic]
|
||||
(redis/unsubscribe! sconn topic))
|
||||
(try
|
||||
(rds/unsubscribe! sconn topic)
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
(l/trace :hint "exception on unsubscribing" :topic topic :cause cause))))
|
||||
|
||||
|
|
|
@ -8,17 +8,21 @@
|
|||
"The msgbus abstraction implemented using redis as underlying backend."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.metrics :as mtx]
|
||||
[app.redis.script :as-alias rscript]
|
||||
[app.util.cache :as cache]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.core :as c]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p])
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px])
|
||||
(:import
|
||||
clojure.lang.IDeref
|
||||
clojure.lang.MapEntry
|
||||
|
@ -87,7 +91,7 @@
|
|||
(s/def ::connect? ::us/boolean)
|
||||
(s/def ::io-threads ::us/integer)
|
||||
(s/def ::worker-threads ::us/integer)
|
||||
(s/def ::cache #(instance? clojure.lang.Atom %))
|
||||
(s/def ::cache some?)
|
||||
|
||||
(s/def ::redis
|
||||
(s/keys :req [::resources
|
||||
|
@ -99,11 +103,11 @@
|
|||
|
||||
(defmethod ig/prep-key ::redis
|
||||
[_ cfg]
|
||||
(let [runtime (Runtime/getRuntime)
|
||||
cpus (.availableProcessors ^Runtime runtime)]
|
||||
(let [cpus (px/get-available-processors)
|
||||
threads (max 1 (int (* cpus 0.2)))]
|
||||
(merge {::timeout (dt/duration "10s")
|
||||
::io-threads (max 3 cpus)
|
||||
::worker-threads (max 3 cpus)}
|
||||
::io-threads (max 3 threads)
|
||||
::worker-threads (max 3 threads)}
|
||||
(d/without-nils cfg))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::redis [_]
|
||||
|
@ -129,6 +133,15 @@
|
|||
(def string-codec
|
||||
(RedisCodec/of StringCodec/UTF8 StringCodec/UTF8))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor] :as cfg}]
|
||||
(letfn [(on-remove [key val cause]
|
||||
(l/trace :hint "evict connection (cache)" :key key :reason cause)
|
||||
(some-> val d/close!))]
|
||||
(cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")))
|
||||
|
||||
(defn- initialize-resources
|
||||
"Initialize redis connection resources"
|
||||
[{:keys [::uri ::io-threads ::worker-threads ::connect?] :as cfg}]
|
||||
|
@ -145,19 +158,21 @@
|
|||
(timer ^Timer timer)
|
||||
(build))
|
||||
|
||||
redis-uri (RedisURI/create ^String uri)]
|
||||
redis-uri (RedisURI/create ^String uri)
|
||||
cfg (-> cfg
|
||||
(assoc ::resources resources)
|
||||
(assoc ::timer timer)
|
||||
(assoc ::redis-uri redis-uri))]
|
||||
|
||||
(-> cfg
|
||||
(assoc ::resources resources)
|
||||
(assoc ::timer timer)
|
||||
(assoc ::cache (atom {}))
|
||||
(assoc ::redis-uri redis-uri))))
|
||||
(assoc cfg ::cache (create-cache cfg))))
|
||||
|
||||
(defn- shutdown-resources
|
||||
[{:keys [::resources ::cache ::timer]}]
|
||||
(run! d/close! (vals @cache))
|
||||
(cache/invalidate-all! cache)
|
||||
|
||||
(when resources
|
||||
(.shutdown ^ClientResources resources))
|
||||
|
||||
(when timer
|
||||
(.stop ^Timer timer)))
|
||||
|
||||
|
@ -173,6 +188,7 @@
|
|||
:default (.connect ^RedisClient client ^RedisCodec codec)
|
||||
:pubsub (.connectPubSub ^RedisClient client ^RedisCodec codec))]
|
||||
|
||||
(l/trc :hint "connect" :hid (hash client))
|
||||
(.setTimeout ^StatefulConnection conn ^Duration timeout)
|
||||
(reify
|
||||
IDeref
|
||||
|
@ -180,8 +196,9 @@
|
|||
|
||||
AutoCloseable
|
||||
(close [_]
|
||||
(.close ^StatefulConnection conn)
|
||||
(.shutdown ^RedisClient client)))))
|
||||
(ex/ignoring (.close ^StatefulConnection conn))
|
||||
(ex/ignoring (.shutdown ^RedisClient client))
|
||||
(l/trc :hint "disconnect" :hid (hash client))))))
|
||||
|
||||
(defn connect
|
||||
[state & {:as opts}]
|
||||
|
@ -194,15 +211,10 @@
|
|||
(defn get-or-connect
|
||||
[{:keys [::cache] :as state} key options]
|
||||
(us/assert! ::redis state)
|
||||
(-> state
|
||||
(assoc ::connection
|
||||
(or (get @cache key)
|
||||
(-> (swap! cache (fn [cache]
|
||||
(when-let [prev (get cache key)]
|
||||
(d/close! prev))
|
||||
(assoc cache key (connect* state options))))
|
||||
(get key))))
|
||||
(dissoc ::cache)))
|
||||
(let [connection (cache/get cache key (fn [_] (connect* state options)))]
|
||||
(-> state
|
||||
(dissoc ::cache)
|
||||
(assoc ::connection connection))))
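The atom-swap bookkeeping is gone: the connection is computed on first lookup and reused (and eventually evicted and closed) by the cache. A hedged usage sketch; the key, options and listener are illustrative:

(-> (get-or-connect redis ::msgbus {})
    (add-listener! my-listener))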
|
||||
|
||||
(defn add-listener!
|
||||
[{:keys [::connection] :as conn} listener]
|
||||
|
@ -344,7 +356,7 @@
|
|||
(do
|
||||
(l/error :hint "no script found" :name sname :cause cause)
|
||||
(->> (load-script)
|
||||
(p/mapcat eval-script)))
|
||||
(p/mcat eval-script)))
|
||||
(if-let [on-error (::rscript/on-error script)]
|
||||
(on-error cause)
|
||||
(p/rejected cause))))
|
||||
|
@ -375,15 +387,16 @@
|
|||
(load-script []
|
||||
(l/trace :hint "load script" :name sname)
|
||||
(->> (.scriptLoad ^RedisScriptingAsyncCommands cmd
|
||||
^String (read-script))
|
||||
(p/map (fn [sha]
|
||||
(swap! scripts-cache assoc sname sha)
|
||||
sha))))]
|
||||
^String (read-script))
|
||||
(p/fmap (fn [sha]
|
||||
(swap! scripts-cache assoc sname sha)
|
||||
sha))))]
|
||||
|
||||
(if-let [sha (get @scripts-cache sname)]
|
||||
(eval-script sha)
|
||||
(->> (load-script)
|
||||
(p/mapcat eval-script))))))
|
||||
(p/await!
|
||||
(if-let [sha (get @scripts-cache sname)]
|
||||
(eval-script sha)
|
||||
(->> (load-script)
|
||||
(p/mapcat eval-script)))))))
|
||||
|
||||
(defn timeout-exception?
|
||||
[cause]
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
|
@ -19,7 +18,6 @@
|
|||
[app.http.client :as-alias http.client]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.main :as-alias main]
|
||||
[app.metrics :as mtx]
|
||||
[app.msgbus :as-alias mbus]
|
||||
|
@ -35,7 +33,6 @@
|
|||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs]))
|
||||
|
||||
|
@ -47,12 +44,10 @@
|
|||
|
||||
(defn- handle-response-transformation
|
||||
[response request mdata]
|
||||
(let [transform-fn (reduce (fn [res-fn transform-fn]
|
||||
(fn [request response]
|
||||
(p/then (res-fn request response) #(transform-fn request %))))
|
||||
(constantly response)
|
||||
(::response-transform-fns mdata))]
|
||||
(transform-fn request response)))
|
||||
(reduce (fn [response transform-fn]
|
||||
(transform-fn request response))
|
||||
response
|
||||
(::response-transform-fns mdata)))
|
||||
|
||||
(defn- handle-before-comple-hook
|
||||
[response mdata]
|
||||
|
@ -63,18 +58,18 @@
|
|||
(defn- handle-response
|
||||
[request result]
|
||||
(if (fn? result)
|
||||
(p/wrap (result request))
|
||||
(result request)
|
||||
(let [mdata (meta result)]
|
||||
(p/-> (yrs/response {:status (::http/status mdata 200)
|
||||
:headers (::http/headers mdata {})
|
||||
:body (rph/unwrap result)})
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata)))))
|
||||
(-> {::yrs/status (::http/status mdata 200)
|
||||
::yrs/headers (::http/headers mdata {})
|
||||
::yrs/body (rph/unwrap result)}
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata)))))
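With the promise plumbing removed, an RPC method result is handled synchronously and can take one of two shapes; a hedged sketch of both (values are illustrative):

;; plain value: handle-response reads ::http/status and ::http/headers from the metadata
(with-meta {:result :ok}
  {::http/status 201
   ::http/headers {"x-demo" "1"}})

;; function value: called directly with the request and expected to return the response map itself
(fn [request]
  {::yrs/status 204})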
|
||||
|
||||
(defn- rpc-query-handler
|
||||
"Ring handler that dispatches query requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params] :as request} respond raise]
|
||||
[methods {:keys [params path-params] :as request}]
|
||||
(let [type (keyword (:type path-params))
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
|
@ -87,19 +82,14 @@
|
|||
(assoc :profile-id profile-id)
|
||||
(assoc ::profile-id profile-id))
|
||||
(dissoc data :profile-id ::profile-id))
|
||||
method (get methods type default-handler)]
|
||||
|
||||
(->> (method data)
|
||||
(p/mcat (partial handle-response request))
|
||||
(p/fnly (fn [response cause]
|
||||
(if cause
|
||||
(raise cause)
|
||||
(respond response)))))))
|
||||
method (get methods type default-handler)
|
||||
response (method data)]
|
||||
(handle-response request response)))
|
||||
|
||||
(defn- rpc-mutation-handler
|
||||
"Ring handler that dispatches mutation requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params] :as request} respond raise]
|
||||
[methods {:keys [params path-params] :as request}]
|
||||
(let [type (keyword (:type path-params))
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
|
@ -111,24 +101,18 @@
|
|||
(assoc :profile-id profile-id)
|
||||
(assoc ::profile-id profile-id))
|
||||
(dissoc data :profile-id))
|
||||
method (get methods type default-handler)]
|
||||
|
||||
(->> (method data)
|
||||
(p/mcat (partial handle-response request))
|
||||
(p/fnly (fn [response cause]
|
||||
(if cause
|
||||
(raise cause)
|
||||
(respond response)))))))
|
||||
method (get methods type default-handler)
|
||||
response (method data)]
|
||||
(handle-response request response)))
|
||||
|
||||
(defn- rpc-command-handler
|
||||
"Ring handler that dispatches cmd requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params] :as request} respond raise]
|
||||
[methods {:keys [params path-params] :as request}]
|
||||
(let [type (keyword (:type path-params))
|
||||
etag (yrq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
|
||||
data (-> params
|
||||
(assoc ::request-at (dt/now))
|
||||
(assoc ::session/id (::session/id request))
|
||||
|
@ -140,12 +124,8 @@
|
|||
method (get methods type default-handler)]
|
||||
|
||||
(binding [cond/*enabled* true]
|
||||
(->> (method data)
|
||||
(p/mcat (partial handle-response request))
|
||||
(p/fnly (fn [response cause]
|
||||
(if cause
|
||||
(raise cause)
|
||||
(respond response))))))))
|
||||
(let [response (method data)]
|
||||
(handle-response request response)))))
|
||||
|
||||
(defn- wrap-metrics
|
||||
"Wrap service method with metrics measurement."
|
||||
|
@ -153,23 +133,22 @@
|
|||
(let [labels (into-array String [(::sv/name mdata)])]
|
||||
(fn [cfg params]
|
||||
(let [tp (dt/tpoint)]
|
||||
(->> (f cfg params)
|
||||
(p/fnly (fn [_ _]
|
||||
(mtx/run! metrics
|
||||
:id metrics-id
|
||||
:val (inst-ms (tp))
|
||||
:labels labels))))))))
|
||||
|
||||
(try
|
||||
(f cfg params)
|
||||
(finally
|
||||
(mtx/run! metrics
|
||||
:id metrics-id
|
||||
:val (inst-ms (tp))
|
||||
:labels labels)))))))
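The wrapper is now a plain try/finally around the synchronous call; the same pattern in isolation (the names are illustrative):

(defn wrap-timing-sketch
  [f metrics metrics-id labels]
  (fn [cfg params]
    (let [tp (dt/tpoint)]
      (try
        (f cfg params)
        (finally
          ;; the elapsed time is recorded whether the call returns or throws
          (mtx/run! metrics :id metrics-id :val (inst-ms (tp)) :labels labels))))))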
|
||||
|
||||
(defn- wrap-authentication
|
||||
[_ f mdata]
|
||||
(fn [cfg params]
|
||||
(let [profile-id (::profile-id params)]
|
||||
(if (and (::auth mdata true) (not (uuid? profile-id)))
|
||||
(p/rejected
|
||||
(ex/error :type :authentication
|
||||
:code :authentication-required
|
||||
:hint "authentication required for this endpoint"))
|
||||
(ex/raise :type :authentication
|
||||
:code :authentication-required
|
||||
:hint "authentication required for this endpoint")
|
||||
(f cfg params)))))
|
||||
|
||||
(defn- wrap-access-token
|
||||
|
@ -182,98 +161,34 @@
|
|||
(let [perms (::actoken/perms request #{})]
|
||||
(if (contains? perms name)
|
||||
(f cfg params)
|
||||
(p/rejected
|
||||
(ex/error :type :authorization
|
||||
:code :operation-not-allowed
|
||||
:allowed perms))))
|
||||
(ex/raise :type :authorization
|
||||
:code :operation-not-allowed
|
||||
:allowed perms)))
|
||||
(f cfg params))))
|
||||
f))
|
||||
|
||||
(defn- wrap-dispatch
|
||||
"Wraps service method into async flow, with the ability to dispatching
|
||||
it to a preconfigured executor service."
|
||||
[{:keys [::wrk/executor] :as cfg} f mdata]
|
||||
(with-meta
|
||||
(fn [cfg params]
|
||||
(->> (px/submit! executor (px/wrap-bindings #(f cfg params)))
|
||||
(p/mapcat p/wrap)
|
||||
(p/map rph/wrap)))
|
||||
mdata))
|
||||
|
||||
(defn- wrap-audit
|
||||
[cfg f mdata]
|
||||
[_ f mdata]
|
||||
(if (or (contains? cf/flags :webhooks)
|
||||
(contains? cf/flags :audit-log))
|
||||
(letfn [(handle-audit [params result]
|
||||
(let [resultm (meta result)
|
||||
request (::http/request params)
|
||||
|
||||
profile-id (or (::audit/profile-id resultm)
|
||||
(:profile-id result)
|
||||
(if (= (::type cfg) "command")
|
||||
(::profile-id params)
|
||||
(:profile-id params))
|
||||
uuid/zero)
|
||||
|
||||
props (-> (or (::audit/replace-props resultm)
|
||||
(-> params
|
||||
(merge (::audit/props resultm))
|
||||
(dissoc :profile-id)
|
||||
(dissoc :type)))
|
||||
(audit/clean-props))
|
||||
|
||||
event {:type (or (::audit/type resultm)
|
||||
(::type cfg))
|
||||
:name (or (::audit/name resultm)
|
||||
(::sv/name mdata))
|
||||
:profile-id profile-id
|
||||
:ip-addr (some-> request audit/parse-client-ip)
|
||||
:props props
|
||||
|
||||
;; NOTE: for batch-key lookup we need the params as-is
|
||||
;; because the rpc api does not need to know the
|
||||
;; audit/webhook specific object layout.
|
||||
::params (dissoc params ::http/request)
|
||||
|
||||
::webhooks/batch-key
|
||||
(or (::webhooks/batch-key mdata)
|
||||
(::webhooks/batch-key resultm))
|
||||
|
||||
::webhooks/batch-timeout
|
||||
(or (::webhooks/batch-timeout mdata)
|
||||
(::webhooks/batch-timeout resultm))
|
||||
|
||||
::webhooks/event?
|
||||
(or (::webhooks/event? mdata)
|
||||
(::webhooks/event? resultm)
|
||||
false)}]
|
||||
|
||||
(audit/submit! cfg event)))
|
||||
|
||||
(handle-request [cfg params]
|
||||
(->> (f cfg params)
|
||||
(p/fnly (fn [result cause]
|
||||
(when-not cause
|
||||
(handle-audit params result))))))]
|
||||
|
||||
(if-not (::audit/skip mdata)
|
||||
(with-meta handle-request mdata)
|
||||
f))
|
||||
(if-not (::audit/skip mdata)
|
||||
(fn [cfg params]
|
||||
(let [result (f cfg params)]
|
||||
(->> (audit/prepare-event cfg mdata params result)
|
||||
(audit/submit! cfg))
|
||||
result))
|
||||
f)
|
||||
f))
|
||||
|
||||
(defn- wrap-spec-conform
|
||||
[_ f mdata]
|
||||
(let [spec (or (::sv/spec mdata) (s/spec any?))]
|
||||
(fn [cfg params]
|
||||
(let [params (ex/try! (us/conform spec params))]
|
||||
(if (ex/exception? params)
|
||||
(p/rejected params)
|
||||
(f cfg params))))))
|
||||
(f cfg (us/conform spec params)))))
|
||||
|
||||
(defn- wrap-all
|
||||
[cfg f mdata]
|
||||
(as-> f $
|
||||
(wrap-dispatch cfg $ mdata)
|
||||
(wrap-metrics cfg $ mdata)
|
||||
(cond/wrap cfg $ mdata)
|
||||
(retry/wrap-retry cfg $ mdata)
|
||||
|
@ -288,13 +203,11 @@
|
|||
[cfg f mdata]
|
||||
(l/debug :hint "register method" :name (::sv/name mdata))
|
||||
(let [f (wrap-all cfg f mdata)]
|
||||
(with-meta #(f cfg %) mdata)))
|
||||
(partial f cfg)))
|
||||
|
||||
(defn- process-method
|
||||
[cfg vfn]
|
||||
(let [mdata (meta vfn)]
|
||||
[(keyword (::sv/name mdata))
|
||||
(wrap cfg vfn mdata)]))
|
||||
[cfg [vfn mdata]]
|
||||
[(keyword (::sv/name mdata)) [mdata (wrap cfg vfn mdata)]])
|
||||
|
||||
(defn- resolve-query-methods
|
||||
[cfg]
|
||||
|
@ -371,13 +284,13 @@
|
|||
:commands (resolve-command-methods cfg)}))
|
||||
|
||||
(s/def ::mutations
|
||||
(s/map-of keyword? fn?))
|
||||
(s/map-of keyword? (s/tuple map? fn?)))
|
||||
|
||||
(s/def ::queries
|
||||
(s/map-of keyword? fn?))
|
||||
(s/map-of keyword? (s/tuple map? fn?)))
|
||||
|
||||
(s/def ::commands
|
||||
(s/map-of keyword? fn?))
|
||||
(s/map-of keyword? (s/tuple map? fn?)))
|
||||
|
||||
(s/def ::methods
|
||||
(s/keys :req-un [::mutations
|
||||
|
@ -391,15 +304,18 @@
|
|||
::db/pool
|
||||
::main/props
|
||||
::wrk/executor
|
||||
::session/manager
|
||||
::actoken/manager]))
|
||||
::session/manager]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::methods] :as cfg}]
|
||||
[["/rpc" {:middleware [[session/authz cfg]
|
||||
[actoken/authz cfg]]}
|
||||
["/command/:type" {:handler (partial rpc-command-handler (:commands methods))}]
|
||||
["/query/:type" {:handler (partial rpc-query-handler (:queries methods))}]
|
||||
["/mutation/:type" {:handler (partial rpc-mutation-handler (:mutations methods))
|
||||
:allowed-methods #{:post}}]]])
|
||||
(let [methods (-> methods
|
||||
(update :commands update-vals peek)
|
||||
(update :queries update-vals peek)
|
||||
(update :mutations update-vals peek))]
|
||||
[["/rpc" {:middleware [[session/authz cfg]
|
||||
[actoken/authz cfg]]}
|
||||
["/command/:type" {:handler (partial rpc-command-handler (:commands methods))}]
|
||||
["/query/:type" {:handler (partial rpc-query-handler (:queries methods))}]
|
||||
["/mutation/:type" {:handler (partial rpc-mutation-handler (:mutations methods))
|
||||
:allowed-methods #{:post}}]]]))
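Each registered method is now stored as a [mdata handler] tuple, so the router strips the metadata with update-vals/peek before building the ring handlers; a tiny self-contained sketch:

(let [commands {:get-file [{::sv/name "get-file"} (fn [params] params)]}]
  (update-vals commands peek))
;; => {:get-file <handler fn>} ; the mdata map in the first slot is dropped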
|
@ -6,14 +6,16 @@
|
|||
|
||||
(ns app.rpc.climit
|
||||
"Concurrencly limiter for RPC."
|
||||
(:refer-clojure :exclude [run!])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit.config :as-alias config]
|
||||
[app.util.cache :as cache]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
|
@ -23,184 +25,200 @@
|
|||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.bulkhead :as pxb])
|
||||
[promesa.exec.bulkhead :as pbh])
|
||||
(:import
|
||||
com.github.benmanes.caffeine.cache.Cache
|
||||
com.github.benmanes.caffeine.cache.CacheLoader
|
||||
com.github.benmanes.caffeine.cache.Caffeine
|
||||
com.github.benmanes.caffeine.cache.RemovalListener))
|
||||
clojure.lang.ExceptionInfo))
|
||||
|
||||
(defn- capacity-exception?
|
||||
[o]
|
||||
(and (ex/error? o)
|
||||
(let [data (ex-data o)]
|
||||
(and (= :bulkhead-error (:type data))
|
||||
(= :capacity-limit-reached (:code data))))))
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn invoke!
|
||||
[limiter f]
|
||||
(->> (px/submit! limiter f)
|
||||
(p/hcat (fn [result cause]
|
||||
(cond
|
||||
(capacity-exception? cause)
|
||||
(p/rejected
|
||||
(ex/error :type :internal
|
||||
:code :concurrency-limit-reached
|
||||
:queue (-> limiter meta ::bkey name)
|
||||
:cause cause))
|
||||
(defn- create-bulkhead-cache
|
||||
[{:keys [::wrk/executor]} config]
|
||||
(letfn [(load-fn [key]
|
||||
(let [config (get config (nth key 0))]
|
||||
(l/trace :hint "insert into cache" :key key)
|
||||
(pbh/create :permits (or (:permits config) (:concurrency config))
|
||||
:queue (or (:queue config) (:queue-size config))
|
||||
:timeout (:timeout config)
|
||||
:executor executor
|
||||
:type (:type config :semaphore))))
|
||||
|
||||
(some? cause)
|
||||
(p/rejected cause)
|
||||
(on-remove [_ _ cause]
|
||||
(l/trace :hint "evict from cache" :key key :reason (str cause)))]
|
||||
|
||||
:else
|
||||
(p/resolved result))))))
|
||||
(cache/create :executor :same-thread
|
||||
:on-remove on-remove
|
||||
:keepalive "5m"
|
||||
:load-fn load-fn)))
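A hedged illustration of a single cache entry: for a config entry like {:permits 1 :queue 3} (values illustrative), the loader builds one bulkhead per [id key] pair, roughly equivalent to:

(pbh/create :permits  1            ;; maximum concurrent executions
            :queue    3            ;; maximum queued waiters
            :timeout  nil          ;; optional wait timeout, threaded through load-fn above
            :executor executor
            :type     :semaphore)  ;; default type used by load-fn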
|
||||
|
||||
(defn- create-limiter
|
||||
[{:keys [::wrk/executor ::mtx/metrics ::bkey ::skey concurrency queue-size]}]
|
||||
(let [labels (into-array String [(name bkey)])
|
||||
on-queue (fn [instance]
|
||||
(l/trace :hint "enqueued"
|
||||
:key (name bkey)
|
||||
:skey (str skey)
|
||||
:queue-size (get instance ::pxb/current-queue-size)
|
||||
:concurrency (get instance ::pxb/current-concurrency))
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue-size
|
||||
:val (get instance ::pxb/current-queue-size)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-concurrency
|
||||
:val (get instance ::pxb/current-concurrency)
|
||||
:labels labels))
|
||||
|
||||
on-run (fn [instance task]
|
||||
(let [elapsed (- (inst-ms (dt/now))
|
||||
(inst-ms task))]
|
||||
(l/trace :hint "execute"
|
||||
:key (name bkey)
|
||||
:skey (str skey)
|
||||
:elapsed (str elapsed "ms"))
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val elapsed
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue-size
|
||||
:val (get instance ::pxb/current-queue-size)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-concurrency
|
||||
:val (get instance ::pxb/current-concurrency)
|
||||
:labels labels)))
|
||||
|
||||
options {:executor executor
|
||||
:concurrency concurrency
|
||||
:queue-size (or queue-size Integer/MAX_VALUE)
|
||||
:on-queue on-queue
|
||||
:on-run on-run}]
|
||||
|
||||
(-> (pxb/create options)
|
||||
(vary-meta assoc ::bkey bkey ::skey skey))))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor] :as params} config]
|
||||
(let [listener (reify RemovalListener
|
||||
(onRemoval [_ key _val cause]
|
||||
(l/trace :hint "cache: remove" :key key :reason (str cause))))
|
||||
|
||||
loader (reify CacheLoader
|
||||
(load [_ key]
|
||||
(let [[bkey skey] key]
|
||||
(when-let [config (get config bkey)]
|
||||
(-> (merge params config)
|
||||
(assoc ::bkey bkey)
|
||||
(assoc ::skey skey)
|
||||
(create-limiter))))))]
|
||||
|
||||
(.. (Caffeine/newBuilder)
|
||||
(weakValues)
|
||||
(executor executor)
|
||||
(removalListener listener)
|
||||
(build loader))))
|
||||
|
||||
(defprotocol IConcurrencyManager)
|
||||
|
||||
(s/def ::concurrency ::us/integer)
|
||||
(s/def ::queue-size ::us/integer)
|
||||
(s/def ::config/permits ::us/integer)
|
||||
(s/def ::config/queue ::us/integer)
|
||||
(s/def ::config/timeout ::us/integer)
|
||||
(s/def ::config
|
||||
(s/map-of keyword?
|
||||
(s/keys :req-un [::concurrency]
|
||||
:opt-un [::queue-size])))
|
||||
(s/keys :opt-un [::config/permits
|
||||
::config/queue
|
||||
::config/timeout])))
|
||||
|
||||
(defmethod ig/prep-key ::rpc/climit
|
||||
[_ cfg]
|
||||
(merge {::path (cf/get :rpc-climit-config)}
|
||||
(d/without-nils cfg)))
|
||||
(assoc cfg ::path (cf/get :rpc-climit-config)))
|
||||
|
||||
(s/def ::path ::fs/path)
|
||||
|
||||
(defmethod ig/pre-init-spec ::rpc/climit [_]
|
||||
(s/keys :req [::wrk/executor ::mtx/metrics ::path]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/climit
|
||||
[_ {:keys [::path] :as params}]
|
||||
[_ {:keys [::path ::mtx/metrics ::wrk/executor] :as cfg}]
|
||||
(when (contains? cf/flags :rpc-climit)
|
||||
(if-let [config (some->> path slurp edn/read-string)]
|
||||
(do
|
||||
(l/info :hint "initializing concurrency limit" :config (str path))
|
||||
(us/verify! ::config config)
|
||||
|
||||
(let [cache (create-cache params config)]
|
||||
^{::cache cache}
|
||||
(reify
|
||||
IConcurrencyManager
|
||||
clojure.lang.IDeref
|
||||
(deref [_] config)
|
||||
|
||||
clojure.lang.ILookup
|
||||
(valAt [_ key]
|
||||
(let [key (if (vector? key) key [key])]
|
||||
(.get ^Cache cache key))))))
|
||||
|
||||
(l/warn :hint "unable to load configuration" :config (str path)))))
|
||||
(when-let [params (some->> path slurp edn/read-string)]
|
||||
(l/info :hint "initializing concurrency limit" :config (str path))
|
||||
(us/verify! ::config params)
|
||||
{::cache (create-bulkhead-cache cfg params)
|
||||
::config params
|
||||
::wrk/executor executor
|
||||
::mtx/metrics metrics})))
|
||||
|
||||
(s/def ::cache cache/cache?)
|
||||
(s/def ::instance
|
||||
(s/keys :req [::cache ::config ::wrk/executor]))
|
||||
|
||||
(s/def ::rpc/climit
|
||||
(s/nilable #(satisfies? IConcurrencyManager %)))
|
||||
(s/nilable ::instance))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn invoke!
|
||||
[cache metrics id key f]
|
||||
(let [limiter (cache/get cache [id key])
|
||||
tpoint (dt/tpoint)
|
||||
labels (into-array String [(name id)])
|
||||
|
||||
wrapped
|
||||
(fn []
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
(l/trace :hint "executed"
|
||||
:id (name id)
|
||||
:key key
|
||||
:fnh (hash f)
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats)
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val (inst-ms elapsed)
|
||||
:labels labels)
|
||||
(try
|
||||
(f)
|
||||
(finally
|
||||
(let [elapsed (tpoint)]
|
||||
(l/trace :hint "finished"
|
||||
:id (name id)
|
||||
:key key
|
||||
:fnh (hash f)
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats)
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
measure!
|
||||
(fn [stats]
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue
|
||||
:val (:queue stats)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-permits
|
||||
:val (:permits stats)
|
||||
:labels labels))]
|
||||
|
||||
(try
|
||||
(let [stats (pbh/get-stats limiter)]
|
||||
(measure! stats)
|
||||
(l/trace :hint "enqueued"
|
||||
:id (name id)
|
||||
:key key
|
||||
:fnh (hash f)
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats))
|
||||
(pbh/invoke! limiter wrapped))
|
||||
(catch ExceptionInfo cause
|
||||
(let [{:keys [type code]} (ex-data cause)]
|
||||
(if (= :bulkhead-error type)
|
||||
(ex/raise :type :concurrency-limit
|
||||
:code code
|
||||
:hint "concurrency limit reached")
|
||||
(throw cause))))
|
||||
|
||||
(finally
|
||||
(measure! (pbh/get-stats limiter))))))
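A minimal usage sketch of the new entry point; the id and the body are illustrative and `content` is a placeholder:

;; runs f under the bulkhead selected by [:process-image nil], records the
;; queue/permits gauges and timing, and converts :bulkhead-error conditions
;; into a :concurrency-limit error
(invoke! cache metrics :process-image nil
         (fn [] (media/run {:cmd :info :input content})))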
|
||||
|
||||
|
||||
(defn run!
|
||||
[{:keys [::id ::cache ::mtx/metrics]} f]
|
||||
(if (and cache id)
|
||||
(invoke! cache metrics id nil f)
|
||||
(f)))
|
||||
|
||||
(defn submit!
|
||||
[{:keys [::id ::cache ::wrk/executor ::mtx/metrics]} f]
|
||||
(let [f (partial px/submit! executor f)]
|
||||
(if (and cache id)
|
||||
(p/await! (invoke! cache metrics id nil f))
|
||||
(p/await! (f)))))
|
||||
|
||||
(defn configure
|
||||
([{:keys [::rpc/climit]} id]
|
||||
(us/assert! ::rpc/climit climit)
|
||||
(assoc climit ::id id))
|
||||
([{:keys [::rpc/climit]} id executor]
|
||||
(us/assert! ::rpc/climit climit)
|
||||
(-> climit
|
||||
(assoc ::id id)
|
||||
(assoc ::wrk/executor executor))))
|
||||
|
||||
(defmacro with-dispatch!
|
||||
"Dispatch blocking operation to a separated thread protected with the
|
||||
specified concurrency limiter. If climit is not active, the function
|
||||
will be scheduled to execute without concurrency monitoring."
|
||||
[instance & body]
|
||||
(if (vector? instance)
|
||||
`(-> (app.rpc.climit/configure ~@instance)
|
||||
(app.rpc.climit/run! (^:once fn* [] ~@body)))
|
||||
`(run! ~instance (^:once fn* [] ~@body))))
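Callers can pass either a vector that is threaded through configure or an already configured instance; a hedged sketch of both call shapes (cfg is assumed to carry ::rpc/climit, and `data` is a placeholder):

;; vector form: configure + run! in one step
(climit/with-dispatch! [cfg :process-font]
  (media/run {:cmd :generate-fonts :input data}))

;; instance form: reuse an instance configured elsewhere
(let [lim (climit/configure cfg :process-font)]
  (climit/with-dispatch! lim
    (media/run {:cmd :generate-fonts :input data})))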
|
||||
|
||||
(defmacro with-dispatch
|
||||
[lim & body]
|
||||
`(if ~lim
|
||||
(invoke! ~lim (^:once fn [] (p/wrap (do ~@body))))
|
||||
(p/wrap (do ~@body))))
|
||||
"Dispatch blocking operation to a separated thread protected with
|
||||
the specified semaphore.
|
||||
DEPRECATED"
|
||||
[& params]
|
||||
`(with-dispatch! ~@params))
|
||||
|
||||
(def noop-fn (constantly nil))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [::rpc/climit]} f {:keys [::queue ::key-fn] :as mdata}]
|
||||
(if (and (some? climit)
|
||||
(some? queue))
|
||||
(if-let [config (get @climit queue)]
|
||||
(do
|
||||
[{:keys [::rpc/climit ::mtx/metrics]} f {:keys [::id ::key-fn] :or {key-fn noop-fn} :as mdata}]
|
||||
(if (and (some? climit) (some? id))
|
||||
(if-let [config (get-in climit [::config id])]
|
||||
(let [cache (::cache climit)]
|
||||
(l/debug :hint "wrap: instrumenting method"
|
||||
:limit-name (name queue)
|
||||
:limit (name id)
|
||||
:service-name (::sv/name mdata)
|
||||
:queue-size (or (:queue-size config) Integer/MAX_VALUE)
|
||||
:concurrency (:concurrency config)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed? (some? key-fn))
|
||||
(if (some? key-fn)
|
||||
(fn [cfg params]
|
||||
(let [key [queue (key-fn params)]
|
||||
lim (get climit key)]
|
||||
(invoke! lim (partial f cfg params))))
|
||||
(let [lim (get climit queue)]
|
||||
(fn [cfg params]
|
||||
(invoke! lim (partial f cfg params))))))
|
||||
(fn [cfg params]
|
||||
(invoke! cache metrics id (key-fn params) (partial f cfg params))))
|
||||
|
||||
(do
|
||||
(l/warn :hint "wrap: no config found"
|
||||
:queue (name queue)
|
||||
:service (::sv/name mdata))
|
||||
(l/warn :hint "no config found for specified queue" :id id)
|
||||
f))
|
||||
|
||||
f))
|
@ -21,10 +21,7 @@
|
|||
[app.rpc.helpers :as rph]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
(defn- event->row [event]
|
||||
[(uuid/next)
|
||||
|
@ -71,17 +68,22 @@
|
|||
:req-un [::events]))
|
||||
|
||||
(sv/defmethod ::push-audit-events
|
||||
{::climit/queue :push-audit-events
|
||||
{::climit/id :submit-audit-events-by-profile
|
||||
::climit/key-fn ::rpc/profile-id
|
||||
::audit/skip true
|
||||
::doc/added "1.17"}
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} params]
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
(if (or (db/read-only? pool)
|
||||
(not (contains? cf/flags :audit-log)))
|
||||
(do
|
||||
(l/warn :hint "audit: http handler disabled or db is read-only")
|
||||
(rph/wrap nil))
|
||||
|
||||
(->> (px/submit! executor #(handle-events cfg params))
|
||||
(p/fmap (constantly nil)))))
|
||||
(do
|
||||
(try
|
||||
(handle-events cfg params)
|
||||
(catch Throwable cause
|
||||
(l/error :hint "unexpected error on persisting audit events from frontend"
|
||||
:cause cause)))
|
||||
|
||||
(rph/wrap nil))))
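The method declaration above shows the new keyed limiting: ::climit/id names a queue from climit.edn and ::climit/key-fn partitions it per caller. The same pattern on a hypothetical method (the id is made up and must exist in the config):

(sv/defmethod ::ping
  "Hypothetical endpoint: at most one concurrent call per profile."
  {::climit/id :ping-by-profile          ;; hypothetical queue id from climit.edn
   ::climit/key-fn ::rpc/profile-id      ;; bulkhead key derived from the request params
   ::doc/added "1.18"}
  [cfg params]
  (rph/wrap nil))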
|
@ -6,9 +6,9 @@
|
|||
|
||||
(ns app.rpc.commands.auth
|
||||
(:require
|
||||
[app.auth :as auth]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
|
@ -18,7 +18,6 @@
|
|||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
|
@ -63,14 +62,20 @@
|
|||
:code :login-disabled
|
||||
:hint "login is disabled in this instance"))
|
||||
|
||||
(letfn [(check-password [profile password]
|
||||
(letfn [(check-password [conn profile password]
|
||||
(when (= (:password profile) "!")
|
||||
(ex/raise :type :validation
|
||||
:code :account-without-password
|
||||
:hint "the current account does not have password"))
|
||||
(:valid (auth/verify-password password (:password profile))))
|
||||
(let [result (profile/verify-password cfg password (:password profile))]
|
||||
(when (:update result)
|
||||
(l/trace :hint "updating profile password" :id (:id profile) :email (:email profile))
|
||||
(profile/update-profile-password! (assoc cfg ::db/conn conn)
|
||||
(assoc profile :password password)))
|
||||
(:valid result)))
|
||||
|
||||
(validate-profile [profile]
|
||||
|
||||
(validate-profile [conn profile]
|
||||
(when-not profile
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
|
@ -80,7 +85,7 @@
|
|||
(when (:is-blocked profile)
|
||||
(ex/raise :type :restriction
|
||||
:code :profile-blocked))
|
||||
(when-not (check-password profile password)
|
||||
(when-not (check-password conn profile password)
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
(when-let [deleted-at (:deleted-at profile)]
|
||||
|
@ -92,8 +97,7 @@
|
|||
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (->> (profile/get-profile-by-email conn email)
|
||||
(validate-profile)
|
||||
(profile/decode-row)
|
||||
(validate-profile conn)
|
||||
(profile/strip-private-attrs))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
|
@ -118,7 +122,6 @@
|
|||
(sv/defmethod ::login-with-password
|
||||
"Performs authentication using penpot password."
|
||||
{::rpc/auth false
|
||||
::climit/queue :auth
|
||||
::doc/added "1.15"}
|
||||
[cfg params]
|
||||
(login-with-password cfg params))
|
||||
|
@ -144,7 +147,7 @@
|
|||
(:profile-id tdata)))
|
||||
|
||||
(update-password [conn profile-id]
|
||||
(let [pwd (auth/derive-password password)]
|
||||
(let [pwd (profile/derive-password cfg password)]
|
||||
(db/update! conn :profile {:password pwd} {:id profile-id})))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
|
@ -158,7 +161,6 @@
|
|||
|
||||
(sv/defmethod ::recover-profile
|
||||
{::rpc/auth false
|
||||
::climit/queue :auth
|
||||
::doc/added "1.15"}
|
||||
[cfg params]
|
||||
(recover-profile cfg params))
|
||||
|
@ -264,9 +266,7 @@
|
|||
:nudge {:big 10 :small 1}})
|
||||
(db/tjson))
|
||||
|
||||
password (if-let [password (:password params)]
|
||||
(auth/derive-password password)
|
||||
"!")
|
||||
password (or (:password params) "!")
|
||||
|
||||
locale (:locale params)
|
||||
locale (when (and (string? locale) (not (str/blank? locale)))
|
||||
|
@ -344,8 +344,11 @@
|
|||
|
||||
profile (if-let [profile-id (:profile-id claims)]
|
||||
(profile/get-profile conn profile-id)
|
||||
(->> (create-profile! conn (assoc params :is-active is-active))
|
||||
(create-profile-rels! conn)))
|
||||
(let [params (-> params
|
||||
(assoc :is-active is-active)
|
||||
(update :password #(profile/derive-password cfg %)))]
|
||||
(->> (create-profile! conn params)
|
||||
(create-profile-rels! conn))))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))]
|
||||
|
@ -356,9 +359,9 @@
|
|||
(when-let [id (:profile-id claims)]
|
||||
(db/update! conn :profile {:modified-at (dt/now)} {:id id})
|
||||
(audit/submit! cfg
|
||||
{:type "fact"
|
||||
:name "register-profile-retry"
|
||||
:profile-id id}))
|
||||
{::audit/type "fact"
|
||||
::audit/name "register-profile-retry"
|
||||
::audit/profile-id id}))
|
||||
|
||||
(cond
|
||||
;; If invitation token comes in params, this is because the
|
||||
|
@ -406,7 +409,6 @@
|
|||
|
||||
(sv/defmethod ::register-profile
|
||||
{::rpc/auth false
|
||||
::climit/queue :auth
|
||||
::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
@ -354,7 +354,6 @@
|
|||
(with-open [^AutoCloseable conn (db/open pool)]
|
||||
(db/exec! conn [sql:file-library-rels (db/create-array conn "uuid" ids)])))
|
||||
|
||||
|
||||
(defn- create-or-update-file
|
||||
[conn params]
|
||||
(let [sql (str "INSERT INTO file (id, project_id, name, revn, is_shared, data, created_at, modified_at) "
|
||||
|
@ -527,13 +526,13 @@
|
|||
(write-obj! output sids)
|
||||
|
||||
(doseq [id sids]
|
||||
(let [{:keys [size] :as obj} @(sto/get-object storage id)]
|
||||
(let [{:keys [size] :as obj} (sto/get-object storage id)]
|
||||
(l/debug :hint "write sobject" :id id ::l/sync? true)
|
||||
(doto output
|
||||
(write-uuid! id)
|
||||
(write-obj! (meta obj)))
|
||||
|
||||
(with-open [^InputStream stream @(sto/get-object-data storage obj)]
|
||||
(with-open [^InputStream stream (sto/get-object-data storage obj)]
|
||||
(let [written (write-stream! output stream size)]
|
||||
(when (not= written size)
|
||||
(ex/raise :type :validation
|
||||
|
@ -719,7 +718,7 @@
|
|||
(assoc ::sto/touched-at (dt/now))
|
||||
(assoc :bucket "file-media-object"))
|
||||
|
||||
sobject @(sto/put-object! storage params)]
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(l/debug :hint "persisted storage object" :id id :new-id (:id sobject) ::l/sync? true)
|
||||
(vswap! *state* update :index assoc id (:id sobject)))))
|
||||
|
@ -910,7 +909,9 @@
|
|||
(export! output-stream))))]
|
||||
|
||||
(fn [_]
|
||||
(yrs/response 200 body {"content-type" "application/octet-stream"}))))
|
||||
{::yrs/status 200
|
||||
::yrs/body body
|
||||
::yrs/headers {"content-type" "application/octet-stream"}})))
|
||||
|
||||
(s/def ::file ::media/upload)
|
||||
(s/def ::import-binfile
|
||||
|
|
|
@ -101,7 +101,7 @@
|
|||
(sv/defmethod ::get-comment-threads
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id share-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comment-threads conn profile-id file-id)))
|
||||
|
||||
|
@ -144,7 +144,7 @@
|
|||
(sv/defmethod ::get-unread-comment-threads
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-unread-comment-threads conn profile-id team-id)))
|
||||
|
||||
|
@ -191,7 +191,7 @@
|
|||
(sv/defmethod ::get-comment-thread
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id id share-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [sql (str "with threads as (" sql:comment-threads ")"
|
||||
"select * from threads where id = ?")]
|
||||
|
@ -211,7 +211,7 @@
|
|||
(sv/defmethod ::get-comments
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id thread-id share-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comments conn thread-id))))
|
||||
|
@ -263,7 +263,7 @@
|
|||
{::doc/added "1.15"
|
||||
::doc/changes ["1.15" "Imported from queries and renamed."]}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id share-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-file-comments-users conn file-id profile-id)))
|
||||
|
@ -13,6 +13,7 @@
|
|||
[app.loggers.audit :as audit]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.auth :as auth]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
|
@ -48,7 +49,7 @@
|
|||
:fullname fullname
|
||||
:is-active true
|
||||
:deleted-at (dt/in-future cf/deletion-delay)
|
||||
:password password
|
||||
:password (profile/derive-password cfg password)
|
||||
:props {}}]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
|
|
|
@ -277,7 +277,7 @@
|
|||
::cond/get-object #(get-minimal-file %1 (:id %2))
|
||||
::cond/key-fn get-file-etag}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id features]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [perms (get-permissions conn profile-id id)]
|
||||
(check-read-permissions! perms)
|
||||
(let [file (-> (get-file conn id features)
|
||||
|
@ -305,7 +305,7 @@
|
|||
{::doc/added "1.17"
|
||||
::rpc/:auth false}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id fragment-id share-id] }]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [perms (get-permissions conn profile-id file-id share-id)]
|
||||
(check-read-permissions! perms)
|
||||
(-> (get-file-fragment conn file-id fragment-id)
|
||||
|
@ -341,7 +341,7 @@
|
|||
::cond/reuse-key? true
|
||||
::cond/key-fn get-file-etag}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-object-thumbnails conn file-id)))
|
||||
|
||||
|
@ -372,7 +372,7 @@
|
|||
"Get all files for the specified project."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(projects/check-read-permissions! conn profile-id project-id)
|
||||
(get-project-files conn project-id)))
|
||||
|
||||
|
@ -391,7 +391,7 @@
|
|||
"Checks if the file has libraries. Returns a boolean"
|
||||
{::doc/added "1.15.1"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(get-has-file-libraries conn file-id)))
|
||||
|
||||
|
@ -458,7 +458,7 @@
|
|||
Mainly used for rendering purposes."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-page conn params)))
|
||||
|
||||
|
@ -511,7 +511,7 @@
|
|||
"Get all file (libraries) for the specified team."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-team-shared-files conn team-id)))
|
||||
|
||||
|
@ -565,7 +565,7 @@
|
|||
"Get libraries used by the specified file."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id features]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-file-libraries conn file-id features)))
|
||||
|
||||
|
@ -591,7 +591,7 @@
|
|||
"Returns all the file references that use specified file (library) id."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-library-file-references conn file-id)))
|
||||
|
||||
|
@ -628,7 +628,7 @@
|
|||
(sv/defmethod ::get-team-recent-files
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-team-recent-files conn team-id)))
|
||||
|
||||
|
@ -662,7 +662,7 @@
|
|||
(sv/defmethod ::get-file-thumbnail
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id file-id revn]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(-> (get-file-thumbnail conn file-id revn)
|
||||
(rph/with-http-cache long-cache-duration))))
|
||||
|
@ -758,7 +758,7 @@
|
|||
mainly for render thumbnails on dashboard."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id features] :as props}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
;; NOTE: we force here the "storage/pointer-map" feature, because
|
||||
;; it used internally only and is independent if user supports it
|
||||
|
|
|
@ -101,7 +101,7 @@
|
|||
|
||||
(defn- wrap-with-pointer-map-context
|
||||
[f]
|
||||
(fn [{:keys [conn] :as cfg} {:keys [id] :as file}]
|
||||
(fn [{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(binding [pmap/*tracked* (atom {})
|
||||
pmap/*load-fn* (partial files/load-pointer conn id)
|
||||
ffeat/*wrap-with-pointer-map-fn* pmap/wrap]
|
||||
|
@ -126,7 +126,7 @@
|
|||
;; database.
|
||||
|
||||
(sv/defmethod ::update-file
|
||||
{::climit/queue :update-file
|
||||
{::climit/id :update-file-by-id
|
||||
::climit/key-fn :id
|
||||
::webhooks/event? true
|
||||
::webhooks/batch-timeout (dt/duration "2m")
|
||||
|
@ -136,8 +136,7 @@
|
|||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
(let [cfg (assoc cfg :conn conn)
|
||||
(let [cfg (assoc cfg ::db/conn conn)
|
||||
params (assoc params :profile-id profile-id)
|
||||
tpoint (dt/tpoint)]
|
||||
(-> (update-file cfg params)
|
||||
|
@ -145,7 +144,7 @@
|
|||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))
|
||||
|
||||
(defn update-file
|
||||
[{:keys [conn ::mtx/metrics] :as cfg} {:keys [profile-id id changes changes-with-metadata] :as params}]
|
||||
[{:keys [::db/conn ::mtx/metrics] :as cfg} {:keys [profile-id id changes changes-with-metadata] :as params}]
|
||||
(let [file (get-file conn id)
|
||||
features (->> (concat (:features file)
|
||||
(:features params))
|
||||
|
@ -197,24 +196,34 @@
|
|||
:project-id (:project-id file)
|
||||
:team-id (:team-id file)}))))))
|
||||
|
||||
(defn- update-file-data
|
||||
[file changes]
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(update :data (fn [data]
|
||||
(cond-> data
|
||||
:always
|
||||
(-> (blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(pmg/migrate-data))
|
||||
|
||||
(and (contains? ffeat/*current* "components/v2")
|
||||
(not (contains? ffeat/*previous* "components/v2")))
|
||||
(ctf/migrate-to-components-v2)
|
||||
|
||||
:always
|
||||
(-> (cp/process-changes changes)
|
||||
(blob/encode)))))))
|
||||
|
||||
|
||||
(defn- update-file*
|
||||
[{:keys [conn] :as cfg} {:keys [profile-id file changes session-id ::created-at] :as params}]
|
||||
(let [file (-> file
|
||||
(update :revn inc)
|
||||
(update :data (fn [data]
|
||||
(cond-> data
|
||||
:always
|
||||
(-> (blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(pmg/migrate-data))
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id file changes session-id ::created-at] :as params}]
|
||||
(let [;; Process the file data in the CLIMIT context; scheduling it
|
||||
;; to be executed on a separate executor, to avoid doing the
|
||||
;; CPU-intensive operation on a virtual thread.
|
||||
file (-> (climit/configure cfg :update-file)
|
||||
(climit/submit! (partial update-file-data file changes)))]
|
||||
|
||||
(and (contains? ffeat/*current* "components/v2")
|
||||
(not (contains? ffeat/*previous* "components/v2")))
|
||||
(ctf/migrate-to-components-v2)
|
||||
|
||||
:always
|
||||
(-> (cp/process-changes changes)
|
||||
(blob/encode))))))]
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
|
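The split between climit/run! and climit/submit! is visible a few hunks up: run! executes the body on the calling (virtual) thread under the limit, while submit! additionally hops to the configured ::wrk/executor and awaits the result, which is why the CPU-bound update-file-data goes through submit!. A condensed sketch of the call above; persist-changes! is a placeholder for the db/insert!/db/update! steps that follow:

(let [file' (-> (climit/configure cfg :update-file)                       ;; queue config + executor from climit.edn
                (climit/submit! (partial update-file-data file changes)))]
  ;; file' carries the migrated :data and incremented :revn, computed off the vthread
  (persist-changes! cfg file'))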
@ -273,11 +282,10 @@
|
|||
(vec)))
|
||||
|
||||
(defn- send-notifications!
|
||||
[{:keys [conn] :as cfg} {:keys [file changes session-id] :as params}]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [file changes session-id] :as params}]
|
||||
(let [lchanges (filter library-change? changes)
|
||||
msgbus (::mbus/msgbus cfg)]
|
||||
|
||||
;; Asynchronously publish message to the msgbus
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-change
|
||||
|
@ -290,7 +298,6 @@
|
|||
(when (and (:is-shared file) (seq lchanges))
|
||||
(let [team-id (or (:team-id file)
|
||||
(files/get-team-id conn (:project-id file)))]
|
||||
;; Asynchronously publish message to the msgbus
|
||||
(mbus/pub! msgbus
|
||||
:topic team-id
|
||||
:message {:type :library-change
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
(ns app.rpc.commands.fonts
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
|
@ -15,7 +15,7 @@
|
|||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
|
@ -25,10 +25,7 @@
|
|||
[app.storage :as sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
|
||||
(def valid-style #{"normal" "italic"})
|
||||
|
@ -59,7 +56,7 @@
|
|||
(sv/defmethod ::get-font-variants
|
||||
{::doc/added "1.18"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id file-id project-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(cond
|
||||
(uuid? team-id)
|
||||
(do
|
||||
|
@ -107,50 +104,45 @@
|
|||
(create-font-variant cfg (assoc params :profile-id profile-id))))
|
||||
|
||||
(defn create-font-variant
|
||||
[{:keys [::sto/storage ::db/pool ::wrk/executor ::rpc/climit]} {:keys [data] :as params}]
|
||||
(letfn [(generate-fonts [data]
|
||||
(climit/with-dispatch (:process-font climit)
|
||||
(media/run {:cmd :generate-fonts :input data})))
|
||||
[{:keys [::sto/storage ::db/pool] :as cfg} {:keys [data] :as params}]
|
||||
(letfn [(generate-missing! [data]
|
||||
(let [data (media/run {:cmd :generate-fonts :input data})]
|
||||
(when (and (not (contains? data "font/otf"))
|
||||
(not (contains? data "font/ttf"))
|
||||
(not (contains? data "font/woff"))
|
||||
(not (contains? data "font/woff2")))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-font-upload
|
||||
:hint "invalid font upload, unable to generate missing font assets"))
|
||||
data))
|
||||
|
||||
;; Function responsible of calculating cryptographyc hash of
|
||||
;; the provided data.
|
||||
(calculate-hash [data]
|
||||
(px/with-dispatch executor
|
||||
(sto/calculate-hash data)))
|
||||
|
||||
(validate-data [data]
|
||||
(when (and (not (contains? data "font/otf"))
|
||||
(not (contains? data "font/ttf"))
|
||||
(not (contains? data "font/woff"))
|
||||
(not (contains? data "font/woff2")))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-font-upload))
|
||||
data)
|
||||
|
||||
(persist-font-object [data mtype]
|
||||
(prepare-font [data mtype]
|
||||
(when-let [resource (get data mtype)]
|
||||
(p/let [hash (calculate-hash resource)
|
||||
content (-> (sto/content resource)
|
||||
(sto/wrap-with-hash hash))]
|
||||
(sto/put-object! storage {::sto/content content
|
||||
::sto/touched-at (dt/now)
|
||||
::sto/deduplicate? true
|
||||
:content-type mtype
|
||||
:bucket "team-font-variant"}))))
|
||||
(let [hash (sto/calculate-hash resource)
|
||||
content (-> (sto/content resource)
|
||||
(sto/wrap-with-hash hash))]
|
||||
{::sto/content content
|
||||
::sto/touched-at (dt/now)
|
||||
::sto/deduplicate? true
|
||||
:content-type mtype
|
||||
:bucket "team-font-variant"})))
|
||||
|
||||
(persist-fonts [data]
|
||||
(p/let [otf (persist-font-object data "font/otf")
|
||||
ttf (persist-font-object data "font/ttf")
|
||||
woff1 (persist-font-object data "font/woff")
|
||||
woff2 (persist-font-object data "font/woff2")]
|
||||
(persist-fonts-files! [data]
|
||||
(let [otf-params (prepare-font data "font/otf")
|
||||
ttf-params (prepare-font data "font/ttf")
|
||||
wf1-params (prepare-font data "font/woff")
|
||||
wf2-params (prepare-font data "font/woff2")]
|
||||
(cond-> {}
|
||||
(some? otf-params)
|
||||
(assoc :otf (sto/put-object! storage otf-params))
|
||||
(some? ttf-params)
|
||||
(assoc :ttf (sto/put-object! storage ttf-params))
|
||||
(some? wf1-params)
|
||||
(assoc :woff1 (sto/put-object! storage wf1-params))
|
||||
(some? wf2-params)
|
||||
(assoc :woff2 (sto/put-object! storage wf2-params)))))
|
||||
|
||||
(d/without-nils
|
||||
{:otf otf
|
||||
:ttf ttf
|
||||
:woff1 woff1
|
||||
:woff2 woff2})))
|
||||
|
||||
(insert-into-db [{:keys [woff1 woff2 otf ttf]}]
|
||||
(insert-font-variant! [{:keys [woff1 woff2 otf ttf]}]
|
||||
(db/insert! pool :team-font-variant
|
||||
{:id (uuid/next)
|
||||
:team-id (:team-id params)
|
||||
|
@ -164,13 +156,11 @@
|
|||
:ttf-file-id (:id ttf)}))
|
||||
]
|
||||
|
||||
(->> (generate-fonts data)
|
||||
(p/fmap validate-data)
|
||||
(p/mcat executor persist-fonts)
|
||||
(p/fmap executor insert-into-db)
|
||||
(p/fmap (fn [result]
|
||||
(let [params (update params :data (comp vec keys))]
|
||||
(rph/with-meta result {::audit/replace-props params})))))))
|
||||
(let [data (-> (climit/configure cfg :process-font)
|
||||
(climit/submit! (partial generate-missing! data)))
|
||||
assets (persist-fonts-files! data)
|
||||
result (insert-font-variant! assets)]
|
||||
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))
|
||||
|
||||
;; --- UPDATE FONT FAMILY
|
||||
|
||||
|
|
|
@ -22,13 +22,9 @@
|
|||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[datoteka.io :as io]))
|
||||
|
||||
(def default-max-file-size
|
||||
(* 1024 1024 10)) ; 10 MiB
|
||||
|
@ -110,71 +106,62 @@
|
|||
;; witch holds the reference to storage object (it some kind of
|
||||
;; inverse, soft referential integrity).
|
||||
|
||||
(defn- process-main-image
|
||||
[info]
|
||||
(let [hash (sto/calculate-hash (:path info))
|
||||
data (-> (sto/content (:path info))
|
||||
(sto/wrap-with-hash hash))]
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at (:ts info)
|
||||
:content-type (:mtype info)
|
||||
:bucket "file-media-object"}))
|
||||
|
||||
(defn- process-thumb-image
|
||||
[info]
|
||||
(let [thumb (-> thumbnail-options
|
||||
(assoc :cmd :generic-thumbnail)
|
||||
(assoc :input info)
|
||||
(media/run))
|
||||
hash (sto/calculate-hash (:data thumb))
|
||||
data (-> (sto/content (:data thumb) (:size thumb))
|
||||
(sto/wrap-with-hash hash))]
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at (:ts info)
|
||||
:content-type (:mtype thumb)
|
||||
:bucket "file-media-object"}))
|
||||
|
||||
(defn- process-image
|
||||
[content]
|
||||
(let [info (media/run {:cmd :info :input content})]
|
||||
(cond-> info
|
||||
(and (not (svg-image? info))
|
||||
(big-enough-for-thumbnail? info))
|
||||
(assoc ::thumb (process-thumb-image info))
|
||||
|
||||
:always
|
||||
(assoc ::image (process-main-image info)))))
|
||||
|
||||
(defn create-file-media-object
|
||||
[{:keys [::sto/storage ::db/pool climit ::wrk/executor]}
|
||||
[{:keys [::sto/storage ::db/pool] :as cfg}
|
||||
{:keys [id file-id is-local name content]}]
|
||||
(letfn [;; Function responsible to retrieve the file information, as
|
||||
;; it is synchronous operation it should be wrapped into
|
||||
;; with-dispatch macro.
|
||||
(get-info [content]
|
||||
(climit/with-dispatch (:process-image climit)
|
||||
(media/run {:cmd :info :input content})))
|
||||
|
||||
;; Function responsible of calculating cryptographyc hash of
|
||||
;; the provided data.
|
||||
(calculate-hash [data]
|
||||
(px/with-dispatch executor
|
||||
(sto/calculate-hash data)))
|
||||
(let [result (-> (climit/configure cfg :process-image)
|
||||
(climit/submit! (partial process-image content)))
|
||||
|
||||
;; Function responsible of generating thumnail. As it is synchronous
|
||||
;; opetation, it should be wrapped into with-dispatch macro
|
||||
(generate-thumbnail [info]
|
||||
(climit/with-dispatch (:process-image climit)
|
||||
(media/run (assoc thumbnail-options
|
||||
:cmd :generic-thumbnail
|
||||
:input info))))
|
||||
image (sto/put-object! storage (::image result))
|
||||
thumb (when-let [params (::thumb result)]
|
||||
(sto/put-object! storage params))]
|
||||
|
||||
(create-thumbnail [info]
|
||||
(when (and (not (svg-image? info))
|
||||
(big-enough-for-thumbnail? info))
|
||||
(p/let [thumb (generate-thumbnail info)
|
||||
hash (calculate-hash (:data thumb))
|
||||
content (-> (sto/content (:data thumb) (:size thumb))
|
||||
(sto/wrap-with-hash hash))]
|
||||
(sto/put-object! storage
|
||||
{::sto/content content
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at (dt/now)
|
||||
:content-type (:mtype thumb)
|
||||
:bucket "file-media-object"}))))
|
||||
|
||||
(create-image [info]
|
||||
(p/let [data (:path info)
|
||||
hash (calculate-hash data)
|
||||
content (-> (sto/content data)
|
||||
(sto/wrap-with-hash hash))]
|
||||
(sto/put-object! storage
|
||||
{::sto/content content
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at (dt/now)
|
||||
:content-type (:mtype info)
|
||||
:bucket "file-media-object"})))
|
||||
|
||||
(insert-into-database [info image thumb]
|
||||
(px/with-dispatch executor
|
||||
(db/exec-one! pool [sql:create-file-media-object
|
||||
(or id (uuid/next))
|
||||
file-id is-local name
|
||||
(:id image)
|
||||
(:id thumb)
|
||||
(:width info)
|
||||
(:height info)
|
||||
(:mtype info)])))]
|
||||
|
||||
(p/let [info (get-info content)
|
||||
thumb (create-thumbnail info)
|
||||
image (create-image info)]
|
||||
(insert-into-database info image thumb))))
|
||||
(db/exec-one! pool [sql:create-file-media-object
|
||||
(or id (uuid/next))
|
||||
file-id is-local name
|
||||
(:id image)
|
||||
(:id thumb)
|
||||
(:width result)
|
||||
(:height result)
|
||||
(:mtype result)])))
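The upload path is now a straight-line composition: classify and thumbnail the image under the :process-image limit, then store both objects and insert the row. A hedged end-to-end sketch using only names visible in this hunk:

(let [result (-> (climit/configure cfg :process-image)
                 (climit/submit! (partial process-image content)))        ;; yields ::image and an optional ::thumb
      image  (sto/put-object! storage (::image result))
      thumb  (some->> (::thumb result) (sto/put-object! storage))]
  (db/exec-one! pool [sql:create-file-media-object
                      (or id (uuid/next)) file-id is-local name
                      (:id image) (:id thumb)
                      (:width result) (:height result) (:mtype result)]))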
|
||||
|
||||
;; --- Create File Media Object (from URL)
|
||||
|
||||
|
@ -192,9 +179,9 @@
|
|||
(files/check-edition-permissions! pool profile-id file-id)
|
||||
(create-file-media-object-from-url cfg params)))
|
||||
|
||||
(defn- create-file-media-object-from-url
|
||||
[cfg {:keys [url name] :as params}]
|
||||
(letfn [(parse-and-validate-size [headers]
|
||||
(defn- download-image
|
||||
[{:keys [::http/client]} uri]
|
||||
(letfn [(parse-and-validate [{:keys [headers] :as response}]
|
||||
(let [size (some-> (get headers "content-length") d/parse-integer)
|
||||
mtype (get headers "content-type")
|
||||
format (cm/mtype->format mtype)
|
||||
|
@ -217,32 +204,34 @@
|
|||
:code :media-type-not-allowed
|
||||
:hint "seems like the url points to an invalid media object"))
|
||||
|
||||
{:size size
|
||||
:mtype mtype
|
||||
:format format}))
|
||||
{:size size :mtype mtype :format format}))]
|
||||
|
||||
(download-media [uri]
|
||||
(-> (http/req! cfg {:method :get :uri uri} {:response-type :input-stream})
|
||||
(p/then process-response)))
|
||||
(let [{:keys [body] :as response} (http/req! client
|
||||
{:method :get :uri uri}
|
||||
{:response-type :input-stream :sync? true})
|
||||
{:keys [size mtype]} (parse-and-validate response)
|
||||
|
||||
(process-response [{:keys [body headers] :as response}]
|
||||
(let [{:keys [size mtype]} (parse-and-validate-size headers)
|
||||
path (tmp/tempfile :prefix "penpot.media.download.")
|
||||
written (io/write-to-file! body path :size size)]
|
||||
path (tmp/tempfile :prefix "penpot.media.download.")
|
||||
written (io/write-to-file! body path :size size)]
|
||||
|
||||
(when (not= written size)
|
||||
(ex/raise :type :internal
|
||||
:code :mismatch-write-size
|
||||
:hint "unexpected state: unable to write to file"))
|
||||
(when (not= written size)
|
||||
(ex/raise :type :internal
|
||||
:code :mismatch-write-size
|
||||
:hint "unexpected state: unable to write to file"))
|
||||
|
||||
{:filename "tempfile"
|
||||
:size size
|
||||
:path path
|
||||
:mtype mtype}))]
|
||||
{:filename "tempfile"
|
||||
:size size
|
||||
:path path
|
||||
:mtype mtype})))
|
||||
|
||||
(p/let [content (download-media url)]
|
||||
(->> (merge params {:content content :name (or name (:filename content))})
|
||||
(create-file-media-object cfg)))))
|
||||
|
||||
(defn- create-file-media-object-from-url
|
||||
[cfg {:keys [url name] :as params}]
|
||||
(let [content (download-image cfg url)
|
||||
params (-> params
|
||||
(assoc :content content)
|
||||
(assoc :name (or name (:filename content))))]
|
||||
(create-file-media-object cfg params)))
|
||||
|
||||
;; --- Clone File Media object (Upload and create from url)
|
||||
|
||||
|
|
|
@ -26,17 +26,16 @@
|
|||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(declare check-profile-existence!)
|
||||
(declare decode-row)
|
||||
(declare derive-password)
|
||||
(declare filter-props)
|
||||
(declare get-profile)
|
||||
(declare strip-private-attrs)
|
||||
(declare filter-props)
|
||||
(declare check-profile-existence!)
|
||||
(declare verify-password)
|
||||
|
||||
;; --- QUERY: Get profile (own)
|
||||
|
||||
|
@ -50,6 +49,7 @@
|
|||
;; We need to return the anonymous profile object in two cases, when
|
||||
;; no profile-id is in session, and when the db call raises not-found. In all other
|
||||
;; cases we need to reraise the exception.
|
||||
|
||||
(try
|
||||
(-> (get-profile pool profile-id)
|
||||
(strip-private-attrs)
|
||||
|
@ -120,10 +120,10 @@
|
|||
:req-un [::password ::old-password]))
|
||||
|
||||
(sv/defmethod ::update-profile-password
|
||||
{::climit/queue :auth}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id password] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (validate-password! conn (assoc params :profile-id profile-id))
|
||||
(let [cfg (assoc cfg ::db/conn conn)
|
||||
profile (validate-password! cfg (assoc params :profile-id profile-id))
|
||||
session-id (::session/id params)]
|
||||
|
||||
(when (= (str/lower (:email profile))
|
||||
|
@ -132,29 +132,30 @@
|
|||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
|
||||
(update-profile-password! conn (assoc profile :password password))
|
||||
(invalidate-profile-session! conn profile-id session-id)
|
||||
(update-profile-password! cfg (assoc profile :password password))
|
||||
(invalidate-profile-session! cfg profile-id session-id)
|
||||
nil)))
|
||||
|
||||
(defn- invalidate-profile-session!
|
||||
"Removes all sessions except the current one."
|
||||
[conn profile-id session-id]
|
||||
[{:keys [::db/conn]} profile-id session-id]
|
||||
(let [sql "delete from http_session where profile_id = ? and id != ?"]
|
||||
(:next.jdbc/update-count (db/exec-one! conn [sql profile-id session-id]))))
|
||||
|
||||
(defn- validate-password!
|
||||
[conn {:keys [profile-id old-password] :as params}]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
|
||||
(let [profile (db/get-by-id conn :profile profile-id ::db/for-update? true)]
|
||||
(when-not (:valid (auth/verify-password old-password (:password profile)))
|
||||
(when-not (:valid (verify-password cfg old-password (:password profile)))
|
||||
(ex/raise :type :validation
|
||||
:code :old-password-not-match))
|
||||
profile))
|
||||
|
||||
(defn update-profile-password!
|
||||
[conn {:keys [id password] :as profile}]
|
||||
(db/update! conn :profile
|
||||
{:password (auth/derive-password password)}
|
||||
{:id id}))
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
|
||||
(let [password (derive-password cfg password)]
|
||||
(db/update! conn :profile
|
||||
{:password password}
|
||||
{:id id})))
|
||||
|
||||
;; --- MUTATION: Update Photo
|
||||
|
||||
|
@ -173,61 +174,49 @@
|
|||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(update-profile-photo cfg (assoc params :profile-id profile-id))))
|
||||
|
||||
;; TODO: reimplement it without p/let
|
||||
|
||||
(defn update-profile-photo
|
||||
[{:keys [::db/pool ::sto/storage ::wrk/executor] :as cfg} {:keys [profile-id file] :as params}]
|
||||
(letfn [(on-uploaded [photo]
|
||||
(let [profile (db/get-by-id pool :profile profile-id ::db/for-update? true)]
|
||||
[{:keys [::db/pool ::sto/storage] :as cfg} {:keys [profile-id file] :as params}]
|
||||
(let [photo (upload-photo cfg params)
|
||||
profile (db/get-by-id pool :profile profile-id ::db/for-update? true)]
|
||||
|
||||
;; Schedule deletion of old photo
|
||||
(when-let [id (:photo-id profile)]
|
||||
(sto/touch-object! storage id))
|
||||
;; Schedule deletion of old photo
|
||||
(when-let [id (:photo-id profile)]
|
||||
(sto/touch-object! storage id))
|
||||
|
||||
;; Save new photo
|
||||
(db/update! pool :profile
|
||||
{:photo-id (:id photo)}
|
||||
{:id profile-id})
|
||||
;; Save new photo
|
||||
(db/update! pool :profile
|
||||
{:photo-id (:id photo)}
|
||||
{:id profile-id})
|
||||
|
||||
(-> (rph/wrap)
|
||||
(rph/with-meta {::audit/replace-props
|
||||
{:file-name (:filename file)
|
||||
:file-size (:size file)
|
||||
:file-path (str (:path file))
|
||||
:file-mtype (:mtype file)}}))))]
|
||||
(->> (upload-photo cfg params)
|
||||
(p/fmap executor on-uploaded))))
|
||||
(-> (rph/wrap)
|
||||
(rph/with-meta {::audit/replace-props
|
||||
{:file-name (:filename file)
|
||||
:file-size (:size file)
|
||||
:file-path (str (:path file))
|
||||
:file-mtype (:mtype file)}}))))
|
||||
|
||||
(defn- generate-thumbnail!
|
||||
[file]
|
||||
(let [input (media/run {:cmd :info :input file})
|
||||
thumb (media/run {:cmd :profile-thumbnail
|
||||
:format :jpeg
|
||||
:quality 85
|
||||
:width 256
|
||||
:height 256
|
||||
:input input})
|
||||
hash (sto/calculate-hash (:data thumb))
|
||||
content (-> (sto/content (:data thumb) (:size thumb))
|
||||
(sto/wrap-with-hash hash))]
|
||||
{::sto/content content
|
||||
::sto/deduplicate? true
|
||||
:bucket "profile"
|
||||
:content-type (:mtype thumb)}))
|
||||
|
||||
(defn upload-photo
|
||||
[{:keys [::sto/storage ::wrk/executor climit] :as cfg} {:keys [file]}]
|
||||
(letfn [(get-info [content]
|
||||
(climit/with-dispatch (:process-image climit)
|
||||
(media/run {:cmd :info :input content})))
|
||||
|
||||
(generate-thumbnail [info]
|
||||
(climit/with-dispatch (:process-image climit)
|
||||
(media/run {:cmd :profile-thumbnail
|
||||
:format :jpeg
|
||||
:quality 85
|
||||
:width 256
|
||||
:height 256
|
||||
:input info})))
|
||||
|
||||
;; Function responsible for calculating the cryptographic hash of
|
||||
;; the provided data.
|
||||
(calculate-hash [data]
|
||||
(px/with-dispatch executor
|
||||
(sto/calculate-hash data)))]
|
||||
|
||||
(p/let [info (get-info file)
|
||||
thumb (generate-thumbnail info)
|
||||
hash (calculate-hash (:data thumb))
|
||||
content (-> (sto/content (:data thumb) (:size thumb))
|
||||
(sto/wrap-with-hash hash))]
|
||||
(sto/put-object! storage {::sto/content content
|
||||
::sto/deduplicate? true
|
||||
:bucket "profile"
|
||||
:content-type (:mtype thumb)}))))
|
||||
[{:keys [::sto/storage] :as cfg} {:keys [file]}]
|
||||
(let [params (-> (climit/configure cfg :process-image)
|
||||
(climit/submit! (partial generate-thumbnail! file)))]
|
||||
(sto/put-object! storage params)))
|
||||
|
||||
|
||||
;; --- MUTATION: Request Email Change
|
||||
|
@ -417,6 +406,17 @@
|
|||
[props]
|
||||
(into {} (filter (fn [[k _]] (simple-ident? k))) props))
|
||||
|
||||
(defn derive-password
|
||||
[cfg password]
|
||||
(when password
|
||||
(-> (climit/configure cfg :derive-password)
|
||||
(climit/submit! (partial auth/derive-password password)))))
|
||||
|
||||
(defn verify-password
|
||||
[cfg password password-data]
|
||||
(-> (climit/configure cfg :derive-password)
|
||||
(climit/submit! (partial auth/verify-password password password-data))))
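;; Illustrative sketch, not part of the diff: how the climit-backed helpers
;; above could be exercised from a REPL, assuming `cfg` is a system map that
;; already carries the configured :derive-password concurrency limit.
(comment
  (let [hashed (derive-password cfg "user-secret")]
    ;; verify-password returns a map with a :valid flag (see validate-password!)
    (:valid (verify-password cfg "user-secret" hashed))))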
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [props] :as row}]
|
||||
(cond-> row
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
(ns app.rpc.commands.projects
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
|
@ -79,7 +80,7 @@
|
|||
(sv/defmethod ::get-projects
|
||||
{::doc/added "1.18"}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-projects conn profile-id team-id)))
|
||||
|
||||
|
@ -114,7 +115,7 @@
|
|||
(sv/defmethod ::get-all-projects
|
||||
{::doc/added "1.18"}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(get-all-projects conn profile-id)))
|
||||
|
||||
(def sql:all-projects
|
||||
|
@ -157,7 +158,7 @@
|
|||
(sv/defmethod ::get-project
|
||||
{::doc/added "1.18"}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [project (db/get-by-id conn :project id)]
|
||||
(check-read-permissions! conn profile-id id)
|
||||
project)))
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
(ns app.rpc.commands.teams
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
|
@ -27,11 +28,8 @@
|
|||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;; --- Helpers & Specs
|
||||
|
||||
|
@ -78,13 +76,15 @@
|
|||
|
||||
(declare retrieve-teams)
|
||||
|
||||
(def counter (volatile! 0))
|
||||
|
||||
(s/def ::get-teams
|
||||
(s/keys :req [::rpc/profile-id]))
|
||||
|
||||
(sv/defmethod ::get-teams
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(retrieve-teams conn profile-id)))
|
||||
|
||||
(def sql:teams
|
||||
|
@ -129,7 +129,7 @@
|
|||
(sv/defmethod ::get-team
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(retrieve-team conn profile-id id)))
|
||||
|
||||
(defn retrieve-team
|
||||
|
@ -170,7 +170,7 @@
|
|||
(sv/defmethod ::get-team-members
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(retrieve-team-members conn team-id)))
|
||||
|
||||
|
@ -188,7 +188,7 @@
|
|||
(sv/defmethod ::get-team-users
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id file-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(if team-id
|
||||
(do
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
|
@ -246,7 +246,7 @@
|
|||
(sv/defmethod ::get-team-stats
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(retrieve-team-stats conn team-id)))
|
||||
|
||||
|
@ -277,7 +277,7 @@
|
|||
(sv/defmethod ::get-team-invitations
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(get-team-invitations conn team-id)))
|
||||
|
||||
|
@ -588,10 +588,9 @@
|
|||
(update-team-photo cfg (assoc params :profile-id profile-id))))
|
||||
|
||||
(defn update-team-photo
|
||||
[{:keys [::db/pool ::sto/storage ::wrk/executor] :as cfg} {:keys [profile-id team-id] :as params}]
|
||||
(p/let [team (px/with-dispatch executor
|
||||
(retrieve-team pool profile-id team-id))
|
||||
photo (profile/upload-photo cfg params)]
|
||||
[{:keys [::db/pool ::sto/storage] :as cfg} {:keys [profile-id team-id] :as params}]
|
||||
(let [team (retrieve-team pool profile-id team-id)
|
||||
photo (profile/upload-photo cfg params)]
|
||||
|
||||
;; Mark object as touched to make it eligible for tentative
|
||||
;; garbage collection.
|
||||
|
@ -694,13 +693,13 @@
|
|||
(l/info :hint "invitation token" :token itoken))
|
||||
|
||||
(audit/submit! cfg
|
||||
{:type "action"
|
||||
:name (if updated?
|
||||
"update-team-invitation"
|
||||
"create-team-invitation")
|
||||
:profile-id (:id profile)
|
||||
:props (-> (dissoc tprops :profile-id)
|
||||
(d/without-nils))})
|
||||
{::audit/type "action"
|
||||
::audit/name (if updated?
|
||||
"update-team-invitation"
|
||||
"create-team-invitation")
|
||||
::audit/profile-id (:id profile)
|
||||
::audit/props (-> (dissoc tprops :profile-id)
|
||||
(d/without-nils))})
|
||||
|
||||
(eml/send! {::eml/conn conn
|
||||
::eml/factory eml/invite-to-team
|
||||
|
@ -802,13 +801,13 @@
|
|||
::quotes/incr (count emails)}))
|
||||
|
||||
(audit/submit! cfg
|
||||
{:type "command"
|
||||
:name "create-team-invitations"
|
||||
:profile-id profile-id
|
||||
:props {:emails emails
|
||||
:role role
|
||||
:profile-id profile-id
|
||||
:invitations (count emails)}})
|
||||
{::audit/type "command"
|
||||
::audit/name "create-team-invitations"
|
||||
::audit/profile-id profile-id
|
||||
::audit/props {:emails emails
|
||||
:role role
|
||||
:profile-id profile-id
|
||||
:invitations (count emails)}})
|
||||
|
||||
(vary-meta team assoc ::audit/props {:invitations (count emails)}))))
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
(ns app.rpc.commands.viewer
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.db :as db]
|
||||
[app.rpc :as-alias rpc]
|
||||
|
@ -85,5 +86,5 @@
|
|||
::cond/reuse-key? true
|
||||
::doc/added "1.17"}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(get-view-only-bundle conn (assoc params :profile-id profile-id))))
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
(ns app.rpc.commands.webhooks
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uri :as u]
|
||||
|
@ -18,10 +19,8 @@
|
|||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.core :as p]))
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [uri] :as row}]
|
||||
|
@ -48,30 +47,26 @@
|
|||
|
||||
(defn- validate-webhook!
|
||||
[cfg whook params]
|
||||
(letfn [(handle-exception [exception]
|
||||
(if-let [hint (webhooks/interpret-exception exception)]
|
||||
(ex/raise :type :validation
|
||||
:code :webhook-validation
|
||||
:hint hint)
|
||||
(ex/raise :type :internal
|
||||
:code :webhook-validation
|
||||
:cause exception)))
|
||||
(when (not= (:uri whook) (:uri params))
|
||||
(try
|
||||
(let [response (http/req! cfg
|
||||
{:method :head
|
||||
:uri (str (:uri params))
|
||||
:timeout (dt/duration "3s")}
|
||||
{:sync? true})]
|
||||
(when-let [hint (webhooks/interpret-response response)]
|
||||
(ex/raise :type :validation
|
||||
:code :webhook-validation
|
||||
:hint hint)))
|
||||
|
||||
(handle-response [response]
|
||||
(when-let [hint (webhooks/interpret-response response)]
|
||||
(ex/raise :type :validation
|
||||
:code :webhook-validation
|
||||
:hint hint)))]
|
||||
|
||||
(if (not= (:uri whook) (:uri params))
|
||||
(->> (http/req! cfg {:method :head
|
||||
:uri (str (:uri params))
|
||||
:timeout (dt/duration "3s")})
|
||||
(p/hmap (fn [response exception]
|
||||
(if exception
|
||||
(handle-exception exception)
|
||||
(handle-response response)))))
|
||||
(p/resolved nil))))
|
||||
(catch Throwable cause
|
||||
(if-let [hint (webhooks/interpret-exception cause)]
|
||||
(ex/raise :type :validation
|
||||
:code :webhook-validation
|
||||
:hint hint)
|
||||
(ex/raise :type :internal
|
||||
:code :webhook-validation
|
||||
:cause cause))))))
|
||||
|
||||
(defn- validate-quotes!
|
||||
[{:keys [::db/pool]} {:keys [team-id]}]
|
||||
|
@ -106,22 +101,22 @@
|
|||
|
||||
(sv/defmethod ::create-webhook
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(check-edition-permissions! pool profile-id team-id)
|
||||
(validate-quotes! cfg params)
|
||||
(->> (validate-webhook! cfg nil params)
|
||||
(p/fmap executor (fn [_] (insert-webhook! cfg params)))))
|
||||
(validate-webhook! cfg nil params)
|
||||
(insert-webhook! cfg params))
|
||||
|
||||
(s/def ::update-webhook
|
||||
(s/keys :req-un [::id ::uri ::mtype ::is-active]))
|
||||
|
||||
(sv/defmethod ::update-webhook
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [::rpc/profile-id id] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
|
||||
(let [whook (-> (db/get pool :webhook {:id id}) (decode-row))]
|
||||
(check-edition-permissions! pool profile-id (:team-id whook))
|
||||
(->> (validate-webhook! cfg whook params)
|
||||
(p/fmap executor (fn [_] (update-webhook! cfg whook params))))))
|
||||
(validate-webhook! cfg whook params)
|
||||
(update-webhook! cfg whook params)))
|
||||
|
||||
(s/def ::delete-webhook
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
|
@ -149,7 +144,7 @@
|
|||
|
||||
(sv/defmethod ::get-webhooks
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(->> (db/exec! conn [sql:get-webhooks team-id])
|
||||
(mapv decode-row))))
|
||||
|
|
|
@ -27,8 +27,6 @@
|
|||
[app.common.logging :as l]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.services :as-alias sv]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.response :as yrs]))
|
||||
|
||||
(def
|
||||
|
@ -38,30 +36,24 @@
|
|||
|
||||
(defn- fmt-key
|
||||
[s]
|
||||
(when s
|
||||
(str "W/\"" s "\"")))
|
||||
(str "W/\"" s "\""))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [executor]} f {:keys [::get-object ::key-fn ::reuse-key?] :as mdata}]
|
||||
[_ f {:keys [::get-object ::key-fn ::reuse-key?] :as mdata}]
|
||||
(if (and (ifn? get-object) (ifn? key-fn))
|
||||
(do
|
||||
(l/debug :hint "instrumenting method" :service (::sv/name mdata))
|
||||
(fn [cfg {:keys [::key] :as params}]
|
||||
(if *enabled*
|
||||
(->> (if (or key reuse-key?)
|
||||
(->> (px/submit! executor (partial get-object cfg params))
|
||||
(p/map key-fn)
|
||||
(p/map fmt-key))
|
||||
(p/resolved nil))
|
||||
(p/mapcat (fn [key']
|
||||
(if (and (some? key)
|
||||
(= key key'))
|
||||
(p/resolved (fn [_] (yrs/response 304)))
|
||||
(->> (f cfg params)
|
||||
(p/map (fn [result]
|
||||
(->> (or (and reuse-key? key')
|
||||
(-> result meta ::key fmt-key)
|
||||
(-> result key-fn fmt-key))
|
||||
(rph/with-header result "etag")))))))))
|
||||
(let [key' (when (or key reuse-key?)
|
||||
(some-> (get-object cfg params) key-fn fmt-key))]
|
||||
(if (and (some? key)
|
||||
(= key key'))
|
||||
(fn [_] {::yrs/status 304})
|
||||
(let [result (f cfg params)
|
||||
etag (or (and reuse-key? key')
|
||||
(some-> result meta ::key fmt-key)
|
||||
(some-> result key-fn fmt-key))]
|
||||
(rph/with-header result "etag" etag))))
|
||||
(f cfg params))))
|
||||
f))
|
||||
|
|
|
@ -30,32 +30,34 @@
|
|||
|
||||
(defn- prepare-context
|
||||
[methods]
|
||||
(letfn [(gen-doc [type [name f]]
|
||||
(let [mdata (meta f)]
|
||||
{:type (d/name type)
|
||||
:name (d/name name)
|
||||
:module (-> (:ns mdata) (str/split ".") last)
|
||||
:auth (:auth mdata true)
|
||||
:webhook (::webhooks/event? mdata false)
|
||||
:docs (::sv/docstring mdata)
|
||||
:deprecated (::deprecated mdata)
|
||||
:added (::added mdata)
|
||||
:changes (some->> (::changes mdata) (partition-all 2) (map vec))
|
||||
:spec (get-spec-str (::sv/spec mdata))}))]
|
||||
(letfn [(gen-doc [type [{:keys [::sv/name] :as mdata} _f]]
|
||||
{:type (d/name type)
|
||||
:name (d/name name)
|
||||
:module (-> (:ns mdata) (str/split ".") last)
|
||||
:auth (:auth mdata true)
|
||||
:webhook (::webhooks/event? mdata false)
|
||||
:docs (::sv/docstring mdata)
|
||||
:deprecated (::deprecated mdata)
|
||||
:added (::added mdata)
|
||||
:changes (some->> (::changes mdata) (partition-all 2) (map vec))
|
||||
:spec (get-spec-str (::sv/spec mdata))})]
|
||||
|
||||
{:version (:main cf/version)
|
||||
:command-methods
|
||||
(->> (:commands methods)
|
||||
(map val)
|
||||
(map (partial gen-doc :command))
|
||||
(sort-by (juxt :module :name)))
|
||||
|
||||
:query-methods
|
||||
(->> (:queries methods)
|
||||
(map val)
|
||||
(map (partial gen-doc :query))
|
||||
(sort-by (juxt :module :name)))
|
||||
|
||||
:mutation-methods
|
||||
(->> (:mutations methods)
|
||||
(map val)
|
||||
(map (partial gen-doc :query))
|
||||
(sort-by (juxt :module :name)))}))
|
||||
|
||||
|
@ -64,11 +66,11 @@
|
|||
(if (contains? cf/flags :backend-api-doc)
|
||||
(let [context (prepare-context methods)]
|
||||
(fn [_ respond _]
|
||||
(respond (yrs/response 200 (-> (io/resource "app/templates/api-doc.tmpl")
|
||||
(tmpl/render context))))))
|
||||
(respond {::yrs/status 200
|
||||
::yrs/body (-> (io/resource "app/templates/api-doc.tmpl")
|
||||
(tmpl/render context))})))
|
||||
(fn [_ respond _]
|
||||
(respond (yrs/response 404)))))
|
||||
|
||||
(respond {::yrs/status 404}))))
|
||||
|
||||
(s/def ::routes vector?)
|
||||
|
||||
|
|
|
@ -10,7 +10,8 @@
|
|||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.http :as-alias http]
|
||||
[app.rpc :as-alias rpc]))
|
||||
[app.rpc :as-alias rpc]
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
;; A utility wrapper object for wrapping service responses that do not
|
||||
;; implement the IObj interface, making it possible to attach metadata to
|
||||
|
@ -35,7 +36,9 @@
|
|||
o
|
||||
(MetadataWrapper. o {})))
|
||||
([o m]
|
||||
(MetadataWrapper. o m)))
|
||||
(if (instance? clojure.lang.IObj o)
|
||||
(vary-meta o merge m)
|
||||
(MetadataWrapper. o m))))
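;; Illustrative sketch, not part of the diff: with the change above, values that
;; already implement IObj (maps, vectors, ...) keep their own type and only get
;; the metadata merged in, while non-IObj values (e.g. a plain String) are still
;; wrapped in a MetadataWrapper.
(comment
  (meta (wrap {:a 1} {:etag "W/\"abc\""})) ;; a plain map carrying the merged metadata
  (wrap "raw-body" {:etag "W/\"abc\""}))   ;; a MetadataWrapper instance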
|
||||
|
||||
(defn wrapped?
|
||||
[o]
|
||||
|
@ -74,4 +77,4 @@
|
|||
(fn [_ response]
|
||||
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
|
||||
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
|
||||
(update response :headers assoc "cache-control" val)))))
|
||||
(update response ::yrs/headers assoc "cache-control" val)))))
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.media :as media]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
|
@ -78,20 +77,20 @@
|
|||
(s/keys :req-un [::profile-id ::password ::old-password]))
|
||||
|
||||
(sv/defmethod ::update-profile-password
|
||||
{::climit/queue :auth
|
||||
::doc/added "1.0"
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.18"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [password] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (#'profile/validate-password! conn params)
|
||||
(let [cfg (assoc cfg ::db/conn conn)
|
||||
profile (#'profile/validate-password! cfg params)
|
||||
session-id (::session/id params)]
|
||||
(when (= (str/lower (:email profile))
|
||||
(str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
(profile/update-profile-password! conn (assoc profile :password password))
|
||||
(#'profile/invalidate-profile-session! conn (:id profile) session-id)
|
||||
(profile/update-profile-password! cfg (assoc profile :password password))
|
||||
(#'profile/invalidate-profile-session! cfg (:id profile) session-id)
|
||||
nil)))
|
||||
|
||||
|
||||
|
|
|
@ -10,8 +10,7 @@
|
|||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.util.retry :refer [conflict-exception?]]
|
||||
[app.util.services :as sv]
|
||||
[promesa.core :as p]))
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(defn conflict-db-insert?
|
||||
"Check if exception matches a insertion conflict on postgresql."
|
||||
|
@ -28,18 +27,16 @@
|
|||
|
||||
(if-let [max-retries (::max-retries mdata)]
|
||||
(fn [cfg params]
|
||||
(letfn [(run [retry]
|
||||
(->> (f cfg params)
|
||||
(p/merr (partial handle-error retry))))
|
||||
|
||||
(handle-error [retry cause]
|
||||
(if (matches cause)
|
||||
(let [current-retry (inc retry)]
|
||||
(l/trace :hint "running retry algorithm" :retry current-retry)
|
||||
(if (<= current-retry max-retries)
|
||||
(run current-retry)
|
||||
(throw cause)))
|
||||
(throw cause)))]
|
||||
(run 1)))
|
||||
((fn run [retry]
|
||||
(try
|
||||
(f cfg params)
|
||||
(catch Throwable cause
|
||||
(if (matches cause)
|
||||
(let [current-retry (inc retry)]
|
||||
(l/trace :hint "running retry algorithm" :retry current-retry)
|
||||
(if (<= current-retry max-retries)
|
||||
(run current-retry)
|
||||
(throw cause)))
|
||||
(throw cause))))) 1))
|
||||
f))
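;; Illustrative sketch, not part of the diff: the retry wrapper above makes at
;; most ::max-retries total attempts and only retries exceptions accepted by the
;; `matches` predicate. A self-contained version of the same loop, using the
;; hypothetical `insert-row!` thunk as the retried operation:
(comment
  (letfn [(with-retries [thunk retryable? max-retries]
            (loop [attempt 1]
              (let [res (try {:ok (thunk)}
                             (catch Throwable cause {:err cause}))]
                (if-let [cause (:err res)]
                  (if (and (retryable? cause) (< attempt max-retries))
                    (recur (inc attempt))
                    (throw cause))
                  (:ok res)))))]
    (with-retries #(insert-row!) conflict-db-insert? 3)))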
|
||||
|
||||
|
|
|
@ -55,6 +55,7 @@
|
|||
[app.redis :as rds]
|
||||
[app.redis.script :as-alias rscript]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.rlimit.result :as-alias lresult]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
|
@ -64,7 +65,6 @@
|
|||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(def ^:private default-timeout
|
||||
|
@ -82,7 +82,7 @@
|
|||
{::rscript/name ::window-rate-limit
|
||||
::rscript/path "app/rpc/rlimit/window.lua"})
|
||||
|
||||
(def enabled?
|
||||
(def enabled
|
||||
"Allows on runtime completely disable rate limiting."
|
||||
(atom true))
|
||||
|
||||
|
@ -119,116 +119,97 @@
|
|||
(defmethod parse-limit :bucket
|
||||
[[name strategy opts :as vlimit]]
|
||||
(us/assert! ::limit-tuple vlimit)
|
||||
(merge
|
||||
{::name name
|
||||
::strategy strategy}
|
||||
(if-let [[_ capacity rate interval] (re-find bucket-opts-re opts)]
|
||||
(let [interval (dt/duration interval)
|
||||
rate (parse-long rate)
|
||||
capacity (parse-long capacity)]
|
||||
{::capacity capacity
|
||||
::rate rate
|
||||
::interval interval
|
||||
::opts opts
|
||||
::params [(dt/->seconds interval) rate capacity]
|
||||
::key (str "ratelimit.bucket." (d/name name))})
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-bucket-limit-opts
|
||||
:hint (str/ffmt "looks like '%' does not have a valid format" opts)))))
|
||||
(if-let [[_ capacity rate interval] (re-find bucket-opts-re opts)]
|
||||
(let [interval (dt/duration interval)
|
||||
rate (parse-long rate)
|
||||
capacity (parse-long capacity)]
|
||||
{::name name
|
||||
::strategy strategy
|
||||
::capacity capacity
|
||||
::rate rate
|
||||
::interval interval
|
||||
::opts opts
|
||||
::params [(dt/->seconds interval) rate capacity]
|
||||
::key (str "ratelimit.bucket." (d/name name))})
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-bucket-limit-opts
|
||||
:hint (str/ffmt "looks like '%' does not have a valid format" opts))))
|
||||
|
||||
(defmethod process-limit :bucket
|
||||
[redis user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
|
||||
(let [script (-> bucket-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id)])
|
||||
(assoc ::rscript/vals (conj params (dt/->seconds now))))]
|
||||
(->> (rds/eval! redis script)
|
||||
(p/fmap (fn [result]
|
||||
(let [allowed? (boolean (nth result 0))
|
||||
remaining (nth result 1)
|
||||
reset (* (/ (inst-ms interval) rate)
|
||||
(- capacity remaining))]
|
||||
(l/trace :hint "limit processed"
|
||||
:service service
|
||||
:limit (name (::name limit))
|
||||
:strategy (name (::strategy limit))
|
||||
:opts (::opts limit)
|
||||
:allowed? allowed?
|
||||
:remaining remaining)
|
||||
(-> limit
|
||||
(assoc ::lresult/allowed? allowed?)
|
||||
(assoc ::lresult/reset (dt/plus now reset))
|
||||
(assoc ::lresult/remaining remaining))))))))
|
||||
(let [script (-> bucket-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id)])
|
||||
(assoc ::rscript/vals (conj params (dt/->seconds now))))
|
||||
result (rds/eval! redis script)
|
||||
allowed? (boolean (nth result 0))
|
||||
remaining (nth result 1)
|
||||
reset (* (/ (inst-ms interval) rate)
|
||||
(- capacity remaining))]
|
||||
(l/trace :hint "limit processed"
|
||||
:service service
|
||||
:limit (name (::name limit))
|
||||
:strategy (name (::strategy limit))
|
||||
:opts (::opts limit)
|
||||
:allowed allowed?
|
||||
:remaining remaining)
|
||||
(-> limit
|
||||
(assoc ::lresult/allowed allowed?)
|
||||
(assoc ::lresult/reset (dt/plus now reset))
|
||||
(assoc ::lresult/remaining remaining))))
|
||||
|
||||
(defmethod process-limit :window
|
||||
[redis user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
|
||||
(let [ts (dt/truncate now unit)
|
||||
ttl (dt/diff now (dt/plus ts {unit 1}))
|
||||
script (-> window-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id "." (dt/format-instant ts))])
|
||||
(assoc ::rscript/vals [nreq (dt/->seconds ttl)]))]
|
||||
(->> (rds/eval! redis script)
|
||||
(p/fmap (fn [result]
|
||||
(let [allowed? (boolean (nth result 0))
|
||||
remaining (nth result 1)]
|
||||
(l/trace :hint "limit processed"
|
||||
:service service
|
||||
:limit (name (::name limit))
|
||||
:strategy (name (::strategy limit))
|
||||
:opts (::opts limit)
|
||||
:allowed? allowed?
|
||||
:remaining remaining)
|
||||
(-> limit
|
||||
(assoc ::lresult/allowed? allowed?)
|
||||
(assoc ::lresult/remaining remaining)
|
||||
(assoc ::lresult/reset (dt/plus ts {unit 1})))))))))
|
||||
(let [ts (dt/truncate now unit)
|
||||
ttl (dt/diff now (dt/plus ts {unit 1}))
|
||||
script (-> window-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id "." (dt/format-instant ts))])
|
||||
(assoc ::rscript/vals [nreq (dt/->seconds ttl)]))
|
||||
result (rds/eval! redis script)
|
||||
allowed? (boolean (nth result 0))
|
||||
remaining (nth result 1)]
|
||||
(l/trace :hint "limit processed"
|
||||
:service service
|
||||
:limit (name (::name limit))
|
||||
:strategy (name (::strategy limit))
|
||||
:opts (::opts limit)
|
||||
:allowed allowed?
|
||||
:remaining remaining)
|
||||
(-> limit
|
||||
(assoc ::lresult/allowed allowed?)
|
||||
(assoc ::lresult/remaining remaining)
|
||||
(assoc ::lresult/reset (dt/plus ts {unit 1})))))
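;; Illustrative sketch, not part of the diff: how the :window strategy derives
;; the window start and TTL, assuming the dt helpers behave as used above and
;; that the configured unit is :hours.
(comment
  (let [now (dt/now)
        ts  (dt/truncate now :hours)               ;; start of the current window
        ttl (dt/diff now (dt/plus ts {:hours 1}))] ;; time left until the next window
    {:window-start ts :ttl-seconds (dt/->seconds ttl)}))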
|
||||
|
||||
(defn- process-limits!
|
||||
[redis user-id limits now]
|
||||
(->> (p/all (map (partial process-limit redis user-id now) limits))
|
||||
(p/fmap (fn [results]
|
||||
(let [remaining (->> results
|
||||
(d/index-by ::name ::lresult/remaining)
|
||||
(uri/map->query-string))
|
||||
reset (->> results
|
||||
(d/index-by ::name (comp dt/->seconds ::lresult/reset))
|
||||
(uri/map->query-string))
|
||||
rejected (->> results
|
||||
(filter (complement ::lresult/allowed?))
|
||||
(first))]
|
||||
(let [results (into [] (map (partial process-limit redis user-id now)) limits)
|
||||
remaining (->> results
|
||||
(d/index-by ::name ::lresult/remaining)
|
||||
(uri/map->query-string))
|
||||
reset (->> results
|
||||
(d/index-by ::name (comp dt/->seconds ::lresult/reset))
|
||||
(uri/map->query-string))
|
||||
|
||||
(when rejected
|
||||
(l/warn :hint "rejected rate limit"
|
||||
:user-id (str user-id)
|
||||
:limit-service (-> rejected ::service name)
|
||||
:limit-name (-> rejected ::name name)
|
||||
:limit-strategy (-> rejected ::strategy name)))
|
||||
rejected (d/seek (complement ::lresult/allowed) results)]
|
||||
|
||||
{:enabled? true
|
||||
:allowed? (not (some? rejected))
|
||||
:headers {"x-rate-limit-remaining" remaining
|
||||
"x-rate-limit-reset" reset}})))))
|
||||
(when rejected
|
||||
(l/warn :hint "rejected rate limit"
|
||||
:user-id (str user-id)
|
||||
:limit-service (-> rejected ::service name)
|
||||
:limit-name (-> rejected ::name name)
|
||||
:limit-strategy (-> rejected ::strategy name)))
|
||||
|
||||
(defn- handle-response
|
||||
[f cfg params result]
|
||||
(if (:enabled? result)
|
||||
(let [headers (:headers result)]
|
||||
(if (:allowed? result)
|
||||
(->> (f cfg params)
|
||||
(p/fmap (fn [response]
|
||||
(vary-meta response update ::http/headers merge headers))))
|
||||
(p/rejected
|
||||
(ex/error :type :rate-limit
|
||||
:code :request-blocked
|
||||
:hint "rate limit reached"
|
||||
::http/headers headers))))
|
||||
(f cfg params)))
|
||||
{::enabled true
|
||||
::allowed (not (some? rejected))
|
||||
::remaining remaining
|
||||
::reset reset
|
||||
::headers {"x-rate-limit-remaining" remaining
|
||||
"x-rate-limit-reset" reset}}))
|
||||
|
||||
(defn- get-limits
|
||||
[state skey sname]
|
||||
(some->> (or (get-in @state [::limits skey])
|
||||
(get-in @state [::limits :default]))
|
||||
(map #(assoc % ::service sname))
|
||||
(seq)))
|
||||
(when-let [limits (or (get-in @state [::limits skey])
|
||||
(get-in @state [::limits :default]))]
|
||||
(into [] (map #(assoc % ::service sname)) limits)))
|
||||
|
||||
(defn- get-uid
|
||||
[{:keys [::http/request] :as params}]
|
||||
|
@ -236,6 +217,31 @@
|
|||
(some-> request parse-client-ip)
|
||||
uuid/zero))
|
||||
|
||||
(defn process-request!
|
||||
[{:keys [::rpc/rlimit ::rds/redis ::skey ::sname] :as cfg} params]
|
||||
(when-let [limits (get-limits rlimit skey sname)]
|
||||
(let [redis (rds/get-or-connect redis ::rpc/rlimit default-options)
|
||||
uid (get-uid params)
|
||||
;; FIXME: why not classic try/catch?
|
||||
result (ex/try! (process-limits! redis uid limits (dt/now)))]
|
||||
|
||||
(l/trc :hint "process-limits"
|
||||
:service sname
|
||||
:remaining (::remaining result)
|
||||
:reset (::reset result))
|
||||
|
||||
(cond
|
||||
(ex/exception? result)
|
||||
(do
|
||||
(l/error :hint "error on processing rate-limit" :cause result)
|
||||
{::enabled false})
|
||||
|
||||
(contains? cf/flags :soft-rpc-rlimit)
|
||||
{::enabled false}
|
||||
|
||||
:else
|
||||
result))))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [::rpc/rlimit ::rds/redis] :as cfg} f mdata]
|
||||
(us/assert! ::rpc/rlimit rlimit)
|
||||
|
@ -243,36 +249,25 @@
|
|||
|
||||
(if rlimit
|
||||
(let [skey (keyword (::rpc/type cfg) (->> mdata ::sv/spec name))
|
||||
sname (str (::rpc/type cfg) "." (->> mdata ::sv/spec name))]
|
||||
sname (str (::rpc/type cfg) "." (->> mdata ::sv/spec name))
|
||||
cfg (-> cfg
|
||||
(assoc ::skey skey)
|
||||
(assoc ::sname sname))]
|
||||
|
||||
(fn [cfg params]
|
||||
(if @enabled?
|
||||
(try
|
||||
(let [uid (get-uid params)
|
||||
rsp (when-let [limits (get-limits rlimit skey sname)]
|
||||
(let [redis (rds/get-or-connect redis ::rpc/rlimit default-options)
|
||||
rsp (->> (process-limits! redis uid limits (dt/now))
|
||||
(p/merr (fn [cause]
|
||||
;; If we have an error on processing the rate-limit we just skip
|
||||
;; it so we do not cause service interruption because of redis
|
||||
;; downtime or similar situation.
|
||||
(l/error :hint "error on processing rate-limit" :cause cause)
|
||||
(p/resolved {:enabled? false}))))]
|
||||
|
||||
;; If soft rate limits are enabled, we process the rate-limit but return an unprotected
|
||||
;; response.
|
||||
(if (contains? cf/flags :soft-rpc-rlimit)
|
||||
{:enabled? false}
|
||||
rsp)))]
|
||||
|
||||
(->> (p/promise rsp)
|
||||
(p/fmap #(or % {:enabled? false}))
|
||||
(p/mcat #(handle-response f cfg params %))))
|
||||
|
||||
(catch Throwable cause
|
||||
(p/rejected cause)))
|
||||
|
||||
(f cfg params))))
|
||||
(fn [hcfg params]
|
||||
(if @enabled
|
||||
(let [result (process-request! cfg params)]
|
||||
(if (::enabled result)
|
||||
(if (::allowed result)
|
||||
(-> (f hcfg params)
|
||||
(rph/wrap)
|
||||
(vary-meta update ::http/headers merge (::headers result)))
|
||||
(ex/raise :type :rate-limit
|
||||
:code :request-blocked
|
||||
:hint "rate limit reached"
|
||||
::http/headers (::headers result)))
|
||||
(f hcfg params)))
|
||||
(f hcfg params))))
|
||||
f))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
@ -352,7 +347,7 @@
|
|||
::limits limits}))))
|
||||
|
||||
(defn- refresh-config
|
||||
[{:keys [::state ::path ::wrk/executor ::wrk/scheduled-executor] :as cfg}]
|
||||
[{:keys [::state ::path ::wrk/executor] :as cfg}]
|
||||
(letfn [(update-config [{:keys [::updated-at] :as state}]
|
||||
(let [updated-at' (fs/last-modified-time path)]
|
||||
(merge state
|
||||
|
@ -367,8 +362,7 @@
|
|||
state)))))
|
||||
|
||||
(schedule-next [state]
|
||||
(px/schedule! scheduled-executor
|
||||
(inst-ms (::refresh state))
|
||||
(px/schedule! (inst-ms (::refresh state))
|
||||
(partial refresh-config cfg))
|
||||
state)]
|
||||
|
||||
|
@ -391,8 +385,7 @@
|
|||
(and (fs/exists? path) (fs/regular-file? path) path)))
|
||||
|
||||
(defmethod ig/pre-init-spec :app.rpc/rlimit [_]
|
||||
(s/keys :req [::wrk/executor
|
||||
::wrk/scheduled-executor]))
|
||||
(s/keys :req [::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/rlimit
|
||||
[_ {:keys [::wrk/executor] :as cfg}]
|
||||
|
|
|
@ -22,8 +22,7 @@
|
|||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[promesa.core :as p]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Storage Module State
|
||||
|
@ -79,42 +78,40 @@
|
|||
(update :metadata db/decode-transit-pgobject))))
|
||||
|
||||
(defn- create-database-object
|
||||
[{:keys [::backend ::wrk/executor ::db/pool-or-conn]} {:keys [::content ::expired-at ::touched-at] :as params}]
|
||||
(px/with-dispatch executor
|
||||
(let [id (uuid/random)
|
||||
[{:keys [::backend ::db/pool-or-conn]} {:keys [::content ::expired-at ::touched-at] :as params}]
|
||||
(let [id (uuid/random)
|
||||
mdata (cond-> (get-metadata params)
|
||||
(satisfies? impl/IContentHash content)
|
||||
(assoc :hash (impl/get-hash content)))
|
||||
|
||||
mdata (cond-> (get-metadata params)
|
||||
(satisfies? impl/IContentHash content)
|
||||
(assoc :hash (impl/get-hash content)))
|
||||
;; NOTE: for now we don't reuse the deleted objects, but in
|
||||
;; future we can consider reusing deleted objects if we
|
||||
;; find a duplicated one that is marked for deletion but
|
||||
;; still not deleted.
|
||||
result (when (and (::deduplicate? params)
|
||||
(:hash mdata)
|
||||
(:bucket mdata))
|
||||
(get-database-object-by-hash pool-or-conn backend (:bucket mdata) (:hash mdata)))
|
||||
|
||||
;; NOTE: for now we don't reuse the deleted objects, but in
|
||||
;; future we can consider reusing deleted objects if we
|
||||
;; find a duplicated one that is marked for deletion but
|
||||
;; still not deleted.
|
||||
result (when (and (::deduplicate? params)
|
||||
(:hash mdata)
|
||||
(:bucket mdata))
|
||||
(get-database-object-by-hash pool-or-conn backend (:bucket mdata) (:hash mdata)))
|
||||
result (or result
|
||||
(-> (db/insert! pool-or-conn :storage-object
|
||||
{:id id
|
||||
:size (impl/get-size content)
|
||||
:backend (name backend)
|
||||
:metadata (db/tjson mdata)
|
||||
:deleted-at expired-at
|
||||
:touched-at touched-at})
|
||||
(update :metadata db/decode-transit-pgobject)
|
||||
(update :metadata assoc ::created? true)))]
|
||||
|
||||
result (or result
|
||||
(-> (db/insert! pool-or-conn :storage-object
|
||||
{:id id
|
||||
:size (impl/get-size content)
|
||||
:backend (name backend)
|
||||
:metadata (db/tjson mdata)
|
||||
:deleted-at expired-at
|
||||
:touched-at touched-at})
|
||||
(update :metadata db/decode-transit-pgobject)
|
||||
(update :metadata assoc ::created? true)))]
|
||||
|
||||
(impl/storage-object
|
||||
(:id result)
|
||||
(:size result)
|
||||
(:created-at result)
|
||||
(:deleted-at result)
|
||||
(:touched-at result)
|
||||
backend
|
||||
(:metadata result)))))
|
||||
(impl/storage-object
|
||||
(:id result)
|
||||
(:size result)
|
||||
(:created-at result)
|
||||
(:deleted-at result)
|
||||
(:touched-at result)
|
||||
backend
|
||||
(:metadata result))))
|
||||
|
||||
(def ^:private sql:retrieve-storage-object
|
||||
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
|
||||
|
@ -153,45 +150,41 @@
|
|||
(dm/export impl/object?)
|
||||
|
||||
(defn get-object
|
||||
[{:keys [::db/pool-or-conn ::wrk/executor] :as storage} id]
|
||||
[{:keys [::db/pool-or-conn] :as storage} id]
|
||||
(us/assert! ::storage storage)
|
||||
(px/with-dispatch executor
|
||||
(retrieve-database-object pool-or-conn id)))
|
||||
(retrieve-database-object pool-or-conn id))
|
||||
|
||||
(defn put-object!
|
||||
"Creates a new object with the provided content."
|
||||
[{:keys [::backend] :as storage} {:keys [::content] :as params}]
|
||||
(us/assert! ::storage-with-backend storage)
|
||||
(us/assert! ::impl/content content)
|
||||
(->> (create-database-object storage params)
|
||||
(p/mcat (fn [object]
|
||||
(if (::created? (meta object))
|
||||
;; Store the data finally on the underlying storage subsystem.
|
||||
(-> (impl/resolve-backend storage backend)
|
||||
(impl/put-object object content))
|
||||
(p/resolved object))))))
|
||||
(let [object (create-database-object storage params)]
|
||||
(if (::created? (meta object))
|
||||
;; Store the data finally on the underlying storage subsystem.
|
||||
(-> (impl/resolve-backend storage backend)
|
||||
(impl/put-object object content))
|
||||
object)))
|
||||
|
||||
(defn touch-object!
|
||||
"Mark object as touched."
|
||||
[{:keys [::db/pool-or-conn ::wrk/executor] :as storage} object-or-id]
|
||||
[{:keys [::db/pool-or-conn] :as storage} object-or-id]
|
||||
(us/assert! ::storage storage)
|
||||
(px/with-dispatch executor
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
rs (db/update! pool-or-conn :storage-object
|
||||
{:touched-at (dt/now)}
|
||||
{:id id}
|
||||
{::db/return-keys? false})]
|
||||
(pos? (db/get-update-count rs)))))
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
rs (db/update! pool-or-conn :storage-object
|
||||
{:touched-at (dt/now)}
|
||||
{:id id}
|
||||
{::db/return-keys? false})]
|
||||
(pos? (db/get-update-count rs))))
|
||||
|
||||
(defn get-object-data
|
||||
"Return an input stream instance of the object content."
|
||||
[storage object]
|
||||
(us/assert! ::storage storage)
|
||||
(if (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(when (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(-> (impl/resolve-backend storage (:backend object))
|
||||
(impl/get-object-data object))
|
||||
(p/resolved nil)))
|
||||
(impl/get-object-data object))))
|
||||
|
||||
(defn get-object-bytes
|
||||
"Returns a byte array of object content."
|
||||
|
@ -208,11 +201,10 @@
|
|||
(get-object-url storage object nil))
|
||||
([storage object options]
|
||||
(us/assert! ::storage storage)
|
||||
(if (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(when (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(-> (impl/resolve-backend storage (:backend object))
|
||||
(impl/get-object-url object options))
|
||||
(p/resolved nil))))
|
||||
(impl/get-object-url object options)))))
|
||||
|
||||
(defn get-object-path
|
||||
"Get the Path to the object. Only works with `:fs` type of
|
||||
|
@ -220,24 +212,20 @@
|
|||
[storage object]
|
||||
(us/assert! ::storage storage)
|
||||
(let [backend (impl/resolve-backend storage (:backend object))]
|
||||
(if (not= :fs (::type backend))
|
||||
(p/resolved nil)
|
||||
(if (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(->> (impl/get-object-url backend object nil)
|
||||
(p/fmap file-url->path))
|
||||
(p/resolved nil)))))
|
||||
(when (and (= :fs (::type backend))
|
||||
(or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now))))
|
||||
(-> (impl/get-object-url backend object nil) file-url->path))))
|
||||
|
||||
(defn del-object!
|
||||
[{:keys [::db/pool-or-conn ::wrk/executor] :as storage} object-or-id]
|
||||
[{:keys [::db/pool-or-conn] :as storage} object-or-id]
|
||||
(us/assert! ::storage storage)
|
||||
(px/with-dispatch executor
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
res (db/update! pool-or-conn :storage-object
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id}
|
||||
{::db/return-keys? false})]
|
||||
(pos? (db/get-update-count res)))))
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
res (db/update! pool-or-conn :storage-object
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id}
|
||||
{::db/return-keys? false})]
|
||||
(pos? (db/get-update-count res))))
|
||||
|
||||
(dm/export impl/resolve-backend)
|
||||
(dm/export impl/calculate-hash)
|
||||
|
@ -281,7 +269,7 @@
|
|||
(doseq [id ids]
|
||||
(l/debug :hint "gc-deleted: permanently delete storage object" :backend backend-id :id id))
|
||||
|
||||
@(impl/del-objects-in-bulk backend ids)))]
|
||||
(impl/del-objects-in-bulk backend ids)))]
|
||||
|
||||
(fn [params]
|
||||
(let [min-age (or (:min-age params) min-age)]
|
||||
|
@ -422,8 +410,8 @@
|
|||
(ex/raise :type :internal
|
||||
:code :unexpected-unknown-reference
|
||||
:hint (dm/fmt "unknown reference %" bucket)))]
|
||||
(recur (+ to-freeze f)
|
||||
(+ to-delete d)
|
||||
(recur (+ to-freeze (long f))
|
||||
(+ to-delete (long d))
|
||||
(rest groups)))
|
||||
(do
|
||||
(l/info :hint "gc-touched: task finished" :to-freeze to-freeze :to-delete to-delete)
|
||||
|
|
|
@ -6,22 +6,18 @@
|
|||
|
||||
(ns app.storage.fs
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uri :as u]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.impl :as impl]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px])
|
||||
[integrant.core :as ig])
|
||||
(:import
|
||||
java.io.InputStream
|
||||
java.io.OutputStream
|
||||
java.nio.file.Path
|
||||
java.nio.file.Files))
|
||||
|
||||
|
@ -48,74 +44,66 @@
|
|||
(s/keys :req [::directory
|
||||
::uri]
|
||||
:opt [::sto/type
|
||||
::sto/id
|
||||
::wrk/executor]))
|
||||
::sto/id]))
|
||||
|
||||
;; --- API IMPL
|
||||
|
||||
(defmethod impl/put-object :fs
|
||||
[{:keys [::wrk/executor] :as backend} {:keys [id] :as object} content]
|
||||
[backend {:keys [id] :as object} content]
|
||||
(us/assert! ::backend backend)
|
||||
(px/with-dispatch executor
|
||||
(let [base (fs/path (::directory backend))
|
||||
path (fs/path (impl/id->path id))
|
||||
full (fs/normalize (fs/join base path))]
|
||||
(when-not (fs/exists? (fs/parent full))
|
||||
(fs/create-dir (fs/parent full)))
|
||||
(with-open [^InputStream src (io/input-stream content)
|
||||
^OutputStream dst (io/output-stream full)]
|
||||
(io/copy! src dst))
|
||||
(let [base (fs/path (::directory backend))
|
||||
path (fs/path (impl/id->path id))
|
||||
full (fs/normalize (fs/join base path))]
|
||||
|
||||
object)))
|
||||
(when-not (fs/exists? (fs/parent full))
|
||||
(fs/create-dir (fs/parent full)))
|
||||
|
||||
(dm/with-open [src (io/input-stream content)
|
||||
dst (io/output-stream full)]
|
||||
(io/copy! src dst))
|
||||
|
||||
object))
|
||||
|
||||
(defmethod impl/get-object-data :fs
|
||||
[{:keys [::wrk/executor] :as backend} {:keys [id] :as object}]
|
||||
[backend {:keys [id] :as object}]
|
||||
(us/assert! ::backend backend)
|
||||
(px/with-dispatch executor
|
||||
(let [^Path base (fs/path (::directory backend))
|
||||
^Path path (fs/path (impl/id->path id))
|
||||
^Path full (fs/normalize (fs/join base path))]
|
||||
(when-not (fs/exists? full)
|
||||
(ex/raise :type :internal
|
||||
:code :filesystem-object-does-not-exists
|
||||
:path (str full)))
|
||||
(io/input-stream full))))
|
||||
(let [^Path base (fs/path (::directory backend))
|
||||
^Path path (fs/path (impl/id->path id))
|
||||
^Path full (fs/normalize (fs/join base path))]
|
||||
(when-not (fs/exists? full)
|
||||
(ex/raise :type :internal
|
||||
:code :filesystem-object-does-not-exists
|
||||
:path (str full)))
|
||||
(io/input-stream full)))
|
||||
|
||||
(defmethod impl/get-object-bytes :fs
|
||||
[backend object]
|
||||
(->> (impl/get-object-data backend object)
|
||||
(p/fmap (fn [input]
|
||||
(try
|
||||
(io/read-as-bytes input)
|
||||
(finally
|
||||
(io/close! input)))))))
|
||||
(dm/with-open [input (impl/get-object-data backend object)]
|
||||
(io/read-as-bytes input)))
|
||||
|
||||
(defmethod impl/get-object-url :fs
|
||||
[{:keys [::uri] :as backend} {:keys [id] :as object} _]
|
||||
(us/assert! ::backend backend)
|
||||
(p/resolved
|
||||
(update uri :path
|
||||
(fn [existing]
|
||||
(if (str/ends-with? existing "/")
|
||||
(str existing (impl/id->path id))
|
||||
(str existing "/" (impl/id->path id)))))))
|
||||
(update uri :path
|
||||
(fn [existing]
|
||||
(if (str/ends-with? existing "/")
|
||||
(str existing (impl/id->path id))
|
||||
(str existing "/" (impl/id->path id))))))
|
||||
|
||||
(defmethod impl/del-object :fs
|
||||
[{:keys [::wrk/executor] :as backend} {:keys [id] :as object}]
|
||||
[backend {:keys [id] :as object}]
|
||||
(us/assert! ::backend backend)
|
||||
(px/with-dispatch executor
|
||||
(let [base (fs/path (::directory backend))
|
||||
path (fs/path (impl/id->path id))
|
||||
path (fs/join base path)]
|
||||
(Files/deleteIfExists ^Path path))))
|
||||
(let [base (fs/path (::directory backend))
|
||||
path (fs/path (impl/id->path id))
|
||||
path (fs/join base path)]
|
||||
(Files/deleteIfExists ^Path path)))
|
||||
|
||||
(defmethod impl/del-objects-in-bulk :fs
|
||||
[{:keys [::wrk/executor] :as backend} ids]
|
||||
[backend ids]
|
||||
(us/assert! ::backend backend)
|
||||
(px/with-dispatch executor
|
||||
(let [base (fs/path (::directory backend))]
|
||||
(doseq [id ids]
|
||||
(let [path (fs/path (impl/id->path id))
|
||||
path (fs/join base path)]
|
||||
(Files/deleteIfExists ^Path path))))))
|
||||
(let [base (fs/path (::directory backend))]
|
||||
(doseq [id ids]
|
||||
(let [path (fs/path (impl/id->path id))
|
||||
path (fs/join base path)]
|
||||
(Files/deleteIfExists ^Path path)))))
|
||||
|
||||
|
|
|
@ -153,8 +153,8 @@
|
|||
(content (.toPath ^java.io.File data) size)
|
||||
|
||||
(instance? String data)
|
||||
(let [data (.getBytes data "UTF-8")]
|
||||
(bytes->content data (alength data)))
|
||||
(let [data (.getBytes ^String data "UTF-8")]
|
||||
(bytes->content data (alength ^bytes data)))
|
||||
|
||||
(bytes? data)
|
||||
(bytes->content data (or size (alength ^bytes data)))
|
||||
|
@ -195,7 +195,7 @@
|
|||
|
||||
(defn calculate-hash
|
||||
[resource]
|
||||
(let [result (with-open [input (io/input-stream resource)]
|
||||
(let [result (dm/with-open [input (io/input-stream resource)]
|
||||
(-> (bh/blake2b-256 input)
|
||||
(bc/bytes->hex)))]
|
||||
(str "blake2b:" result)))
|
||||
|
|
|
@ -45,6 +45,7 @@
|
|||
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
|
||||
software.amazon.awssdk.regions.Region
|
||||
software.amazon.awssdk.services.s3.S3AsyncClient
|
||||
software.amazon.awssdk.services.s3.S3AsyncClientBuilder
|
||||
software.amazon.awssdk.services.s3.S3Configuration
|
||||
software.amazon.awssdk.services.s3.model.Delete
|
||||
software.amazon.awssdk.services.s3.model.DeleteObjectRequest
|
||||
|
@ -121,7 +122,7 @@
|
|||
(defmethod impl/put-object :s3
|
||||
[backend object content]
|
||||
(us/assert! ::backend backend)
|
||||
(put-object backend object content))
|
||||
(p/await! (put-object backend object content)))
|
||||
|
||||
(defmethod impl/get-object-data :s3
|
||||
[backend object]
|
||||
|
@ -135,12 +136,13 @@
|
|||
:cause cause))]
|
||||
|
||||
(-> (get-object-data backend object)
|
||||
(p/catch no-such-key? handle-not-found))))
|
||||
(p/catch no-such-key? handle-not-found)
|
||||
(p/await!))))
|
||||
|
||||
(defmethod impl/get-object-bytes :s3
|
||||
[backend object]
|
||||
(us/assert! ::backend backend)
|
||||
(get-object-bytes backend object))
|
||||
(p/await! (get-object-bytes backend object)))
|
||||
|
||||
(defmethod impl/get-object-url :s3
|
||||
[backend object options]
|
||||
|
@ -150,12 +152,12 @@
|
|||
(defmethod impl/del-object :s3
|
||||
[backend object]
|
||||
(us/assert! ::backend backend)
|
||||
(del-object backend object))
|
||||
(p/await! (del-object backend object)))
|
||||
|
||||
(defmethod impl/del-objects-in-bulk :s3
|
||||
[backend ids]
|
||||
(us/assert! ::backend backend)
|
||||
(del-object-in-bulk backend ids))
|
||||
(p/await! (del-object-in-bulk backend ids)))
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
|
@ -187,13 +189,17 @@
|
|||
(.writeTimeout default-timeout)
|
||||
(.build))
|
||||
|
||||
client (-> (S3AsyncClient/builder)
|
||||
(.serviceConfiguration ^S3Configuration sconfig)
|
||||
(.asyncConfiguration ^ClientAsyncConfiguration aconfig)
|
||||
(.httpClient ^NettyNioAsyncHttpClient hclient)
|
||||
(.region (lookup-region region))
|
||||
(cond-> (some? endpoint) (.endpointOverride (URI. endpoint)))
|
||||
(.build))]
|
||||
client (let [builder (S3AsyncClient/builder)
|
||||
builder (.serviceConfiguration ^S3AsyncClientBuilder builder ^S3Configuration sconfig)
|
||||
builder (.asyncConfiguration ^S3AsyncClientBuilder builder ^ClientAsyncConfiguration aconfig)
|
||||
builder (.httpClient ^S3AsyncClientBuilder builder ^NettyNioAsyncHttpClient hclient)
|
||||
builder (.region ^S3AsyncClientBuilder builder (lookup-region region))
|
||||
builder (cond-> ^S3AsyncClientBuilder builder
|
||||
(some? endpoint)
|
||||
(.endpointOverride (URI. endpoint)))]
|
||||
(.build ^S3AsyncClientBuilder builder))
|
||||
|
||||
]
|
||||
|
||||
(reify
|
||||
clojure.lang.IDeref
|
||||
|
@ -288,6 +294,7 @@
|
|||
^AsyncRequestBody rbody)
|
||||
(p/fmap (constantly object)))))
|
||||
|
||||
;; FIXME: research how to avoid reflection on close method
|
||||
(defn- path->stream
|
||||
[path]
|
||||
(proxy [FilterInputStream] [(io/input-stream path)]
|
||||
|
@ -347,8 +354,7 @@
|
|||
(getObjectRequest ^GetObjectRequest gor)
|
||||
(build))
|
||||
pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)]
|
||||
(p/resolved
|
||||
(u/uri (str (.url ^PresignedGetObjectRequest pgor))))))
|
||||
(u/uri (str (.url ^PresignedGetObjectRequest pgor)))))
|
||||
|
||||
(defn- del-object
|
||||
[{:keys [::bucket ::client ::prefix]} {:keys [id] :as obj}]
|
||||
|
|
|
@ -10,57 +10,59 @@
|
|||
the operating system cleaning task should be responsible for
|
||||
permanently deleting these files (look at systemd-tmpfiles)."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.storage :as-alias sto]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]))
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]))
|
||||
|
||||
(declare remove-temp-file)
|
||||
(defonce queue (a/chan 128))
|
||||
(declare ^:private remove-temp-file)
|
||||
(declare ^:private io-loop)
|
||||
|
||||
(defonce queue (sp/chan :buf 128))
|
||||
|
||||
(defmethod ig/pre-init-spec ::cleaner [_]
|
||||
(s/keys :req [::sto/min-age ::wrk/scheduled-executor]))
|
||||
(s/keys :req [::wrk/executor]))
|
||||
|
||||
(defmethod ig/prep-key ::cleaner
|
||||
[_ cfg]
|
||||
(merge {::sto/min-age (dt/duration "30m")}
|
||||
(d/without-nils cfg)))
|
||||
(assoc cfg ::min-age (dt/duration "30m")))
|
||||
|
||||
(defmethod ig/init-key ::cleaner
|
||||
[_ {:keys [::sto/min-age ::wrk/scheduled-executor] :as cfg}]
|
||||
(px/thread
|
||||
{:name "penpot/storage-tmp-cleaner"}
|
||||
(try
|
||||
(l/info :hint "started tmp file cleaner")
|
||||
(loop []
|
||||
(when-let [path (a/<!! queue)]
|
||||
(l/trace :hint "schedule tempfile deletion" :path path
|
||||
:expires-at (dt/plus (dt/now) min-age))
|
||||
(px/schedule! scheduled-executor
|
||||
(inst-ms min-age)
|
||||
(partial remove-temp-file path))
|
||||
(recur)))
|
||||
(catch InterruptedException _
|
||||
(l/debug :hint "interrupted"))
|
||||
(finally
|
||||
(l/info :hint "terminated tmp file cleaner")))))
|
||||
[_ cfg]
|
||||
(px/fn->thread (partial io-loop cfg)
|
||||
{:name "penpot/storage/tmp-cleaner" :virtual true}))
|
||||
|
||||
(defmethod ig/halt-key! ::cleaner
|
||||
[_ thread]
|
||||
(px/interrupt! thread))
|
||||
|
||||
(defn- io-loop
|
||||
[{:keys [::min-age] :as cfg}]
|
||||
(l/info :hint "started tmp file cleaner")
|
||||
(try
|
||||
(loop []
|
||||
(when-let [path (sp/take! queue)]
|
||||
(l/debug :hint "schedule tempfile deletion" :path path
|
||||
:expires-at (dt/plus (dt/now) min-age))
|
||||
(px/schedule! (inst-ms min-age) (partial remove-temp-file cfg path))
|
||||
(recur)))
|
||||
(catch InterruptedException _
|
||||
(l/trace :hint "cleaner interrupted"))
|
||||
(finally
|
||||
(l/info :hint "cleaner terminated"))))
|
||||
|
||||
(defn- remove-temp-file
|
||||
"Permanently delete tempfile"
|
||||
[path]
|
||||
(l/trace :hint "permanently delete tempfile" :path path)
|
||||
[{:keys [::wrk/executor path]}]
|
||||
(when (fs/exists? path)
|
||||
(fs/delete path)))
|
||||
(px/run! executor
|
||||
(fn []
|
||||
(l/debug :hint "permanently delete tempfile" :path path)
|
||||
(fs/delete path)))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; API
|
||||
|
@ -72,7 +74,7 @@
|
|||
:or {prefix "penpot."
|
||||
suffix ".tmp"}}]
|
||||
(let [candidate (fs/tempfile :suffix suffix :prefix prefix)]
|
||||
(a/offer! queue candidate)
|
||||
(sp/offer! queue candidate)
|
||||
candidate))
|
||||
|
||||
(defn create-tempfile
|
||||
|
@ -80,5 +82,5 @@
|
|||
:or {prefix "penpot."
|
||||
suffix ".tmp"}}]
|
||||
(let [path (fs/create-tempfile :suffix suffix :prefix prefix)]
|
||||
(a/offer! queue path)
|
||||
(sp/offer! queue path)
|
||||
path))
|
||||
|
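Putting the pieces of the tmp-file cleaner together: creating a tempfile offers its path to the CSP queue, the virtual-thread io-loop picks it up and schedules removal min-age (30m by default) later, and the delete itself runs on the worker executor. A hedged usage sketch; the namespace alias is an assumption and the keyword-argument calling convention is inferred from the destructuring shown in the hunks above:

(require '[app.storage.tmp :as tmp])   ;; assumed namespace for the file shown above

;; Returns a fresh path immediately; the path is also registered on the
;; internal queue, so the cleaner thread will schedule its deletion later.
(def export-path (tmp/create-tempfile :prefix "penpot.export." :suffix ".zip"))

(spit (str export-path) "...intermediate data...")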
|
|
@ -85,7 +85,7 @@
|
|||
|
||||
;; Mark as deleted the storage object related to the
|
||||
;; photo-id field.
|
||||
(some->> photo-id (sto/touch-object! storage) deref)
|
||||
(some->> photo-id (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the profile.
|
||||
(db/delete! conn :profile {:id id})
|
||||
|
@ -117,7 +117,7 @@
|
|||
|
||||
;; Mark as deleted the storage object related to the
|
||||
;; photo-id field.
|
||||
(some->> photo-id (sto/touch-object! storage) deref)
|
||||
(some->> photo-id (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the team.
|
||||
(db/delete! conn :team {:id id})
|
||||
|
@ -184,10 +184,10 @@
|
|||
(l/debug :hint "permanently delete font variant" :id (str id))
|
||||
|
||||
;; Mark as deleted all the related storage objects
|
||||
(some->> (:woff1-file-id font) (sto/touch-object! storage) deref)
|
||||
(some->> (:woff2-file-id font) (sto/touch-object! storage) deref)
|
||||
(some->> (:otf-file-id font) (sto/touch-object! storage) deref)
|
||||
(some->> (:ttf-file-id font) (sto/touch-object! storage) deref)
|
||||
(some->> (:woff1-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:woff2-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:otf-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:ttf-file-id font) (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the team font variant
|
||||
(db/delete! conn :team-font-variant {:id id})
|
||||
|
|
69
backend/src/app/util/cache.clj
Normal file
|
@ -0,0 +1,69 @@
|
|||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.util.cache
|
||||
"In-memory cache backed by Caffeine"
|
||||
(:refer-clojure :exclude [get])
|
||||
(:require
|
||||
[app.util.time :as dt]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px])
|
||||
(:import
|
||||
com.github.benmanes.caffeine.cache.AsyncCache
|
||||
com.github.benmanes.caffeine.cache.AsyncLoadingCache
|
||||
com.github.benmanes.caffeine.cache.CacheLoader
|
||||
com.github.benmanes.caffeine.cache.Caffeine
|
||||
com.github.benmanes.caffeine.cache.RemovalListener
|
||||
java.time.Duration
|
||||
java.util.concurrent.Executor
|
||||
java.util.function.Function))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn create-listener
|
||||
[f]
|
||||
(reify RemovalListener
|
||||
(onRemoval [_ key val cause]
|
||||
(when val
|
||||
(f key val cause)))))
|
||||
|
||||
(defn create-loader
|
||||
[f]
|
||||
(reify CacheLoader
|
||||
(load [_ key]
|
||||
(f key))))
|
||||
|
||||
(defn create
|
||||
[& {:keys [executor on-remove load-fn keepalive]}]
|
||||
(as-> (Caffeine/newBuilder) builder
|
||||
(if on-remove (.removalListener builder (create-listener on-remove)) builder)
|
||||
(if executor (.executor builder ^Executor (px/resolve-executor executor)) builder)
|
||||
(if keepalive (.expireAfterAccess builder ^Duration (dt/duration keepalive)) builder)
|
||||
(if load-fn
|
||||
(.buildAsync builder ^CacheLoader (create-loader load-fn))
|
||||
(.buildAsync builder))))
|
||||
|
||||
(defn invalidate-all!
|
||||
[^AsyncCache cache]
|
||||
(.invalidateAll (.synchronous cache)))
|
||||
|
||||
(defn get
|
||||
([cache key]
|
||||
(assert (instance? AsyncLoadingCache cache) "should be AsyncLoadingCache instance")
|
||||
(p/await! (.get ^AsyncLoadingCache cache ^Object key)))
|
||||
([cache key not-found-fn]
|
||||
(assert (instance? AsyncCache cache) "should be AsyncCache instance")
|
||||
(p/await! (.get ^AsyncCache cache
|
||||
^Object key
|
||||
^Function (reify
|
||||
Function
|
||||
(apply [_ key]
|
||||
(not-found-fn key)))))))
|
||||
|
||||
(defn cache?
|
||||
[o]
|
||||
(or (instance? AsyncCache o)
|
||||
(instance? AsyncLoadingCache o)))
|
|
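The new namespace wraps Caffeine's async API behind a handful of helpers. A sketch of how they compose, assuming the app.util.cache namespace above is on the classpath; keys and values are illustrative only:

(require '[app.util.cache :as cache])

;; A loading cache: values are computed on miss by :load-fn and evicted
;; 30 minutes after the last access.
(def thumbnails
  (cache/create :keepalive "30m"
                :load-fn (fn [k] (str "thumb-for-" k))))

(cache/get thumbnails 42)          ;; miss: load-fn runs, result is awaited and returned
(cache/get thumbnails 42)          ;; hit: same value, no load-fn call
(cache/invalidate-all! thumbnails) ;; drop all entries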
@ -45,9 +45,9 @@
|
|||
(map second)
|
||||
(filter #(::spec (meta %)))
|
||||
(map (fn [fvar]
|
||||
(with-meta (deref fvar)
|
||||
(-> (meta fvar)
|
||||
(assoc :ns (-> ns ns-name str)))))))))))
|
||||
[(deref fvar)
|
||||
(-> (meta fvar)
|
||||
(assoc :ns (-> ns ns-name str)))])))))))
|
||||
|
||||
(defn scan-ns
|
||||
[& nsyms]
|
||||
|
|
|
@ -6,27 +6,30 @@
|
|||
|
||||
(ns app.util.svg
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[clojure.xml :as xml]
|
||||
[cuerdas.core :as str])
|
||||
(:import
|
||||
javax.xml.XMLConstants
|
||||
java.io.InputStream
|
||||
javax.xml.parsers.SAXParserFactory
|
||||
clojure.lang.XMLHandler
|
||||
org.apache.commons.io.IOUtils))
|
||||
|
||||
(defn- secure-parser-factory
|
||||
[s ch]
|
||||
[^InputStream input ^XMLHandler handler]
|
||||
(.. (doto (SAXParserFactory/newInstance)
|
||||
(.setFeature XMLConstants/FEATURE_SECURE_PROCESSING true)
|
||||
(.setFeature "http://apache.org/xml/features/disallow-doctype-decl" true))
|
||||
(newSAXParser)
|
||||
(parse s ch)))
|
||||
(parse input handler)))
|
||||
|
||||
(defn parse
|
||||
[data]
|
||||
[^String data]
|
||||
(try
|
||||
(with-open [istream (IOUtils/toInputStream data "UTF-8")]
|
||||
(dm/with-open [istream (IOUtils/toInputStream data "UTF-8")]
|
||||
(xml/parse istream secure-parser-factory))
|
||||
(catch Exception e
|
||||
(l/warn :hint "error on processing svg"
|
||||
|
|
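The parser factory above turns on FEATURE_SECURE_PROCESSING and rejects DOCTYPE declarations, which is what neutralises XXE payloads hidden in uploaded SVGs. A hedged sketch of the observable behaviour (the result shape is plain clojure.xml/parse output; what the catch branch returns is outside the hunk shown):

(require '[app.util.svg :as svg])

;; A well-formed document parses into the usual clojure.xml tree.
(svg/parse "<svg><rect width=\"10\" height=\"10\"/></svg>")
;; => {:tag :svg, :attrs nil,
;;     :content [{:tag :rect, :attrs {:width "10", :height "10"}, :content nil}]}

;; A DOCTYPE (the classic XXE vector) makes the secured SAX parser throw,
;; which lands in the catch Exception branch above and logs a warning.
(svg/parse "<!DOCTYPE svg [<!ENTITY x SYSTEM \"file:///etc/passwd\">]><svg>&x;</svg>")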
|
@ -19,6 +19,7 @@
|
|||
java.time.ZonedDateTime
|
||||
java.time.format.DateTimeFormatter
|
||||
java.time.temporal.ChronoUnit
|
||||
java.time.temporal.Temporal
|
||||
java.time.temporal.TemporalAmount
|
||||
java.time.temporal.TemporalUnit
|
||||
java.util.Date
|
||||
|
@ -160,11 +161,29 @@
|
|||
|
||||
(defn plus
|
||||
[d ta]
|
||||
(.plus d ^TemporalAmount (duration ta)))
|
||||
(let [^TemporalAmount ta (duration ta)]
|
||||
(cond
|
||||
(instance? Duration d)
|
||||
(.plus ^Duration d ta)
|
||||
|
||||
(instance? Temporal d)
|
||||
(.plus ^Temporal d ta)
|
||||
|
||||
:else
|
||||
(throw (UnsupportedOperationException. "unsupported type")))))
|
||||
|
||||
(defn minus
|
||||
[d ta]
|
||||
(.minus d ^TemporalAmount (duration ta)))
|
||||
(let [^TemporalAmount ta (duration ta)]
|
||||
(cond
|
||||
(instance? Duration d)
|
||||
(.minus ^Duration d ta)
|
||||
|
||||
(instance? Temporal d)
|
||||
(.minus ^Temporal d ta)
|
||||
|
||||
:else
|
||||
(throw (UnsupportedOperationException. "unsupported type")))))
|
||||
|
||||
(defn now
|
||||
[]
|
||||
|
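The reworked plus/minus dispatch on the concrete type (Duration vs Temporal) so the interop calls are resolved against the concrete classes instead of going through a single untyped call. A quick illustration of the two branches, with the same aliases as in the hunk above:

;; Temporal branch: shift an Instant by a duration (this is exactly how the
;; tmp cleaner computes :expires-at earlier in this diff).
(dt/plus (dt/now) (dt/duration "30m"))

;; Duration branch: durations themselves can be shifted as well.
(dt/plus (dt/duration "1h") (dt/duration "30m"))   ;; => PT1H30M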
|
|
@ -5,7 +5,7 @@
|
|||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.util.websocket
|
||||
"A general protocol implementation on top of websockets."
|
||||
"A general protocol implementation on top of websockets using vthreads."
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
|
@ -13,22 +13,42 @@
|
|||
[app.common.uuid :as uuid]
|
||||
[app.loggers.audit :refer [parse-client-ip]]
|
||||
[app.util.time :as dt]
|
||||
[clojure.core.async :as a]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]
|
||||
[yetti.request :as yr]
|
||||
[yetti.util :as yu]
|
||||
[yetti.websocket :as yws])
|
||||
(:import
|
||||
java.nio.ByteBuffer))
|
||||
|
||||
(declare decode-beat)
|
||||
(declare encode-beat)
|
||||
(declare start-io-loop)
|
||||
(declare ws-ping!)
|
||||
(declare ws-send!)
|
||||
(declare filter-options)
|
||||
|
||||
(def noop (constantly nil))
|
||||
(def identity-3 (fn [_ _ o] o))
|
||||
(def max-missed-heartbeats 3)
|
||||
(def heartbeat-interval 5000)
|
||||
|
||||
(defn- encode-beat
|
||||
[n]
|
||||
(doto (ByteBuffer/allocate 8)
|
||||
(.putLong n)
|
||||
(.rewind)))
|
||||
|
||||
(defn- decode-beat
|
||||
[^ByteBuffer buffer]
|
||||
(when (= 8 (.capacity buffer))
|
||||
(.rewind buffer)
|
||||
(.getLong buffer)))
|
||||
|
||||
(defn- wrap-handler
|
||||
[handler]
|
||||
(fn [wsp message]
|
||||
(try
|
||||
(handler wsp message)
|
||||
(catch Throwable cause
|
||||
(if (ex/error? cause)
|
||||
{:type :error :error (ex-data cause)}
|
||||
{:type :error :error {:message (ex-message cause)}})))))
|
||||
|
||||
(declare start-io-loop!)
|
||||
|
||||
(defn handler
|
||||
"A WebSocket upgrade handler factory. Returns a handler that can be
|
||||
|
@ -46,12 +66,11 @@
|
|||
::on-connect
|
||||
::input-buff-size
|
||||
::output-buff-size
|
||||
::handler
|
||||
::idle-timeout]
|
||||
:or {input-buff-size 64
|
||||
output-buff-size 64
|
||||
idle-timeout 60000
|
||||
on-connect noop
|
||||
on-connect identity
|
||||
on-snd-message identity-3
|
||||
on-rcv-message identity-3}
|
||||
:as options}]
|
||||
|
@ -61,91 +80,65 @@
|
|||
(assert (fn? on-connect) "'on-connect' should be a function")
|
||||
|
||||
(fn [{:keys [::yws/channel] :as request}]
|
||||
(let [input-ch (a/chan input-buff-size)
|
||||
output-ch (a/chan output-buff-size)
|
||||
hbeat-ch (a/chan (a/sliding-buffer 6))
|
||||
close-ch (a/chan)
|
||||
stop-ch (a/chan)
|
||||
(let [input-ch (sp/chan :buf input-buff-size)
|
||||
output-ch (sp/chan :buf output-buff-size)
|
||||
hbeat-ch (sp/chan :buf (sp/sliding-buffer 6))
|
||||
close-ch (sp/chan)
|
||||
|
||||
ip-addr (parse-client-ip request)
|
||||
uagent (yr/get-header request "user-agent")
|
||||
id (uuid/next)
|
||||
state (atom {})
|
||||
beats (atom #{})
|
||||
|
||||
options (-> (filter-options options)
|
||||
(merge {::id id
|
||||
::created-at (dt/now)
|
||||
::input-ch input-ch
|
||||
::heartbeat-ch hbeat-ch
|
||||
::output-ch output-ch
|
||||
::close-ch close-ch
|
||||
::stop-ch stop-ch
|
||||
::channel channel
|
||||
::remote-addr ip-addr
|
||||
::user-agent uagent})
|
||||
(atom))
|
||||
|
||||
;; call the on-connect hook and memoize the on-terminate instance
|
||||
on-terminate (on-connect options)
|
||||
options (-> options
|
||||
(update ::handler wrap-handler)
|
||||
(assoc ::id id)
|
||||
(assoc ::state state)
|
||||
(assoc ::beats beats)
|
||||
(assoc ::created-at (dt/now))
|
||||
(assoc ::input-ch input-ch)
|
||||
(assoc ::heartbeat-ch hbeat-ch)
|
||||
(assoc ::output-ch output-ch)
|
||||
(assoc ::close-ch close-ch)
|
||||
(assoc ::channel channel)
|
||||
(assoc ::remote-addr ip-addr)
|
||||
(assoc ::user-agent uagent)
|
||||
(on-connect))
|
||||
|
||||
on-ws-open
|
||||
(fn [channel]
|
||||
(l/trace :fn "on-ws-open" :conn-id id)
|
||||
(yws/idle-timeout! channel (dt/duration idle-timeout)))
|
||||
(let [timeout (dt/duration idle-timeout)
|
||||
name (str "penpot/websocket/io-loop/" id)]
|
||||
(yws/idle-timeout! channel timeout)
|
||||
(px/fn->thread (partial start-io-loop! options)
|
||||
{:name name :virtual true})))
|
||||
|
||||
on-ws-terminate
|
||||
(fn [_ code reason]
|
||||
(l/trace :fn "on-ws-terminate" :conn-id id :code code :reason reason)
|
||||
(a/close! close-ch))
|
||||
(l/trace :fn "on-ws-terminate"
|
||||
:conn-id id
|
||||
:code code
|
||||
:reason reason)
|
||||
(sp/close! close-ch))
|
||||
|
||||
on-ws-error
|
||||
(fn [_ error]
|
||||
(when-not (or (instance? java.nio.channels.ClosedChannelException error)
|
||||
(instance? java.net.SocketException error)
|
||||
(instance? java.io.IOException error))
|
||||
(l/error :fn "on-ws-error" :conn-id id
|
||||
:hint (ex-message error)
|
||||
:cause error))
|
||||
(on-ws-terminate nil 8801 "close after error"))
|
||||
(fn [_ cause]
|
||||
(sp/close! close-ch cause))
|
||||
|
||||
on-ws-message
|
||||
(fn [_ message]
|
||||
(try
|
||||
(let [message (on-rcv-message options message)
|
||||
message (t/decode-str message)]
|
||||
(a/offer! input-ch message)
|
||||
(swap! options assoc ::last-activity-at (dt/now)))
|
||||
(catch Throwable e
|
||||
(l/warn :hint "error on decoding incoming message from websocket"
|
||||
:wsmsg (pr-str message)
|
||||
:cause e)
|
||||
(a/>! close-ch [8802 "decode error"])
|
||||
(a/close! close-ch))))
|
||||
(sp/offer! input-ch message)
|
||||
(swap! state assoc ::last-activity-at (dt/now)))
|
||||
|
||||
on-ws-pong
|
||||
(fn [_ buffers]
|
||||
(a/>!! hbeat-ch (yu/copy-many buffers)))]
|
||||
;; (l/trace :fn "on-ws-pong" :buffers (pr-str buffers))
|
||||
(sp/put! hbeat-ch (yu/copy-many buffers)))]
|
||||
|
||||
;; Wait a close signal
|
||||
(a/go
|
||||
(let [[code reason] (a/<! close-ch)]
|
||||
(a/close! stop-ch)
|
||||
(a/close! hbeat-ch)
|
||||
(a/close! output-ch)
|
||||
(a/close! input-ch)
|
||||
|
||||
(when (and code reason)
|
||||
(l/trace :hint "close channel condition" :code code :reason reason)
|
||||
(yws/close! channel code reason))
|
||||
|
||||
(when (fn? on-terminate)
|
||||
(on-terminate))
|
||||
|
||||
(l/trace :hint "connection terminated")))
|
||||
|
||||
;; React on messages received from the client
|
||||
(a/go
|
||||
(a/<! (start-io-loop options handler on-snd-message on-ws-terminate))
|
||||
(l/trace :hint "io loop terminated"))
|
||||
(yws/on-close! channel (fn [_]
|
||||
(sp/close! close-ch)))
|
||||
|
||||
{:on-open on-ws-open
|
||||
:on-error on-ws-error
|
||||
|
@ -153,118 +146,81 @@
|
|||
:on-text on-ws-message
|
||||
:on-pong on-ws-pong})))
|
||||
|
||||
(defn- ws-send!
|
||||
[channel s]
|
||||
(let [ch (a/chan 1)]
|
||||
(defn- handle-ping!
|
||||
[{:keys [::id ::beats ::channel] :as wsp} beat-id]
|
||||
(l/trace :hint "ping" :beat beat-id :conn-id id)
|
||||
(yws/ping! channel (encode-beat beat-id))
|
||||
(let [issued (swap! beats conj (long beat-id))]
|
||||
(not (>= (count issued) max-missed-heartbeats))))
|
||||
|
||||
(defn- start-io-loop!
|
||||
[{:keys [::id ::close-ch ::input-ch ::output-ch ::heartbeat-ch ::channel ::handler ::beats ::on-rcv-message ::on-snd-message] :as wsp}]
|
||||
(px/thread
|
||||
{:name (str "penpot/websocket/io-loop/" id)
|
||||
:virtual true}
|
||||
(try
|
||||
(yws/send! channel s (fn [e]
|
||||
(when e (a/offer! ch e))
|
||||
(a/close! ch)))
|
||||
(handler wsp {:type :open})
|
||||
(loop [i 0]
|
||||
(let [ping-ch (sp/timeout-chan heartbeat-interval)
|
||||
[msg p] (sp/alts! [close-ch input-ch output-ch heartbeat-ch ping-ch])]
|
||||
(when (yws/connected? channel)
|
||||
(cond
|
||||
(identical? p ping-ch)
|
||||
(if (handle-ping! wsp i)
|
||||
(recur (inc i))
|
||||
(yws/close! channel 8802 "missed too many pings"))
|
||||
|
||||
(or (identical? p close-ch) (nil? msg))
|
||||
(do :nothing)
|
||||
|
||||
(identical? p heartbeat-ch)
|
||||
(let [beat (decode-beat msg)]
|
||||
;; (l/trace :hint "pong" :beat beat :conn-id id)
|
||||
(swap! beats disj beat)
|
||||
(recur i))
|
||||
|
||||
(identical? p input-ch)
|
||||
(let [message (t/decode-str msg)
|
||||
message (on-rcv-message message)
|
||||
{:keys [request-id] :as response} (handler wsp message)]
|
||||
(when (map? response)
|
||||
(sp/put! output-ch
|
||||
(cond-> response
|
||||
(some? request-id)
|
||||
(assoc :request-id request-id))))
|
||||
(recur i))
|
||||
|
||||
(identical? p output-ch)
|
||||
(let [message (on-snd-message msg)
|
||||
message (t/encode-str message {:type :json-verbose})]
|
||||
;; (l/trace :hint "writing message to output" :message msg)
|
||||
(yws/send! channel message)
|
||||
(recur i))))))
|
||||
|
||||
(catch java.nio.channels.ClosedChannelException _)
|
||||
(catch java.net.SocketException _)
|
||||
(catch java.io.IOException _)
|
||||
|
||||
(catch InterruptedException _
|
||||
(l/debug :hint "websocket thread interrumpted" :conn-id id))
|
||||
|
||||
(catch Throwable cause
|
||||
(a/offer! ch cause)
|
||||
(a/close! ch)))
|
||||
ch))
|
||||
(l/error :hint "unhandled exception on websocket thread"
|
||||
:conn-id id
|
||||
:cause cause))
|
||||
|
||||
(defn- ws-ping!
|
||||
[channel s]
|
||||
(let [ch (a/chan 1)]
|
||||
(try
|
||||
(yws/ping! channel s (fn [e]
|
||||
(when e (a/offer! ch e))
|
||||
(a/close! ch)))
|
||||
(catch Throwable cause
|
||||
(a/offer! ch cause)
|
||||
(a/close! ch)))
|
||||
ch))
|
||||
(finally
|
||||
(handler wsp {:type :close})
|
||||
|
||||
(defn- encode-beat
|
||||
[n]
|
||||
(doto (ByteBuffer/allocate 8)
|
||||
(.putLong n)
|
||||
(.rewind)))
|
||||
(when (yws/connected? channel)
|
||||
;; NOTE: we need to ignore all exceptions here because
|
||||
;; there can be a race condition that first returns that
|
||||
;; channel is connected but on closing, will raise that
|
||||
;; channel is already closed.
|
||||
(ex/ignoring
|
||||
(yws/close! channel 8899 "terminated")))
|
||||
|
||||
(defn- decode-beat
|
||||
[^ByteBuffer buffer]
|
||||
(when (= 8 (.capacity buffer))
|
||||
(.rewind buffer)
|
||||
(.getLong buffer)))
|
||||
(when-let [on-disconnect (::on-disconnect wsp)]
|
||||
(on-disconnect))
|
||||
|
||||
(defn- wrap-handler
|
||||
[handler]
|
||||
(fn [wsp message]
|
||||
(locking wsp
|
||||
(handler wsp message))))
|
||||
|
||||
(def max-missed-heartbeats 3)
|
||||
(def heartbeat-interval 5000)
|
||||
|
||||
(defn- start-io-loop
|
||||
[wsp handler on-snd-message on-ws-terminate]
|
||||
(let [input-ch (::input-ch @wsp)
|
||||
output-ch (::output-ch @wsp)
|
||||
stop-ch (::stop-ch @wsp)
|
||||
hbeat-pong-ch (::heartbeat-ch @wsp)
|
||||
channel (::channel @wsp)
|
||||
conn-id (::id @wsp)
|
||||
handler (wrap-handler handler)
|
||||
beats (atom #{})
|
||||
choices [stop-ch
|
||||
input-ch
|
||||
output-ch
|
||||
hbeat-pong-ch]]
|
||||
|
||||
;; Start IO loop
|
||||
(a/go
|
||||
(a/<! (handler wsp {:type :connect}))
|
||||
(a/<! (a/go-loop [i 0]
|
||||
(let [hbeat-ping-ch (a/timeout heartbeat-interval)
|
||||
[v p] (a/alts! (conj choices hbeat-ping-ch))]
|
||||
(cond
|
||||
(not (yws/connected? channel))
|
||||
(on-ws-terminate nil 8800 "channel disconnected")
|
||||
|
||||
(= p hbeat-ping-ch)
|
||||
(do
|
||||
(l/trace :hint "ping" :beat i :conn-id conn-id)
|
||||
(a/<! (ws-ping! channel (encode-beat i)))
|
||||
(let [issued (swap! beats conj (long i))]
|
||||
(if (>= (count issued) max-missed-heartbeats)
|
||||
(on-ws-terminate nil 8802 "heartbeat: timeout")
|
||||
(recur (inc i)))))
|
||||
|
||||
(= p hbeat-pong-ch)
|
||||
(let [beat (decode-beat v)]
|
||||
(l/trace :hint "pong" :beat beat :conn-id conn-id)
|
||||
(swap! beats disj beat)
|
||||
(recur i))
|
||||
|
||||
(= p input-ch)
|
||||
(let [result (a/<! (handler wsp v))]
|
||||
;; (l/trace :hint "message received" :message v)
|
||||
(cond
|
||||
(ex/error? result)
|
||||
(a/>! output-ch {:type :error :error (ex-data result)})
|
||||
|
||||
(ex/exception? result)
|
||||
(a/>! output-ch {:type :error :error {:message (ex-message result)}})
|
||||
|
||||
(map? result)
|
||||
(a/>! output-ch (cond-> result (:request-id v) (assoc :request-id (:request-id v)))))
|
||||
(recur i))
|
||||
|
||||
(= p output-ch)
|
||||
(let [v (on-snd-message wsp v)]
|
||||
;; (l/trace :hint "writing message to output" :message v)
|
||||
(a/<! (ws-send! channel (t/encode-str v)))
|
||||
(recur i))))))
|
||||
|
||||
(a/<! (handler wsp {:type :disconnect})))))
|
||||
|
||||
(defn- filter-options
|
||||
"Remove from options all namespace qualified keys that matches the
|
||||
current namespace."
|
||||
[options]
|
||||
(into {}
|
||||
(remove (fn [[key]]
|
||||
(= (namespace key) "app.util.websocket")))
|
||||
options))
|
||||
(l/trace :hint "websocket thread terminated" :conn-id id)))))
|
||||
|
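Both the websocket io-loop above and the storage tmp cleaner follow the same shape: one dedicated virtual thread started with px/fn->thread, blocking take!/alts! on promesa CSP channels, and px/interrupt! at halt time. A minimal self-contained sketch of that pattern (thread name and payloads are illustrative; promesa 11.x assumed):

(require '[promesa.exec :as px]
         '[promesa.exec.csp :as sp])

(def queue (sp/chan :buf 16))

(def worker
  ;; fn->thread runs the fn on a new (here virtual) thread and returns the
  ;; Thread object, which the halt hook can later interrupt.
  (px/fn->thread
   (fn []
     (loop []
       (when-let [item (sp/take! queue)]   ; blocks only this virtual thread
         (println "processing" item)
         (recur))))
   {:name "example/io-loop" :virtual true}))

(sp/put! queue {:type :ping})
(px/interrupt! worker)   ;; the same mechanism ig/halt-key! uses above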
|
|
@ -22,17 +22,16 @@
|
|||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px])
|
||||
(:import
|
||||
java.util.concurrent.ExecutorService
|
||||
java.util.concurrent.ForkJoinPool
|
||||
java.util.concurrent.Future
|
||||
java.util.concurrent.ScheduledExecutorService))
|
||||
java.util.concurrent.Future))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(s/def ::executor #(instance? ExecutorService %))
|
||||
(s/def ::scheduled-executor #(instance? ScheduledExecutorService %))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Executor
|
||||
|
@ -47,29 +46,23 @@
|
|||
[skey {:keys [::parallelism]}]
|
||||
(let [prefix (if (vector? skey) (-> skey first name) "default")
|
||||
tname (str "penpot/" prefix "/%s")
|
||||
factory (px/forkjoin-thread-factory :name tname)]
|
||||
(px/forkjoin-executor
|
||||
:factory factory
|
||||
:parallelism parallelism
|
||||
:async? true)))
|
||||
ttype (cf/get :worker-executor-type :fjoin)]
|
||||
(case ttype
|
||||
:fjoin
|
||||
(let [factory (px/forkjoin-thread-factory :name tname)]
|
||||
(px/forkjoin-executor {:factory factory
|
||||
:core-size (px/get-available-processors)
|
||||
:parallelism parallelism
|
||||
:async true}))
|
||||
|
||||
:cached
|
||||
(let [factory (px/thread-factory :name tname)]
|
||||
(px/cached-executor :factory factory)))))
|
||||
|
||||
(defmethod ig/halt-key! ::executor
|
||||
[_ instance]
|
||||
(px/shutdown! instance))
|
||||
|
||||
(defmethod ig/pre-init-spec ::scheduled-executor [_]
|
||||
(s/keys :req [::parallelism]))
|
||||
|
||||
(defmethod ig/init-key ::scheduled-executor
|
||||
[_ {:keys [::parallelism]}]
|
||||
(px/scheduled-executor
|
||||
:parallelism parallelism
|
||||
:factory (px/thread-factory :name "penpot/scheduled-executor/%s")))
|
||||
|
||||
(defmethod ig/halt-key! ::scheduled-executor
|
||||
[_ instance]
|
||||
(px/shutdown! instance))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASKS REGISTRY
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
@ -147,7 +140,7 @@
|
|||
steals))]
|
||||
|
||||
(px/thread
|
||||
{:name "penpot/executors-monitor"}
|
||||
{:name "penpot/executors-monitor" :virtual true}
|
||||
(l/info :hint "monitor: started" :name name)
|
||||
(try
|
||||
(loop [steals 0]
|
||||
|
@ -220,53 +213,48 @@
|
|||
:queued res)))
|
||||
|
||||
(run-batch! [rconn]
|
||||
(db/with-atomic [conn pool]
|
||||
(when-let [tasks (get-tasks conn)]
|
||||
(->> (group-by :queue tasks)
|
||||
(run! (partial push-tasks! conn rconn)))
|
||||
true)))]
|
||||
(try
|
||||
(db/with-atomic [conn pool]
|
||||
(if-let [tasks (get-tasks conn)]
|
||||
(->> (group-by :queue tasks)
|
||||
(run! (partial push-tasks! conn rconn)))
|
||||
(px/sleep (::wait-duration cfg))))
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Exception cause
|
||||
(cond
|
||||
(rds/exception? cause)
|
||||
(do
|
||||
(l/warn :hint "dispatcher: redis exception (will retry in an instant)" :cause cause)
|
||||
(px/sleep (::rds/timeout rconn)))
|
||||
|
||||
(db/sql-exception? cause)
|
||||
(do
|
||||
(l/warn :hint "dispatcher: database exception (will retry in an instant)" :cause cause)
|
||||
(px/sleep (::rds/timeout rconn)))
|
||||
|
||||
:else
|
||||
(do
|
||||
(l/error :hint "dispatcher: unhandled exception (will retry in an instant)" :cause cause)
|
||||
(px/sleep (::rds/timeout rconn)))))))
|
||||
|
||||
(dispatcher []
|
||||
(l/info :hint "dispatcher: started")
|
||||
(try
|
||||
(dm/with-open [rconn (rds/connect redis)]
|
||||
(loop []
|
||||
(run-batch! rconn)
|
||||
(recur)))
|
||||
(catch InterruptedException _
|
||||
(l/trace :hint "dispatcher: interrupted"))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "dispatcher: unexpected exception" :cause cause))
|
||||
(finally
|
||||
(l/info :hint "dispatcher: terminated"))))]
|
||||
|
||||
(if (db/read-only? pool)
|
||||
(l/warn :hint "dispatcher: not started (db is read-only)")
|
||||
(px/thread
|
||||
{:name "penpot/worker-dispatcher"}
|
||||
(l/info :hint "dispatcher: started")
|
||||
(try
|
||||
(dm/with-open [rconn (rds/connect redis)]
|
||||
(loop []
|
||||
(when (px/interrupted?)
|
||||
(throw (InterruptedException. "interrupted")))
|
||||
|
||||
(try
|
||||
(when-not (run-batch! rconn)
|
||||
(px/sleep (::wait-duration cfg)))
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Exception cause
|
||||
(cond
|
||||
(rds/exception? cause)
|
||||
(do
|
||||
(l/warn :hint "dispatcher: redis exception (will retry in an instant)" :cause cause)
|
||||
(px/sleep (::rds/timeout rconn)))
|
||||
|
||||
(db/sql-exception? cause)
|
||||
(do
|
||||
(l/warn :hint "dispatcher: database exception (will retry in an instant)" :cause cause)
|
||||
(px/sleep (::rds/timeout rconn)))
|
||||
|
||||
:else
|
||||
(do
|
||||
(l/error :hint "dispatcher: unhandled exception (will retry in an instant)" :cause cause)
|
||||
(px/sleep (::rds/timeout rconn))))))
|
||||
|
||||
(recur)))
|
||||
|
||||
(catch InterruptedException _
|
||||
(l/debug :hint "dispatcher: interrupted"))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "dispatcher: unexpected exception" :cause cause))
|
||||
(finally
|
||||
(l/info :hint "dispatcher: terminated")))))))
|
||||
(px/fn->thread dispatcher :name "penpot/worker/dispatcher" :virtual true))))
|
||||
|
||||
(defmethod ig/halt-key! ::dispatcher
|
||||
[_ thread]
|
||||
|
@ -311,7 +299,7 @@
|
|||
(defn- start-worker!
|
||||
[{:keys [::rds/redis ::worker-id ::queue] :as cfg}]
|
||||
(px/thread
|
||||
{:name (format "penpot/worker/%s" worker-id)}
|
||||
{:name (format "penpot/worker/runner:%s" worker-id)}
|
||||
(l/info :hint "worker: started" :worker-id worker-id :queue queue)
|
||||
(try
|
||||
(dm/with-open [rconn (rds/connect redis)]
|
||||
|
@ -462,7 +450,8 @@
|
|||
(case status
|
||||
:retry (handle-task-retry result)
|
||||
:failed (handle-task-failure result)
|
||||
:completed (handle-task-completion result))))
|
||||
:completed (handle-task-completion result)
|
||||
nil)))
|
||||
|
||||
(run-task-loop [task-id]
|
||||
(loop [result (run-task task-id)]
|
||||
|
@ -531,7 +520,7 @@
|
|||
(s/def ::entries (s/coll-of (s/nilable ::cron-task)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::cron [_]
|
||||
(s/keys :req [::scheduled-executor ::db/pool ::entries ::registry]))
|
||||
(s/keys :req [::db/pool ::entries ::registry]))
|
||||
|
||||
(defmethod ig/init-key ::cron
|
||||
[_ {:keys [::entries ::registry ::db/pool] :as cfg}]
|
||||
|
@ -598,22 +587,23 @@
|
|||
|
||||
(defn- execute-cron-task
|
||||
[{:keys [::db/pool] :as cfg} {:keys [id] :as task}]
|
||||
(try
|
||||
(db/with-atomic [conn pool]
|
||||
(when (db/exec-one! conn [sql:lock-cron-task (d/name id)])
|
||||
(l/trace :hint "cron: execute task" :task-id id)
|
||||
((:fn task) task)))
|
||||
(catch InterruptedException _
|
||||
(px/interrupt! (px/current-thread))
|
||||
(l/debug :hint "cron: task interrupted" :task-id id))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "cron: unhandled exception on running task"
|
||||
::l/context (get-error-context cause task)
|
||||
:task-id id
|
||||
:cause cause))
|
||||
(finally
|
||||
(when-not (px/interrupted? :current)
|
||||
(schedule-cron-task cfg task)))))
|
||||
(px/thread
|
||||
{:name (str "penpot/cront-task/" id)}
|
||||
(try
|
||||
(db/with-atomic [conn pool]
|
||||
(when (db/exec-one! conn [sql:lock-cron-task (d/name id)])
|
||||
(l/trace :hint "cron: execute task" :task-id id)
|
||||
((:fn task) task)))
|
||||
(catch InterruptedException _
|
||||
(l/debug :hint "cron: task interrupted" :task-id id))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "cron: unhandled exception on running task"
|
||||
::l/context (get-error-context cause task)
|
||||
:task-id id
|
||||
:cause cause))
|
||||
(finally
|
||||
(when-not (px/interrupted? :current)
|
||||
(schedule-cron-task cfg task))))))
|
||||
|
||||
(defn- ms-until-valid
|
||||
[cron]
|
||||
|
@ -622,16 +612,11 @@
|
|||
next (dt/next-valid-instant-from cron now)]
|
||||
(inst-ms (dt/diff now next))))
|
||||
|
||||
(def ^:private
|
||||
xf-without-done
|
||||
(remove #(.isDone ^Future %)))
|
||||
|
||||
(defn- schedule-cron-task
|
||||
[{:keys [::scheduled-executor ::running] :as cfg} {:keys [cron] :as task}]
|
||||
(let [ft (px/schedule! scheduled-executor
|
||||
(ms-until-valid cron)
|
||||
[{:keys [::running] :as cfg} {:keys [cron] :as task}]
|
||||
(let [ft (px/schedule! (ms-until-valid cron)
|
||||
(partial execute-cron-task cfg task))]
|
||||
(swap! running #(into #{ft} xf-without-done %))))
|
||||
(swap! running #(into #{ft} (filter p/pending?) %))))
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
|
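schedule-cron-task now leans on promesa's default scheduler (px/schedule! with just a delay and a function) and prunes finished futures with p/pending? instead of the old Future/.isDone transducer. A small sketch of that bookkeeping pattern, with illustrative names:

(require '[promesa.core :as p]
         '[promesa.exec :as px])

(def running (atom #{}))

(defn schedule-and-track!
  "Run f after delay-ms on the default scheduler, keeping only futures
  that are still pending in the running set."
  [delay-ms f]
  (let [ft (px/schedule! delay-ms f)]
    (swap! running #(into #{ft} (filter p/pending?) %))))

(schedule-and-track! 5000 #(println "cron tick"))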
|
@ -8,6 +8,7 @@
|
|||
(:require
|
||||
[app.auth]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.flags :as flags]
|
||||
[app.common.pages :as cp]
|
||||
|
@ -65,6 +66,47 @@
|
|||
:enable-smtp
|
||||
:enable-quotes])
|
||||
|
||||
(def test-init-sql
|
||||
["alter table project_profile_rel set unlogged;\n"
|
||||
"alter table file_profile_rel set unlogged;\n"
|
||||
"alter table presence set unlogged;\n"
|
||||
"alter table presence set unlogged;\n"
|
||||
"alter table http_session set unlogged;\n"
|
||||
"alter table team_profile_rel set unlogged;\n"
|
||||
"alter table team_project_profile_rel set unlogged;\n"
|
||||
"alter table comment_thread_status set unlogged;\n"
|
||||
"alter table comment set unlogged;\n"
|
||||
"alter table comment_thread set unlogged;\n"
|
||||
"alter table profile_complaint_report set unlogged;\n"
|
||||
"alter table file_change set unlogged;\n"
|
||||
"alter table team_font_variant set unlogged;\n"
|
||||
"alter table share_link set unlogged;\n"
|
||||
"alter table usage_quote set unlogged;\n"
|
||||
"alter table access_token set unlogged;\n"
|
||||
"alter table profile set unlogged;\n"
|
||||
"alter table file_library_rel set unlogged;\n"
|
||||
"alter table file_thumbnail set unlogged;\n"
|
||||
"alter table file_object_thumbnail set unlogged;\n"
|
||||
"alter table file_media_object set unlogged;\n"
|
||||
"alter table file_data_fragment set unlogged;\n"
|
||||
"alter table file set unlogged;\n"
|
||||
"alter table project set unlogged;\n"
|
||||
"alter table team_invitation set unlogged;\n"
|
||||
"alter table webhook_delivery set unlogged;\n"
|
||||
"alter table webhook set unlogged;\n"
|
||||
"alter table team set unlogged;\n"
|
||||
;; For some reason, modifying the task related tables is very very
|
||||
;; slow (5s); so we just don't alter them
|
||||
;; "alter table task set unlogged;\n"
|
||||
;; "alter table task_default set unlogged;\n"
|
||||
;; "alter table task_completed set unlogged;\n"
|
||||
"alter table audit_log_default set unlogged ;\n"
|
||||
"alter table storage_object set unlogged;\n"
|
||||
"alter table server_error_report set unlogged;\n"
|
||||
"alter table server_prop set unlogged;\n"
|
||||
"alter table global_complaint_report set unlogged;\n"
|
||||
])
|
||||
|
||||
(defn state-init
|
||||
[next]
|
||||
(with-redefs [app.config/flags (flags/parse flags/default default-flags)
|
||||
|
@ -97,9 +139,7 @@
|
|||
:app.http.oauth/handler
|
||||
:app.notifications/handler
|
||||
:app.loggers.mattermost/reporter
|
||||
:app.loggers.loki/reporter
|
||||
:app.loggers.database/reporter
|
||||
:app.loggers.zmq/receiver
|
||||
:app.worker/cron
|
||||
:app.worker/worker))
|
||||
_ (ig/load-namespaces system)
|
||||
|
@ -108,6 +148,9 @@
|
|||
(try
|
||||
(binding [*system* system
|
||||
*pool* (:app.db/pool system)]
|
||||
(db/with-atomic [conn *pool*]
|
||||
(doseq [sql test-init-sql]
|
||||
(db/exec! conn [sql])))
|
||||
(next))
|
||||
(finally
|
||||
(ig/halt! system))))))
|
||||
|
@ -120,11 +163,15 @@
|
|||
" AND table_name != 'migrations';")]
|
||||
(db/with-atomic [conn *pool*]
|
||||
(let [result (->> (db/exec! conn [sql])
|
||||
(map :table-name))]
|
||||
(db/exec! conn [(str "TRUNCATE "
|
||||
(apply str (interpose ", " result))
|
||||
" CASCADE;")]))))
|
||||
(next))
|
||||
(map :table-name)
|
||||
(remove #(= "task" %)))
|
||||
sql (str "TRUNCATE "
|
||||
(apply str (interpose ", " result))
|
||||
" CASCADE;")]
|
||||
(doseq [table result]
|
||||
(db/exec! conn [(str "delete from " table ";")]))))
|
||||
|
||||
(next)))
|
||||
|
||||
(defn clean-storage
|
||||
[next]
|
||||
|
@ -162,7 +209,7 @@
|
|||
:password "123123"
|
||||
:is-demo false}
|
||||
params)]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(->> params
|
||||
(cmd.auth/create-profile! conn)
|
||||
(cmd.auth/create-profile-rels! conn))))))
|
||||
|
@ -172,7 +219,7 @@
|
|||
([pool i {:keys [profile-id team-id] :as params}]
|
||||
(us/assert uuid? profile-id)
|
||||
(us/assert uuid? team-id)
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(->> (merge {:id (mk-uuid "project" i)
|
||||
:name (str "project" i)}
|
||||
params)
|
||||
|
@ -184,7 +231,7 @@
|
|||
([pool i {:keys [profile-id project-id] :as params}]
|
||||
(us/assert uuid? profile-id)
|
||||
(us/assert uuid? project-id)
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files.create/create-file conn
|
||||
(merge {:id (mk-uuid "file" i)
|
||||
:name (str "file" i)
|
||||
|
@ -200,7 +247,7 @@
|
|||
([i params] (create-team* *pool* i params))
|
||||
([pool i {:keys [profile-id] :as params}]
|
||||
(us/assert uuid? profile-id)
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [id (mk-uuid "team" i)]
|
||||
(teams/create-team conn {:id id
|
||||
:profile-id profile-id
|
||||
|
@ -211,7 +258,7 @@
|
|||
([pool {:keys [name width height mtype file-id is-local media-id]
|
||||
:or {name "sample" width 100 height 100 mtype "image/svg+xml" is-local true}}]
|
||||
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(db/insert! conn :file-media-object
|
||||
{:id (uuid/next)
|
||||
:file-id file-id
|
||||
|
@ -225,12 +272,12 @@
|
|||
(defn link-file-to-library*
|
||||
([params] (link-file-to-library* *pool* params))
|
||||
([pool {:keys [file-id library-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(#'files/link-file-to-library conn {:file-id file-id :library-id library-id}))))
|
||||
|
||||
(defn create-complaint-for
|
||||
[pool {:keys [id created-at type]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(db/insert! conn :profile-complaint-report
|
||||
{:profile-id id
|
||||
:created-at (or created-at (dt/now))
|
||||
|
@ -239,7 +286,7 @@
|
|||
|
||||
(defn create-global-complaint-for
|
||||
[pool {:keys [email type created-at]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(db/insert! conn :global-complaint-report
|
||||
{:email email
|
||||
:type (name type)
|
||||
|
@ -249,7 +296,7 @@
|
|||
(defn create-team-role*
|
||||
([params] (create-team-role* *pool* params))
|
||||
([pool {:keys [team-id profile-id role] :or {role :owner}}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(#'teams/create-team-role conn {:team-id team-id
|
||||
:profile-id profile-id
|
||||
:role role}))))
|
||||
|
@ -257,7 +304,7 @@
|
|||
(defn create-project-role*
|
||||
([params] (create-project-role* *pool* params))
|
||||
([pool {:keys [project-id profile-id role] :or {role :owner}}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(#'teams/create-project-role conn {:project-id project-id
|
||||
:profile-id profile-id
|
||||
:role role}))))
|
||||
|
@ -265,7 +312,7 @@
|
|||
(defn create-file-role*
|
||||
([params] (create-file-role* *pool* params))
|
||||
([pool {:keys [file-id profile-id role] :or {role :owner}}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files.create/create-file-role! conn {:file-id file-id
|
||||
:profile-id profile-id
|
||||
:role role}))))
|
||||
|
@ -274,10 +321,10 @@
|
|||
([params] (update-file* *pool* params))
|
||||
([pool {:keys [file-id changes session-id profile-id revn]
|
||||
:or {session-id (uuid/next) revn 0}}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [features #{"components/v2"}
|
||||
cfg (-> (select-keys *system* [::mbus/msgbus ::mtx/metrics])
|
||||
(assoc :conn conn))]
|
||||
(assoc ::db/conn conn))]
|
||||
(files.update/update-file cfg
|
||||
{:id file-id
|
||||
:revn revn
|
||||
|
@ -310,7 +357,7 @@
|
|||
(defmacro try-on!
|
||||
[expr]
|
||||
`(try
|
||||
(let [result# (deref ~expr)
|
||||
(let [result# ~expr
|
||||
result# (cond-> result# (rph/wrapped? result#) deref)]
|
||||
{:error nil
|
||||
:result result#})
|
||||
|
@ -320,7 +367,7 @@
|
|||
|
||||
(defn command!
|
||||
[{:keys [::type] :as data}]
|
||||
(let [method-fn (get-in *system* [:app.rpc/methods :commands type])]
|
||||
(let [[mdata method-fn] (get-in *system* [:app.rpc/methods :commands type])]
|
||||
(when-not method-fn
|
||||
(ex/raise :type :assertion
|
||||
:code :rpc-method-not-found
|
||||
|
@ -333,7 +380,7 @@
|
|||
|
||||
(defn mutation!
|
||||
[{:keys [::type profile-id] :as data}]
|
||||
(let [method-fn (get-in *system* [:app.rpc/methods :mutations type])]
|
||||
(let [[mdata method-fn] (get-in *system* [:app.rpc/methods :mutations type])]
|
||||
(try-on! (method-fn (-> data
|
||||
(dissoc ::type)
|
||||
(assoc ::rpc/profile-id profile-id)
|
||||
|
@ -341,7 +388,7 @@
|
|||
|
||||
(defn query!
|
||||
[{:keys [::type profile-id] :as data}]
|
||||
(let [method-fn (get-in *system* [:app.rpc/methods :queries type])]
|
||||
(let [[mdata method-fn] (get-in *system* [:app.rpc/methods :queries type])]
|
||||
(try-on! (method-fn (-> data
|
||||
(dissoc ::type)
|
||||
(assoc ::rpc/profile-id profile-id)
|
||||
|
|
|
@ -40,6 +40,6 @@
|
|||
{:keys [error result]} (th/command! (assoc params ::cond/key etag))]
|
||||
(t/is (nil? error))
|
||||
(t/is (fn? result))
|
||||
(t/is (= 304 (-> (result nil) :status))))
|
||||
(t/is (= 304 (-> (result nil) :yetti.response/status))))
|
||||
))))
|
||||
|
||||
|
|
|
@ -215,10 +215,10 @@
|
|||
(t/is (= 1 (count rows))))
|
||||
|
||||
;; The underlying storage objects are still available.
|
||||
(t/is (some? @(sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (some? @(sto/get-object storage (:thumbnail-id fmo2))))
|
||||
(t/is (some? @(sto/get-object storage (:media-id fmo1))))
|
||||
(t/is (some? @(sto/get-object storage (:thumbnail-id fmo1))))
|
||||
(t/is (some? (sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (some? (sto/get-object storage (:thumbnail-id fmo2))))
|
||||
(t/is (some? (sto/get-object storage (:media-id fmo1))))
|
||||
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1))))
|
||||
|
||||
;; proceed to remove usage of the file
|
||||
(update-file {:file-id (:id file)
|
||||
|
@ -246,10 +246,10 @@
|
|||
;; Finally, check that some of the objects that are marked as
|
||||
;; deleted can no longer be retrieved using the standard storage
|
||||
;; public api.
|
||||
(t/is (nil? @(sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (nil? @(sto/get-object storage (:thumbnail-id fmo2))))
|
||||
(t/is (nil? @(sto/get-object storage (:media-id fmo1))))
|
||||
(t/is (nil? @(sto/get-object storage (:thumbnail-id fmo1))))
|
||||
(t/is (nil? (sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo2))))
|
||||
(t/is (nil? (sto/get-object storage (:media-id fmo1))))
|
||||
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo1))))
|
||||
)))
|
||||
|
||||
(t/deftest permissions-checks-creating-file
|
||||
|
|
|
@ -26,9 +26,9 @@
|
|||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
|
||||
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
sobject (sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
profile (th/create-profile* 1 {:is-active true})
|
||||
project (th/create-project* 1 {:team-id (:default-team-id profile)
|
||||
:profile-id (:id profile)})
|
||||
|
@ -98,9 +98,9 @@
|
|||
(t/deftest duplicate-file-with-deleted-relations
|
||||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
sobject (sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
profile (th/create-profile* 1 {:is-active true})
|
||||
|
||||
project (th/create-project* 1 {:team-id (:default-team-id profile)
|
||||
|
@ -120,7 +120,7 @@
|
|||
:media-id (:id sobject)})]
|
||||
|
||||
(th/mark-file-deleted* {:id (:id file2)})
|
||||
@(sto/del-object! storage sobject)
|
||||
(sto/del-object! storage sobject)
|
||||
|
||||
(let [data {::th/type :duplicate-file
|
||||
::rpc/profile-id (:id profile)
|
||||
|
@ -157,9 +157,9 @@
|
|||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
|
||||
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
sobject (sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
|
||||
profile (th/create-profile* 1 {:is-active true})
|
||||
project (th/create-project* 1 {:team-id (:default-team-id profile)
|
||||
|
@ -230,9 +230,9 @@
|
|||
(t/deftest duplicate-project-with-deleted-files
|
||||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
sobject @(sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
sobject (sto/put-object! storage {::sto/content (sto/content "content")
|
||||
:content-type "text/plain"
|
||||
:other "data"})
|
||||
profile (th/create-profile* 1 {:is-active true})
|
||||
project (th/create-project* 1 {:team-id (:default-team-id profile)
|
||||
:profile-id (:id profile)})
|
||||
|
|
|
@ -42,8 +42,8 @@
|
|||
(t/is (uuid? media-id))
|
||||
(t/is (uuid? thumbnail-id))
|
||||
(let [storage (:app.storage/storage th/*system*)
|
||||
mobj1 @(sto/get-object storage media-id)
|
||||
mobj2 @(sto/get-object storage thumbnail-id)]
|
||||
mobj1 (sto/get-object storage media-id)
|
||||
mobj2 (sto/get-object storage thumbnail-id)]
|
||||
(t/is (sto/object? mobj1))
|
||||
(t/is (sto/object? mobj2))
|
||||
(t/is (= 122785 (:size mobj1)))
|
||||
|
@ -83,8 +83,8 @@
|
|||
(t/is (uuid? media-id))
|
||||
(t/is (uuid? thumbnail-id))
|
||||
(let [storage (:app.storage/storage th/*system*)
|
||||
mobj1 @(sto/get-object storage media-id)
|
||||
mobj2 @(sto/get-object storage thumbnail-id)]
|
||||
mobj1 (sto/get-object storage media-id)
|
||||
mobj2 (sto/get-object storage thumbnail-id)]
|
||||
(t/is (sto/object? mobj1))
|
||||
(t/is (sto/object? mobj2))
|
||||
(t/is (= 312043 (:size mobj1)))
|
||||
|
@ -162,8 +162,8 @@
|
|||
(t/is (uuid? media-id))
|
||||
(t/is (uuid? thumbnail-id))
|
||||
(let [storage (:app.storage/storage th/*system*)
|
||||
mobj1 @(sto/get-object storage media-id)
|
||||
mobj2 @(sto/get-object storage thumbnail-id)]
|
||||
mobj1 (sto/get-object storage media-id)
|
||||
mobj2 (sto/get-object storage thumbnail-id)]
|
||||
(t/is (sto/object? mobj1))
|
||||
(t/is (sto/object? mobj2))
|
||||
(t/is (= 122785 (:size mobj1)))
|
||||
|
@ -203,8 +203,8 @@
|
|||
(t/is (uuid? media-id))
|
||||
(t/is (uuid? thumbnail-id))
|
||||
(let [storage (:app.storage/storage th/*system*)
|
||||
mobj1 @(sto/get-object storage media-id)
|
||||
mobj2 @(sto/get-object storage thumbnail-id)]
|
||||
mobj1 (sto/get-object storage media-id)
|
||||
mobj2 (sto/get-object storage thumbnail-id)]
|
||||
(t/is (sto/object? mobj1))
|
||||
(t/is (sto/object? mobj2))
|
||||
(t/is (= 312043 (:size mobj1)))
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
(ns backend-tests.rpc-team-test
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.http :as http]
|
||||
|
|
|
@ -37,61 +37,61 @@
|
|||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
content (sto/content "content")
|
||||
object @(sto/put-object! storage {::sto/content content
|
||||
:content-type "text/plain"
|
||||
:other "data"})]
|
||||
object (sto/put-object! storage {::sto/content content
|
||||
:content-type "text/plain"
|
||||
:other "data"})]
|
||||
|
||||
(t/is (sto/object? object))
|
||||
(t/is (fs/path? @(sto/get-object-path storage object)))
|
||||
(t/is (fs/path? (sto/get-object-path storage object)))
|
||||
|
||||
(t/is (nil? (:expired-at object)))
|
||||
(t/is (= :assets-fs (:backend object)))
|
||||
(t/is (= "data" (:other (meta object))))
|
||||
(t/is (= "text/plain" (:content-type (meta object))))
|
||||
(t/is (= "content" (slurp @(sto/get-object-data storage object))))
|
||||
(t/is (= "content" (slurp @(sto/get-object-path storage object))))
|
||||
(t/is (= "content" (slurp (sto/get-object-data storage object))))
|
||||
(t/is (= "content" (slurp (sto/get-object-path storage object))))
|
||||
))
|
||||
|
||||
(t/deftest put-and-retrieve-expired-object
|
||||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
content (sto/content "content")
|
||||
object @(sto/put-object! storage {::sto/content content
|
||||
::sto/expired-at (dt/in-future {:seconds 1})
|
||||
:content-type "text/plain"
|
||||
})]
|
||||
object (sto/put-object! storage {::sto/content content
|
||||
::sto/expired-at (dt/in-future {:seconds 1})
|
||||
:content-type "text/plain"
|
||||
})]
|
||||
|
||||
(t/is (sto/object? object))
|
||||
(t/is (dt/instant? (:expired-at object)))
|
||||
(t/is (dt/is-after? (:expired-at object) (dt/now)))
|
||||
(t/is (= object @(sto/get-object storage (:id object))))
|
||||
(t/is (= object (sto/get-object storage (:id object))))
|
||||
|
||||
(th/sleep 1000)
|
||||
(t/is (nil? @(sto/get-object storage (:id object))))
|
||||
(t/is (nil? @(sto/get-object-data storage object)))
|
||||
(t/is (nil? @(sto/get-object-url storage object)))
|
||||
(t/is (nil? @(sto/get-object-path storage object)))
|
||||
(t/is (nil? (sto/get-object storage (:id object))))
|
||||
(t/is (nil? (sto/get-object-data storage object)))
|
||||
(t/is (nil? (sto/get-object-url storage object)))
|
||||
(t/is (nil? (sto/get-object-path storage object)))
|
||||
))
|
||||
|
||||
(t/deftest put-and-delete-object
|
||||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(configure-storage-backend))
|
||||
content (sto/content "content")
|
||||
object @(sto/put-object! storage {::sto/content content
|
||||
:content-type "text/plain"
|
||||
:expired-at (dt/in-future {:seconds 1})})]
|
||||
object (sto/put-object! storage {::sto/content content
|
||||
:content-type "text/plain"
|
||||
:expired-at (dt/in-future {:seconds 1})})]
|
||||
(t/is (sto/object? object))
|
||||
(t/is (true? @(sto/del-object! storage object)))
|
||||
(t/is (true? (sto/del-object! storage object)))
|
||||
|
||||
;; retrieving the same object should not return nil because the
|
||||
;; deletion is not immediate
|
||||
(t/is (some? @(sto/get-object-data storage object)))
|
||||
(t/is (some? @(sto/get-object-url storage object)))
|
||||
(t/is (some? @(sto/get-object-path storage object)))
|
||||
(t/is (some? (sto/get-object-data storage object)))
|
||||
(t/is (some? (sto/get-object-url storage object)))
|
||||
(t/is (some? (sto/get-object-path storage object)))
|
||||
|
||||
;; But you can't retrieve the object again because in the database it is
|
||||
;; marked as deleted/expired.
|
||||
(t/is (nil? @(sto/get-object storage (:id object))))
|
||||
(t/is (nil? (sto/get-object storage (:id object))))
|
||||
))
|
||||
|
||||
(t/deftest test-deleted-gc-task
|
||||
|
@ -99,14 +99,14 @@
|
|||
(configure-storage-backend))
|
||||
content1 (sto/content "content1")
|
||||
content2 (sto/content "content2")
|
||||
object1 @(sto/put-object! storage {::sto/content content1
|
||||
::sto/expired-at (dt/now)
|
||||
:content-type "text/plain"
|
||||
})
|
||||
object2 @(sto/put-object! storage {::sto/content content2
|
||||
::sto/expired-at (dt/in-past {:hours 2})
|
||||
:content-type "text/plain"
|
||||
})]
|
||||
object1 (sto/put-object! storage {::sto/content content1
|
||||
::sto/expired-at (dt/now)
|
||||
:content-type "text/plain"
|
||||
})
|
||||
object2 (sto/put-object! storage {::sto/content content2
|
||||
::sto/expired-at (dt/in-past {:hours 2})
|
||||
:content-type "text/plain"
|
||||
})]
|
||||
|
||||
(th/sleep 200)
|
||||
|
||||
|
|
|
@ -23,15 +23,19 @@
|
|||
com.cognitect/transit-cljs {:mvn/version "0.8.280"}
|
||||
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
||||
|
||||
funcool/promesa {:mvn/version "10.0.594"}
|
||||
funcool/cuerdas {:mvn/version "2022.06.16-403"}
|
||||
funcool/promesa
|
||||
{:git/tag "11.0-alpha13"
|
||||
:git/sha "f6cab38"
|
||||
:git/url "https://github.com/funcool/promesa.git"}
|
||||
|
||||
lambdaisland/uri {:mvn/version "1.13.95"
|
||||
:exclusions [org.clojure/data.json]}
|
||||
|
||||
frankiesardo/linked {:mvn/version "1.3.0"}
|
||||
|
||||
funcool/datoteka {:mvn/version "3.0.66"}
|
||||
funcool/datoteka {:mvn/version "3.0.66"
|
||||
:exclusions [funcool/promesa]}
|
||||
com.sun.mail/jakarta.mail {:mvn/version "2.0.1"}
|
||||
org.la4j/la4j {:mvn/version "0.6.0"}
|
||||
|
||||
|
|
|
@ -178,6 +178,7 @@
|
|||
(print-detail cause)
|
||||
(recur cause))))))
|
||||
]
|
||||
|
||||
(with-out-str
|
||||
(print-all cause)))))
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
"A version parsing helper."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.shapes :as gsh]
|
||||
|
@ -28,11 +29,6 @@
|
|||
(def conjv (fnil conj []))
|
||||
(def conjs (fnil conj #{}))
|
||||
|
||||
(defn- raise
|
||||
[err-str]
|
||||
#?(:clj (throw (Exception. err-str))
|
||||
:cljs (throw (js/Error. err-str))))
|
||||
|
||||
(defn- commit-change
|
||||
([file change]
|
||||
(commit-change file change nil))
|
||||
|
@ -104,7 +100,9 @@
|
|||
|
||||
(defn setup-rect-selrect [{:keys [x y width height transform] :as obj}]
|
||||
(when-not (d/num? x y width height)
|
||||
(raise "Coords not valid for object"))
|
||||
(ex/raise :type :assertion
|
||||
:code :invalid-condition
|
||||
:hint "Coords not valid for object"))
|
||||
|
||||
(let [rect (gsh/make-rect x y width height)
|
||||
center (gsh/center-rect rect)
|
||||
|
@ -121,7 +119,9 @@
|
|||
[{:keys [content center transform transform-inverse] :as obj}]
|
||||
|
||||
(when (or (empty? content) (nil? center))
|
||||
(raise "Path not valid"))
|
||||
(ex/raise :type :assertion
|
||||
:code :invalid-condition
|
||||
:hint "Path not valid"))
|
||||
|
||||
(let [transform (gmt/transform-in center transform)
|
||||
transform-inverse (gmt/transform-in center transform-inverse)
|
||||
|
|
|
@ -72,7 +72,7 @@
|
|||
(apply matrix params)))
|
||||
|
||||
(defn close?
|
||||
[m1 m2]
|
||||
[^Matrix m1 ^Matrix m2]
|
||||
(and (mth/close? (.-a m1) (.-a m2))
|
||||
(mth/close? (.-b m1) (.-b m2))
|
||||
(mth/close? (.-c m1) (.-c m2))
|
||||
|
@ -80,7 +80,7 @@
|
|||
(mth/close? (.-e m1) (.-e m2))
|
||||
(mth/close? (.-f m1) (.-f m2))))
|
||||
|
||||
(defn unit? [m1]
|
||||
(defn unit? [^Matrix m1]
|
||||
(and (some? m1)
|
||||
(mth/close? (.-a m1) 1)
|
||||
(mth/close? (.-b m1) 0)
|
||||
|
|
|
@ -380,7 +380,6 @@
|
|||
(defn rotate
|
||||
"Rotates the point around center with an angle"
|
||||
[p c angle]
|
||||
(prn "ROTATE" p c angle)
|
||||
(assert (point? p) "point instance expected")
|
||||
(assert (point? c) "point instance expected")
|
||||
(let [angle (mth/radians angle)
|
||||
|
|
|
@ -213,26 +213,28 @@
|
|||
`(when (enabled? ~logger ~level)
|
||||
(let [props# (cond-> (delay ~props) ~sync? deref)
|
||||
ts# (current-timestamp)
|
||||
context# *context*]
|
||||
(px/run! *default-executor*
|
||||
(fn []
|
||||
(let [props# (if ~sync? props# (deref props#))
|
||||
props# (into (d/ordered-map) props#)
|
||||
cause# ~cause
|
||||
context# (d/without-nils
|
||||
(merge context# ~context))
|
||||
lrecord# {::id (uuid/next)
|
||||
::timestamp ts#
|
||||
::message (delay (build-message props#))
|
||||
::props props#
|
||||
::context context#
|
||||
::level ~level
|
||||
::logger ~logger}
|
||||
lrecord# (cond-> lrecord#
|
||||
(some? cause#)
|
||||
(assoc ::cause cause#
|
||||
::trace (delay (build-stack-trace cause#))))]
|
||||
(swap! log-record (constantly lrecord#)))))))))
|
||||
context# *context*
|
||||
logfn# (fn []
|
||||
(let [props# (if ~sync? props# (deref props#))
|
||||
props# (into (d/ordered-map) props#)
|
||||
cause# ~cause
|
||||
context# (d/without-nils
|
||||
(merge context# ~context))
|
||||
lrecord# {::id (uuid/next)
|
||||
::timestamp ts#
|
||||
::message (delay (build-message props#))
|
||||
::props props#
|
||||
::context context#
|
||||
::level ~level
|
||||
::logger ~logger}
|
||||
lrecord# (cond-> lrecord#
|
||||
(some? cause#)
|
||||
(assoc ::cause cause#
|
||||
::trace (delay (build-stack-trace cause#))))]
|
||||
(swap! log-record (constantly lrecord#))))]
|
||||
(if ~sync?
|
||||
(logfn#)
|
||||
(px/exec! *default-executor* logfn#))))))
|
||||
|
||||
#?(:clj
|
||||
(defn slf4j-log-handler
|
||||
|
@ -243,12 +245,12 @@
|
|||
(some? trace)
|
||||
(str "\n" @trace))]
|
||||
(case level
|
||||
:trace (.trace ^Logger logger ^String message ^Throwable cause)
|
||||
:debug (.debug ^Logger logger ^String message ^Throwable cause)
|
||||
:info (.info ^Logger logger ^String message ^Throwable cause)
|
||||
:warn (.warn ^Logger logger ^String message ^Throwable cause)
|
||||
:error (.error ^Logger logger ^String message ^Throwable cause)
|
||||
:fatal (.error ^Logger logger ^String message ^Throwable cause)
|
||||
:trace (.trace ^Logger logger ^String message)
|
||||
:debug (.debug ^Logger logger ^String message)
|
||||
:info (.info ^Logger logger ^String message)
|
||||
:warn (.warn ^Logger logger ^String message)
|
||||
:error (.error ^Logger logger ^String message)
|
||||
:fatal (.error ^Logger logger ^String message)
|
||||
(throw (IllegalArgumentException. (str "invalid level:" level))))))))
|
||||
|
||||
#?(:cljs
|
||||
|
|
|
@ -103,7 +103,7 @@
|
|||
|
||||
(defn font-weight->name
|
||||
[weight]
|
||||
(case weight
|
||||
(case (long weight)
|
||||
100 "Hairline"
|
||||
200 "Extra Light"
|
||||
300 "Light"
|
||||
|
|
|
@ -252,6 +252,9 @@
|
|||
#?(:clj
|
||||
(s/def ::agent #(instance? clojure.lang.Agent %)))
|
||||
|
||||
#?(:clj
|
||||
(s/def ::atom #(instance? clojure.lang.Atom %)))
|
||||
|
||||
(defn bytes?
|
||||
"Test if a first parameter is a byte
|
||||
array or not."
|
||||
|
|
|
@ -157,7 +157,7 @@
|
|||
(defn- code-points->text
|
||||
[cpoints start end]
|
||||
#?(:cljs (apply str (subvec cpoints start end))
|
||||
:clj (let [sb (StringBuilder. (- end start))]
|
||||
:clj (let [sb (StringBuilder. (- ^long end ^long start))]
|
||||
(run! #(.appendCodePoint sb (int %)) (subvec cpoints start end))
|
||||
(.toString sb))))
|
||||
|
||||
|
|
|
@@ -17,43 +17,43 @@
   [app.common.types.page :as ctp]
   [app.common.types.file :as ctf]))

(defspec transit-encode-decode-with-shape 30
(defspec transit-encode-decode-with-shape 10
  (props/for-all
   [fdata (s/gen ::cts/shape)]
   (let [res (-> fdata transit/encode-str transit/decode-str)]
     (t/is (= res fdata)))))

(defspec types-shape-spec 10
(defspec types-shape-spec 5
  (props/for-all
   [fdata (s/gen ::cts/shape)]
   (t/is (us/valid? ::cts/shape fdata))))

(defspec types-page-spec 10
(defspec types-page-spec 5
  (props/for-all
   [fdata (s/gen ::ctp/page)]
   (t/is (us/valid? ::ctp/page fdata))))

(defspec types-file-colors-spec 30
(defspec types-file-colors-spec 10
  (props/for-all
   [fdata (s/gen ::ctf/colors)]
   (t/is (us/valid? ::ctf/colors fdata))))

(defspec types-file-recent-colors-spec 30
(defspec types-file-recent-colors-spec 10
  (props/for-all
   [fdata (s/gen ::ctf/recent-colors)]
   (t/is (us/valid? ::ctf/recent-colors fdata))))

(defspec types-file-typographies-spec 30
(defspec types-file-typographies-spec 10
  (props/for-all
   [fdata (s/gen ::ctf/typographies)]
   (t/is (us/valid? ::ctf/typographies fdata))))

(defspec types-file-media-spec 30
(defspec types-file-media-spec 10
  (props/for-all
   [fdata (s/gen ::ctf/media)]
   (t/is (us/valid? ::ctf/media fdata))))

(defspec types-file-components-spec 10
(defspec types-file-components-spec 1
  (props/for-all
   [fdata (s/gen ::ctf/components)]
   (t/is (us/valid? ::ctf/components fdata))))
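For readers less familiar with test.check, the number right after the name in defspec is the number of generated trials, so this hunk only reduces how many random cases each property runs per build, not what it checks. A minimal self-contained example of the same form; the property and trial count are invented for illustration:

    (ns example.props-test
      (:require [clojure.test.check.clojure-test :refer [defspec]]
                [clojure.test.check.generators :as gen]
                [clojure.test.check.properties :as props]))

    ;; 10 trials: reversing a vector twice gives back the same elements
    (defspec reverse-twice-is-identity 10
      (props/for-all [v (gen/vector gen/small-integer)]
        (= v (reverse (reverse v)))))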
@@ -14,10 +14,8 @@
   [clojure.test.check.generators :as gen]
   [clojure.test.check.properties :as props]))

(defspec non-repeating-uuid-next-1 5000
(defspec non-repeating-uuid-next-1 100
  (props/for-all
   [uuid1 (s/gen ::us/uuid)
    uuid2 (s/gen ::us/uuid)]
   (t/is (not= uuid1 uuid2))))
@@ -1,14 +1,17 @@
listen_addresses = '*'
max_connections = 100
shared_buffers = 128MB
temp_buffers = 8MB
work_mem = 8MB
max_connections = 50
shared_buffers = 256MB
temp_buffers = 18MB
work_mem = 18MB

dynamic_shared_memory_type = posix
synchronous_commit = off
wal_writer_delay = 900ms
max_wal_size = 1GB
min_wal_size = 80MB
full_page_writes = off
min_wal_size=1GB
max_wal_size=4GB

# log_min_duration_statement = 0
log_timezone = 'Europe/Madrid'

@@ -19,4 +22,3 @@ lc_monetary = 'en_US.utf8'
lc_numeric = 'en_US.utf8'
lc_time = 'en_US.utf8'
default_text_search_config = 'pg_catalog.english'
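These are straightforward tweaks to the development PostgreSQL configuration: fewer allowed connections, larger buffers, and a higher WAL ceiling. If it helps to confirm what a running instance actually loaded, here is a small sketch using next.jdbc from the backend dependencies; the connection parameters are placeholders, not the real devenv credentials:

    (require '[next.jdbc :as jdbc])

    (let [ds (jdbc/get-datasource {:dbtype "postgresql" :dbname "penpot"
                                   :user "penpot" :password "penpot"})]
      ;; returns something like [{:work_mem "18MB"}]
      (jdbc/execute! ds ["SHOW work_mem"]))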
@@ -21,6 +21,7 @@
(derive :get-font-variants ::query)
(derive :get-profile ::query)
(derive :get-project ::query)
(derive :get-projects ::query)
(derive :get-team-invitations ::query)
(derive :get-team-members ::query)
(derive :get-team-shared-files ::query)

@@ -29,6 +30,9 @@
(derive :get-teams ::query)
(derive :get-view-only-bundle ::query)
(derive :search-files ::query)
(derive :retrieve-list-of-builtin-templates ::query)
(derive :get-unread-comment-threads ::query)
(derive :get-team-recent-files ::query)

(defn handle-response
  [{:keys [status body] :as response}]
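Each derive call here registers an RPC method keyword under the ::query parent in the keyword hierarchy, so later code can branch with isa? instead of maintaining a hard-coded set of query names. A tiny standalone illustration; namespace-qualified keywords are used so it runs at a plain Clojure REPL, while the real code lives in the frontend namespace above:

    (derive ::get-profile ::query)
    (derive ::get-projects ::query)

    (isa? ::get-profile ::query)   ;; => true
    (isa? ::update-file ::query)   ;; => false, never derived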
@@ -46,9 +46,10 @@
(defonce state
  (ptk/store {:resolve ptk/resolve
              :on-event on-event
              :on-error (fn [e]
                          (.log js/console "ERROR!!" e)
                          (@on-error e))}))
              :on-error (fn [cause]
                          (when cause
                            (log/error :hint "unexpected exception on store" :cause cause)
                            (@on-error cause)))}))

(defonce stream
  (ptk/input-stream state))
@@ -7,6 +7,7 @@
(ns app.main.ui.auth.login
  (:require
   [app.common.data :as d]
   [app.common.logging :as log]
   [app.common.spec :as us]
   [app.config :as cf]
   [app.main.data.messages :as dm]
@@ -38,7 +39,10 @@
    (dom/prevent-default event)
    (->> (rp/command! :login-with-oidc (assoc params :provider provider))
         (rx/subs (fn [{:keys [redirect-uri] :as rsp}]
                    (.replace js/location redirect-uri))
                    (if redirect-uri
                      (.replace js/location redirect-uri)
                      (log/error :hint "unexpected response from OIDC method"
                                 :resp (pr-str rsp))))
                  (fn [{:keys [type code] :as error}]
                    (cond
                      (and (= type :restriction)