
Merge remote-tracking branch 'origin/staging'

Alejandro Alonso 2023-01-31 13:57:30 +01:00
commit 95cad24c18
686 changed files with 48550 additions and 23400 deletions

View file

@ -3,19 +3,19 @@ jobs:
build:
docker:
- image: penpotapp/devenv:latest
- image: cimg/postgres:13.5
- image: cimg/postgres:14.5
environment:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
- image: cimg/redis:6.2.6
- image: cimg/redis:7.0.5
working_directory: ~/repo
resource_class: large
environment:
# Customize the JVM maximum heap limit
JVM_OPTS: -Xmx1g
JVM_OPTS: -Xmx4g
steps:
- checkout
@ -29,6 +29,13 @@ jobs:
- run: cd .clj-kondo && cat config.edn
- run:
name: frontend styles prettier
working_directory: "./frontend"
command: |
yarn install
yarn run lint-scss
- run:
name: common lint
working_directory: "./common"
@ -43,13 +50,6 @@ jobs:
clj-kondo --version
clj-kondo --parallel --lint src/
- run:
name: frontend styles prettier
working_directory: "./frontend"
command: |
yarn install
yarn run lint-scss
- run:
name: backend lint
working_directory: "./backend"
@ -57,47 +57,42 @@ jobs:
clj-kondo --version
clj-kondo --parallel --lint src/
# run backend test
- run:
working_directory: "./common"
name: common tests
command: |
yarn install
yarn test
clojure -X:dev:test :patterns '["common-tests.*-test"]'
environment:
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
JVM_OPTS: -Xmx4g
NODE_OPTIONS: --max-old-space-size=4096
- run:
name: backend test
working_directory: "./backend"
command: "clojure -X:dev:test"
command: |
clojure -X:dev:test :patterns '["backend-tests.*-test"]'
environment:
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
JVM_OPTS: -Xmx4g
- run:
name: frontend tests
working_directory: "./frontend"
command: |
yarn install
clojure -M:dev:shadow-cljs compile test
node target/tests.js
environment:
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
- run:
working_directory: "./common"
name: common tests (cljs)
command: |
yarn install
yarn run compile-test
node target/test.js
environment:
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
- run:
working_directory: "./common"
name: common tests (clj)
command: |
clojure -X:dev:test
yarn test
environment:
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
NODE_OPTIONS: --max-old-space-size=4096
- save_cache:
paths:

View file

@ -7,6 +7,7 @@
app.common.data/export clojure.core/def
app.db/with-atomic clojure.core/with-open
app.common.data.macros/get-in clojure.core/get-in
app.common.data.macros/with-open clojure.core/with-open
app.common.data.macros/select-keys clojure.core/select-keys
app.common.logging/with-context clojure.core/do}
@ -44,6 +45,15 @@
:redundant-do
{:level :off}
:earmuffed-var-not-dynamic
{:level :off}
:dynamic-var-not-earmuffed
{:level :off}
:used-underscored-binding
{:level :warning}
:unused-binding
{:exclude-destructured-as true
:exclude-destructured-keys-in-fn-args false

89
.github/ISSUE_TEMPLATE/bug-report.yml vendored Normal file
View file

@ -0,0 +1,89 @@
description: Create a report to help us improve
labels: ["bug"]
name: Bug report
title: "bug: "
body:
- type: markdown
attributes:
value: |
## Before you start
Please search our [existing issues](https://github.com/penpot/penpot/issues) and open [pull requests](https://github.com/penpot/penpot/pulls) to lessen the chance of filing duplicate issues or feature requests. Thank you.
---
- type: textarea
attributes:
label: Steps To Reproduce
description: Steps to reproduce the behavior.
placeholder: |
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
validations:
required: true
- type: textarea
id: expected
attributes:
description: A clear and concise description of what you expected to happen.
label: Expected behavior
validations:
required: true
- type: textarea
id: actual
attributes:
description: A clear and concise description of what happens instead; what the bug is.
label: Actual behavior
validations:
required: true
- type: textarea
id: screenshots
attributes:
description: If applicable, add screenshots to help explain your problem.
label: Screenshots or video
- type: textarea
id: desktop
attributes:
label: Desktop (please complete the following information)
placeholder: |
- OS (e.g. iOS):
- Browser & version (e.g. Chrome 89.0):
- type: textarea
id: mobile
attributes:
label: Smartphone (please complete the following information)
placeholder: |
- Device & model (e.g. iPhone 6):
- OS & version (e.g. iOS 8.1):
- Browser & version (e.g. stock browser 22):
- type: textarea
id: environment
attributes:
label: Environment (please complete the following information)
placeholder: |
- Host (e.g. https://design.penpot.app, local instance):
*If self-hosted:*
- OS Version (e.g. Ubuntu 16.04):
- Docker / Docker-compose version (e.g. Docker version 18.03.0-ce, build 0520e24):
- Image version (e.g. Alpine):
Docker commands or docker-compose file (if possible and if applicable):
```
```
- type: textarea
id: frontend-trace
attributes:
label: Frontend Stack Trace
render: console
- type: textarea
id: backend-trace
attributes:
label: Backend Stack Trace
render: console
- type: textarea
id: additional-context
attributes:
label: Additional context
description: Any other context about the problem.

View file

@ -1,72 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
**Expected behavior**
A clear and concise description of what you expected to happen.
**Actual behavior**
A clear and concise description of what happens instead; what the bug is.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS (e.g. iOS):
- Browser & version (e.g. Chrome 89.0):
**Smartphone (please complete the following information):**
- Device & model (e.g. iPhone 6):
- OS & version (e.g. iOS 8.1):
- Browser & version (e.g. stock browser 22):
**Environment (please complete the following information):**
- Host (e.g. https://design.penpot.app, local instance):
*If self-hosted:*
- OS Version (e.g. Ubuntu 16.04):
- Docker / Docker-compose version (e.g. Docker version 18.03.0-ce, build 0520e24):
- Image version (e.g. Alpine):
Docker commands or docker-compose file (if possible and if applicable):
```
```
Frontend Stack Trace:
<details>
```
```
</details>
Backend Stack Trace:
<details>
```
```
</details>
**Additional context:**
Any other context about the problem.

View file

@ -0,0 +1,37 @@
description: Suggest an idea for this project.
labels: ["needs triage", "enhancement"]
name: "Feature request"
title: "feature: "
body:
- type: markdown
attributes:
value: |
## Before you start
Please search our [existing issues](https://github.com/penpot/penpot/issues) and open [pull requests](https://github.com/penpot/penpot/pulls) to lessen the chance of filing duplicate issues or feature requests. Thank you.
---
- type: textarea
id: problem
attributes:
description: A clear and concise description of what the problem is. Ex. I'm always frustrated when (...)
label: Is your feature request related to a problem? Please describe.
validations:
required: true
- type: textarea
id: solution
attributes:
description: A clear and concise description of what you want to happen.
label: Describe the solution you'd like.
validations:
required: true
- type: textarea
id: alternatives
attributes:
label: Describe alternatives you've considered.
description: A clear and concise description of any alternative solutions or features you've considered.
- type: textarea
id: additional-context
attributes:
label: Additional context
description: Add any other context or screenshots about the feature request here.

View file

@ -1,21 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when (...)
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

34
.gitignore vendored
View file

@ -1,55 +1,59 @@
*-init.clj
*.jar
*.penpot
*.orig
*.penpot
.calva
.clj-kondo
.cpcache
.lein-deps-sum
.lein-failures
.lein-plugins/
.lein-repl-history
.lsp
.nrepl-port
.nyc_output
.rebel_readline_history
.repl
.shadow-cljs
/*.jpg
/*.md
/*.png
/*.sql
/*.txt
/*.yml
/*.zip
/.clj-kondo/.cache
/_dump
/backend/-
/backend/*.md
/backend/*.sql
/backend/*.txt
/backend/assets/
/backend/builtin-templates
/backend/dist/
/backend/logs/
/backend/resources/public/assets
/backend/resources/public/media
/backend/target/
/backend/builtin-templates
/bundle*
/cd.md
/clj-profiler/
/common/.shadow-cljs
/common/coverage
/common/target
/deploy
/docker/images/bundle*
/exporter/.shadow-cljs
/exporter/target
/frontend/.shadow-cljs
/frontend/package-lock.json
/frontend/cypress/videos/*/
/frontend/cypress/fixtures/validuser.json
/frontend/cypress/videos/*/
/frontend/cypress/videos/*/
/frontend/dist/
/frontend/npm-debug.log
/frontend/out/
/frontend/package-lock.json
/frontend/resources/fonts/experiments
/frontend/resources/public/*
/frontend/target/
/frontend/cypress/videos/*/
/media
/other/
/scripts/
/telemetry/
/tmp/
/vendor/**/target
/vendor/svgclean/bundle*.js
/web
clj-profiler/
figwheel_server.log
node_modules

View file

@ -1,6 +1,82 @@
# CHANGELOG
## 1.17.0
### :sparkles: New features
- Adds layout flex functionality for boards
- Better overlays interactions on boards inside boards [Taiga #4386](https://tree.taiga.io/project/penpot/us/4386)
- Show board miniature in manual overlay setting [Taiga #4475](https://tree.taiga.io/project/penpot/issue/4475)
- Handoff visual improvements [Taiga #3124](https://tree.taiga.io/project/penpot/us/3124)
- Dynamic alignment only in sight [Github 1971](https://github.com/penpot/penpot/issues/1971)
- Add some accessibility to shortcut panel [Taiga #4713](https://tree.taiga.io/project/penpot/issue/4713)
- Add shortcuts for text editing [Taiga #2052](https://tree.taiga.io/project/penpot/us/2052)
- Second level boards treated as groups in terms of selection [Taiga #4269](https://tree.taiga.io/project/penpot/us/4269)
- Performance improvements both for backend and frontend
- Accessibility improvements for login area [Taiga #4353](https://tree.taiga.io/project/penpot/us/4353)
- Outbound webhooks [Taiga #4577](https://tree.taiga.io/project/penpot/us/4577)
- Add copy invitation link to the invitation options [Taiga #4213](https://tree.taiga.io/project/penpot/us/4213)
- Dynamic alignment only in sight [Taiga #3537](https://tree.taiga.io/project/penpot/us/3537)
- Improve naming of layers [Taiga #4036](https://tree.taiga.io/project/penpot/us/4036)
- Add zoom lens [Taiga #4691](https://tree.taiga.io/project/penpot/us/4691)
- Detect potential problems with custom font vertical metrics [Taiga #4697](https://tree.taiga.io/project/penpot/us/4697)
### :bug: Bugs fixed
- Add title to color bullets [Taiga #4218](https://tree.taiga.io/project/penpot/task/4218)
- Fix color bullets in library color modal [Taiga #4186](https://tree.taiga.io/project/penpot/issue/4186)
- Fix shortcut texts alignment [Taiga #4275](https://tree.taiga.io/project/penpot/issue/4275)
- Fix some texts and a typo [Taiga #4215](https://tree.taiga.io/project/penpot/issue/4215)
- Fix twitter support account link [Taiga #4279](https://tree.taiga.io/project/penpot/issue/4279)
- Fix lang autodetect issue [Taiga #4277](https://tree.taiga.io/project/penpot/issue/4277)
- Fix adding an extra page on import [Taiga #4543](https://tree.taiga.io/project/penpot/task/4543)
- Fix unable to select text at assets inputs in firefox [Taiga #4572](https://tree.taiga.io/project/penpot/issue/4572)
- Fix component sync when converting to path [Taiga #3642](https://tree.taiga.io/project/penpot/issue/3642)
- Fix style for team invite in deutsch [Taiga #4614](https://tree.taiga.io/project/penpot/issue/4614)
- Fix problem with text edition in Safari [Taiga #4046](https://tree.taiga.io/project/penpot/issue/4046)
- Fix show outline with rounded corners on rects [Taiga #4053](https://tree.taiga.io/project/penpot/issue/4053)
- Fix wrong interaction between comments and panning modes [Taiga #4297](https://tree.taiga.io/project/penpot/issue/4297)
- Fix bad element positioning on interaction with fixed scroll [Github #2660](https://github.com/penpot/penpot/issues/2660)
- Fix display type of component library not persistent [Taiga #4512](https://tree.taiga.io/project/penpot/issue/4512)
- Fix problem when moving texts with keyboard [#2690](https://github.com/penpot/penpot/issues/2690)
- Fix problem when drawing boxes won't detect mouse-up [Taiga #4618](https://tree.taiga.io/project/penpot/issue/4618)
- Fix missing loading icon on shared libraries [Taiga #4148](https://tree.taiga.io/project/penpot/issue/4148)
- Fix selection stroke missing in properties of multiple texts [Taiga #4048](https://tree.taiga.io/project/penpot/issue/4048)
- Fix missing create component menu for frames [Github #2670](https://github.com/penpot/penpot/issues/2670)
- Fix "currentColor" is not converted when importing SVG [Github 2276](https://github.com/penpot/penpot/issues/2276)
- Fix incorrect color in properties of multiple bool shapes [Taiga #4355](https://tree.taiga.io/project/penpot/issue/4355)
- Fix pressing the enter key gives you an internal error [Github 2675](https://github.com/penpot/penpot/issues/2675) [Github 2577](https://github.com/penpot/penpot/issues/2577)
- Fix confirm group name with enter doesn't work in assets modal [Taiga #4506](https://tree.taiga.io/project/penpot/issue/4506)
- Fix group/ungroup shapes inside a component [Taiga #4052](https://tree.taiga.io/project/penpot/issue/4052)
- Fix wrong update of text in components [Taiga #4646](https://tree.taiga.io/project/penpot/issue/4646)
- Fix problem with SVG imports with style [#2605](https://github.com/penpot/penpot/issues/2605)
- Fix ghost shapes after sync groups in components [Taiga #4649](https://tree.taiga.io/project/penpot/issue/4649)
- Fix layer orders messed up on move, group, reparent and undo [Github #2672](https://github.com/penpot/penpot/issues/2672)
- Fix max height in library dialog [Github #2335](https://github.com/penpot/penpot/issues/2335)
- Fix undo ungroup (shift+g) scrambles positions [Taiga #4674](https://tree.taiga.io/project/penpot/issue/4674)
- Fix justified text is stretched [Github #2539](https://github.com/penpot/penpot/issues/2539)
- Fix mousewheel on viewer inspector [Taiga #4221](https://tree.taiga.io/project/penpot/issue/4221)
- Fix path edition activated on boards [Taiga #4105](https://tree.taiga.io/project/penpot/issue/4105)
- Fix hidden layers inside groups become visible after the group visibility is changed [Taiga #4710](https://tree.taiga.io/project/penpot/issue/4710)
- Fix format of HSLA color on viewer [Taiga #4393](https://tree.taiga.io/project/penpot/issue/4393)
- Fix some typos [Taiga #4724](https://tree.taiga.io/project/penpot/issue/4724)
- Fix ctrl+c for inspect code [Taiga #4739](https://tree.taiga.io/project/penpot/issue/4739)
- Fix text in custom font is not at the expected position at export [Taiga #4394](https://tree.taiga.io/project/penpot/issue/4394)
- Fix unneeded popup when updating local components [Taiga #4430](https://tree.taiga.io/project/penpot/issue/4430)
- Fix multiuser - "Shadow" element is not updating immediately [Taiga #4709](https://tree.taiga.io/project/penpot/issue/4709)
- Fix paths not flagged as modified when resized [Taiga #4742](https://tree.taiga.io/project/penpot/issue/4742)
- Fix resend invitation doesn't reset the expiration date [Taiga #4741](https://tree.taiga.io/project/penpot/issue/4741)
- Fix incorrect state after undo page creation [Taiga #4690](https://tree.taiga.io/project/penpot/issue/4690)
- Fix copy paste texts with typography assets linked [Taiga #4750](https://tree.taiga.io/project/penpot/issue/4750)
### :heart: Community contributions by (Thank you!)
- To @iprithvitharun: let's make UX Writing contributions in Open Source a trend!
## 1.16.2-beta
### :bug: Bugs fixed
- Fix strange cursor behaviour after clicking viewport with text tool [Github #2447](https://github.com/penpot/penpot/issues/2447)
## 1.16.1-beta
@ -30,7 +106,7 @@
- Removed the support for v2 internal file data blob format. This
version has never been documented nor set as default value so
technicaly this is not a breaking change because we are removing
technically this is not a breaking change because we are removing
a "private API".
### :sparkles: New features
@ -70,11 +146,11 @@
- Fix grid not syncing immediately in multiuser [Taiga #4339](https://tree.taiga.io/project/penpot/issue/4339)
- Fix custom font upload fails silently for unsupported formats [Taiga #4279](https://tree.taiga.io/project/penpot/issue/4280)
### :arrow_up: Deps updates
### :heart: Community contributions by (Thank you!)
- To @andrewzhurov for many code contributions on this release.
- UI improvements in Project section (by @Waishnav) [#2285](https://github.com/penpot/penpot/pull/2285)
- Fix fronted comments (by @lol768) [#2368](https://github.com/penpot/penpot/pull/2368)
## 1.15.5-beta
@ -166,7 +242,7 @@
- The `PENPOT_LDAP_ATTRS_PHOTO` finally removed, it was unused for many
versions.
- If you are using social login (google, github, gitlab or generic OIDC) you
will need to ensure to add the following flags respectivelly to let them
will need to ensure to add the following flags respectively to let them
enabled: `enable-login-with-google`, `enable-login-with-github`,
`enable-login-with-gitlab` and `enable-login-with-oidc`. If not, they will
remain disabled after application start independently if you set the client-id
@ -271,7 +347,7 @@
- Fix undo when drawing curves [Taiga #3523](https://tree.taiga.io/project/penpot/issue/3523)
- Fix issue with text edition and certain fonts (WorkSans, Raleway, ...) and foreign objects [Taiga #3521](https://tree.taiga.io/project/penpot/issue/3521)
- Fix thumbnail generation when concurrent edition [Taiga #3522](https://tree.taiga.io/project/penpot/issue/3522)
- Fix environment imporot for exporter in Docker
- Fix environment import for exporter in Docker
- Fix auto scroll layers in Firefox [Taiga #3531](https://tree.taiga.io/project/penpot/issue/3531)
- Fix base background not visible for imported SVG
@ -355,7 +431,7 @@
- Fix mouse leave in handoff close overlay animation breaks [Taiga #3173](https://tree.taiga.io/project/penpot/issue/3173)
- Fix different behaviour during image drag [Taiga #2279](https://tree.taiga.io/project/penpot/issue/2279)
- Fix hidden file name on import [Taiga #3172](https://tree.taiga.io/project/penpot/issue/3172)
- Fix unneccessary scrollbars at the color list [Taiga #3211](https://tree.taiga.io/project/penpot/issue/3211)
- Fix unnecessary scrollbars at the color list [Taiga #3211](https://tree.taiga.io/project/penpot/issue/3211)
- "Show in exports" is showing in multiselections [Taiga #3194](https://tree.taiga.io/project/penpot/issue/3194)
- Edit file name navigates to the file workspace [Taiga #3183](https://tree.taiga.io/project/penpot/issue/3183)
- Fix scroll into view behind fixed element [Taiga #3170](https://tree.taiga.io/project/penpot/issue/3170)
@ -364,7 +440,7 @@
- Fix duplicate multi selected elements [Taiga #3155](https://tree.taiga.io/project/penpot/issue/3155)
- Fix add fills to artboard modify children [Taiga #3151](https://tree.taiga.io/project/penpot/issue/3151)
- Avoid numeric inputs to allow big numbers [Taiga #2858](https://tree.taiga.io/project/penpot/issue/2858)
- Fix component contex menu size [Taiga #2480](https://tree.taiga.io/project/penpot/issue/2480)
- Fix component context menu size [Taiga #2480](https://tree.taiga.io/project/penpot/issue/2480)
- Add shadow to artboard make it lose the fill [Taiga #3139](https://tree.taiga.io/project/penpot/issue/3139)
- Avoid numeric inputs to change its value without focusing them [Taiga #3140](https://tree.taiga.io/project/penpot/issue/3140)
- Fix comments modal when changing pages [Taiga #2597](https://tree.taiga.io/project/penpot/issue/2508)
@ -493,7 +569,7 @@
- Fix issue on handling empty content on boolean shapes
- Fix race condition issue on component renaming
- Handle EOF errors on writting streamed response
- Handle EOF errors on writing streamed response
- Handle EOF errors on websocket send/ping methods
- Disable parallel upload of file media on import (causes too much
contention on the rlimit subsistem that does not works as expected
@ -605,7 +681,7 @@
## 1.10.4-beta
### :sparkles: Enhacements
### :sparkles: Enhancements
- Allow parametrice file snapshoting interval
@ -617,7 +693,7 @@
## 1.10.3-beta
### :sparkles: Enhacements
### :sparkles: Enhancements
- Make all logging asynchronous, this avoid some overhead on jetty threads at cost of logging latency.
- Increase default session time to 15 days.
@ -953,7 +1029,7 @@
- Add better auth module logging.
- Add missing `email` scope to OIDC backend.
- Add missing cause prop on error loging.
- Add missing cause prop on error logging.
- Fix empty font-family handling on custom fonts page.
- Fix incorrect unicode code points handling on draft-to-penpot conversion.
- Fix some problems with paths.

View file

@ -99,7 +99,7 @@ Each commit should have:
- An entry on the CHANGES.md file if applicable, referencing the
github or taiga issue/user-story using the these same rules.
Examples of good commit messags:
Examples of good commit messages:
- :bug: Fix unexpected error on launching modal
- :bug: Set proper error message on generic error

View file

@ -85,4 +85,13 @@ We want to thank to the amazing people that help us! Thank you! You're the best!
* [Yaron](https://hosted.weblate.org/user/Yaron)
* [yrd](https://hosted.weblate.org/user/yrd)
* [YukiYuigishi](https://hosted.weblate.org/user/YukiYuigishi)
* [zcraber](https://hosted.weblate.org/user/zcraber)
* [zcraber](https://hosted.weblate.org/user/zcraber)
## Libraries & templates
* systxema
* plumilla
* victor crespo
* xtech
* candidexmedia
* merih güz
* klarr agency

View file

@ -1,12 +1,12 @@
{:deps
{penpot/common {:local/root "../common"}
org.clojure/clojure {:mvn/version "1.11.1"}
org.clojure/core.async {:mvn/version "1.5.648"}
org.clojure/core.async {:mvn/version "1.6.673"}
;; Logging
org.zeromq/jeromq {:mvn/version "0.5.2"}
org.zeromq/jeromq {:mvn/version "0.5.3"}
com.github.luben/zstd-jni {:mvn/version "1.5.2-4"}
com.github.luben/zstd-jni {:mvn/version "1.5.2-5"}
org.clojure/data.fressian {:mvn/version "1.0.0"}
io.prometheus/simpleclient {:mvn/version "0.16.0"}
@ -16,21 +16,20 @@
:exclusions [org.eclipse.jetty/jetty-server
org.eclipse.jetty/jetty-servlet]}
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
io.lettuce/lettuce-core {:mvn/version "6.2.0.RELEASE"}
io.lettuce/lettuce-core {:mvn/version "6.2.2.RELEASE"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
funcool/yetti
{:git/tag "v9.9"
:git/sha "f0a455d"
{:git/tag "v9.12"
:git/sha "51646d8"
:git/url "https://github.com/funcool/yetti.git"
:exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.828"}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.847"}
metosin/reitit-core {:mvn/version "0.5.18"}
org.postgresql/postgresql {:mvn/version "42.5.0"}
org.postgresql/postgresql {:mvn/version "42.5.1"}
com.zaxxer/HikariCP {:mvn/version "5.0.1"}
io.whitfin/siphash {:mvn/version "2.0.0"}
@ -38,7 +37,9 @@
buddy/buddy-hashers {:mvn/version "1.8.158"}
buddy/buddy-sign {:mvn/version "3.4.333"}
org.jsoup/jsoup {:mvn/version "1.15.1"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.2"}
org.jsoup/jsoup {:mvn/version "1.15.3"}
org.im4java/im4java
{:git/tag "1.4.0-penpot-2"
:git/sha "e2b3e16"
@ -50,11 +51,12 @@
integrant/integrant {:mvn/version "0.8.0"}
dawran6/emoji {:mvn/version "0.1.5"}
markdown-clj/markdown-clj {:mvn/version "1.11.3"}
markdown-clj/markdown-clj {:mvn/version "1.11.4"}
;; Pretty Print specs
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
software.amazon.awssdk/s3 {:mvn/version "2.17.278"}}
software.amazon.awssdk/s3 {:mvn/version "2.19.8"}
}
:paths ["src" "resources" "target/classes"]
:aliases
@ -62,15 +64,16 @@
{:extra-deps
{com.bhauman/rebel-readline {:mvn/version "RELEASE"}
org.clojure/tools.namespace {:mvn/version "RELEASE"}
org.clojure/test.check {:mvn/version "RELEASE"}
clojure-humanize/clojure-humanize {:mvn/version "0.2.2"}
org.clojure/data.csv {:mvn/version "RELEASE"}
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
mockery/mockery {:mvn/version "RELEASE"}}
:extra-paths ["test" "dev"]}
:build
{:extra-deps {io.github.clojure/tools.build {:git/tag "v0.8.3" :git/sha "0d20256"}}
{:extra-deps
{io.github.clojure/tools.build {:git/tag "v0.9.0" :git/sha "8c93e0c"}}
:ns-default build}
:test

View file

@ -12,9 +12,12 @@
[app.common.logging :as l]
[app.common.perf :as perf]
[app.common.pprint :as pp]
[app.common.spec :as us]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cfg]
[app.main :as main]
[app.srepl.helpers]
[app.srepl.main :as srepl]
[app.util.blob :as blob]
[app.util.fressian :as fres]
@ -26,10 +29,13 @@
[clojure.pprint :refer [pprint print-table]]
[clojure.repl :refer :all]
[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as sgen]
[clojure.stacktrace :as trace]
[clojure.test :as test]
[clojure.test.check.generators :as gen]
[clojure.tools.namespace.repl :as repl]
[clojure.walk :refer [macroexpand-all]]
[criterium.core :as crit]
[cuerdas.core :as str]
[datoteka.core]
[integrant.core :as ig]))
@ -42,24 +48,24 @@
(defmacro run-quick-bench
[& exprs]
`(with-progress-reporting (quick-bench (do ~@exprs) :verbose)))
`(crit/with-progress-reporting (crit/quick-bench (do ~@exprs) :verbose)))
(defmacro run-quick-bench'
[& exprs]
`(quick-bench (do ~@exprs)))
`(crit/quick-bench (do ~@exprs)))
(defmacro run-bench
[& exprs]
`(with-progress-reporting (bench (do ~@exprs) :verbose)))
`(crit/with-progress-reporting (crit/bench (do ~@exprs) :verbose)))
(defmacro run-bench'
[& exprs]
`(bench (do ~@exprs)))
`(crit/bench (do ~@exprs)))
;; --- Development Stuff
(defn- run-tests
([] (run-tests #"^app.*-test$"))
([] (run-tests #"^backend-tests.*-test$"))
([o]
(repl/refresh)
(cond
@ -74,19 +80,22 @@
(defn- start
[]
(alter-var-root #'system (fn [sys]
(when sys (ig/halt! sys))
(-> (merge main/system-config main/worker-config)
(ig/prep)
(ig/init))))
:started)
(try
(alter-var-root #'system (fn [sys]
(when sys (ig/halt! sys))
(-> (merge main/system-config main/worker-config)
(ig/prep)
(ig/init))))
:started
(catch Throwable cause
(ex/print-throwable cause))))
(defn- stop
[]
(alter-var-root #'system (fn [sys]
(when sys (ig/halt! sys))
nil))
:stoped)
:stopped)
(defn restart
[]
@ -100,12 +109,20 @@
(defn compression-bench
[data]
(let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))]
(let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))
v1 (time (humanize (alength (blob/encode data {:version 1}))))
v3 (time (humanize (alength (blob/encode data {:version 3}))))
v4 (time (humanize (alength (blob/encode data {:version 4}))))
v5 (time (humanize (alength (blob/encode data {:version 5}))))
v6 (time (humanize (alength (blob/encode data {:version 6}))))
]
(print-table
[{:v1 (humanize (alength (blob/encode data {:version 1})))
:v2 (humanize (alength (blob/encode data {:version 2})))
:v3 (humanize (alength (blob/encode data {:version 3})))
:v4 (humanize (alength (blob/encode data {:version 4})))
[{
:v1 v1
:v3 v3
:v4 v4
:v5 v5
:v6 v6
}])))
(defonce debug-tap

View file

@ -6,14 +6,21 @@
<div class="tags">
{% if item.deprecated %}
<span class="tag">
<span>Deprecated:</span>
<span>since v{{item.deprecated}}</span>,
<span>DEPRECATED</span>
</span>
{% endif %}
{% if item.auth %}
<span class="tag">
<span>AUTH</span>
</span>
{% endif %}
{% if item.webhook %}
<span class="tag">
<span>WEBHOOK</span>
</span>
{% endif %}
<span class="tag">
<span>Auth:</span>
<span>{% if item.auth %}YES{% else %}NO{% endif %}</span>
</span>
</div>
</div>
<div class="rpc-row-detail hidden">

View file

@ -77,7 +77,7 @@ Debug Main Page
<legend>Import binfile:</legend>
<desc>Import penpot file in binary
format. If <strong>overwrite</strong> is checked, all files will
be overwriten using the same ids found in the file instead of
be overwritten using the same ids found in the file instead of
generating new ones.</desc>
<form method="post" enctype="multipart/form-data" action="/dbg/file/import">
@ -90,7 +90,7 @@ Debug Main Page
<input type="checkbox" name="overwrite" />
<br />
<small>
Instead of creating a new file with all relations remaped,
Instead of creating a new file with all relations remapped,
reuses all ids and updates/overwrites the objects that
already exist in the database.
<strong>Warning, this operation should be used with caution.</strong>
@ -111,7 +111,7 @@ Debug Main Page
<input type="checkbox" name="ignore-index-errors" checked/>
<br />
<small>
Do not break on index lookup erros (remap operation).
Do not break on index lookup errors (remap operation).
Useful when importing a broken file that has broken
relations or missing pieces.
</small>

View file

@ -11,7 +11,8 @@ penpot - error list
<main class="horizontal-list">
<ul>
{% for item in items %}
<li><a href="/dbg/error/{{item.id}}">{{item.created-at}}</a></li>
<li><a class="date" href="/dbg/error/{{item.id}}">{{item.created-at}}</a>
<span class="title">{{item.hint|abbreviate:150}}</span></li>
{% endfor %}
</ul>
</main>

View file

@ -137,8 +137,6 @@ nav > div:not(:last-child) {
margin: 0px;
padding: 0px;
flex-direction: column;
flex-wrap: wrap;
height: calc(100vh - 75px);
justify-content: flex-start;
}
@ -151,19 +149,31 @@ nav > div:not(:last-child) {
margin: 0px 20px;
cursor: pointer;
display: flex;
justify-content: center;
border-radius: 3px;
}
.horizontal-list li:hover {
background-color: #e9e9e9;
}
.horizontal-list li > *:not(:last-child) {
margin-right: 10px;
}
.horizontal-list li > a {
text-decoration: none;
color: inherit;
}
.horizontal-list li > .date {
font-weight: 200;
color: #686868;
min-width: 210px;
}
form .row {
padding: 5px 0;
}

View file

@ -0,0 +1,9 @@
;; Example climit.edn file
;; Required: concurrency
;; Optional: queue-size, omitted means Integer/MAX_VALUE
{:update-file {:concurrency 1 :queue-size 3}
:auth {:concurrency 128}
:process-font {:concurrency 4 :queue-size 32}
:process-image {:concurrency 8 :queue-size 32}
:push-audit-events
{:concurrency 1 :queue-size 3}}

View file

@ -2,11 +2,13 @@
<Configuration status="info" monitorInterval="30">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
alwaysWriteExceptions="false" />
</Console>
<RollingFile name="main" fileName="logs/main.log" filePattern="logs/main-%i.log">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
alwaysWriteExceptions="false" />
<Policies>
<SizeBasedTriggeringPolicy size="50M"/>
</Policies>
@ -32,6 +34,8 @@
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />
<Logger name="app.rpc.climit" level="info" />
<Logger name="app.rpc.mutations.files" level="info" />
<Logger name="app.cli" level="debug" additivity="false">
<AppenderRef ref="console"/>

View file

@ -2,7 +2,8 @@
<Configuration status="info" monitorInterval="60">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
alwaysWriteExceptions="false" />
</Console>
</Appenders>

View file

@ -1,6 +1,10 @@
;; Example rlimit.edn file
^{:refresh "30s"}
{:default
[[:default :window "200000/h"]]
#{:query/teams}
[[:burst :bucket "5/1/5s"]]
#{:query/profile}
[[:burst :bucket "100/60/1m"]]}

View file

@ -13,9 +13,9 @@ cp ../CHANGES.md target/classes/changelog.md;
clojure -T:build jar;
mv target/penpot.jar target/dist/penpot.jar
cp scripts/run.template.sh target/dist/run.sh;
cp scripts/manage.template.sh target/dist/manage.sh;
cp scripts/manage.py target/dist/manage.py
chmod +x target/dist/run.sh;
chmod +x target/dist/manage.sh;
chmod +x target/dist/manage.py
# Prefetch
bb ./scripts/prefetch-templates.clj resources/app/onboarding.edn builtin-templates/

4
backend/scripts/kill-repl.sh Executable file
View file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
set -x
jcmd |grep "rebel" |sed -nE 's/^([0-9]+).*$/\1/p' | xargs kill -9

167
backend/scripts/manage.py Executable file
View file

@ -0,0 +1,167 @@
#!/usr/bin/env python3
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) KALEIDOS INC
import argparse
import json
import socket
import sys
from getpass import getpass
from urllib.parse import urlparse
PREPL_URI = "tcp://localhost:6063"
def get_prepl_conninfo():
uri_data = urlparse(PREPL_URI)
if uri_data.scheme != "tcp":
raise RuntimeException(f"invalid PREPL_URI: {PREPL_URI}")
if not isinstance(uri_data.netloc, str):
raise RuntimeException(f"invalid PREPL_URI: {PREPL_URI}")
host, port = uri_data.netloc.split(":", 2)
if port is None:
port = 6063
if isinstance(port, str):
port = int(port)
return host, port
def send_eval(expr):
host, port = get_prepl_conninfo()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.send(expr.encode("utf-8"))
s.send(b":repl/quit\n\n")
with s.makefile() as f:
result = json.load(f)
tag = result.get("tag", None)
if tag != "ret":
raise RuntimeException("unexpected response from PREPL")
return result.get("val", None), result.get("exception", None)
def encode(val):
return json.dumps(json.dumps(val))
def print_error(res):
for error in res["via"]:
print("ERR:", error["message"])
break
def run_cmd(params):
expr = "(app.srepl.ext/run-json-cmd {})".format(encode(params))
res, failed = send_eval(expr)
if failed:
print_error(res)
sys.exit(-1)
return res
def create_profile(fullname, email, password):
params = {
"cmd": "create-profile",
"params": {
"fullname": fullname,
"email": email,
"password": password
}
}
res = run_cmd(params)
print(f"Created: {res['email']} / {res['id']}")
def update_profile(email, fullname, password, is_active):
params = {
"cmd": "update-profile",
"params": {
"email": email,
"fullname": fullname,
"password": password,
"is_active": is_active
}
}
res = run_cmd(params)
if res is True:
print(f"Updated")
else:
print(f"No profile found with email {email}")
def derive_password(password):
params = {
"cmd": "derive-password",
"params": {
"password": password,
}
}
res = run_cmd(params)
print(f"Derived password: \"{res}\"")
available_commands = [
"create-profile",
"update-profile",
"derive-password"
]
parser = argparse.ArgumentParser(
description=(
"Penpot Command Line Interface (CLI)"
)
)
parser.add_argument("-V", "--version", action="version", version="Penpot CLI %%develop%%")
parser.add_argument("action", action="store", choices=available_commands)
parser.add_argument("-n", "--fullname", help="Fullname", action="store")
parser.add_argument("-e", "--email", help="Email", action="store")
parser.add_argument("-p", "--password", help="Password", action="store")
parser.add_argument("-c", "--connect", help="Connect to PREPL", action="store", default="tcp://localhost:6063")
args = parser.parse_args()
PREPL_URI = args.connect
if args.action == "create-profile":
email = args.email
password = args.password
fullname = args.fullname
if email is None:
email = input("Email: ")
if fullname is None:
fullname = input("Fullname: ")
if password is None:
password = getpass("Password: ")
create_profile(fullname, email, password)
elif args.action == "update-profile":
email = args.email
password = args.password
if email is None:
email = input("Email: ")
if password is None:
password = getpass("Password: ")
update_profile(email, None, password, None)
elif args.action == "derive-password":
password = args.password
if password is None:
password = getpass("Password: ")
derive_password(password)
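
For reference, a minimal usage sketch of the new manage.py CLI; it assumes the backend is running with the prepl server enabled (the enable-prepl-server flag added to the devenv run script in this commit) and listening on the default tcp://localhost:6063, and that passwords are entered interactively via getpass when -p is not given. The email and fullname values below are placeholders.

```
# Hypothetical invocations of backend/scripts/manage.py against a local devenv backend.
./manage.py create-profile -e user@example.com -n "Example User"   # prompts for the password
./manage.py update-profile -e user@example.com                     # prompts for the new password
./manage.py derive-password                                        # prints the derived password hash
# A non-default PREPL endpoint can be selected with -c/--connect:
./manage.py -c tcp://localhost:6063 derive-password
```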

View file

@ -1,19 +0,0 @@
#!/usr/bin/env bash
set +e
JAVA_CMD=$(type -p java)
set -e
if [[ ! -n "$JAVA_CMD" ]]; then
if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
JAVA_CMD="$JAVA_HOME/bin/java"
else
>&2 echo "Couldn't find 'java'. Please set JAVA_HOME."
exit 1
fi
fi
if [ -f ./environ ]; then
source ./environ
fi
exec $JAVA_CMD $JVM_OPTS -jar penpot.jar -m app.cli.manage "$@"

View file

@ -2,7 +2,21 @@
export PENPOT_HOST=devenv
export PENPOT_TENANT=dev
export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enable-transit-readable-response enable-demo-users disable-secure-session-cookies enable-rpc-rate-limit enable-warn-rpc-rate-limits enable-smtp"
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-backend-asserts \
enable-audit-log \
enable-transit-readable-response \
enable-demo-users \
disable-secure-session-cookies \
enable-smtp \
enable-prepl-server \
enable-urepl-server \
enable-rpc-climit \
enable-rpc-rlimit \
enable-soft-rpc-rlimit \
enable-webhooks \
enable-access-tokens";
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
# export PENPOT_DATABASE_USERNAME="penpot"
@ -16,8 +30,6 @@ export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enabl
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
export PENPOT_DEFAULT_RATE_LIMIT="default,window,10000/h"
# Initialize MINIO config
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin
mc admin user add penpot-s3 penpot-devenv penpot-devenv
@ -31,7 +43,7 @@ export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
export OPTIONS="
-A:dev:jmx-remote \
-A:jmx-remote -A:dev \
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
-J-XX:+UseG1GC \

View file

@ -1,19 +1,21 @@
#!/usr/bin/env bash
set +e
JAVA_CMD=$(type -p java)
set -e
if [[ ! -n "$JAVA_CMD" ]]; then
if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
JAVA_CMD="$JAVA_HOME/bin/java"
else
>&2 echo "Couldn't find 'java'. Please set JAVA_HOME."
exit 1
fi
if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
JAVA_CMD="$JAVA_HOME/bin/java"
else
set +e
JAVA_CMD=$(type -p java)
set -e
if [[ ! -n "$JAVA_CMD" ]]; then
>&2 echo "Couldn't find 'java'. Please set JAVA_HOME."
exit 1
fi
fi
fi
if [ -f ./environ ]; then
source ./environ
source ./environ
fi
set -x

View file

@ -2,7 +2,7 @@
export PENPOT_HOST=devenv
export PENPOT_TENANT=dev
export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enable-transit-readable-response enable-demo-users disable-secure-session-cookies enable-smtp"
export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enable-transit-readable-response enable-demo-users disable-secure-session-cookies enable-smtp enable-webhooks"
set -ex

26
backend/src/app/auth.clj Normal file
View file

@ -0,0 +1,26 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.auth
(:require
[buddy.hashers :as hashers]))
(defn derive-password
[password]
(hashers/derive password
{:alg :argon2id
:memory 16384
:iterations 20
:parallelism 2}))
(defn verify-password
[attempt password]
(try
(hashers/verify attempt password)
(catch Throwable _
{:update false
:valid false})))

View file

@ -41,15 +41,18 @@
(reduce-kv clojure.string/replace s replacements))
(defn- search-user
[{:keys [conn attrs base-dn] :as cfg} email]
(let [query (replace-several (:query cfg) ":username" email)
[{:keys [::conn base-dn] :as cfg} email]
(let [query (replace-several (:query cfg) ":username" email)
attrs [(:attrs-username cfg)
(:attrs-email cfg)
(:attrs-fullname cfg)]
params {:filter query
:sizelimit 1
:attributes attrs}]
(first (ldap/search conn base-dn params))))
(defn- retrieve-user
[{:keys [conn] :as cfg} {:keys [email password]}]
[{:keys [::conn] :as cfg} {:keys [email password]}]
(when-let [{:keys [dn] :as user} (search-user cfg email)]
(when (ldap/bind? conn dn password)
{:fullname (get user (-> cfg :attrs-fullname keyword))
@ -66,7 +69,7 @@
(defn authenticate
[cfg params]
(with-open [conn (connect cfg)]
(when-let [user (-> (assoc cfg :conn conn)
(when-let [user (-> (assoc cfg ::conn conn)
(retrieve-user params))]
(when-not (s/valid? ::info-data user)
(let [explain (s/explain-str ::info-data user)]
@ -100,17 +103,6 @@
:host (:host cfg) :port (:port cfg) :cause cause)
nil))))
(defn- prepare-attributes
[cfg]
(assoc cfg :attrs [(:attrs-username cfg)
(:attrs-email cfg)
(:attrs-fullname cfg)]))
(defmethod ig/init-key ::provider
[_ cfg]
(when (:enabled? cfg)
(some-> cfg try-connectivity prepare-attributes)))
(s/def ::enabled? ::us/boolean)
(s/def ::host ::cf/ldap-host)
(s/def ::port ::cf/ldap-port)
@ -124,8 +116,7 @@
(s/def ::attrs-fullname ::cf/ldap-attrs-fullname)
(s/def ::attrs-username ::cf/ldap-attrs-username)
(defmethod ig/pre-init-spec ::provider
[_]
(s/def ::provider-params
(s/keys :opt-un [::host ::port
::ssl ::tls
::enabled?
@ -135,3 +126,14 @@
::attrs-email
::attrs-username
::attrs-fullname]))
(s/def ::provider
(s/nilable ::provider-params))
(defmethod ig/pre-init-spec ::provider
[_]
(s/spec ::provider))
(defmethod ig/init-key ::provider
[_ cfg]
(when (:enabled? cfg)
(try-connectivity cfg)))

View file

@ -7,6 +7,7 @@
(ns app.auth.oidc
"OIDC client implementation."
(:require
[app.auth.oidc.providers :as-alias providers]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
@ -17,7 +18,9 @@
[app.db :as db]
[app.http.client :as http]
[app.http.middleware :as hmw]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.main :as-alias main]
[app.rpc.queries.profile :as profile]
[app.tokens :as tokens]
[app.util.json :as json]
@ -47,9 +50,11 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- discover-oidc-config
[{:keys [http-client]} {:keys [base-uri] :as opts}]
[cfg {:keys [::base-uri] :as opts}]
(let [discovery-uri (u/join base-uri ".well-known/openid-configuration")
response (ex/try (http/req! http-client {:method :get :uri (str discovery-uri)} {:sync? true}))]
response (ex/try! (http/req! cfg
{:method :get :uri (str discovery-uri)}
{:sync? true}))]
(cond
(ex/exception? response)
(do
@ -59,7 +64,7 @@
nil)
(= 200 (:status response))
(let [data (json/read (:body response))]
(let [data (json/decode (:body response))]
{:token-uri (get data :token_endpoint)
:auth-uri (get data :authorization_endpoint)
:user-uri (get data :userinfo_endpoint)})
@ -73,15 +78,15 @@
(defn- prepare-oidc-opts
[cfg]
(let [opts {:base-uri (:base-uri cfg)
:client-id (:client-id cfg)
:client-secret (:client-secret cfg)
:token-uri (:token-uri cfg)
:auth-uri (:auth-uri cfg)
:user-uri (:user-uri cfg)
:scopes (:scopes cfg #{"openid" "profile" "email"})
:roles-attr (:roles-attr cfg)
:roles (:roles cfg)
(let [opts {:base-uri (cf/get :oidc-base-uri)
:client-id (cf/get :oidc-client-id)
:client-secret (cf/get :oidc-client-secret)
:token-uri (cf/get :oidc-token-uri)
:auth-uri (cf/get :oidc-auth-uri)
:user-uri (cf/get :oidc-user-uri)
:scopes (cf/get :oidc-scopes #{"openid" "profile" "email"})
:roles-attr (cf/get :oidc-roles-attr)
:roles (cf/get :oidc-roles)
:name "oidc"}
opts (d/without-nils opts)]
@ -96,13 +101,12 @@
(some-> (discover-oidc-config cfg opts)
(merge opts {:discover? true}))))))
(defmethod ig/prep-key ::generic-provider
[_ cfg]
(d/without-nils cfg))
(defmethod ig/pre-init-spec ::providers/generic [_]
(s/keys :req [::http/client]))
(defmethod ig/init-key ::generic-provider
(defmethod ig/init-key ::providers/generic
[_ cfg]
(when (:enabled? cfg)
(when (contains? cf/flags :login-with-oidc)
(if-let [opts (prepare-oidc-opts cfg)]
(do
(l/info :hint "provider initialized"
@ -110,10 +114,10 @@
:method (if (:discover? opts) "discover" "manual")
:client-id (:client-id opts)
:client-secret (obfuscate-string (:client-secret opts))
:scopes (str/join "," (:scopes opts))
:auth-uri (:auth-uri opts)
:user-uri (:user-uri opts)
:token-uri (:token-uri opts)
:scopes (str/join "," (:scopes opts))
:auth-uri (:auth-uri opts)
:user-uri (:user-uri opts)
:token-uri (:token-uri opts)
:roles-attr (:roles-attr opts)
:roles (:roles opts))
opts)
@ -125,21 +129,17 @@
;; GOOGLE AUTH PROVIDER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod ig/prep-key ::google-provider
[_ cfg]
(d/without-nils cfg))
(defmethod ig/init-key ::google-provider
[_ cfg]
(let [opts {:client-id (:client-id cfg)
:client-secret (:client-secret cfg)
(defmethod ig/init-key ::providers/google
[_ _]
(let [opts {:client-id (cf/get :google-client-id)
:client-secret (cf/get :google-client-secret)
:scopes #{"openid" "email" "profile"}
:auth-uri "https://accounts.google.com/o/oauth2/v2/auth"
:token-uri "https://oauth2.googleapis.com/token"
:user-uri "https://openidconnect.googleapis.com/v1/userinfo"
:name "google"}]
(when (:enabled? cfg)
(when (contains? cf/flags :login-with-google)
(if (and (string? (:client-id opts))
(string? (:client-secret opts)))
(do
@ -158,29 +158,29 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- retrieve-github-email
[{:keys [http-client]} tdata info]
[cfg tdata info]
(or (some-> info :email p/resolved)
(-> (http/req! http-client {:uri "https://api.github.com/user/emails"
:headers {"Authorization" (dm/str (:type tdata) " " (:token tdata))}
:timeout 6000
:method :get})
(p/then (fn [{:keys [status body] :as response}]
(->> (http/req! cfg
{:uri "https://api.github.com/user/emails"
:headers {"Authorization" (dm/str (:type tdata) " " (:token tdata))}
:timeout 6000
:method :get})
(p/map (fn [{:keys [status body] :as response}]
(when-not (s/int-in-range? 200 300 status)
(ex/raise :type :internal
:code :unable-to-retrieve-github-emails
:hint "unable to retrieve github emails"
:http-status status
:http-body body))
(->> response :body json/read (filter :primary) first :email))))))
(->> response :body json/decode (filter :primary) first :email))))))
(defmethod ig/prep-key ::github-provider
[_ cfg]
(d/without-nils cfg))
(defmethod ig/pre-init-spec ::providers/github [_]
(s/keys :req [::http/client]))
(defmethod ig/init-key ::github-provider
(defmethod ig/init-key ::providers/github
[_ cfg]
(let [opts {:client-id (:client-id cfg)
:client-secret (:client-secret cfg)
(let [opts {:client-id (cf/get :github-client-id)
:client-secret (cf/get :github-client-secret)
:scopes #{"read:user" "user:email"}
:auth-uri "https://github.com/login/oauth/authorize"
:token-uri "https://github.com/login/oauth/access_token"
@ -191,7 +191,7 @@
;; retrieve emails.
:get-email-fn (partial retrieve-github-email cfg)}]
(when (:enabled? cfg)
(when (contains? cf/flags :login-with-github)
(if (and (string? (:client-id opts))
(string? (:client-secret opts)))
(do
@ -209,22 +209,18 @@
;; GITLAB AUTH PROVIDER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod ig/prep-key ::gitlab-provider
[_ cfg]
(d/without-nils cfg))
(defmethod ig/init-key ::gitlab-provider
[_ cfg]
(let [base (:base-uri cfg "https://gitlab.com")
(defmethod ig/init-key ::providers/gitlab
[_ _]
(let [base (cf/get :gitlab-base-uri "https://gitlab.com")
opts {:base-uri base
:client-id (:client-id cfg)
:client-secret (:client-secret cfg)
:client-id (cf/get :gitlab-client-id)
:client-secret (cf/get :gitlab-client-secret)
:scopes #{"openid" "profile" "email"}
:auth-uri (str base "/oauth/authorize")
:token-uri (str base "/oauth/token")
:user-uri (str base "/oauth/userinfo")
:name "gitlab"}]
(when (:enabled? cfg)
(when (contains? cf/flags :login-with-gitlab)
(if (and (string? (:client-id opts))
(string? (:client-secret opts)))
(do
@ -245,7 +241,7 @@
(defn- build-redirect-uri
[{:keys [provider] :as cfg}]
(let [public (u/uri (:public-uri cfg))]
(let [public (u/uri (cf/get :public-uri))]
(str (assoc public :path (str "/api/auth/oauth/" (:name provider) "/callback")))))
(defn- build-auth-uri
@ -268,7 +264,7 @@
props))
(defn retrieve-access-token
[{:keys [provider http-client] :as cfg} code]
[{:keys [provider] :as cfg} code]
(let [params {:client_id (:client-id provider)
:client_secret (:client-secret provider)
:code code
@ -279,25 +275,25 @@
"accept" "application/json"}
:uri (:token-uri provider)
:body (u/map->query-string params)}]
(p/then
(http/req! http-client req)
(fn [{:keys [status body] :as res}]
(if (= status 200)
(let [data (json/read body)]
{:token (get data :access_token)
:type (get data :token_type)})
(ex/raise :type :internal
:code :unable-to-retrieve-token
:http-status status
:http-body body))))))
(->> (http/req! cfg req)
(p/map (fn [{:keys [status body] :as res}]
(if (= status 200)
(let [data (json/decode body)]
{:token (get data :access_token)
:type (get data :token_type)})
(ex/raise :type :internal
:code :unable-to-retrieve-token
:http-status status
:http-body body)))))))
(defn- retrieve-user-info
[{:keys [provider http-client] :as cfg} tdata]
[{:keys [provider] :as cfg} tdata]
(letfn [(retrieve []
(http/req! http-client {:uri (:user-uri provider)
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
:timeout 6000
:method :get}))
(http/req! cfg
{:uri (:user-uri provider)
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
:timeout 6000
:method :get}))
(validate-response [response]
(when-not (s/int-in-range? 200 300 (:status response))
(ex/raise :type :internal
@ -320,7 +316,7 @@
(get info attr-kw)))
(process-response [response]
(p/let [info (-> response :body json/read)
(p/let [info (-> response :body json/decode)
email (get-email info)]
{:backend (:name provider)
:email email
@ -354,7 +350,7 @@
::props]))
(defn retrieve-info
[{:keys [sprops provider] :as cfg} {:keys [params] :as request}]
[{:keys [provider] :as cfg} {:keys [params] :as request}]
(letfn [(validate-oidc [info]
;; If the provider is OIDC, we can proceed to check
;; roles if they are defined.
@ -393,7 +389,7 @@
(let [state (get params :state)
code (get params :code)
state (tokens/verify sprops {:token state :iss :oauth})]
state (tokens/verify (::main/props cfg) {:token state :iss :oauth})]
(-> (p/resolved code)
(p/then #(retrieve-access-token cfg %))
(p/then #(retrieve-user-info cfg %))
@ -401,7 +397,7 @@
(p/then' (partial post-process state))))))
(defn- retrieve-profile
[{:keys [pool executor] :as cfg} info]
[{:keys [::db/pool ::wrk/executor] :as cfg} info]
(px/with-dispatch executor
(with-open [conn (db/open pool)]
(some->> (:email info)
@ -414,23 +410,23 @@
(yrs/response :status 302 :headers {"location" (str uri)}))
(defn- generate-error-redirect
[cfg error]
(let [uri (-> (u/uri (:public-uri cfg))
[_ error]
(let [uri (-> (u/uri (cf/get :public-uri))
(assoc :path "/#/auth/login")
(assoc :query (u/map->query-string {:error "unable-to-auth" :hint (ex-message error)})))]
(redirect-response uri)))
(defn- generate-redirect
[{:keys [sprops session audit] :as cfg} request info profile]
[{:keys [::session/session] :as cfg} request info profile]
(if profile
(let [sxf ((:create session) (:id profile))
(let [sxf (session/create-fn session (:id profile))
token (or (:invitation-token info)
(tokens/generate sprops {:iss :auth
:exp (dt/in-future "15m")
:profile-id (:id profile)}))
(tokens/generate (::main/props cfg)
{:iss :auth
:exp (dt/in-future "15m")
:profile-id (:id profile)}))
params {:token token}
uri (-> (u/uri (:public-uri cfg))
uri (-> (u/uri (cf/get :public-uri))
(assoc :path "/#/auth/verify-token")
(assoc :query (u/map->query-string params)))]
@ -438,13 +434,12 @@
(ex/raise :type :restriction
:code :profile-blocked))
(when (fn? audit)
(audit :cmd :submit
:type "command"
:name "login"
:profile-id (:id profile)
:ip-addr (audit/parse-client-ip request)
:props (audit/profile->props profile)))
(when-let [collector (::audit/collector cfg)]
(audit/submit! collector {:type "command"
:name "login"
:profile-id (:id profile)
:ip-addr (audit/parse-client-ip request)
:props (audit/profile->props profile)}))
(->> (redirect-response uri)
(sxf request)))
@ -453,19 +448,19 @@
:iss :prepared-register
:is-active true
:exp (dt/in-future {:hours 48}))
token (tokens/generate sprops info)
token (tokens/generate (::main/props cfg) info)
params (d/without-nils
{:token token
:fullname (:fullname info)})
uri (-> (u/uri (:public-uri cfg))
uri (-> (u/uri (cf/get :public-uri))
(assoc :path "/#/auth/register/validate")
(assoc :query (u/map->query-string params)))]
(redirect-response uri))))
(defn- auth-handler
[{:keys [sprops] :as cfg} {:keys [params] :as request}]
[cfg {:keys [params] :as request}]
(let [props (audit/extract-utm-params params)
state (tokens/generate sprops
state (tokens/generate (::main/props cfg)
{:iss :oauth
:invitation-token (:invitation-token params)
:props props
@ -491,7 +486,7 @@
{:compile
(fn [& _]
(fn [handler]
(fn [{:keys [providers] :as cfg} request]
(fn [{:keys [::providers] :as cfg} request]
(let [provider (some-> request :path-params :provider keyword)]
(if-let [provider (get providers provider)]
(handler (assoc cfg :provider provider) request)
@ -500,44 +495,57 @@
:provider provider
:hint "provider not configured"))))))})
(s/def ::public-uri ::us/not-empty-string)
(s/def ::http-client ::http/client)
(s/def ::session map?)
(s/def ::sprops map?)
(s/def ::providers map?)
(s/def ::client-id ::cf/oidc-client-id)
(s/def ::client-secret ::cf/oidc-client-secret)
(s/def ::base-uri ::cf/oidc-base-uri)
(s/def ::token-uri ::cf/oidc-token-uri)
(s/def ::auth-uri ::cf/oidc-auth-uri)
(s/def ::user-uri ::cf/oidc-user-uri)
(s/def ::scopes ::cf/oidc-scopes)
(s/def ::roles ::cf/oidc-roles)
(s/def ::roles-attr ::cf/oidc-roles-attr)
(s/def ::email-attr ::cf/oidc-email-attr)
(s/def ::name-attr ::cf/oidc-name-attr)
;; FIXME: migrate to qualified-keywords
(s/def ::provider
(s/keys :req-un [::client-id
::client-secret]
:opt-un [::base-uri
::token-uri
::auth-uri
::user-uri
::scopes
::roles
::roles-attr
::email-attr
::name-attr]))
(s/def ::providers (s/map-of ::us/keyword (s/nilable ::provider)))
(defmethod ig/pre-init-spec ::routes
[_]
(s/keys :req-un [::public-uri
::session
::sprops
::http-client
::providers
::db/pool
::wrk/executor]))
(s/keys :req [::http/client
::wrk/executor
::main/props
::db/pool
::providers
::session/session]))
(defmethod ig/init-key ::routes
[_ {:keys [executor session] :as cfg}]
[_ {:keys [::wrk/executor ::session/session] :as cfg}]
(let [cfg (update cfg :provider d/without-nils)]
["" {:middleware [[(:middleware session)]
[hmw/with-dispatch executor]
[hmw/with-config cfg]
[provider-lookup]
]}
;; We maintain the both URI prefixes for backward compatibility.
["/auth/oauth"
["/:provider"
{:handler auth-handler
:allowed-methods #{:post}}]
["/:provider/callback"
{:handler callback-handler
:allowed-methods #{:get}}]]
["/auth/oidc"
["/:provider"
{:handler auth-handler
:allowed-methods #{:post}}]
["/:provider/callback"
{:handler callback-handler
:allowed-methods #{:get}}]]]))

View file

@ -111,7 +111,7 @@
:id :verbosity
:default 1
:update-fn inc]
["-q" nil "Dont' print to console"
["-q" nil "Don't print to console"
:id :verbosity
:update-fn (constantly 0)]
["-h" "--help"]])

View file

@ -27,6 +27,10 @@
clojure.lang.IRecord
clojure.lang.IDeref)
(prefer-method print-method
clojure.lang.IPersistentMap
clojure.lang.IDeref)
(prefer-method pprint/simple-dispatch
clojure.lang.IPersistentMap
clojure.lang.IDeref)
@ -46,20 +50,20 @@
:database-username "penpot"
:database-password "penpot"
:default-blob-version 4
:default-blob-version 5
:loggers-zmq-uri "tcp://localhost:45556"
:rpc-rlimit-config (fs/path "resources/rlimit.edn")
:rpc-climit-config (fs/path "resources/climit.edn")
:file-change-snapshot-every 5
:file-change-snapshot-timeout "3h"
:public-uri "http://localhost:3449"
:host "localhost"
:tenant "main"
:tenant "default"
:redis-uri "redis://redis/0"
:srepl-host "127.0.0.1"
:srepl-port 6062
:assets-storage-backend :assets-fs
:storage-assets-fs-directory "assets"
@ -86,6 +90,7 @@
(s/def ::default-rpc-rlimit ::us/vector-of-strings)
(s/def ::rpc-rlimit-config ::fs/path)
(s/def ::rpc-climit-config ::fs/path)
(s/def ::media-max-file-size ::us/integer)
@ -93,13 +98,17 @@
(s/def ::telemetry-enabled ::us/boolean)
(s/def ::audit-log-archive-uri ::us/string)
(s/def ::audit-log-http-handler-concurrency ::us/integer)
(s/def ::admins ::us/set-of-strings)
(s/def ::admins ::us/set-of-valid-emails)
(s/def ::file-change-snapshot-every ::us/integer)
(s/def ::file-change-snapshot-timeout ::dt/duration)
(s/def ::default-executor-parallelism ::us/integer)
(s/def ::worker-executor-parallelism ::us/integer)
(s/def ::scheduled-executor-parallelism ::us/integer)
(s/def ::worker-default-parallelism ::us/integer)
(s/def ::worker-webhook-parallelism ::us/integer)
(s/def ::authenticated-cookie-domain ::us/string)
(s/def ::authenticated-cookie-name ::us/string)
@ -116,6 +125,16 @@
(s/def ::database-min-pool-size ::us/integer)
(s/def ::database-max-pool-size ::us/integer)
(s/def ::quotes-teams-per-profile ::us/integer)
(s/def ::quotes-projects-per-team ::us/integer)
(s/def ::quotes-invitations-per-team ::us/integer)
(s/def ::quotes-profiles-per-team ::us/integer)
(s/def ::quotes-files-per-project ::us/integer)
(s/def ::quotes-files-per-team ::us/integer)
(s/def ::quotes-font-variants-per-team ::us/integer)
(s/def ::quotes-comment-threads-per-file ::us/integer)
(s/def ::quotes-comments-per-file ::us/integer)
(s/def ::default-blob-version ::us/integer)
(s/def ::error-report-webhook ::us/string)
(s/def ::user-feedback-destination ::us/string)
@ -144,7 +163,6 @@
(s/def ::http-server-max-multipart-body-size ::us/integer)
(s/def ::http-server-io-threads ::us/integer)
(s/def ::http-server-worker-threads ::us/integer)
(s/def ::initial-project-skey ::us/string)
(s/def ::ldap-attrs-email ::us/string)
(s/def ::ldap-attrs-fullname ::us/string)
(s/def ::ldap-attrs-username ::us/string)
@ -168,11 +186,6 @@
(s/def ::redis-uri ::us/string)
(s/def ::registration-domain-whitelist ::us/set-of-strings)
(s/def ::semaphore-process-font ::us/integer)
(s/def ::semaphore-process-image ::us/integer)
(s/def ::semaphore-update-file ::us/integer)
(s/def ::semaphore-auth ::us/integer)
(s/def ::smtp-default-from ::us/string)
(s/def ::smtp-default-reply-to ::us/string)
(s/def ::smtp-host ::us/string)
@ -181,18 +194,15 @@
(s/def ::smtp-ssl ::us/boolean)
(s/def ::smtp-tls ::us/boolean)
(s/def ::smtp-username (s/nilable ::us/string))
(s/def ::srepl-host ::us/string)
(s/def ::srepl-port ::us/integer)
(s/def ::urepl-host ::us/string)
(s/def ::urepl-port ::us/integer)
(s/def ::prepl-host ::us/string)
(s/def ::prepl-port ::us/integer)
(s/def ::assets-storage-backend ::us/keyword)
(s/def ::fdata-storage-backend ::us/keyword)
(s/def ::storage-assets-fs-directory ::us/string)
(s/def ::storage-assets-s3-bucket ::us/string)
(s/def ::storage-assets-s3-region ::us/keyword)
(s/def ::storage-assets-s3-endpoint ::us/string)
(s/def ::storage-fdata-s3-bucket ::us/string)
(s/def ::storage-fdata-s3-region ::us/keyword)
(s/def ::storage-fdata-s3-prefix ::us/string)
(s/def ::storage-fdata-s3-endpoint ::us/string)
(s/def ::telemetry-uri ::us/string)
(s/def ::telemetry-with-taiga ::us/boolean)
(s/def ::tenant ::us/string)
@ -203,6 +213,7 @@
::admins
::allow-demo-users
::audit-log-archive-uri
::audit-log-http-handler-concurrency
::auth-token-cookie-name
::auth-token-cookie-max-age
::authenticated-cookie-name
@ -217,7 +228,9 @@
::default-rpc-rlimit
::error-report-webhook
::default-executor-parallelism
::worker-executor-parallelism
::scheduled-executor-parallelism
::worker-default-parallelism
::worker-webhook-parallelism
::file-change-snapshot-every
::file-change-snapshot-timeout
::user-feedback-destination
@ -246,7 +259,6 @@
::http-server-max-multipart-body-size
::http-server-io-threads
::http-server-worker-threads
::initial-project-skey
::ldap-attrs-email
::ldap-attrs-fullname
::ldap-attrs-username
@ -267,6 +279,17 @@
::profile-complaint-max-age
::profile-complaint-threshold
::public-uri
::quotes-teams-per-profile
::quotes-projects-per-team
::quotes-invitations-per-team
::quotes-profiles-per-team
::quotes-files-per-project
::quotes-files-per-team
::quotes-font-variants-per-team
::quotes-comment-threads-per-file
::quotes-comments-per-file
::redis-uri
::registration-domain-whitelist
::rpc-rlimit-config
@ -285,19 +308,16 @@
::smtp-tls
::smtp-username
::srepl-host
::srepl-port
::urepl-host
::urepl-port
::prepl-host
::prepl-port
::assets-storage-backend
::storage-assets-fs-directory
::storage-assets-s3-bucket
::storage-assets-s3-region
::storage-assets-s3-endpoint
::fdata-storage-backend
::storage-fdata-s3-bucket
::storage-fdata-s3-region
::storage-fdata-s3-prefix
::storage-fdata-s3-endpoint
::telemetry-enabled
::telemetry-uri
::telemetry-referer
@ -338,7 +358,8 @@
(when (ex/ex-info? e)
(println ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;")
(println "Error on validating configuration:")
(println (us/pretty-explain (ex-data e)))
(println (some-> e ex-data ex/explain))
(println (ex/explain (ex-data e)))
(println ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"))
(throw e))))

View file

@ -167,6 +167,7 @@
(instance? javax.sql.DataSource v))
(s/def ::pool pool?)
(s/def ::conn-or-pool some?)
(defn closed?
[pool]
@ -296,6 +297,7 @@
(let [row (get* ds table params opts)]
(when (and (not row) check-deleted?)
(ex/raise :type :not-found
:code :object-not-found
:table table
:hint "database object not found"))
row)))
@ -308,6 +310,7 @@
(let [row (get* ds table params (assoc opts :check-deleted? check-not-found))]
(when (and (not row) check-not-found)
(ex/raise :type :not-found
:code :object-not-found
:table table
:hint "database object not found"))
row)))
@ -352,10 +355,13 @@
[v]
(and (pgarray? v) (= "uuid" (.getBaseTypeName ^PgArray v))))
;; TODO rename to decode-pgarray-into
(defn decode-pgarray
([v] (some->> ^PgArray v .getArray vec))
([v in] (some->> ^PgArray v .getArray (into in)))
([v in xf] (some->> ^PgArray v .getArray (into in xf))))
([v] (decode-pgarray v []))
([v in]
(into in (some-> ^PgArray v .getArray)))
([v in xf]
(into in xf (some-> ^PgArray v .getArray))))
(defn pgarray->set
[v]
@ -417,47 +423,53 @@
(defn decode-json-pgobject
[^PGobject o]
(let [typ (.getType o)
val (.getValue o)]
(if (or (= typ "json")
(= typ "jsonb"))
(json/read val)
val)))
(when o
(let [typ (.getType o)
val (.getValue o)]
(if (or (= typ "json")
(= typ "jsonb"))
(json/decode val)
val))))
(defn decode-transit-pgobject
[^PGobject o]
(let [typ (.getType o)
val (.getValue o)]
(if (or (= typ "json")
(= typ "jsonb"))
(t/decode-str val)
val)))
(when o
(let [typ (.getType o)
val (.getValue o)]
(if (or (= typ "json")
(= typ "jsonb"))
(t/decode-str val)
val))))
(defn inet
[ip-addr]
(doto (org.postgresql.util.PGobject.)
(.setType "inet")
(.setValue (str ip-addr))))
(when ip-addr
(doto (org.postgresql.util.PGobject.)
(.setType "inet")
(.setValue (str ip-addr)))))
(defn decode-inet
[^PGobject o]
(if (= "inet" (.getType o))
(.getValue o)
nil))
(when o
(if (= "inet" (.getType o))
(.getValue o)
nil)))
(defn tjson
"Encode as transit json."
[data]
(doto (org.postgresql.util.PGobject.)
(.setType "jsonb")
(.setValue (t/encode-str data {:type :json-verbose}))))
(when data
(doto (org.postgresql.util.PGobject.)
(.setType "jsonb")
(.setValue (t/encode-str data {:type :json-verbose})))))
(defn json
"Encode as plain json."
[data]
(doto (org.postgresql.util.PGobject.)
(.setType "jsonb")
(.setValue (json/write-str data))))
(when data
(doto (org.postgresql.util.PGobject.)
(.setType "jsonb")
(.setValue (json/encode-str data)))))
;; --- Locks
@ -488,3 +500,18 @@
(let [n (xact-check-param n)
row (exec-one! conn ["select pg_try_advisory_xact_lock(?::bigint) as lock" n])]
(:lock row)))
(defn sql-exception?
[cause]
(instance? java.sql.SQLException cause))
(defn connection-error?
[cause]
(and (sql-exception? cause)
(contains? #{"08003" "08006" "08001" "08004"}
(.getSQLState ^java.sql.SQLException cause))))
(defn serialization-error?
[cause]
(and (sql-exception? cause)
(= "40001" (.getSQLState ^java.sql.SQLException cause))))

View file

@ -257,15 +257,17 @@
"Schedule an already defined email to be sent using asynchronously
using worker task."
[{:keys [::conn ::factory] :as context}]
(us/verify fn? factory)
(us/verify some? conn)
(let [email (factory context)]
(wrk/submit! (assoc email
::wrk/task :sendmail
::wrk/delay 0
::wrk/max-retries 4
::wrk/priority 200
::wrk/conn conn))))
(let [email (if factory
(factory context)
(dissoc context ::conn))]
(wrk/submit! (merge
{::wrk/task :sendmail
::wrk/delay 0
::wrk/max-retries 4
::wrk/priority 200
::wrk/conn conn}
email))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SENDMAIL FN / TASK HANDLER

View file

@ -11,6 +11,7 @@
[app.common.transit :as t]
[app.http.errors :as errors]
[app.http.middleware :as mw]
[app.http.session :as session]
[app.metrics :as mtx]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
@ -76,7 +77,7 @@
(defmethod ig/halt-key! ::server
[_ {:keys [server name port] :as cfg}]
(l/info :msg "stoping http server" :name name :port port)
(l/info :msg "stopping http server" :name name :port port)
(yt/stop! server))
(defn- not-found-handler
@ -90,9 +91,7 @@
(let [params (:path-params match)
result (:result match)
handler (or (:handler result) not-found-handler)
request (-> request
(assoc :path-params params)
(update :params merge params))]
request (assoc request :path-params params)]
(handler request respond raise))
(not-found-handler request respond raise)))
@ -115,7 +114,6 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::assets map?)
(s/def ::audit-handler fn?)
(s/def ::awsns-handler fn?)
(s/def ::debug-routes (s/nilable vector?))
(s/def ::doc-routes (s/nilable vector?))
@ -123,7 +121,7 @@
(s/def ::oauth map?)
(s/def ::oidc-routes (s/nilable vector?))
(s/def ::rpc-routes (s/nilable vector?))
(s/def ::session map?)
(s/def ::session ::session/session)
(s/def ::storage map?)
(s/def ::ws fn?)
@ -137,7 +135,6 @@
::awsns-handler
::debug-routes
::oidc-routes
::audit-handler
::rpc-routes
::doc-routes]))
@ -148,13 +145,14 @@
[mw/format-response]
[mw/params]
[mw/parse-request]
[session/middleware-1 session]
[mw/errors errors/handle]
[mw/restrict-methods]]}
["/metrics" {:handler (::mtx/handler metrics)
:allowed-methods #{:get}}]
["/assets" {:middleware [(:middleware session)]}
["/assets" {:middleware [[session/middleware-2 session]]}
["/by-id/:id" {:handler (:objects-handler assets)}]
["/by-file-media-id/:id" {:handler (:file-objects-handler assets)}]
["/by-file-media-id/:id/thumbnail" {:handler (:file-thumbnails-handler assets)}]]
@ -165,14 +163,12 @@
["/sns" {:handler (:awsns-handler cfg)
:allowed-methods #{:post}}]]
["/ws/notifications" {:middleware [(:middleware session)]
["/ws/notifications" {:middleware [[session/middleware-2 session]]
:handler ws
:allowed-methods #{:get}}]
["/api" {:middleware [[mw/cors]
[(:middleware session)]]}
["/audit/events" {:handler (:audit-handler cfg)
:allowed-methods #{:post}}]
[session/middleware-2 session]]}
["/feedback" {:handler feedback
:allowed-methods #{:post}}]
(:doc-routes cfg)

View file

@ -12,7 +12,9 @@
[app.db :as db]
[app.db.sql :as sql]
[app.http.client :as http]
[app.main :as-alias main]
[app.tokens :as tokens]
[app.worker :as-alias wrk]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
@ -26,21 +28,21 @@
(declare parse-notification)
(declare process-report)
(s/def ::http-client ::http/client)
(s/def ::sprops map?)
(defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool ::http-client ::sprops]))
(s/keys :req [::http/client
::main/props
::db/pool
::wrk/executor]))
(defmethod ig/init-key ::handler
[_ {:keys [executor] :as cfg}]
[_ {:keys [::wrk/executor] :as cfg}]
(fn [request respond _]
(let [data (-> request yrq/body slurp)]
(px/run! executor #(handle-request cfg data)))
(respond (yrs/response 200))))
(defn handle-request
[{:keys [http-client] :as cfg} data]
[cfg data]
(try
(let [body (parse-json data)
mtype (get body "Type")]
@ -49,7 +51,7 @@
(let [surl (get body "SubscribeURL")
stopic (get body "TopicArn")]
(l/info :action "subscription received" :topic stopic :url surl)
(http/req! http-client {:uri surl :method :post :timeout 10000} {:sync? true}))
(http/req! cfg {:uri surl :method :post :timeout 10000} {:sync? true}))
(= mtype "Notification")
(when-let [message (parse-json (get body "Message"))]
@ -100,10 +102,11 @@
(get mail "headers")))
(defn- extract-identity
[{:keys [sprops]} headers]
[cfg headers]
(let [tdata (get headers "x-penpot-data")]
(when-not (str/empty? tdata)
(let [result (tokens/verify sprops {:token tdata :iss :profile-identity})]
(let [sprops (::main/props cfg)
result (tokens/verify sprops {:token tdata :iss :profile-identity})]
(:profile-id result)))))
(defn- parse-notification
@ -136,7 +139,7 @@
(j/read-value v)))
(defn- register-bounce-for-profile
[{:keys [pool]} {:keys [type kind profile-id] :as report}]
[{:keys [::db/pool]} {:keys [type kind profile-id] :as report}]
(when (= kind "permanent")
(db/with-atomic [conn pool]
(db/insert! conn :profile-complaint-report
@ -165,7 +168,7 @@
{:id profile-id}))))))
(defn- register-complaint-for-profile
[{:keys [pool]} {:keys [type profile-id] :as report}]
[{:keys [::db/pool]} {:keys [type profile-id] :as report}]
(db/with-atomic [conn pool]
(db/insert! conn :profile-complaint-report
{:profile-id profile-id

View file

@ -7,34 +7,45 @@
(ns app.http.client
"Http client abstraction layer."
(:require
[app.common.spec :as us]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[java-http-clj.core :as http]))
[java-http-clj.core :as http]
[promesa.core :as p])
(:import
java.net.http.HttpClient))
(s/def ::client fn?)
(s/def ::client #(instance? HttpClient %))
(s/def ::client-holder
(s/keys :req [::client]))
(defmethod ig/pre-init-spec :app.http/client [_]
(s/keys :req-un [::wrk/executor]))
(defmethod ig/pre-init-spec ::client [_]
(s/keys :req [::wrk/executor]))
(defmethod ig/init-key :app.http/client
[_ {:keys [executor] :as cfg}]
(let [client (http/build-client {:executor executor
:connect-timeout 30000 ;; 10s
:follow-redirects :always})]
(with-meta
(fn send
([req] (send req {}))
([req {:keys [response-type sync?] :or {response-type :string sync? false}}]
(if sync?
(http/send req {:client client :as response-type})
(http/send-async req {:client client :as response-type}))))
{::client client})))
(defmethod ig/init-key ::client
[_ {:keys [::wrk/executor] :as cfg}]
(http/build-client {:executor executor
:connect-timeout 30000 ;; 10s
:follow-redirects :always}))
(defn send!
([client req] (send! client req {}))
([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
(us/assert! ::client client)
(if sync?
(http/send req {:client client :as response-type})
(try
(http/send-async req {:client client :as response-type})
(catch Throwable cause
(p/rejected cause))))))
(defn req!
"A convencience toplevel function for gradual migration to a new API
convention."
([client request]
(client request))
([client request options]
(client request options)))
([{:keys [::client] :as holder} request]
(us/assert! ::client-holder holder)
(send! client request {}))
([{:keys [::client] :as holder} request options]
(us/assert! ::client-holder holder)
(send! client request options)))
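;; An illustrative call of the new API: the caller passes a map that holds
;; the client under the ::client key (for example the system config map)
;; together with a plain request map. The URI below is a placeholder.
(comment
  (def holder {::client (http/build-client {:connect-timeout 30000})})

  ;; synchronous request, returns the response map directly
  (req! holder {:method :get :uri "https://example.com/health"} {:sync? true})

  ;; asynchronous request, returns a promise of the response
  (req! holder {:method :get :uri "https://example.com/health"}))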

View file

@ -14,8 +14,9 @@
[app.config :as cf]
[app.db :as db]
[app.http.middleware :as mw]
[app.http.session :as session]
[app.rpc.commands.binfile :as binf]
[app.rpc.mutations.files :refer [create-file]]
[app.rpc.commands.files.create :refer [create-file]]
[app.rpc.queries.profile :as profile]
[app.util.blob :as blob]
[app.util.template :as tmpl]
@ -243,15 +244,19 @@
(yrs/response 404 "not found")))))
(def sql:error-reports
"select id, created_at from server_error_report order by created_at desc limit 100")
"SELECT id, created_at,
content->>'~:hint' AS hint
FROM server_error_report
ORDER BY created_at DESC
LIMIT 100")
(defn error-list-handler
[{:keys [pool]} request]
(when-not (authorized? pool request)
(ex/raise :type :authentication
:code :only-admins-allowed))
(let [items (db/exec! pool [sql:error-reports])
items (map #(update % :created-at dt/format-instant :rfc1123) items)]
(let [items (->> (db/exec! pool [sql:error-reports])
(map #(update % :created-at dt/format-instant :rfc1123)))]
(yrs/response :status 200
:body (-> (io/resource "app/templates/error-list.tmpl")
(tmpl/render {:items items}))
@ -377,17 +382,15 @@
:code :only-admins-allowed))))))})
(s/def ::session map?)
(defmethod ig/pre-init-spec ::routes [_]
(s/keys :req-un [::db/pool ::wrk/executor ::session]))
(s/keys :req-un [::db/pool ::wrk/executor ::session/session]))
(defmethod ig/init-key ::routes
[_ {:keys [session pool executor] :as cfg}]
[["/readyz" {:middleware [[mw/with-dispatch executor]
[mw/with-config cfg]]
:handler health-handler}]
["/dbg" {:middleware [[(:middleware session)]
["/dbg" {:middleware [[session/middleware-2 session]
[with-authorization pool]
[mw/with-dispatch executor]
[mw/with-config cfg]]}

View file

@ -7,9 +7,9 @@
(ns app.http.errors
"A errors handling for the http server."
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.http :as-alias http]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
@ -26,16 +26,18 @@
(defn get-context
[request]
(merge
*context*
{:path (:path request)
:method (:method request)
:params (:params request)
:ip-addr (parse-client-ip request)
:profile-id (:profile-id request)}
(let [headers (:headers request)]
{:user-agent (get headers "user-agent")
:frontend-version (get headers "x-frontend-version" "unknown")})))
(let [claims (:session-token-claims request)]
(merge
*context*
{:path (:path request)
:method (:method request)
:params (:params request)
:ip-addr (parse-client-ip request)}
(d/without-nils
{:user-agent (yrq/get-header request "user-agent")
:frontend-version (or (yrq/get-header request "x-frontend-version")
"unknown")
:profile-id (:uid claims)}))))
(defmulti handle-exception
(fn [err & _rest]
@ -61,7 +63,7 @@
(let [{:keys [code] :as data} (ex-data err)]
(cond
(= code :spec-validation)
(let [explain (us/pretty-explain data)]
(let [explain (ex/explain data)]
(yrs/response :status 400
:body (-> data
(dissoc ::s/problems ::s/value)
@ -75,11 +77,11 @@
(defmethod handle-exception :assertion
[error request]
(let [edata (ex-data error)
explain (us/pretty-explain edata)]
(l/error ::l/raw (str (ex-message error) "\n" explain)
::l/context (get-context request)
:cause error)
(let [edata (ex-data error)
explain (ex/explain edata)]
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(yrs/response :status 500
:body {:type :server-error
:code :assertion
@ -91,12 +93,29 @@
[err _]
(yrs/response 404 (ex-data err)))
(defmethod handle-exception :internal
[error request]
(let [{:keys [code] :as edata} (ex-data error)]
(cond
(= :concurrency-limit-reached code)
(yrs/response 429)
:else
(do
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(yrs/response 500 {:type :server-error
:code :unhandled
:hint (ex-message error)
:data edata})))))
(defmethod handle-exception org.postgresql.util.PSQLException
[error request]
(let [state (.getSQLState ^java.sql.SQLException error)]
(l/error ::l/raw (ex-message error)
::l/context (get-context request)
:cause error)
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(cond
(= state "57014")
(yrs/response 504 {:type :server-error
@ -121,9 +140,9 @@
;; This means that exception is not a controlled exception.
(nil? edata)
(do
(l/error ::l/raw (ex-message error)
::l/context (get-context request)
:cause error)
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(yrs/response 500 {:type :server-error
:code :unexpected
:hint (ex-message error)}))
@ -139,9 +158,9 @@
:else
(do
(l/error ::l/raw (ex-message error)
::l/context (get-context request)
:cause error)
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(yrs/response 500 {:type :server-error
:code :unhandled
:hint (ex-message error)

View file

@ -19,6 +19,7 @@
[yetti.request :as yrq]
[yetti.response :as yrs])
(:import
com.fasterxml.jackson.core.JsonParseException
com.fasterxml.jackson.core.io.JsonEOFException
io.undertow.server.RequestTooBigException
java.io.OutputStream))
@ -31,6 +32,12 @@
{:name ::params
:compile (constantly ymw/wrap-params)})
(def ^:private json-mapper
(json/mapper
{:encode-key-fn str/camel
:decode-key-fn (comp keyword str/kebab)
:pretty true}))
(defn wrap-parse-request
[handler]
(letfn [(process-request [request]
@ -45,7 +52,7 @@
(str/starts-with? header "application/json")
(with-open [is (yrq/body request)]
(let [params (json/read is)]
(let [params (json/decode is json-mapper)]
(-> request
(assoc :body-params params)
(update :params merge params))))
@ -60,21 +67,23 @@
:code :request-body-too-large
:hint (ex-message cause)))
(instance? JsonEOFException cause)
(or (instance? JsonEOFException cause)
(instance? JsonParseException cause))
(raise (ex/error :type :validation
:code :malformed-json
:hint (ex-message cause)))
:hint (ex-message cause)
:cause cause))
:else
(raise cause)))]
(fn [request respond raise]
(when-let [request (try
(process-request request)
(catch RuntimeException cause
(handle-error raise (or (.getCause cause) cause)))
(catch Throwable cause
(handle-error raise cause)))]
(handler request respond raise)))))
(let [request (ex/try! (process-request request))]
(if (ex/exception? request)
(if (instance? RuntimeException request)
(handle-error raise (or (ex/cause request) request))
(handle-error raise request))
(handler request respond raise))))))
(def parse-request
{:name ::parse-request
@ -113,7 +122,32 @@
(finally
(.close ^OutputStream output-stream))))))
(format-response [response request]
(json-streamable-body [data]
(reify yrs/StreamableResponseBody
(-write-body-to-stream [_ _ output-stream]
(try
(with-open [bos (buffered-output-stream output-stream buffer-size)]
(json/write! bos data json-mapper))
(catch java.io.IOException _cause
;; Do nothing, EOF means the client closed the connection abruptly
nil)
(catch Throwable cause
(l/warn :hint "unexpected error on encoding response"
:cause cause))
(finally
(.close ^OutputStream output-stream))))))
(format-response-with-json [response _]
(let [body (yrs/body response)]
(if (or (boolean? body) (coll? body))
(-> response
(update :headers assoc "content-type" "application/json")
(assoc :body (json-streamable-body body)))
response)))
(format-response-with-transit [response request]
(let [body (yrs/body response)]
(if (or (boolean? body) (coll? body))
(let [qs (yrq/query request)
@ -126,6 +160,20 @@
(assoc :body (transit-streamable-body body opts))))
response)))
(format-response [response request]
(let [accept (yrq/get-header request "accept")]
(cond
(or (= accept "application/transit+json")
(str/includes? accept "application/transit+json"))
(format-response-with-transit response request)
(or (= accept "application/json")
(str/includes? accept "application/json"))
(format-response-with-json response request)
:else
(format-response-with-transit response request))))
(process-response [response request]
(cond-> response
(map? response) (format-response request)))]

View file

@ -5,12 +5,14 @@
;; Copyright (c) KALEIDOS INC
(ns app.http.session
(:refer-clojure :exclude [read])
(:require
[app.common.data :as d]
[app.common.logging :as l]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.main :as-alias main]
[app.tokens :as tokens]
[app.util.time :as dt]
[app.worker :as wrk]
@ -20,6 +22,10 @@
[promesa.exec :as px]
[yetti.request :as yrq]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; A default cookie name for storing the session.
(def default-auth-token-cookie-name "auth-token")
@ -33,35 +39,55 @@
;; Default age for automatic session renewal
(def default-renewal-max-age (dt/duration {:hours 6}))
(defprotocol ISessionStore
(read-session [store key])
(write-session [store key data])
(update-session [store data])
(delete-session [store key]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PROTOCOLS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- make-database-store
[{:keys [pool sprops executor]}]
(reify ISessionStore
(read-session [_ token]
(defprotocol ISessionManager
(read [_ key])
(decode [_ key])
(write! [_ key data])
(update! [_ data])
(delete! [_ key]))
(s/def ::session #(satisfies? ISessionManager %))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; STORAGE IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- prepare-session-params
[props data]
(let [profile-id (:profile-id data)
user-agent (:user-agent data)
created-at (or (:created-at data) (dt/now))
token (tokens/generate props {:iss "authentication"
:iat created-at
:uid profile-id})]
{:user-agent user-agent
:profile-id profile-id
:created-at created-at
:updated-at created-at
:id token}))
(defn- database-manager
[{:keys [::db/pool ::wrk/executor ::main/props]}]
(reify ISessionManager
(read [_ token]
(px/with-dispatch executor
(db/exec-one! pool (sql/select :http-session {:id token}))))
(write-session [_ _ data]
(decode [_ token]
(px/with-dispatch executor
(let [profile-id (:profile-id data)
user-agent (:user-agent data)
created-at (or (:created-at data) (dt/now))
token (tokens/generate sprops {:iss "authentication"
:iat created-at
:uid profile-id})
params {:user-agent user-agent
:profile-id profile-id
:created-at created-at
:updated-at created-at
:id token}]
(db/insert! pool :http-session params))))
(tokens/verify props {:token token :iss "authentication"})))
(update-session [_ data]
(write! [_ _ data]
(px/with-dispatch executor
(let [params (prepare-session-params props data)]
(db/insert! pool :http-session params)
params)))
(update! [_ data]
(let [updated-at (dt/now)]
(px/with-dispatch executor
(db/update! pool :http-session
@ -69,83 +95,154 @@
{:id (:id data)})
(assoc data :updated-at updated-at))))
(delete-session [_ token]
(delete! [_ token]
(px/with-dispatch executor
(db/delete! pool :http-session {:id token})
nil))))
(defn make-inmemory-store
[{:keys [sprops]}]
(defn inmemory-manager
[{:keys [::wrk/executor ::main/props]}]
(let [cache (atom {})]
(reify ISessionStore
(read-session [_ token]
(reify ISessionManager
(read [_ token]
(p/do (get @cache token)))
(write-session [_ _ data]
(p/do
(let [profile-id (:profile-id data)
user-agent (:user-agent data)
created-at (or (:created-at data) (dt/now))
token (tokens/generate sprops {:iss "authentication"
:iat created-at
:uid profile-id})
params {:user-agent user-agent
:created-at created-at
:updated-at created-at
:profile-id profile-id
:id token}]
(decode [_ token]
(px/with-dispatch executor
(tokens/verify props {:token token :iss "authentication"})))
(write! [_ _ data]
(p/do
(let [{:keys [token] :as params} (prepare-session-params props data)]
(swap! cache assoc token params)
params)))
(update-session [_ data]
(let [updated-at (dt/now)]
(swap! cache update (:id data) assoc :updated-at updated-at)
(assoc data :updated-at updated-at)))
(update! [_ data]
(p/do
(let [updated-at (dt/now)]
(swap! cache update (:id data) assoc :updated-at updated-at)
(assoc data :updated-at updated-at))))
(delete-session [_ token]
(delete! [_ token]
(p/do
(swap! cache dissoc token)
nil)))))
(s/def ::sprops map?)
(defmethod ig/pre-init-spec ::store [_]
(s/keys :req-un [::db/pool ::wrk/executor ::sprops]))
(defmethod ig/pre-init-spec ::manager [_]
(s/keys :req [::db/pool ::wrk/executor ::main/props]))
(defmethod ig/init-key ::store
[_ {:keys [pool] :as cfg}]
(defmethod ig/init-key ::manager
[_ {:keys [::db/pool] :as cfg}]
(if (db/read-only? pool)
(make-inmemory-store cfg)
(make-database-store cfg)))
(inmemory-manager cfg)
(database-manager cfg)))
(defmethod ig/halt-key! ::store
(defmethod ig/halt-key! ::manager
[_ _])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MANAGER IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare assign-auth-token-cookie)
(declare assign-authenticated-cookie)
(declare clear-auth-token-cookie)
(declare clear-authenticated-cookie)
(defn create-fn
[manager profile-id]
(fn [request response]
(let [uagent (yrq/get-header request "user-agent")
params {:profile-id profile-id
:user-agent uagent}]
(-> (write! manager nil params)
(p/then (fn [session]
(l/trace :hint "create" :profile-id profile-id)
(-> response
(assign-auth-token-cookie session)
(assign-authenticated-cookie session))))))))
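;; A small usage sketch: once a profile is authenticated, a login handler
;; could build the cookie-assigning function with `create-fn` and apply it
;; to the outgoing response. The `manager`, `profile` and `request`
;; bindings below are placeholders for the example, not values defined in
;; this namespace.
(comment
  (let [attach-session (create-fn manager (:id profile))]
    ;; returns a promise of the response with the auth cookies assigned
    (attach-session request {:status 200 :body profile})))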
(defn delete-fn
[manager]
(letfn [(delete [{:keys [profile-id] :as request}]
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (yrq/get-cookie request cname)]
(l/trace :hint "delete" :profile-id profile-id)
(some->> (:value cookie) (delete! manager))))]
(fn [request response]
(p/do
(delete request)
(-> response
(assoc :status 204)
(assoc :body nil)
(clear-auth-token-cookie)
(clear-authenticated-cookie))))))
(def middleware-1
(letfn [(decode-cookie [manager cookie]
(if-let [value (:value cookie)]
(decode manager value)
(p/resolved nil)))
(wrap-handler [manager handler request respond raise]
(let [cookie (some->> (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
(yrq/get-cookie request))]
(->> (decode-cookie manager cookie)
(p/fnly (fn [claims _]
(cond-> request
(some? claims) (assoc :session-token-claims claims)
:always (handler respond raise)))))))]
{:name :session-1
:compile (fn [& _]
(fn [handler manager]
(partial wrap-handler manager handler)))}))
(def middleware-2
(letfn [(wrap-handler [manager handler request respond raise]
(-> (retrieve-session manager request)
(p/finally (fn [session cause]
(cond
(some? cause)
(raise cause)
(nil? session)
(handler request respond raise)
:else
(let [request (-> request
(assoc :profile-id (:profile-id session))
(assoc :session-id (:id session)))
respond (cond-> respond
(renew-session? session)
(wrap-respond manager session))]
(handler request respond raise)))))))
(retrieve-session [manager request]
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (yrq/get-cookie request cname)]
(some->> (:value cookie) (read manager))))
(renew-session? [{:keys [updated-at] :as session}]
(and (dt/instant? updated-at)
(let [elapsed (dt/diff updated-at (dt/now))]
(neg? (compare default-renewal-max-age elapsed)))))
;; Wrap respond with session renewal code
(wrap-respond [respond manager session]
(fn [response]
(p/let [session (update! manager session)]
(-> response
(assign-auth-token-cookie session)
(assign-authenticated-cookie session)
(respond)))))]
{:name :session-2
:compile (fn [& _]
(fn [handler manager]
(partial wrap-handler manager handler)))}))
;; --- IMPL
(defn- create-session!
[store profile-id user-agent]
(let [params {:user-agent user-agent
:profile-id profile-id}]
(write-session store nil params)))
(defn- update-session!
[store session]
(update-session store session))
(defn- delete-session!
[store {:keys [cookies] :as request}]
(let [name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
(when-let [token (get-in cookies [name :value])]
(delete-session store token))))
(defn- retrieve-session
[store request]
(let [cookie-name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
(when-let [cookie (yrq/get-cookie request cookie-name)]
(read-session store (:value cookie)))))
(defn assign-auth-token-cookie
(defn- assign-auth-token-cookie
[response {token :id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at (or updated-at (dt/now))
@ -164,7 +261,7 @@
:secure secure?}]
(update response :cookies assoc name cookie)))
(defn assign-authenticated-cookie
(defn- assign-authenticated-cookie
[response {updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at (or updated-at (dt/now))
@ -185,96 +282,23 @@
(string? domain)
(update :cookies assoc name cookie))))
(defn clear-auth-token-cookie
(defn- clear-auth-token-cookie
[response]
(let [name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
(update response :cookies assoc name {:path "/" :value "" :max-age -1})))
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
(update response :cookies assoc cname {:path "/" :value "" :max-age -1})))
(defn- clear-authenticated-cookie
[response]
(let [name (cf/get :authenticated-cookie-name default-authenticated-cookie-name)
(let [cname (cf/get :authenticated-cookie-name default-authenticated-cookie-name)
domain (cf/get :authenticated-cookie-domain)]
(cond-> response
(string? domain)
(update :cookies assoc name {:domain domain :path "/" :value "" :max-age -1}))))
(defn- make-middleware
[{:keys [store] :as cfg}]
(letfn [;; Check if time reached for automatic session renewal
(renew-session? [{:keys [updated-at] :as session}]
(and (dt/instant? updated-at)
(let [elapsed (dt/diff updated-at (dt/now))]
(neg? (compare default-renewal-max-age elapsed)))))
;; Wrap respond with session renewal code
(wrap-respond [respond session]
(fn [response]
(p/let [session (update-session! store session)]
(-> response
(assign-auth-token-cookie session)
(assign-authenticated-cookie session)
(respond)))))]
{:name :session
:compile (fn [& _]
(fn [handler]
(fn [request respond raise]
(try
(-> (retrieve-session store request)
(p/finally (fn [session cause]
(cond
(some? cause)
(raise cause)
(nil? session)
(handler request respond raise)
:else
(let [request (-> request
(assoc :profile-id (:profile-id session))
(assoc :session-id (:id session)))
respond (cond-> respond
(renew-session? session)
(wrap-respond session))]
(handler request respond raise))))))
(catch Throwable cause
(raise cause))))))}))
(update :cookies assoc cname {:domain domain :path "/" :value "" :max-age -1}))))
;; --- STATE INIT: SESSION
(s/def ::store #(satisfies? ISessionStore %))
(defmethod ig/pre-init-spec :app.http/session [_]
(s/keys :req-un [::store]))
(defmethod ig/prep-key :app.http/session
[_ cfg]
(d/merge {:buffer-size 128}
(d/without-nils cfg)))
(defmethod ig/init-key :app.http/session
[_ {:keys [store] :as cfg}]
(-> cfg
(assoc :middleware (make-middleware cfg))
(assoc :create (fn [profile-id]
(fn [request response]
(p/let [uagent (yrq/get-header request "user-agent")
session (create-session! store profile-id uagent)]
(-> response
(assign-auth-token-cookie session)
(assign-authenticated-cookie session))))))
(assoc :delete (fn [request response]
(p/do
(delete-session! store request)
(-> response
(assoc :status 204)
(assoc :body nil)
(clear-auth-token-cookie)
(clear-authenticated-cookie)))))))
;; --- STATE INIT: SESSION GC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: SESSION GC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare sql:delete-expired)

View file

@ -8,6 +8,7 @@
"Services related to the user activity (audit log)."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
@ -15,19 +16,27 @@
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.loggers.audit.tasks :as-alias tasks]
[app.loggers.webhooks :as-alias webhooks]
[app.main :as-alias main]
[app.metrics :as mtx]
[app.rpc :as-alias rpc]
[app.tokens :as tokens]
[app.util.async :as aa]
[app.util.retry :as rtry]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[lambdaisland.uri :as u]
[promesa.core :as p]
[promesa.exec :as px]
[yetti.request :as yrq]
[yetti.response :as yrs]))
[yetti.request :as yrq]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn parse-client-ip
[request]
@ -49,10 +58,22 @@
(assoc (->> sk str/kebab (keyword "penpot")) v))))]
(reduce-kv process-param {} params)))
(def ^:private
profile-props
[:id
:is-active
:is-muted
:auth-backend
:email
:default-team-id
:default-project-id
:fullname
:lang])
(defn profile->props
[profile]
(-> profile
(select-keys [:id :is-active :is-muted :auth-backend :email :default-team-id :default-project-id :fullname :lang])
(select-keys profile-props)
(merge (:props profile))
(d/without-nils)))
@ -79,180 +100,132 @@
(update event :props #(into {} xform %))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HTTP Handler
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare persist-http-events)
;; --- SPECS
(s/def ::profile-id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::type ::us/string)
(s/def ::props (s/map-of ::us/keyword any?))
(s/def ::timestamp dt/instant?)
(s/def ::context (s/map-of ::us/keyword any?))
(s/def ::ip-addr ::us/string)
(s/def ::frontend-event
(s/keys :req-un [::type ::name ::props ::timestamp ::profile-id]
:opt-un [::context]))
(s/def ::webhooks/event? ::us/boolean)
(s/def ::webhooks/batch-timeout ::dt/duration)
(s/def ::webhooks/batch-key
(s/or :fn fn? :str string? :kw keyword?))
(s/def ::frontend-events (s/every ::frontend-event))
(defmethod ig/init-key ::http-handler
[_ {:keys [executor pool] :as cfg}]
(if (or (db/read-only? pool) (not (contains? cf/flags :audit-log)))
(do
(l/warn :hint "audit log http handler disabled or db is read-only")
(fn [_ respond _]
(respond (yrs/response 204))))
(letfn [(handler [{:keys [profile-id] :as request}]
(let [events (->> (:events (:params request))
(remove #(not= profile-id (:profile-id %)))
(us/conform ::frontend-events))
ip-addr (parse-client-ip request)
cfg (-> cfg
(assoc :source "frontend")
(assoc :events events)
(assoc :ip-addr ip-addr))]
(persist-http-events cfg)))
(handle-error [cause]
(let [xdata (ex-data cause)]
(if (= :spec-validation (:code xdata))
(l/error ::l/raw (str "spec validation on persist-events:\n" (us/pretty-explain xdata)))
(l/error :hint "error on persist-events" :cause cause))))]
(fn [request respond _]
;; Fire and forget, log the error in case of failure
(-> (px/submit! executor #(handler request))
(p/catch handle-error))
(respond (yrs/response 204))))))
(defn- persist-http-events
[{:keys [pool events ip-addr source] :as cfg}]
(let [columns [:id :name :source :type :tracked-at :profile-id :ip-addr :props :context]
prepare-xf (map (fn [event]
[(uuid/next)
(:name event)
source
(:type event)
(:timestamp event)
(:profile-id event)
(db/inet ip-addr)
(db/tjson (:props event))
(db/tjson (d/without-nils (:context event)))]))]
(when (seq events)
(->> (into [] prepare-xf events)
(db/insert-multi! pool :audit-log columns)))))
(s/def ::event
(s/keys :req-un [::type ::name ::profile-id]
:opt-un [::ip-addr ::props]
:opt [::webhooks/event?
::webhooks/batch-timeout
::webhooks/batch-key]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Collector
;; COLLECTOR
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Defines a service that collects the audit/activity log into the
;; internal database. Later this audit log can be transferred to
;; external storage and the local data cleared.
(declare persist-events)
(s/def ::collector
(s/keys :req [::wrk/executor ::db/pool]))
(defmethod ig/pre-init-spec ::collector [_]
(s/keys :req-un [::db/pool ::wrk/executor]))
(s/def ::ip-addr string?)
(s/def ::backend-event
(s/keys :req-un [::type ::name ::profile-id]
:opt-un [::ip-addr ::props]))
(def ^:private backend-event-xform
(comp
(filter #(us/valid? ::backend-event %))
(map clean-props)))
(s/keys :req [::db/pool ::wrk/executor ::mtx/metrics]))
(defmethod ig/init-key ::collector
[_ {:keys [pool] :as cfg}]
[_ {:keys [::db/pool] :as cfg}]
(cond
(not (contains? cf/flags :audit-log))
(do
(l/info :hint "audit log collection disabled")
(constantly nil))
(db/read-only? pool)
(do
(l/warn :hint "audit log collection disabled, db is read-only")
(constantly nil))
(l/warn :hint "audit: disabled (db is read-only)")
:else
(let [input (a/chan 512 backend-event-xform)
buffer (aa/batch input {:max-batch-size 100
:max-batch-age (* 10 1000) ; 10s
:init []})]
(l/info :hint "audit log collector initialized")
(a/go-loop []
(when-let [[_type events] (a/<! buffer)]
(let [res (a/<! (persist-events cfg events))]
(when (ex/exception? res)
(l/error :hint "error on persisting events" :cause res))
(recur))))
cfg))
(fn [& {:keys [cmd] :as params}]
(case cmd
:stop
(a/close! input)
(defn- persist-event!
[pool event]
(us/verify! ::event event)
(let [params {:id (uuid/next)
:name (:name event)
:type (:type event)
:profile-id (:profile-id event)
:ip-addr (:ip-addr event)
:props (:props event)}]
:submit
(let [params (-> params
(dissoc :cmd)
(assoc :tracked-at (dt/now)))]
(when-not (a/offer! input params)
(l/warn :hint "activity channel is full"))))))))
(when (contains? cf/flags :audit-log)
;; NOTE: this operation may cause primary key conflicts on inserts
;; because of the timestamp precision (two concurrent requests), in
;; this case we just retry the operation.
(rtry/with-retry {::rtry/when rtry/conflict-exception?
::rtry/max-retries 6
::rtry/label "persist-audit-log-event"}
(let [now (dt/now)]
(db/insert! pool :audit-log
(-> params
(update :props db/tjson)
(update :ip-addr db/inet)
(assoc :created-at now)
(assoc :tracked-at now)
(assoc :source "backend"))))))
(defn- persist-events
[{:keys [pool executor] :as cfg} events]
(letfn [(event->row [event]
[(uuid/next)
(:name event)
(:type event)
(:profile-id event)
(:tracked-at event)
(some-> (:ip-addr event) db/inet)
(db/tjson (:props event))
"backend"])]
(aa/with-thread executor
(when (seq events)
(db/with-atomic [conn pool]
(db/insert-multi! conn :audit-log
[:id :name :type :profile-id :tracked-at :ip-addr :props :source]
(sequence (keep event->row) events)))))))
(when (and (contains? cf/flags :webhooks)
(::webhooks/event? event))
(let [batch-key (::webhooks/batch-key event)
batch-timeout (::webhooks/batch-timeout event)
label (dm/str "rpc:" (:name params))
label (cond
(ifn? batch-key) (dm/str label ":" (batch-key (::rpc/params event)))
(string? batch-key) (dm/str label ":" batch-key)
:else label)
dedupe? (boolean (and batch-key batch-timeout))]
(wrk/submit! ::wrk/conn pool
::wrk/task :process-webhook-event
::wrk/queue :webhooks
::wrk/max-retries 0
::wrk/delay (or batch-timeout 0)
::wrk/dedupe dedupe?
::wrk/label label
::webhooks/event
(-> params
(dissoc :ip-addr)
(dissoc :type)))))))
(defn submit!
"Submit audit event to the collector."
[{:keys [::wrk/executor ::db/pool] :as collector} params]
(us/assert! ::collector collector)
(->> (px/submit! executor (partial persist-event! pool (d/without-nils params)))
(p/merr (fn [cause]
(l/error :hint "audit: unexpected error processing event" :cause cause)
(p/resolved nil)))))
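;; An illustrative call from an RPC command: record an activity event
;; through the collector and opt it into webhook delivery. The `collector`,
;; `profile-id` and `team-id` bindings are placeholders for the example.
(comment
  (submit! collector
           {:type "command"
            :name "create-project"
            :profile-id profile-id
            :props {:team-id team-id}
            ::webhooks/event? true}))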
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Archive Task
;; TASK: ARCHIVE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; This is a task responsible to send the accumulated events to an
;; This is a task responsible for sending the accumulated events to an
;; external service for archival.
(declare archive-events)
(s/def ::http-client fn?)
(s/def ::uri ::us/string)
(s/def ::sprops map?)
(s/def ::tasks/uri ::us/string)
(defmethod ig/pre-init-spec ::archive-task [_]
(s/keys :req-un [::db/pool ::sprops ::http-client]
:opt-un [::uri]))
(defmethod ig/pre-init-spec ::tasks/archive-task [_]
(s/keys :req [::db/pool ::main/props ::http/client]))
(defmethod ig/init-key ::archive-task
[_ {:keys [uri] :as cfg}]
(fn [props]
(defmethod ig/init-key ::tasks/archive
[_ cfg]
(fn [params]
;; NOTE: this let allows overwriting default configured values from
;; the repl, when manually invoking the task.
(let [enabled (or (contains? cf/flags :audit-log-archive)
(:enabled props false))
uri (or uri (:uri props))
cfg (assoc cfg :uri uri)]
(:enabled params false))
uri (cf/get :audit-log-archive-uri)
uri (or uri (:uri params))
cfg (assoc cfg ::uri uri)]
(when (and enabled (not uri))
(ex/raise :type :internal
@ -264,20 +237,21 @@
(let [n (archive-events cfg)]
(if n
(do
(aa/thread-sleep 100)
(px/sleep 100)
(recur (+ total n)))
(when (pos? total)
(l/trace :hint "events chunk archived" :num total)))))))))
(l/debug :hint "events archived" :total total)))))))))
(def sql:retrieve-batch-of-audit-log
"select * from audit_log
(def ^:private sql:retrieve-batch-of-audit-log
"select *
from audit_log
where archived_at is null
order by created_at asc
limit 256
for update skip locked;")
(defn archive-events
[{:keys [pool uri sprops http-client] :as cfg}]
[{:keys [::db/pool ::uri] :as cfg}]
(letfn [(decode-row [{:keys [props ip-addr context] :as row}]
(cond-> row
(db/pgobject? props)
@ -301,9 +275,10 @@
:context]))
(send [events]
(let [token (tokens/generate sprops {:iss "authentication"
:iat (dt/now)
:uid uuid/zero})
(let [token (tokens/generate (::main/props cfg)
{:iss "authentication"
:iat (dt/now)
:uid uuid/zero})
body (t/encode {:events events})
headers {"content-type" "application/transit+json"
"origin" (cf/get :public-uri)
@ -313,7 +288,7 @@
:method :post
:headers headers
:body body}
resp (http-client params {:sync? true})]
resp (http/req! cfg params {:sync? true})]
(if (= (:status resp) 204)
true
(do
@ -334,7 +309,7 @@
(map row->event))
events (into [] xform rows)]
(when-not (empty? events)
(l/debug :action "archive-events" :uri uri :events (count events))
(l/trace :hint "archive events chunk" :uri uri :events (count events))
(when (send events)
(mark-as-archived conn rows)
(count events)))))))
@ -343,21 +318,21 @@
;; GC Task
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def sql:clean-archived
(def ^:private sql:clean-archived
"delete from audit_log
where archived_at is not null")
(defn- clean-archived
[{:keys [pool]}]
[{:keys [::db/pool]}]
(let [result (db/exec-one! pool [sql:clean-archived])
result (:next.jdbc/update-count result)]
(l/debug :hint "delete archived audit log entries" :deleted result)
result))
(defmethod ig/pre-init-spec ::gc-task [_]
(s/keys :req-un [::db/pool]))
(defmethod ig/pre-init-spec ::tasks/gc [_]
(s/keys :req [::db/pool]))
(defmethod ig/init-key ::gc-task
(defmethod ig/init-key ::tasks/gc
[_ cfg]
(fn [_]
(clean-archived cfg)))

View file

@ -11,12 +11,12 @@
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.util.async :as aa]
[app.worker :as wrk]
[app.loggers.zmq :as lzmq]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]))
[integrant.core :as ig]
[promesa.exec :as px]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Error Listener
@ -27,7 +27,7 @@
(defonce enabled (atom true))
(defn- persist-on-database!
[{:keys [pool] :as cfg} {:keys [id] :as event}]
[{:keys [::db/pool] :as cfg} {:keys [id] :as event}]
(when-not (db/read-only? pool)
(db/insert! pool :server-error-report {:id id :content (db/tjson event)})))
@ -53,41 +53,49 @@
(assoc :version (:full cf/version))
(update :id #(or % (uuid/next)))))
(defn handle-event
[{:keys [executor] :as cfg} event]
(aa/with-thread executor
(try
(let [event (parse-event event)
uri (cf/get :public-uri)]
(defn- handle-event
[cfg event]
(try
(let [event (parse-event event)
uri (cf/get :public-uri)]
(l/debug :hint "registering error on database" :id (:id event)
:uri (str uri "/dbg/error/" (:id event)))
(l/debug :hint "registering error on database" :id (:id event)
:uri (str uri "/dbg/error/" (:id event)))
(persist-on-database! cfg event))
(catch Exception cause
(l/warn :hint "unexpected exception on database error logger" :cause cause)))))
(persist-on-database! cfg event))
(catch Throwable cause
(l/warn :hint "unexpected exception on database error logger" :cause cause))))
(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req-un [::wrk/executor ::db/pool ::receiver]))
(defn error-event?
(defn- error-event?
[event]
(= "error" (:logger/level event)))
(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req [::db/pool ::lzmq/receiver]))
(defmethod ig/init-key ::reporter
[_ {:keys [receiver] :as cfg}]
(l/info :msg "initializing database error persistence")
(let [output (a/chan (a/sliding-buffer 5) (filter error-event?))]
(receiver :sub output)
(a/go-loop []
(let [msg (a/<! output)]
(if (nil? msg)
(l/info :msg "stoping error reporting loop")
(do
(a/<! (handle-event cfg msg))
(recur)))))
output))
[_ {:keys [::lzmq/receiver] :as cfg}]
(px/thread
{:name "penpot/database-reporter"}
(l/info :hint "initializing database error persistence")
(let [input (a/chan (a/sliding-buffer 5)
(filter error-event?))]
(try
(lzmq/sub! receiver input)
(loop []
(when-let [msg (a/<!! input)]
(handle-event cfg msg))
(recur))
(catch InterruptedException _
(l/debug :hint "reporter interrupted"))
(catch Throwable cause
(l/error :hint "unexpected error" :cause cause))
(finally
(a/close! input)
(l/info :hint "reporter terminated"))))))
(defmethod ig/halt-key! ::reporter
[_ output]
(a/close! output))
[_ thread]
(some-> thread px/interrupt!))

View file

@ -8,58 +8,55 @@
"A Loki integration."
(:require
[app.common.logging :as l]
[app.common.spec :as us]
[app.config :as cfg]
[app.config :as cf]
[app.http.client :as http]
[app.loggers.zmq :as lzmq]
[app.util.json :as json]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[integrant.core :as ig]))
[integrant.core :as ig]
[promesa.exec :as px]))
(declare ^:private handle-event)
(declare ^:private start-rcv-loop)
(s/def ::uri ::us/string)
(s/def ::receiver fn?)
(s/def ::http-client fn?)
(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req-un [ ::receiver ::http-client]
:opt-un [::uri]))
(s/keys :req [::http/client
::lzmq/receiver]))
(defmethod ig/init-key ::reporter
[_ {:keys [receiver uri] :as cfg}]
(when uri
(l/info :msg "initializing loki reporter" :uri uri)
(let [input (a/chan (a/dropping-buffer 2048))]
(receiver :sub input)
[_ cfg]
(when-let [uri (cf/get :loggers-loki-uri)]
(px/thread
{:name "penpot/loki-reporter"}
(l/info :hint "reporter started" :uri uri)
(let [input (a/chan (a/dropping-buffer 2048))
cfg (assoc cfg ::uri uri)]
(doto (Thread. #(start-rcv-loop cfg input))
(.setDaemon true)
(.setName "penpot/loki-sender")
(.start))
(try
(lzmq/sub! (::lzmq/receiver cfg) input)
(loop []
(when-let [msg (a/<!! input)]
(handle-event cfg msg)
(recur)))
input)))
(catch InterruptedException _
(l/debug :hint "reporter interrupted"))
(catch Throwable cause
(l/error :hint "unexpected exception"
:cause cause))
(finally
(a/close! input)
(l/info :hint "reporter terminated")))))))
(defmethod ig/halt-key! ::reporter
[_ output]
(when output
(a/close! output)))
(defn- start-rcv-loop
[cfg input]
(loop []
(let [msg (a/<!! input)]
(when-not (nil? msg)
(handle-event cfg msg)
(recur))))
(l/info :msg "stoping error reporting loop"))
[_ thread]
(some-> thread px/interrupt!))
(defn- prepare-payload
[event]
(let [labels {:host (cfg/get :host)
:tenant (cfg/get :tenant)
:version (:full cfg/version)
(let [labels {:host (cf/get :host)
:tenant (cf/get :tenant)
:version (:full cf/version)
:logger (:logger/name event)
:level (:logger/level event)}]
{:streams
@ -69,15 +66,15 @@
(when-let [error (:trace event)]
(str "\n" error)))]]}]}))
(defn- make-request
[{:keys [http-client uri] :as cfg} payload]
(http-client {:uri uri
:timeout 3000
:method :post
:headers {"content-type" "application/json"}
:body (json/write payload)}
{:sync? true}))
[{:keys [::uri] :as cfg} payload]
(http/req! cfg
{:uri uri
:timeout 3000
:method :post
:headers {"content-type" "application/json"}
:body (json/encode payload)}
{:sync? true}))
(defn- handle-event
[cfg event]

View file

@ -9,67 +9,69 @@
(:require
[app.common.logging :as l]
[app.config :as cf]
[app.http.client :as http]
[app.loggers.database :as ldb]
[app.loggers.zmq :as lzmq]
[app.util.json :as json]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.core :as p]))
[promesa.exec :as px]))
(defonce enabled (atom true))
(defn- send-mattermost-notification!
[{:keys [http-client] :as cfg} {:keys [host id public-uri] :as event}]
(let [uri (:uri cfg)
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))]
(p/then
(http-client {:uri uri
:method :post
:headers {"content-type" "application/json"}
:body (json/write-str {:text text})})
(fn [{:keys [status] :as rsp}]
(when (not= status 200)
(l/warn :hint "error on sending data to mattermost"
:response (pr-str rsp)))))))
[cfg {:keys [host id public-uri] :as event}]
(let [text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))
resp (http/req! cfg
{:uri (cf/get :error-report-webhook)
:method :post
:headers {"content-type" "application/json"}
:body (json/encode-str {:text text})}
{:sync? true})]
(when (not= 200 (:status resp))
(l/warn :hint "error on sending data"
:response (pr-str resp)))))
(defn handle-event
[cfg event]
(let [ch (a/chan)]
(-> (p/let [event (ldb/parse-event event)]
(send-mattermost-notification! cfg event))
(p/finally (fn [_ cause]
(when cause
(l/warn :hint "unexpected exception on error reporter" :cause cause))
(a/close! ch))))
ch))
(s/def ::http-client fn?)
(s/def ::uri ::cf/error-report-webhook)
(when @enabled
(try
(let [event (ldb/parse-event event)]
(send-mattermost-notification! cfg event))
(catch Throwable cause
(l/warn :hint "unhandled error"
:cause cause)))))
(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req-un [::http-client ::receiver]
:opt-un [::uri]))
(s/keys :req [::http/client
::lzmq/receiver]))
(defmethod ig/init-key ::reporter
[_ {:keys [receiver uri] :as cfg}]
(when uri
(l/info :msg "initializing mattermost error reporter" :uri uri)
(let [output (a/chan (a/sliding-buffer 128)
(filter (fn [event]
(= (:logger/level event) "error"))))]
(receiver :sub output)
(a/go-loop []
(let [msg (a/<! output)]
(if (nil? msg)
(l/info :msg "stoping error reporting loop")
(do
(a/<! (handle-event cfg msg))
(recur)))))
output)))
[_ cfg]
(when-let [uri (cf/get :error-report-webhook)]
(px/thread
{:name "penpot/mattermost-reporter"}
(l/info :msg "initializing error reporter" :uri uri)
(let [input (a/chan (a/sliding-buffer 128)
(filter #(= (:logger/level %) "error")))]
(try
(lzmq/sub! (::lzmq/receiver cfg) input)
(loop []
(when-let [msg (a/<!! input)]
(handle-event cfg msg)
(recur)))
(catch InterruptedException _
(l/debug :hint "reporter interrupted"))
(catch Throwable cause
(l/error :hint "unexpected error" :cause cause))
(finally
(a/close! input)
(l/info :hint "reporter terminated")))))))
(defmethod ig/halt-key! ::reporter
[_ output]
(when output
(a/close! output)))
[_ thread]
(some-> thread px/interrupt!))

View file

@ -0,0 +1,186 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.loggers.webhooks
"A mattermost integration for error reporting."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.transit :as t]
[app.common.uri :as uri]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.util.json :as json]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]))
;; --- HELPERS
(defn key-fn
[k & keys]
(fn [params]
(reduce #(dm/str %1 ":" (get params %2))
(dm/str (get params k))
keys)))
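;; For example, building a batch key from team and file ids would behave
;; roughly like this (the values are illustrative):
(comment
  ;; => "1:2"
  ((key-fn :team-id :file-id) {:team-id 1 :file-id 2}))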
;; --- PROC
(defn- lookup-webhooks-by-team
[pool team-id]
(db/exec! pool ["select w.* from webhook as w where team_id=? and is_active=true" team-id]))
(defn- lookup-webhooks-by-project
[pool project-id]
(let [sql [(str "select w.* from webhook as w"
" join project as p on (p.team_id = w.team_id)"
" where p.id = ? and w.is_active = true")
project-id]]
(db/exec! pool sql)))
(defn- lookup-webhooks-by-file
[pool file-id]
(let [sql [(str "select w.* from webhook as w"
" join project as p on (p.team_id = w.team_id)"
" join file as f on (f.project_id = p.id)"
" where f.id = ? and w.is_active = true")
file-id]]
(db/exec! pool sql)))
(defn- lookup-webhooks
[{:keys [::db/pool]} {:keys [props] :as event}]
(or (some->> (:team-id props) (lookup-webhooks-by-team pool))
(some->> (:project-id props) (lookup-webhooks-by-project pool))
(some->> (:file-id props) (lookup-webhooks-by-file pool))))
(defmethod ig/pre-init-spec ::process-event-handler [_]
(s/keys :req [::db/pool]))
(defmethod ig/init-key ::process-event-handler
[_ {:keys [::db/pool] :as cfg}]
(fn [{:keys [props] :as task}]
(let [event (::event props)]
(l/debug :hint "process webhook event"
:name (:name event))
(when-let [items (lookup-webhooks cfg event)]
(l/trace :hint "webhooks found for event" :total (count items))
(db/with-atomic [conn pool]
(doseq [item items]
(wrk/submit! ::wrk/conn conn
::wrk/task :run-webhook
::wrk/queue :webhooks
::wrk/max-retries 3
::event event
::config item)))))))
;; --- RUN
(declare interpret-exception)
(declare interpret-response)
(def ^:private json-mapper
(json/mapper
{:encode-key-fn str/camel
:decode-key-fn (comp keyword str/kebab)
:pretty true}))
(defmethod ig/pre-init-spec ::run-webhook-handler [_]
(s/keys :req [::http/client ::db/pool]))
(defmethod ig/prep-key ::run-webhook-handler
[_ cfg]
(merge {::max-errors 3} (d/without-nils cfg)))
(defmethod ig/init-key ::run-webhook-handler
[_ {:keys [::db/pool ::max-errors] :as cfg}]
(letfn [(update-webhook! [whook err]
(if err
(let [sql [(str "update webhook "
" set error_code=?, "
" error_count=error_count+1 "
" where id=?")
err
(:id whook)]
res (db/exec-one! pool sql {:return-keys true})]
(when (>= (:error-count res) max-errors)
(db/update! pool :webhook {:is-active false} {:id (:id whook)})))
(db/update! pool :webhook
{:updated-at (dt/now)
:error-code nil
:error-count 0}
{:id (:id whook)})))
(report-delivery! [whook req rsp err]
(db/insert! pool :webhook-delivery
{:webhook-id (:id whook)
:created-at (dt/now)
:error-code err
:req-data (db/tjson req)
:rsp-data (db/tjson rsp)}))]
(fn [{:keys [props] :as task}]
(let [event (::event props)
whook (::config props)
body (case (:mtype whook)
"application/json" (json/encode-str event json-mapper)
"application/transit+json" (t/encode-str event)
"application/x-www-form-urlencoded" (uri/map->query-string event))]
(l/debug :hint "run webhook"
:event-name (:name event)
:webhook-id (:id whook)
:webhook-uri (:uri whook)
:webhook-mtype (:mtype whook))
(let [req {:uri (:uri whook)
:headers {"content-type" (:mtype whook)
"user-agent" (str/ffmt "penpot/%" (:main cf/version))}
:timeout (dt/duration "4s")
:method :post
:body body}]
(try
(let [rsp (http/req! cfg req {:response-type :input-stream :sync? true})
err (interpret-response rsp)]
(report-delivery! whook req rsp err)
(update-webhook! whook err))
(catch Throwable cause
(let [err (interpret-exception cause)]
(report-delivery! whook req nil err)
(update-webhook! whook err)
(when (= err "unknown")
(l/error :hint "unknown error on webhook request"
:cause cause))))))))))
(defn interpret-response
[{:keys [status] :as response}]
(when-not (or (= 200 status)
(= 204 status))
(str/ffmt "unexpected-status:%" status)))
(defn interpret-exception
[cause]
(cond
(instance? javax.net.ssl.SSLHandshakeException cause)
"ssl-validation-error"
(instance? java.net.ConnectException cause)
"connection-error"
(instance? java.lang.IllegalArgumentException cause)
"invalid-uri"
(instance? java.net.http.HttpConnectTimeoutException cause)
"timeout"
:else
"unknown"))
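;; Illustrative REPL sketch: how delivery outcomes map to the short error
;; codes stored by run-webhook-handler above.
(comment
  (interpret-response {:status 204})                   ;; => nil (success)
  (interpret-response {:status 500})                   ;; => "unexpected-status:500"
  (interpret-exception (java.net.ConnectException.)))  ;; => "connection-error"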

View file

@ -9,13 +9,15 @@
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.config :as cf]
[app.loggers.zmq.receiver :as-alias receiver]
[app.util.json :as json]
[app.util.time :as dt]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig])
[integrant.core :as ig]
[promesa.exec :as px])
(:import
org.zeromq.SocketType
org.zeromq.ZMQ$Socket
@ -24,38 +26,56 @@
(declare prepare)
(declare start-rcv-loop)
(s/def ::endpoint ::us/string)
(defmethod ig/pre-init-spec ::receiver [_]
(s/keys :opt-un [::endpoint]))
(defmethod ig/init-key ::receiver
[_ {:keys [endpoint] :as cfg}]
(l/info :msg "initializing ZMQ receiver" :bind endpoint)
(let [buffer (a/chan 1)
[_ cfg]
(let [uri (cf/get :loggers-zmq-uri)
buffer (a/chan 1)
output (a/chan 1 (comp (filter map?)
(keep prepare)))
mult (a/mult output)]
(when endpoint
(let [thread (Thread. #(start-rcv-loop {:out buffer :endpoint endpoint}))]
(.setDaemon thread false)
(.setName thread "penpot/zmq-logger-receiver")
(.start thread)))
mult (a/mult output)
thread (when uri
(px/thread
{:name "penpot/zmq-receiver"
:daemon false}
(l/info :hint "receiver started")
(try
(start-rcv-loop buffer uri)
(catch InterruptedException _
(l/debug :hint "receiver interrupted"))
(catch java.lang.IllegalStateException cause
(if (= "errno 4" (ex-message cause))
(l/debug :hint "receiver interrupted")
(l/error :hint "unhandled error" :cause cause)))
(catch Throwable cause
(l/error :hint "unhandled error" :cause cause))
(finally
(l/info :hint "receiver terminated")))))]
(a/pipe buffer output)
(with-meta
(fn [cmd ch]
(case cmd
:sub (a/tap mult ch)
:unsub (a/untap mult ch))
ch)
{::output output
::buffer buffer
::mult mult})))
(-> cfg
(assoc ::receiver/mult mult)
(assoc ::receiver/thread thread)
(assoc ::receiver/output output)
(assoc ::receiver/buffer buffer))))
(s/def ::receiver/mult some?)
(s/def ::receiver/thread #(instance? Thread %))
(s/def ::receiver/output some?)
(s/def ::receiver/buffer some?)
(s/def ::receiver
(s/keys :req [::receiver/mult
::receiver/thread
::receiver/output
::receiver/buffer]))
(defn sub!
[{:keys [::receiver/mult]} ch]
(a/tap mult ch))
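;; Illustrative consumer sketch (`receiver` is the initialized component;
;; names are placeholders): tap a channel into the mult and read the
;; parsed log events from it.
(comment
  (let [events (a/chan 16)]
    (sub! receiver events)
    (a/go-loop []
      (when-let [event (a/<! events)]
        (l/trace :hint "event received" :level (:logger/level event))
        (recur)))))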
(defmethod ig/halt-key! ::receiver
[_ f]
(a/close! (::buffer (meta f))))
[_ {:keys [::receiver/buffer ::receiver/thread]}]
(some-> thread px/interrupt!)
(some-> buffer a/close!))
(def ^:private json-mapper
(json/mapper
@ -63,23 +83,23 @@
:decode-key-fn (comp keyword str/kebab)}))
(defn- start-rcv-loop
([] (start-rcv-loop nil))
([{:keys [out endpoint] :or {endpoint "tcp://localhost:5556"}}]
(let [out (or out (a/chan 1))
zctx (ZContext. 1)
socket (.. zctx (createSocket SocketType/SUB))]
(.. socket (connect ^String endpoint))
(.. socket (subscribe ""))
(.. socket (setReceiveTimeOut 5000))
(loop []
(let [msg (.recv ^ZMQ$Socket socket)
msg (ex/ignoring (json/read msg json-mapper))
msg (if (nil? msg) :empty msg)]
(if (a/>!! out msg)
(recur)
(do
(.close ^java.lang.AutoCloseable socket)
(.destroy ^ZContext zctx))))))))
[output endpoint]
(let [zctx (ZContext. 1)
socket (.. zctx (createSocket SocketType/SUB))]
(try
(.. socket (connect ^String endpoint))
(.. socket (subscribe ""))
(.. socket (setReceiveTimeOut 5000))
(loop []
(let [msg (.recv ^ZMQ$Socket socket)
msg (ex/ignoring (json/decode msg json-mapper))
msg (if (nil? msg) :empty msg)]
(when (a/>!! output msg)
(recur))))
(finally
(.close ^java.lang.AutoCloseable socket)
(.destroy ^ZContext zctx)))))
(s/def ::logger-name string?)
(s/def ::level string?)

View file

@ -6,108 +6,234 @@
(ns app.main
(:require
[app.auth.oidc]
[app.auth.ldap :as-alias ldap]
[app.auth.oidc :as-alias oidc]
[app.auth.oidc.providers :as-alias oidc.providers]
[app.common.logging :as l]
[app.config :as cf]
[app.db :as-alias db]
[app.http.client :as-alias http.client]
[app.http.session :as-alias http.session]
[app.loggers.audit :as-alias audit]
[app.loggers.audit.tasks :as-alias audit.tasks]
[app.loggers.webhooks :as-alias webhooks]
[app.loggers.zmq :as-alias lzmq]
[app.metrics :as-alias mtx]
[app.metrics.definition :as-alias mdef]
[app.redis :as-alias rds]
[app.srepl :as-alias srepl]
[app.storage :as-alias sto]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[cuerdas.core :as str]
[integrant.core :as ig])
(:gen-class))
(def default-metrics
{:update-file-changes
{::mdef/name "penpot_rpc_update_file_changes_total"
::mdef/help "A total number of changes submitted to update-file."
::mdef/type :counter}
:update-file-bytes-processed
{::mdef/name "penpot_rpc_update_file_bytes_processed_total"
::mdef/help "A total number of bytes processed by update-file."
::mdef/type :counter}
:rpc-mutation-timing
{::mdef/name "penpot_rpc_mutation_timing"
::mdef/help "RPC mutation method call timing."
::mdef/labels ["name"]
::mdef/type :histogram}
:rpc-command-timing
{::mdef/name "penpot_rpc_command_timing"
::mdef/help "RPC command method call timing."
::mdef/labels ["name"]
::mdef/type :histogram}
:rpc-query-timing
{::mdef/name "penpot_rpc_query_timing"
::mdef/help "RPC query method call timing."
::mdef/labels ["name"]
::mdef/type :histogram}
:websocket-active-connections
{::mdef/name "penpot_websocket_active_connections"
::mdef/help "Active websocket connections gauge"
::mdef/type :gauge}
:websocket-messages-total
{::mdef/name "penpot_websocket_message_total"
::mdef/help "Counter of processed messages."
::mdef/labels ["op"]
::mdef/type :counter}
:websocket-session-timing
{::mdef/name "penpot_websocket_session_timing"
::mdef/help "Websocket session timing (seconds)."
::mdef/type :summary}
:session-update-total
{::mdef/name "penpot_http_session_update_total"
::mdef/help "A counter of session update batch events."
::mdef/type :counter}
:tasks-timing
{::mdef/name "penpot_tasks_timing"
::mdef/help "Background tasks timing (milliseconds)."
::mdef/labels ["name"]
::mdef/type :summary}
:redis-eval-timing
{::mdef/name "penpot_redis_eval_timing"
::mdef/help "Redis EVAL commands execution timings (ms)"
::mdef/labels ["name"]
::mdef/type :summary}
:rpc-climit-queue-size
{::mdef/name "penpot_rpc_climit_queue_size"
::mdef/help "Current number of queued submissions on the CLIMIT."
::mdef/labels ["name"]
::mdef/type :gauge}
:rpc-climit-concurrency
{::mdef/name "penpot_rpc_climit_concurrency"
::mdef/help "Current number of used concurrency capacity on the CLIMIT"
::mdef/labels ["name"]
::mdef/type :gauge}
:rpc-climit-timing
{::mdef/name "penpot_rpc_climit_timing"
::mdef/help "Summary of the time between queuing and executing on the CLIMIT"
::mdef/labels ["name"]
::mdef/type :summary}
:audit-http-handler-queue-size
{::mdef/name "penpot_audit_http_handler_queue_size"
::mdef/help "Current number of queued submissions on the audit log http handler"
::mdef/labels []
::mdef/type :gauge}
:audit-http-handler-concurrency
{::mdef/name "penpot_audit_http_handler_concurrency"
::mdef/help "Current number of used concurrency capacity on the audit log http handler"
::mdef/labels []
::mdef/type :gauge}
:audit-http-handler-timing
{::mdef/name "penpot_audit_http_handler_timing"
::mdef/help "Summary of the time between queuing and executing on the audit log http handler"
::mdef/labels []
::mdef/type :summary}
:executors-active-threads
{::mdef/name "penpot_executors_active_threads"
::mdef/help "Current number of threads available in the executor service."
::mdef/labels ["name"]
::mdef/type :gauge}
:executors-completed-tasks
{::mdef/name "penpot_executors_completed_tasks_total"
::mdef/help "Approximate number of completed tasks by the executor."
::mdef/labels ["name"]
::mdef/type :counter}
:executors-running-threads
{::mdef/name "penpot_executors_running_threads"
::mdef/help "Current number of threads with state RUNNING."
::mdef/labels ["name"]
::mdef/type :gauge}
:executors-queued-submissions
{::mdef/name "penpot_executors_queued_submissions"
::mdef/help "Current number of queued submissions."
::mdef/labels ["name"]
::mdef/type :gauge}})
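;; Illustrative sketch (the `metrics` component and the sample value are
;; placeholders; the call shape mirrors the mtx/run! usages elsewhere in
;; this changeset): recording a sample against one of the definitions above.
(comment
  (mtx/run! metrics {:id :tasks-timing
                     :labels ["file-gc"]
                     :val 125}))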
(def system-config
{:app.db/pool
{::db/pool
{:uri (cf/get :database-uri)
:username (cf/get :database-username)
:password (cf/get :database-password)
:read-only (cf/get :database-readonly false)
:metrics (ig/ref :app.metrics/metrics)
:metrics (ig/ref ::mtx/metrics)
:migrations (ig/ref :app.migrations/all)
:name :main
:min-size (cf/get :database-min-pool-size 0)
:max-size (cf/get :database-max-pool-size 60)}
;; Default thread pool for IO operations
[::default :app.worker/executor]
{:parallelism (cf/get :default-executor-parallelism 70)}
::wrk/executor
{::wrk/parallelism (cf/get :default-executor-parallelism 100)}
;; Dedicated thread pool for backround tasks execution.
[::worker :app.worker/executor]
{:parallelism (cf/get :worker-executor-parallelism 20)}
::wrk/scheduled-executor
{::wrk/parallelism (cf/get :scheduled-executor-parallelism 20)}
:app.worker/scheduler
{:parallelism 1
:prefix :scheduler}
:app.worker/executors
{:default (ig/ref [::default :app.worker/executor])
:worker (ig/ref [::worker :app.worker/executor])}
:app.worker/executor-monitor
{:metrics (ig/ref :app.metrics/metrics)
:executors (ig/ref :app.worker/executors)}
::wrk/monitor
{::mtx/metrics (ig/ref ::mtx/metrics)
::wrk/name "default"
::wrk/executor (ig/ref ::wrk/executor)}
:app.migrations/migrations
{}
:app.metrics/metrics
{}
::mtx/metrics
{:default default-metrics}
:app.migrations/all
{:main (ig/ref :app.migrations/migrations)}
:app.redis/redis
{:uri (cf/get :redis-uri)
:metrics (ig/ref :app.metrics/metrics)}
::rds/redis
{::rds/uri (cf/get :redis-uri)
::mtx/metrics (ig/ref ::mtx/metrics)}
:app.msgbus/msgbus
{:backend (cf/get :msgbus-backend :redis)
:executor (ig/ref [::default :app.worker/executor])
:redis (ig/ref :app.redis/redis)}
:executor (ig/ref ::wrk/executor)
:redis (ig/ref ::rds/redis)}
:app.storage.tmp/cleaner
{:executor (ig/ref [::worker :app.worker/executor])
:scheduler (ig/ref :app.worker/scheduler)}
{::wrk/executor (ig/ref ::wrk/executor)
::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)}
:app.storage/gc-deleted-task
{:pool (ig/ref :app.db/pool)
:storage (ig/ref :app.storage/storage)
:executor (ig/ref [::worker :app.worker/executor])}
::sto/gc-deleted-task
{:pool (ig/ref ::db/pool)
:storage (ig/ref ::sto/storage)
:executor (ig/ref ::wrk/executor)}
:app.storage/gc-touched-task
{:pool (ig/ref :app.db/pool)}
::sto/gc-touched-task
{:pool (ig/ref ::db/pool)}
:app.http/client
{:executor (ig/ref [::default :app.worker/executor])}
::http.client/client
{::wrk/executor (ig/ref ::wrk/executor)}
:app.http/session
{:store (ig/ref :app.http.session/store)}
:app.http.session/store
{:pool (ig/ref :app.db/pool)
:sprops (ig/ref :app.setup/props)
:executor (ig/ref [::default :app.worker/executor])}
:app.http.session/manager
{::db/pool (ig/ref ::db/pool)
::wrk/executor (ig/ref ::wrk/executor)
::props (ig/ref :app.setup/props)}
:app.http.session/gc-task
{:pool (ig/ref :app.db/pool)
{:pool (ig/ref ::db/pool)
:max-age (cf/get :auth-token-cookie-max-age)}
:app.http.awsns/handler
{:sprops (ig/ref :app.setup/props)
:pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)
:executor (ig/ref [::worker :app.worker/executor])}
{::props (ig/ref :app.setup/props)
::db/pool (ig/ref ::db/pool)
::http.client/client (ig/ref ::http.client/client)
::wrk/executor (ig/ref ::wrk/executor)}
:app.http/server
{:port (cf/get :http-server-port)
:host (cf/get :http-server-host)
:router (ig/ref :app.http/router)
:metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::default :app.worker/executor])
:metrics (ig/ref ::mtx/metrics)
:executor (ig/ref ::wrk/executor)
:io-threads (cf/get :http-server-io-threads)
:max-body-size (cf/get :http-server-max-body-size)
:max-multipart-body-size (cf/get :http-server-max-multipart-body-size)}
:app.auth.ldap/provider
::ldap/provider
{:host (cf/get :ldap-host)
:port (cf/get :ldap-port)
:ssl (cf/get :ldap-ssl)
@ -121,117 +247,102 @@
:bind-password (cf/get :ldap-bind-password)
:enabled? (contains? cf/flags :login-with-ldap)}
:app.auth.oidc/google-provider
{:enabled? (contains? cf/flags :login-with-google)
:client-id (cf/get :google-client-id)
:client-secret (cf/get :google-client-secret)}
::oidc.providers/google
{}
:app.auth.oidc/github-provider
{:enabled? (contains? cf/flags :login-with-github)
:http-client (ig/ref :app.http/client)
:client-id (cf/get :github-client-id)
:client-secret (cf/get :github-client-secret)}
::oidc.providers/github
{::http.client/client (ig/ref ::http.client/client)}
:app.auth.oidc/gitlab-provider
{:enabled? (contains? cf/flags :login-with-gitlab)
:base-uri (cf/get :gitlab-base-uri "https://gitlab.com")
:client-id (cf/get :gitlab-client-id)
:client-secret (cf/get :gitlab-client-secret)}
::oidc.providers/gitlab
{}
:app.auth.oidc/generic-provider
{:enabled? (contains? cf/flags :login-with-oidc)
:http-client (ig/ref :app.http/client)
::oidc.providers/generic
{::http.client/client (ig/ref ::http.client/client)}
:client-id (cf/get :oidc-client-id)
:client-secret (cf/get :oidc-client-secret)
::oidc/routes
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::props (ig/ref :app.setup/props)
::wrk/executor (ig/ref ::wrk/executor)
::oidc/providers {:google (ig/ref ::oidc.providers/google)
:github (ig/ref ::oidc.providers/github)
:gitlab (ig/ref ::oidc.providers/gitlab)
:oidc (ig/ref ::oidc.providers/generic)}
::audit/collector (ig/ref ::audit/collector)
::http.session/session (ig/ref :app.http.session/manager)}
:base-uri (cf/get :oidc-base-uri)
:token-uri (cf/get :oidc-token-uri)
:auth-uri (cf/get :oidc-auth-uri)
:user-uri (cf/get :oidc-user-uri)
:scopes (cf/get :oidc-scopes)
:roles-attr (cf/get :oidc-roles-attr)
:roles (cf/get :oidc-roles)}
:app.auth.oidc/routes
{:providers {:google (ig/ref :app.auth.oidc/google-provider)
:github (ig/ref :app.auth.oidc/github-provider)
:gitlab (ig/ref :app.auth.oidc/gitlab-provider)
:oidc (ig/ref :app.auth.oidc/generic-provider)}
:sprops (ig/ref :app.setup/props)
:http-client (ig/ref :app.http/client)
:pool (ig/ref :app.db/pool)
:session (ig/ref :app.http/session)
:public-uri (cf/get :public-uri)
:executor (ig/ref [::default :app.worker/executor])}
;; TODO: revisit the dependencies of this service; it looks like many of them are unused
:app.http/router
{:assets (ig/ref :app.http.assets/handlers)
:feedback (ig/ref :app.http.feedback/handler)
:session (ig/ref :app.http/session)
:session (ig/ref :app.http.session/manager)
:awsns-handler (ig/ref :app.http.awsns/handler)
:debug-routes (ig/ref :app.http.debug/routes)
:oidc-routes (ig/ref :app.auth.oidc/routes)
:oidc-routes (ig/ref ::oidc/routes)
:ws (ig/ref :app.http.websocket/handler)
:metrics (ig/ref :app.metrics/metrics)
:metrics (ig/ref ::mtx/metrics)
:public-uri (cf/get :public-uri)
:storage (ig/ref :app.storage/storage)
:audit-handler (ig/ref :app.loggers.audit/http-handler)
:storage (ig/ref ::sto/storage)
:rpc-routes (ig/ref :app.rpc/routes)
:doc-routes (ig/ref :app.rpc.doc/routes)
:executor (ig/ref [::default :app.worker/executor])}
:executor (ig/ref ::wrk/executor)}
:app.http.debug/routes
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])
:storage (ig/ref :app.storage/storage)
:session (ig/ref :app.http/session)}
{:pool (ig/ref ::db/pool)
:executor (ig/ref ::wrk/executor)
:storage (ig/ref ::sto/storage)
:session (ig/ref :app.http.session/manager)
::db/pool (ig/ref ::db/pool)
::wrk/executor (ig/ref ::wrk/executor)
::sto/storage (ig/ref ::sto/storage)}
:app.http.websocket/handler
{:pool (ig/ref :app.db/pool)
:metrics (ig/ref :app.metrics/metrics)
{:pool (ig/ref ::db/pool)
:metrics (ig/ref ::mtx/metrics)
:msgbus (ig/ref :app.msgbus/msgbus)}
:app.http.assets/handlers
{:metrics (ig/ref :app.metrics/metrics)
{:metrics (ig/ref ::mtx/metrics)
:assets-path (cf/get :assets-path)
:storage (ig/ref :app.storage/storage)
:executor (ig/ref [::default :app.worker/executor])
:storage (ig/ref ::sto/storage)
:executor (ig/ref ::wrk/executor)
:cache-max-age (dt/duration {:hours 24})
:signature-max-age (dt/duration {:hours 24 :minutes 5})}
:app.http.feedback/handler
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])}
{:pool (ig/ref ::db/pool)
:executor (ig/ref ::wrk/executor)}
:app.rpc/semaphores
{:metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::default :app.worker/executor])}
:app.rpc/climit
{:metrics (ig/ref ::mtx/metrics)
:executor (ig/ref ::wrk/executor)}
:app.rpc/rlimit
{:executor (ig/ref [::worker :app.worker/executor])
:scheduler (ig/ref :app.worker/scheduler)}
{:executor (ig/ref ::wrk/executor)
:scheduled-executor (ig/ref ::wrk/scheduled-executor)}
:app.rpc/methods
{:pool (ig/ref :app.db/pool)
:session (ig/ref :app.http/session)
:sprops (ig/ref :app.setup/props)
:metrics (ig/ref :app.metrics/metrics)
:storage (ig/ref :app.storage/storage)
:msgbus (ig/ref :app.msgbus/msgbus)
:public-uri (cf/get :public-uri)
:redis (ig/ref :app.redis/redis)
:audit (ig/ref :app.loggers.audit/collector)
:ldap (ig/ref :app.auth.ldap/provider)
:http-client (ig/ref :app.http/client)
:rlimit (ig/ref :app.rpc/rlimit)
:executors (ig/ref :app.worker/executors)
:executor (ig/ref [::default :app.worker/executor])
:templates (ig/ref :app.setup/builtin-templates)
:semaphores (ig/ref :app.rpc/semaphores)
{::audit/collector (ig/ref ::audit/collector)
::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::wrk/executor (ig/ref ::wrk/executor)
::props (ig/ref :app.setup/props)
::ldap/provider (ig/ref ::ldap/provider)
:pool (ig/ref ::db/pool)
:session (ig/ref :app.http.session/manager)
:sprops (ig/ref :app.setup/props)
:metrics (ig/ref ::mtx/metrics)
:storage (ig/ref ::sto/storage)
:msgbus (ig/ref :app.msgbus/msgbus)
:public-uri (cf/get :public-uri)
:redis (ig/ref ::rds/redis)
:http-client (ig/ref ::http.client/client)
:climit (ig/ref :app.rpc/climit)
:rlimit (ig/ref :app.rpc/rlimit)
:executor (ig/ref ::wrk/executor)
:templates (ig/ref :app.setup/builtin-templates)
}
:app.rpc.doc/routes
@ -240,20 +351,25 @@
:app.rpc/routes
{:methods (ig/ref :app.rpc/methods)}
:app.worker/registry
{:metrics (ig/ref :app.metrics/metrics)
::wrk/registry
{:metrics (ig/ref ::mtx/metrics)
:tasks
{:sendmail (ig/ref :app.emails/handler)
:objects-gc (ig/ref :app.tasks.objects-gc/handler)
:file-gc (ig/ref :app.tasks.file-gc/handler)
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
:storage-gc-deleted (ig/ref :app.storage/gc-deleted-task)
:storage-gc-touched (ig/ref :app.storage/gc-touched-task)
:storage-gc-deleted (ig/ref ::sto/gc-deleted-task)
:storage-gc-touched (ig/ref ::sto/gc-touched-task)
:tasks-gc (ig/ref :app.tasks.tasks-gc/handler)
:telemetry (ig/ref :app.tasks.telemetry/handler)
:session-gc (ig/ref :app.http.session/gc-task)
:audit-log-archive (ig/ref :app.loggers.audit/archive-task)
:audit-log-gc (ig/ref :app.loggers.audit/gc-task)}}
:audit-log-archive (ig/ref ::audit.tasks/archive)
:audit-log-gc (ig/ref ::audit.tasks/gc)
:process-webhook-event
(ig/ref ::webhooks/process-event-handler)
:run-webhook
(ig/ref ::webhooks/run-webhook-handler)}}
:app.emails/sendmail
@ -268,78 +384,81 @@
:app.emails/handler
{:sendmail (ig/ref :app.emails/sendmail)
:metrics (ig/ref :app.metrics/metrics)}
:metrics (ig/ref ::mtx/metrics)}
:app.tasks.tasks-gc/handler
{:pool (ig/ref :app.db/pool)
{:pool (ig/ref ::db/pool)
:max-age cf/deletion-delay}
:app.tasks.objects-gc/handler
{:pool (ig/ref :app.db/pool)
:storage (ig/ref :app.storage/storage)}
{::db/pool (ig/ref ::db/pool)
::sto/storage (ig/ref ::sto/storage)}
:app.tasks.file-gc/handler
{:pool (ig/ref :app.db/pool)}
{:pool (ig/ref ::db/pool)}
:app.tasks.file-xlog-gc/handler
{:pool (ig/ref :app.db/pool)}
{:pool (ig/ref ::db/pool)}
:app.tasks.telemetry/handler
{:pool (ig/ref :app.db/pool)
:version (:full cf/version)
:uri (cf/get :telemetry-uri)
:sprops (ig/ref :app.setup/props)
:http-client (ig/ref :app.http/client)}
{::db/pool (ig/ref ::db/pool)
::http.client/client (ig/ref ::http.client/client)
::props (ig/ref :app.setup/props)}
:app.srepl/server
{:port (cf/get :srepl-port)
:host (cf/get :srepl-host)}
[::srepl/urepl ::srepl/server]
{:port (cf/get :urepl-port 6062)
:host (cf/get :urepl-host "localhost")}
[::srepl/prepl ::srepl/server]
{:port (cf/get :prepl-port 6063)
:host (cf/get :prepl-host "localhost")}
:app.setup/builtin-templates
{:http-client (ig/ref :app.http/client)}
{::http.client/client (ig/ref ::http.client/client)}
:app.setup/props
{:pool (ig/ref :app.db/pool)
{:pool (ig/ref ::db/pool)
:key (cf/get :secret-key)}
:app.loggers.zmq/receiver
{:endpoint (cf/get :loggers-zmq-uri)}
::lzmq/receiver
{}
:app.loggers.audit/http-handler
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])}
::audit/collector
{::db/pool (ig/ref ::db/pool)
::wrk/executor (ig/ref ::wrk/executor)
::mtx/metrics (ig/ref ::mtx/metrics)}
:app.loggers.audit/collector
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])}
::audit.tasks/archive
{::props (ig/ref :app.setup/props)
::db/pool (ig/ref ::db/pool)
::http.client/client (ig/ref ::http.client/client)}
:app.loggers.audit/archive-task
{:uri (cf/get :audit-log-archive-uri)
:sprops (ig/ref :app.setup/props)
:pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)}
::audit.tasks/gc
{::db/pool (ig/ref ::db/pool)}
:app.loggers.audit/gc-task
{:pool (ig/ref :app.db/pool)}
::webhooks/process-event-handler
{::db/pool (ig/ref ::db/pool)
::http.client/client (ig/ref ::http.client/client)}
::webhooks/run-webhook-handler
{::db/pool (ig/ref ::db/pool)
::http.client/client (ig/ref ::http.client/client)}
:app.loggers.loki/reporter
{:uri (cf/get :loggers-loki-uri)
:receiver (ig/ref :app.loggers.zmq/receiver)
:http-client (ig/ref :app.http/client)}
{::lzmq/receiver (ig/ref ::lzmq/receiver)
::http.client/client (ig/ref ::http.client/client)}
:app.loggers.mattermost/reporter
{:uri (cf/get :error-report-webhook)
:receiver (ig/ref :app.loggers.zmq/receiver)
:http-client (ig/ref :app.http/client)}
{::lzmq/receiver (ig/ref ::lzmq/receiver)
::http.client/client (ig/ref ::http.client/client)}
:app.loggers.database/reporter
{:receiver (ig/ref :app.loggers.zmq/receiver)
:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])}
{::lzmq/receiver (ig/ref :app.loggers.zmq/receiver)
::db/pool (ig/ref ::db/pool)}
:app.storage/storage
{:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])
::sto/storage
{:pool (ig/ref ::db/pool)
:executor (ig/ref ::wrk/executor)
:backends
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
@ -353,7 +472,7 @@
{:region (cf/get :storage-assets-s3-region)
:endpoint (cf/get :storage-assets-s3-endpoint)
:bucket (cf/get :storage-assets-s3-bucket)
:executor (ig/ref [::default :app.worker/executor])}
:executor (ig/ref ::wrk/executor)}
[::assets :app.storage.fs/backend]
{:directory (cf/get :storage-assets-fs-directory)}
@ -361,12 +480,11 @@
(def worker-config
{:app.worker/cron
{:executor (ig/ref [::worker :app.worker/executor])
:scheduler (ig/ref :app.worker/scheduler)
:tasks (ig/ref :app.worker/registry)
:pool (ig/ref :app.db/pool)
:entries
{::wrk/cron
{::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)
::wrk/registry (ig/ref ::wrk/registry)
::db/pool (ig/ref ::db/pool)
::wrk/entries
[{:cron #app/cron "0 0 * * * ?" ;; hourly
:task :file-xlog-gc}
@ -399,11 +517,27 @@
{:cron #app/cron "30 */5 * * * ?" ;; every 5m
:task :audit-log-gc})]}
:app.worker/worker
{:executor (ig/ref [::worker :app.worker/executor])
:tasks (ig/ref :app.worker/registry)
:metrics (ig/ref :app.metrics/metrics)
:pool (ig/ref :app.db/pool)}})
::wrk/dispatcher
{::rds/redis (ig/ref ::rds/redis)
::mtx/metrics (ig/ref ::mtx/metrics)
::db/pool (ig/ref ::db/pool)}
[::default ::wrk/worker]
{::wrk/parallelism (cf/get ::worker-default-parallelism 1)
::wrk/queue :default
::rds/redis (ig/ref ::rds/redis)
::wrk/registry (ig/ref ::wrk/registry)
::mtx/metrics (ig/ref ::mtx/metrics)
::db/pool (ig/ref ::db/pool)}
[::webhook ::wrk/worker]
{::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
::wrk/queue :webhooks
::rds/redis (ig/ref ::rds/redis)
::wrk/registry (ig/ref ::wrk/registry)
::mtx/metrics (ig/ref ::mtx/metrics)
::db/pool (ig/ref ::db/pool)}})
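;; Illustrative sketch (conn is a placeholder; the parameters mirror the
;; wrk/submit! call in the webhooks handler earlier in this changeset):
;; jobs reach the workers declared in this config through the task queue.
(comment
  (wrk/submit! ::wrk/conn conn
               ::wrk/task :sendmail
               ::wrk/queue :default
               ::wrk/max-retries 3))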
(def system nil)
@ -417,7 +551,7 @@
(merge worker-config))
(ig/prep)
(ig/init))))
(l/info :msg "welcome to penpot"
(l/info :hint "welcome to penpot"
:flags (str/join "," (map name cf/flags))
:worker? (contains? cf/flags :backend-worker)
:version (:full cf/version)))
@ -430,4 +564,9 @@
(defn -main
[& _args]
(start))
(try
(start)
(catch Throwable cause
(l/error :hint (ex-message cause)
:cause cause)
(System/exit -1))))

View file

@ -220,7 +220,7 @@
(ttf-or-otf->woff [data]
;; NOTE: foutput is not used directly, it represents the
;; default output of the exection of the underlying
;; default output of the execution of the underlying
;; command.
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
foutput (fs/path (str finput ".woff"))

View file

@ -38,110 +38,6 @@
;; METRICS SERVICE PROVIDER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def default-metrics
{:update-file-changes
{::mdef/name "penpot_rpc_update_file_changes_total"
::mdef/help "A total number of changes submitted to update-file."
::mdef/type :counter}
:update-file-bytes-processed
{::mdef/name "penpot_rpc_update_file_bytes_processed_total"
::mdef/help "A total number of bytes processed by update-file."
::mdef/type :counter}
:rpc-mutation-timing
{::mdef/name "penpot_rpc_mutation_timing"
::mdef/help "RPC mutation method call timming."
::mdef/labels ["name"]
::mdef/type :histogram}
:rpc-command-timing
{::mdef/name "penpot_rpc_command_timing"
::mdef/help "RPC command method call timming."
::mdef/labels ["name"]
::mdef/type :histogram}
:rpc-query-timing
{::mdef/name "penpot_rpc_query_timing"
::mdef/help "RPC query method call timing."
::mdef/labels ["name"]
::mdef/type :histogram}
:websocket-active-connections
{::mdef/name "penpot_websocket_active_connections"
::mdef/help "Active websocket connections gauge"
::mdef/type :gauge}
:websocket-messages-total
{::mdef/name "penpot_websocket_message_total"
::mdef/help "Counter of processed messages."
::mdef/labels ["op"]
::mdef/type :counter}
:websocket-session-timing
{::mdef/name "penpot_websocket_session_timing"
::mdef/help "Websocket session timing (seconds)."
::mdef/type :summary}
:session-update-total
{::mdef/name "penpot_http_session_update_total"
::mdef/help "A counter of session update batch events."
::mdef/type :counter}
:tasks-timing
{::mdef/name "penpot_tasks_timing"
::mdef/help "Background tasks timing (milliseconds)."
::mdef/labels ["name"]
::mdef/type :summary}
:redis-eval-timing
{::mdef/name "penpot_redis_eval_timing"
::mdef/help "Redis EVAL commands execution timings (ms)"
::mdef/labels ["name"]
::mdef/type :summary}
:semaphore-queued-submissions
{::mdef/name "penpot_semaphore_queued_submissions"
::mdef/help "Current number of queued submissions on SEMAPHORE."
::mdef/labels ["name"]
::mdef/type :gauge}
:semaphore-used-permits
{::mdef/name "penpot_semaphore_used_permits"
::mdef/help "Current number of used permits on SEMAPHORE."
::mdef/labels ["name"]
::mdef/type :gauge}
:semaphore-timing
{::mdef/name "penpot_semaphore_timing"
::mdef/help "Total timing of SEMAPHORE."
::mdef/labels ["name"]
::mdef/type :summary}
:executors-active-threads
{::mdef/name "penpot_executors_active_threads"
::mdef/help "Current number of threads available in the executor service."
::mdef/labels ["name"]
::mdef/type :gauge}
:executors-completed-tasks
{::mdef/name "penpot_executors_completed_tasks_total"
::mdef/help "Aproximate number of completed tasks by the executor."
::mdef/labels ["name"]
::mdef/type :counter}
:executors-running-threads
{::mdef/name "penpot_executors_running_threads"
::mdef/help "Current number of threads with state RUNNING."
::mdef/labels ["name"]
::mdef/type :gauge}
:executors-queued-submissions
{::mdef/name "penpot_executors_queued_submissions"
::mdef/help "Current number of queued submissions."
::mdef/labels ["name"]
::mdef/type :gauge}})
(s/def ::mdef/name string?)
(s/def ::mdef/help string?)
(s/def ::mdef/labels (s/every string? :kind vector?))
@ -169,8 +65,13 @@
::handler
::definitions]))
(s/def ::default ::definitions)
(defmethod ig/pre-init-spec ::metrics [_]
(s/keys :req-un [::default]))
(defmethod ig/init-key ::metrics
[_ _]
[_ cfg]
(l/info :action "initialize metrics")
(let [registry (create-registry)
definitions (reduce-kv (fn [res k v]
@ -178,7 +79,7 @@
(create-collector)
(assoc res k)))
{}
default-metrics)]
(:default cfg))]
(us/verify! ::definitions definitions)
@ -231,7 +132,7 @@
(defmethod run-collector! :counter
[{:keys [::mdef/instance]} {:keys [inc labels] :or {inc 1 labels default-empty-labels}}]
(let [instance (.labels instance (if (is-array? labels) labels (into-array String labels)))]
(let [instance (.labels ^Counter instance (if (is-array? labels) labels (into-array String labels)))]
(.inc ^Counter$Child instance (double inc))))
(defmethod run-collector! :gauge

View file

@ -247,7 +247,62 @@
{:name "0079-mod-profile-table"
:fn (mg/resource "app/migrations/sql/0079-mod-profile-table.sql")}
])
{:name "0080-mod-index-names"
:fn (mg/resource "app/migrations/sql/0080-mod-index-names.sql")}
{:name "0081-add-deleted-at-index-to-file-table"
:fn (mg/resource "app/migrations/sql/0081-add-deleted-at-index-to-file-table.sql")}
{:name "0082-add-features-column-to-file-table"
:fn (mg/resource "app/migrations/sql/0082-add-features-column-to-file-table.sql")}
{:name "0083-add-file-data-fragment-table"
:fn (mg/resource "app/migrations/sql/0083-add-file-data-fragment-table.sql")}
{:name "0084-add-features-column-to-file-change-table"
:fn (mg/resource "app/migrations/sql/0084-add-features-column-to-file-change-table.sql")}
{:name "0085-add-webhook-table"
:fn (mg/resource "app/migrations/sql/0085-add-webhook-table.sql")}
{:name "0086-add-webhook-delivery-table"
:fn (mg/resource "app/migrations/sql/0086-add-webhook-delivery-table.sql")}
{:name "0087-mod-task-table"
:fn (mg/resource "app/migrations/sql/0087-mod-task-table.sql")}
{:name "0088-mod-team-profile-rel-table"
:fn (mg/resource "app/migrations/sql/0088-mod-team-profile-rel-table.sql")}
{:name "0089-mod-project-profile-rel-table"
:fn (mg/resource "app/migrations/sql/0089-mod-project-profile-rel-table.sql")}
{:name "0090-mod-http-session-table"
:fn (mg/resource "app/migrations/sql/0090-mod-http-session-table.sql")}
{:name "0091-mod-team-project-profile-rel-table"
:fn (mg/resource "app/migrations/sql/0091-mod-team-project-profile-rel-table.sql")}
{:name "0092-mod-team-invitation-table"
:fn (mg/resource "app/migrations/sql/0092-mod-team-invitation-table.sql")}
{:name "0093-del-file-share-tokens-table"
:fn (mg/resource "app/migrations/sql/0093-del-file-share-tokens-table.sql")}
{:name "0094-del-profile-attr-table"
:fn (mg/resource "app/migrations/sql/0094-del-profile-attr-table.sql")}
{:name "0095-del-storage-data-table"
:fn (mg/resource "app/migrations/sql/0095-del-storage-data-table.sql")}
{:name "0096-del-storage-pending-table"
:fn (mg/resource "app/migrations/sql/0096-del-storage-pending-table.sql")}
{:name "0098-add-quotes-table"
:fn (mg/resource "app/migrations/sql/0098-add-quotes-table.sql")}
])
(defmethod ig/init-key ::migrations [_ _] migrations)

View file

@ -0,0 +1,11 @@
ALTER INDEX team_font_variant_deleted_at_idx
RENAME TO team_font_variant__deleted_at__idx;
ALTER INDEX team_deleted_at_idx
RENAME TO team__deleted_at__idx;
ALTER INDEX profile_deleted_at_idx
RENAME TO profile__deleted_at__idx;
ALTER INDEX project_deleted_at_idx
RENAME TO project__deleted_at__idx;

View file

@ -0,0 +1,3 @@
CREATE INDEX file__deleted_at__idx
ON file (deleted_at, id)
WHERE deleted_at IS NOT NULL;

View file

@ -0,0 +1,2 @@
ALTER TABLE file
ADD COLUMN features text[] DEFAULT NULL;

View file

@ -0,0 +1,15 @@
CREATE TABLE file_data_fragment (
id uuid NOT NULL,
file_id uuid NOT NULL REFERENCES file(id) ON DELETE CASCADE DEFERRABLE,
created_at timestamptz NOT NULL DEFAULT now(),
metadata jsonb NULL,
content bytea NOT NULL,
PRIMARY KEY (file_id, id)
);
ALTER TABLE file_data_fragment
ALTER COLUMN metadata SET STORAGE external,
ALTER COLUMN content SET STORAGE external;

View file

@ -0,0 +1,8 @@
ALTER TABLE file_change
ADD COLUMN features text[] DEFAULT NULL;
ALTER TABLE file_change
ALTER COLUMN features SET STORAGE external;
ALTER TABLE file
ALTER COLUMN features SET STORAGE external;

View file

@ -0,0 +1,25 @@
CREATE TABLE webhook (
id uuid PRIMARY KEY,
team_id uuid NOT NULL REFERENCES team(id) ON DELETE CASCADE DEFERRABLE,
created_at timestamptz NOT NULL DEFAULT now(),
updated_at timestamptz NOT NULL DEFAULT now(),
uri text NOT NULL,
mtype text NOT NULL,
error_code text NULL,
error_count smallint DEFAULT 0,
is_active boolean DEFAULT true,
secret_key text NULL
);
ALTER TABLE webhook
ALTER COLUMN uri SET STORAGE external,
ALTER COLUMN mtype SET STORAGE external,
ALTER COLUMN error_code SET STORAGE external,
ALTER COLUMN secret_key SET STORAGE external;
CREATE INDEX webhook__team_id__idx ON webhook (team_id);

View file

@ -0,0 +1,16 @@
CREATE TABLE webhook_delivery (
webhook_id uuid NOT NULL REFERENCES webhook(id) ON DELETE CASCADE DEFERRABLE,
created_at timestamptz NOT NULL DEFAULT now(),
error_code text NULL,
req_data jsonb NULL,
rsp_data jsonb NULL,
PRIMARY KEY (webhook_id, created_at)
);
ALTER TABLE webhook_delivery
ALTER COLUMN error_code SET STORAGE external,
ALTER COLUMN req_data SET STORAGE external,
ALTER COLUMN rsp_data SET STORAGE external;

View file

@ -0,0 +1,9 @@
ALTER TABLE task
ADD COLUMN label text NULL;
ALTER TABLE task
ALTER COLUMN label SET STORAGE external;
CREATE INDEX task__label__idx
ON task (label, name, queue)
WHERE status = 'new';

View file

@ -0,0 +1,3 @@
ALTER TABLE team_profile_rel DROP CONSTRAINT team_profile_rel_pkey;
ALTER TABLE team_profile_rel ADD COLUMN id uuid DEFAULT uuid_generate_v4() PRIMARY KEY;
ALTER TABLE team_profile_rel ADD CONSTRAINT team_profile_rel_unique UNIQUE (team_id, profile_id);

View file

@ -0,0 +1,3 @@
ALTER TABLE project_profile_rel DROP CONSTRAINT project_profile_rel_pkey;
ALTER TABLE project_profile_rel ADD COLUMN id uuid DEFAULT uuid_generate_v4() PRIMARY KEY;
ALTER TABLE project_profile_rel ADD CONSTRAINT project_profile_rel_unique UNIQUE (project_id, profile_id);

View file

@ -0,0 +1,2 @@
ALTER TABLE http_session DROP CONSTRAINT http_session_pkey;
ALTER TABLE http_session ADD CONSTRAINT http_session_pkey PRIMARY KEY (id);

View file

@ -0,0 +1,3 @@
ALTER TABLE team_project_profile_rel DROP CONSTRAINT team_project_profile_rel_pkey;
ALTER TABLE team_project_profile_rel ADD COLUMN id uuid DEFAULT uuid_generate_v4() PRIMARY KEY;
ALTER TABLE team_project_profile_rel ADD CONSTRAINT team_project_profile_rel_unique UNIQUE (team_id, project_id, profile_id);

View file

@ -0,0 +1,3 @@
ALTER TABLE team_invitation DROP CONSTRAINT team_invitation_pkey;
ALTER TABLE team_invitation ADD COLUMN id uuid DEFAULT uuid_generate_v4() PRIMARY KEY;
ALTER TABLE team_invitation ADD CONSTRAINT team_invitation_unique UNIQUE (team_id, email_to);

View file

@ -0,0 +1 @@
DROP TABLE file_share_token;

View file

@ -0,0 +1 @@
DROP TABLE profile_attr;

View file

@ -0,0 +1 @@
DROP TABLE storage_data;

View file

@ -0,0 +1 @@
DROP TABLE storage_pending;

View file

@ -0,0 +1,82 @@
CREATE TABLE usage_quote (
id uuid NOT NULL DEFAULT uuid_generate_v4() PRIMARY KEY,
target text NOT NULL,
quote bigint NOT NULL,
profile_id uuid NULL REFERENCES profile(id) ON DELETE CASCADE DEFERRABLE,
project_id uuid NULL REFERENCES project(id) ON DELETE CASCADE DEFERRABLE,
team_id uuid NULL REFERENCES team(id) ON DELETE CASCADE DEFERRABLE,
file_id uuid NULL REFERENCES file(id) ON DELETE CASCADE DEFERRABLE
);
ALTER TABLE usage_quote
ALTER COLUMN target SET STORAGE external;
CREATE INDEX usage_quote__profile_id__idx ON usage_quote(profile_id, target);
CREATE INDEX usage_quote__project_id__idx ON usage_quote(project_id, target);
CREATE INDEX usage_quote__team_id__idx ON usage_quote(team_id, target);
-- DROP TABLE IF EXISTS usage_quote_test;
-- CREATE TABLE usage_quote_test (
-- id bigserial NOT NULL PRIMARY KEY,
-- target text NOT NULL,
-- quote bigint NOT NULL,
-- profile_id bigint NULL,
-- team_id bigint NULL,
-- project_id bigint NULL,
-- file_id bigint NULL
-- );
-- ALTER TABLE usage_quote_test
-- ALTER COLUMN target SET STORAGE external;
-- CREATE INDEX usage_quote_test__profile_id__idx ON usage_quote_test(profile_id, target);
-- CREATE INDEX usage_quote_test__project_id__idx ON usage_quote_test(project_id, target);
-- CREATE INDEX usage_quote_test__team_id__idx ON usage_quote_test(team_id, target);
-- -- CREATE INDEX usage_quote_test__target__idx ON usage_quote_test(target);
-- DELETE FROM usage_quote_test;
-- INSERT INTO usage_quote_test (target, quote, profile_id, team_id, project_id)
-- SELECT 'files-per-project', 50*RANDOM(), 2000*RANDOM(), null, null
-- FROM generate_series(1, 5000);
-- INSERT INTO usage_quote_test (target, quote, profile_id, team_id, project_id)
-- SELECT 'files-per-project', 200*RANDOM(), 300*RANDOM(), 300*RANDOM(), null
-- FROM generate_series(1, 1000);
-- INSERT INTO usage_quote_test (target, quote, profile_id, team_id, project_id)
-- SELECT 'files-per-project', 100*RANDOM(), 300*RANDOM(), null, 300*RANDOM()
-- FROM generate_series(1, 1000);
-- INSERT INTO usage_quote_test (target, quote, profile_id, team_id, project_id)
-- SELECT 'files-per-project', 100*RANDOM(), 300*RANDOM(), 300*RANDOM(), 300*RANDOM()
-- FROM generate_series(1, 1000);
-- INSERT INTO usage_quote_test (target, quote, profile_id, team_id, project_id)
-- SELECT 'files-per-project', 30*RANDOM(), null, 2000*RANDOM(), null
-- FROM generate_series(1, 5000);
-- INSERT INTO usage_quote_test (target, quote, profile_id, team_id, project_id)
-- SELECT 'files-per-project', 10*RANDOM(), null, null, 2000*RANDOM()
-- FROM generate_series(1, 5000);
-- VACUUM ANALYZE usage_quote_test;
-- select * from usage_quote_test
-- where target = 'files-per-project'
-- and profile_id = 1
-- and team_id is null
-- and project_id is null;
-- select * from usage_quote_test
-- where target = 'files-per-project'
-- and ((team_id = 1 and (profile_id = 1 or profile_id is null)) or
-- (profile_id = 1 and team_id is null and project_id is null));
-- select * from usage_quote_test
-- where target = 'files-per-project'
-- and ((project_id = 1 and (profile_id = 1 or profile_id is null)) or
-- (team_id = 1 and (profile_id = 1 or profile_id is null)) or
-- (profile_id = 1 and team_id is null and project_id is null));

View file

@ -20,7 +20,8 @@
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.core :as p]))
[promesa.core :as p]
[promesa.exec :as px]))
(set! *warn-on-reflection* true)
@ -52,8 +53,8 @@
(s/def ::rcv-ch ::aa/channel)
(s/def ::pub-ch ::aa/channel)
(s/def ::state ::us/agent)
(s/def ::pconn ::redis/connection)
(s/def ::sconn ::redis/connection)
(s/def ::pconn ::redis/connection-holder)
(s/def ::sconn ::redis/connection-holder)
(s/def ::msgbus
(s/keys :req [::cmd-ch ::rcv-ch ::pub-ch ::state ::pconn ::sconn ::wrk/executor]))
@ -122,8 +123,8 @@
(defn- redis-disconnect
[{:keys [::pconn ::sconn] :as cfg}]
(redis/close! pconn)
(redis/close! sconn))
(d/close! pconn)
(d/close! sconn))
(defn- conj-subscription
"A low level function that is responsible to create on-demand
@ -138,7 +139,7 @@
(defn- disj-subscription
"A low level function responsible on removing subscriptions. The
subscription is trully removed from redis once no single local
subscription is truly removed from redis once no single local
subscription references it. Intended to be executed in an agent."
[nsubs cfg topic chan]
(let [nsubs (disj nsubs chan)]
@ -159,7 +160,7 @@
topics))))
(defn- unsubscribe-single-channel
"Auxiliar function responsible on removing a single local
"Auxiliary function responsible on removing a single local
subscription from the state."
[state cfg chan]
(let [topics (get-in state [:chans chan])
@ -205,31 +206,33 @@
(when-let [closed (a/<! (send-to-topic topic message))]
(send-via executor state unsubscribe-channels cfg closed nil))))
]
(px/thread
{:name "penpot/msgbus-io-loop"}
(loop []
(let [[val port] (a/alts!! [pub-ch rcv-ch])]
(cond
(nil? val)
(do
(l/trace :hint "stopping io-loop, nil received")
(send-via executor state (fn [state]
(->> (vals state)
(mapcat identity)
(filter some?)
(run! a/close!))
nil)))
(a/go-loop []
(let [[val port] (a/alts! [pub-ch rcv-ch])]
(cond
(nil? val)
(do
(l/trace :hint "stoping io-loop, nil received")
(send-via executor state (fn [state]
(->> (vals state)
(mapcat identity)
(filter some?)
(run! a/close!))
nil)))
(= port rcv-ch)
(do
(a/<!! (process-incoming val))
(recur))
(= port rcv-ch)
(do
(a/<! (process-incoming val))
(recur))
(= port pub-ch)
(let [result (a/<! (redis-pub cfg val))]
(when (ex/exception? result)
(l/error :hint "unexpected error on publishing" :message val
:cause result))
(recur)))))))
(= port pub-ch)
(let [result (a/<!! (redis-pub cfg val))]
(when (ex/exception? result)
(l/error :hint "unexpected error on publishing"
:message val
:cause result))
(recur))))))))
(defn- redis-pub
"Publish a message to the redis server. Asynchronous operation,

View file

@ -21,13 +21,19 @@
[promesa.core :as p])
(:import
clojure.lang.IDeref
clojure.lang.MapEntry
io.lettuce.core.KeyValue
io.lettuce.core.RedisClient
io.lettuce.core.RedisCommandInterruptedException
io.lettuce.core.RedisCommandTimeoutException
io.lettuce.core.RedisException
io.lettuce.core.RedisURI
io.lettuce.core.ScriptOutputType
io.lettuce.core.api.StatefulConnection
io.lettuce.core.api.StatefulRedisConnection
io.lettuce.core.api.async.RedisAsyncCommands
io.lettuce.core.api.async.RedisScriptingAsyncCommands
io.lettuce.core.api.sync.RedisCommands
io.lettuce.core.codec.ByteArrayCodec
io.lettuce.core.codec.RedisCodec
io.lettuce.core.codec.StringCodec
@ -45,13 +51,12 @@
(declare initialize-resources)
(declare shutdown-resources)
(declare connect)
(declare close!)
(declare connect*)
(s/def ::timer
#(instance? Timer %))
(s/def ::connection
(s/def ::default-connection
#(or (instance? StatefulRedisConnection %)
(and (instance? IDeref %)
(instance? StatefulRedisConnection (deref %)))))
@ -61,6 +66,13 @@
(and (instance? IDeref %)
(instance? StatefulRedisPubSubConnection (deref %)))))
(s/def ::connection
(s/or :default ::default-connection
:pubsub ::pubsub-connection))
(s/def ::connection-holder
(s/keys :req [::connection]))
(s/def ::redis-uri
#(instance? RedisURI %))
@ -75,32 +87,37 @@
(s/def ::connect? ::us/boolean)
(s/def ::io-threads ::us/integer)
(s/def ::worker-threads ::us/integer)
(s/def ::cache #(instance? clojure.lang.Atom %))
(s/def ::redis
(s/keys :req [::resources ::redis-uri ::timer ::mtx/metrics]
:opt [::connection]))
(defmethod ig/pre-init-spec ::redis [_]
(s/keys :req-un [::uri ::mtx/metrics]
:opt-un [::timeout
::connect?
::io-threads
::worker-threads]))
(s/keys :req [::resources
::redis-uri
::timer
::mtx/metrics]
:opt [::connection
::cache]))
(defmethod ig/prep-key ::redis
[_ cfg]
(let [runtime (Runtime/getRuntime)
cpus (.availableProcessors ^Runtime runtime)]
(merge {:timeout (dt/duration 5000)
:io-threads (max 3 cpus)
:worker-threads (max 3 cpus)}
(d/without-nils cfg))))
(merge {::timeout (dt/duration "10s")
::io-threads (max 3 cpus)
::worker-threads (max 3 cpus)}
(d/without-nils cfg))))
(defmethod ig/pre-init-spec ::redis [_]
(s/keys :req [::uri ::mtx/metrics]
:opt [::timeout
::connect?
::io-threads
::worker-threads]))
(defmethod ig/init-key ::redis
[_ {:keys [connect?] :as cfg}]
(let [cfg (initialize-resources cfg)]
(cond-> cfg
connect? (assoc ::connection (connect cfg)))))
[_ {:keys [::connect?] :as cfg}]
(let [state (initialize-resources cfg)]
(cond-> state
connect? (assoc ::connection (connect* cfg {})))))
(defmethod ig/halt-key! ::redis
[_ state]
@ -114,7 +131,7 @@
(defn- initialize-resources
"Initialize redis connection resources"
[{:keys [uri io-threads worker-threads connect? metrics] :as cfg}]
[{:keys [::uri ::io-threads ::worker-threads ::connect?] :as cfg}]
(l/info :hint "initialize redis resources"
:uri uri
:io-threads io-threads
@ -131,34 +148,32 @@
redis-uri (RedisURI/create ^String uri)]
(-> cfg
(assoc ::mtx/metrics metrics)
(assoc ::cache (atom {}))
(assoc ::resources resources)
(assoc ::timer timer)
(assoc ::redis-uri redis-uri)
(assoc ::resources resources))))
(assoc ::cache (atom {}))
(assoc ::redis-uri redis-uri))))
(defn- shutdown-resources
[{:keys [::resources ::cache ::timer]}]
(run! close! (vals @cache))
(run! d/close! (vals @cache))
(when resources
(.shutdown ^ClientResources resources))
(when timer
(.stop ^Timer timer)))
(defn connect
[{:keys [::resources ::redis-uri] :as cfg}
& {:keys [timeout codec type] :or {codec default-codec type :default}}]
(defn connect*
[{:keys [::resources ::redis-uri] :as state}
{:keys [timeout codec type]
:or {codec default-codec type :default}}]
(us/assert! ::resources resources)
(let [client (RedisClient/create ^ClientResources resources ^RedisURI redis-uri)
timeout (or timeout (:timeout cfg))
timeout (or timeout (::timeout state))
conn (case type
:default (.connect ^RedisClient client ^RedisCodec codec)
:pubsub (.connectPubSub ^RedisClient client ^RedisCodec codec))]
(.setTimeout ^StatefulConnection conn ^Duration timeout)
(reify
IDeref
(deref [_] conn)
@ -168,53 +183,113 @@
(.close ^StatefulConnection conn)
(.shutdown ^RedisClient client)))))
(defn connect
[state & {:as opts}]
(let [connection (connect* state opts)]
(-> state
(assoc ::connection connection)
(dissoc ::cache)
(vary-meta assoc `d/close! (fn [_] (d/close! connection))))))
(defn get-or-connect
[{:keys [::cache] :as state} key options]
(assoc state ::connection
(or (get @cache key)
(-> (swap! cache (fn [cache]
(when-let [prev (get cache key)]
(close! prev))
(assoc cache key (connect state options))))
(get key)))))
(-> state
(assoc ::connection
(or (get @cache key)
(-> (swap! cache (fn [cache]
(when-let [prev (get cache key)]
(d/close! prev))
(assoc cache key (connect* state options))))
(get key))))
(dissoc ::cache)))
(defn add-listener!
[conn listener]
(us/assert! ::pubsub-connection @conn)
[{:keys [::connection] :as conn} listener]
(us/assert! ::connection-holder conn)
(us/assert! ::pubsub-connection connection)
(us/assert! ::pubsub-listener listener)
(.addListener ^StatefulRedisPubSubConnection @conn
(.addListener ^StatefulRedisPubSubConnection @connection
^RedisPubSubListener listener)
conn)
(defn publish!
[conn topic message]
[{:keys [::connection] :as conn} topic message]
(us/assert! ::us/string topic)
(us/assert! ::us/bytes message)
(us/assert! ::connection @conn)
(us/assert! ::connection-holder conn)
(us/assert! ::default-connection connection)
(let [pcomm (.async ^StatefulRedisConnection @conn)]
(let [pcomm (.async ^StatefulRedisConnection @connection)]
(.publish ^RedisAsyncCommands pcomm ^String topic ^bytes message)))
(defn subscribe!
"Blocking operation, intended to be used on a worker/agent thread."
[conn & topics]
(us/assert! ::pubsub-connection @conn)
(let [topics (into-array String (map str topics))
cmd (.sync ^StatefulRedisPubSubConnection @conn)]
(.subscribe ^RedisPubSubCommands cmd topics)))
"Blocking operation, intended to be used from a dedicated thread."
[{:keys [::connection] :as conn} & topics]
(us/assert! ::connection-holder conn)
(us/assert! ::pubsub-connection connection)
(try
(let [topics (into-array String (map str topics))
cmd (.sync ^StatefulRedisPubSubConnection @connection)]
(.subscribe ^RedisPubSubCommands cmd topics))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(defn unsubscribe!
"Blocking operation, intended to be used on a worker/agent thread."
[conn & topics]
(us/assert! ::pubsub-connection @conn)
(let [topics (into-array String (map str topics))
cmd (.sync ^StatefulRedisPubSubConnection @conn)]
(.unsubscribe ^RedisPubSubCommands cmd topics)))
"Blocking operation, intended to be used from a dedicated thread."
[{:keys [::connection] :as conn} & topics]
(us/assert! ::connection-holder conn)
(us/assert! ::pubsub-connection connection)
(try
(let [topics (into-array String (map str topics))
cmd (.sync ^StatefulRedisPubSubConnection @connection)]
(.unsubscribe ^RedisPubSubCommands cmd topics))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(defn rpush!
[{:keys [::connection] :as conn} key payload]
(us/assert! ::connection-holder conn)
(us/assert! (or (and (vector? payload)
(every? bytes? payload))
(bytes? payload)))
(try
(let [cmd (.sync ^StatefulRedisConnection @connection)
data (if (vector? payload) payload [payload])
vals (make-array (. Class (forName "[B")) (count data))]
(loop [i 0 xs (seq data)]
(when xs
(aset ^"[[B" vals i ^bytes (first xs))
(recur (inc i) (next xs))))
(.rpush ^RedisCommands cmd
^String key
^"[[B" vals))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(defn blpop!
[{:keys [::connection] :as conn} timeout & keys]
(us/assert! ::connection-holder conn)
(try
(let [keys (into-array Object (map str keys))
cmd (.sync ^StatefulRedisConnection @connection)
timeout (/ (double (inst-ms timeout)) 1000.0)]
(when-let [res (.blpop ^RedisCommands cmd
^double timeout
^"[Ljava.lang.String;" keys)]
(MapEntry/create
(.getKey ^KeyValue res)
(.getValue ^KeyValue res))))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
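;; Illustrative sketch (conn, key and payload are placeholders):
;; round-tripping a value through the list helpers above. blpop! blocks up
;; to the given timeout and returns a MapEntry of [key bytes], or nil.
(comment
  (rpush! conn "penpot:queue:test" (byte-array [1 2 3]))
  (blpop! conn (dt/duration "5s") "penpot:queue:test"))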
(defn open?
[conn]
(.isOpen ^StatefulConnection @conn))
[{:keys [::connection] :as conn}]
(us/assert! ::connection-holder conn)
(us/assert! ::pubsub-connection connection)
(.isOpen ^StatefulConnection @connection))
(defn pubsub-listener
[& {:keys [on-message on-subscribe on-unsubscribe]}]
@ -243,10 +318,6 @@
(when on-unsubscribe
(on-unsubscribe nil topic count)))))
(defn close!
[o]
(.close ^AutoCloseable o))
(def ^:private scripts-cache (atom {}))
(def noop-fn (constantly nil))
@ -262,12 +333,12 @@
::rscript/vals]))
(defn eval!
[{:keys [::mtx/metrics] :as state} script]
(us/assert! ::rscript/script script)
[{:keys [::mtx/metrics ::connection] :as state} script]
(us/assert! ::redis state)
(us/assert! ::connection-holder state)
(us/assert! ::rscript/script script)
(let [rconn (-> state ::connection deref)
cmd (.async ^StatefulRedisConnection rconn)
(let [cmd (.async ^StatefulRedisConnection @connection)
keys (into-array String (map str (::rscript/keys script)))
vals (into-array String (map str (::rscript/vals script)))
sname (::rscript/name script)]
@ -276,44 +347,52 @@
(if (instance? io.lettuce.core.RedisNoScriptException cause)
(do
(l/error :hint "no script found" :name sname :cause cause)
(-> (load-script)
(p/then eval-script)))
(->> (load-script)
(p/mapcat eval-script)))
(if-let [on-error (::rscript/on-error script)]
(on-error cause)
(p/rejected cause))))
(eval-script [sha]
(let [tpoint (dt/tpoint)]
(-> (.evalsha ^RedisScriptingAsyncCommands cmd
^String sha
^ScriptOutputType ScriptOutputType/MULTI
^"[Ljava.lang.String;" keys
^"[Ljava.lang.String;" vals)
(p/then (fn [result]
(let [elapsed (tpoint)]
(mtx/run! metrics {:id :redis-eval-timing
:labels [(name sname)]
:val (inst-ms elapsed)})
(l/trace :hint "eval script"
:name (name sname)
:sha sha
:params (str/join "," (::rscript/vals script))
:elapsed (dt/format-duration elapsed))
result)))
(p/catch on-error))))
(->> (.evalsha ^RedisScriptingAsyncCommands cmd
^String sha
^ScriptOutputType ScriptOutputType/MULTI
^"[Ljava.lang.String;" keys
^"[Ljava.lang.String;" vals)
(p/fmap (fn [result]
(let [elapsed (tpoint)]
(mtx/run! metrics {:id :redis-eval-timing
:labels [(name sname)]
:val (inst-ms elapsed)})
(l/trace :hint "eval script"
:name (name sname)
:sha sha
:params (str/join "," (::rscript/vals script))
:elapsed (dt/format-duration elapsed))
result)))
(p/merr on-error))))
(read-script []
(-> script ::rscript/path io/resource slurp))
(load-script []
(l/trace :hint "load script" :name sname)
(-> (.scriptLoad ^RedisScriptingAsyncCommands cmd
(->> (.scriptLoad ^RedisScriptingAsyncCommands cmd
^String (read-script))
(p/then (fn [sha]
(p/map (fn [sha]
(swap! scripts-cache assoc sname sha)
sha))))]
(if-let [sha (get @scripts-cache sname)]
(eval-script sha)
(-> (load-script)
(p/then eval-script))))))
(->> (load-script)
(p/mapcat eval-script))))))
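;; Illustrative sketch (script name, path and arguments are assumptions;
;; the map keys follow the ::rscript spec used above): evaluating a cached
;; lua script against the current connection.
(comment
  (eval! redis {::rscript/name :sample-script
                ::rscript/path "app/redis/scripts/sample.lua"
                ::rscript/keys ["some-key"]
                ::rscript/vals [10 60]}))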
(defn timeout-exception?
[cause]
(instance? RedisCommandTimeoutException cause))
(defn exception?
[cause]
(instance? RedisException cause))

View file

@ -6,58 +6,82 @@
(ns app.rpc
(:require
[app.auth.ldap :as-alias ldap]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.http :as-alias http]
[app.http.client :as-alias http.client]
[app.http.session :as-alias http.session]
[app.loggers.audit :as audit]
[app.loggers.webhooks :as-alias webhooks]
[app.metrics :as mtx]
[app.msgbus :as-alias mbus]
[app.rpc.climit :as climit]
[app.rpc.cond :as cond]
[app.rpc.helpers :as rph]
[app.rpc.retry :as retry]
[app.rpc.rlimit :as rlimit]
[app.rpc.semaphore :as-alias rsem]
[app.storage :as-alias sto]
[app.util.services :as sv]
[app.util.time :as ts]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px]
[yetti.request :as yrq]
[yetti.response :as yrs]))
(s/def ::profile-id ::us/uuid)
(defn- default-handler
[_]
(p/rejected (ex/error :type :not-found)))
(defn- handle-response-transformation
[response request mdata]
(let [response (if (sv/wrapped? response) @response response)]
(if-let [transform-fn (:transform-response mdata)]
(p/do (transform-fn request response))
(p/resolved response))))
(let [transform-fn (reduce (fn [res-fn transform-fn]
(fn [request response]
(p/then (res-fn request response) #(transform-fn request %))))
(constantly response)
(::response-transform-fns mdata))]
(transform-fn request response)))
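;; Illustrative sketch (request and the fns are placeholders): transform
;; fns registered under ::response-transform-fns are applied in order over
;; the response, each step awaited through promesa.
(comment
  (handle-response-transformation
   {:status 200 :headers {}}
   request
   {::response-transform-fns
    [(fn [_ rsp] (assoc-in rsp [:headers "x-a"] "1"))
     (fn [_ rsp] (assoc-in rsp [:headers "x-b"] "2"))]}))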
(defn- handle-before-comple-hook
[response mdata]
(when-let [hook-fn (:before-complete mdata)]
(doseq [hook-fn (::before-complete-fns mdata)]
(ex/ignoring (hook-fn)))
response)
(defn- handle-response
[request result]
(let [mdata (meta result)]
(p/-> (yrs/response 200 result (::http/headers mdata {}))
(handle-response-transformation request mdata)
(handle-before-comple-hook mdata))))
(if (fn? result)
(p/wrap (result request))
(let [mdata (meta result)]
(p/-> (yrs/response {:status (::http/status mdata 200)
:headers (::http/headers mdata {})
:body (rph/unwrap result)})
(handle-response-transformation request mdata)
(handle-before-comple-hook mdata)))))
(defn- rpc-query-handler
"Ring handler that dispatches query requests and convert between
internal async flow into ring async flow."
[methods {:keys [profile-id session-id params] :as request} respond raise]
(let [type (keyword (:type params))
data (into {::http/request request} params)
[methods {:keys [profile-id session-id path-params params] :as request} respond raise]
(let [type (keyword (:type path-params))
data (-> params
(assoc ::request-at (dt/now))
(assoc ::http/request request))
data (if profile-id
(assoc data :profile-id profile-id ::session-id session-id)
(dissoc data :profile-id))
(-> data
(assoc :profile-id profile-id)
(assoc ::profile-id profile-id)
(assoc ::session-id session-id))
(dissoc data :profile-id ::profile-id))
method (get methods type default-handler)]
(-> (method data)
@ -70,13 +94,17 @@
(defn- rpc-mutation-handler
"Ring handler that dispatches mutation requests and convert between
internal async flow into ring async flow."
[methods {:keys [profile-id session-id params] :as request} respond raise]
(let [type (keyword (:type params))
data (into {::request request} params)
[methods {:keys [profile-id session-id path-params params] :as request} respond raise]
(let [type (keyword (:type path-params))
data (-> params
(assoc ::request-at (dt/now))
(assoc ::http/request request))
data (if profile-id
(assoc data :profile-id profile-id ::session-id session-id)
(dissoc data :profile-id))
(-> data
(assoc :profile-id profile-id)
(assoc ::profile-id profile-id)
(assoc ::session-id session-id))
(dissoc data :profile-id ::profile-id))
method (get methods type default-handler)]
(-> (method data)
(p/then (partial handle-response request))
@ -88,27 +116,33 @@
(defn- rpc-command-handler
"Ring handler that dispatches cmd requests and convert between
internal async flow into ring async flow."
[methods {:keys [profile-id session-id params] :as request} respond raise]
(let [cmd (keyword (:command params))
data (into {::request request} params)
data (if profile-id
(assoc data :profile-id profile-id ::session-id session-id)
(dissoc data :profile-id))
[methods {:keys [profile-id session-id path-params params] :as request} respond raise]
(let [cmd (keyword (:type path-params))
etag (yrq/get-header request "if-none-match")
data (-> params
(assoc ::request-at (dt/now))
(assoc ::http/request request)
(assoc ::cond/key etag)
(cond-> (uuid? profile-id)
(-> (assoc ::profile-id profile-id)
(assoc ::session-id session-id))))
method (get methods cmd default-handler)]
(-> (method data)
(p/then (partial handle-response request))
(p/then respond)
(p/catch (fn [cause]
(let [context {:profile-id profile-id}]
(raise (ex/wrap-with-context cause context))))))))
(binding [cond/*enabled* true]
(-> (method data)
(p/then (partial handle-response request))
(p/then respond)
(p/catch (fn [cause]
(let [context {:profile-id profile-id}]
(raise (ex/wrap-with-context cause context)))))))))
(defn- wrap-metrics
"Wrap service method with metrics measurement."
[{:keys [metrics ::metrics-id]} f mdata]
(let [labels (into-array String [(::sv/name mdata)])]
(fn [cfg params]
(let [tp (ts/tpoint)]
(let [tp (dt/tpoint)]
(p/finally
(f cfg params)
(fn [_ _]
@ -123,63 +157,101 @@
[{:keys [executor] :as cfg} f mdata]
(with-meta
(fn [cfg params]
(-> (px/submit! executor #(f cfg params))
(p/bind p/wrap)))
(->> (px/submit! executor (px/wrap-bindings #(f cfg params)))
(p/mapcat p/wrap)
(p/map rph/wrap)))
mdata))
(defn- wrap-audit
[{:keys [audit] :as cfg} f mdata]
(if audit
(with-meta
(fn [cfg {:keys [::request] :as params}]
(p/finally (f cfg params)
(fn [result _]
(when result
(let [resultm (meta result)
profile-id (or (::audit/profile-id resultm)
(:profile-id result)
(:profile-id params))
props (or (::audit/replace-props resultm)
(-> params
(merge (::audit/props resultm))
(dissoc :type)))]
(audit :cmd :submit
:type (or (::audit/type resultm)
[cfg f mdata]
(if-let [collector (::audit/collector cfg)]
(letfn [(handle-audit [params result]
(let [resultm (meta result)
request (::http/request params)
profile-id (or (::audit/profile-id resultm)
(:profile-id result)
(if (= (::type cfg) "command")
(::profile-id params)
(:profile-id params))
uuid/zero)
props (-> (or (::audit/replace-props resultm)
(-> params
(merge (::audit/props resultm))
(dissoc :profile-id)
(dissoc :type)))
(d/without-qualified)
(d/without-nils))
event {:type (or (::audit/type resultm)
(::type cfg))
:name (or (::audit/name resultm)
(::sv/name mdata))
:profile-id profile-id
:ip-addr (some-> request audit/parse-client-ip)
:props (dissoc props ::request)))))))
mdata)
:props props
;; NOTE: for batch-key lookup we need the params as-is
;; because the rpc api does not need to know the
;; audit/webhook specific object layout.
::params (dissoc params ::http/request)
::webhooks/batch-key
(or (::webhooks/batch-key mdata)
(::webhooks/batch-key resultm))
::webhooks/batch-timeout
(or (::webhooks/batch-timeout mdata)
(::webhooks/batch-timeout resultm))
::webhooks/event?
(or (::webhooks/event? mdata)
(::webhooks/event? resultm)
false)}]
(audit/submit! collector event)))
(handle-request [cfg params]
(->> (f cfg params)
(p/mcat (fn [result]
(->> (handle-audit params result)
(p/map (constantly result)))))))]
(if-not (::audit/skip mdata)
(with-meta handle-request mdata)
f))
f))
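For orientation, the event map handed to audit/submit! above looks roughly like the following; all values are invented for illustration and only the keys are taken from the code:

;; {:type       "command"
;;  :name       "create-comment-thread"
;;  :profile-id #uuid "00000000-0000-0000-0000-000000000000"
;;  :ip-addr    "203.0.113.10"
;;  :props      {:file-id file-id}          ; params without qualified/nil keys
;;  ::params    params                      ; params as-is, for batch-key lookup
;;  ::webhooks/batch-key     nil
;;  ::webhooks/batch-timeout nil
;;  ::webhooks/event?        false}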
(defn- wrap
[cfg f mdata]
(let [f (as-> f $
(wrap-dispatch cfg $ mdata)
(wrap-metrics cfg $ mdata)
(cond/wrap cfg $ mdata)
(retry/wrap-retry cfg $ mdata)
(rsem/wrap cfg $ mdata)
(wrap-metrics cfg $ mdata)
(climit/wrap cfg $ mdata)
(rlimit/wrap cfg $ mdata)
(wrap-audit cfg $ mdata))
spec (or (::sv/spec mdata) (s/spec any?))
auth? (:auth mdata true)]
auth? (::auth mdata true)]
(l/debug :hint "register method" :name (::sv/name mdata))
(with-meta
(fn [{:keys [::request] :as params}]
(fn [params]
;; Raise authentication error when rpc method requires auth but
;; no profile-id is found in the request.
(p/do!
(if (and auth? (not (uuid? (:profile-id params))))
(ex/raise :type :authentication
:code :authentication-required
:hint "authentication required for this endpoint")
(let [params (us/conform spec (dissoc params ::request))]
(f cfg (assoc params ::request request))))))
(let [profile-id (if (= "command" (::type cfg))
(::profile-id params)
(:profile-id params))]
(p/do!
(if (and auth? (not (uuid? profile-id)))
(ex/raise :type :authentication
:code :authentication-required
:hint "authentication required for this endpoint")
(let [params (us/conform spec params)]
(f cfg params))))))
mdata)))
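To make the dispatch contract concrete, a wrapped method receives a params map shaped roughly as follows; the key names come from the handlers above and every value is invented:

;; {:app.rpc/profile-id  #uuid "..."   ; only on authenticated requests
;;  :app.rpc/session-id  #uuid "..."
;;  :app.rpc/request-at  (dt/now)
;;  :app.http/request    request       ; the raw ring/yetti request map
;;  :app.rpc.cond/key    etag          ; command requests only (if-none-match)
;;  ;; ...plus the service-specific params validated against its spec
;;  }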
(defn- process-method
@ -194,7 +266,6 @@
(->> (sv/scan-ns 'app.rpc.queries.projects
'app.rpc.queries.files
'app.rpc.queries.teams
'app.rpc.queries.comments
'app.rpc.queries.profile
'app.rpc.queries.viewer
'app.rpc.queries.fonts)
@ -207,13 +278,10 @@
(->> (sv/scan-ns 'app.rpc.mutations.media
'app.rpc.mutations.profile
'app.rpc.mutations.files
'app.rpc.mutations.comments
'app.rpc.mutations.projects
'app.rpc.mutations.teams
'app.rpc.mutations.management
'app.rpc.mutations.fonts
'app.rpc.mutations.share-link
'app.rpc.mutations.verify-token)
'app.rpc.mutations.share-link)
(map (partial process-method cfg))
(into {}))))
@ -224,35 +292,45 @@
'app.rpc.commands.comments
'app.rpc.commands.management
'app.rpc.commands.verify-token
'app.rpc.commands.search
'app.rpc.commands.media
'app.rpc.commands.teams
'app.rpc.commands.auth
'app.rpc.commands.ldap
'app.rpc.commands.demo
'app.rpc.commands.files)
'app.rpc.commands.webhooks
'app.rpc.commands.audit
'app.rpc.commands.files
'app.rpc.commands.files.update
'app.rpc.commands.files.create
'app.rpc.commands.files.temp)
(map (partial process-method cfg))
(into {}))))
(s/def ::audit (s/nilable fn?))
(s/def ::http-client fn?)
(s/def ::ldap (s/nilable map?))
(s/def ::msgbus ::mbus/msgbus)
(s/def ::climit (s/nilable ::climit/climit))
(s/def ::rlimit (s/nilable ::rlimit/rlimit))
(s/def ::public-uri ::us/not-empty-string)
(s/def ::session map?)
(s/def ::storage some?)
(s/def ::sprops map?)
(defmethod ig/pre-init-spec ::methods [_]
(s/keys :req-un [::storage
::session
(s/keys :req [::audit/collector
::http.client/client
::db/pool
::ldap/provider
::wrk/executor]
:req-un [::sto/storage
::http.session/session
::sprops
::audit
::public-uri
::msgbus
::http-client
::rsem/semaphores
::rlimit/rlimit
::rlimit
::climit
::wrk/executor
::mtx/metrics
::db/pool
::ldap]))
::db/pool]))
(defmethod ig/init-key ::methods
[_ cfg]
@ -280,7 +358,7 @@
(defmethod ig/init-key ::routes
[_ {:keys [methods] :as cfg}]
[["/rpc"
["/command/:command" {:handler (partial rpc-command-handler (:commands methods))}]
["/command/:type" {:handler (partial rpc-command-handler (:commands methods))}]
["/query/:type" {:handler (partial rpc-query-handler (:queries methods))}]
["/mutation/:type" {:handler (partial rpc-mutation-handler (:mutations methods))
:allowed-methods #{:post}}]]])

View file

@ -0,0 +1,204 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.climit
"Concurrencly limiter for RPC."
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.config :as cf]
[app.metrics :as mtx]
[app.rpc :as-alias rpc]
[app.util.services :as-alias sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.edn :as edn]
[clojure.spec.alpha :as s]
[datoteka.fs :as fs]
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px]
[promesa.exec.bulkhead :as pxb])
(:import
com.github.benmanes.caffeine.cache.Cache
com.github.benmanes.caffeine.cache.CacheLoader
com.github.benmanes.caffeine.cache.Caffeine
com.github.benmanes.caffeine.cache.RemovalListener))
(defn- capacity-exception?
[o]
(and (ex/ex-info? o)
(let [data (ex-data o)]
(and (= :bulkhead-error (:type data))
(= :capacity-limit-reached (:code data))))))
(defn invoke!
[limiter f]
(->> (px/submit! limiter f)
(p/hcat (fn [result cause]
(cond
(capacity-exception? cause)
(p/rejected
(ex/error :type :internal
:code :concurrency-limit-reached
:queue (-> limiter meta :bkey name)
:cause cause))
(some? cause)
(p/rejected cause)
:else
(p/resolved result))))))
(defn- create-limiter
[{:keys [executor metrics concurrency queue-size bkey skey]}]
(let [labels (into-array String [(name bkey)])
on-queue (fn [instance]
(l/trace :hint "enqueued"
:key (name bkey)
:skey (str skey)
:queue-size (get instance ::pxb/current-queue-size)
:concurrency (get instance ::pxb/current-concurrency))
(mtx/run! metrics
:id :rpc-climit-queue-size
:val (get instance ::pxb/current-queue-size)
:labels labels)
(mtx/run! metrics
:id :rpc-climit-concurrency
:val (get instance ::pxb/current-concurrency)
:labels labels))
on-run (fn [instance task]
(let [elapsed (- (inst-ms (dt/now))
(inst-ms task))]
(l/trace :hint "execute"
:key (name bkey)
:skey (str skey)
:elapsed (str elapsed "ms"))
(mtx/run! metrics
:id :rpc-climit-timing
:val elapsed
:labels labels)
(mtx/run! metrics
:id :rpc-climit-queue-size
:val (get instance ::pxb/current-queue-size)
:labels labels)
(mtx/run! metrics
:id :rpc-climit-concurrency
:val (get instance ::pxb/current-concurrency)
:labels labels)))
options {:executor executor
:concurrency concurrency
:queue-size (or queue-size Integer/MAX_VALUE)
:on-queue on-queue
:on-run on-run}]
(-> (pxb/create options)
(vary-meta assoc :bkey bkey :skey skey))))
(defn- create-cache
[{:keys [executor] :as params} config]
(let [listener (reify RemovalListener
(onRemoval [_ key _val cause]
(l/trace :hint "cache: remove" :key key :reason (str cause))))
loader (reify CacheLoader
(load [_ key]
(let [[bkey skey] key]
(when-let [config (get config bkey)]
(-> (merge params config)
(assoc :bkey bkey)
(assoc :skey skey)
(create-limiter))))))]
(.. (Caffeine/newBuilder)
(weakValues)
(executor executor)
(removalListener listener)
(build loader))))
(defprotocol IConcurrencyManager)
(s/def ::concurrency ::us/integer)
(s/def ::queue-size ::us/integer)
(s/def ::config
(s/map-of keyword?
(s/keys :req-un [::concurrency]
:opt-un [::queue-size])))
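An illustrative configuration that would satisfy the ::config spec above; the file path comes from the :rpc-climit-config setting and every queue name and number here is made up for the example:

;; {:auth              {:concurrency 16}
;;  :push-audit-events {:concurrency 1 :queue-size 128}
;;  :process-image     {:concurrency 2 :queue-size 32}}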
(defmethod ig/prep-key ::rpc/climit
[_ cfg]
(merge {:path (cf/get :rpc-climit-config)}
(d/without-nils cfg)))
(defmethod ig/pre-init-spec ::rpc/climit [_]
(s/keys :req-un [::wrk/executor ::mtx/metrics ::fs/path]))
(defmethod ig/init-key ::rpc/climit
[_ {:keys [path] :as params}]
(when (contains? cf/flags :rpc-climit)
(if-let [config (some->> path slurp edn/read-string)]
(do
(l/info :hint "initializing concurrency limit" :config (str path))
(us/verify! ::config config)
(let [cache (create-cache params config)]
^{::cache cache}
(reify
IConcurrencyManager
clojure.lang.IDeref
(deref [_] config)
clojure.lang.ILookup
(valAt [_ key]
(let [key (if (vector? key) key [key])]
(.get ^Cache cache key))))))
(l/warn :hint "unable to load configuration" :config (str path)))))
(s/def ::climit #(satisfies? IConcurrencyManager %))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro with-dispatch
[lim & body]
`(if ~lim
(invoke! ~lim (^:once fn [] (p/wrap (do ~@body))))
(p/wrap (do ~@body))))
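A hedged usage sketch of the macro above: when a limiter exists for the requested queue the body is submitted through it, otherwise it runs directly. The queue name matches one used elsewhere in this changeset, but the body is invented:

(comment
  (let [lim (get climit :auth)]        ; nil when the :rpc-climit flag is off
    @(with-dispatch lim
       (authenticate-user! params))))  ; hypothetical expensive operation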
(defn wrap
[{:keys [climit]} f {:keys [::queue ::key-fn] :as mdata}]
(if (and (some? climit)
(some? queue))
(if-let [config (get @climit queue)]
(do
(l/debug :hint "wrap: instrumenting method"
:limit-name (name queue)
:service-name (::sv/name mdata)
:queue-size (or (:queue-size config) Integer/MAX_VALUE)
:concurrency (:concurrency config)
:keyed? (some? key-fn))
(if (some? key-fn)
(fn [cfg params]
(let [key [queue (key-fn params)]
lim (get climit key)]
(invoke! lim (partial f cfg params))))
(let [lim (get climit queue)]
(fn [cfg params]
(invoke! lim (partial f cfg params))))))
(do
(l/warn :hint "wrap: no config found"
:queue (name queue)
:service (::sv/name mdata))
f))
f))

View file

@ -0,0 +1,87 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.audit
"Audit Log related RPC methods"
(:require
[app.common.data :as d]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
[app.loggers.audit :as audit]
[app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[promesa.core :as p]
[promesa.exec :as px]))
(defn- event->row [event]
[(uuid/next)
(:name event)
(:source event)
(:type event)
(:timestamp event)
(:profile-id event)
(db/inet (:ip-addr event))
(db/tjson (:props event))
(db/tjson (d/without-nils (:context event)))])
(def ^:private event-columns
[:id :name :source :type :tracked-at
:profile-id :ip-addr :props :context])
(defn- handle-events
[{:keys [::db/pool]} {:keys [::rpc/profile-id events ::http/request] :as params}]
(let [ip-addr (audit/parse-client-ip request)
xform (comp
(map #(assoc % :profile-id profile-id))
(map #(assoc % :ip-addr ip-addr))
(map #(assoc % :source "frontend"))
(filter :profile-id)
(map event->row))
events (sequence xform events)]
(when (seq events)
(db/insert-multi! pool :audit-log event-columns events))))
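For illustration, a single frontend event before and after the transducer above (all values invented): the xform stamps the profile id, the client IP and the source, drops events that end up without a profile, and event->row finally turns each one into an insert row.

(comment
  ;; as received from the frontend:
  {:name "navigate" :type "action" :timestamp (dt/now) :props {:route "dashboard"}}
  ;; after the xform, just before event->row:
  {:name "navigate" :type "action" :timestamp (dt/now) :props {:route "dashboard"}
   :profile-id profile-id :ip-addr ip-addr :source "frontend"})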
(s/def ::name ::us/string)
(s/def ::type ::us/string)
(s/def ::props (s/map-of ::us/keyword any?))
(s/def ::timestamp dt/instant?)
(s/def ::context (s/map-of ::us/keyword any?))
(s/def ::event
(s/keys :req-un [::type ::name ::props ::timestamp]
:opt-un [::context]))
(s/def ::events (s/every ::event))
(s/def ::push-audit-events
(s/keys :req [::rpc/profile-id]
:req-un [::events]))
(sv/defmethod ::push-audit-events
{::climit/queue :push-audit-events
::climit/key-fn ::rpc/profile-id
::audit/skip true
::doc/added "1.17"}
[{:keys [::db/pool ::wrk/executor] :as cfg} params]
(if (or (db/read-only? pool)
(not (contains? cf/flags :audit-log)))
(do
(l/warn :hint "audit: http handler disabled or db is read-only")
(rph/wrap nil))
(->> (px/submit! executor #(handle-events cfg params))
(p/fmap (constantly nil)))))

View file

@ -6,6 +6,7 @@
(ns app.rpc.commands.auth
(:require
[app.auth :as auth]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.spec :as us]
@ -13,15 +14,18 @@
[app.config :as cf]
[app.db :as db]
[app.emails :as eml]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.main :as-alias main]
[app.rpc :as-alias rpc]
[app.rpc.climit :as climit]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.mutations.teams :as teams]
[app.rpc.helpers :as rph]
[app.rpc.queries.profile :as profile]
[app.rpc.semaphore :as rsem]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.util.time :as dt]
[buddy.hashers :as hashers]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]))
@ -29,7 +33,6 @@
(s/def ::fullname ::us/not-empty-string)
(s/def ::lang ::us/string)
(s/def ::path ::us/string)
(s/def ::profile-id ::us/uuid)
(s/def ::password ::us/not-empty-string)
(s/def ::old-password ::us/not-empty-string)
(s/def ::theme ::us/string)
@ -38,22 +41,6 @@
;; ---- HELPERS
(defn derive-password
[password]
(hashers/derive password
{:alg :argon2id
:memory 16384
:iterations 20
:parallelism 2}))
(defn verify-password
[attempt password]
(try
(hashers/verify attempt password)
(catch Exception _e
{:update false
:valid false})))
(defn email-domain-in-whitelist?
"Returns true if email's domain is in the given whitelist or if
given whitelist is an empty string."
@ -82,9 +69,10 @@
;; ---- COMMAND: login with password
(defn login-with-password
[{:keys [pool session sprops] :as cfg} {:keys [email password] :as params}]
[{:keys [::db/pool session] :as cfg} {:keys [email password] :as params}]
(when-not (contains? cf/flags :login)
(when-not (or (contains? cf/flags :login)
(contains? cf/flags :login-with-password))
(ex/raise :type :restriction
:code :login-disabled
:hint "login is disabled in this instance"))
@ -94,7 +82,7 @@
(ex/raise :type :validation
:code :account-without-password
:hint "the current account does not have password"))
(:valid (verify-password password (:password profile))))
(:valid (auth/verify-password password (:password profile))))
(validate-profile [profile]
(when-not profile
@ -124,28 +112,29 @@
(profile/decode-profile-row))
invitation (when-let [token (:invitation-token params)]
(tokens/verify sprops {:token token :iss :team-invitation}))
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))
                     ;; If invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation because invitations matches exactly; and user can't loging with other email and
                     ;; invitation because invitations match exactly; and user can't login with other email and
;; accept invitation with other email
response (if (and (some? invitation) (= (:id profile) (:member-id invitation)))
{:invitation-token (:invitation-token params)}
profile)]
(with-meta response
{:transform-response ((:create session) (:id profile))
::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)})))))
(assoc profile :is-admin (let [admins (cf/get :admins)]
(contains? admins (:email profile)))))]
(-> response
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)}))))))
(s/def ::scope ::us/string)
(s/def ::login-with-password
(s/keys :req-un [::email ::password]
:opt-un [::invitation-token]))
:opt-un [::invitation-token ::scope]))
(sv/defmethod ::login-with-password
"Performs authentication using penpot password."
{:auth false
::rsem/queue :auth
{::rpc/auth false
::climit/queue :auth
::doc/added "1.15"}
[cfg params]
(login-with-password cfg params))
@ -153,26 +142,25 @@
;; ---- COMMAND: Logout
(s/def ::logout
(s/keys :opt-un [::profile-id]))
(s/keys :opt [::rpc/profile-id]))
(sv/defmethod ::logout
"Clears the authentication cookie and logout the current session."
{:auth false
{::rpc/auth false
::doc/added "1.15"}
[{:keys [session] :as cfg} _]
(with-meta {}
{:transform-response (:delete session)}))
(rph/with-transform {} (session/delete-fn session)))
;; ---- COMMAND: Recover Profile
(defn recover-profile
[{:keys [pool sprops] :as cfg} {:keys [token password]}]
[{:keys [::db/pool] :as cfg} {:keys [token password]}]
(letfn [(validate-token [token]
(let [tdata (tokens/verify sprops {:token token :iss :password-recovery})]
(let [tdata (tokens/verify (::main/props cfg) {:token token :iss :password-recovery})]
(:profile-id tdata)))
(update-password [conn profile-id]
(let [pwd (derive-password password)]
(let [pwd (auth/derive-password password)]
(db/update! conn :profile {:password pwd} {:id profile-id})))]
(db/with-atomic [conn pool]
@ -185,8 +173,8 @@
(s/keys :req-un [::token ::password]))
(sv/defmethod ::recover-profile
{:auth false
::rsem/queue :auth
{::rpc/auth false
::climit/queue :auth
::doc/added "1.15"}
[cfg params]
(recover-profile cfg params))
@ -194,13 +182,13 @@
;; ---- COMMAND: Prepare Register
(defn validate-register-attempt!
[{:keys [pool sprops]} params]
[{:keys [::db/pool] :as cfg} params]
(when-not (contains? cf/flags :registration)
(if-not (contains? params :invitation-token)
(ex/raise :type :restriction
:code :registration-disabled)
(let [invitation (tokens/verify sprops {:token (:invitation-token params) :iss :team-invitation})]
(let [invitation (tokens/verify (::main/props cfg) {:token (:invitation-token params) :iss :team-invitation})]
(when-not (= (:email params) (:member-email invitation))
(ex/raise :type :restriction
:code :email-does-not-match-invitation
@ -234,7 +222,7 @@
(pos? (compare elapsed register-retry-threshold))))
(defn prepare-register
[{:keys [pool sprops] :as cfg} params]
[{:keys [::db/pool] :as cfg} params]
(validate-register-attempt! cfg params)
@ -263,7 +251,7 @@
params (d/without-nils params)
token (tokens/generate sprops params)]
token (tokens/generate (::main/props cfg) params)]
(with-meta {:token token}
{::audit/profile-id uuid/zero})))
@ -272,7 +260,7 @@
:opt-un [::invitation-token]))
(sv/defmethod ::prepare-register-profile
{:auth false
{::rpc/auth false
::doc/added "1.15"}
[cfg params]
(prepare-register cfg params))
@ -292,7 +280,7 @@
(db/tjson))
password (if-let [password (:password params)]
(derive-password password)
(auth/derive-password password)
"!")
locale (:locale params)
@ -325,6 +313,7 @@
(throw e)
(ex/raise :type :validation
:code :email-already-exists
:hint "email already exists"
:cause e)))))))
(defn create-profile-relations
@ -338,15 +327,15 @@
(assoc :default-project-id (:default-project-id team)))))
(defn send-email-verification!
[conn sprops profile]
(let [vtoken (tokens/generate sprops
[conn props profile]
(let [vtoken (tokens/generate props
{:iss :verify-email
:exp (dt/in-future "72h")
:profile-id (:id profile)
:email (:email profile)})
;; NOTE: this token is mainly used for possible complains
;; identification on the sns webhook
ptoken (tokens/generate sprops
ptoken (tokens/generate props
{:iss :profile-identity
:profile-id (:id profile)
:exp (dt/in-future {:days 30})})]
@ -359,8 +348,8 @@
:extra-data ptoken})))
(defn register-profile
[{:keys [conn sprops session] :as cfg} {:keys [token] :as params}]
(let [claims (tokens/verify sprops {:token token :iss :prepared-register})
[{:keys [conn session] :as cfg} {:keys [token] :as params}]
(let [claims (tokens/verify (::main/props cfg) {:token token :iss :prepared-register})
params (merge params claims)
is-active (or (:is-active params)
@ -375,20 +364,19 @@
(create-profile conn)
(create-profile-relations conn)
(profile/decode-profile-row)))
audit-fn (:audit cfg)
invitation (when-let [token (:invitation-token params)]
(tokens/verify sprops {:token token :iss :team-invitation}))]
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))]
;; If profile is filled in claims, means it tries to register
;; again, so we proceed to update the modified-at attr
;; accordingly.
(when-let [id (:profile-id claims)]
(db/update! conn :profile {:modified-at (dt/now)} {:id id})
(audit-fn :cmd :submit
:type "fact"
:name "register-profile-retry"
:profile-id id))
(when-let [collector (::audit/collector cfg)]
(audit/submit! collector
{:type "fact"
:name "register-profile-retry"
:profile-id id})))
(cond
;; If invitation token comes in params, this is because the
@ -399,35 +387,35 @@
;; email.
(and (some? invitation) (= (:email profile) (:member-email invitation)))
(let [claims (assoc invitation :member-id (:id profile))
token (tokens/generate sprops claims)
token (tokens/generate (::main/props cfg) claims)
resp {:invitation-token token}]
(with-meta resp
{:transform-response ((:create session) (:id profile))
::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)}))
(-> resp
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)})))
;; If auth backend is different from "penpot" means user is
;; registering using third party auth mechanism; in this case
;; we need to mark this session as logged.
(not= "penpot" (:auth-backend profile))
(with-meta (profile/strip-private-attrs profile)
{:transform-response ((:create session) (:id profile))
::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)})
(-> (profile/strip-private-attrs profile)
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)}))
;; If the `:enable-insecure-register` flag is set, we proceed
;; to sign in the user directly, without email verification.
(true? is-active)
(with-meta (profile/strip-private-attrs profile)
{:transform-response ((:create session) (:id profile))
::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)})
(-> (profile/strip-private-attrs profile)
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)}))
;; In all other cases, send a verification email.
:else
(do
(send-email-verification! conn sprops profile)
(with-meta profile
(send-email-verification! conn (::main/props cfg) profile)
(rph/with-meta profile
{::audit/replace-props (audit/profile->props profile)
::audit/profile-id (:id profile)})))))
@ -435,10 +423,10 @@
(s/keys :req-un [::token ::fullname]))
(sv/defmethod ::register-profile
{:auth false
::rsem/queue :auth
{::rpc/auth false
::climit/queue :auth
::doc/added "1.15"}
[{:keys [pool] :as cfg} params]
[{:keys [::db/pool] :as cfg} params]
(db/with-atomic [conn pool]
(-> (assoc cfg :conn conn)
(register-profile params))))
@ -446,16 +434,16 @@
;; ---- COMMAND: Request Profile Recovery
(defn request-profile-recovery
[{:keys [pool sprops] :as cfg} {:keys [email] :as params}]
[{:keys [::db/pool] :as cfg} {:keys [email] :as params}]
(letfn [(create-recovery-token [{:keys [id] :as profile}]
(let [token (tokens/generate sprops
(let [token (tokens/generate (::main/props cfg)
{:iss :password-recovery
:exp (dt/in-future "15m")
:profile-id id})]
(assoc profile :token token)))
(send-email-notification [conn profile]
(let [ptoken (tokens/generate sprops
(let [ptoken (tokens/generate (::main/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (dt/in-future {:days 30})})]
@ -493,7 +481,7 @@
(s/keys :req-un [::email]))
(sv/defmethod ::request-profile-recovery
{:auth false
{::rpc/auth false
::doc/added "1.15"}
[cfg params]
(request-profile-recovery cfg params))

View file

@ -9,21 +9,28 @@
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.files.features :as ffeat]
[app.common.logging :as l]
[app.common.pages.migrations :as pmg]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.rpc.queries.files :as files]
[app.rpc.helpers :as rph]
[app.rpc.queries.projects :as projects]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.tasks.file-gc]
[app.util.blob :as blob]
[app.util.fressian :as fres]
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
@ -269,7 +276,7 @@
(when (not= readed# expected#)
(ex/raise :type :validation
:code :unexpected-label
:hint (format "unxpected label found: %s, expected: %s" readed# expected#)))))
:hint (format "unexpected label found: %s, expected: %s" readed# expected#)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API
@ -289,9 +296,11 @@
(defn- retrieve-file
[pool file-id]
(->> (db/query pool :file {:id file-id})
(map files/decode-row)
(first)))
(with-open [^AutoCloseable conn (db/open pool)]
(binding [pmap/*load-fn* (partial files/load-pointer conn file-id)]
(some-> (db/get* conn :file {:id file-id})
(files/decode-row)
(update :data files/process-pointers deref)))))
(def ^:private sql:file-media-objects
"SELECT * FROM file_media_object WHERE id = ANY(?)")
@ -367,7 +376,7 @@
(def ^:dynamic *state*)
(def ^:dynamic *options*)
;; --- EXPORT WRITTER
;; --- EXPORT WRITER
(defn- embed-file-assets
[data conn file-id]
@ -397,8 +406,8 @@
form))
(process-group-of-assets [data [lib-id items]]
;; NOTE: there are a posibility that shape refers to a not
;; existing file because the file was removed. In this
    ;; NOTE: there is a possibility that a shape refers to a
    ;; non-existent file because the file was removed. In this
;; case we just ignore the asset.
(if-let [lib (retrieve-file conn lib-id)]
(reduce (partial process-asset lib) data items)
@ -434,14 +443,14 @@
:opt [::include-libraries? ::embed-assets?]))
(defn write-export!
"Do the exportation of a speficied file in custom penpot binary
"Do the exportation of a specified file in custom penpot binary
format. There are some options available for customize the output:
`::include-libraries?`: additionaly to the specified file, all the
`::include-libraries?`: additionally to the specified file, all the
linked libraries also will be included (including transitive
dependencies).
`::embed-assets?`: instead of including the libraryes, embedd in the
`::embed-assets?`: instead of including the libraries, embed in the
same file library all assets used from external libraries."
[{:keys [::include-libraries? ::embed-assets?] :as options}]
(us/assert! ::write-export-options options)
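As a usage sketch, the writer is driven with a config map along these lines; the keys mirror the export! caller shown later in this file and the option values are invented:

(comment
  (with-open [^AutoCloseable output (io/output-stream path)]
    (binding [*position* (atom 0)]
      (write-export! (assoc cfg
                            ::output output
                            ::file-ids [file-id]
                            ::include-libraries? true
                            ::embed-assets? false)))))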
@ -557,7 +566,7 @@
  format. There are some options for customizing the importation
behavior:
`::overwrite?`: if true, instead of creating new files and remaping id references,
`::overwrite?`: if true, instead of creating new files and remapping id references,
it reuses all ids and updates existing objects; defaults to `false`.
`::migrate?`: if true, applies the migration before persisting the
@ -602,12 +611,23 @@
(vswap! *state* update :index update-index files)
(vswap! *state* assoc :version version :files files)))
(defn- postprocess-file
[data]
(let [omap-wrap ffeat/*wrap-with-objects-map-fn*
pmap-wrap ffeat/*wrap-with-pointer-map-fn*]
(-> data
(update :pages-index update-vals #(update % :objects omap-wrap))
(update :pages-index update-vals pmap-wrap)
(update :components update-vals #(update % :objects omap-wrap))
(update :components pmap-wrap))))
(defmethod read-section :v1/files
[{:keys [conn ::input ::migrate? ::project-id ::timestamp ::overwrite?]}]
(doseq [expected-file-id (-> *state* deref :files)]
(let [file (read-obj! input)
media' (read-obj! input)
file-id (:id file)]
(let [file (read-obj! input)
media' (read-obj! input)
file-id (:id file)
features files/default-features]
(when (not= file-id expected-file-id)
(ex/raise :type :validation
@ -622,33 +642,42 @@
(l/debug :hint "update media references" ::l/async false)
(vswap! *state* update :media into (map #(update % :id lookup-index)) media')
(l/debug :hint "procesing file" :file-id file-id ::l/async false)
(l/debug :hint "processing file" :file-id file-id ::features features ::l/async false)
(let [file-id' (lookup-index file-id)
data (-> (:data file)
(assoc :id file-id')
(cond-> migrate? (pmg/migrate-data))
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media))
(binding [ffeat/*current* features
ffeat/*wrap-with-objects-map-fn* (if (features "storage/objects-map") omap/wrap identity)
ffeat/*wrap-with-pointer-map-fn* (if (features "storage/pointer-map") pmap/wrap identity)
pmap/*tracked* (atom {})]
params {:id file-id'
:project-id project-id
:name (:name file)
:revn (:revn file)
:is-shared (:is-shared file)
:data (blob/encode data)
:created-at timestamp
:modified-at timestamp}]
(let [file-id' (lookup-index file-id)
data (-> (:data file)
(assoc :id file-id')
(cond-> migrate? (pmg/migrate-data))
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(postprocess-file))
(l/debug :hint "create file" :id file-id' ::l/async false)
params {:id file-id'
:project-id project-id
:features (db/create-array conn "text" features)
:name (:name file)
:revn (:revn file)
:is-shared (:is-shared file)
:data (blob/encode data)
:created-at timestamp
:modified-at timestamp}]
(if overwrite?
(create-or-update-file conn params)
(db/insert! conn :file params))
(l/debug :hint "create file" :id file-id' ::l/async false)
(when overwrite?
(db/delete! conn :file-thumbnail {:file-id file-id'}))))))
(if overwrite?
(create-or-update-file conn params)
(db/insert! conn :file params))
(files/persist-pointers! conn file-id')
(when overwrite?
(db/delete! conn :file-thumbnail {:file-id file-id'})))))))
(defmethod read-section :v1/rels
[{:keys [conn ::input ::timestamp]}]
@ -807,7 +836,7 @@
cs (volatile! nil)]
(try
(l/info :hint "start exportation" :export-id id)
(with-open [output (io/output-stream output)]
(with-open [^AutoCloseable output (io/output-stream output)]
(binding [*position* (atom 0)]
(write-export! (assoc cfg ::output output))))
@ -830,19 +859,19 @@
(defn export-to-tmpfile!
[cfg]
(let [path (tmp/tempfile :prefix "penpot.export.")]
(with-open [output (io/output-stream path)]
(with-open [^AutoCloseable output (io/output-stream path)]
(export! cfg output)
path)))
(defn import!
[{:keys [::input] :as cfg}]
(let [id (uuid/next)
ts (dt/now)
tp (dt/tpoint)
cs (volatile! nil)]
(l/info :hint "import: started" :import-id id)
(try
(l/info :hint "start importation" :import-id id)
(binding [*position* (atom 0)]
(with-open [input (io/input-stream input)]
(with-open [^AutoCloseable input (io/input-stream input)]
(read-import! (assoc cfg ::input input))))
(catch Throwable cause
@ -850,27 +879,29 @@
(throw cause))
(finally
(l/info :hint "importation finished" :import-id id
:elapsed (str (inst-ms (dt/diff ts (dt/now))) "ms")
(l/info :hint "import: terminated"
:import-id id
:elapsed (dt/format-duration (tp))
:error? (some? @cs)
:cause @cs)))))
:cause @cs
)))))
;; --- Command: export-binfile
(s/def ::file-id ::us/uuid)
(s/def ::profile-id ::us/uuid)
(s/def ::include-libraries? ::us/boolean)
(s/def ::embed-assets? ::us/boolean)
(s/def ::export-binfile
(s/keys :req-un [::profile-id ::file-id ::include-libraries? ::embed-assets?]))
(s/keys :req [::rpc/profile-id] :req-un [::file-id ::include-libraries? ::embed-assets?]))
(sv/defmethod ::export-binfile
"Export a penpot file in a binary format."
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id include-libraries? embed-assets?] :as params}]
{::doc/added "1.15"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries? embed-assets?] :as params}]
(files/check-read-permissions! pool profile-id file-id)
(let [resp (reify yrs/StreamableResponseBody
(let [body (reify yrs/StreamableResponseBody
(-write-body-to-stream [_ _ output-stream]
(-> cfg
(assoc ::file-ids [file-id])
@ -878,23 +909,23 @@
(assoc ::include-libraries? include-libraries?)
(export! output-stream))))]
(with-meta (sv/wrap nil)
{:transform-response (fn [_ response]
(-> response
(assoc :body resp)
(assoc :headers {"content-type" "application/octet-stream"})))})))
(fn [_]
(yrs/response 200 body {"content-type" "application/octet-stream"}))))
(s/def ::file ::media/upload)
(s/def ::import-binfile
(s/keys :req-un [::profile-id ::project-id ::file]))
(s/keys :req [::rpc/profile-id] :req-un [::project-id ::file]))
(sv/defmethod ::import-binfile
"Import a penpot file in a binary format."
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id project-id file] :as params}]
{::doc/added "1.15"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id project-id file] :as params}]
(db/with-atomic [conn pool]
(projects/check-read-permissions! conn profile-id project-id)
(import! (assoc cfg
::input (:path file)
::project-id project-id
::ignore-index-errors? true))))
(let [ids (import! (assoc cfg
::input (:path file)
::project-id project-id
::ignore-index-errors? true))]
(rph/with-meta ids
{::audit/props {:file nil :file-ids ids}}))))

View file

@ -6,22 +6,26 @@
(ns app.rpc.commands.comments
(:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.geom.point :as gpt]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.queries.files :as files]
[app.rpc.queries.teams :as teams]
[app.rpc.retry :as retry]
[app.util.blob :as blob]
[app.rpc.quotes :as quotes]
[app.util.pointer-map :as pmap]
[app.util.retry :as rtry]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; QUERY COMMANDS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; --- GENERAL PURPOSE INTERNAL HELPERS
(defn decode-row
[{:keys [participants position] :as row}]
@ -29,23 +33,77 @@
(db/pgpoint? position) (assoc :position (db/decode-pgpoint position))
(db/pgobject? participants) (assoc :participants (db/decode-transit-pgobject participants))))
(def sql:get-file
"select f.id, f.modified_at, f.revn, f.features,
f.project_id, p.team_id, f.data
from file as f
join project as p on (p.id = f.project_id)
where f.id = ?
and f.deleted_at is null")
(defn- get-file
"A specialized version of get-file for comments module."
[conn file-id page-id]
(binding [pmap/*load-fn* (partial files/load-pointer conn file-id)]
(if-let [{:keys [data] :as file} (some-> (db/exec-one! conn [sql:get-file file-id]) (files/decode-row))]
(-> file
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
(assoc :page-id page-id))
(ex/raise :type :not-found
:code :object-not-found
:hint "file not found"))))
(defn- get-comment-thread
[conn thread-id & {:keys [for-update?]}]
(-> (db/get-by-id conn :comment-thread thread-id {:for-update for-update?})
(decode-row)))
(defn- get-comment
[conn comment-id & {:keys [for-update?]}]
(db/get-by-id conn :comment comment-id {:for-update for-update?}))
(defn- get-next-seqn
[conn file-id]
(let [sql "select (f.comment_thread_seqn + 1) as next_seqn from file as f where f.id = ?"
res (db/exec-one! conn [sql file-id])]
(:next-seqn res)))
(def sql:upsert-comment-thread-status
"insert into comment_thread_status (thread_id, profile_id, modified_at)
values (?, ?, ?)
on conflict (thread_id, profile_id)
do update set modified_at = ?
returning modified_at;")
(defn upsert-comment-thread-status!
([conn profile-id thread-id]
(upsert-comment-thread-status! conn profile-id thread-id (dt/now)))
([conn profile-id thread-id mod-at]
(db/exec-one! conn [sql:upsert-comment-thread-status thread-id profile-id mod-at mod-at])))
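Both arities above funnel into the same upsert; a short usage sketch (connection and ids assumed to be in scope):

(comment
  ;; mark the thread as read "now"
  (upsert-comment-thread-status! conn profile-id thread-id)
  ;; or at an explicit instant, e.g. the request timestamp
  (upsert-comment-thread-status! conn profile-id thread-id request-at))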
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; QUERY COMMANDS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; --- COMMAND: Get Comment Threads
(declare retrieve-comment-threads)
(declare ^:private get-comment-threads)
(s/def ::team-id ::us/uuid)
(s/def ::file-id ::us/uuid)
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::get-comment-threads
(s/and (s/keys :req-un [::profile-id]
(s/and (s/keys :req [::rpc/profile-id]
:opt-un [::file-id ::share-id ::team-id])
#(or (:file-id %) (:team-id %))))
(sv/defmethod ::get-comment-threads
[{:keys [pool] :as cfg} params]
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id share-id] :as params}]
(with-open [conn (db/open pool)]
(retrieve-comment-threads conn params)))
(files/check-comment-permissions! conn profile-id file-id share-id)
(get-comment-threads conn profile-id file-id)))
(def sql:comment-threads
"select distinct on (ct.id)
@ -69,25 +127,26 @@
where ct.file_id = ?
window w as (partition by c.thread_id order by c.created_at asc)")
(defn retrieve-comment-threads
[conn {:keys [profile-id file-id share-id]}]
(files/check-comment-permissions! conn profile-id file-id share-id)
(defn- get-comment-threads
[conn profile-id file-id]
(->> (db/exec! conn [sql:comment-threads profile-id file-id])
(into [] (map decode-row))))
;; --- COMMAND: Get Unread Comment Threads
(declare retrieve-unread-comment-threads)
(declare ^:private get-unread-comment-threads)
(s/def ::team-id ::us/uuid)
(s/def ::get-unread-comment-threads
(s/keys :req-un [::profile-id ::team-id]))
(s/keys :req [::rpc/profile-id]
:req-un [::team-id]))
(sv/defmethod ::get-unread-comment-threads
[{:keys [pool] :as cfg} {:keys [profile-id team-id] :as params}]
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
(with-open [conn (db/open pool)]
(teams/check-read-permissions! conn profile-id team-id)
(retrieve-unread-comment-threads conn params)))
(get-unread-comment-threads conn profile-id team-id)))
(def sql:comment-threads-by-team
"select distinct on (ct.id)
@ -116,22 +175,22 @@
(str "with threads as (" sql:comment-threads-by-team ")"
"select * from threads where count_unread_comments > 0"))
(defn retrieve-unread-comment-threads
[conn {:keys [profile-id team-id]}]
(defn- get-unread-comment-threads
[conn profile-id team-id]
(->> (db/exec! conn [sql:unread-comment-threads-by-team profile-id team-id])
(into [] (map decode-row))))
;; --- COMMAND: Get Single Comment Thread
(s/def ::id ::us/uuid)
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::get-comment-thread
(s/keys :req-un [::profile-id ::file-id ::id]
(s/keys :req [::rpc/profile-id]
:req-un [::file-id ::us/id]
:opt-un [::share-id]))
(sv/defmethod ::get-comment-thread
[{:keys [pool] :as cfg} {:keys [profile-id file-id id share-id] :as params}]
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id id share-id] :as params}]
(with-open [conn (db/open pool)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(let [sql (str "with threads as (" sql:comment-threads ")"
@ -139,37 +198,30 @@
(-> (db/exec-one! conn [sql profile-id file-id id])
(decode-row)))))
(defn get-comment-thread
[conn {:keys [profile-id file-id id] :as params}]
(let [sql (str "with threads as (" sql:comment-threads ")"
"select * from threads where id = ?")]
(-> (db/exec-one! conn [sql profile-id file-id id])
(decode-row))))
;; --- COMMAND: Retrieve Comments
(declare get-comments)
(declare ^:private get-comments)
(s/def ::file-id ::us/uuid)
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::thread-id ::us/uuid)
(s/def ::get-comments
(s/keys :req-un [::profile-id ::thread-id]
(s/keys :req [::rpc/profile-id]
:req-un [::thread-id]
:opt-un [::share-id]))
(sv/defmethod ::get-comments
[{:keys [pool] :as cfg} {:keys [profile-id thread-id share-id] :as params}]
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id thread-id share-id] :as params}]
(with-open [conn (db/open pool)]
(let [thread (db/get-by-id conn :comment-thread thread-id)]
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id))
(get-comments conn thread-id)))
(let [{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(get-comments conn thread-id))))
(def sql:comments
"select c.* from comment as c
where c.thread_id = ?
order by c.created_at asc")
(defn get-comments
(defn- get-comments
[conn thread-id]
(->> (db/query conn :comment
{:thread-id thread-id}
@ -178,25 +230,6 @@
;; --- COMMAND: Get file comments users
(declare get-file-comments-users)
(s/def ::file-id ::us/uuid)
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::get-profiles-for-file-comments
(s/keys :req-un [::profile-id ::file-id]
:opt-un [::share-id]))
(sv/defmethod ::get-profiles-for-file-comments
"Retrieves a list of profiles with limited set of properties of all
participants on comment threads of the file."
{::doc/added "1.15"
::doc/changes ["1.15" "Imported from queries and renamed."]}
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id]}]
(with-open [conn (db/open pool)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(get-file-comments-users conn file-id profile-id)))
;; All the profiles that had comment the file, plus the current
;; profile.
@ -219,86 +252,113 @@
[conn file-id profile-id]
(db/exec! conn [sql:file-comment-users file-id profile-id]))
(s/def ::get-profiles-for-file-comments
(s/keys :req [::rpc/profile-id]
:req-un [::file-id]
:opt-un [::share-id]))
(sv/defmethod ::get-profiles-for-file-comments
"Retrieves a list of profiles with limited set of properties of all
participants on comment threads of the file."
{::doc/added "1.15"
::doc/changes ["1.15" "Imported from queries and renamed."]}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id share-id]}]
(with-open [conn (db/open pool)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(get-file-comments-users conn file-id profile-id)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MUTATION COMMANDS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private create-comment-thread)
;; --- COMMAND: Create Comment Thread
(declare upsert-comment-thread-status!)
(declare create-comment-thread)
(declare retrieve-page-name)
(s/def ::page-id ::us/uuid)
(s/def ::file-id ::us/uuid)
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::profile-id ::us/uuid)
(s/def ::position ::gpt/point)
(s/def ::content ::us/string)
(s/def ::frame-id ::us/uuid)
(s/def ::create-comment-thread
(s/keys :req-un [::profile-id ::file-id ::position ::content ::page-id ::frame-id]
(s/keys :req [::rpc/profile-id]
:req-un [::file-id ::position ::content ::page-id ::frame-id]
:opt-un [::share-id]))
(sv/defmethod ::create-comment-thread
{::retry/max-retries 3
::retry/matches retry/conflict-db-insert?
::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
{::doc/added "1.15"
::webhooks/event? true}
[{:keys [::db/pool] :as cfg}
{:keys [::rpc/profile-id ::rpc/request-at file-id page-id share-id position content frame-id]}]
(db/with-atomic [conn pool]
(files/check-comment-permissions! conn profile-id file-id share-id)
(create-comment-thread conn params)))
(let [{:keys [team-id project-id page-name] :as file} (get-file conn file-id page-id)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(defn- retrieve-next-seqn
[conn file-id]
(let [sql "select (f.comment_thread_seqn + 1) as next_seqn from file as f where f.id = ?"
res (db/exec-one! conn [sql file-id])]
(:next-seqn res)))
(run! (partial quotes/check-quote! conn)
(list {::quotes/id ::quotes/comment-threads-per-file
::quotes/profile-id profile-id
::quotes/team-id team-id
::quotes/project-id project-id
::quotes/file-id file-id}
{::quotes/id ::quotes/comments-per-file
::quotes/profile-id profile-id
::quotes/team-id team-id
::quotes/project-id project-id
::quotes/file-id file-id}))
(defn create-comment-thread
[conn {:keys [profile-id file-id page-id position content frame-id] :as params}]
(let [seqn (retrieve-next-seqn conn file-id)
now (dt/now)
pname (retrieve-page-name conn params)
thread (db/insert! conn :comment-thread
{:file-id file-id
:owner-id profile-id
:participants (db/tjson #{profile-id})
:page-name pname
:page-id page-id
:created-at now
:modified-at now
:seqn seqn
:position (db/pgpoint position)
:frame-id frame-id})]
(rtry/with-retry {::rtry/when rtry/conflict-exception?
::rtry/max-retries 3
::rtry/label "create-comment-thread"}
(create-comment-thread conn
{:created-at request-at
:profile-id profile-id
:file-id file-id
:page-id page-id
:page-name page-name
:position position
:content content
:frame-id frame-id})))))
;; Create a comment entry
(db/insert! conn :comment
{:thread-id (:id thread)
:owner-id profile-id
:created-at now
:modified-at now
:content content})
(defn- create-comment-thread
[conn {:keys [profile-id file-id page-id page-name created-at position content frame-id]}]
(let [;; NOTE: we take the next seq number from a separate query because the whole
        ;; operation can be retried on conflict, and in this case the new seq should be
;; retrieved from the database.
seqn (get-next-seqn conn file-id)
thread-id (uuid/next)
thread (db/insert! conn :comment-thread
{:id thread-id
:file-id file-id
:owner-id profile-id
:participants (db/tjson #{profile-id})
:page-name page-name
:page-id page-id
:created-at created-at
:modified-at created-at
:seqn seqn
:position (db/pgpoint position)
:frame-id frame-id})
comment (db/insert! conn :comment
{:id (uuid/next)
:thread-id thread-id
:owner-id profile-id
:created-at created-at
:modified-at created-at
:content content})]
;; Make the current thread as read.
(upsert-comment-thread-status! conn profile-id (:id thread))
(upsert-comment-thread-status! conn profile-id thread-id created-at)
;; Optimistic update of current seq number on file.
(db/update! conn :file
{:comment-thread-seqn seqn}
{:id file-id})
(select-keys thread [:id :file-id :page-id])))
(defn- retrieve-page-name
[conn {:keys [file-id page-id]}]
(let [{:keys [data]} (db/get-by-id conn :file file-id)
data (blob/decode data)]
(get-in data [:pages-index page-id :name])))
(-> thread
(select-keys [:id :file-id :page-id])
(assoc :comment-id (:id comment)))))
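Because get-next-seqn runs inside the body that rtry/with-retry re-executes, a unique-constraint conflict on the thread seqn simply triggers a new attempt with a freshly read seqn. The map returned to the caller then looks roughly like this (values invented):

;; {:id         #uuid "..."   ; the new comment-thread id
;;  :file-id    #uuid "..."
;;  :page-id    #uuid "..."
;;  :comment-id #uuid "..."}  ; id of the first comment in the thread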
;; --- COMMAND: Update Comment Thread Status
@ -306,49 +366,33 @@
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::update-comment-thread-status
(s/keys :req-un [::profile-id ::id]
(s/keys :req [::rpc/profile-id]
:req-un [::id]
:opt-un [::share-id]))
(sv/defmethod ::update-comment-thread-status
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id share-id] :as params}]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
(db/with-atomic [conn pool]
(let [cthr (db/get-by-id conn :comment-thread id {:for-update true})]
(when-not cthr
(ex/raise :type :not-found))
(files/check-comment-permissions! conn profile-id (:file-id cthr) share-id)
(upsert-comment-thread-status! conn profile-id (:id cthr)))))
(def sql:upsert-comment-thread-status
"insert into comment_thread_status (thread_id, profile_id)
values (?, ?)
on conflict (thread_id, profile_id)
do update set modified_at = clock_timestamp()
returning modified_at;")
(defn upsert-comment-thread-status!
[conn profile-id thread-id]
(db/exec-one! conn [sql:upsert-comment-thread-status thread-id profile-id]))
(let [{:keys [file-id] :as thread} (get-comment-thread conn id :for-update? true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(upsert-comment-thread-status! conn profile-id id))))
;; --- COMMAND: Update Comment Thread
(s/def ::is-resolved ::us/boolean)
(s/def ::update-comment-thread
(s/keys :req-un [::profile-id ::id ::is-resolved]
(s/keys :req [::rpc/profile-id]
:req-un [::id ::is-resolved]
:opt-un [::share-id]))
(sv/defmethod ::update-comment-thread
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id is-resolved share-id] :as params}]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id is-resolved share-id] :as params}]
(db/with-atomic [conn pool]
(let [thread (db/get-by-id conn :comment-thread id {:for-update true})]
(when-not thread
(ex/raise :type :not-found))
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
(let [{:keys [file-id] :as thread} (get-comment-thread conn id :for-update? true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(db/update! conn :comment-thread
{:is-resolved is-resolved}
{:id id})
@ -357,156 +401,149 @@
;; --- COMMAND: Add Comment
(declare get-comment-thread)
(declare create-comment)
(s/def ::create-comment
(s/keys :req-un [::profile-id ::thread-id ::content]
(s/keys :req [::rpc/profile-id]
:req-un [::thread-id ::content]
:opt-un [::share-id]))
(sv/defmethod ::create-comment
{::doc/added "1.15"}
[{:keys [pool] :as cfg} params]
{::doc/added "1.15"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content] :as params}]
(db/with-atomic [conn pool]
(create-comment conn params)))
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id :for-update? true)
{:keys [team-id project-id page-name] :as file} (get-file conn file-id page-id)]
(defn create-comment
[conn {:keys [profile-id thread-id content share-id] :as params}]
(let [thread (-> (db/get-by-id conn :comment-thread thread-id {:for-update true})
(decode-row))
pname (retrieve-page-name conn thread)]
(files/check-comment-permissions! conn profile-id (:id file) share-id)
(quotes/check-quote! conn
{::quotes/id ::quotes/comments-per-file
::quotes/profile-id profile-id
::quotes/team-id team-id
::quotes/project-id project-id
::quotes/file-id (:id file)})
;; Standard Checks
(when-not thread (ex/raise :type :not-found))
;; Update the page-name cached attribute on comment thread table.
(when (not= page-name (:page-name thread))
(db/update! conn :comment-thread
{:page-name page-name}
{:id thread-id}))
;; Permission Checks
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
(let [comment (db/insert! conn :comment
{:id (uuid/next)
:created-at request-at
:modified-at request-at
:thread-id thread-id
:owner-id profile-id
:content content})
props {:file-id file-id
:share-id nil}]
;; Update the page-name cachedattribute on comment thread table.
(when (not= pname (:page-name thread))
(db/update! conn :comment-thread
{:page-name pname}
{:id thread-id}))
;; Update thread modified-at attribute and assoc the current
;; profile to the participant set.
(db/update! conn :comment-thread
{:modified-at request-at
:participants (-> (:participants thread #{})
(conj profile-id)
(db/tjson))}
{:id thread-id})
;; NOTE: is important that all timestamptz related fields are
;; created or updated on the database level for avoid clock
;; inconsistencies (some user sees something read that is not
;; read, etc...)
(let [ppants (:participants thread #{})
comment (db/insert! conn :comment
{:thread-id thread-id
:owner-id profile-id
:content content})]
;; Update the current profile status in relation to the
;; current thread.
(upsert-comment-thread-status! conn profile-id thread-id request-at)
;; NOTE: this is done in SQL instead of using db/update!
;; helper because currently the helper does not allow pass raw
;; function call parameters to the underlying prepared
;; statement; in a future when we fix/improve it, this can be
;; changed to use the helper.
;; Update thread modified-at attribute and assoc the current
;; profile to the participant set.
(let [ppants (conj ppants profile-id)
sql "update comment_thread
set modified_at = clock_timestamp(),
participants = ?
where id = ?"]
(db/exec-one! conn [sql (db/tjson ppants) thread-id]))
;; Update the current profile status in relation to the
;; current thread.
(upsert-comment-thread-status! conn profile-id thread-id)
;; Return the created comment object.
comment)))
(vary-meta comment assoc ::audit/props props)))))
;; --- COMMAND: Update Comment
(declare update-comment)
(s/def ::update-comment
(s/keys :req-un [::profile-id ::id ::content]
(s/keys :req [::rpc/profile-id]
:req-un [::id ::content]
:opt-un [::share-id]))
(sv/defmethod ::update-comment
{::doc/added "1.15"}
[{:keys [pool] :as cfg} params]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id share-id content] :as params}]
(db/with-atomic [conn pool]
(update-comment conn params)))
(let [{:keys [thread-id] :as comment} (get-comment conn id :for-update? true)
{:keys [file-id page-id owner-id] :as thread} (get-comment-thread conn thread-id :for-update? true)]
(defn update-comment
[conn {:keys [profile-id id content share-id] :as params}]
(let [comment (db/get-by-id conn :comment id {:for-update true})
_ (when-not comment (ex/raise :type :not-found))
thread (db/get-by-id conn :comment-thread (:thread-id comment) {:for-update true})
_ (when-not thread (ex/raise :type :not-found))
pname (retrieve-page-name conn thread)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
;; Don't allow non-owners to edit comments
(when-not (= owner-id profile-id)
(ex/raise :type :validation
:code :not-allowed))
;; Don't allow non-owners to edit comments
(when-not (= (:owner-id thread) profile-id)
(ex/raise :type :validation
:code :not-allowed))
(db/update! conn :comment
{:content content
:modified-at (dt/now)}
{:id (:id comment)})
(db/update! conn :comment-thread
{:modified-at (dt/now)
:page-name pname}
{:id (:id thread)})
nil))
(let [{:keys [page-name] :as file} (get-file conn file-id page-id)]
(db/update! conn :comment
{:content content
:modified-at request-at}
{:id id})
(db/update! conn :comment-thread
{:modified-at request-at
:page-name page-name}
{:id thread-id})
nil))))
;; --- COMMAND: Delete Comment Thread
(s/def ::delete-comment-thread
(s/keys :req-un [::profile-id ::id]))
(s/keys :req [::rpc/profile-id]
:req-un [::id]
:opt-un [::share-id]))
(sv/defmethod ::delete-comment-thread
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
(db/with-atomic [conn pool]
(let [thread (db/get-by-id conn :comment-thread id {:for-update true})]
(when-not (= (:owner-id thread) profile-id)
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id :for-update? true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(when-not (= owner-id profile-id)
(ex/raise :type :validation
:code :not-allowed))
(db/delete! conn :comment-thread {:id id})
nil)))
;; --- COMMAND: Delete comment
(s/def ::delete-comment
(s/keys :req-un [::profile-id ::id]))
(s/keys :req [::rpc/profile-id]
:req-un [::id]
:opt-un [::share-id]))
(sv/defmethod ::delete-comment
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
(db/with-atomic [conn pool]
(let [comment (db/get-by-id conn :comment id {:for-update true})]
(when-not (= (:owner-id comment) profile-id)
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id :for-update? true)
{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(when-not (= owner-id profile-id)
(ex/raise :type :validation
:code :not-allowed))
(db/delete! conn :comment {:id id}))))
;; --- COMMAND: Update comment thread position
(s/def ::update-comment-thread-position
(s/keys :req-un [::profile-id ::id ::position ::frame-id]
(s/keys :req [::rpc/profile-id]
:req-un [::id ::position ::frame-id]
:opt-un [::share-id]))
(sv/defmethod ::update-comment-thread-position
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id position frame-id share-id] :as params}]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id position frame-id share-id] :as params}]
(db/with-atomic [conn pool]
(let [thread (db/get-by-id conn :comment-thread id {:for-update true})]
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
(let [{:keys [file-id] :as thread} (get-comment-thread conn id :for-update? true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(db/update! conn :comment-thread
{:modified-at (dt/now)
{:modified-at (::rpc/request-at params)
:position (db/pgpoint position)
:frame-id frame-id}
{:id (:id thread)})
@ -515,18 +552,18 @@
;; --- COMMAND: Update comment frame
(s/def ::update-comment-thread-frame
(s/keys :req-un [::profile-id ::id ::frame-id]
(s/keys :req [::rpc/profile-id]
:req-un [::id ::frame-id]
:opt-un [::share-id]))
(sv/defmethod ::update-comment-thread-frame
{::doc/added "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id frame-id share-id] :as params}]
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id frame-id share-id] :as params}]
(db/with-atomic [conn pool]
(let [thread (db/get-by-id conn :comment-thread id {:for-update true})]
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
(let [{:keys [file-id] :as thread} (get-comment-thread conn id :for-update? true)]
(files/check-comment-permissions! conn profile-id file-id share-id)
(db/update! conn :comment-thread
{:modified-at (dt/now)
{:modified-at (::rpc/request-at params)
:frame-id frame-id}
{:id (:id thread)})
{:id id})
nil)))
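;; For reference, a minimal sketch of what the upsert-comment-thread-status!
;; helper used by the commands above could look like, assuming a
;; comment_thread_status table keyed by (thread_id, profile_id); the actual
;; helper is defined elsewhere in this namespace and may differ.
(comment
  (def ^:private sql:upsert-comment-thread-status
    "insert into comment_thread_status (thread_id, profile_id, modified_at)
     values (?, ?, ?)
     on conflict (thread_id, profile_id)
     do update set modified_at = ?
     returning modified_at")

  (defn- upsert-comment-thread-status!
    [conn profile-id thread-id modified-at]
    (db/exec-one! conn [sql:upsert-comment-thread-status
                        thread-id profile-id modified-at modified-at])))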

View file

@ -12,6 +12,7 @@
[app.config :as cf]
[app.db :as db]
[app.loggers.audit :as audit]
[app.rpc :as-alias rpc]
[app.rpc.commands.auth :as cmd.auth]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
@ -24,11 +25,11 @@
(sv/defmethod ::create-demo-profile
"A command that is responsible for creating a demo-purpose
profile. It only works if the `demo-users` flag is inabled in the
profile. It only works if the `demo-users` flag is enabled in the
configuration."
{:auth false
{::rpc/auth false
::doc/added "1.15"
::doc/changes ["1.15" "This methos is migrated from mutations to commands."]}
::doc/changes ["1.15" "This method is migrated from mutations to commands."]}
[{:keys [pool] :as cfg} _]
(let [id (uuid/next)
sem (System/currentTimeMillis)

File diff suppressed because it is too large

View file

@ -0,0 +1,96 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.files.create
(:require
[app.common.data :as d]
[app.common.files.features :as ffeat]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.db :as db]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.rpc.permissions :as perms]
[app.rpc.queries.projects :as proj]
[app.rpc.quotes :as quotes]
[app.util.blob :as blob]
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
(defn create-file-role!
[conn {:keys [file-id profile-id role]}]
(let [params {:file-id file-id
:profile-id profile-id}]
(->> (perms/assign-role-flags params role)
(db/insert! conn :file-profile-rel))))
(defn create-file
[conn {:keys [id name project-id is-shared data revn
modified-at deleted-at create-page
ignore-sync-until features]
:or {is-shared false revn 0 create-page true}
:as params}]
(let [id (or id (:id data) (uuid/next))
features (-> (into files/default-features features)
(files/check-features-compatibility!))
data (or data
(binding [ffeat/*current* features
ffeat/*wrap-with-objects-map-fn* (if (features "storage/objects-map") omap/wrap identity)
ffeat/*wrap-with-pointer-map-fn* (if (features "storage/pointer-map") pmap/wrap identity)]
(if create-page
(ctf/make-file-data id)
(ctf/make-file-data id nil))))
features (db/create-array conn "text" features)
file (db/insert! conn :file
(d/without-nils
{:id id
:project-id project-id
:name name
:revn revn
:is-shared is-shared
:data (blob/encode data)
:features features
:ignore-sync-until ignore-sync-until
:modified-at modified-at
:deleted-at deleted-at}))]
(->> (assoc params :file-id id :role :owner)
(create-file-role! conn))
(files/decode-row file)))
(s/def ::create-file
(s/keys :req [::rpc/profile-id]
:req-un [::files/name
::files/project-id]
:opt-un [::files/id
::files/is-shared
::files/features]))
(sv/defmethod ::create-file
{::doc/added "1.17"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id project-id)
(let [team-id (files/get-team-id conn project-id)
params (assoc params :profile-id profile-id)]
(run! (partial quotes/check-quote! conn)
(list {::quotes/id ::quotes/files-per-project
::quotes/team-id team-id
::quotes/profile-id profile-id
::quotes/project-id project-id}))
(-> (create-file conn params)
(vary-meta assoc ::audit/props {:team-id team-id})))))
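;; A minimal usage sketch of create-file from a test or REPL, assuming `pool`,
;; `project-id` and `profile-id` are already bound; names and values are
;; illustrative only.
(comment
  (db/with-atomic [conn pool]
    (create-file conn {:name "Sample file"
                       :project-id project-id
                       :profile-id profile-id
                       :is-shared false
                       :features #{}})))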

View file

@ -0,0 +1,107 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.files.temp
(:require
[app.common.exceptions :as ex]
[app.common.pages :as cp]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.files.create :as files.create]
[app.rpc.commands.files.update :as files.update]
[app.rpc.doc :as-alias doc]
[app.rpc.queries.projects :as proj]
[app.util.blob :as blob]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]))
;; --- MUTATION COMMAND: create-temp-file
(s/def ::create-page ::us/boolean)
(s/def ::create-temp-file
(s/keys :req [::rpc/profile-id]
:req-un [::files/name
::files/project-id]
:opt-un [::files/id
::files/is-shared
::files/features
::create-page]))
(sv/defmethod ::create-temp-file
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id project-id)
(files.create/create-file conn (assoc params :profile-id profile-id :deleted-at (dt/in-future {:days 1})))))
;; --- MUTATION COMMAND: update-temp-file
(defn update-temp-file
[conn {:keys [::rpc/profile-id session-id id revn changes] :as params}]
(db/insert! conn :file-change
{:id (uuid/next)
:session-id session-id
:profile-id profile-id
:created-at (dt/now)
:file-id id
:revn revn
:data nil
:changes (blob/encode changes)}))
(s/def ::update-temp-file
(s/keys :req-un [::files.update/changes
::files.update/revn
::files.update/session-id
::files/id]))
(sv/defmethod ::update-temp-file
{::doc/added "1.17"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(update-temp-file conn params)
nil))
;; --- MUTATION COMMAND: persist-temp-file
(defn persist-temp-file
[conn {:keys [id] :as params}]
(let [file (db/get-by-id conn :file id)
revs (db/query conn :file-change
{:file-id id}
{:order-by [[:revn :asc]]})
revn (count revs)]
(when (nil? (:deleted-at file))
(ex/raise :type :validation
:code :cant-persist-already-persisted-file))
(loop [revs (seq revs)
data (blob/decode (:data file))]
(if-let [rev (first revs)]
(recur (rest revs)
(->> rev :changes blob/decode (cp/process-changes data)))
(db/update! conn :file
{:deleted-at nil
:revn revn
:data (blob/encode data)}
{:id id})))
nil))
(s/def ::persist-temp-file
(s/keys :req [::rpc/profile-id]
:req-un [::files/id]))
(sv/defmethod ::persist-temp-file
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(persist-temp-file conn params)))
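;; A small sketch of the change replay performed above, assuming `file` and
;; `revs` have already been fetched as in persist-temp-file; each stored change
;; set is decoded and applied in order over the accumulated file data.
(comment
  (reduce (fn [data rev]
            (->> rev :changes blob/decode (cp/process-changes data)))
          (blob/decode (:data file))
          revs))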

View file

@ -0,0 +1,302 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.files.update
(:require
[app.common.exceptions :as ex]
[app.common.files.features :as ffeat]
[app.common.logging :as l]
[app.common.pages :as cp]
[app.common.pages.migrations :as pmg]
[app.common.spec :as us]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.loggers.audit :as audit]
[app.loggers.webhooks :as webhooks]
[app.metrics :as mtx]
[app.msgbus :as mbus]
[app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit]
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.util.blob :as blob]
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]))
;; --- SPECS
(s/def ::changes
(s/coll-of map? :kind vector?))
(s/def ::hint-origin ::us/keyword)
(s/def ::hint-events
(s/every ::us/keyword :kind vector?))
(s/def ::change-with-metadata
(s/keys :req-un [::changes]
:opt-un [::hint-origin
::hint-events]))
(s/def ::changes-with-metadata
(s/every ::change-with-metadata :kind vector?))
(s/def ::session-id ::us/uuid)
(s/def ::revn ::us/integer)
(s/def ::update-file
(s/and
(s/keys :req [::rpc/profile-id]
:req-un [::files/id ::session-id ::revn]
:opt-un [::changes ::changes-with-metadata ::features])
(fn [o]
(or (contains? o :changes)
(contains? o :changes-with-metadata)))))
;; --- HELPERS
;; File changes that affect the library and must be notified
;; to all clients using it.
(def ^:private library-change-types
#{:add-color :mod-color :del-color
:add-media :mod-media :del-media
:add-component :mod-component :del-component
:add-typography :mod-typography :del-typography})
(def ^:private file-change-types
#{:add-obj :mod-obj :del-obj
:reg-objects :mov-objects})
(defn- library-change?
[{:keys [type] :as change}]
(or (contains? library-change-types type)
(and (contains? file-change-types type)
(some? (:component-id change)))))
(def ^:private sql:get-file
"SELECT f.*, p.team_id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
WHERE f.id = ?
AND (f.deleted_at IS NULL OR
f.deleted_at > now())
FOR KEY SHARE")
(defn get-file
[conn id]
(let [file (db/exec-one! conn [sql:get-file id])]
(when-not file
(ex/raise :type :not-found
:code :object-not-found
:hint (format "file with id '%s' does not exist" id)))
(update file :features db/decode-pgarray #{})))
(defn- wrap-with-pointer-map-context
[f]
(fn [{:keys [conn] :as cfg} {:keys [id] :as file}]
(binding [pmap/*tracked* (atom {})
pmap/*load-fn* (partial files/load-pointer conn id)
ffeat/*wrap-with-pointer-map-fn* pmap/wrap]
(let [result (f cfg file)]
(files/persist-pointers! conn id)
result))))
(defn- wrap-with-objects-map-context
[f]
(fn [cfg file]
(binding [ffeat/*wrap-with-objects-map-fn* omap/wrap]
(f cfg file))))
(declare get-lagged-changes)
(declare send-notifications!)
(declare update-file)
(declare update-file*)
(declare take-snapshot?)
;; If features are specified in the params and the final feature
;; set is different from the persisted one, update it in the
;; database.
(sv/defmethod ::update-file
{::climit/queue :update-file
::climit/key-fn :id
::webhooks/event? true
::webhooks/batch-timeout (dt/duration "2m")
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(db/xact-lock! conn id)
(let [cfg (assoc cfg :conn conn)
params (assoc params :profile-id profile-id)
tpoint (dt/tpoint)]
(-> (update-file cfg params)
(rph/with-defer #(let [elapsed (tpoint)]
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))
(defn update-file
[{:keys [conn metrics] :as cfg} {:keys [profile-id id changes changes-with-metadata] :as params}]
(let [file (get-file conn id)
features (->> (concat (:features file)
(:features params))
(into files/default-features)
(files/check-features-compatibility!))]
(files/check-edition-permissions! conn profile-id (:id file))
(binding [ffeat/*current* features
ffeat/*previous* (:features file)]
(let [update-fn (cond-> update-file*
(contains? features "storage/pointer-map")
(wrap-with-pointer-map-context)
(contains? features "storage/objects-map")
(wrap-with-objects-map-context))
file (assoc file :features features)
changes (if changes-with-metadata
(->> changes-with-metadata (mapcat :changes) vec)
(vec changes))
params (-> params
(assoc :file file)
(assoc :changes changes)
(assoc ::created-at (dt/now)))]
(when (> (:revn params)
(:revn file))
(ex/raise :type :validation
:code :revn-conflict
:hint "The incoming revision number is greater than the stored version."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
(when (not= features (:features file))
(let [features (db/create-array conn "text" features)]
(db/update! conn :file
{:features features}
{:id id})))
(-> (update-fn cfg params)
(vary-meta assoc ::audit/replace-props
{:id (:id file)
:name (:name file)
:features (:features file)
:project-id (:project-id file)
:team-id (:team-id file)}))))))
(defn- update-file*
[{:keys [conn] :as cfg} {:keys [profile-id file changes session-id ::created-at] :as params}]
(let [file (-> file
(update :revn inc)
(update :data (fn [data]
(cond-> data
:always
(-> (blob/decode)
(assoc :id (:id file))
(pmg/migrate-data))
(and (contains? ffeat/*current* "components/v2")
(not (contains? ffeat/*previous* "components/v2")))
(ctf/migrate-to-components-v2)
:always
(-> (cp/process-changes changes)
(blob/encode))))))]
(db/insert! conn :file-change
{:id (uuid/next)
:session-id session-id
:profile-id profile-id
:created-at created-at
:file-id (:id file)
:revn (:revn file)
:features (db/create-array conn "text" (:features file))
:data (when (take-snapshot? file)
(:data file))
:changes (blob/encode changes)})
(db/update! conn :file
{:revn (:revn file)
:data (:data file)
:data-backend nil
:modified-at created-at
:has-media-trimmed false}
{:id (:id file)})
(db/update! conn :project
{:modified-at created-at}
{:id (:project-id file)})
(let [params (assoc params :file file)]
;; Send asynchronous notifications
(send-notifications! cfg params)
;; Retrieve and return lagged data
(get-lagged-changes conn params))))
(defn- take-snapshot?
"Defines the rule for when a file `data` snapshot should be saved."
[{:keys [revn modified-at] :as file}]
(let [freq (or (cf/get :file-change-snapshot-every) 20)
timeout (or (cf/get :file-change-snapshot-timeout)
(dt/duration {:hours 1}))]
(or (= 1 freq)
(zero? (mod revn freq))
(> (inst-ms (dt/diff modified-at (dt/now)))
(inst-ms timeout)))))
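;; A couple of illustrative evaluations of the rule above, assuming the default
;; frequency of 20 revisions and the default one hour timeout:
(comment
  (take-snapshot? {:revn 40 :modified-at (dt/now)}) ;; => true, revn is a multiple of 20
  (take-snapshot? {:revn 41 :modified-at (dt/now)}) ;; => false, unless modified-at is
                                                    ;;    older than the timeout
  )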
(def ^:private
sql:lagged-changes
"select s.id, s.revn, s.file_id,
s.session_id, s.changes
from file_change as s
where s.file_id = ?
and s.revn > ?
order by s.created_at asc")
(defn- get-lagged-changes
[conn {:keys [id revn] :as params}]
(->> (db/exec! conn [sql:lagged-changes id revn])
(map files/decode-row)
(vec)))
(defn- send-notifications!
[{:keys [conn] :as cfg} {:keys [file changes session-id] :as params}]
(let [lchanges (filter library-change? changes)
msgbus (:msgbus cfg)]
;; Asynchronously publish message to the msgbus
(mbus/pub! msgbus
:topic (:id file)
:message {:type :file-change
:profile-id (:profile-id params)
:file-id (:id file)
:session-id (:session-id params)
:revn (:revn file)
:changes changes})
(when (and (:is-shared file) (seq lchanges))
(let [team-id (or (:team-id file)
(files/get-team-id conn (:project-id file)))]
;; Asynchronously publish message to the msgbus
(mbus/pub! msgbus
:topic team-id
:message {:type :library-change
:profile-id (:profile-id params)
:file-id (:id file)
:session-id session-id
:revn (:revn file)
:modified-at (dt/now)
:changes lchanges})))))
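;; For reference, the shape of the two messages published above, written as a
;; sketch with placeholder values:
(comment
  ;; published on the file topic for every accepted update
  {:type :file-change
   :profile-id profile-id
   :file-id file-id
   :session-id session-id
   :revn revn
   :changes changes}

  ;; published on the team topic only when the file is shared and the update
  ;; contains library-affecting changes
  {:type :library-change
   :profile-id profile-id
   :file-id file-id
   :session-id session-id
   :revn revn
   :modified-at (dt/now)
   :changes lchanges})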

View file

@ -10,10 +10,15 @@
[app.common.exceptions :as ex]
[app.common.spec :as us]
[app.db :as db]
[app.http.session :as session]
[app.loggers.audit :as-alias audit]
[app.main :as-alias main]
[app.rpc :as-alias rpc]
[app.rpc.commands.auth :as cmd.auth]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.rpc.queries.profile :as profile]
[app.tokens :as tokens]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
@ -32,15 +37,15 @@
(sv/defmethod ::login-with-ldap
"Performs the authentication using LDAP backend. Only works if LDAP
is properly configured and enabled with `login-with-ldap` flag."
{:auth false
{::rpc/auth false
::doc/added "1.15"}
[{:keys [session tokens ldap] :as cfg} params]
(when-not ldap
[{:keys [::main/props ::ldap/provider session] :as cfg} params]
(when-not provider
(ex/raise :type :restriction
:code :ldap-not-initialized
:hint "ldap auth provider is not initialized"))
(let [info (ldap/authenticate ldap params)]
(let [info (ldap/authenticate provider params)]
(when-not info
(ex/raise :type :validation
:code :wrong-credentials))
@ -56,20 +61,20 @@
;; user comes from team-invitation process; in this case,
;; regenerate token and send back to the user a new invitation
;; token (and mark current session as logged).
(let [claims (tokens :verify {:token token :iss :team-invitation})
(let [claims (tokens/verify props {:token token :iss :team-invitation})
claims (assoc claims
:member-id (:id profile)
:member-email (:email profile))
token (tokens :generate claims)]
(with-meta {:invitation-token token}
{:transform-response ((:create session) (:id profile))
::audit/props (:props profile)
::audit/profile-id (:id profile)}))
token (tokens/generate props claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/props (:props profile)
::audit/profile-id (:id profile)})))
(with-meta profile
{:transform-response ((:create session) (:id profile))
::audit/props (:props profile)
::audit/profile-id (:id profile)})))))
(-> profile
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/props (:props profile)
::audit/profile-id (:id profile)}))))))
(defn- login-or-register
[{:keys [pool] :as cfg} info]

View file

@ -13,12 +13,15 @@
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.loggers.webhooks :as-alias webhooks]
[app.rpc :as-alias rpc]
[app.rpc.commands.binfile :as binfile]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams :refer [create-project-role create-project]]
[app.rpc.doc :as-alias doc]
[app.rpc.mutations.projects :refer [create-project-role create-project]]
[app.rpc.queries.projects :as proj]
[app.rpc.queries.teams :as teams]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
@ -29,22 +32,23 @@
(declare duplicate-file)
(s/def ::id ::us/uuid)
(s/def ::profile-id ::us/uuid)
(s/def ::project-id ::us/uuid)
(s/def ::file-id ::us/uuid)
(s/def ::team-id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::duplicate-file
(s/keys :req-un [::profile-id ::file-id]
(s/keys :req [::rpc/profile-id]
:req-un [::file-id]
:opt-un [::name]))
(sv/defmethod ::duplicate-file
"Duplicate a single file in the same team."
{::doc/added "1.16"}
[{:keys [pool] :as cfg} params]
{::doc/added "1.16"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(duplicate-file conn params)))
(duplicate-file conn (assoc params :profile-id profile-id))))
(defn- remap-id
[item index key]
@ -53,7 +57,7 @@
(assoc key (get index (get item key) (get item key)))))
(defn- process-file
[file index]
[conn {:keys [id] :as file} index]
(letfn [(process-form [form]
(cond-> form
;; Relink library items
@ -97,18 +101,25 @@
res)))
media
media))]
(update file :data
(fn [data]
(-> data
(blob/decode)
(assoc :id (:id file))
(pmg/migrate-data)
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(d/without-nils)
(blob/encode))))))
(-> file
(update :id #(get index %))
(update :data
(fn [data]
(binding [pmap/*load-fn* (partial files/load-pointer conn id)
pmap/*tracked* (atom {})]
(let [file-id (get index id)
data (-> data
(blob/decode)
(assoc :id file-id)
(pmg/migrate-data)
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(d/without-nils)
(files/process-pointers pmap/clone)
(blob/encode))]
(files/persist-pointers! conn file-id)
data)))))))
(def sql:retrieve-used-libraries
"select flr.*
@ -125,7 +136,7 @@
and so.deleted_at is null")
(defn duplicate-file*
[conn {:keys [profile-id file index project-id name flibs fmeds]} {:keys [reset-shared-flag] :as opts}]
[conn {:keys [profile-id file index project-id name flibs fmeds]} {:keys [reset-shared-flag]}]
(let [flibs (or flibs (db/exec! conn [sql:retrieve-used-libraries (:id file)]))
fmeds (or fmeds (db/exec! conn [sql:retrieve-used-media-objects (:id file)]))
@ -166,9 +177,9 @@
file (-> file
(assoc :created-at now)
(assoc :modified-at now)
(assoc :ignore-sync-until ignore)
(update :id #(get index %))
(process-file index))]
(assoc :ignore-sync-until ignore))
file (process-file conn file index)]
(db/insert! conn :file file)
(db/insert! conn :file-profile-rel
@ -194,22 +205,25 @@
(proj/check-edition-permissions! conn profile-id (:project-id file))
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(-> (duplicate-file* conn params {:reset-shared-flag true})
(update :data blob/decode))))
(update :data blob/decode)
(update :features db/decode-pgarray #{}))))
;; --- COMMAND: Duplicate Project
(declare duplicate-project)
(s/def ::duplicate-project
(s/keys :req-un [::profile-id ::project-id]
(s/keys :req [::rpc/profile-id]
:req-un [::project-id]
:opt-un [::name]))
(sv/defmethod ::duplicate-project
"Duplicate an entire project with all the files"
{::doc/added "1.16"}
{::doc/added "1.16"
::webhooks/event? true}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(duplicate-project conn params)))
(duplicate-project conn (assoc params :profile-id (::rpc/profile-id params)))))
(defn duplicate-project
[conn {:keys [profile-id project-id name] :as params}]
@ -237,9 +251,7 @@
;; create the duplicated project and assign the current profile as
;; a project owner
(create-project conn project)
(create-project-role conn {:project-id (:id project)
:profile-id profile-id
:role :owner})
(create-project-role conn profile-id (:id project) :owner)
;; duplicate all files
(let [index (reduce #(assoc %1 (:id %2) (uuid/next)) {} files)
@ -310,15 +322,16 @@
(s/def ::ids (s/every ::us/uuid :kind set?))
(s/def ::move-files
(s/keys :req-un [::profile-id ::ids ::project-id]))
(s/keys :req [::rpc/profile-id]
:req-un [::ids ::project-id]))
(sv/defmethod ::move-files
"Move a set of files from one project to other."
{::doc/added "1.16"}
[{:keys [pool] :as cfg} params]
{::doc/added "1.16"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(move-files conn params)))
(move-files conn (assoc params :profile-id profile-id))))
;; --- COMMAND: Move project
@ -349,14 +362,16 @@
(s/def ::move-project
(s/keys :req-un [::profile-id ::team-id ::project-id]))
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::project-id]))
(sv/defmethod ::move-project
"Move projects between teams."
{::doc/added "1.16"}
[{:keys [pool] :as cfg} params]
{::doc/added "1.16"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(move-project conn params)))
(move-project conn (assoc params :profile-id profile-id))))
;; --- COMMAND: Clone Template
@ -364,15 +379,17 @@
(s/def ::template-id ::us/not-empty-string)
(s/def ::clone-template
(s/keys :req-un [::profile-id ::project-id ::template-id]))
(s/keys :req [::rpc/profile-id]
:req-un [::project-id ::template-id]))
(sv/defmethod ::clone-template
"Clone into the specified project the template by its id."
{::doc/added "1.16"}
[{:keys [pool] :as cfg} params]
{::doc/added "1.16"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(-> (assoc cfg :conn conn)
(clone-template params))))
(clone-template (assoc params :profile-id profile-id)))))
(defn- clone-template
[{:keys [conn templates] :as cfg} {:keys [profile-id template-id project-id]}]

View file

@ -0,0 +1,274 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.media
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.climit :as climit]
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.core :as p]
[promesa.exec :as px]))
(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
(def thumbnail-options
{:width 100
:height 100
:quality 85
:format :jpeg})
(s/def ::id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::file-id ::us/uuid)
(s/def ::team-id ::us/uuid)
(defn validate-content-size!
[content]
(when (> (:size content) (cf/get :media-max-file-size default-max-file-size))
(ex/raise :type :restriction
:code :media-max-file-size-reached
:hint (str/ffmt "the uploaded file size % is greater than the maximum %"
(:size content)
default-max-file-size))))
;; --- Create File Media object (upload)
(declare create-file-media-object)
(s/def ::content ::media/upload)
(s/def ::is-local ::us/boolean)
(s/def ::upload-file-media-object
(s/keys :req [::rpc/profile-id]
:req-un [::file-id ::is-local ::name ::content]
:opt-un [::id]))
(sv/defmethod ::upload-file-media-object
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id content] :as params}]
(let [cfg (update cfg :storage media/configure-assets-storage)]
(files/check-edition-permissions! pool profile-id file-id)
(media/validate-media-type! content)
(validate-content-size! content)
(create-file-media-object cfg params)))
(defn- big-enough-for-thumbnail?
"Checks if the provided image info is big enough to
create a separate thumbnail storage object."
[info]
(or (> (:width info) (:width thumbnail-options))
(> (:height info) (:height thumbnail-options))))
(defn- svg-image?
[info]
(= (:mtype info) "image/svg+xml"))
;; NOTE: we use `on conflict do update` instead of `do nothing`
;; because postgresql does not return anything when no update is
;; performed; the `do update` does the trick.
(def sql:create-file-media-object
"insert into file_media_object (id, file_id, is_local, name, media_id, thumbnail_id, width, height, mtype)
values (?, ?, ?, ?, ?, ?, ?, ?, ?)
on conflict (id) do update set created_at=file_media_object.created_at
returning *")
;; NOTE: the following function executes without a transaction; this
;; means that if something fails in the middle of it, it will
;; probably leave leaked/unreferenced objects in the database and
;; probably in the storage layer. To handle possible object leakage,
;; we create all media objects marked as touched; this ensures that if
;; something fails, all leaked (already created) storage objects will
;; eventually be marked as deleted by the touched-gc task.
;;
;; The touched-gc task performs a periodic analysis of all touched
;; storage objects and checks their references. This is the reason why
;; the `reference` metadata exists: it indicates the name of the table
;; which holds the reference to the storage object (a kind of
;; inverse, soft referential integrity).
(defn create-file-media-object
[{:keys [storage pool climit executor]}
{:keys [id file-id is-local name content]}]
(letfn [;; Function responsible for retrieving the file information; as
;; it is a synchronous operation it should be wrapped in the
;; with-dispatch macro.
(get-info [content]
(climit/with-dispatch (:process-image climit)
(media/run {:cmd :info :input content})))
;; Function responsible for calculating the cryptographic hash of
;; the provided data.
(calculate-hash [data]
(px/with-dispatch executor
(sto/calculate-hash data)))
;; Function responsible for generating the thumbnail. As it is a
;; synchronous operation, it should be wrapped in the with-dispatch macro.
(generate-thumbnail [info]
(climit/with-dispatch (:process-image climit)
(media/run (assoc thumbnail-options
:cmd :generic-thumbnail
:input info))))
(create-thumbnail [info]
(when (and (not (svg-image? info))
(big-enough-for-thumbnail? info))
(p/let [thumb (generate-thumbnail info)
hash (calculate-hash (:data thumb))
content (-> (sto/content (:data thumb) (:size thumb))
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype thumb)
:bucket "file-media-object"}))))
(create-image [info]
(p/let [data (:path info)
hash (calculate-hash data)
content (-> (sto/content data)
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype info)
:bucket "file-media-object"})))
(insert-into-database [info image thumb]
(px/with-dispatch executor
(db/exec-one! pool [sql:create-file-media-object
(or id (uuid/next))
file-id is-local name
(:id image)
(:id thumb)
(:width info)
(:height info)
(:mtype info)])))]
(p/let [info (get-info content)
thumb (create-thumbnail info)
image (create-image info)]
(insert-into-database info image thumb))))
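;; A sketch of the map expected as `content` by the function above; it is the
;; same shape produced by process-response further down, with placeholder values.
(comment
  {:filename "image.png"
   :size 12345
   :path (tmp/tempfile :prefix "penpot.media.download.")
   :mtype "image/png"})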
;; --- Create File Media Object (from URL)
(declare ^:private create-file-media-object-from-url)
(s/def ::create-file-media-object-from-url
(s/keys :req [::rpc/profile-id]
:req-un [::file-id ::is-local ::url]
:opt-un [::id ::name]))
(sv/defmethod ::create-file-media-object-from-url
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
(let [cfg (update cfg :storage media/configure-assets-storage)]
(files/check-edition-permissions! pool profile-id file-id)
(create-file-media-object-from-url cfg params)))
(defn- create-file-media-object-from-url
[cfg {:keys [url name] :as params}]
(letfn [(parse-and-validate-size [headers]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to a resource with unknown size"))
(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))
{:size size
:mtype mtype
:format format}))
(download-media [uri]
(-> (http/req! cfg {:method :get :uri uri} {:response-type :input-stream})
(p/then process-response)))
(process-response [{:keys [body headers] :as response}]
(let [{:keys [size mtype]} (parse-and-validate-size headers)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write-to-file! body path :size size)]
(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))
{:filename "tempfile"
:size size
:path path
:mtype mtype}))]
(p/let [content (download-media url)]
(->> (merge params {:content content :name (or name (:filename content))})
(create-file-media-object cfg)))))
;; --- Clone File Media object (Upload and create from url)
(declare clone-file-media-object)
(s/def ::clone-file-media-object
(s/keys :req [::rpc/profile-id]
:req-un [::file-id ::is-local ::id]))
(sv/defmethod ::clone-file-media-object
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(-> (assoc cfg :conn conn)
(clone-file-media-object params))))
(defn clone-file-media-object
[{:keys [conn]} {:keys [id file-id is-local]}]
(let [mobj (db/get-by-id conn :file-media-object id)]
(db/insert! conn :file-media-object
{:id (uuid/next)
:file-id file-id
:is-local is-local
:name (:name mobj)
:media-id (:media-id mobj)
:thumbnail-id (:thumbnail-id mobj)
:width (:width mobj)
:height (:height mobj)
:mtype (:mtype mobj)})))

View file

@ -0,0 +1,68 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.search
(:require
[app.common.spec :as us]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
(def ^:private sql:search-files
"with projects as (
select p.*
from project as p
inner join team_profile_rel as tpr on (tpr.team_id = p.team_id)
where tpr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null or p.deleted_at > now())
and (tpr.is_admin = true or
tpr.is_owner = true or
tpr.can_edit = true)
union
select p.*
from project as p
inner join project_profile_rel as ppr on (ppr.project_id = p.id)
where ppr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null or p.deleted_at > now())
and (ppr.is_admin = true or
ppr.is_owner = true or
ppr.can_edit = true)
)
select distinct
f.id,
f.project_id,
f.created_at,
f.modified_at,
f.name,
f.is_shared
from file as f
inner join projects as pr on (f.project_id = pr.id)
where f.name ilike ('%' || ? || '%')
order by f.created_at asc")
(defn search-files
[conn profile-id team-id search-term]
(db/exec! conn [sql:search-files
profile-id team-id
profile-id team-id
search-term]))
(s/def ::team-id ::us/uuid)
(s/def ::search-term ::us/string)
(s/def ::search-files
(s/keys :req [::rpc/profile-id]
:req-un [::team-id]
:opt-un [::search-term]))
(sv/defmethod ::search-files
{::doc/added "1.17"}
[{:keys [pool]} {:keys [::rpc/profile-id team-id search-term]}]
(some->> search-term (search-files pool profile-id team-id)))
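;; A minimal usage sketch of the helper above, assuming `pool`, `profile-id`
;; and `team-id` are already bound; it returns the file rows matched by the
;; search query.
(comment
  (search-files pool profile-id team-id "landing"))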

View file

@ -0,0 +1,890 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.teams
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.emails :as eml]
[app.loggers.audit :as audit]
[app.main :as-alias main]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.climit :as climit]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.rpc.permissions :as perms]
[app.rpc.queries.profile :as profile]
[app.rpc.quotes :as quotes]
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[promesa.core :as p]
[promesa.exec :as px]))
;; --- Helpers & Specs
(s/def ::id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::file-id ::us/uuid)
(s/def ::team-id ::us/uuid)
(def ^:private sql:team-permissions
"select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
join team as t on (t.id = tpr.team_id)
where tpr.profile_id = ?
and tpr.team_id = ?
and t.deleted_at is null")
(defn get-permissions
[conn profile-id team-id]
(let [rows (db/exec! conn [sql:team-permissions profile-id team-id])
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true})))
(def has-edit-permissions?
(perms/make-edition-predicate-fn get-permissions))
(def has-read-permissions?
(perms/make-read-predicate-fn get-permissions))
(def check-edition-permissions!
(perms/make-check-fn has-edit-permissions?))
(def check-read-permissions!
(perms/make-check-fn has-read-permissions?))
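;; A small sketch of how these helpers compose, assuming `conn`, `profile-id`
;; and `team-id` are bound: get-permissions returns a permissions map (or nil
;; when the profile is not a team member), while the check-* variants raise an
;; error instead of returning false.
(comment
  (get-permissions conn profile-id team-id)
  ;; => {:is-owner false, :is-admin true, :can-edit true, :can-read true}
  (check-edition-permissions! conn profile-id team-id))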
;; --- Query: Teams
(declare retrieve-teams)
(s/def ::get-teams
(s/keys :req [::rpc/profile-id]))
(sv/defmethod ::get-teams
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(with-open [conn (db/open pool)]
(retrieve-teams conn profile-id)))
(def sql:teams
"select t.*,
tp.is_owner,
tp.is_admin,
tp.can_edit,
(t.id = ?) as is_default
from team_profile_rel as tp
join team as t on (t.id = tp.team_id)
where t.deleted_at is null
and tp.profile_id = ?
order by tp.created_at asc")
(defn process-permissions
[team]
(let [is-owner (:is-owner team)
is-admin (:is-admin team)
can-edit (:can-edit team)
permissions {:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)}]
(-> team
(dissoc :is-owner :is-admin :can-edit)
(assoc :permissions permissions))))
(defn retrieve-teams
[conn profile-id]
(let [defaults (profile/retrieve-additional-data conn profile-id)]
(->> (db/exec! conn [sql:teams (:default-team-id defaults) profile-id])
(mapv process-permissions))))
;; --- Query: Team (by ID)
(declare retrieve-team)
(s/def ::get-team
(s/keys :req [::rpc/profile-id]
:req-un [::id]))
(sv/defmethod ::get-team
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id]}]
(with-open [conn (db/open pool)]
(retrieve-team conn profile-id id)))
(defn retrieve-team
[conn profile-id team-id]
(let [defaults (profile/retrieve-additional-data conn profile-id)
sql (str "WITH teams AS (" sql:teams ") SELECT * FROM teams WHERE id=?")
result (db/exec-one! conn [sql (:default-team-id defaults) profile-id team-id])]
(when-not result
(ex/raise :type :not-found
:code :team-does-not-exist))
(process-permissions result)))
;; --- Query: Team Members
(def sql:team-members
"select tp.*,
p.id,
p.email,
p.fullname as name,
p.fullname as fullname,
p.photo_id,
p.is_active
from team_profile_rel as tp
join profile as p on (p.id = tp.profile_id)
where tp.team_id = ?")
(defn retrieve-team-members
[conn team-id]
(db/exec! conn [sql:team-members team-id]))
(s/def ::team-id ::us/uuid)
(s/def ::get-team-members
(s/keys :req [::rpc/profile-id]
:req-un [::team-id]))
(sv/defmethod ::get-team-members
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
(with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id team-id)
(retrieve-team-members conn team-id)))
;; --- Query: Team Users
(declare retrieve-users)
(declare retrieve-team-for-file)
(s/def ::get-team-users
(s/and (s/keys :req [::rpc/profile-id]
:opt-un [::team-id ::file-id])
#(or (:team-id %) (:file-id %))))
(sv/defmethod ::get-team-users
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id file-id]}]
(with-open [conn (db/open pool)]
(if team-id
(do
(check-read-permissions! conn profile-id team-id)
(retrieve-users conn team-id))
(let [{team-id :id} (retrieve-team-for-file conn file-id)]
(check-read-permissions! conn profile-id team-id)
(retrieve-users conn team-id)))))
;; This is a query similar to team members but can contain more data
;; because some users can be explicitly added to a project or file (not
;; implemented in the UI)
(def sql:team-users
"select pf.id, pf.fullname, pf.photo_id
from profile as pf
inner join team_profile_rel as tpr on (tpr.profile_id = pf.id)
where tpr.team_id = ?
union
select pf.id, pf.fullname, pf.photo_id
from profile as pf
inner join project_profile_rel as ppr on (ppr.profile_id = pf.id)
inner join project as p on (ppr.project_id = p.id)
where p.team_id = ?
union
select pf.id, pf.fullname, pf.photo_id
from profile as pf
inner join file_profile_rel as fpr on (fpr.profile_id = pf.id)
inner join file as f on (fpr.file_id = f.id)
inner join project as p on (f.project_id = p.id)
where p.team_id = ?")
(def sql:team-by-file
"select p.team_id as id
from project as p
join file as f on (p.id = f.project_id)
where f.id = ?")
(defn retrieve-users
[conn team-id]
(db/exec! conn [sql:team-users team-id team-id team-id]))
(defn retrieve-team-for-file
[conn file-id]
(->> [sql:team-by-file file-id]
(db/exec-one! conn)))
;; --- Query: Team Stats
(declare retrieve-team-stats)
(s/def ::get-team-stats
(s/keys :req [::rpc/profile-id]
:req-un [::team-id]))
(sv/defmethod ::get-team-stats
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
(with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id team-id)
(retrieve-team-stats conn team-id)))
(def sql:team-stats
"select (select count(*) from project where team_id = ?) as projects,
(select count(*) from file as f join project as p on (p.id = f.project_id) where p.team_id = ?) as files")
(defn retrieve-team-stats
[conn team-id]
(db/exec-one! conn [sql:team-stats team-id team-id]))
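;; A usage sketch of the stats helper above; the query returns a single row
;; with two counters, e.g. {:projects 3, :files 12}.
(comment
  (retrieve-team-stats conn team-id))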
;; --- Query: Team invitations
(s/def ::get-team-invitations
(s/keys :req [::rpc/profile-id]
:req-un [::team-id]))
(def sql:team-invitations
"select email_to as email, role, (valid_until < now()) as expired
from team_invitation where team_id = ? order by valid_until desc, created_at desc")
(defn get-team-invitations
[conn team-id]
(->> (db/exec! conn [sql:team-invitations team-id])
(mapv #(update % :role keyword))))
(sv/defmethod ::get-team-invitations
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
(with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id team-id)
(get-team-invitations conn team-id)))
;; --- Mutation: Create Team
(declare create-team)
(declare create-project)
(declare create-project-role)
(declare ^:private create-team*)
(declare ^:private create-team-role)
(declare ^:private create-team-default-project)
(s/def ::create-team
(s/keys :req [::rpc/profile-id]
:req-un [::name]
:opt-un [::id]))
(sv/defmethod ::create-team
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(quotes/check-quote! conn {::quotes/id ::quotes/teams-per-profile
::quotes/profile-id profile-id})
(create-team conn (assoc params :profile-id profile-id))))
(defn create-team
"This is the complete team creation process; it creates the team
object and all related objects (default role and default project)."
[conn params]
(let [team (create-team* conn params)
params (assoc params
:team-id (:id team)
:role :owner)
project (create-team-default-project conn params)]
(create-team-role conn params)
(assoc team :default-project-id (:id project))))
(defn- create-team*
[conn {:keys [id name is-default] :as params}]
(let [id (or id (uuid/next))
is-default (if (boolean? is-default) is-default false)]
(db/insert! conn :team
{:id id
:name name
:is-default is-default})))
(defn- create-team-role
[conn {:keys [profile-id team-id role] :as params}]
(let [params {:team-id team-id
:profile-id profile-id}]
(->> (perms/assign-role-flags params role)
(db/insert! conn :team-profile-rel))))
(defn- create-team-default-project
[conn {:keys [profile-id team-id] :as params}]
(let [project {:id (uuid/next)
:team-id team-id
:name "Drafts"
:is-default true}
project (create-project conn project)]
(create-project-role conn profile-id (:id project) :owner)
project))
;; NOTE: we have project creation here because there is a cyclic
;; dependency between the teams and projects namespaces, and project
;; creation happens on both sides, on team creation and on plain
;; project creation, so it makes sense to have these functions in this
;; namespace too.
(defn create-project
[conn {:keys [id team-id name is-default] :as params}]
(let [id (or id (uuid/next))
is-default (if (boolean? is-default) is-default false)]
(db/insert! conn :project
{:id id
:name name
:team-id team-id
:is-default is-default})))
(defn create-project-role
[conn profile-id project-id role]
(let [params {:project-id project-id
:profile-id profile-id}]
(->> (perms/assign-role-flags params role)
(db/insert! conn :project-profile-rel))))
;; --- Mutation: Update Team
(s/def ::update-team
(s/keys :req [::rpc/profile-id]
:req-un [::name ::id]))
(sv/defmethod ::update-team
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id name] :as params}]
(db/with-atomic [conn pool]
(check-edition-permissions! conn profile-id id)
(db/update! conn :team
{:name name}
{:id id})
nil))
;; --- Mutation: Leave Team
(declare role->params)
(defn leave-team
[conn {:keys [profile-id id reassign-to]}]
(let [perms (get-permissions conn profile-id id)
members (retrieve-team-members conn id)]
(cond
;; we can only proceed if there are more members in the team
;; besides the current profile
(<= (count members) 1)
(ex/raise :type :validation
:code :no-enough-members-for-leave
:context {:members (count members)})
;; if `reassign-to` is filled and has a different value
;; from the current profile-id, we proceed to reassign the
;; owner role to the profile identified by `reassign-to`.
(and reassign-to (not= reassign-to profile-id))
(let [member (d/seek #(= reassign-to (:id %)) members)]
(when-not member
(ex/raise :type :not-found :code :member-does-not-exist))
;; unassign owner role from current profile
(db/update! conn :team-profile-rel
{:is-owner false}
{:team-id id
:profile-id profile-id})
;; assign owner role to new profile
(db/update! conn :team-profile-rel
(role->params :owner)
{:team-id id :profile-id reassign-to}))
;; and finally, if none of the other conditions match and the
;; current profile is the owner, we don't allow it because there
;; must always be an owner.
(:is-owner perms)
(ex/raise :type :validation
:code :owner-cant-leave-team
:hint "releasing owner before leave"))
(db/delete! conn :team-profile-rel
{:profile-id profile-id
:team-id id})
nil))
(s/def ::reassign-to ::us/uuid)
(s/def ::leave-team
(s/keys :req [::rpc/profile-id]
:req-un [::id]
:opt-un [::reassign-to]))
(sv/defmethod ::leave-team
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(leave-team conn (assoc params :profile-id profile-id))))
;; --- Mutation: Delete Team
(s/def ::delete-team
(s/keys :req [::rpc/profile-id]
:req-un [::id]))
;; TODO: right now we just don't allow deleting the default team; in
;; the future it should raise a specific exception to signal that this
;; action is not allowed.
(sv/defmethod ::delete-team
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
(db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id id)]
(when-not (:is-owner perms)
(ex/raise :type :validation
:code :only-owner-can-delete-team))
(db/update! conn :team
{:deleted-at (dt/now)}
{:id id :is-default false})
nil)))
;; --- Mutation: Team Update Role
(s/def ::team-id ::us/uuid)
(s/def ::member-id ::us/uuid)
;; Temporarily disabled viewer role
;; https://tree.taiga.io/project/uxboxproject/issue/1083
;; (s/def ::role #{:owner :admin :editor :viewer})
(s/def ::role #{:owner :admin :editor})
(defn role->params
[role]
(case role
:admin {:is-owner false :is-admin true :can-edit true}
:editor {:is-owner false :is-admin false :can-edit true}
:owner {:is-owner true :is-admin true :can-edit true}
:viewer {:is-owner false :is-admin false :can-edit false}))
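;; Illustrative evaluations of role->params, taken directly from the case
;; table above:
(comment
  (role->params :admin)  ;; => {:is-owner false :is-admin true  :can-edit true}
  (role->params :editor) ;; => {:is-owner false :is-admin false :can-edit true}
  (role->params :owner)  ;; => {:is-owner true  :is-admin true  :can-edit true}
  )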
(defn update-team-member-role
[conn {:keys [profile-id team-id member-id role] :as params}]
;; We retrieve all team members instead of querying the
;; database for a single member. This is just for
;; convenience; if this becomes a bottleneck or problematic,
;; we will change it to more efficient fetch mechanisms.
(let [perms (get-permissions conn profile-id team-id)
members (retrieve-team-members conn team-id)
member (d/seek #(= member-id (:id %)) members)
is-owner? (:is-owner perms)
is-admin? (:is-admin perms)]
;; If no member is found, just 404
(when-not member
(ex/raise :type :not-found
:code :member-does-not-exist))
;; First check if we have permissions to change roles
(when-not (or is-owner? is-admin?)
(ex/raise :type :validation
:code :insufficient-permissions))
;; Don't allow changing the role of the owner member
(when (:is-owner member)
(ex/raise :type :validation
:code :cant-change-role-to-owner))
;; Don't allow admin users to promote members to owner.
(when (and (not is-owner?) (= role :owner))
(ex/raise :type :validation
:code :cant-promote-to-owner))
(let [params (role->params role)]
;; Only allow single owner on team
(when (= role :owner)
(db/update! conn :team-profile-rel
{:is-owner false}
{:team-id team-id
:profile-id profile-id}))
(db/update! conn :team-profile-rel
params
{:team-id team-id
:profile-id member-id})
nil)))
(s/def ::update-team-member-role
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::member-id ::role]))
(sv/defmethod ::update-team-member-role
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(update-team-member-role conn (assoc params :profile-id profile-id))))
;; --- Mutation: Delete Team Member
(s/def ::delete-team-member
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::member-id]))
(sv/defmethod ::delete-team-member
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id team-id member-id] :as params}]
(db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id team-id)]
(when-not (or (:is-owner perms)
(:is-admin perms))
(ex/raise :type :validation
:code :insufficient-permissions))
(when (= member-id profile-id)
(ex/raise :type :validation
:code :cant-remove-yourself))
(db/delete! conn :team-profile-rel {:profile-id member-id
:team-id team-id})
nil)))
;; --- Mutation: Update Team Photo
(declare ^:private upload-photo)
(declare ^:private update-team-photo)
(s/def ::file ::media/upload)
(s/def ::update-team-photo
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::file]))
(sv/defmethod ::update-team-photo
{::doc/added "1.17"}
[cfg {:keys [::rpc/profile-id file] :as params}]
;; Validate incoming mime type
(media/validate-media-type! file #{"image/jpeg" "image/png" "image/webp"})
(let [cfg (update cfg :storage media/configure-assets-storage)]
(update-team-photo cfg (assoc params :profile-id profile-id))))
(defn update-team-photo
[{:keys [pool storage executor] :as cfg} {:keys [profile-id team-id] :as params}]
(p/let [team (px/with-dispatch executor
(retrieve-team pool profile-id team-id))
photo (upload-photo cfg params)]
;; Mark the object as touched to make it eligible for tentative
;; garbage collection.
(when-let [id (:photo-id team)]
(sto/touch-object! storage id))
;; Save new photo
(db/update! pool :team
{:photo-id (:id photo)}
{:id team-id})
(assoc team :photo-id (:id photo))))
(defn upload-photo
[{:keys [storage executor climit] :as cfg} {:keys [file]}]
(letfn [(get-info [content]
(climit/with-dispatch (:process-image climit)
(media/run {:cmd :info :input content})))
(generate-thumbnail [info]
(climit/with-dispatch (:process-image climit)
(media/run {:cmd :profile-thumbnail
:format :jpeg
:quality 85
:width 256
:height 256
:input info})))
;; Function responsible for calculating the cryptographic hash of
;; the provided data.
(calculate-hash [data]
(px/with-dispatch executor
(sto/calculate-hash data)))]
(p/let [info (get-info file)
thumb (generate-thumbnail info)
hash (calculate-hash (:data thumb))
content (-> (sto/content (:data thumb) (:size thumb))
(sto/wrap-with-hash hash))]
(sto/put-object! storage {::sto/content content
::sto/deduplicate? true
:bucket "profile"
:content-type (:mtype thumb)}))))
;; --- Mutation: Create Team Invitation
(def sql:upsert-team-invitation
"insert into team_invitation(team_id, email_to, role, valid_until)
values (?, ?, ?, ?)
on conflict(team_id, email_to) do
update set role = ?, valid_until = ?, updated_at = now();")
(defn- create-invitation-token
[cfg {:keys [profile-id valid-until team-id member-id member-email role]}]
(tokens/generate (::main/props cfg)
{:iss :team-invitation
:exp valid-until
:profile-id profile-id
:role role
:team-id team-id
:member-email member-email
:member-id member-id}))
(defn- create-profile-identity-token
[cfg profile]
(tokens/generate (::main/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (dt/in-future {:days 30})}))
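;; A sketch of the invitation token round trip, assuming `cfg` carries the
;; ::main/props shared secret and the other ids are bound: the token generated
;; here is later verified with tokens/verify using the same :iss claim, as in
;; the LDAP login flow earlier in this changeset.
(comment
  (let [token (create-invitation-token cfg {:profile-id profile-id
                                            :valid-until (dt/in-future "168h")
                                            :team-id team-id
                                            :member-email "user@example.com"
                                            :role :editor})]
    (tokens/verify (::main/props cfg) {:token token :iss :team-invitation})))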
(defn- create-invitation
[{:keys [::conn] :as cfg} {:keys [team profile role email] :as params}]
(let [member (profile/retrieve-profile-data-by-email conn email)
expire (dt/in-future "168h") ;; 7 days
itoken (create-invitation-token cfg {:profile-id (:id profile)
:valid-until expire
:team-id (:id team)
:member-email (or (:email member) email)
:member-id (:id member)
:role role})
ptoken (create-profile-identity-token cfg profile)]
(when (and member (not (eml/allow-send-emails? conn member)))
(ex/raise :type :validation
:code :member-is-muted
:email email
:hint "the profile has been reported repeatedly as spam or has bounces"))
;; Secondly check if the invited member email is part of the global spam/bounce report.
(when (eml/has-bounce-reports? conn email)
(ex/raise :type :validation
:code :email-has-permanent-bounces
:email email
:hint "the email you invite has been repeatedly reported as spam or has bounced"))
(when (contains? cf/flags :log-invitation-tokens)
(l/trace :hint "invitation token" :token itoken))
;; When email verification is disabled and the invited user is
;; already present in the database, we proceed to add them to the
;; team as-is, without an email roundtrip.
;; TODO: if the member does not exist and email verification is
;; disabled, we should proceed to create the profile (?)
(if (and (not (contains? cf/flags :email-verification))
(some? member))
(let [params (merge {:team-id (:id team)
:profile-id (:id member)}
(role->params role))]
;; Insert the invited member to the team
(db/insert! conn :team-profile-rel params {:on-conflict-do-nothing true})
;; If profile is not yet verified, mark it as verified because
;; accepting an invitation link serves as verification.
(when-not (:is-active member)
(db/update! conn :profile
{:is-active true}
{:id (:id member)})))
(do
(db/exec-one! conn [sql:upsert-team-invitation
(:id team) (str/lower email) (name role) expire (name role) expire])
(eml/send! {::eml/conn conn
::eml/factory eml/invite-to-team
:public-uri (cf/get :public-uri)
:to email
:invited-by (:fullname profile)
:team (:name team)
:token itoken
:extra-data ptoken})))
itoken))
(s/def ::email ::us/email)
(s/def ::emails ::us/set-of-valid-emails)
(s/def ::create-team-invitations
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::role]
:opt-un [::email ::emails]))
(sv/defmethod ::create-team-invitations
"A rpc call that allow to send a single or multiple invitations to
join the team."
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id team-id email emails role] :as params}]
(db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id team-id)
profile (db/get-by-id conn :profile profile-id)
team (db/get-by-id conn :team team-id)
emails (cond-> (or emails #{}) (string? email) (conj email))]
(run! (partial quotes/check-quote! conn)
(list {::quotes/id ::quotes/invitations-per-team
::quotes/profile-id profile-id
::quotes/team-id (:id team)
::quotes/incr (count emails)}
{::quotes/id ::quotes/profiles-per-team
::quotes/profile-id profile-id
::quotes/team-id (:id team)
::quotes/incr (count emails)}))
(when-not (:is-admin perms)
(ex/raise :type :validation
:code :insufficient-permissions))
;; First check if the current profile is allowed to send emails.
(when-not (eml/allow-send-emails? conn profile)
(ex/raise :type :validation
:code :profile-is-muted
:hint "looks like the profile has reported repeatedly as spam or has permanent bounces"))
(let [cfg (assoc cfg ::conn conn)
invitations (->> emails
(map (fn [email]
{:email (str/lower email)
:team team
:profile profile
:role role}))
(map (partial create-invitation cfg)))]
(with-meta (vec invitations)
{::audit/props {:invitations (count invitations)}})))))
;; --- Mutation: Create Team & Invite Members
(s/def ::emails ::us/set-of-valid-emails)
(s/def ::create-team-with-invitations
(s/merge ::create-team
(s/keys :req-un [::emails ::role])))
(sv/defmethod ::create-team-with-invitations
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id emails role] :as params}]
(db/with-atomic [conn pool]
(let [params (assoc params :profile-id profile-id)
team (create-team conn params)
profile (db/get-by-id conn :profile profile-id)
cfg (assoc cfg ::conn conn)]
;; Create invitations for all provided emails.
(->> emails
(map (fn [email]
{:team team
:profile profile
:email (str/lower email)
:role role}))
(run! (partial create-invitation cfg)))
(run! (partial quotes/check-quote! conn)
(list {::quotes/id ::quotes/teams-per-profile
::quotes/profile-id profile-id}
{::quotes/id ::quotes/invitations-per-team
::quotes/profile-id profile-id
::quotes/team-id (:id team)
::quotes/incr (count emails)}
{::quotes/id ::quotes/profiles-per-team
::quotes/profile-id profile-id
::quotes/team-id (:id team)
::quotes/incr (count emails)}))
(-> team
(vary-meta assoc ::audit/props {:invitations (count emails)})
(rph/with-defer
#(when-let [collector (::audit/collector cfg)]
(audit/submit! collector
{:type "command"
:name "create-team-invitations"
:profile-id profile-id
:props {:emails emails
:role role
:profile-id profile-id
:invitations (count emails)}})))))))
;; --- Query: get-team-invitation-token
(s/def ::get-team-invitation-token
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::email]))
(sv/defmethod ::get-team-invitation-token
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
(check-read-permissions! pool profile-id team-id)
(let [invit (-> (db/get pool :team-invitation
{:team-id team-id
:email-to (str/lower email)})
(update :role keyword))
member (profile/retrieve-profile-data-by-email pool (:email invit))
token (create-invitation-token cfg {:team-id (:team-id invit)
:profile-id profile-id
:valid-until (:valid-until invit)
:role (:role invit)
:member-id (:id member)
:member-email (or (:email member) (:email-to invit))})]
{:token token}))
;; --- Mutation: Update invitation role
(s/def ::update-team-invitation-role
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::email ::role]))
(sv/defmethod ::update-team-invitation-role
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id team-id email role] :as params}]
(db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id team-id)]
(when-not (:is-admin perms)
(ex/raise :type :validation
:code :insufficient-permissions))
(db/update! conn :team-invitation
{:role (name role) :updated-at (dt/now)}
{:team-id team-id :email-to (str/lower email)})
nil)))
;; --- Mutation: Delete invitation
(s/def ::delete-team-invitation
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::email]))
(sv/defmethod ::delete-team-invitation
{::doc/added "1.17"}
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
(db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id team-id)]
(when-not (:is-admin perms)
(ex/raise :type :validation
:code :insufficient-permissions))
(db/delete! conn :team-invitation
{:team-id team-id :email-to (str/lower email)})
nil)))

View file

@ -9,10 +9,14 @@
[app.common.exceptions :as ex]
[app.common.spec :as us]
[app.db :as db]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.rpc :as-alias rpc]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.mutations.teams :as teams]
[app.rpc.helpers :as rph]
[app.rpc.queries.profile :as profile]
[app.rpc.quotes :as quotes]
[app.tokens :as tokens]
[app.tokens.spec.team-invitation :as-alias spec.team-invitation]
[app.util.services :as sv]
@ -25,10 +29,10 @@
(s/def ::verify-token
(s/keys :req-un [::token]
:opt-un [::profile-id]))
:opt [::rpc/profile-id]))
(sv/defmethod ::verify-token
{:auth false
{::rpc/auth false
::doc/added "1.15"}
[{:keys [pool sprops] :as cfg} {:keys [token] :as params}]
(db/with-atomic [conn pool]
@ -46,7 +50,7 @@
{:email email}
{:id profile-id})
(with-meta claims
(rph/with-meta claims
{::audit/name "update-profile-email"
::audit/props {:email email}
::audit/profile-id profile-id}))
@ -66,11 +70,11 @@
{:is-active true}
{:id (:id profile)}))
(with-meta claims
{:transform-response ((:create session) profile-id)
::audit/name "verify-profile-email"
::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)})))
(-> claims
(rph/with-transform (session/create-fn session profile-id))
(rph/with-meta {::audit/name "verify-profile-email"
::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)}))))
(defmethod process-token :auth
[{:keys [conn] :as cfg} _params {:keys [profile-id] :as claims}]
@ -93,6 +97,11 @@
(ex/raise :type :restriction
:code :profile-blocked))
(quotes/check-quote! conn
{::quotes/id ::quotes/profiles-per-team
::quotes/profile-id (:id member)
::quotes/team-id team-id})
;; Insert the invited member to the team
(db/insert! conn :team-profile-rel params {:on-conflict-do-nothing true})
@ -124,10 +133,11 @@
:opt-un [::spec.team-invitation/member-id]))
(defmethod process-token :team-invitation
[{:keys [conn session] :as cfg} {:keys [profile-id token]}
[{:keys [conn session] :as cfg}
{:keys [::rpc/profile-id token]}
{:keys [member-id team-id member-email] :as claims}]
(us/assert ::team-invitation-claims claims)
(us/verify! ::team-invitation-claims claims)
(let [invitation (db/get* conn :team-invitation
{:team-id team-id :email-to member-email})
@ -146,14 +156,13 @@
;; proceed with accepting the invitation and joining the
;; current profile to the invited team.
(let [profile (accept-invitation cfg claims invitation profile)]
(with-meta
(assoc claims :state :created)
{::audit/name "accept-team-invitation"
::audit/props (merge
(audit/profile->props profile)
{:team-id (:team-id claims)
:role (:role claims)})
::audit/profile-id profile-id}))
(-> (assoc claims :state :created)
(rph/with-meta {::audit/name "accept-team-invitation"
::audit/props (merge
(audit/profile->props profile)
{:team-id (:team-id claims)
:role (:role claims)})
::audit/profile-id profile-id})))
(ex/raise :type :validation
:code :invalid-token
@ -169,15 +178,14 @@
{:email member-email})
{:columns [:id :email]})]
(let [profile (accept-invitation cfg claims invitation member)]
(with-meta
(assoc claims :state :created)
{:transform-response ((:create session) (:id profile))
::audit/name "accept-team-invitation"
::audit/props (merge
(audit/profile->props profile)
{:team-id (:team-id claims)
:role (:role claims)})
::audit/profile-id member-id}))
(-> (assoc claims :state :created)
(rph/with-transform (session/create-fn session (:id profile)))
(rph/with-meta {::audit/name "accept-team-invitation"
::audit/props (merge
(audit/profile->props profile)
{:team-id (:team-id claims)
:role (:role claims)})
::audit/profile-id member-id})))
{:invitation-token token
:iss :team-invitation

View file

@ -0,0 +1,89 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.viewer
(:require
[app.common.exceptions :as ex]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.comments :as comments]
[app.rpc.commands.files :as files]
[app.rpc.cond :as-alias cond]
[app.rpc.doc :as-alias doc]
[app.rpc.queries.share-link :as slnk]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
;; --- Query: View Only Bundle
(defn- get-project
[conn id]
(db/get-by-id conn :project id {:columns [:id :name :team-id]}))
(defn- get-bundle
[conn file-id profile-id features]
(let [file (files/get-file conn file-id features)
project (get-project conn (:project-id file))
libs (files/get-file-libraries conn file-id features)
users (comments/get-file-comments-users conn file-id profile-id)
links (->> (db/query conn :share-link {:file-id file-id})
(mapv slnk/decode-share-link-row))
fonts (db/query conn :team-font-variant
{:team-id (:team-id project)
:deleted-at nil})]
{:file file
:users users
:fonts fonts
:project project
:share-links links
:libraries libs}))
(defn- remove-not-allowed-pages
[data allowed]
(-> data
(update :pages (fn [pages] (filterv #(contains? allowed %) pages)))
(update :pages-index select-keys allowed)))
(defn get-view-only-bundle
[conn {:keys [profile-id file-id share-id features] :as params}]
(let [perms (files/get-permissions conn profile-id file-id share-id)
bundle (-> (get-bundle conn file-id profile-id features)
(assoc :permissions perms))]
;; When we have neither profile nor share, we just return a not
;; found response to the user.
(when-not perms
(ex/raise :type :not-found
:code :object-not-found
:hint "object not found"))
(update bundle :file
(fn [file]
(cond-> file
(= :share-link (:type perms))
(update :data remove-not-allowed-pages (:pages perms))
:always
(update :data select-keys [:id :options :pages :pages-index]))))))
(s/def ::get-view-only-bundle
(s/keys :req-un [::files/file-id]
:opt-un [::files/share-id
::files/features]
:opt [::rpc/profile-id]))
(sv/defmethod ::get-view-only-bundle
{::rpc/auth false
::cond/get-object #(files/get-minimal-file %1 (:file-id %2))
::cond/key-fn files/get-file-etag
::cond/reuse-key? true
::doc/added "1.17"}
[{:keys [pool]} {:keys [::rpc/profile-id] :as params}]
(with-open [conn (db/open pool)]
(get-view-only-bundle conn (assoc params :profile-id profile-id))))

View file

@ -0,0 +1,146 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.webhooks
(:require
[app.common.exceptions :as ex]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.http.client :as http]
[app.loggers.webhooks :as webhooks]
[app.rpc :as-alias rpc]
[app.rpc.commands.teams :refer [check-edition-permissions! check-read-permissions!]]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[promesa.core :as p]))
;; --- Mutation: Create Webhook
(s/def ::team-id ::us/uuid)
(s/def ::uri ::us/not-empty-string)
(s/def ::is-active ::us/boolean)
(s/def ::mtype
#{"application/json"
"application/transit+json"})
(s/def ::create-webhook
(s/keys :req [::rpc/profile-id]
:req-un [::team-id ::uri ::mtype]
:opt-un [::is-active]))
;; NOTE: for now the quote is hardcoded, but this needs to be solved in
;; a more universal way for handling object quotes properly.
(def max-hooks-for-team 8)
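;; A possible direction, sketched here only as a hypothetical example
;; (the ::webhooks-per-team quote id does not exist in this diff, and the
;; sketch assumes [app.rpc.quotes :as quotes] is required): route the limit
;; through the generic quotes subsystem that other commands already use,
;; instead of the hardcoded constant above.
(comment
  (quotes/check-quote! pool
                       {:app.rpc.quotes/id :app.rpc.quotes/webhooks-per-team
                        :app.rpc.quotes/profile-id profile-id
                        :app.rpc.quotes/team-id team-id}))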
(defn- validate-webhook!
[cfg whook params]
(letfn [(handle-exception [exception]
(if-let [hint (webhooks/interpret-exception exception)]
(ex/raise :type :validation
:code :webhook-validation
:hint hint)
(ex/raise :type :internal
:code :webhook-validation
:cause exception)))
(handle-response [response]
(when-let [hint (webhooks/interpret-response response)]
(ex/raise :type :validation
:code :webhook-validation
:hint hint)))]
(if (not= (:uri whook) (:uri params))
(->> (http/req! cfg {:method :head
:uri (:uri params)
:timeout (dt/duration "3s")})
(p/hmap (fn [response exception]
(if exception
(handle-exception exception)
(handle-response response)))))
(p/resolved nil))))
(defn- validate-quotes!
[{:keys [::db/pool]} {:keys [team-id]}]
(let [sql ["select count(*) as total from webhook where team_id = ?" team-id]
total (:total (db/exec-one! pool sql))]
(when (>= total max-hooks-for-team)
(ex/raise :type :restriction
:code :webhooks-quote-reached
:hint (str/ffmt "can't create more than % webhooks per team"
max-hooks-for-team)))))
(defn- insert-webhook!
[{:keys [::db/pool]} {:keys [team-id uri mtype is-active] :as params}]
(db/insert! pool :webhook
{:id (uuid/next)
:team-id team-id
:uri uri
:is-active is-active
:mtype mtype}))
(defn- update-webhook!
[{:keys [::db/pool] :as cfg} {:keys [id] :as wook} {:keys [uri mtype is-active] :as params}]
(db/update! pool :webhook
{:uri uri
:is-active is-active
:mtype mtype
:error-code nil
:error-count 0}
{:id id}))
(sv/defmethod ::create-webhook
{::doc/added "1.17"}
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
(check-edition-permissions! pool profile-id team-id)
(validate-quotes! cfg params)
(->> (validate-webhook! cfg nil params)
(p/fmap executor (fn [_] (insert-webhook! cfg params)))))
(s/def ::update-webhook
(s/keys :req-un [::id ::uri ::mtype ::is-active]))
(sv/defmethod ::update-webhook
{::doc/added "1.17"}
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [::rpc/profile-id id] :as params}]
(let [whook (db/get pool :webhook {:id id})]
(check-edition-permissions! pool profile-id (:team-id whook))
(->> (validate-webhook! cfg whook params)
(p/fmap executor (fn [_] (update-webhook! cfg whook params))))))
(s/def ::delete-webhook
(s/keys :req [::rpc/profile-id]
:req-un [::id]))
(sv/defmethod ::delete-webhook
{::doc/added "1.17"}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id]}]
(db/with-atomic [conn pool]
(let [whook (db/get conn :webhook {:id id})]
(check-edition-permissions! conn profile-id (:team-id whook))
(db/delete! conn :webhook {:id id})
nil)))
;; --- Query: Webhooks
(s/def ::team-id ::us/uuid)
(s/def ::get-webhooks
(s/keys :req [::rpc/profile-id]
:req-un [::team-id]))
(def sql:get-webhooks
"select id, uri, mtype, is_active, error_code, error_count
from webhook where team_id = ? order by uri")
(sv/defmethod ::get-webhooks
[{:keys [pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
(with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id team-id)
(db/exec! conn [sql:get-webhooks team-id])))

View file

@ -0,0 +1,67 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.cond
"Conditional loading middleware.
A middleware consists mainly of wrapping an RPC method with
conditional logic. It expects some metadata to be set on the RPC
method that enables this middleware to retrieve the necessary data
for processing the conditional logic:
- `::get-object` => should be a function that retrieves the minimal version
of the object that will be used to calculate the KEY (an etag in terms of
the HTTP protocol).
- `::key-fn` a function used to generate a string representation
of the object. This function can be applied to the object returned by
`get-object`, but also to the RPC return value (in case you don't provide
a precalculated key under the `::key` metadata prop).
- `::reuse-key?` enables reusing the key calculated the first time; useful
when the target object is not retrieved by the RPC (typical when retrieving
dependent objects).
"
(:require
[app.common.logging :as l]
[app.rpc.helpers :as rph]
[app.util.services :as-alias sv]
[promesa.core :as p]
[promesa.exec :as px]
[yetti.response :as yrs]))
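;; A minimal usage sketch (the service name and the get-minimal-object /
;; get-full-object helpers are hypothetical, shown only to illustrate how
;; the metadata described above is declared on an RPC method; a real
;; declaration can be seen in app.rpc.commands.viewer in this same diff):
(comment
  (sv/defmethod ::get-some-object
    {::get-object (fn [cfg params] (get-minimal-object cfg (:id params)))
     ::key-fn :modified-at
     ::reuse-key? false}
    [cfg params]
    (get-full-object cfg params)))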
(def
^{:dynamic true
:doc "Runtime flag for enable/disable conditional processing of RPC methods."}
*enabled* false)
(defn- fmt-key
[s]
(when s
(str "W/\"" s "\"")))
(defn wrap
[{:keys [executor]} f {:keys [::get-object ::key-fn ::reuse-key?] :as mdata}]
(if (and (ifn? get-object) (ifn? key-fn))
(do
(l/debug :hint "instrumenting method" :service (::sv/name mdata))
(fn [cfg {:keys [::key] :as params}]
(if *enabled*
(->> (if (or key reuse-key?)
(->> (px/submit! executor (partial get-object cfg params))
(p/map key-fn)
(p/map fmt-key))
(p/resolved nil))
(p/mapcat (fn [key']
(if (and (some? key)
(= key key'))
(p/resolved (fn [_] (yrs/response 304)))
(->> (f cfg params)
(p/map (fn [result]
(->> (or (and reuse-key? key')
(-> result meta ::key fmt-key)
(-> result key-fn fmt-key))
(rph/with-header result "etag")))))))))
(f cfg params))))
f))

View file

@ -9,6 +9,7 @@
(:require
[app.common.data :as d]
[app.config :as cf]
[app.loggers.webhooks :as-alias webhooks]
[app.rpc :as-alias rpc]
[app.util.services :as sv]
[app.util.template :as tmpl]
@ -35,6 +36,7 @@
:name (d/name name)
:module (-> (:ns mdata) (str/split ".") last)
:auth (:auth mdata true)
:webhook (::webhooks/event? mdata false)
:docs (::sv/docstring mdata)
:deprecated (::deprecated mdata)
:added (::added mdata)
@ -51,6 +53,7 @@
(->> (:queries methods)
(map (partial gen-doc :query))
(sort-by (juxt :module :name)))
:mutation-methods
(->> (:mutations methods)
(map (partial gen-doc :query))

View file

@ -6,11 +6,72 @@
(ns app.rpc.helpers
"General purpose RPC helpers."
(:require [app.common.data.macros :as dm]))
(:refer-clojure :exclude [with-meta])
(:require
[app.common.data.macros :as dm]
[app.http :as-alias http]
[app.rpc :as-alias rpc]))
(defn http-cache
[{:keys [max-age]}]
(fn [_ response]
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
(update response :headers assoc "cache-control" val))))
;; A utility wrapper object for wrapping service responses that do not
;; implement the IObj interface, making it possible to attach metadata
;; to them.
(deftype MetadataWrapper [obj ^:unsynchronized-mutable metadata]
clojure.lang.IDeref
(deref [_] obj)
clojure.lang.IObj
(withMeta [_ meta]
(MetadataWrapper. obj meta))
(meta [_] metadata))
(defn wrap
"Conditionally wrap a value into MetadataWrapper instance. If the
object already implements IObj interface it will be returned as is."
([] (wrap nil))
([o]
(if (instance? clojure.lang.IObj o)
o
(MetadataWrapper. o {})))
([o m]
(MetadataWrapper. o m)))
(defn wrapped?
[o]
(instance? MetadataWrapper o))
(defn unwrap
[o]
(if (wrapped? o) @o o))
(defn with-header
"Add a http header to the RPC result."
[mdw key val]
(vary-meta mdw update ::http/headers assoc key val))
(defn with-transform
"Adds a http response transform to the RPC result."
[mdw transform-fn]
(vary-meta mdw update ::rpc/response-transform-fns conj transform-fn))
(defn with-defer
"Defer execution of the function until request is finished."
[mdw hook-fn]
(vary-meta mdw update ::rpc/before-complete-fns conj hook-fn))
(defn with-meta
[mdw mdata]
(vary-meta mdw merge mdata))
(defn assoc-meta
[mdw k v]
(vary-meta mdw assoc k v))
(defn with-http-cache
[mdw max-age]
(vary-meta mdw update ::rpc/response-transform-fns conj
(fn [_ response]
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
(update response :headers assoc "cache-control" val)))))
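;; A small usage sketch (hypothetical values; it mirrors how the RPC
;; commands elsewhere in this diff compose these helpers): wrap a nil
;; result so metadata can be attached, then hang audit props, an etag
;; header and a cache directive on it.
(comment
  (-> (wrap)
      (with-meta {:app.loggers.audit/props {:id some-id}})
      (with-header "etag" "W/\"abc\"")
      ;; an integer max-age is interpreted as milliseconds by with-http-cache
      (with-http-cache 3600000)))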

View file

@ -1,123 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.mutations.comments
(:require
[app.common.exceptions :as ex]
[app.common.spec :as us]
[app.db :as db]
[app.rpc.commands.comments :as cmd.comments]
[app.rpc.doc :as-alias doc]
[app.rpc.queries.files :as files]
[app.rpc.retry :as retry]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
;; --- Mutation: Create Comment Thread
(s/def ::create-comment-thread ::cmd.comments/create-comment-thread)
(sv/defmethod ::create-comment-thread
{::retry/max-retries 3
::retry/matches retry/conflict-db-insert?
::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
(db/with-atomic [conn pool]
(files/check-comment-permissions! conn profile-id file-id share-id)
(cmd.comments/create-comment-thread conn params)))
;; --- Mutation: Update Comment Thread Status
(s/def ::id ::us/uuid)
(s/def ::share-id (s/nilable ::us/uuid))
(s/def ::update-comment-thread-status ::cmd.comments/update-comment-thread-status)
(sv/defmethod ::update-comment-thread-status
{::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id share-id] :as params}]
(db/with-atomic [conn pool]
(let [cthr (db/get-by-id conn :comment-thread id {:for-update true})]
(when-not cthr (ex/raise :type :not-found))
(files/check-comment-permissions! conn profile-id (:file-id cthr) share-id)
(cmd.comments/upsert-comment-thread-status! conn profile-id (:id cthr)))))
;; --- Mutation: Update Comment Thread
(s/def ::update-comment-thread ::cmd.comments/update-comment-thread)
(sv/defmethod ::update-comment-thread
{::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id is-resolved share-id] :as params}]
(db/with-atomic [conn pool]
(let [thread (db/get-by-id conn :comment-thread id {:for-update true})]
(when-not thread
(ex/raise :type :not-found))
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
(db/update! conn :comment-thread
{:is-resolved is-resolved}
{:id id})
nil)))
;; --- Mutation: Add Comment
(s/def ::add-comment ::cmd.comments/create-comment)
(sv/defmethod ::add-comment
{::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.comments/create-comment conn params)))
;; --- Mutation: Update Comment
(s/def ::update-comment ::cmd.comments/update-comment)
(sv/defmethod ::update-comment
{::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.comments/update-comment conn params)))
;; --- Mutation: Delete Comment Thread
(s/def ::delete-comment-thread ::cmd.comments/delete-comment-thread)
(sv/defmethod ::delete-comment-thread
{::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
(db/with-atomic [conn pool]
(let [thread (db/get-by-id conn :comment-thread id {:for-update true})]
(when-not (= (:owner-id thread) profile-id)
(ex/raise :type :validation :code :not-allowed))
(db/delete! conn :comment-thread {:id id})
nil)))
;; --- Mutation: Delete comment
(s/def ::delete-comment ::cmd.comments/delete-comment)
(sv/defmethod ::delete-comment
{::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
(db/with-atomic [conn pool]
(let [comment (db/get-by-id conn :comment id {:for-update true})]
(when-not (= (:owner-id comment) profile-id)
(ex/raise :type :validation :code :not-allowed))
(db/delete! conn :comment {:id id}))))

View file

@ -6,571 +6,232 @@
(ns app.rpc.mutations.files
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.pages :as cp]
[app.common.pages.migrations :as pmg]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.loggers.audit :as audit]
[app.metrics :as mtx]
[app.msgbus :as mbus]
[app.rpc.permissions :as perms]
[app.rpc.queries.files :as files]
[app.rpc.climit :as-alias climit]
[app.rpc.commands.files :as cmd.files]
[app.rpc.commands.files.create :as cmd.files.create]
[app.rpc.commands.files.temp :as cmd.files.temp]
[app.rpc.commands.files.update :as cmd.files.update]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.rpc.queries.projects :as proj]
[app.rpc.semaphore :as rsem]
[app.storage.impl :as simpl]
[app.util.blob :as blob]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[promesa.core :as p]))
(declare create-file)
(declare retrieve-team-id)
;; --- Helpers & Specs
(s/def ::frame-id ::us/uuid)
(s/def ::file-id ::us/uuid)
(s/def ::id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::profile-id ::us/uuid)
(s/def ::project-id ::us/uuid)
(s/def ::url ::us/url)
[clojure.spec.alpha :as s]))
;; --- Mutation: Create File
(s/def ::is-shared ::us/boolean)
(s/def ::create-file
(s/keys :req-un [::profile-id ::name ::project-id]
:opt-un [::id ::is-shared ::components-v2]))
(s/def ::create-file ::cmd.files.create/create-file)
(sv/defmethod ::create-file
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
{::doc/added "1.0"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id project-id features components-v2] :as params}]
(db/with-atomic [conn pool]
(let [team-id (retrieve-team-id conn project-id)]
(proj/check-edition-permissions! conn profile-id project-id)
(with-meta
(create-file conn params)
{::audit/props {:team-id team-id}}))))
(proj/check-edition-permissions! conn profile-id project-id)
(let [team-id (cmd.files/get-team-id conn project-id)
features (cond-> (or features #{})
;; BACKWARD COMPATIBILITY with the components-v2 param
components-v2 (conj "components/v2"))
params (assoc params :features features)]
(-> (cmd.files.create/create-file conn params)
(vary-meta assoc ::audit/props {:team-id team-id})))))
(defn create-file-role
[conn {:keys [file-id profile-id role]}]
(let [params {:file-id file-id
:profile-id profile-id}]
(->> (perms/assign-role-flags params role)
(db/insert! conn :file-profile-rel))))
(defn create-file
[conn {:keys [id name project-id is-shared data revn
modified-at deleted-at ignore-sync-until
components-v2]
:or {is-shared false revn 0}
:as params}]
(let [id (or id (:id data) (uuid/next))
data (or data (ctf/make-file-data id components-v2))
file (db/insert! conn :file
(d/without-nils
{:id id
:project-id project-id
:name name
:revn revn
:is-shared is-shared
:data (blob/encode data)
:ignore-sync-until ignore-sync-until
:modified-at modified-at
:deleted-at deleted-at}))]
(->> (assoc params :file-id id :role :owner)
(create-file-role conn))
(assoc file :data data)))
;; --- Mutation: Rename File
(declare rename-file)
(s/def ::rename-file
(s/keys :req-un [::profile-id ::name ::id]))
(s/def ::rename-file ::cmd.files/rename-file)
(sv/defmethod ::rename-file
{::doc/added "1.0"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(rename-file conn params)))
(defn- rename-file
[conn {:keys [id name] :as params}]
(db/update! conn :file
{:name name}
{:id id}))
(cmd.files/check-edition-permissions! conn profile-id id)
(cmd.files/rename-file conn params)))
;; --- Mutation: Set File shared
(declare set-file-shared)
(declare unlink-files)
(declare absorb-library)
(s/def ::set-file-shared
(s/keys :req-un [::profile-id ::id ::is-shared]))
(s/def ::set-file-shared ::cmd.files/set-file-shared)
(sv/defmethod ::set-file-shared
{::doc/added "1.2"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [id profile-id is-shared] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(cmd.files/check-edition-permissions! conn profile-id id)
(when-not is-shared
(absorb-library conn params)
(unlink-files conn params))
(set-file-shared conn params)))
(defn- unlink-files
[conn {:keys [id] :as params}]
(db/delete! conn :file-library-rel {:library-file-id id}))
(defn- set-file-shared
[conn {:keys [id is-shared] :as params}]
(db/update! conn :file
{:is-shared is-shared}
{:id id}))
(cmd.files/absorb-library conn params)
(cmd.files/unlink-files conn params))
(cmd.files/set-file-shared conn params)))
;; --- Mutation: Delete File
(declare mark-file-deleted)
(s/def ::delete-file
(s/keys :req-un [::id ::profile-id]))
(s/def ::delete-file ::cmd.files/delete-file)
(sv/defmethod ::delete-file
{::doc/added "1.0"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(absorb-library conn params)
(mark-file-deleted conn params)))
(defn mark-file-deleted
[conn {:keys [id] :as params}]
(db/update! conn :file
{:deleted-at (dt/now)}
{:id id})
nil)
(defn absorb-library
"Find all files using a shared library, and absorb all library assets
into the file local libraries"
[conn {:keys [id] :as params}]
(let [library (db/get-by-id conn :file id)]
(when (:is-shared library)
(let [ldata (-> library files/decode-row pmg/migrate-file :data)]
(->> (db/query conn :file-library-rel {:library-file-id id})
(keep (fn [{:keys [file-id]}]
(some->> (db/get-by-id conn :file file-id {:check-not-found false})
(files/decode-row)
(pmg/migrate-file))))
(run! (fn [{:keys [id data revn] :as file}]
(let [data (ctf/absorb-assets data ldata)]
(db/update! conn :file
{:revn (inc revn)
:data (blob/encode data)
:modified-at (dt/now)}
{:id id})))))))))
(cmd.files/check-edition-permissions! conn profile-id id)
(cmd.files/absorb-library conn params)
(cmd.files/mark-file-deleted conn params)
nil))
;; --- Mutation: Link file to library
(declare link-file-to-library)
(s/def ::link-file-to-library
(s/keys :req-un [::profile-id ::file-id ::library-id]))
(s/def ::link-file-to-library ::cmd.files/link-file-to-library)
(sv/defmethod ::link-file-to-library
{::doc/added "1.3"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id library-id] :as params}]
(when (= file-id library-id)
(ex/raise :type :validation
:code :invalid-library
:hint "A file cannot be linked to itself"))
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(files/check-edition-permissions! conn profile-id library-id)
(link-file-to-library conn params)))
(def sql:link-file-to-library
"insert into file_library_rel (file_id, library_file_id)
values (?, ?)
on conflict do nothing;")
(defn- link-file-to-library
[conn {:keys [file-id library-id] :as params}]
(db/exec-one! conn [sql:link-file-to-library file-id library-id]))
(cmd.files/check-edition-permissions! conn profile-id file-id)
(cmd.files/check-edition-permissions! conn profile-id library-id)
(cmd.files/link-file-to-library conn params)))
;; --- Mutation: Unlink file from library
(declare unlink-file-from-library)
(s/def ::unlink-file-from-library
(s/keys :req-un [::profile-id ::file-id ::library-id]))
(s/def ::unlink-file-from-library ::cmd.files/unlink-file-from-library)
(sv/defmethod ::unlink-file-from-library
{::doc/added "1.3"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(unlink-file-from-library conn params)))
(defn- unlink-file-from-library
[conn {:keys [file-id library-id] :as params}]
(db/delete! conn :file-library-rel
{:file-id file-id
:library-file-id library-id}))
(cmd.files/check-edition-permissions! conn profile-id file-id)
(cmd.files/unlink-file-from-library conn params)))
;; --- Mutation: Update synchronization status of a link
(declare update-sync)
(s/def ::update-sync
(s/keys :req-un [::profile-id ::file-id ::library-id]))
(s/def ::update-sync ::cmd.files/update-file-library-sync-status)
(sv/defmethod ::update-sync
{::doc/added "1.10"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(update-sync conn params)))
(cmd.files/check-edition-permissions! conn profile-id file-id)
(cmd.files/update-sync conn params)))
(defn- update-sync
[conn {:keys [file-id library-id] :as params}]
(db/update! conn :file-library-rel
{:synced-at (dt/now)}
{:file-id file-id
:library-file-id library-id}))
;; --- Mutation: Ignore updates in linked files
(declare ignore-sync)
(s/def ::ignore-sync
(s/keys :req-un [::profile-id ::file-id ::date]))
(s/def ::ignore-sync ::cmd.files/ignore-file-library-sync-status)
(sv/defmethod ::ignore-sync
{::doc/added "1.10"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(ignore-sync conn params)))
(defn- ignore-sync
[conn {:keys [file-id date] :as params}]
(db/update! conn :file
{:ignore-sync-until date}
{:id file-id}))
(cmd.files/check-edition-permissions! conn profile-id file-id)
(cmd.files/ignore-sync conn params)))
;; --- MUTATION: update-file
;; A generic, changes-based (granular) file update method.
;; File changes that affect the library must be notified
;; to all clients using it.
(defn library-change?
[change]
(or (#{:add-color :mod-color :del-color
:add-media :mod-media :del-media
:add-component :mod-component :del-component
:add-typography :mod-typography :del-typography} (:type change))
(and (#{:add-obj :mod-obj :del-obj
:reg-objects :mov-objects} (:type change))
(some? (:component-id change)))))
(declare insert-change)
(declare retrieve-lagged-changes)
(declare send-notifications)
(declare update-file)
(s/def ::changes
(s/coll-of map? :kind vector?))
(s/def ::hint-origin ::us/keyword)
(s/def ::hint-events
(s/every ::us/keyword :kind vector?))
(s/def ::change-with-metadata
(s/keys :req-un [::changes]
:opt-un [::hint-origin
::hint-events]))
(s/def ::changes-with-metadata
(s/every ::change-with-metadata :kind vector?))
(s/def ::session-id ::us/uuid)
(s/def ::revn ::us/integer)
(s/def ::components-v2 ::us/boolean)
(s/def ::update-file
(s/and
(s/keys :req-un [::id ::session-id ::profile-id ::revn]
:opt-un [::changes ::changes-with-metadata ::components-v2])
(fn [o]
(or (contains? o :changes)
(contains? o :changes-with-metadata)))))
(s/and ::cmd.files.update/update-file
(s/keys :opt-un [::components-v2])))
(sv/defmethod ::update-file
{::rsem/queue :update-file}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
{::climit/queue :update-file
::climit/key-fn :id
::doc/added "1.0"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [id profile-id features components-v2] :as params}]
(db/with-atomic [conn pool]
(db/xact-lock! conn id)
(let [{:keys [id] :as file} (db/get-by-id conn :file id {:for-key-share true})
team-id (retrieve-team-id conn (:project-id file))]
(files/check-edition-permissions! conn profile-id id)
(with-meta
(update-file (assoc cfg :conn conn)
(assoc params :file file))
{::audit/props {:project-id (:project-id file)
:team-id team-id}}))))
(cmd.files/check-edition-permissions! conn profile-id id)
(defn- take-snapshot?
"Defines the rule when file `data` snapshot should be saved."
[{:keys [revn modified-at] :as file}]
(let [freq (or (cf/get :file-change-snapshot-every) 20)
timeout (or (cf/get :file-change-snapshot-timeout)
(dt/duration {:hours 1}))]
(or (= 1 freq)
(zero? (mod revn freq))
(> (inst-ms (dt/diff modified-at (dt/now)))
(inst-ms timeout)))))
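;; For example, with the default configuration above a full copy of
;; `data` is stored on every 20th revision, or whenever more than one
;; hour has passed since the file was last modified; setting the
;; snapshot frequency to 1 stores a snapshot on every change.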
(let [;; BACKWARD COMPATIBILITY with the components-v2 parameter
features (cond-> (or features #{})
components-v2 (conj "components/v2"))
tpoint (dt/tpoint)
params (assoc params :features features)
cfg (assoc cfg :conn conn)]
(defn- delete-from-storage
[{:keys [storage] :as cfg} file]
(p/do
(when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/del-object backend file))))
(defn- update-file
[{:keys [conn metrics] :as cfg}
{:keys [file changes changes-with-metadata session-id profile-id components-v2] :as params}]
(when (> (:revn params)
(:revn file))
(ex/raise :type :validation
:code :revn-conflict
:hint "The incoming revision number is greater that stored version."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
(let [changes (if changes-with-metadata
(mapcat :changes changes-with-metadata)
changes)
changes (vec changes)
;; Trace the number of changes processed
_ (mtx/run! metrics {:id :update-file-changes :inc (count changes)})
ts (dt/now)
file (-> file
(update :revn inc)
(update :data (fn [data]
;; Trace the byte length of the processed data
(mtx/run! metrics {:id :update-file-bytes-processed :inc (alength data)})
(cond-> data
:always
(-> (blob/decode)
(assoc :id (:id file))
(pmg/migrate-data))
components-v2
(ctf/migrate-to-components-v2)
:always
(-> (cp/process-changes changes)
(blob/encode))))))]
;; Insert change to the xlog
(db/insert! conn :file-change
{:id (uuid/next)
:session-id session-id
:profile-id profile-id
:created-at ts
:file-id (:id file)
:revn (:revn file)
:data (when (take-snapshot? file)
(:data file))
:changes (blob/encode changes)})
;; Update file
(db/update! conn :file
{:revn (:revn file)
:data (:data file)
:data-backend nil
:modified-at ts
:has-media-trimmed false}
{:id (:id file)})
;; We need to delete the data from external storage backend
(when-not (nil? (:data-backend file))
@(delete-from-storage cfg file))
(db/update! conn :project
{:modified-at ts}
{:id (:project-id file)})
(let [params (assoc params :file file :changes changes)]
;; Send asynchronous notifications
(send-notifications cfg params)
;; Retrieve and return lagged data
(retrieve-lagged-changes conn params))))
(def ^:private
sql:lagged-changes
"select s.id, s.revn, s.file_id,
s.session_id, s.changes
from file_change as s
where s.file_id = ?
and s.revn > ?
order by s.created_at asc")
(defn- retrieve-lagged-changes
[conn params]
(->> (db/exec! conn [sql:lagged-changes (:id params) (:revn params)])
(into [] (comp (map files/decode-row)
(map (fn [row]
(cond-> row
(= (:revn row) (:revn (:file params)))
(assoc :changes []))))))))
(defn- send-notifications
[{:keys [conn] :as cfg} {:keys [file changes session-id] :as params}]
(let [lchanges (filter library-change? changes)
msgbus (:msgbus cfg)]
;; Asynchronously publish message to the msgbus
(mbus/pub! msgbus
:topic (:id file)
:message {:type :file-change
:profile-id (:profile-id params)
:file-id (:id file)
:session-id (:session-id params)
:revn (:revn file)
:changes changes})
(when (and (:is-shared file) (seq lchanges))
(let [team-id (retrieve-team-id conn (:project-id file))]
;; Asynchronously publish message to the msgbus
(mbus/pub! msgbus
:topic team-id
:message {:type :library-change
:profile-id (:profile-id params)
:file-id (:id file)
:session-id session-id
:revn (:revn file)
:modified-at (dt/now)
:changes lchanges})))))
(defn- retrieve-team-id
[conn project-id]
(:team-id (db/get-by-id conn :project project-id {:columns [:team-id]})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TEMPORARY FILES (behaves differently)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::create-temp-file ::create-file)
(sv/defmethod ::create-temp-file
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id project-id)
(create-file conn (assoc params :deleted-at (dt/in-future {:days 1})))))
(s/def ::update-temp-file
(s/keys :req-un [::changes ::revn ::session-id ::id]))
(sv/defmethod ::update-temp-file
[{:keys [pool] :as cfg} {:keys [profile-id session-id id revn changes] :as params}]
(db/with-atomic [conn pool]
(db/insert! conn :file-change
{:id (uuid/next)
:session-id session-id
:profile-id profile-id
:created-at (dt/now)
:file-id id
:revn revn
:data nil
:changes (blob/encode changes)})
nil))
(s/def ::persist-temp-file
(s/keys :req-un [::id ::profile-id]))
(sv/defmethod ::persist-temp-file
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id)
(let [file (db/get-by-id conn :file id)
revs (db/query conn :file-change
{:file-id id}
{:order-by [[:revn :asc]]})
revn (count revs)]
(when (nil? (:deleted-at file))
(ex/raise :type :validation
:code :cant-persist-already-persisted-file))
(loop [revs (seq revs)
data (blob/decode (:data file))]
(if-let [rev (first revs)]
(recur (rest revs)
(->> rev :changes blob/decode (cp/process-changes data)))
(db/update! conn :file
{:deleted-at nil
:revn revn
:data (blob/encode data)}
{:id id})))
nil)))
(-> (cmd.files.update/update-file cfg params)
(rph/with-defer #(let [elapsed (tpoint)]
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))
;; --- Mutation: upsert object thumbnail
(def sql:upsert-object-thumbnail
"insert into file_object_thumbnail(file_id, object_id, data)
values (?, ?, ?)
on conflict(file_id, object_id) do
update set data = ?;")
(s/def ::data (s/nilable ::us/string))
(s/def ::object-id ::us/string)
(s/def ::upsert-file-object-thumbnail
(s/keys :req-un [::profile-id ::file-id ::object-id ::data]))
(s/def ::upsert-file-object-thumbnail ::cmd.files/upsert-file-object-thumbnail)
(sv/defmethod ::upsert-file-object-thumbnail
[{:keys [pool] :as cfg} {:keys [profile-id file-id object-id data]}]
{::doc/added "1.13"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(if data
(db/exec-one! conn [sql:upsert-object-thumbnail file-id object-id data data])
(db/delete! conn :file-object-thumbnail {:file-id file-id :object-id object-id}))
(cmd.files/check-edition-permissions! conn profile-id file-id)
(cmd.files/upsert-file-object-thumbnail! conn params)
nil))
;; --- Mutation: upsert file thumbnail
(def sql:upsert-file-thumbnail
"insert into file_thumbnail (file_id, revn, data, props)
values (?, ?, ?, ?::jsonb)
on conflict(file_id, revn) do
update set data = ?, props=?, updated_at=now();")
(s/def ::revn ::us/integer)
(s/def ::props map?)
(s/def ::upsert-file-thumbnail
(s/keys :req-un [::profile-id ::file-id ::revn ::data ::props]))
(s/def ::upsert-file-thumbnail ::cmd.files/upsert-file-thumbnail)
(sv/defmethod ::upsert-file-thumbnail
"Creates or updates the file thumbnail. Mainly used for paint the
grid thumbnals."
[{:keys [pool] :as cfg} {:keys [profile-id file-id revn data props]}]
grid thumbnails."
{::doc/added "1.13"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id file-id)
(let [props (db/tjson (or props {}))]
(db/exec-one! conn [sql:upsert-file-thumbnail
file-id revn data props data props])
nil)))
(cmd.files/check-edition-permissions! conn profile-id file-id)
(cmd.files/upsert-file-thumbnail conn params)
nil))
;; --- MUTATION COMMAND: create-temp-file
(s/def ::create-temp-file ::cmd.files.temp/create-temp-file)
(sv/defmethod ::create-temp-file
{::doc/added "1.7"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id project-id)
(cmd.files.create/create-file conn (assoc params :deleted-at (dt/in-future {:days 1})))))
;; --- MUTATION COMMAND: update-temp-file
(s/def ::update-temp-file ::cmd.files.temp/update-temp-file)
(sv/defmethod ::update-temp-file
{::doc/added "1.7"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.files.temp/update-temp-file conn params)
nil))
;; --- MUTATION COMMAND: persist-temp-file
(s/def ::persist-temp-file ::cmd.files.temp/persist-temp-file)
(sv/defmethod ::persist-temp-file
{::doc/added "1.7"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(cmd.files/check-edition-permissions! conn profile-id id)
(cmd.files.temp/persist-temp-file conn params)))

View file

@ -11,15 +11,20 @@
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.media :as media]
[app.rpc.climit :as-alias climit]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.queries.teams :as teams]
[app.rpc.semaphore :as rsem]
[app.rpc.helpers :as rph]
[app.rpc.quotes :as quotes]
[app.storage :as sto]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[promesa.core :as p]))
[promesa.core :as p]
[promesa.exec :as px]))
(declare create-font-variant)
@ -40,21 +45,26 @@
::font-id ::font-family ::font-weight ::font-style]))
(sv/defmethod ::create-font-variant
{::doc/added "1.3"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [team-id profile-id] :as params}]
(let [cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id team-id)
(quotes/check-quote! pool {::quotes/id ::quotes/font-variants-per-team
::quotes/profile-id profile-id
::quotes/team-id team-id})
(create-font-variant cfg params)))
(defn create-font-variant
[{:keys [storage pool executor semaphores] :as cfg} {:keys [data] :as params}]
[{:keys [storage pool executor climit] :as cfg} {:keys [data] :as params}]
(letfn [(generate-fonts [data]
(rsem/with-dispatch (:process-font semaphores)
(climit/with-dispatch (:process-font climit)
(media/run {:cmd :generate-fonts :input data})))
;; Function responsible for calculating the cryptographic hash of
;; the provided data.
(calculate-hash [data]
(rsem/with-dispatch (:process-font semaphores)
(px/with-dispatch executor
(sto/calculate-hash data)))
(validate-data [data]
@ -103,28 +113,34 @@
:ttf-file-id (:id ttf)}))
]
(-> (generate-fonts data)
(p/then validate-data)
(p/then persist-fonts executor)
(p/then insert-into-db executor))))
(->> (generate-fonts data)
(p/fmap validate-data)
(p/mcat executor persist-fonts)
(p/fmap executor insert-into-db)
(p/fmap (fn [result]
(let [params (update params :data (comp vec keys))]
(rph/with-meta result {::audit/replace-props params})))))))
;; --- UPDATE FONT FAMILY
(s/def ::update-font
(s/keys :req-un [::profile-id ::team-id ::id ::name]))
(def sql:update-font
"update team_font_variant
set font_family = ?
where team_id = ?
and font_id = ?")
(sv/defmethod ::update-font
{::doc/added "1.3"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [team-id profile-id id name] :as params}]
(db/with-atomic [conn pool]
(teams/check-edition-permissions! conn profile-id team-id)
(db/exec-one! conn [sql:update-font name team-id id])
nil))
(rph/with-meta
(db/update! conn :team-font-variant
{:font-family name}
{:font-id id
:team-id team-id})
{::audit/replace-props {:id id
:name name
:team-id team-id
:profile-id profile-id}})))
;; --- DELETE FONT
@ -132,14 +148,19 @@
(s/keys :req-un [::profile-id ::team-id ::id]))
(sv/defmethod ::delete-font
{::doc/added "1.3"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [id team-id profile-id] :as params}]
(db/with-atomic [conn pool]
(teams/check-edition-permissions! conn profile-id team-id)
(db/update! conn :team-font-variant
{:deleted-at (dt/now)}
{:font-id id :team-id team-id})
nil))
(let [font (db/update! conn :team-font-variant
{:deleted-at (dt/now)}
{:font-id id :team-id team-id})]
(rph/with-meta (rph/wrap)
{::audit/props {:id id
:team-id team-id
:name (:font-family font)
:profile-id profile-id}}))))
;; --- DELETE FONT VARIANT
@ -147,12 +168,14 @@
(s/keys :req-un [::profile-id ::team-id ::id]))
(sv/defmethod ::delete-font-variant
{::doc/added "1.3"}
{::doc/added "1.3"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [id team-id profile-id] :as params}]
(db/with-atomic [conn pool]
(teams/check-edition-permissions! conn profile-id team-id)
(db/update! conn :team-font-variant
{:deleted-at (dt/now)}
{:id id :team-id team-id})
nil))
(let [variant (db/update! conn :team-font-variant
{:deleted-at (dt/now)}
{:id id :team-id team-id})]
(rph/with-meta (rph/wrap)
{::audit/props {:font-family (:font-family variant)
:font-id (:font-id variant)}}))))

View file

@ -1,58 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.mutations.management
"Move & Duplicate RPC methods for files and projects."
(:require
[app.db :as db]
[app.rpc.commands.management :as cmd.mgm]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
[clojure.spec.alpha :as s]))
;; --- MUTATION: Duplicate File
(s/def ::duplicate-file ::cmd.mgm/duplicate-file)
(sv/defmethod ::duplicate-file
{::doc/added "1.2"
::doc/deprecated "1.16"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.mgm/duplicate-file conn params)))
;; --- MUTATION: Duplicate Project
(s/def ::duplicate-project ::cmd.mgm/duplicate-project)
(sv/defmethod ::duplicate-project
{::doc/added "1.2"
::doc/deprecated "1.16"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.mgm/duplicate-project conn params)))
;; --- MUTATION: Move file
(s/def ::move-files ::cmd.mgm/move-files)
(sv/defmethod ::move-files
{::doc/added "1.2"
::doc/deprecated "1.16"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.mgm/move-files conn params)))
;; --- MUTATION: Move project
(s/def ::move-project ::cmd.mgm/move-project)
(sv/defmethod ::move-project
{::doc/added "1.2"
::doc/deprecated "1.16"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(cmd.mgm/move-project conn params)))

View file

@ -6,277 +6,49 @@
(ns app.rpc.mutations.media
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.media :as media]
[app.rpc.queries.teams :as teams]
[app.rpc.semaphore :as rsem]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.rpc.commands.files :as files]
[app.rpc.commands.media :as cmd.media]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.core :as p]))
(def default-max-file-size (* 1024 1024 10)) ; 10 MiB
(def thumbnail-options
{:width 100
:height 100
:quality 85
:format :jpeg})
(s/def ::id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::profile-id ::us/uuid)
(s/def ::file-id ::us/uuid)
(s/def ::team-id ::us/uuid)
[clojure.spec.alpha :as s]))
;; --- Create File Media object (upload)
(declare create-file-media-object)
(declare select-file)
(s/def ::content ::media/upload)
(s/def ::is-local ::us/boolean)
(s/def ::upload-file-media-object
(s/keys :req-un [::profile-id ::file-id ::is-local ::name ::content]
:opt-un [::id]))
(s/def ::upload-file-media-object ::cmd.media/upload-file-media-object)
(sv/defmethod ::upload-file-media-object
{::doc/added "1.2"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id content] :as params}]
(let [file (select-file pool file-id)
cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id (:team-id file))
(let [cfg (update cfg :storage media/configure-assets-storage)]
(files/check-edition-permissions! pool profile-id file-id)
(media/validate-media-type! content)
(when (> (:size content) (cf/get :media-max-file-size default-max-file-size))
(ex/raise :type :restriction
:code :media-max-file-size-reached
:hint (str/ffmt "the uploaded file size % is greater than the maximum %"
(:size content)
default-max-file-size)))
(create-file-media-object cfg params)))
(defn- big-enough-for-thumbnail?
"Checks if the provided image info is big enough for
create a separate thumbnail storage object."
[info]
(or (> (:width info) (:width thumbnail-options))
(> (:height info) (:height thumbnail-options))))
(defn- svg-image?
[info]
(= (:mtype info) "image/svg+xml"))
;; NOTE: we use `on conflict do update` instead of `do nothing`
;; because postgresql does not return anything if no update is
;; performed; the `do update` does the trick.
(def sql:create-file-media-object
"insert into file_media_object (id, file_id, is_local, name, media_id, thumbnail_id, width, height, mtype)
values (?, ?, ?, ?, ?, ?, ?, ?, ?)
on conflict (id) do update set created_at=file_media_object.created_at
returning *")
;; NOTE: the following function executes without a transaction; this
;; means that if something fails in the middle of this function, it
;; will probably leave leaked/unreferenced objects in the database and
;; probably in the storage layer. To handle possible object leakage,
;; we create all media objects marked as touched; this ensures that if
;; something fails, all leaked (already created) storage objects will
;; eventually be marked as deleted by the touched-gc task.
;;
;; The touched-gc task performs a periodic analysis of all touched
;; storage objects and checks their references. This is the reason why
;; the `reference` metadata exists: it indicates the name of the table
;; which holds the reference to the storage object (a kind of inverse,
;; soft referential integrity).
(defn create-file-media-object
[{:keys [storage pool semaphores] :as cfg}
{:keys [id file-id is-local name content] :as params}]
(letfn [;; Function responsible for retrieving the file information; as
;; it is a synchronous operation it should be wrapped in the
;; with-dispatch macro.
(get-info [content]
(rsem/with-dispatch (:process-image semaphores)
(media/run {:cmd :info :input content})))
;; Function responsible for calculating the cryptographic hash of
;; the provided data.
(calculate-hash [data]
(rsem/with-dispatch (:process-image semaphores)
(sto/calculate-hash data)))
;; Function responsible for generating the thumbnail. As it is a synchronous
;; operation, it should be wrapped in the with-dispatch macro.
(generate-thumbnail [info]
(rsem/with-dispatch (:process-image semaphores)
(media/run (assoc thumbnail-options
:cmd :generic-thumbnail
:input info))))
(create-thumbnail [info]
(when (and (not (svg-image? info))
(big-enough-for-thumbnail? info))
(p/let [thumb (generate-thumbnail info)
hash (calculate-hash (:data thumb))
content (-> (sto/content (:data thumb) (:size thumb))
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype thumb)
:bucket "file-media-object"}))))
(create-image [info]
(p/let [data (:path info)
hash (calculate-hash data)
content (-> (sto/content data)
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype info)
:bucket "file-media-object"})))
(insert-into-database [info image thumb]
(db/exec-one! pool [sql:create-file-media-object
(or id (uuid/next))
file-id is-local name
(:id image)
(:id thumb)
(:width info)
(:height info)
(:mtype info)]))]
(p/let [info (get-info content)
thumb (create-thumbnail info)
image (create-image info)]
(insert-into-database info image thumb))))
(cmd.media/validate-content-size! content)
(cmd.media/create-file-media-object cfg params)))
;; --- Create File Media Object (from URL)
(declare ^:private create-file-media-object-from-url)
(s/def ::create-file-media-object-from-url
(s/keys :req-un [::profile-id ::file-id ::is-local ::url]
:opt-un [::id ::name]))
(s/def ::create-file-media-object-from-url ::cmd.media/create-file-media-object-from-url)
(sv/defmethod ::create-file-media-object-from-url
{::doc/added "1.3"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(let [file (select-file pool file-id)
cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id (:team-id file))
(create-file-media-object-from-url cfg params)))
(defn- create-file-media-object-from-url
[{:keys [http-client] :as cfg} {:keys [url name] :as params}]
(letfn [(parse-and-validate-size [headers]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to resource with unknown size"))
(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))
{:size size
:mtype mtype
:format format}))
(download-media [uri]
(-> (http-client {:method :get :uri uri} {:response-type :input-stream})
(p/then process-response)))
(process-response [{:keys [body headers] :as response}]
(let [{:keys [size mtype]} (parse-and-validate-size headers)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write-to-file! body path :size size)]
(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))
{:filename "tempfile"
:size size
:path path
:mtype mtype}))]
(p/let [content (download-media url)]
(->> (merge params {:content content :name (or name (:filename content))})
(create-file-media-object cfg)))))
(let [cfg (update cfg :storage media/configure-assets-storage)]
(files/check-edition-permissions! pool profile-id file-id)
(#'cmd.media/create-file-media-object-from-url cfg params)))
;; --- Clone File Media object (Upload and create from url)
(declare clone-file-media-object)
(s/def ::clone-file-media-object
(s/keys :req-un [::profile-id ::file-id ::is-local ::id]))
(s/def ::clone-file-media-object ::cmd.media/clone-file-media-object)
(sv/defmethod ::clone-file-media-object
{::doc/added "1.2"
::doc/deprecated "1.17"}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool]
(let [file (select-file conn file-id)]
(teams/check-edition-permissions! conn profile-id (:team-id file))
(-> (assoc cfg :conn conn)
(clone-file-media-object params)))))
(defn clone-file-media-object
[{:keys [conn] :as cfg} {:keys [id file-id is-local]}]
(let [mobj (db/get-by-id conn :file-media-object id)]
(db/insert! conn :file-media-object
{:id (uuid/next)
:file-id file-id
:is-local is-local
:name (:name mobj)
:media-id (:media-id mobj)
:thumbnail-id (:thumbnail-id mobj)
:width (:width mobj)
:height (:height mobj)
:mtype (:mtype mobj)})))
;; --- HELPERS
(def ^:private
sql:select-file
"select file.*,
project.team_id as team_id
from file
inner join project on (project.id = file.project_id)
where file.id = ?")
(defn- select-file
[conn id]
(let [row (db/exec-one! conn [sql:select-file id])]
(when-not row
(ex/raise :type :not-found))
row))
(files/check-edition-permissions! conn profile-id file-id)
(-> (assoc cfg :conn conn)
(cmd.media/clone-file-media-object params))))

@ -6,19 +6,23 @@
(ns app.rpc.mutations.profile
(:require
[app.auth :as auth]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.spec :as us]
[app.config :as cf]
[app.db :as db]
[app.emails :as eml]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit]
[app.rpc.commands.auth :as cmd.auth]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.mutations.teams :as teams]
[app.rpc.helpers :as rph]
[app.rpc.queries.profile :as profile]
[app.rpc.semaphore :as rsem]
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.services :as sv]
@ -46,6 +50,7 @@
:opt-un [::lang ::theme]))
(sv/defmethod ::update-profile
{::doc/added "1.0"}
[{:keys [pool] :as cfg} {:keys [profile-id fullname lang theme] :as params}]
(db/with-atomic [conn pool]
;; NOTE: we need to retrieve the profile independently if we use
@ -68,8 +73,11 @@
:props (db/tjson (:props profile))}
{:id profile-id})
(with-meta (-> profile profile/strip-private-attrs d/without-nils)
{::audit/props (audit/profile->props profile)}))))
(-> profile
profile/strip-private-attrs
d/without-nils
(rph/with-meta {::audit/props (audit/profile->props profile)})))))
;; --- MUTATION: Update Password
@ -81,11 +89,11 @@
(s/keys :req-un [::profile-id ::password ::old-password]))
(sv/defmethod ::update-profile-password
{::rsem/queue :auth}
{::climit/queue :auth}
[{:keys [pool] :as cfg} {:keys [password] :as params}]
(db/with-atomic [conn pool]
(let [profile (validate-password! conn params)
session-id (:app.rpc/session-id params)]
session-id (::rpc/session-id params)]
(when (= (str/lower (:email profile))
(str/lower (:password params)))
(ex/raise :type :validation
@ -104,7 +112,7 @@
(defn- validate-password!
[conn {:keys [profile-id old-password] :as params}]
(let [profile (db/get-by-id conn :profile profile-id)]
(when-not (:valid (cmd.auth/verify-password old-password (:password profile)))
(when-not (:valid (auth/verify-password old-password (:password profile)))
(ex/raise :type :validation
:code :old-password-not-match))
profile))
@ -112,7 +120,7 @@
(defn update-profile-password!
[conn {:keys [id password] :as profile}]
(db/update! conn :profile
{:password (cmd.auth/derive-password password)}
{:password (auth/derive-password password)}
{:id id}))
;; --- MUTATION: Update Photo
@ -131,7 +139,7 @@
(update-profile-photo cfg params)))
(defn update-profile-photo
[{:keys [pool storage executor] :as cfg} {:keys [profile-id] :as params}]
[{:keys [pool storage executor] :as cfg} {:keys [profile-id file] :as params}]
(p/let [profile (px/with-dispatch executor
(db/get-by-id pool :profile profile-id))
photo (teams/upload-photo cfg params)]
@ -144,7 +152,13 @@
(db/update! pool :profile
{:photo-id (:id photo)}
{:id profile-id})
nil))
(-> (rph/wrap)
(rph/with-meta {::audit/replace-props
{:file-name (:filename file)
:file-size (:size file)
:file-path (str (:path file))
:file-mtype (:mtype file)}}))))
;; --- MUTATION: Request Email Change
@ -276,8 +290,7 @@
{:deleted-at deleted-at}
{:id profile-id})
(with-meta {}
{:transform-response (:delete session)}))))
(rph/with-transform {} (session/delete-fn session)))))
(def sql:owned-teams
"with owner_teams as (
@ -296,77 +309,3 @@
(defn- get-owned-teams-with-participants
[conn profile-id]
(db/exec! conn [sql:owned-teams profile-id profile-id]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEPRECATED METHODS (TO BE REMOVED ON 1.16.x)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; --- MUTATION: Login
(s/def ::login ::cmd.auth/login-with-password)
(sv/defmethod ::login
{:auth false
::rsem/queue :auth
::doc/added "1.0"
::doc/deprecated "1.15"}
[cfg params]
(cmd.auth/login-with-password cfg params))
;; --- MUTATION: Logout
(s/def ::logout ::cmd.auth/logout)
(sv/defmethod ::logout
{:auth false
::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [session] :as cfg} _]
(with-meta {}
{:transform-response (:delete session)}))
;; --- MUTATION: Recover Profile
(s/def ::recover-profile ::cmd.auth/recover-profile)
(sv/defmethod ::recover-profile
{::doc/added "1.0"
::doc/deprecated "1.15"}
[cfg params]
(cmd.auth/recover-profile cfg params))
;; --- MUTATION: Prepare Register
(s/def ::prepare-register-profile ::cmd.auth/prepare-register-profile)
(sv/defmethod ::prepare-register-profile
{:auth false
::doc/added "1.0"
::doc/deprecated "1.15"}
[cfg params]
(cmd.auth/prepare-register cfg params))
;; --- MUTATION: Register Profile
(s/def ::register-profile ::cmd.auth/register-profile)
(sv/defmethod ::register-profile
{:auth false
::rsem/queue :auth
::doc/added "1.0"
::doc/deprecated "1.15"}
[{:keys [pool] :as cfg} params]
(db/with-atomic [conn pool]
(-> (assoc cfg :conn conn)
(cmd.auth/register-profile params))))
;; --- MUTATION: Request Profile Recovery
(s/def ::request-profile-recovery ::cmd.auth/request-profile-recovery)
(sv/defmethod ::request-profile-recovery
{:auth false
::doc/added "1.0"
::doc/deprecated "1.15"}
[cfg params]
(cmd.auth/request-profile-recovery cfg params))

@ -7,11 +7,14 @@
(ns app.rpc.mutations.projects
(:require
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc.permissions :as perms]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.rpc.queries.projects :as proj]
[app.rpc.queries.teams :as teams]
[app.rpc.quotes :as quotes]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]))
@ -22,57 +25,34 @@
(s/def ::name ::us/string)
(s/def ::profile-id ::us/uuid)
;; --- Mutation: Create Project
(declare create-project)
(declare create-project-role)
(declare create-team-project-profile)
(s/def ::team-id ::us/uuid)
(s/def ::create-project
(s/keys :req-un [::profile-id ::team-id ::name]
:opt-un [::id]))
(sv/defmethod ::create-project
{::doc/added "1.0"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [profile-id team-id] :as params}]
(db/with-atomic [conn pool]
(teams/check-edition-permissions! conn profile-id team-id)
(let [project (create-project conn params)
params (assoc params
:project-id (:id project)
:role :owner)]
(create-project-role conn params)
(create-team-project-profile conn params)
(quotes/check-quote! conn {::quotes/id ::quotes/projects-per-team
::quotes/profile-id profile-id
::quotes/team-id team-id})
(let [project (teams/create-project conn params)]
(teams/create-project-role conn profile-id (:id project) :owner)
(db/insert! conn :team-project-profile-rel
{:project-id (:id project)
:profile-id profile-id
:team-id team-id
:is-pinned true})
(assoc project :is-pinned true))))
(defn create-project
[conn {:keys [id team-id name is-default] :as params}]
(let [id (or id (uuid/next))
is-default (if (boolean? is-default) is-default false)]
(db/insert! conn :project
{:id id
:name name
:team-id team-id
:is-default is-default})))
(defn create-project-role
[conn {:keys [project-id profile-id role]}]
(let [params {:project-id project-id
:profile-id profile-id}]
(->> (perms/assign-role-flags params role)
(db/insert! conn :project-profile-rel))))
;; TODO: pending to be refactored
(defn create-team-project-profile
[conn {:keys [team-id project-id profile-id] :as params}]
(db/insert! conn :team-project-profile-rel
{:project-id project-id
:profile-id profile-id
:team-id team-id
:is-pinned true}))
;; --- Mutation: Toggle Project Pin
(def ^:private
@ -89,13 +69,16 @@
(s/keys :req-un [::profile-id ::id ::team-id ::is-pinned]))
(sv/defmethod ::update-project-pin
{::doc/added "1.0"
::webhooks/batch-timeout (dt/duration "5s")
::webhooks/batch-key :id
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [id profile-id team-id is-pinned] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id id)
(db/exec-one! conn [sql:update-project-pin team-id id profile-id is-pinned is-pinned])
nil))
;; --- Mutation: Rename Project
(declare rename-project)
@ -104,13 +87,19 @@
(s/keys :req-un [::profile-id ::name ::id]))
(sv/defmethod ::rename-project
{::doc/added "1.0"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [id profile-id name] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id id)
(db/update! conn :project
{:name name}
{:id id})
nil))
(let [project (db/get-by-id conn :project id)]
(db/update! conn :project
{:name name}
{:id id})
(rph/with-meta (rph/wrap)
{::audit/props {:team-id (:team-id project)
:prev-name (:name project)}}))))
;; --- Mutation: Delete Project
@ -122,10 +111,16 @@
;; this is not allowed.
(sv/defmethod ::delete-project
{::doc/added "1.0"
::webhooks/event? true}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool]
(proj/check-edition-permissions! conn profile-id id)
(db/update! conn :project
{:deleted-at (dt/now)}
{:id id :is-default false})
nil))
(let [project (db/update! conn :project
{:deleted-at (dt/now)}
{:id id :is-default false})]
(rph/with-meta (rph/wrap)
{::audit/props {:team-id (:team-id project)
:name (:name project)
:created-at (:created-at project)
:modified-at (:modified-at project)}}))))
