mirror of
https://github.com/penpot/penpot.git
synced 2026-05-01 14:18:07 +00:00
Compare commits
No commits in common. "develop" and "2.6.1-RC2" have entirely different histories.
287
.circleci/config.yml
Normal file
287
.circleci/config.yml
Normal file
@ -0,0 +1,287 @@
|
|||||||
|
version: 2.1
|
||||||
|
jobs:
|
||||||
|
test-common:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: medium+
|
||||||
|
|
||||||
|
environment:
|
||||||
|
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||||
|
NODE_OPTIONS: --max-old-space-size=4096
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
# Download and cache dependencies
|
||||||
|
- restore_cache:
|
||||||
|
keys:
|
||||||
|
- v1-dependencies-{{ checksum "common/deps.edn"}}
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "fmt check & linter"
|
||||||
|
working_directory: "./common"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run fmt:clj:check
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "JVM tests"
|
||||||
|
working_directory: "./common"
|
||||||
|
command: |
|
||||||
|
clojure -M:dev:test
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "NODE tests"
|
||||||
|
working_directory: "./common"
|
||||||
|
command: |
|
||||||
|
yarn run test
|
||||||
|
|
||||||
|
- save_cache:
|
||||||
|
paths:
|
||||||
|
- ~/.m2
|
||||||
|
key: v1-dependencies-{{ checksum "common/deps.edn"}}
|
||||||
|
|
||||||
|
test-frontend:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: medium+
|
||||||
|
|
||||||
|
environment:
|
||||||
|
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||||
|
NODE_OPTIONS: --max-old-space-size=4096
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
# Download and cache dependencies
|
||||||
|
- restore_cache:
|
||||||
|
keys:
|
||||||
|
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "prepopulate linter cache"
|
||||||
|
working_directory: "./common"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "fmt check & linter"
|
||||||
|
working_directory: "./frontend"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run fmt:clj:check
|
||||||
|
yarn run fmt:js:check
|
||||||
|
yarn run lint:scss
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "unit tests"
|
||||||
|
working_directory: "./frontend"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run test
|
||||||
|
|
||||||
|
- save_cache:
|
||||||
|
paths:
|
||||||
|
- ~/.m2
|
||||||
|
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||||
|
|
||||||
|
test-components:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: medium+
|
||||||
|
|
||||||
|
environment:
|
||||||
|
JAVA_OPTS: -Xmx6g -Xms2g
|
||||||
|
NODE_OPTIONS: --max-old-space-size=4096
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
# Download and cache dependencies
|
||||||
|
- restore_cache:
|
||||||
|
keys:
|
||||||
|
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: Install dependencies
|
||||||
|
working_directory: "./frontend"
|
||||||
|
command: |
|
||||||
|
yarn
|
||||||
|
npx playwright install --with-deps
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: Build Storybook
|
||||||
|
working_directory: "./frontend"
|
||||||
|
command: yarn run build:storybook
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: Serve Storybook and run tests
|
||||||
|
working_directory: "./frontend"
|
||||||
|
command: |
|
||||||
|
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
|
||||||
|
"npx http-server storybook-static --port 6006 --silent" \
|
||||||
|
"npx wait-on tcp:6006 && yarn test:storybook"
|
||||||
|
|
||||||
|
test-integration:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: large
|
||||||
|
|
||||||
|
environment:
|
||||||
|
JAVA_OPTS: -Xmx6g -Xms2g
|
||||||
|
NODE_OPTIONS: --max-old-space-size=4096
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
# Download and cache dependencies
|
||||||
|
- restore_cache:
|
||||||
|
keys:
|
||||||
|
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "integration tests"
|
||||||
|
working_directory: "./frontend"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run build:app:assets
|
||||||
|
yarn run build:app
|
||||||
|
yarn run build:app:libs
|
||||||
|
yarn run playwright install --with-deps chromium
|
||||||
|
yarn run test:e2e -x --workers=4
|
||||||
|
|
||||||
|
test-backend:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
- image: cimg/postgres:14.5
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: penpot_test
|
||||||
|
POSTGRES_PASSWORD: penpot_test
|
||||||
|
POSTGRES_DB: penpot_test
|
||||||
|
- image: cimg/redis:7.0.5
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: medium+
|
||||||
|
|
||||||
|
environment:
|
||||||
|
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||||
|
NODE_OPTIONS: --max-old-space-size=4096
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
- restore_cache:
|
||||||
|
keys:
|
||||||
|
- v1-dependencies-{{ checksum "backend/deps.edn" }}
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "prepopulate linter cache"
|
||||||
|
working_directory: "./common"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "fmt check & linter"
|
||||||
|
working_directory: "./backend"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run fmt:clj:check
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "tests"
|
||||||
|
working_directory: "./backend"
|
||||||
|
command: |
|
||||||
|
clojure -M:dev:test --reporter kaocha.report/documentation
|
||||||
|
|
||||||
|
environment:
|
||||||
|
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
|
||||||
|
PENPOT_TEST_DATABASE_USERNAME: penpot_test
|
||||||
|
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
|
||||||
|
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
|
||||||
|
|
||||||
|
- save_cache:
|
||||||
|
paths:
|
||||||
|
- ~/.m2
|
||||||
|
key: v1-dependencies-{{ checksum "backend/deps.edn" }}
|
||||||
|
|
||||||
|
test-exporter:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: medium+
|
||||||
|
|
||||||
|
environment:
|
||||||
|
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||||
|
NODE_OPTIONS: --max-old-space-size=4096
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "prepopulate linter cache"
|
||||||
|
working_directory: "./common"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "fmt check & linter"
|
||||||
|
working_directory: "./exporter"
|
||||||
|
command: |
|
||||||
|
yarn install
|
||||||
|
yarn run fmt:clj:check
|
||||||
|
yarn run lint:clj
|
||||||
|
|
||||||
|
test-render-wasm:
|
||||||
|
docker:
|
||||||
|
- image: penpotapp/devenv:latest
|
||||||
|
|
||||||
|
working_directory: ~/repo
|
||||||
|
resource_class: medium+
|
||||||
|
environment:
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "fmt check"
|
||||||
|
working_directory: "./render-wasm"
|
||||||
|
command: |
|
||||||
|
cargo fmt --check
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "lint"
|
||||||
|
working_directory: "./render-wasm"
|
||||||
|
command: |
|
||||||
|
./lint
|
||||||
|
|
||||||
|
- run:
|
||||||
|
name: "cargo tests"
|
||||||
|
working_directory: "./render-wasm"
|
||||||
|
command: |
|
||||||
|
./test
|
||||||
|
|
||||||
|
workflows:
|
||||||
|
penpot:
|
||||||
|
jobs:
|
||||||
|
- test-frontend
|
||||||
|
- test-components
|
||||||
|
- test-integration
|
||||||
|
- test-backend
|
||||||
|
- test-common
|
||||||
|
- test-exporter
|
||||||
|
- test-render-wasm
|
||||||
@ -45,25 +45,10 @@
|
|||||||
:potok/reify-type
|
:potok/reify-type
|
||||||
{:level :error}
|
{:level :error}
|
||||||
|
|
||||||
:redundant-primitive-coercion
|
|
||||||
{:level :off}
|
|
||||||
|
|
||||||
:unused-excluded-var
|
|
||||||
{:level :off}
|
|
||||||
|
|
||||||
:unresolved-excluded-var
|
|
||||||
{:level :off}
|
|
||||||
|
|
||||||
:missing-protocol-method
|
|
||||||
{:level :off}
|
|
||||||
|
|
||||||
:unresolved-namespace
|
:unresolved-namespace
|
||||||
{:level :warning
|
{:level :warning
|
||||||
:exclude [data_readers]}
|
:exclude [data_readers]}
|
||||||
|
|
||||||
:unused-value
|
|
||||||
{:level :off}
|
|
||||||
|
|
||||||
:single-key-in
|
:single-key-in
|
||||||
{:level :warning}
|
{:level :warning}
|
||||||
|
|
||||||
@ -79,9 +64,6 @@
|
|||||||
:redundant-nested-call
|
:redundant-nested-call
|
||||||
{:level :off}
|
{:level :off}
|
||||||
|
|
||||||
:redundant-str-call
|
|
||||||
{:level :off}
|
|
||||||
|
|
||||||
:earmuffed-var-not-dynamic
|
:earmuffed-var-not-dynamic
|
||||||
{:level :off}
|
{:level :off}
|
||||||
|
|
||||||
|
|||||||
@ -2,11 +2,6 @@
|
|||||||
:remove-multiple-non-indenting-spaces? false
|
:remove-multiple-non-indenting-spaces? false
|
||||||
:remove-surrounding-whitespace? true
|
:remove-surrounding-whitespace? true
|
||||||
:remove-consecutive-blank-lines? false
|
:remove-consecutive-blank-lines? false
|
||||||
:indent-line-comments? true
|
|
||||||
:parallel? true
|
|
||||||
:align-form-columns? false
|
|
||||||
;; :align-map-columns? false
|
|
||||||
;; :align-single-column-lines? false
|
|
||||||
:extra-indents {rumext.v2/fnc [[:inner 0]]
|
:extra-indents {rumext.v2/fnc [[:inner 0]]
|
||||||
cljs.test/async [[:inner 0]]
|
cljs.test/async [[:inner 0]]
|
||||||
promesa.exec/thread [[:inner 0]]
|
promesa.exec/thread [[:inner 0]]
|
||||||
|
|||||||
38
.github/ISSUE_TEMPLATE/new-render-bug-report.md
vendored
38
.github/ISSUE_TEMPLATE/new-render-bug-report.md
vendored
@ -1,38 +0,0 @@
|
|||||||
---
|
|
||||||
name: New Render Bug Report
|
|
||||||
about: Create a report about the bugs you have found in the new render
|
|
||||||
title: ''
|
|
||||||
labels: new render
|
|
||||||
assignees: claragvinola
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Describe the bug**
|
|
||||||
A clear and concise description of what the bug is.
|
|
||||||
|
|
||||||
**Steps to Reproduce**
|
|
||||||
Steps to reproduce the behavior:
|
|
||||||
1. Go to '...'
|
|
||||||
2. Click on '....'
|
|
||||||
3. Scroll down to '....'
|
|
||||||
4. See error
|
|
||||||
|
|
||||||
**Expected behavior**
|
|
||||||
A clear and concise description of what you expected to happen.
|
|
||||||
|
|
||||||
**Screenshots or screen recordings**
|
|
||||||
If applicable, add screenshots or screen recording to help illustrate your problem.
|
|
||||||
|
|
||||||
**Desktop (please complete the following information):**
|
|
||||||
- OS: [e.g. iOS]
|
|
||||||
- Browser [e.g. chrome, safari]
|
|
||||||
- Version [e.g. 22]
|
|
||||||
|
|
||||||
**Smartphone (please complete the following information):**
|
|
||||||
- Device: [e.g. iPhone6]
|
|
||||||
- OS: [e.g. iOS8.1]
|
|
||||||
- Browser [e.g. stock browser, safari]
|
|
||||||
- Version [e.g. 22]
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context about the problem here.
|
|
||||||
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -13,7 +13,6 @@
|
|||||||
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
|
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
|
||||||
- [ ] Include screenshots or videos, if applicable.
|
- [ ] Include screenshots or videos, if applicable.
|
||||||
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
|
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
|
||||||
- [ ] Refactor any modified SCSS files following the refactor guide.
|
|
||||||
- [ ] Check CI passes successfully.
|
- [ ] Check CI passes successfully.
|
||||||
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.
|
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.
|
||||||
|
|
||||||
|
|||||||
93
.github/workflows/build-bundle.yml
vendored
93
.github/workflows/build-bundle.yml
vendored
@ -1,93 +0,0 @@
|
|||||||
name: Bundles Builder
|
|
||||||
|
|
||||||
on:
|
|
||||||
# Create bundle from manual action
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch or ref'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
build_wasm:
|
|
||||||
description: 'BUILD_WASM. Valid values: yes, no'
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: 'yes'
|
|
||||||
build_storybook:
|
|
||||||
description: 'BUILD_STORYBOOK. Valid values: yes, no'
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: 'yes'
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch or ref'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
build_wasm:
|
|
||||||
description: 'BUILD_WASM. Valid values: yes, no'
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: 'yes'
|
|
||||||
build_storybook:
|
|
||||||
description: 'BUILD_STORYBOOK. Valid values: yes, no'
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: 'yes'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-bundle:
|
|
||||||
name: Build and Upload Penpot Bundle
|
|
||||||
runs-on: penpot-runner-01
|
|
||||||
env:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
ref: ${{ inputs.gh_ref }}
|
|
||||||
|
|
||||||
- name: Extract some useful variables
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "bundle_version=$(git describe --tags --always)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Build bundle
|
|
||||||
env:
|
|
||||||
BUILD_WASM: ${{ inputs.build_wasm }}
|
|
||||||
BUILD_STORYBOOK: ${{ inputs.build_storybook }}
|
|
||||||
run: ./manage.sh build-bundle
|
|
||||||
|
|
||||||
- name: Prepare directories for zipping
|
|
||||||
run: |
|
|
||||||
mkdir zips
|
|
||||||
mv bundles penpot
|
|
||||||
|
|
||||||
- name: Create zip bundle
|
|
||||||
run: |
|
|
||||||
echo "📦 Packaging Penpot bundle..."
|
|
||||||
zip -r zips/penpot.zip penpot
|
|
||||||
|
|
||||||
- name: Upload Penpot bundle to S3
|
|
||||||
run: |
|
|
||||||
aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}.zip --metadata bundle-version=${{ steps.vars.outputs.bundle_version }}
|
|
||||||
|
|
||||||
- name: Notify Mattermost
|
|
||||||
if: failure()
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
❌ 📦 *[PENPOT] Error building penpot bundles.*
|
|
||||||
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
|
|
||||||
Bundle version: `${{ steps.vars.outputs.bundle_version }}`
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
22
.github/workflows/build-develop.yml
vendored
22
.github/workflows/build-develop.yml
vendored
@ -1,22 +0,0 @@
|
|||||||
name: _DEVELOP
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
schedule:
|
|
||||||
- cron: '16 5-20 * * 1-5'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-bundle:
|
|
||||||
uses: ./.github/workflows/build-bundle.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "develop"
|
|
||||||
build_wasm: "yes"
|
|
||||||
build_storybook: "yes"
|
|
||||||
|
|
||||||
build-docker:
|
|
||||||
needs: build-bundle
|
|
||||||
uses: ./.github/workflows/build-docker.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "develop"
|
|
||||||
41
.github/workflows/build-docker-devenv.yml
vendored
41
.github/workflows/build-docker-devenv.yml
vendored
@ -1,41 +0,0 @@
|
|||||||
name: DevEnv Docker Image Builder
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-push:
|
|
||||||
name: Build and push DevEnv Docker image
|
|
||||||
environment: release-admins
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Set common environment variables
|
|
||||||
run: |
|
|
||||||
# Each job execution will use its own docker configuration.
|
|
||||||
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v4
|
|
||||||
|
|
||||||
- name: Login to Docker Registry
|
|
||||||
uses: docker/login-action@v4
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.PUB_DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Build and push DevEnv Docker image
|
|
||||||
uses: docker/build-push-action@v7
|
|
||||||
env:
|
|
||||||
DOCKER_IMAGE: 'penpotapp/devenv'
|
|
||||||
with:
|
|
||||||
context: ./docker/devenv/
|
|
||||||
file: ./docker/devenv/Dockerfile
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ env.DOCKER_IMAGE }}:latest
|
|
||||||
cache-from: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
|
||||||
183
.github/workflows/build-docker.yml
vendored
183
.github/workflows/build-docker.yml
vendored
@ -1,183 +0,0 @@
|
|||||||
name: Docker Images Builder
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch or ref'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch or ref'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-push:
|
|
||||||
name: Build and Push Penpot Docker Images
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Set common environment variables
|
|
||||||
run: |
|
|
||||||
# Each job execution will use its own docker configuration.
|
|
||||||
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
ref: ${{ inputs.gh_ref }}
|
|
||||||
|
|
||||||
- name: Extract some useful variables
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Download Penpot Bundles
|
|
||||||
id: bundles
|
|
||||||
env:
|
|
||||||
FILE_NAME: penpot-${{ steps.vars.outputs.gh_ref }}.zip
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
|
|
||||||
run: |
|
|
||||||
tmp=$(aws s3api head-object \
|
|
||||||
--bucket ${{ secrets.S3_BUCKET }} \
|
|
||||||
--key "$FILE_NAME" \
|
|
||||||
--query 'Metadata."bundle-version"' \
|
|
||||||
--output text)
|
|
||||||
echo "bundle_version=$tmp" >> $GITHUB_OUTPUT
|
|
||||||
pushd docker/images
|
|
||||||
aws s3 cp s3://${{ secrets.S3_BUCKET }}/$FILE_NAME .
|
|
||||||
unzip $FILE_NAME > /dev/null
|
|
||||||
mv penpot/backend bundle-backend
|
|
||||||
mv penpot/frontend bundle-frontend
|
|
||||||
mv penpot/exporter bundle-exporter
|
|
||||||
mv penpot/storybook bundle-storybook
|
|
||||||
mv penpot/mcp bundle-mcp
|
|
||||||
popd
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v4
|
|
||||||
|
|
||||||
- name: Login to Docker Registry
|
|
||||||
uses: docker/login-action@v4
|
|
||||||
with:
|
|
||||||
registry: ${{ secrets.DOCKER_REGISTRY }}
|
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
|
|
||||||
# To avoid the “429 Too Many Requests” error when downloading
|
|
||||||
# images from DockerHub for unregistered users.
|
|
||||||
# https://docs.docker.com/docker-hub/usage/
|
|
||||||
- name: Login to DockerHub Registry
|
|
||||||
uses: docker/login-action@v4
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.PUB_DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels)
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v6
|
|
||||||
with:
|
|
||||||
images:
|
|
||||||
frontend
|
|
||||||
backend
|
|
||||||
exporter
|
|
||||||
storybook
|
|
||||||
mcp
|
|
||||||
labels: |
|
|
||||||
bundle_version=${{ steps.bundles.outputs.bundle_version }}
|
|
||||||
|
|
||||||
- name: Build and push Backend Docker image
|
|
||||||
uses: docker/build-push-action@v7
|
|
||||||
env:
|
|
||||||
DOCKER_IMAGE: 'backend'
|
|
||||||
BUNDLE_PATH: './bundle-backend'
|
|
||||||
with:
|
|
||||||
context: ./docker/images/
|
|
||||||
file: ./docker/images/Dockerfile.backend
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
|
||||||
|
|
||||||
- name: Build and push Frontend Docker image
|
|
||||||
uses: docker/build-push-action@v7
|
|
||||||
env:
|
|
||||||
DOCKER_IMAGE: 'frontend'
|
|
||||||
BUNDLE_PATH: './bundle-frontend'
|
|
||||||
with:
|
|
||||||
context: ./docker/images/
|
|
||||||
file: ./docker/images/Dockerfile.frontend
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
|
||||||
|
|
||||||
- name: Build and push Exporter Docker image
|
|
||||||
uses: docker/build-push-action@v7
|
|
||||||
env:
|
|
||||||
DOCKER_IMAGE: 'exporter'
|
|
||||||
BUNDLE_PATH: './bundle-exporter'
|
|
||||||
with:
|
|
||||||
context: ./docker/images/
|
|
||||||
file: ./docker/images/Dockerfile.exporter
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
|
||||||
|
|
||||||
- name: Build and push Storybook Docker image
|
|
||||||
uses: docker/build-push-action@v7
|
|
||||||
env:
|
|
||||||
DOCKER_IMAGE: 'storybook'
|
|
||||||
BUNDLE_PATH: './bundle-storybook'
|
|
||||||
with:
|
|
||||||
context: ./docker/images/
|
|
||||||
file: ./docker/images/Dockerfile.storybook
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
|
||||||
|
|
||||||
- name: Build and push MCP Docker image
|
|
||||||
uses: docker/build-push-action@v7
|
|
||||||
env:
|
|
||||||
DOCKER_IMAGE: 'mcp'
|
|
||||||
BUNDLE_PATH: './bundle-mcp'
|
|
||||||
with:
|
|
||||||
context: ./docker/images/
|
|
||||||
file: ./docker/images/Dockerfile.mcp
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
|
||||||
|
|
||||||
- name: Notify Mattermost
|
|
||||||
if: failure()
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
❌ 🐳 *[PENPOT] Error building penpot docker images.*
|
|
||||||
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
|
|
||||||
📦 Bundle: `${{ steps.bundles.outputs.bundle_version }}`
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
22
.github/workflows/build-main-staging.yml
vendored
22
.github/workflows/build-main-staging.yml
vendored
@ -1,22 +0,0 @@
|
|||||||
name: _MAIN-STAGING
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
schedule:
|
|
||||||
- cron: '26 5-20 * * 1-5'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-bundle:
|
|
||||||
uses: ./.github/workflows/build-bundle.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "main-staging"
|
|
||||||
build_wasm: "yes"
|
|
||||||
build_storybook: "yes"
|
|
||||||
|
|
||||||
build-docker:
|
|
||||||
needs: build-bundle
|
|
||||||
uses: ./.github/workflows/build-docker.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "main-staging"
|
|
||||||
22
.github/workflows/build-staging.yml
vendored
22
.github/workflows/build-staging.yml
vendored
@ -1,22 +0,0 @@
|
|||||||
name: _STAGING
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
schedule:
|
|
||||||
- cron: '36 5-20 * * 1-5'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-bundle:
|
|
||||||
uses: ./.github/workflows/build-bundle.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "staging"
|
|
||||||
build_wasm: "yes"
|
|
||||||
build_storybook: "yes"
|
|
||||||
|
|
||||||
build-docker:
|
|
||||||
needs: build-bundle
|
|
||||||
uses: ./.github/workflows/build-docker.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "staging"
|
|
||||||
47
.github/workflows/build-tag.yml
vendored
47
.github/workflows/build-tag.yml
vendored
@ -1,47 +0,0 @@
|
|||||||
name: _TAG
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
- '*'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-bundle:
|
|
||||||
uses: ./.github/workflows/build-bundle.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: ${{ github.ref_name }}
|
|
||||||
build_wasm: "yes"
|
|
||||||
build_storybook: "yes"
|
|
||||||
|
|
||||||
build-docker:
|
|
||||||
needs: build-bundle
|
|
||||||
uses: ./.github/workflows/build-docker.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: ${{ github.ref_name }}
|
|
||||||
|
|
||||||
notify:
|
|
||||||
name: Notifications
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
needs: build-docker
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Notify Mattermost
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
🐳 *[PENPOT] Docker image available: ${{ github.ref_name }}*
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
|
|
||||||
publish-final-tag:
|
|
||||||
if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
|
|
||||||
needs: build-docker
|
|
||||||
uses: ./.github/workflows/release.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: ${{ github.ref_name }}
|
|
||||||
5
.github/workflows/commit-checker.yml
vendored
5
.github/workflows/commit-checker.yml
vendored
@ -6,14 +6,12 @@ on:
|
|||||||
- edited
|
- edited
|
||||||
- reopened
|
- reopened
|
||||||
- synchronize
|
- synchronize
|
||||||
- ready_for_review
|
|
||||||
pull_request_target:
|
pull_request_target:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
- edited
|
- edited
|
||||||
- reopened
|
- reopened
|
||||||
- synchronize
|
- synchronize
|
||||||
- ready_for_review
|
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
@ -22,14 +20,13 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check-commit-message:
|
check-commit-message:
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: Check Commit Message
|
name: Check Commit Message
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Check Commit Type
|
- name: Check Commit Type
|
||||||
uses: gsactions/commit-message-checker@v2
|
uses: gsactions/commit-message-checker@v2
|
||||||
with:
|
with:
|
||||||
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert|Reapply).+[^.])$'
|
pattern: '^:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):\s[A-Z].*[^.]$'
|
||||||
flags: 'gm'
|
flags: 'gm'
|
||||||
error: 'Commit should match CONTRIBUTING.md guideline'
|
error: 'Commit should match CONTRIBUTING.md guideline'
|
||||||
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
|
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
|
||||||
|
|||||||
142
.github/workflows/plugins-deploy-api-doc.yml
vendored
142
.github/workflows/plugins-deploy-api-doc.yml
vendored
@ -1,142 +0,0 @@
|
|||||||
name: Plugins/api-doc deployer
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
paths:
|
|
||||||
- 'plugins/libs/plugin-types/index.d.ts'
|
|
||||||
- 'plugins/libs/plugin-types/REAME.md'
|
|
||||||
- 'plugins/tools/typedoc.css'
|
|
||||||
- 'plugins/CHANGELOG.md'
|
|
||||||
- 'plugins/wrangler-penpot-plugins-api-doc.toml'
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch'
|
|
||||||
type: choice
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
options:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
deploy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Extract some useful variables
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
ref: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
|
|
||||||
# START: Setup Node and PNPM enabling cache
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v6
|
|
||||||
with:
|
|
||||||
node-version-file: .nvmrc
|
|
||||||
|
|
||||||
- name: Enable PNPM
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
|
|
||||||
- name: Get pnpm store path
|
|
||||||
id: pnpm-store
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Cache pnpm store
|
|
||||||
uses: actions/cache@v5
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
|
||||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-pnpm-
|
|
||||||
# END: Setup Node and PNPM enabling cache
|
|
||||||
|
|
||||||
- name: Install deps
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
pnpm install --no-frozen-lockfile;
|
|
||||||
pnpm add -D -w wrangler@latest;
|
|
||||||
|
|
||||||
- name: Build docs
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: pnpm run build:doc
|
|
||||||
|
|
||||||
- name: Select Worker name
|
|
||||||
run: |
|
|
||||||
REF="${{ steps.vars.outputs.gh_ref }}"
|
|
||||||
case "$REF" in
|
|
||||||
main)
|
|
||||||
echo "WORKER_NAME=penpot-plugins-api-doc-pro" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=doc.plugins.penpot.app" >> $GITHUB_ENV ;;
|
|
||||||
staging)
|
|
||||||
echo "WORKER_NAME=penpot-plugins-api-doc-pre" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=doc.plugins.penpot.dev" >> $GITHUB_ENV ;;
|
|
||||||
develop)
|
|
||||||
echo "WORKER_NAME=penpot-plugins-api-doc-hourly" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=doc.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
|
|
||||||
*) echo "Unsupported branch ${REF}" && exit 1 ;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
- name: Set the custom url
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" wrangler-penpot-plugins-api-doc.toml
|
|
||||||
|
|
||||||
- name: Add noindex header and robots.txt files for non-production environments
|
|
||||||
if: ${{ steps.vars.outputs.gh_ref != 'main' }}
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
ASSETS_DIR="dist/doc"
|
|
||||||
|
|
||||||
cat > "${ASSETS_DIR}/_headers" << 'EOF'
|
|
||||||
/*
|
|
||||||
X-Robots-Tag: noindex, nofollow
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat > "${ASSETS_DIR}/robots.txt" << 'EOF'
|
|
||||||
User-agent: *
|
|
||||||
Disallow: /
|
|
||||||
EOF
|
|
||||||
|
|
||||||
- name: Deploy to Cloudflare Workers
|
|
||||||
uses: cloudflare/wrangler-action@v3
|
|
||||||
with:
|
|
||||||
workingDirectory: plugins
|
|
||||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
command: deploy --config wrangler-penpot-plugins-api-doc.toml --name ${{ env.WORKER_NAME }}
|
|
||||||
|
|
||||||
- name: Notify Mattermost
|
|
||||||
if: failure()
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
❌ 🧩📚 *[PENPOT PLUGINS] Error deploying API documentation.*
|
|
||||||
📄 Triggered from ref: `${{ inputs.gh_ref }}`
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
127
.github/workflows/plugins-deploy-package.yml
vendored
127
.github/workflows/plugins-deploy-package.yml
vendored
@ -1,127 +0,0 @@
|
|||||||
name: Plugins/package deployer
|
|
||||||
|
|
||||||
on:
|
|
||||||
# Deploy package from manual action
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch'
|
|
||||||
type: choice
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
options:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
plugin_name:
|
|
||||||
description: 'Pluging name (like plugins/apps/<plugin_name>-plugin)'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
plugin_name:
|
|
||||||
description: 'Publig name (from plugins/apps/<plugin_name>-plugin)'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
deploy:
|
|
||||||
runs-on: penpot-runner-01
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
ref: ${{ inputs.gh_ref }}
|
|
||||||
|
|
||||||
# START: Setup Node and PNPM enabling cache
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v6
|
|
||||||
with:
|
|
||||||
node-version-file: .nvmrc
|
|
||||||
|
|
||||||
- name: Enable PNPM
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
|
|
||||||
- name: Get pnpm store path
|
|
||||||
id: pnpm-store
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Cache pnpm store
|
|
||||||
uses: actions/cache@v5
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
|
||||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-pnpm-
|
|
||||||
# END: Setup Node and PNPM enabling cache
|
|
||||||
|
|
||||||
- name: Install deps
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
pnpm install --no-frozen-lockfile;
|
|
||||||
pnpm add -D -w wrangler@latest;
|
|
||||||
|
|
||||||
- name: "Build package for ${{ inputs.plugin_name }}-plugin"
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: pnpm --filter ${{ inputs.plugin_name }}-plugin build
|
|
||||||
|
|
||||||
- name: Select Worker name
|
|
||||||
run: |
|
|
||||||
REF="${{ inputs.gh_ref }}"
|
|
||||||
case "$REF" in
|
|
||||||
main)
|
|
||||||
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pro" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.app" >> $GITHUB_ENV ;;
|
|
||||||
staging)
|
|
||||||
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pre" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.dev" >> $GITHUB_ENV ;;
|
|
||||||
develop)
|
|
||||||
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-hourly" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
|
|
||||||
*) echo "Unsupported branch ${REF}" && exit 1 ;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
- name: Set the custom url
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" apps/${{ inputs.plugin_name }}-plugin/wrangler.toml
|
|
||||||
|
|
||||||
- name: Deploy to Cloudflare Workers
|
|
||||||
uses: cloudflare/wrangler-action@v3
|
|
||||||
with:
|
|
||||||
workingDirectory: plugins
|
|
||||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
command: deploy --config apps/${{ inputs.plugin_name }}-plugin/wrangler.toml --name ${{ env.WORKER_NAME }}
|
|
||||||
|
|
||||||
- name: Notify Mattermost
|
|
||||||
if: failure()
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
❌ 🧩📦 *[PENPOT PLUGINS] Error deploying ${{ env.WORKER_NAME }}.*
|
|
||||||
📄 Triggered from ref: `${{ inputs.gh_ref }}`
|
|
||||||
Plugin name: `${{ inputs.plugin_name }}-plugin`
|
|
||||||
Cloudflare worker name: `${{ env.WORKER_NAME }}`
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
143
.github/workflows/plugins-deploy-packages.yml
vendored
143
.github/workflows/plugins-deploy-packages.yml
vendored
@ -1,143 +0,0 @@
|
|||||||
name: Plugins/packages deployer
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
paths:
|
|
||||||
- 'plugins/apps/*-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch'
|
|
||||||
type: choice
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
options:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
detect-changes:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
colors_to_tokens: ${{ steps.filter.outputs.colors_to_tokens }}
|
|
||||||
create_palette: ${{ steps.filter.outputs.create_palette }}
|
|
||||||
lorem_ipsum: ${{ steps.filter.outputs.lorem_ipsum }}
|
|
||||||
rename_layers: ${{ steps.filter.outputs.rename_layers }}
|
|
||||||
contrast: ${{ steps.filter.outputs.contrast }}
|
|
||||||
icons: ${{ steps.filter.outputs.icons }}
|
|
||||||
poc_state: ${{ steps.filter.outputs.poc_state }}
|
|
||||||
table: ${{ steps.filter.outputs.table }}
|
|
||||||
# [For new plugins]
|
|
||||||
# Add more outputs here
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v6
|
|
||||||
- id: filter
|
|
||||||
uses: dorny/paths-filter@v4
|
|
||||||
with:
|
|
||||||
filters: |
|
|
||||||
colors_to_tokens:
|
|
||||||
- 'plugins/apps/colors-to-tokens-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
contrast:
|
|
||||||
- 'plugins/apps/contrast-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
create_palette:
|
|
||||||
- 'plugins/apps/create-palette-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
icons:
|
|
||||||
- 'plugins/apps/icons-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
lorem_ipsum:
|
|
||||||
- 'plugins/apps/lorem-ipsum-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
rename_layers:
|
|
||||||
- 'plugins/apps/rename-layers-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
table:
|
|
||||||
- 'plugins/apps/table-plugin/**'
|
|
||||||
- 'libs/plugins-styles/**'
|
|
||||||
# [For new plugins]
|
|
||||||
# Add more plugin filters here
|
|
||||||
# another_plugin:
|
|
||||||
# - 'plugins/apps/another-plugin/**'
|
|
||||||
# - 'libs/plugins-styles/**'
|
|
||||||
|
|
||||||
colors-to-tokens-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.colors_to_tokens == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: colors-to-tokens
|
|
||||||
|
|
||||||
contrast-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.contrast == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: contrast
|
|
||||||
|
|
||||||
create-palette-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.create_palette == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: create-palette
|
|
||||||
|
|
||||||
icons-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.icons == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: icons
|
|
||||||
|
|
||||||
lorem-ipsum-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.lorem_ipsum == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: lorem-ipsum
|
|
||||||
|
|
||||||
rename-layers-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.rename_layers == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: rename-layers
|
|
||||||
|
|
||||||
table-plugin:
|
|
||||||
needs: detect-changes
|
|
||||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.table == 'true'
|
|
||||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
plugin_name: table
|
|
||||||
|
|
||||||
# [For new plugins]
|
|
||||||
# Add more jobs for other plugins below, following the same pattern
|
|
||||||
# another-plugin:
|
|
||||||
# needs: detect-changes
|
|
||||||
# if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.another_plugin == 'true'
|
|
||||||
# uses: ./.github/workflows/plugins-deploy-package.yml
|
|
||||||
# secrets: inherit
|
|
||||||
# with:
|
|
||||||
# gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
|
||||||
# plugin_name: another
|
|
||||||
140
.github/workflows/plugins-deploy-styles-doc.yml
vendored
140
.github/workflows/plugins-deploy-styles-doc.yml
vendored
@ -1,140 +0,0 @@
|
|||||||
name: Plugins/styles-doc deployer
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
paths:
|
|
||||||
- 'plugins/apps/example-styles/**'
|
|
||||||
- 'plugins/libs/plugins-styles/**'
|
|
||||||
- 'plugins/wrangler-penpot-plugins-styles-doc.toml'
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Name of the branch'
|
|
||||||
type: choice
|
|
||||||
required: true
|
|
||||||
default: 'develop'
|
|
||||||
options:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
deploy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Extract some useful variables
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
ref: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
|
|
||||||
# START: Setup Node and PNPM enabling cache
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v6
|
|
||||||
with:
|
|
||||||
node-version-file: .nvmrc
|
|
||||||
|
|
||||||
- name: Enable PNPM
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
|
|
||||||
- name: Get pnpm store path
|
|
||||||
id: pnpm-store
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Cache pnpm store
|
|
||||||
uses: actions/cache@v5
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
|
||||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-pnpm-
|
|
||||||
# END: Setup Node and PNPM enabling cache
|
|
||||||
|
|
||||||
- name: Install deps
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
pnpm install --no-frozen-lockfile;
|
|
||||||
pnpm add -D -w wrangler@latest;
|
|
||||||
|
|
||||||
- name: Build styles
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: pnpm run build:styles-example
|
|
||||||
|
|
||||||
- name: Select Worker name
|
|
||||||
run: |
|
|
||||||
REF="${{ steps.vars.outputs.gh_ref }}"
|
|
||||||
case "$REF" in
|
|
||||||
main)
|
|
||||||
echo "WORKER_NAME=penpot-plugins-styles-doc-pro" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=styles-doc.plugins.penpot.app" >> $GITHUB_ENV ;;
|
|
||||||
staging)
|
|
||||||
echo "WORKER_NAME=penpot-plugins-styles-doc-pre" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=styles-doc.plugins.penpot.dev" >> $GITHUB_ENV ;;
|
|
||||||
develop)
|
|
||||||
echo "WORKER_NAME=penpot-plugins-styles-doc-hourly" >> $GITHUB_ENV
|
|
||||||
echo "WORKER_URI=styles-doc.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
|
|
||||||
*) echo "Unsupported branch ${REF}" && exit 1 ;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
- name: Set the custom url
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" wrangler-penpot-plugins-styles-doc.toml
|
|
||||||
|
|
||||||
- name: Add noindex header and robots.txt files for non-production environments
|
|
||||||
if: ${{ steps.vars.outputs.gh_ref != 'main' }}
|
|
||||||
working-directory: plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
ASSETS_DIR="dist/apps/example-styles"
|
|
||||||
|
|
||||||
cat > "${ASSETS_DIR}/_headers" << 'EOF'
|
|
||||||
/*
|
|
||||||
X-Robots-Tag: noindex, nofollow
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat > "${ASSETS_DIR}/robots.txt" << 'EOF'
|
|
||||||
User-agent: *
|
|
||||||
Disallow: /
|
|
||||||
EOF
|
|
||||||
|
|
||||||
- name: Deploy to Cloudflare Workers
|
|
||||||
uses: cloudflare/wrangler-action@v3
|
|
||||||
with:
|
|
||||||
workingDirectory: plugins
|
|
||||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
command: deploy --config wrangler-penpot-plugins-styles-doc.toml --name ${{ env.WORKER_NAME }}
|
|
||||||
|
|
||||||
- name: Notify Mattermost
|
|
||||||
if: failure()
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
❌ 🧩💅 *[PENPOT PLUGINS] Error deploying Styles documentation.*
|
|
||||||
📄 Triggered from ref: `${{ inputs.gh_ref }}`
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
115
.github/workflows/release.yml
vendored
115
.github/workflows/release.yml
vendored
@ -1,115 +0,0 @@
|
|||||||
name: Release Publisher
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Tag to release'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
gh_ref:
|
|
||||||
description: 'Tag to release'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
release:
|
|
||||||
environment: release-admins
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
outputs:
|
|
||||||
version: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
|
|
||||||
steps:
|
|
||||||
- name: Extract some useful variables
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
ref: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
|
|
||||||
# --- Publicly release the docker images ---
|
|
||||||
- name: Configure ECR credentials
|
|
||||||
uses: aws-actions/configure-aws-credentials@v4
|
|
||||||
with:
|
|
||||||
aws-access-key-id: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
aws-secret-access-key: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
aws-region: ${{ secrets.AWS_REGION }}
|
|
||||||
|
|
||||||
- name: Install Skopeo
|
|
||||||
run: |
|
|
||||||
sudo apt-get update -y
|
|
||||||
sudo apt-get install -y skopeo
|
|
||||||
|
|
||||||
- name: Copy images from AWS ECR to Docker Hub
|
|
||||||
env:
|
|
||||||
AWS_REGION: ${{ secrets.AWS_REGION }}
|
|
||||||
DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
|
|
||||||
PUB_DOCKER_USERNAME: ${{ secrets.PUB_DOCKER_USERNAME }}
|
|
||||||
PUB_DOCKER_PASSWORD: ${{ secrets.PUB_DOCKER_PASSWORD }}
|
|
||||||
TAG: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
run: |
|
|
||||||
aws ecr get-login-password --region $AWS_REGION | \
|
|
||||||
skopeo login --username AWS --password-stdin \
|
|
||||||
$DOCKER_REGISTRY
|
|
||||||
|
|
||||||
echo "$PUB_DOCKER_PASSWORD" | skopeo login --username "$PUB_DOCKER_USERNAME" --password-stdin docker.io
|
|
||||||
|
|
||||||
IMAGES=("frontend" "backend" "exporter" "storybook")
|
|
||||||
SHORT_TAG=${TAG%.*}
|
|
||||||
|
|
||||||
for image in "${IMAGES[@]}"; do
|
|
||||||
skopeo copy --all \
|
|
||||||
docker://$DOCKER_REGISTRY/$image:$TAG \
|
|
||||||
docker://docker.io/penpotapp/$image:$TAG
|
|
||||||
|
|
||||||
for alias in main latest "$SHORT_TAG"; do
|
|
||||||
skopeo copy --all \
|
|
||||||
docker://$DOCKER_REGISTRY/$image:$TAG \
|
|
||||||
docker://docker.io/penpotapp/$image:$alias
|
|
||||||
done
|
|
||||||
done
|
|
||||||
|
|
||||||
# --- Release notes extraction ---
|
|
||||||
- name: Extract release notes from CHANGES.md
|
|
||||||
id: extract_release_notes
|
|
||||||
env:
|
|
||||||
TAG: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
run: |
|
|
||||||
RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
|
|
||||||
if [ -z "$RELEASE_NOTES" ]; then
|
|
||||||
RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
|
|
||||||
fi
|
|
||||||
echo "release_notes<<EOF" >> $GITHUB_OUTPUT
|
|
||||||
echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT
|
|
||||||
echo "EOF" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
# --- Create GitHub release ---
|
|
||||||
- name: Create GitHub release
|
|
||||||
uses: softprops/action-gh-release@v2
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
tag_name: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
name: ${{ steps.vars.outputs.gh_ref }}
|
|
||||||
body: ${{ steps.extract_release_notes.outputs.release_notes }}
|
|
||||||
|
|
||||||
- name: Notify Mattermost
|
|
||||||
if: failure()
|
|
||||||
uses: mattermost/action-mattermost-notify@master
|
|
||||||
with:
|
|
||||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
|
||||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
|
||||||
TEXT: |
|
|
||||||
❌ 🚀 *[PENPOT] Error releasing penpot.*
|
|
||||||
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
|
|
||||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
|
||||||
@infra
|
|
||||||
47
.github/workflows/tests-mcp.yml
vendored
47
.github/workflows/tests-mcp.yml
vendored
@ -1,47 +0,0 @@
|
|||||||
name: "MCP CI"
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
|
|
||||||
types:
|
|
||||||
- opened
|
|
||||||
- synchronize
|
|
||||||
- ready_for_review
|
|
||||||
|
|
||||||
paths:
|
|
||||||
- 'mcp/**'
|
|
||||||
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
- main
|
|
||||||
|
|
||||||
paths:
|
|
||||||
- 'mcp/**'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-mcp:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Test MCP"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Setup
|
|
||||||
working-directory: ./mcp
|
|
||||||
run: ./scripts/setup
|
|
||||||
|
|
||||||
- name: Check
|
|
||||||
working-directory: ./mcp
|
|
||||||
run: |
|
|
||||||
pnpm run fmt:check;
|
|
||||||
pnpm -r run build;
|
|
||||||
pnpm -r run types:check;
|
|
||||||
363
.github/workflows/tests.yml
vendored
363
.github/workflows/tests.yml
vendored
@ -1,363 +0,0 @@
|
|||||||
name: "CI"
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types:
|
|
||||||
- opened
|
|
||||||
- synchronize
|
|
||||||
- ready_for_review
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- staging
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.event.pull_request.number || github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lint:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Linter"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Lint Common
|
|
||||||
working-directory: ./common
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
pnpm install;
|
|
||||||
pnpm run check-fmt:clj
|
|
||||||
pnpm run check-fmt:js
|
|
||||||
pnpm run lint:clj
|
|
||||||
|
|
||||||
- name: Lint Frontend
|
|
||||||
working-directory: ./frontend
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
pnpm install;
|
|
||||||
pnpm run check-fmt:js
|
|
||||||
pnpm run check-fmt:clj
|
|
||||||
pnpm run check-fmt:scss
|
|
||||||
pnpm run lint:clj
|
|
||||||
pnpm run lint:js
|
|
||||||
pnpm run lint:scss
|
|
||||||
|
|
||||||
- name: Lint Backend
|
|
||||||
working-directory: ./backend
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
pnpm install;
|
|
||||||
pnpm run check-fmt
|
|
||||||
pnpm run lint
|
|
||||||
|
|
||||||
- name: Lint Exporter
|
|
||||||
working-directory: ./exporter
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
pnpm install;
|
|
||||||
pnpm run check-fmt
|
|
||||||
pnpm run lint
|
|
||||||
|
|
||||||
- name: Lint Library
|
|
||||||
working-directory: ./library
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
pnpm install;
|
|
||||||
pnpm run check-fmt
|
|
||||||
pnpm run lint
|
|
||||||
|
|
||||||
test-common:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Common Tests"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run tests
|
|
||||||
working-directory: ./common
|
|
||||||
run: |
|
|
||||||
./scripts/test
|
|
||||||
|
|
||||||
test-plugins:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: Plugins Runtime Linter & Tests
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
id: setup-node
|
|
||||||
uses: actions/setup-node@v6
|
|
||||||
with:
|
|
||||||
node-version-file: .nvmrc
|
|
||||||
|
|
||||||
- name: Install deps
|
|
||||||
working-directory: ./plugins
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
corepack enable;
|
|
||||||
corepack install;
|
|
||||||
pnpm install;
|
|
||||||
|
|
||||||
- name: Run Lint
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run lint
|
|
||||||
|
|
||||||
- name: Run Format Check
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run format:check
|
|
||||||
|
|
||||||
- name: Run Test
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run test
|
|
||||||
|
|
||||||
- name: Build runtime
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run build:runtime
|
|
||||||
|
|
||||||
- name: Build doc
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run build:doc
|
|
||||||
|
|
||||||
- name: Build plugins
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run build:plugins
|
|
||||||
|
|
||||||
- name: Build styles
|
|
||||||
working-directory: ./plugins
|
|
||||||
run: pnpm run build:styles-example
|
|
||||||
|
|
||||||
test-frontend:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Frontend Tests"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Unit Tests
|
|
||||||
working-directory: ./frontend
|
|
||||||
run: |
|
|
||||||
./scripts/test
|
|
||||||
|
|
||||||
- name: Component Tests
|
|
||||||
working-directory: ./frontend
|
|
||||||
env:
|
|
||||||
VITEST_BROWSER_TIMEOUT: 120000
|
|
||||||
run: |
|
|
||||||
./scripts/test-components
|
|
||||||
|
|
||||||
test-render-wasm:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Render WASM Tests"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Format
|
|
||||||
working-directory: ./render-wasm
|
|
||||||
run: |
|
|
||||||
cargo fmt --check
|
|
||||||
|
|
||||||
- name: Lint
|
|
||||||
working-directory: ./render-wasm
|
|
||||||
run: |
|
|
||||||
./lint
|
|
||||||
|
|
||||||
- name: Test
|
|
||||||
working-directory: ./render-wasm
|
|
||||||
run: |
|
|
||||||
./test
|
|
||||||
|
|
||||||
test-backend:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Backend Tests"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:17
|
|
||||||
# Provide the password for postgres
|
|
||||||
env:
|
|
||||||
POSTGRES_USER: penpot_test
|
|
||||||
POSTGRES_PASSWORD: penpot_test
|
|
||||||
POSTGRES_DB: penpot_test
|
|
||||||
|
|
||||||
# Set health checks to wait until postgres has started
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
|
|
||||||
redis:
|
|
||||||
image: valkey/valkey:9
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run tests
|
|
||||||
working-directory: ./backend
|
|
||||||
env:
|
|
||||||
PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
|
|
||||||
PENPOT_TEST_DATABASE_USERNAME: penpot_test
|
|
||||||
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
|
|
||||||
PENPOT_TEST_REDIS_URI: "redis://redis/1"
|
|
||||||
|
|
||||||
run: |
|
|
||||||
clojure -M:dev:test --reporter kaocha.report/documentation
|
|
||||||
|
|
||||||
test-library:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Library Tests"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run tests
|
|
||||||
working-directory: ./library
|
|
||||||
run: |
|
|
||||||
./scripts/test
|
|
||||||
|
|
||||||
build-integration:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Build Integration Bundle"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Build Bundle
|
|
||||||
working-directory: ./frontend
|
|
||||||
run: |
|
|
||||||
./scripts/build
|
|
||||||
|
|
||||||
- name: Store Bundle Cache
|
|
||||||
uses: actions/cache@v5
|
|
||||||
with:
|
|
||||||
key: "integration-bundle-${{ github.sha }}"
|
|
||||||
path: frontend/resources/public
|
|
||||||
|
|
||||||
test-integration-1:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Integration Tests 1/3"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
needs: build-integration
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Restore Cache
|
|
||||||
uses: actions/cache/restore@v5
|
|
||||||
with:
|
|
||||||
key: "integration-bundle-${{ github.sha }}"
|
|
||||||
path: frontend/resources/public
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
working-directory: ./frontend
|
|
||||||
run: |
|
|
||||||
./scripts/test-e2e --shard="1/3";
|
|
||||||
|
|
||||||
- name: Upload test result
|
|
||||||
uses: actions/upload-artifact@v7
|
|
||||||
if: always()
|
|
||||||
with:
|
|
||||||
name: integration-tests-result-1
|
|
||||||
path: frontend/test-results/
|
|
||||||
overwrite: true
|
|
||||||
retention-days: 3
|
|
||||||
|
|
||||||
test-integration-2:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Integration Tests 2/3"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
needs: build-integration
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Restore Cache
|
|
||||||
uses: actions/cache/restore@v5
|
|
||||||
with:
|
|
||||||
key: "integration-bundle-${{ github.sha }}"
|
|
||||||
path: frontend/resources/public
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
working-directory: ./frontend
|
|
||||||
run: |
|
|
||||||
./scripts/test-e2e --shard="2/3";
|
|
||||||
|
|
||||||
- name: Upload test result
|
|
||||||
uses: actions/upload-artifact@v7
|
|
||||||
if: always()
|
|
||||||
with:
|
|
||||||
name: integration-tests-result-2
|
|
||||||
path: frontend/test-results/
|
|
||||||
overwrite: true
|
|
||||||
retention-days: 3
|
|
||||||
|
|
||||||
test-integration-3:
|
|
||||||
if: ${{ !github.event.pull_request.draft }}
|
|
||||||
name: "Integration Tests 3/3"
|
|
||||||
runs-on: penpot-runner-02
|
|
||||||
container: penpotapp/devenv:latest
|
|
||||||
needs: build-integration
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Repository
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Restore Cache
|
|
||||||
uses: actions/cache/restore@v5
|
|
||||||
with:
|
|
||||||
key: "integration-bundle-${{ github.sha }}"
|
|
||||||
path: frontend/resources/public
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
working-directory: ./frontend
|
|
||||||
run: |
|
|
||||||
./scripts/test-e2e --shard="3/3";
|
|
||||||
|
|
||||||
- name: Upload test result
|
|
||||||
uses: actions/upload-artifact@v7
|
|
||||||
if: always()
|
|
||||||
with:
|
|
||||||
name: integration-tests-result-3
|
|
||||||
path: frontend/test-results/
|
|
||||||
overwrite: true
|
|
||||||
retention-days: 3
|
|
||||||
40
.gitignore
vendored
40
.gitignore
vendored
@ -1,4 +1,10 @@
|
|||||||
.pnp.*
|
.pnp.*
|
||||||
|
.yarn/*
|
||||||
|
!.yarn/patches
|
||||||
|
!.yarn/plugins
|
||||||
|
!.yarn/releases
|
||||||
|
!.yarn/sdks
|
||||||
|
!.yarn/versions
|
||||||
*-init.clj
|
*-init.clj
|
||||||
*.css.json
|
*.css.json
|
||||||
*.jar
|
*.jar
|
||||||
@ -13,6 +19,7 @@
|
|||||||
.nyc_output
|
.nyc_output
|
||||||
.rebel_readline_history
|
.rebel_readline_history
|
||||||
.repl
|
.repl
|
||||||
|
.shadow-cljs
|
||||||
/*.jpg
|
/*.jpg
|
||||||
/*.md
|
/*.md
|
||||||
/*.png
|
/*.png
|
||||||
@ -23,14 +30,7 @@
|
|||||||
/*.zip
|
/*.zip
|
||||||
/.clj-kondo/.cache
|
/.clj-kondo/.cache
|
||||||
/_dump
|
/_dump
|
||||||
/notes
|
|
||||||
/.opencode/package-lock.json
|
|
||||||
/plans
|
|
||||||
/prompts
|
|
||||||
/playground/
|
|
||||||
/backend/*.md
|
/backend/*.md
|
||||||
!/backend/AGENTS.md
|
|
||||||
/backend/.shadow-cljs
|
|
||||||
/backend/*.sql
|
/backend/*.sql
|
||||||
/backend/*.txt
|
/backend/*.txt
|
||||||
/backend/assets/
|
/backend/assets/
|
||||||
@ -40,49 +40,39 @@
|
|||||||
/backend/resources/public/assets
|
/backend/resources/public/assets
|
||||||
/backend/resources/public/media
|
/backend/resources/public/media
|
||||||
/backend/target/
|
/backend/target/
|
||||||
/backend/experiments
|
|
||||||
/backend/scripts/_env.local
|
|
||||||
/bundle*
|
/bundle*
|
||||||
|
/cd.md
|
||||||
/clj-profiler/
|
/clj-profiler/
|
||||||
/common/coverage
|
/common/coverage
|
||||||
/common/target
|
/common/target
|
||||||
/common/.shadow-cljs
|
/deploy
|
||||||
/docker/images/bundle*
|
/docker/images/bundle*
|
||||||
/exporter/target
|
/exporter/target
|
||||||
/exporter/.shadow-cljs
|
|
||||||
/frontend/.storybook/preview-body.html
|
/frontend/.storybook/preview-body.html
|
||||||
/frontend/.storybook/preview-head.html
|
/frontend/.storybook/preview-head.html
|
||||||
/frontend/playwright-report/
|
/frontend/cypress/fixtures/validuser.json
|
||||||
/frontend/playwright/ui/visual-specs/
|
/frontend/cypress/videos/*/
|
||||||
/frontend/text-editor/src/wasm/
|
/frontend/cypress/videos/*/
|
||||||
/frontend/dist/
|
/frontend/dist/
|
||||||
/frontend/npm-debug.log
|
/frontend/npm-debug.log
|
||||||
/frontend/out/
|
/frontend/out/
|
||||||
/frontend/package-lock.json
|
/frontend/package-lock.json
|
||||||
/frontend/resources/fonts/experiments
|
/frontend/resources/fonts/experiments
|
||||||
/frontend/resources/public/*
|
/frontend/resources/public/*
|
||||||
/frontend/src/app/render_wasm/api/shared.js
|
|
||||||
/frontend/storybook-static/
|
/frontend/storybook-static/
|
||||||
/frontend/target/
|
/frontend/target/
|
||||||
/frontend/test-results/
|
|
||||||
/frontend/.shadow-cljs
|
|
||||||
/other/
|
/other/
|
||||||
/scripts/
|
/scripts/
|
||||||
/nexus/
|
/telemetry/
|
||||||
/tmp/
|
/tmp/
|
||||||
/vendor/**/target
|
/vendor/**/target
|
||||||
/vendor/svgclean/bundle*.js
|
/vendor/svgclean/bundle*.js
|
||||||
/web
|
/web
|
||||||
/library/target/
|
clj-profiler/
|
||||||
/library/*.zip
|
node_modules
|
||||||
/external
|
|
||||||
/penpot-nitrate
|
|
||||||
/test-results/
|
/test-results/
|
||||||
/playwright-report/
|
/playwright-report/
|
||||||
/blob-report/
|
/blob-report/
|
||||||
/playwright/.cache/
|
/playwright/.cache/
|
||||||
/render-wasm/target/
|
/render-wasm/target/
|
||||||
/**/node_modules
|
|
||||||
/**/.yarn/*
|
/**/.yarn/*
|
||||||
/.pnpm-store
|
|
||||||
/.vscode
|
|
||||||
|
|||||||
105
.gitpod.yml
Normal file
105
.gitpod.yml
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
image:
|
||||||
|
file: docker/gitpod/Dockerfile
|
||||||
|
|
||||||
|
ports:
|
||||||
|
# nginx
|
||||||
|
- port: 3449
|
||||||
|
onOpen: open-preview
|
||||||
|
|
||||||
|
# frontend nREPL
|
||||||
|
- port: 3447
|
||||||
|
onOpen: ignore
|
||||||
|
visibility: private
|
||||||
|
|
||||||
|
# frontend shadow server
|
||||||
|
- port: 3448
|
||||||
|
onOpen: ignore
|
||||||
|
visibility: private
|
||||||
|
|
||||||
|
# backend
|
||||||
|
- port: 6060
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
# exporter shadow server
|
||||||
|
- port: 9630
|
||||||
|
onOpen: ignore
|
||||||
|
visibility: private
|
||||||
|
|
||||||
|
# exporter http server
|
||||||
|
- port: 6061
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
# mailhog web interface
|
||||||
|
- port: 8025
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
# mailhog postfix
|
||||||
|
- port: 1025
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
# postgres
|
||||||
|
- port: 5432
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
# redis
|
||||||
|
- port: 6379
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
# openldap
|
||||||
|
- port: 389
|
||||||
|
onOpen: ignore
|
||||||
|
|
||||||
|
tasks:
|
||||||
|
# https://github.com/gitpod-io/gitpod/issues/666#issuecomment-534347856
|
||||||
|
- name: gulp
|
||||||
|
command: >
|
||||||
|
cd $GITPOD_REPO_ROOT/frontend/;
|
||||||
|
yarn && gp sync-done 'frontend-yarn';
|
||||||
|
npx gulp --theme=${PENPOT_THEME} watch
|
||||||
|
|
||||||
|
- name: frontend shadow watch
|
||||||
|
command: >
|
||||||
|
cd $GITPOD_REPO_ROOT/frontend/;
|
||||||
|
gp sync-await 'frontend-yarn';
|
||||||
|
npx shadow-cljs watch main
|
||||||
|
|
||||||
|
- init: gp await-port 5432 && psql -f $GITPOD_REPO_ROOT/docker/gitpod/files/postgresql_init.sql
|
||||||
|
name: backend
|
||||||
|
command: >
|
||||||
|
cd $GITPOD_REPO_ROOT/backend/;
|
||||||
|
./scripts/start-dev
|
||||||
|
|
||||||
|
- name: exporter shadow watch
|
||||||
|
command:
|
||||||
|
cd $GITPOD_REPO_ROOT/exporter/;
|
||||||
|
gp sync-await 'frontend-yarn';
|
||||||
|
yarn && npx shadow-cljs watch main
|
||||||
|
|
||||||
|
- name: exporter web server
|
||||||
|
command: >
|
||||||
|
cd $GITPOD_REPO_ROOT/exporter/;
|
||||||
|
./scripts/wait-and-start.sh
|
||||||
|
|
||||||
|
- name: signed terminal
|
||||||
|
before: >
|
||||||
|
[[ ! -z ${GNUGPG} ]] &&
|
||||||
|
cd ~ &&
|
||||||
|
rm -rf .gnupg &&
|
||||||
|
echo ${GNUGPG} | base64 -d | tar --no-same-owner -xzvf -
|
||||||
|
init: >
|
||||||
|
[[ ! -z ${GNUGPG_KEY} ]] &&
|
||||||
|
git config --global commit.gpgsign true &&
|
||||||
|
git config --global user.signingkey ${GNUGPG_KEY}
|
||||||
|
command: cd $GITPOD_REPO_ROOT
|
||||||
|
|
||||||
|
- name: redis
|
||||||
|
command: redis-server
|
||||||
|
|
||||||
|
- before: go get github.com/mailhog/MailHog
|
||||||
|
name: mailhog
|
||||||
|
command: MailHog
|
||||||
|
|
||||||
|
- name: Nginx
|
||||||
|
command: >
|
||||||
|
nginx &&
|
||||||
|
multitail /var/log/nginx/access.log -I /var/log/nginx/error.log
|
||||||
@ -1,33 +0,0 @@
|
|||||||
---
|
|
||||||
name: commiter
|
|
||||||
description: Git commit assistant following CONTRIBUTING.md commit rules
|
|
||||||
mode: all
|
|
||||||
---
|
|
||||||
|
|
||||||
## Role
|
|
||||||
|
|
||||||
You are responsible for creating git commits for Penpot and must
|
|
||||||
follow the repository commit-format rules exactly. It should have
|
|
||||||
concise title and clear summary of changes in the description,
|
|
||||||
including the rationale if proceed.
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
* Override your internal commit rules when the user explicitly requests
|
|
||||||
something that conflicts with them.
|
|
||||||
* Read `CONTRIBUTING.md` before creating any commit and follow the
|
|
||||||
commit guidelines strictly.
|
|
||||||
* Use commit messages in the form `:emoji: <imperative subject>`.
|
|
||||||
* Keep the subject capitalized, concise, 70 characters or fewer, and
|
|
||||||
without a trailing period.
|
|
||||||
* Keep the description (commit body) with maximum line length of 80
|
|
||||||
characters. Use manual line breaks to wrap text before it exceeds
|
|
||||||
this limit.
|
|
||||||
* Separate the subject from the body with a blank line.
|
|
||||||
* Write a clear and concise body when needed.
|
|
||||||
* Use `git commit -s` so the commit includes the required
|
|
||||||
`Signed-off-by` line.
|
|
||||||
* Do not guess or hallucinate git author information (Name or
|
|
||||||
Email). Never include the `--author` flag in git commands unless
|
|
||||||
specifically instructed by the user for a unique case; assume the
|
|
||||||
local environment is already configured.
|
|
||||||
@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
name: Penpot Engineer
|
|
||||||
description: Senior Full-Stack Software Engineer
|
|
||||||
mode: primary
|
|
||||||
---
|
|
||||||
|
|
||||||
Role: You are a high-autonomy Senior Full-Stack Software Engineer working on
|
|
||||||
Penpot, an open-source design tool. You have full permission to navigate the
|
|
||||||
codebase, modify files, and execute commands to fulfill your tasks. Your goal is
|
|
||||||
to solve complex technical tasks with high precision while maintaining a strong
|
|
||||||
focus on maintainability and performance.
|
|
||||||
|
|
||||||
Tech stack: Clojure (backend), ClojureScript (frontend/exporter), Rust/WASM
|
|
||||||
(render-wasm), TypeScript (plugins/mcp), SCSS.
|
|
||||||
|
|
||||||
Requirements:
|
|
||||||
|
|
||||||
* Read the root `AGENTS.md` to understand the repository and application
|
|
||||||
architecture. Then read the `AGENTS.md` **only** for each affected module.
|
|
||||||
Not all modules have one — verify before reading.
|
|
||||||
* Before writing code, analyze the task in depth and describe your plan. If the
|
|
||||||
task is complex, break it down into atomic steps.
|
|
||||||
* When searching code, prefer `ripgrep` (`rg`) over `grep` — it respects
|
|
||||||
`.gitignore` by default.
|
|
||||||
* Do **not** touch unrelated modules unless the task explicitly requires it.
|
|
||||||
* Only reference functions, namespaces, or APIs that actually exist in the
|
|
||||||
codebase. Verify their existence before citing them. If unsure, search first.
|
|
||||||
* Be concise and autonomous — avoid unnecessary explanations.
|
|
||||||
* After making changes, run the applicable lint and format checks for the
|
|
||||||
affected module before considering the work done (see module `AGENTS.md` for
|
|
||||||
exact commands).
|
|
||||||
* Make small and logical commits following the commit guideline described in
|
|
||||||
`CONTRIBUTING.md`. Commit only when explicitly asked.
|
|
||||||
- Do not guess or hallucinate git author information (Name or Email). Never include the
|
|
||||||
`--author` flag in git commands unless specifically instructed by the user for a unique
|
|
||||||
case; assume the local environment is already configured. Allow git commit to
|
|
||||||
automatically pull the identity from the local git config `user.name` and `user.email`.
|
|
||||||
@ -1,61 +0,0 @@
|
|||||||
---
|
|
||||||
name: Penpot Planner
|
|
||||||
description: Software architect for planning and analysis only
|
|
||||||
mode: primary
|
|
||||||
permission:
|
|
||||||
edit: ask
|
|
||||||
---
|
|
||||||
|
|
||||||
# Penpot Planner
|
|
||||||
|
|
||||||
## Role
|
|
||||||
|
|
||||||
You are a Senior Software Architect working on Penpot, an open-source design
|
|
||||||
tool. Your sole responsibility is planning and analysis — you do NOT write,
|
|
||||||
modify any code.
|
|
||||||
|
|
||||||
You help users understand the codebase, design solutions, and create detailed
|
|
||||||
implementation plans that other agents or developers can execute. Document
|
|
||||||
everything they need to know: which files to touch for each task, code, testing,
|
|
||||||
docs they might need to check, how to test it. Give them the whole plan as
|
|
||||||
bite-sized tasks. DRY. YAGNI. TDD. Frequent commits.
|
|
||||||
|
|
||||||
Assume they are a skilled developer, but know almost nothing about our toolset
|
|
||||||
or problem domain. Assume they don't know good test design very well.
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
* Analyze the codebase architecture and identify affected modules.
|
|
||||||
* Read `AGENTS.md` files (root and per-module) to understand structure and
|
|
||||||
conventions.
|
|
||||||
* Search code using `ripgrep` skill (`rg`) to trace dependencies, find patterns,
|
|
||||||
and understand existing implementations.
|
|
||||||
* Break down complex features or bugs into atomic, actionable steps.
|
|
||||||
* Propose solutions with clear rationale, trade-offs, and sequencing.
|
|
||||||
* Identify risks, edge cases, and testing considerations.
|
|
||||||
|
|
||||||
Save plans to: plans/YYYY-MM-DD-<plan-one-line-title>.md
|
|
||||||
|
|
||||||
## Constraints
|
|
||||||
|
|
||||||
* You are **read-only** — never create, edit, or delete files.
|
|
||||||
* You do **not** run builds, tests, linters, or any commands that modify state.
|
|
||||||
* You do **not** create git commits or interact with version control.
|
|
||||||
* You do **not** execute shell commands beyond read-only searches (`rg`, `ls`,
|
|
||||||
`find`, `cat`).
|
|
||||||
* Your output is a structured plan or analysis, ready for handoff to an
|
|
||||||
engineer agent or developer.
|
|
||||||
|
|
||||||
## Output format
|
|
||||||
|
|
||||||
When producing a plan, structure it as:
|
|
||||||
|
|
||||||
1. **Context** — What is the problem or feature request?
|
|
||||||
2. **Affected modules** — Which parts of the codebase are involved?
|
|
||||||
3. **Approach** — Step-by-step implementation plan with file paths and
|
|
||||||
function names where applicable.
|
|
||||||
4. **Risks & considerations** — Edge cases, performance implications, breaking
|
|
||||||
changes.
|
|
||||||
5. **Testing strategy** — How to verify the implementation works correctly.
|
|
||||||
|
|
||||||
|
|
||||||
@ -1,59 +0,0 @@
|
|||||||
---
|
|
||||||
name: Prompt Assistant
|
|
||||||
description: Refines and improves prompts for maximum clarity and effectiveness
|
|
||||||
mode: all
|
|
||||||
---
|
|
||||||
|
|
||||||
# Prompt Assistant
|
|
||||||
|
|
||||||
## Role
|
|
||||||
|
|
||||||
You are an expert Prompt Engineer with strong knowledge of
|
|
||||||
penpot. Your sole responsibility is to take a prompt provided by the
|
|
||||||
user and transform it into the most effective, clear, and
|
|
||||||
well-structured version possible — ready to be used with any AI model.
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
* You do NOT execute tasks. You do NOT write code. You only design and
|
|
||||||
refine prompts
|
|
||||||
* Read the root `AGENTS.md` to understand the repository and application
|
|
||||||
architecture. Then read the `AGENTS.md` **only** for each affected module.
|
|
||||||
* Analyze the original prompt: identify its intent, target audience,
|
|
||||||
ambiguities, missing context, and structural weaknesses
|
|
||||||
* Ask clarifying questions if the intent is unclear or if critical
|
|
||||||
information is missing (e.g. target model, expected output format,
|
|
||||||
tone, constraints). Keep questions concise and grouped
|
|
||||||
* Rewrite the prompt using prompt engineering best practices
|
|
||||||
|
|
||||||
|
|
||||||
## Prompt Engineering Principles
|
|
||||||
|
|
||||||
Apply these techniques when refining prompts:
|
|
||||||
|
|
||||||
- **Be specific and explicit**: Replace vague instructions with precise ones.
|
|
||||||
- **Set the context**: Include background information the model needs to
|
|
||||||
perform well.
|
|
||||||
- **Specify the output format**: State the desired structure, length, tone,
|
|
||||||
or format (e.g. bullet list, JSON, step-by-step).
|
|
||||||
- **Add constraints**: Include what the model should avoid or not do.
|
|
||||||
- **Use examples** (few-shot): When applicable, suggest adding examples to
|
|
||||||
anchor the model's behaviour.
|
|
||||||
- **Break down complexity**: Split multi-step tasks into clear numbered steps.
|
|
||||||
- **Avoid ambiguity**: Remove pronouns and references that could be
|
|
||||||
misinterpreted.
|
|
||||||
- **Chain of thought**: For reasoning tasks, include "Think step by step."
|
|
||||||
|
|
||||||
## Constraints
|
|
||||||
|
|
||||||
- Do NOT execute the prompt yourself.
|
|
||||||
- Do NOT answer the question inside the prompt.
|
|
||||||
- Do NOT add unnecessary verbosity — prompts should be as short as they can
|
|
||||||
be while remaining complete.
|
|
||||||
- Always preserve the user's original intent.
|
|
||||||
|
|
||||||
## Output
|
|
||||||
|
|
||||||
Refined Prompt: The improved, ready-to-use prompt. Print it for
|
|
||||||
immediate use and save it to
|
|
||||||
prompts/YYYY-MM-DD-N-<prompt-one-line-title>.md for future use.
|
|
||||||
@ -1,90 +0,0 @@
|
|||||||
---
|
|
||||||
name: backport-commit
|
|
||||||
description: Port changes from a specific Git commit to the current branch by manually applying the diff, avoiding cherry-pick when it would introduce complex conflicts.
|
|
||||||
---
|
|
||||||
|
|
||||||
# Backport Commit
|
|
||||||
|
|
||||||
Port changes from a specific Git commit to the current branch by manually
|
|
||||||
applying the diff, avoiding `git cherry-pick` when it would introduce
|
|
||||||
complex conflicts.
|
|
||||||
|
|
||||||
## When to Use
|
|
||||||
|
|
||||||
Use this skill whenever the user asks to backport a commit, especially when:
|
|
||||||
|
|
||||||
- The commit touches multiple modules or files with significant divergence
|
|
||||||
- `git cherry-pick` is explicitly ruled out ("do not use cherry-pick")
|
|
||||||
- The target commit is old enough that conflicts are likely
|
|
||||||
- The commit introduces both source changes AND new files (tests, etc.)
|
|
||||||
- You need full control over how each hunk is applied
|
|
||||||
|
|
||||||
## Workflow
|
|
||||||
|
|
||||||
### 1. Identify the target commit
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Verify the commit exists and understand what it does
|
|
||||||
git log --oneline -1 <commit-sha>
|
|
||||||
|
|
||||||
# Get the full diff (including new/deleted files)
|
|
||||||
git show <commit-sha>
|
|
||||||
|
|
||||||
# Capture the original commit message for later reuse
|
|
||||||
git log --format='%B' -1 <commit-sha>
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Identify affected modules
|
|
||||||
|
|
||||||
From the file paths in the diff, determine which Penpot modules are affected
|
|
||||||
(frontend, backend, common, render-wasm, etc.) and read their `AGENTS.md`
|
|
||||||
files **before** making any changes. If a module has no `AGENTS.md`, skip
|
|
||||||
that step — verify with `ls <module>/AGENTS.md` first.
|
|
||||||
|
|
||||||
### 3. Read the current state of each affected file
|
|
||||||
|
|
||||||
For every file the diff touches, read the current version on disk to understand
|
|
||||||
context and ensure correct placement before editing.
|
|
||||||
|
|
||||||
### 4. Apply changes manually (the core of this approach)
|
|
||||||
|
|
||||||
Process every hunk in the diff using the appropriate tool:
|
|
||||||
|
|
||||||
| Diff action | Tool to use |
|
|
||||||
|-------------|-------------|
|
|
||||||
| Modify existing file | `edit` — use enough surrounding context in `oldString` to uniquely match the location |
|
|
||||||
| Add new file | `write` — include proper license header and namespace conventions matching project style |
|
|
||||||
| Delete file | `bash rm <path>` |
|
|
||||||
| Rename/move file | `bash mv <old> <new>`, then apply any content changes with `edit` |
|
|
||||||
|
|
||||||
> **Tip:** Group nearby hunks from the same file into a single `edit` call.
|
|
||||||
> Use separate calls when hunks are far apart to keep `oldString` short and
|
|
||||||
> unambiguous.
|
|
||||||
|
|
||||||
Repeat until **all** hunks in the diff are ported.
|
|
||||||
|
|
||||||
### 5. Validate
|
|
||||||
|
|
||||||
Run **lint**, **check-fmt**, and **tests** for every affected module (see each
|
|
||||||
module's `AGENTS.md` for the exact commands). If the formatter auto-fixes
|
|
||||||
indentation, verify the logic is still semantically correct. All checks must
|
|
||||||
pass before moving on.
|
|
||||||
|
|
||||||
### 6. Port the changelog entry (if any)
|
|
||||||
|
|
||||||
If the original commit added or modified a `CHANGES.md` entry, port that entry
|
|
||||||
too — adapting wording and version references for the target branch.
|
|
||||||
|
|
||||||
### 7. Commit
|
|
||||||
|
|
||||||
Ask the `commiter` sub-agent to create a commit. Stage all relevant files
|
|
||||||
(exclude unrelated untracked files) and provide the original commit message as
|
|
||||||
a reference, adapting it as needed for the target branch context.
|
|
||||||
|
|
||||||
## Key Principles
|
|
||||||
|
|
||||||
- **Context matters** — always read files before editing; never guess
|
|
||||||
indentation or surrounding code
|
|
||||||
- **Lint + format + test** — never skip validation before committing
|
|
||||||
- **Preserve intent** — keep the original commit message meaning; the
|
|
||||||
`commiter` agent handles formatting
|
|
||||||
@ -1,210 +0,0 @@
|
|||||||
---
|
|
||||||
name: bat-cat
|
|
||||||
description: A cat clone with syntax highlighting, line numbers, and Git integration - a modern replacement for cat.
|
|
||||||
homepage: https://github.com/sharkdp/bat
|
|
||||||
metadata: {"clawdbot":{"emoji":"🦇","requires":{"bins":["bat"]},"install":[{"id":"brew","kind":"brew","formula":"bat","bins":["bat"],"label":"Install bat (brew)"},{"id":"apt","kind":"apt","package":"bat","bins":["bat"],"label":"Install bat (apt)"}]}}
|
|
||||||
---
|
|
||||||
|
|
||||||
# bat - Better cat
|
|
||||||
|
|
||||||
`cat` with syntax highlighting, line numbers, and Git integration.
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
### Basic usage
|
|
||||||
```bash
|
|
||||||
# View file with syntax highlighting
|
|
||||||
bat README.md
|
|
||||||
|
|
||||||
# Multiple files
|
|
||||||
bat file1.js file2.py
|
|
||||||
|
|
||||||
# With line numbers (default)
|
|
||||||
bat script.sh
|
|
||||||
|
|
||||||
# Without line numbers
|
|
||||||
bat -p script.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
### Viewing modes
|
|
||||||
```bash
|
|
||||||
# Plain mode (like cat)
|
|
||||||
bat -p file.txt
|
|
||||||
|
|
||||||
# Show non-printable characters
|
|
||||||
bat -A file.txt
|
|
||||||
|
|
||||||
# Squeeze blank lines
|
|
||||||
bat -s file.txt
|
|
||||||
|
|
||||||
# Paging (auto for large files)
|
|
||||||
bat --paging=always file.txt
|
|
||||||
bat --paging=never file.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
## Syntax Highlighting
|
|
||||||
|
|
||||||
### Language detection
|
|
||||||
```bash
|
|
||||||
# Auto-detect from extension
|
|
||||||
bat script.py
|
|
||||||
|
|
||||||
# Force specific language
|
|
||||||
bat -l javascript config.txt
|
|
||||||
|
|
||||||
# Show all languages
|
|
||||||
bat --list-languages
|
|
||||||
```
|
|
||||||
|
|
||||||
### Themes
|
|
||||||
```bash
|
|
||||||
# List available themes
|
|
||||||
bat --list-themes
|
|
||||||
|
|
||||||
# Use specific theme
|
|
||||||
bat --theme="Monokai Extended" file.py
|
|
||||||
|
|
||||||
# Set default theme in config
|
|
||||||
# ~/.config/bat/config: --theme="Dracula"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Line Ranges
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Show specific lines
|
|
||||||
bat -r 10:20 file.txt
|
|
||||||
|
|
||||||
# From line to end
|
|
||||||
bat -r 100: file.txt
|
|
||||||
|
|
||||||
# Start to specific line
|
|
||||||
bat -r :50 file.txt
|
|
||||||
|
|
||||||
# Multiple ranges
|
|
||||||
bat -r 1:10 -r 50:60 file.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
## Git Integration
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Show Git modifications (added/removed/modified lines)
|
|
||||||
bat --diff file.txt
|
|
||||||
|
|
||||||
# Show decorations (Git + file header)
|
|
||||||
bat --decorations=always file.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
## Output Control
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Output raw (no styling)
|
|
||||||
bat --style=plain file.txt
|
|
||||||
|
|
||||||
# Customize style
|
|
||||||
bat --style=numbers,changes file.txt
|
|
||||||
|
|
||||||
# Available styles: auto, full, plain, changes, header, grid, numbers, snip
|
|
||||||
bat --style=header,grid,numbers file.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
## Common Use Cases
|
|
||||||
|
|
||||||
**Quick file preview:**
|
|
||||||
```bash
|
|
||||||
bat file.json
|
|
||||||
```
|
|
||||||
|
|
||||||
**View logs with syntax highlighting:**
|
|
||||||
```bash
|
|
||||||
bat error.log
|
|
||||||
```
|
|
||||||
|
|
||||||
**Compare files visually:**
|
|
||||||
```bash
|
|
||||||
bat --diff file1.txt
|
|
||||||
bat file2.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
**Preview before editing:**
|
|
||||||
```bash
|
|
||||||
bat config.yaml && vim config.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
**Cat replacement in pipes:**
|
|
||||||
```bash
|
|
||||||
bat -p file.txt | grep "pattern"
|
|
||||||
```
|
|
||||||
|
|
||||||
**View specific function:**
|
|
||||||
```bash
|
|
||||||
bat -r 45:67 script.py # If function is on lines 45-67
|
|
||||||
```
|
|
||||||
|
|
||||||
## Integration with other tools
|
|
||||||
|
|
||||||
**As pager for man pages:**
|
|
||||||
```bash
|
|
||||||
export MANPAGER="sh -c 'col -bx | bat -l man -p'"
|
|
||||||
man grep
|
|
||||||
```
|
|
||||||
|
|
||||||
**With ripgrep:**
|
|
||||||
```bash
|
|
||||||
rg "pattern" -l | xargs bat
|
|
||||||
```
|
|
||||||
|
|
||||||
**With fzf:**
|
|
||||||
```bash
|
|
||||||
fzf --preview 'bat --color=always --style=numbers {}'
|
|
||||||
```
|
|
||||||
|
|
||||||
**With diff:**
|
|
||||||
```bash
|
|
||||||
diff -u file1 file2 | bat -l diff
|
|
||||||
```
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
Create `~/.config/bat/config` for defaults:
|
|
||||||
|
|
||||||
```
|
|
||||||
# Set theme
|
|
||||||
--theme="Dracula"
|
|
||||||
|
|
||||||
# Show line numbers, Git modifications and file header, but no grid
|
|
||||||
--style="numbers,changes,header"
|
|
||||||
|
|
||||||
# Use italic text on terminal
|
|
||||||
--italic-text=always
|
|
||||||
|
|
||||||
# Add custom mapping
|
|
||||||
--map-syntax "*.conf:INI"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Performance Tips
|
|
||||||
|
|
||||||
- Use `-p` for plain mode when piping
|
|
||||||
- Use `--paging=never` when output is used programmatically
|
|
||||||
- `bat` caches parsed files for faster subsequent access
|
|
||||||
|
|
||||||
## Tips
|
|
||||||
|
|
||||||
- **Alias:** `alias cat='bat -p'` for drop-in cat replacement
|
|
||||||
- **Pager:** Use as pager with `export PAGER="bat"`
|
|
||||||
- **On Debian/Ubuntu:** Command may be `batcat` instead of `bat`
|
|
||||||
- **Custom syntaxes:** Add to `~/.config/bat/syntaxes/`
|
|
||||||
- **Performance:** For huge files, use `bat --paging=never` or plain `cat`
|
|
||||||
|
|
||||||
## Common flags
|
|
||||||
|
|
||||||
- `-p` / `--plain`: Plain mode (no line numbers/decorations)
|
|
||||||
- `-n` / `--number`: Only show line numbers
|
|
||||||
- `-A` / `--show-all`: Show non-printable characters
|
|
||||||
- `-l` / `--language`: Set language for syntax highlighting
|
|
||||||
- `-r` / `--line-range`: Only show specific line range(s)
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
GitHub: https://github.com/sharkdp/bat
|
|
||||||
Man page: `man bat`
|
|
||||||
Customization: https://github.com/sharkdp/bat#customization
|
|
||||||
@ -1,194 +0,0 @@
|
|||||||
---
|
|
||||||
name: fd-find
|
|
||||||
description: A fast and user-friendly alternative to 'find' - simple syntax, smart defaults, respects gitignore.
|
|
||||||
homepage: https://github.com/sharkdp/fd
|
|
||||||
metadata: {"clawdbot":{"emoji":"📂","requires":{"bins":["fd"]},"install":[{"id":"brew","kind":"brew","formula":"fd","bins":["fd"],"label":"Install fd (brew)"},{"id":"apt","kind":"apt","package":"fd-find","bins":["fd"],"label":"Install fd (apt)"}]}}
|
|
||||||
---
|
|
||||||
|
|
||||||
# fd - Fast File Finder
|
|
||||||
|
|
||||||
User-friendly alternative to `find` with smart defaults.
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
### Basic search
|
|
||||||
```bash
|
|
||||||
# Find files by name
|
|
||||||
fd pattern
|
|
||||||
|
|
||||||
# Find in specific directory
|
|
||||||
fd pattern /path/to/dir
|
|
||||||
|
|
||||||
# Case-insensitive
|
|
||||||
fd -i pattern
|
|
||||||
```
|
|
||||||
|
|
||||||
### Common patterns
|
|
||||||
```bash
|
|
||||||
# Find all Python files
|
|
||||||
fd -e py
|
|
||||||
|
|
||||||
# Find multiple extensions
|
|
||||||
fd -e py -e js -e ts
|
|
||||||
|
|
||||||
# Find directories only
|
|
||||||
fd -t d pattern
|
|
||||||
|
|
||||||
# Find files only
|
|
||||||
fd -t f pattern
|
|
||||||
|
|
||||||
# Find symlinks
|
|
||||||
fd -t l
|
|
||||||
```
|
|
||||||
|
|
||||||
## Advanced Usage
|
|
||||||
|
|
||||||
### Filtering
|
|
||||||
```bash
|
|
||||||
# Exclude patterns
|
|
||||||
fd pattern -E "node_modules" -E "*.min.js"
|
|
||||||
|
|
||||||
# Include hidden files
|
|
||||||
fd -H pattern
|
|
||||||
|
|
||||||
# Include ignored files (.gitignore)
|
|
||||||
fd -I pattern
|
|
||||||
|
|
||||||
# Search all (hidden + ignored)
|
|
||||||
fd -H -I pattern
|
|
||||||
|
|
||||||
# Maximum depth
|
|
||||||
fd pattern -d 3
|
|
||||||
```
|
|
||||||
|
|
||||||
### Execution
|
|
||||||
```bash
|
|
||||||
# Execute command on results
|
|
||||||
fd -e jpg -x convert {} {.}.png
|
|
||||||
|
|
||||||
# Parallel execution
|
|
||||||
fd -e md -x wc -l
|
|
||||||
|
|
||||||
# Use with xargs
|
|
||||||
fd -e log -0 | xargs -0 rm
|
|
||||||
```
|
|
||||||
|
|
||||||
### Regex patterns
|
|
||||||
```bash
|
|
||||||
# Full regex search
|
|
||||||
fd '^test.*\.js$'
|
|
||||||
|
|
||||||
# Match full path
|
|
||||||
fd --full-path 'src/.*/test'
|
|
||||||
|
|
||||||
# Glob pattern
|
|
||||||
fd -g "*.{js,ts}"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Time-based filtering
|
|
||||||
```bash
|
|
||||||
# Modified within last day
|
|
||||||
fd --changed-within 1d
|
|
||||||
|
|
||||||
# Modified before specific date
|
|
||||||
fd --changed-before 2024-01-01
|
|
||||||
|
|
||||||
# Created recently
|
|
||||||
fd --changed-within 1h
|
|
||||||
```
|
|
||||||
|
|
||||||
## Size filtering
|
|
||||||
```bash
|
|
||||||
# Files larger than 10MB
|
|
||||||
fd --size +10m
|
|
||||||
|
|
||||||
# Files smaller than 1KB
|
|
||||||
fd --size -1k
|
|
||||||
|
|
||||||
# Specific size range
|
|
||||||
fd --size +100k --size -10m
|
|
||||||
```
|
|
||||||
|
|
||||||
## Output formatting
|
|
||||||
```bash
|
|
||||||
# Absolute paths
|
|
||||||
fd --absolute-path
|
|
||||||
|
|
||||||
# List format (like ls -l)
|
|
||||||
fd --list-details
|
|
||||||
|
|
||||||
# Null separator (for xargs)
|
|
||||||
fd -0 pattern
|
|
||||||
|
|
||||||
# Color always/never/auto
|
|
||||||
fd --color always pattern
|
|
||||||
```
|
|
||||||
|
|
||||||
## Common Use Cases
|
|
||||||
|
|
||||||
**Find and delete old files:**
|
|
||||||
```bash
|
|
||||||
fd --changed-before 30d -t f -x rm {}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Find large files:**
|
|
||||||
```bash
|
|
||||||
fd --size +100m --list-details
|
|
||||||
```
|
|
||||||
|
|
||||||
**Copy all PDFs to directory:**
|
|
||||||
```bash
|
|
||||||
fd -e pdf -x cp {} /target/dir/
|
|
||||||
```
|
|
||||||
|
|
||||||
**Count lines in all Python files:**
|
|
||||||
```bash
|
|
||||||
fd -e py -x wc -l | awk '{sum+=$1} END {print sum}'
|
|
||||||
```
|
|
||||||
|
|
||||||
**Find broken symlinks:**
|
|
||||||
```bash
|
|
||||||
fd -t l -x test -e {} \; -print
|
|
||||||
```
|
|
||||||
|
|
||||||
**Search in specific time window:**
|
|
||||||
```bash
|
|
||||||
fd --changed-within 2d --changed-before 1d
|
|
||||||
```
|
|
||||||
|
|
||||||
## Integration with other tools
|
|
||||||
|
|
||||||
**With ripgrep:**
|
|
||||||
```bash
|
|
||||||
fd -e js | xargs rg "pattern"
|
|
||||||
```
|
|
||||||
|
|
||||||
**With fzf (fuzzy finder):**
|
|
||||||
```bash
|
|
||||||
vim $(fd -t f | fzf)
|
|
||||||
```
|
|
||||||
|
|
||||||
**With bat (cat alternative):**
|
|
||||||
```bash
|
|
||||||
fd -e md | xargs bat
|
|
||||||
```
|
|
||||||
|
|
||||||
## Performance Tips
|
|
||||||
|
|
||||||
- `fd` is typically much faster than `find`
|
|
||||||
- Respects `.gitignore` by default (disable with `-I`)
|
|
||||||
- Uses parallel traversal automatically
|
|
||||||
- Smart case: lowercase = case-insensitive, any uppercase = case-sensitive
|
|
||||||
|
|
||||||
## Tips
|
|
||||||
|
|
||||||
- Use `-t` for type filtering (f=file, d=directory, l=symlink, x=executable)
|
|
||||||
- `-e` for extension is simpler than `-g "*.ext"`
|
|
||||||
- `{}` in `-x` commands represents the found path
|
|
||||||
- `{.}` strips the extension
|
|
||||||
- `{/}` gets basename, `{//}` gets directory
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
GitHub: https://github.com/sharkdp/fd
|
|
||||||
Man page: `man fd`
|
|
||||||
@ -1,112 +0,0 @@
|
|||||||
---
|
|
||||||
name: jq-json-processor
|
|
||||||
description: Process, filter, and transform JSON data using jq - the lightweight and flexible command-line JSON processor.
|
|
||||||
homepage: https://jqlang.github.io/jq/
|
|
||||||
metadata: {"clawdbot":{"emoji":"🔍","requires":{"bins":["jq"]},"install":[{"id":"brew","kind":"brew","formula":"jq","bins":["jq"],"label":"Install jq (brew)"},{"id":"apt","kind":"apt","package":"jq","bins":["jq"],"label":"Install jq (apt)"}]}}
|
|
||||||
---
|
|
||||||
|
|
||||||
# jq JSON Processor
|
|
||||||
|
|
||||||
Process, filter, and transform JSON data with jq.
|
|
||||||
|
|
||||||
## Quick Examples
|
|
||||||
|
|
||||||
### Basic filtering
|
|
||||||
```bash
|
|
||||||
# Extract a field
|
|
||||||
echo '{"name":"Alice","age":30}' | jq '.name'
|
|
||||||
# Output: "Alice"
|
|
||||||
|
|
||||||
# Multiple fields
|
|
||||||
echo '{"name":"Alice","age":30}' | jq '{name: .name, age: .age}'
|
|
||||||
|
|
||||||
# Array indexing
|
|
||||||
echo '[1,2,3,4,5]' | jq '.[2]'
|
|
||||||
# Output: 3
|
|
||||||
```
|
|
||||||
|
|
||||||
### Working with arrays
|
|
||||||
```bash
|
|
||||||
# Map over array
|
|
||||||
echo '[{"name":"Alice"},{"name":"Bob"}]' | jq '.[].name'
|
|
||||||
# Output: "Alice" "Bob"
|
|
||||||
|
|
||||||
# Filter array
|
|
||||||
echo '[1,2,3,4,5]' | jq 'map(select(. > 2))'
|
|
||||||
# Output: [3,4,5]
|
|
||||||
|
|
||||||
# Length
|
|
||||||
echo '[1,2,3]' | jq 'length'
|
|
||||||
# Output: 3
|
|
||||||
```
|
|
||||||
|
|
||||||
### Common operations
|
|
||||||
```bash
|
|
||||||
# Pretty print JSON
|
|
||||||
cat file.json | jq '.'
|
|
||||||
|
|
||||||
# Compact output
|
|
||||||
cat file.json | jq -c '.'
|
|
||||||
|
|
||||||
# Raw output (no quotes)
|
|
||||||
echo '{"name":"Alice"}' | jq -r '.name'
|
|
||||||
# Output: Alice
|
|
||||||
|
|
||||||
# Sort keys
|
|
||||||
echo '{"z":1,"a":2}' | jq -S '.'
|
|
||||||
```
|
|
||||||
|
|
||||||
### Advanced filtering
|
|
||||||
```bash
|
|
||||||
# Select with conditions
|
|
||||||
jq '[.[] | select(.age > 25)]' people.json
|
|
||||||
|
|
||||||
# Group by
|
|
||||||
jq 'group_by(.category)' items.json
|
|
||||||
|
|
||||||
# Reduce
|
|
||||||
echo '[1,2,3,4,5]' | jq 'reduce .[] as $item (0; . + $item)'
|
|
||||||
# Output: 15
|
|
||||||
```
|
|
||||||
|
|
||||||
### Working with files
|
|
||||||
```bash
|
|
||||||
# Read from file
|
|
||||||
jq '.users[0].name' users.json
|
|
||||||
|
|
||||||
# Multiple files
|
|
||||||
jq -s '.[0] * .[1]' file1.json file2.json
|
|
||||||
|
|
||||||
# Modify and save
|
|
||||||
jq '.version = "2.0"' package.json > package.json.tmp && mv package.json.tmp package.json
|
|
||||||
```
|
|
||||||
|
|
||||||
## Common Use Cases
|
|
||||||
|
|
||||||
**Extract specific fields from API response:**
|
|
||||||
```bash
|
|
||||||
curl -s https://api.github.com/users/octocat | jq '{name: .name, repos: .public_repos, followers: .followers}'
|
|
||||||
```
|
|
||||||
|
|
||||||
**Convert CSV-like data:**
|
|
||||||
```bash
|
|
||||||
jq -r '.[] | [.name, .email, .age] | @csv' users.json
|
|
||||||
```
|
|
||||||
|
|
||||||
**Debug API responses:**
|
|
||||||
```bash
|
|
||||||
curl -s https://api.example.com/data | jq '.'
|
|
||||||
```
|
|
||||||
|
|
||||||
## Tips
|
|
||||||
|
|
||||||
- Use `-r` for raw string output (removes quotes)
|
|
||||||
- Use `-c` for compact output (single line)
|
|
||||||
- Use `-S` to sort object keys
|
|
||||||
- Use `--arg name value` to pass variables
|
|
||||||
- Pipe multiple jq operations: `jq '.a' | jq '.b'`
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
Full manual: https://jqlang.github.io/jq/manual/
|
|
||||||
Interactive tutorial: https://jqplay.org/
|
|
||||||
@ -1,150 +0,0 @@
|
|||||||
---
|
|
||||||
name: ripgrep
|
|
||||||
description: Blazingly fast text search tool - recursively searches directories for regex patterns with respect to gitignore rules.
|
|
||||||
homepage: https://github.com/BurntSushi/ripgrep
|
|
||||||
metadata: {"clawdbot":{"emoji":"🔎","requires":{"bins":["rg"]},"install":[{"id":"brew","kind":"brew","formula":"ripgrep","bins":["rg"],"label":"Install ripgrep (brew)"},{"id":"apt","kind":"apt","package":"ripgrep","bins":["rg"],"label":"Install ripgrep (apt)"}]}}
|
|
||||||
---
|
|
||||||
|
|
||||||
# ripgrep (rg)
|
|
||||||
|
|
||||||
Fast, smart recursive search. Respects `.gitignore` by default.
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
### Basic search
|
|
||||||
```bash
|
|
||||||
# Search for "TODO" in current directory
|
|
||||||
rg "TODO"
|
|
||||||
|
|
||||||
# Case-insensitive search
|
|
||||||
rg -i "fixme"
|
|
||||||
|
|
||||||
# Search specific file types
|
|
||||||
rg "error" -t py # Python files only
|
|
||||||
rg "function" -t js # JavaScript files
|
|
||||||
```
|
|
||||||
|
|
||||||
### Common patterns
|
|
||||||
```bash
|
|
||||||
# Whole word match
|
|
||||||
rg -w "test"
|
|
||||||
|
|
||||||
# Show only filenames
|
|
||||||
rg -l "pattern"
|
|
||||||
|
|
||||||
# Show with context (3 lines before/after)
|
|
||||||
rg -C 3 "function"
|
|
||||||
|
|
||||||
# Count matches
|
|
||||||
rg -c "import"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Advanced Usage
|
|
||||||
|
|
||||||
### File type filtering
|
|
||||||
```bash
|
|
||||||
# Multiple file types
|
|
||||||
rg "error" -t py -t js
|
|
||||||
|
|
||||||
# Exclude file types
|
|
||||||
rg "TODO" -T md -T txt
|
|
||||||
|
|
||||||
# List available types
|
|
||||||
rg --type-list
|
|
||||||
```
|
|
||||||
|
|
||||||
### Search modifiers
|
|
||||||
```bash
|
|
||||||
# Regex search
|
|
||||||
rg "user_\d+"
|
|
||||||
|
|
||||||
# Fixed string (no regex)
|
|
||||||
rg -F "function()"
|
|
||||||
|
|
||||||
# Multiline search
|
|
||||||
rg -U "start.*end"
|
|
||||||
|
|
||||||
# Only show matches, not lines
|
|
||||||
rg -o "https?://[^\s]+"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Path filtering
|
|
||||||
```bash
|
|
||||||
# Search specific directory
|
|
||||||
rg "pattern" src/
|
|
||||||
|
|
||||||
# Glob patterns
|
|
||||||
rg "error" -g "*.log"
|
|
||||||
rg "test" -g "!*.min.js"
|
|
||||||
|
|
||||||
# Include hidden files
|
|
||||||
rg "secret" --hidden
|
|
||||||
|
|
||||||
# Search all files (ignore .gitignore)
|
|
||||||
rg "pattern" --no-ignore
|
|
||||||
```
|
|
||||||
|
|
||||||
## Replacement Operations
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Preview replacements
|
|
||||||
rg "old_name" --replace "new_name"
|
|
||||||
|
|
||||||
# Actually replace (requires extra tool like sd)
|
|
||||||
rg "old_name" -l | xargs sed -i 's/old_name/new_name/g'
|
|
||||||
```
|
|
||||||
|
|
||||||
## Performance Tips
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Parallel search (auto by default)
|
|
||||||
rg "pattern" -j 8
|
|
||||||
|
|
||||||
# Skip large files
|
|
||||||
rg "pattern" --max-filesize 10M
|
|
||||||
|
|
||||||
# Memory map files
|
|
||||||
rg "pattern" --mmap
|
|
||||||
```
|
|
||||||
|
|
||||||
## Common Use Cases
|
|
||||||
|
|
||||||
**Find TODOs in code:**
|
|
||||||
```bash
|
|
||||||
rg "TODO|FIXME|HACK" --type-add 'code:*.{rs,go,py,js,ts}' -t code
|
|
||||||
```
|
|
||||||
|
|
||||||
**Search in specific branches:**
|
|
||||||
```bash
|
|
||||||
git show branch:file | rg "pattern"
|
|
||||||
```
|
|
||||||
|
|
||||||
**Find files containing multiple patterns:**
|
|
||||||
```bash
|
|
||||||
rg "pattern1" | rg "pattern2"
|
|
||||||
```
|
|
||||||
|
|
||||||
**Search with context and color:**
|
|
||||||
```bash
|
|
||||||
rg -C 2 --color always "error" | less -R
|
|
||||||
```
|
|
||||||
|
|
||||||
## Comparison to grep
|
|
||||||
|
|
||||||
- **Faster:** Typically 5-10x faster than grep
|
|
||||||
- **Smarter:** Respects `.gitignore`, skips binary files
|
|
||||||
- **Better defaults:** Recursive, colored output, line numbers
|
|
||||||
- **Easier:** Simpler syntax for common tasks
|
|
||||||
|
|
||||||
## Tips
|
|
||||||
|
|
||||||
- `rg` is often faster than `grep -r`
|
|
||||||
- Use `-t` for file type filtering instead of `--include`
|
|
||||||
- Combine with other tools: `rg pattern -l | xargs tool`
|
|
||||||
- Add custom types in `~/.ripgreprc`
|
|
||||||
- Use `--stats` to see search performance
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
GitHub: https://github.com/BurntSushi/ripgrep
|
|
||||||
User Guide: https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md
|
|
||||||
40
.travis.yml
Normal file
40
.travis.yml
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
dist: xenial
|
||||||
|
|
||||||
|
language: generic
|
||||||
|
sudo: required
|
||||||
|
|
||||||
|
cache:
|
||||||
|
directories:
|
||||||
|
- $HOME/.m2
|
||||||
|
|
||||||
|
services:
|
||||||
|
- docker
|
||||||
|
|
||||||
|
branches:
|
||||||
|
only:
|
||||||
|
- master
|
||||||
|
- develop
|
||||||
|
|
||||||
|
install:
|
||||||
|
- curl -O https://download.clojure.org/install/linux-install-1.10.1.447.sh
|
||||||
|
- chmod +x linux-install-1.10.1.447.sh
|
||||||
|
- sudo ./linux-install-1.10.1.447.sh
|
||||||
|
|
||||||
|
before_script:
|
||||||
|
- env | sort
|
||||||
|
|
||||||
|
script:
|
||||||
|
- ./manage.sh build-devenv
|
||||||
|
- ./manage.sh run-frontend-tests
|
||||||
|
- ./manage.sh run-backend-tests
|
||||||
|
- ./manage.sh build-images
|
||||||
|
- ./manage.sh run
|
||||||
|
|
||||||
|
after_script:
|
||||||
|
- docker images
|
||||||
|
|
||||||
|
notifications:
|
||||||
|
email: false
|
||||||
|
|
||||||
|
env:
|
||||||
|
- NODE_VERSION=10.16.0
|
||||||
9
.vscode/settings.json
vendored
Normal file
9
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"files.exclude": {
|
||||||
|
"**/.clj-kondo": true,
|
||||||
|
"**/.cpcache": true,
|
||||||
|
"**/.lsp": true,
|
||||||
|
"**/.shadow-cljs": true,
|
||||||
|
"**/node_modules": true
|
||||||
|
}
|
||||||
|
}
|
||||||
11
.yarnrc.yml
Normal file
11
.yarnrc.yml
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
enableGlobalCache: true
|
||||||
|
|
||||||
|
enableImmutableCache: false
|
||||||
|
|
||||||
|
enableImmutableInstalls: false
|
||||||
|
|
||||||
|
enableTelemetry: false
|
||||||
|
|
||||||
|
httpTimeout: 600000
|
||||||
|
|
||||||
|
nodeLinker: node-modules
|
||||||
93
AGENTS.md
93
AGENTS.md
@ -1,93 +0,0 @@
|
|||||||
# AI Agent Guide
|
|
||||||
|
|
||||||
This document provides the core context and operating guidelines for AI agents
|
|
||||||
working in this repository.
|
|
||||||
|
|
||||||
## Before You Start
|
|
||||||
|
|
||||||
Before responding to any user request, you must:
|
|
||||||
|
|
||||||
1. Read this file completely.
|
|
||||||
2. Identify which modules are affected by the task.
|
|
||||||
3. Load the `AGENTS.md` file **only** for each affected module (see the
|
|
||||||
architecture table below). Not all modules have an `AGENTS.md` — verify the
|
|
||||||
file exists before attempting to read it.
|
|
||||||
4. Do **not** load `AGENTS.md` files for unrelated modules.
|
|
||||||
|
|
||||||
## Role: Senior Software Engineer
|
|
||||||
|
|
||||||
You are a high-autonomy Senior Full-Stack Software Engineer. You have full
|
|
||||||
permission to navigate the codebase, modify files, and execute commands to
|
|
||||||
fulfill your tasks. Your goal is to solve complex technical tasks with high
|
|
||||||
precision while maintaining a strong focus on maintainability and performance.
|
|
||||||
|
|
||||||
### Operational Guidelines
|
|
||||||
|
|
||||||
1. Before writing code, describe your plan. If the task is complex, break it
|
|
||||||
down into atomic steps.
|
|
||||||
2. Be concise and autonomous.
|
|
||||||
3. Do **not** touch unrelated modules unless the task explicitly requires it.
|
|
||||||
4. Commit only when explicitly asked. Follow the commit format rules in
|
|
||||||
`CONTRIBUTING.md`.
|
|
||||||
5. When searching code, prefer `ripgrep` (`rg`) over `grep` — it respects
|
|
||||||
`.gitignore` by default.
|
|
||||||
|
|
||||||
## GitHub Operations
|
|
||||||
|
|
||||||
To obtain the list of repository members/collaborators:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
gh api repos/:owner/:repo/collaborators --paginate --jq '.[].login'
|
|
||||||
```
|
|
||||||
|
|
||||||
To obtain the list of open PRs authored by members:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
MEMBERS=$(gh api repos/:owner/:repo/collaborators --paginate --jq '.[].login' | tr '\n' '|' | sed 's/|$//')
|
|
||||||
gh pr list --state open --limit 200 --json author,title,number | jq -r --arg members "$MEMBERS" '
|
|
||||||
($members | split("|")) as $m |
|
|
||||||
.[] | select(.author.login as $a | $m | index($a)) |
|
|
||||||
"\(.number)\t\(.author.login)\t\(.title)"
|
|
||||||
'
|
|
||||||
```
|
|
||||||
|
|
||||||
To obtain the list of open PRs from external contributors (non-members):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
MEMBERS=$(gh api repos/:owner/:repo/collaborators --paginate --jq '.[].login' | tr '\n' '|' | sed 's/|$//')
|
|
||||||
gh pr list --state open --limit 200 --json author,title,number | jq -r --arg members "$MEMBERS" '
|
|
||||||
($members | split("|")) as $m |
|
|
||||||
.[] | select(.author.login as $a | $m | index($a) | not) |
|
|
||||||
"\(.number)\t\(.author.login)\t\(.title)"
|
|
||||||
'
|
|
||||||
```
|
|
||||||
|
|
||||||
## Architecture Overview
|
|
||||||
|
|
||||||
Penpot is an open-source design tool composed of several modules:
|
|
||||||
|
|
||||||
| Directory | Language | Purpose | Has `AGENTS.md` |
|
|
||||||
|-----------|----------|---------|:----------------:|
|
|
||||||
| `frontend/` | ClojureScript + SCSS | Single-page React app (design editor) | Yes |
|
|
||||||
| `backend/` | Clojure (JVM) | HTTP/RPC server, PostgreSQL, Redis | Yes |
|
|
||||||
| `common/` | Cljc (shared Clojure/ClojureScript) | Data types, geometry, schemas, utilities | Yes |
|
|
||||||
| `render-wasm/` | Rust -> WebAssembly | High-performance canvas renderer (Skia) | Yes |
|
|
||||||
| `exporter/` | ClojureScript (Node.js) | Headless Playwright-based export (SVG/PDF) | No |
|
|
||||||
| `mcp/` | TypeScript | Model Context Protocol integration | No |
|
|
||||||
| `plugins/` | TypeScript | Plugin runtime and example plugins | No |
|
|
||||||
|
|
||||||
Some submodules use `pnpm` workspaces. The root `package.json` and
|
|
||||||
`pnpm-lock.yaml` manage shared dependencies. Helper scripts live in `scripts/`.
|
|
||||||
|
|
||||||
### Module Dependency Graph
|
|
||||||
|
|
||||||
```
|
|
||||||
frontend ──> common
|
|
||||||
backend ──> common
|
|
||||||
exporter ──> common
|
|
||||||
frontend ──> render-wasm (loads compiled WASM)
|
|
||||||
```
|
|
||||||
|
|
||||||
`common` is referenced as a local dependency (`{:local/root "../common"}`) by
|
|
||||||
both `frontend` and `backend`. Changes to `common` can therefore affect multiple
|
|
||||||
modules — test across consumers when modifying shared code.
|
|
||||||
873
CHANGES.md
873
CHANGES.md
File diff suppressed because it is too large
Load Diff
@ -1,3 +0,0 @@
|
|||||||
# Penpot's Code of Conduct
|
|
||||||
|
|
||||||
Check it at: https://help.penpot.app/contributing-guide/coc/
|
|
||||||
386
CONTRIBUTING.md
386
CONTRIBUTING.md
@ -1,292 +1,190 @@
|
|||||||
# Contributing Guide
|
# Contributing Guide #
|
||||||
|
|
||||||
Thank you for your interest in contributing to Penpot. This guide covers
|
Thank you for your interest in contributing to Penpot. This is a
|
||||||
how to propose changes, submit fixes, and follow project conventions.
|
generic guide that details how to contribute to Penpot in a way that
|
||||||
|
is efficient for everyone. If you want a specific documentation for
|
||||||
|
different parts of the platform, please refer to `docs/` directory.
|
||||||
|
|
||||||
For architecture details, module-specific guidelines, and AI-agent
|
|
||||||
instructions, see [AGENTS.md](AGENTS.md). For final user technical
|
|
||||||
documentation, see the `docs/` directory or the rendered [Help
|
|
||||||
Center](https://help.penpot.app/).
|
|
||||||
|
|
||||||
## Table of Contents
|
## Reporting Bugs ##
|
||||||
|
|
||||||
- [Prerequisites](#prerequisites)
|
We are using [GitHub Issues](https://github.com/penpot/penpot/issues)
|
||||||
- [Reporting Bugs](#reporting-bugs)
|
for our public bugs. We keep a close eye on this and try to make it
|
||||||
- [Pull Requests](#pull-requests)
|
clear when we have an internal fix in progress. Before filing a new
|
||||||
- [Workflow](#workflow)
|
task, try to make sure your problem doesn't already exist.
|
||||||
- [Title format](#title-format)
|
|
||||||
- [Description](#description)
|
|
||||||
- [Branch naming](#branch-naming)
|
|
||||||
- [Review process](#review-process)
|
|
||||||
- [What we won't accept](#what-we-wont-accept)
|
|
||||||
- [Good first issues](#good-first-issues)
|
|
||||||
- [Commit Guidelines](#commit-guidelines)
|
|
||||||
- [Commit types](#commit-types)
|
|
||||||
- [Rules](#rules)
|
|
||||||
- [Examples](#examples)
|
|
||||||
- [Formatting and Linting](#formatting-and-linting)
|
|
||||||
- [Changelog](#changelog)
|
|
||||||
- [Code of Conduct](#code-of-conduct)
|
|
||||||
- [Developer's Certificate of Origin (DCO)](#developers-certificate-of-origin-dco)
|
|
||||||
|
|
||||||
## Prerequisites
|
If you found a bug, please report it, as far as possible with:
|
||||||
|
|
||||||
- **Language**: Penpot is written primarily in Clojure (backend), ClojureScript
|
- a detailed explanation of steps to reproduce the error
|
||||||
(frontend/exporter), and Rust (render-wasm). Familiarity with the Clojure
|
- a browser and the browser version used
|
||||||
ecosystem is expected for most contributions.
|
- a dev tools console exception stack trace (if it is available)
|
||||||
- **Issue tracker**: We use [GitHub Issues](https://github.com/penpot/penpot/issues)
|
|
||||||
for public bugs and [Taiga](https://tree.taiga.io/project/penpot/) for
|
|
||||||
internal project management. Changelog entries reference both.
|
|
||||||
|
|
||||||
## Reporting Bugs
|
If you found a bug that you consider better discuss in private (for
|
||||||
|
example: security bugs), consider first send an email to
|
||||||
|
`support@penpot.app`.
|
||||||
|
|
||||||
Report bugs via [GitHub Issues](https://github.com/penpot/penpot/issues).
|
**We don't have formal bug bounty program for security reports; this
|
||||||
Before filing, search existing issues to avoid duplicates.
|
is an open source application and your contribution will be recognized
|
||||||
|
in the changelog.**
|
||||||
|
|
||||||
Include the following when possible:
|
|
||||||
|
|
||||||
1. Steps to reproduce the error.
|
## Pull requests ##
|
||||||
2. Browser and browser version used.
|
|
||||||
3. DevTools console exception stack trace (if available).
|
|
||||||
|
|
||||||
For security bugs or issues better discussed in private, email
|
If you want propose a change or bug fix with the Pull-Request system
|
||||||
`support@penpot.app` or report them on [Github Security
|
firstly you should carefully read the **DCO** section and format your
|
||||||
Advisories](https://github.com/penpot/penpot/security/advisories)
|
commits accordingly.
|
||||||
|
|
||||||
> **Note:** We do not have a formal bug bounty program. Security
|
If you intend to fix a bug it's fine to submit a pull request right
|
||||||
> contributions are recognized in the changelog.
|
away but we still recommend to file an issue detailing what you're
|
||||||
|
fixing. This is helpful in case we don't accept that specific fix but
|
||||||
|
want to keep track of the issue.
|
||||||
|
|
||||||
## Pull Requests
|
If you want to implement or start working in a new feature, please
|
||||||
|
open a **question** / **discussion** issue for it. No pull-request
|
||||||
|
will be accepted without previous chat about the changes,
|
||||||
|
independently if it is a new feature, already planned feature or small
|
||||||
|
quick win.
|
||||||
|
|
||||||
### Workflow
|
If is going to be your first pull request, You can learn how from this
|
||||||
|
free video series:
|
||||||
|
|
||||||
1. **Read the DCO** — see [Developer's Certificate of Origin](#developers-certificate-of-origin-dco)
|
https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github
|
||||||
below. All code patches must include a `Signed-off-by` line.
|
|
||||||
2. **Discuss before building** — open a [GitHub
|
|
||||||
Issue](https://github.com/penpot/penpot/issues) before starting work on
|
|
||||||
a new feature or significant change. For planned features on the roadmap,
|
|
||||||
reference the corresponding Taiga story. Do not expect your contribution
|
|
||||||
to be accepted if you submit it without prior discussion — this applies
|
|
||||||
to new features, planned features, and quick wins alike.
|
|
||||||
3. **Bug fixes** — you may submit a PR directly, but we still recommend
|
|
||||||
filing an issue first so we can track it independently of your fix.
|
|
||||||
4. **Format and lint** — run the checks described in
|
|
||||||
[Formatting and Linting](#formatting-and-linting) before submitting.
|
|
||||||
|
|
||||||
### Title format
|
We will use the `easy fix` mark for tag for indicate issues that are
|
||||||
|
easy for beginners.
|
||||||
|
|
||||||
Pull request titles **must** follow the same convention as commit subjects:
|
|
||||||
|
## Commit Guidelines ##
|
||||||
|
|
||||||
|
We have very precise rules over how our git commit messages can be formatted.
|
||||||
|
|
||||||
|
The commit message format is:
|
||||||
|
|
||||||
```
|
```
|
||||||
:emoji: <subject>
|
<type> <subject>
|
||||||
```
|
|
||||||
|
|
||||||
- Use the **imperative mood** (e.g. "Fix", not "Fixed").
|
|
||||||
- Capitalize the first letter of the subject.
|
|
||||||
- Do not end the subject with a period.
|
|
||||||
- Keep the subject to **70 characters** or fewer.
|
|
||||||
- Use one of the [commit type emojis](#commit-types) listed below.
|
|
||||||
|
|
||||||
When a PR contains multiple unrelated commits, choose the emoji that
|
|
||||||
best represents the dominant change.
|
|
||||||
|
|
||||||
**Examples:**
|
|
||||||
|
|
||||||
```
|
|
||||||
:bug: Fix unexpected error on launching modal
|
|
||||||
:sparkles: Enable new modal for profile
|
|
||||||
:zap: Improve performance of dashboard navigation
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Note:** When a PR is squash-merged, the PR title becomes the
|
|
||||||
> commit message on the main branch. Getting the title right matters.
|
|
||||||
|
|
||||||
### Description
|
|
||||||
|
|
||||||
Every pull request should include a description that helps reviewers
|
|
||||||
understand the change quickly:
|
|
||||||
|
|
||||||
1. **What and why** — describe the change and its motivation.
|
|
||||||
2. **Link related issues** — use `Closes #1234` or reference a Taiga
|
|
||||||
story (e.g. `Taiga #5678`).
|
|
||||||
3. **Screenshots or recordings** — required for any UI-visible change.
|
|
||||||
4. **Testing notes** — how did you verify the change? Any edge cases?
|
|
||||||
5. **Breaking changes** — call out anything that affects existing users
|
|
||||||
or requires migration steps.
|
|
||||||
|
|
||||||
### Branch naming
|
|
||||||
|
|
||||||
Use a descriptive branch name that reflects the type and scope of the
|
|
||||||
change:
|
|
||||||
|
|
||||||
```
|
|
||||||
<type>/<short-description>
|
|
||||||
```
|
|
||||||
|
|
||||||
Types: `fix`, `feat`, `refactor`, `docs`, `chore`, `perf`.
|
|
||||||
|
|
||||||
Optionally include the issue number:
|
|
||||||
|
|
||||||
```
|
|
||||||
fix/9122-email-blacklisting
|
|
||||||
feat/export-webp
|
|
||||||
refactor/layout-sizing
|
|
||||||
```
|
|
||||||
|
|
||||||
### Review process
|
|
||||||
|
|
||||||
- We are a small team and maintainers juggle reviews alongside other
|
|
||||||
tasks. Please do not expect your code to be reviewed instantly.
|
|
||||||
- Reviews are handled in dedicated blocks of time, usually in the order
|
|
||||||
PRs arrive. It may take a few days to get a first review, especially
|
|
||||||
when urgent tasks come up.
|
|
||||||
- Address review feedback by **pushing new commits** — do not
|
|
||||||
force-push during review, as it breaks comment threads.
|
|
||||||
- PRs require at least **one approval** before merge.
|
|
||||||
- We use **squash-merge** by default. The PR title becomes the final
|
|
||||||
commit message, so follow the [title format](#title-format) above.
|
|
||||||
|
|
||||||
### What we won't accept
|
|
||||||
|
|
||||||
To save time on both sides, please avoid submitting PRs that:
|
|
||||||
|
|
||||||
- Introduce new dependencies without prior discussion.
|
|
||||||
- Change the build system or CI configuration without maintainer
|
|
||||||
approval.
|
|
||||||
- Mix unrelated changes in a single PR — keep PRs focused on one
|
|
||||||
concern.
|
|
||||||
- Skip the [discussion step](#workflow) for non-bug-fix changes.
|
|
||||||
|
|
||||||
### Good first issues
|
|
||||||
|
|
||||||
We use the `easy fix` label to mark issues appropriate for newcomers.
|
|
||||||
|
|
||||||
## Commit Guidelines
|
|
||||||
|
|
||||||
Commit messages must follow this format:
|
|
||||||
|
|
||||||
```
|
|
||||||
:emoji: <subject>
|
|
||||||
|
|
||||||
[body]
|
[body]
|
||||||
|
|
||||||
[footer]
|
[footer]
|
||||||
```
|
```
|
||||||
|
|
||||||
### Commit types
|
Where type is:
|
||||||
|
|
||||||
| Emoji | Description |
|
- :bug: `:bug:` a commit that fixes a bug
|
||||||
|-------|-------------|
|
- :sparkles: `:sparkles:` a commit that an improvement
|
||||||
| :bug: | Bug fix |
|
- :tada: `:tada:` a commit with new feature
|
||||||
| :sparkles: | Improvement or enhancement |
|
- :recycle: `:recycle:` a commit that introduces a refactor
|
||||||
| :tada: | New feature |
|
- :lipstick: `:lipstick:` a commit with cosmetic changes
|
||||||
| :recycle: | Refactor |
|
- :ambulance: `:ambulance:` a commit that fixes critical bug
|
||||||
| :lipstick: | Cosmetic changes |
|
- :books: `:books:` a commit that improves or adds documentation
|
||||||
| :ambulance: | Critical bug fix |
|
- :construction: `:construction:`: a wip commit
|
||||||
| :books: | Documentation |
|
- :boom: `:boom:` a commit with breaking changes
|
||||||
| :construction: | Work in progress |
|
- :wrench: `:wrench:` a commit for config updates
|
||||||
| :boom: | Breaking change |
|
- :zap: `:zap:` a commit with performance improvements
|
||||||
| :wrench: | Configuration update |
|
- :whale: `:whale:` a commit for docker related stuff
|
||||||
| :zap: | Performance improvement |
|
- :paperclip: `:paperclip:` a commit with other not relevant changes
|
||||||
| :whale: | Docker-related change |
|
- :arrow_up: `:arrow_up:` a commit with dependencies updates
|
||||||
| :paperclip: | Other non-relevant changes |
|
- :arrow_down: `:arrow_down:` a commit with dependencies downgrades
|
||||||
| :arrow_up: | Dependency update |
|
- :fire: `:fire:` a commit that removes files or code
|
||||||
| :arrow_down: | Dependency downgrade |
|
|
||||||
| :fire: | Removal of code or files |
|
|
||||||
| :globe_with_meridians: | Add or update translations |
|
|
||||||
| :rocket: | Epic or highlight |
|
|
||||||
|
|
||||||
### Rules
|
More info:
|
||||||
|
- https://gist.github.com/parmentf/035de27d6ed1dce0b36a
|
||||||
|
- https://gist.github.com/rxaviers/7360908
|
||||||
|
|
||||||
- Use the **imperative mood** in the subject (e.g. "Fix", not "Fixed")
|
Each commit should have:
|
||||||
- Capitalize the first letter of the subject
|
|
||||||
- Add clear and concise description on the body
|
|
||||||
- Do not end the subject with a period
|
|
||||||
- Keep the subject to **70 characters** or fewer
|
|
||||||
- Separate the subject from the body with a **blank line**
|
|
||||||
|
|
||||||
### Examples
|
- A concise subject using imperative mood.
|
||||||
|
- The subject should have the first letter capitalized, with no period
|
||||||
|
at the end, and be no longer than 65 characters.
|
||||||
|
- A blank line between the subject line and the body.
|
||||||
|
- An entry on the CHANGES.md file if applicable, referencing the
|
||||||
|
GitHub or Taiga issue/user-story using these same rules.
|
||||||
|
|
||||||
```
|
Examples of good commit messages:
|
||||||
:bug: Fix unexpected error on launching modal
|
|
||||||
:sparkles: Enable new modal for profile
|
|
||||||
:zap: Improve performance of dashboard navigation
|
|
||||||
:ambulance: Fix critical bug on user registration process
|
|
||||||
:tada: Add new approach for user registration
|
|
||||||
```
|
|
||||||
|
|
||||||
## Formatting and Linting
|
- `:bug: Fix unexpected error on launching modal`
|
||||||
|
- `:bug: Set proper error message on generic error`
|
||||||
|
- `:sparkles: Enable new modal for profile`
|
||||||
|
- `:zap: Improve performance of dashboard navigation`
|
||||||
|
- `:wrench: Update default backend configuration`
|
||||||
|
- `:books: Add more documentation for authentication process`
|
||||||
|
- `:ambulance: Fix critical bug on user registration process`
|
||||||
|
- `:tada: Add new approach for user registration`
|
||||||
|
|
||||||
We use [cljfmt](https://github.com/weavejester/cljfmt) for formatting and
|
|
||||||
[clj-kondo](https://github.com/clj-kondo/clj-kondo) for linting.
|
|
||||||
|
|
||||||
```bash
|
## Code of conduct ##
|
||||||
# Check formatting (does not modify files)
|
|
||||||
./scripts/check-fmt
|
|
||||||
|
|
||||||
# Fix formatting (modifies files in place)
|
As contributors and maintainers of this project, we pledge to respect
|
||||||
./scripts/fmt
|
all people who contribute through reporting issues, posting feature
|
||||||
|
requests, updating documentation, submitting pull requests or patches,
|
||||||
|
and other activities.
|
||||||
|
|
||||||
# Lint
|
We are committed to making participation in this project a
|
||||||
./scripts/lint
|
harassment-free experience for everyone, regardless of level of
|
||||||
```
|
experience, gender, gender identity and expression, sexual
|
||||||
|
orientation, disability, personal appearance, body size, race,
|
||||||
|
ethnicity, age, or religion.
|
||||||
|
|
||||||
Ideally, run these as git pre-commit hooks.
|
Examples of unacceptable behavior by participants include the use of
|
||||||
[Husky](https://typicode.github.io/husky/#/) is a convenient option for
|
sexual language or imagery, derogatory comments or personal attacks,
|
||||||
setting this up.
|
trolling, public or private harassment, insults, or other
|
||||||
|
unprofessional conduct.
|
||||||
|
|
||||||
## Changelog
|
Project maintainers have the right and responsibility to remove, edit,
|
||||||
|
or reject comments, commits, code, wiki edits, issues, and other
|
||||||
|
contributions that are not aligned to this Code of Conduct. Project
|
||||||
|
maintainers who do not follow the Code of Conduct may be removed from
|
||||||
|
the project team.
|
||||||
|
|
||||||
When your change is user-facing or otherwise notable, add an entry to
|
This code of conduct applies both within project spaces and in public
|
||||||
[CHANGES.md](CHANGES.md) following the same commit-type conventions. Reference
|
spaces when an individual is representing the project or its
|
||||||
the relevant GitHub issue or Taiga user story.
|
community.
|
||||||
|
|
||||||
## Code of Conduct
|
Instances of abusive, harassing, or otherwise unacceptable behavior
|
||||||
|
may be reported by opening an issue or contacting one or more of the
|
||||||
|
project maintainers.
|
||||||
|
|
||||||
This project follows the [Contributor Covenant](https://www.contributor-covenant.org/).
|
This Code of Conduct is adapted from the Contributor Covenant, version
|
||||||
The full Code of Conduct is available at
|
1.1.0, available from http://contributor-covenant.org/version/1/1/0/
|
||||||
[help.penpot.app/contributing-guide/coc](https://help.penpot.app/contributing-guide/coc/)
|
|
||||||
and in the repository's [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md).
|
|
||||||
|
|
||||||
To report unacceptable behavior, open an issue or contact a project maintainer
|
|
||||||
directly.
|
|
||||||
|
|
||||||
## Developer's Certificate of Origin (DCO)
|
## Developer's Certificate of Origin (DCO) ##
|
||||||
|
|
||||||
By submitting code you agree to and can certify the following:
|
By submitting code you agree to and can certify the following:
|
||||||
|
|
||||||
> **Developer's Certificate of Origin 1.1**
|
Developer's Certificate of Origin 1.1
|
||||||
>
|
|
||||||
> By making a contribution to this project, I certify that:
|
|
||||||
>
|
|
||||||
> (a) The contribution was created in whole or in part by me and I have the
|
|
||||||
> right to submit it under the open source license indicated in the file; or
|
|
||||||
>
|
|
||||||
> (b) The contribution is based upon previous work that, to the best of my
|
|
||||||
> knowledge, is covered under an appropriate open source license and I have
|
|
||||||
> the right under that license to submit that work with modifications,
|
|
||||||
> whether created in whole or in part by me, under the same open source
|
|
||||||
> license (unless I am permitted to submit under a different license), as
|
|
||||||
> indicated in the file; or
|
|
||||||
>
|
|
||||||
> (c) The contribution was provided directly to me by some other person who
|
|
||||||
> certified (a), (b) or (c) and I have not modified it.
|
|
||||||
>
|
|
||||||
> (d) I understand and agree that this project and the contribution are public
|
|
||||||
> and that a record of the contribution (including all personal information
|
|
||||||
> I submit with it, including my sign-off) is maintained indefinitely and
|
|
||||||
> may be redistributed consistent with this project or the open source
|
|
||||||
> license(s) involved.
|
|
||||||
|
|
||||||
### Signed-off-by
|
By making a contribution to this project, I certify that:
|
||||||
|
|
||||||
All code patches (**documentation is excluded**) must contain a sign-off line
|
(a) The contribution was created in whole or in part by me and I
|
||||||
at the end of the commit body. Add it automatically with `git commit -s`.
|
have the right to submit it under the open source license
|
||||||
|
indicated in the file; or
|
||||||
|
|
||||||
```
|
(b) The contribution is based upon previous work that, to the best
|
||||||
Signed-off-by: Your Real Name <your.email@example.com>
|
of my knowledge, is covered under an appropriate open source
|
||||||
```
|
license and I have the right under that license to submit that
|
||||||
|
work with modifications, whether created in whole or in part
|
||||||
|
by me, under the same open source license (unless I am
|
||||||
|
permitted to submit under a different license), as indicated
|
||||||
|
in the file; or
|
||||||
|
|
||||||
- Use your **real name** — pseudonyms and anonymous contributions are not
|
(c) The contribution was provided directly to me by some other
|
||||||
allowed.
|
person who certified (a), (b) or (c) and I have not modified
|
||||||
- The `Signed-off-by` line is **mandatory** and must match the commit author.
|
it.
|
||||||
|
|
||||||
|
(d) I understand and agree that this project and the contribution
|
||||||
|
are public and that a record of the contribution (including all
|
||||||
|
personal information I submit with it, including my sign-off) is
|
||||||
|
maintained indefinitely and may be redistributed consistent with
|
||||||
|
this project or the open source license(s) involved.
|
||||||
|
|
||||||
|
Then, all your code patches (**documentation is excluded**) should
|
||||||
|
contain a sign-off at the end of the patch/commit description body. It
|
||||||
|
can be automatically added on adding `-s` parameter to `git commit`.
|
||||||
|
|
||||||
|
This is an example of what the line looks like:
|
||||||
|
|
||||||
|
Signed-off-by: Andrey Antukh <niwi@niwi.nz>
|
||||||
|
|
||||||
|
Please, use your real name (sorry, no pseudonyms or anonymous
|
||||||
|
contributions are allowed).
|
||||||
|
|||||||
144
README.md
144
README.md
@ -1,56 +1,51 @@
|
|||||||
<img width="100%" src="https://github.com/user-attachments/assets/da17b160-f289-436f-b140-972083a08602" />
|
|
||||||
|
|
||||||
[uri_license]: https://www.mozilla.org/en-US/MPL/2.0
|
[uri_license]: https://www.mozilla.org/en-US/MPL/2.0
|
||||||
[uri_license_image]: https://img.shields.io/badge/MPL-2.0-blue.svg
|
[uri_license_image]: https://img.shields.io/badge/MPL-2.0-blue.svg
|
||||||
|
|
||||||
|
<picture>
|
||||||
|
<source media="(prefers-color-scheme: dark)" srcset="https://penpot.app/images/readme/github-dark-mode.png">
|
||||||
|
<source media="(prefers-color-scheme: light)" srcset="https://penpot.app/images/readme/github-light-mode.png">
|
||||||
|
<img alt="penpot header image" src="https://penpot.app/images/readme/github-light-mode.png">
|
||||||
|
</picture>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://www.digitalpublicgoods.net/r/penpot" rel="nofollow">
|
<a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img alt="License: MPL-2.0" src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
|
||||||
<img alt="Verified DPG" src="https://img.shields.io/badge/Verified-DPG-blue.svg">
|
<a href="https://community.penpot.app" rel="nofollow"><img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app" style="max-width:100%;"></a>
|
||||||
</a>
|
<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
|
||||||
<a href="https://community.penpot.app" rel="nofollow">
|
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a>
|
||||||
<img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app">
|
|
||||||
</a>
|
|
||||||
<a href="https://tree.taiga.io/project/penpot/" rel="nofollow">
|
|
||||||
<img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg">
|
|
||||||
</a>
|
|
||||||
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow">
|
|
||||||
<img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod">
|
|
||||||
</a>
|
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://penpot.app/"><b>Website</b></a> •
|
<a href="https://penpot.app/"><b>Website</b></a> •
|
||||||
<a href="https://help.penpot.app/user-guide/"><b>User Guide</b></a> •
|
<a href="https://help.penpot.app/technical-guide/getting-started/"><b>Getting Started</b></a> •
|
||||||
<a href="https://penpot.app/learning-center"><b>Learning Center</b></a> •
|
<a href="https://help.penpot.app/user-guide/"><b>User Guide</b></a> •
|
||||||
<a href="https://community.penpot.app/"><b>Community</b></a>
|
<a href="https://help.penpot.app/user-guide/introduction/info/"><b>Tutorials & Info</b></a> •
|
||||||
|
<a href="https://community.penpot.app/"><b>Community</b></a>
|
||||||
</p>
|
</p>
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://www.youtube.com/@Penpot"><b>Youtube</b></a> •
|
<a href="https://www.youtube.com/@Penpot"><b>Youtube</b></a> •
|
||||||
<a href="https://peertube.kaleidos.net/a/penpot_app/video-channels"><b>Peertube</b></a> •
|
<a href="https://peertube.kaleidos.net/a/penpot_app/video-channels"><b>Peertube</b></a> •
|
||||||
<a href="https://www.linkedin.com/company/penpot/"><b>Linkedin</b></a> •
|
<a href="https://www.linkedin.com/company/penpot/"><b>Linkedin</b></a> •
|
||||||
<a href="https://instagram.com/penpot.app"><b>Instagram</b></a> •
|
<a href="https://instagram.com/penpot.app"><b>Instagram</b></a> •
|
||||||
<a href="https://fosstodon.org/@penpot/"><b>Mastodon</b></a> •
|
<a href="https://fosstodon.org/@penpot/"><b>Mastodon</b></a> •
|
||||||
<a href="https://bsky.app/profile/penpot.app"><b>Bluesky</b></a> •
|
<a href="https://twitter.com/penpotapp"><b>X</b></a>
|
||||||
<a href="https://twitter.com/penpotapp"><b>X</b></a>
|
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
[Penpot video](https://github.com/user-attachments/assets/7c67fd7c-04d3-4c9b-88ec-b6f5e23f8332)
|
<br />
|
||||||
|
|
||||||
Penpot is the open-source design platform for teams that build digital products at scale.
|
[Penpot video](https://github.com/penpot/penpot/assets/5446186/b8ad0764-585e-4ddc-b098-9b4090d337cc)
|
||||||
|
|
||||||
Penpot’s key strength lies in giving you **full ownership of your design infrastructure**. Built on open source and designed for [self-hosting](https://help.penpot.app/technical-guide/getting-started/), it puts teams in complete control of their design environment supporting strict compliance and governance requirements. Whether used in the **browser or deployed on your own servers**, Penpot **works with open standards** like SVG, CSS, HTML, and JSON.
|
<br />
|
||||||
|
|
||||||
Real-time collaboration strengthens this foundation, helping teams scale and bring design closer to the product through top-tier capabilities. Additionally, developers feel at home using Penpot, because design is expressed as code, enabling a direct translation and shipping products faster.
|
Penpot is the first **open-source** design tool for design and code collaboration. Designers can create stunning designs, interactive prototypes, design systems at scale, while developers enjoy ready-to-use code and make their workflow easy and fast. And all of this with no handoff drama.
|
||||||
|
|
||||||
Best-in-class native [Design Tokens](https://penpot.dev/collaboration/design-tokens) provide a single source of truth between design and development. They ensure consistency, improve collaboration, and make it easier to manage complex design systems.
|
Penpot is available on browser and [self host](https://penpot.app/self-host). It’s web-based and works with open standards (SVG, CSS and HTML). And last but not least, it’s free!
|
||||||
|
|
||||||
The [MCP server](https://penpot.app/penpot-mcp-server) takes it further by enabling multi-directional workflows between design and code. A [powerful open API](https://help.penpot.app/mcp/#quick-start) and plugin system makes the workspace programmable, enabling automation, AI-driven workflows, and integrations with the tools and systems you already use.
|
Penpot’s latest [huge release 2.0](https://penpot.app/dev-diaries), takes the platform to a whole new level. This update introduces the ground-breaking [CSS Grid Layout feature](https://penpot.app/penpot-2.0), a complete UI redesign, a new Components system, and much more. Plus, it's faster and more accessible.
|
||||||
|
|
||||||
With [CSS Grid and Flex Layout](https://help.penpot.app/user-guide/designing/flexible-layouts/), teams can design responsive interfaces that behave like real code from the start.
|
|
||||||
|
|
||||||
Combined, these features turn Penpot into a **full-stack design platform** for building scalable design systems and fully integrated product development processes.
|
🎇 **Penpot Fest** is our design, code & Open Source event. Check out the highlights from [Penpot Fest 2023 edition](https://www.youtube.com/watch?v=sOpLZaK5mDc)!
|
||||||
|
|
||||||
If your organization is scaling and needs extra support, we’re here to help. [Talk to us](https://penpot.app/talk-to-us)
|
|
||||||
|
|
||||||
## Table of contents ##
|
## Table of contents ##
|
||||||
|
|
||||||
@ -63,78 +58,95 @@ If your organization is scaling and needs extra support, we’re here to help. [
|
|||||||
|
|
||||||
## Why Penpot ##
|
## Why Penpot ##
|
||||||
|
|
||||||
Penpot connects design, code, and AI workflows through a code-based approach, making designs readable by developers and AI via the MCP server. This approach helps teams ship what’s actually designed and manage design systems at scale with powerful design tokens. As a self-hosted, open-source and real-time collaboration platform, Penpot offers full flexibility, security, and ownership without vendor lock-in. Learn more about [why Penpot](https://penpot.app/why-penpot) is the platform for your team.
|
Penpot expresses designs as code. Designers can do their best work and see it will be beautifully implemented by developers in a two-way collaboration.
|
||||||
|
|
||||||
### Plugin system ###
|
### Plugin system ###
|
||||||
|
[Penpot plugins](https://penpot.app/penpothub/plugins) let you expand the platform's capabilities, give you the flexibility to integrate it with other apps, and design custom solutions.
|
||||||
[Penpot plugins](https://penpot.app/penpothub/plugins) let you expand the platform's capabilities, give you the flexibility to integrate it with other apps, and design custom solutions.
|
|
||||||
|
|
||||||
### Designed for developers ###
|
### Designed for developers ###
|
||||||
|
|
||||||
Penpot was built to serve both designers and developers and create a fluid design-code process. You have the choice to enjoy real-time collaboration or play "solo".
|
Penpot was built to serve both designers and developers and create a fluid design-code process. You have the choice to enjoy real-time collaboration or play "solo".
|
||||||
|
|
||||||
### Inspect mode ###
|
### Inspect mode ###
|
||||||
|
|
||||||
Work with ready-to-use code and make your workflow easy and fast. The inspect tab gives instant access to SVG, CSS and HTML code.
|
Work with ready-to-use code and make your workflow easy and fast. The inspect tab gives instant access to SVG, CSS and HTML code.
|
||||||
|
|
||||||
|
### Self host your own instance ###
|
||||||
|
Provide your team or organization with a completely owned collaborative design tool. Use Penpot's cloud service or deploy your own Penpot server.
|
||||||
|
|
||||||
### Integrations ###
|
### Integrations ###
|
||||||
|
Penpot offers integration into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
|
||||||
|
|
||||||
Penpot offers [integration](https://penpot.app/integrations-api) into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
|
### What’s great for design ###
|
||||||
|
With Penpot you can design libraries to share and reuse; turn design elements into components and tokens to allow reusability and scalability; and build realistic user flows and interactions.
|
||||||
|
|
||||||
### Building Design Systems: design tokens, components and variants ###
|
<br />
|
||||||
|
|
||||||
Penpot brings [design systems](https://penpot.app/design/design-systems) to code-minded teams: a single source of truth with native Design Tokens, Components, and Variants for scalable, reusable, and consistent UI across projects and platforms.
|
<p align="center">
|
||||||
|
<img src="https://img.plasmic.app/img-optimizer/v1/img?src=https%3A%2F%2Fimg.plasmic.app%2Fimg-optimizer%2Fv1%2Fimg%2F9dd677c36afb477e9666ccd1d3f009ad.png" alt="Open Source" style="width: 65%;">
|
||||||
|
</p>
|
||||||
|
|
||||||
<img width="100%" alt="Penpot Design Systems" src="https://github.com/user-attachments/assets/cce75ad6-f783-473f-8803-da9eb8255fef">
|
<br />
|
||||||
|
|
||||||
## Getting started ##
|
## Getting started ##
|
||||||
|
|
||||||
Penpot is the only design & prototype platform that is deployment agnostic. You can use it in our [SAAS](https://design.penpot.app) or deploy it anywhere.
|
### Install with Elestio ###
|
||||||
|
Penpot is the only design & prototype platform that is deployment agnostic. You can use it or deploy it anywhere.
|
||||||
|
|
||||||
Learn how to install it with Docker, Kubernetes, Elestio or other options on [our website](https://penpot.app/self-host).
|
Learn how to install it with Elestio and Docker, or other options on [our website](https://penpot.app/self-host).
|
||||||
|
<br />
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<img src="https://site-assets.plasmic.app/2168cf524dd543caeff32384eb9ea0a1.svg" alt="Open Source" style="width: 65%;">
|
||||||
|
</p>
|
||||||
|
<br />
|
||||||
|
|
||||||
## Community ##
|
## Community ##
|
||||||
|
|
||||||
We love the Open Source software community. Contributing is our passion and if it’s yours too, participate and [improve](https://community.penpot.app/c/help-us-improve-penpot/7) Penpot. All your designs, code and ideas are welcome!
|
We love the Open Source software community. Contributing is our passion and if it’s yours too, participate and [improve](https://community.penpot.app/c/help-us-improve-penpot/7) Penpot. All your designs, code and ideas are welcome!
|
||||||
|
|
||||||
Want to go a step further? Become a [Penpot Ambassador](https://penpot.app/ambassador-program) and help grow the Penpot community in your region while contributing to a global, open design ecosystem.
|
|
||||||
|
|
||||||
If you need help or have any questions; if you’d like to share your experience using Penpot or get inspired; if you’d rather meet our community of developers and designers, [join our Community](https://community.penpot.app/)!
|
If you need help or have any questions; if you’d like to share your experience using Penpot or get inspired; if you’d rather meet our community of developers and designers, [join our Community](https://community.penpot.app/)!
|
||||||
|
|
||||||
Categories include:
|
You will find the following categories:
|
||||||
|
|
||||||
- [Ask the Community](https://community.penpot.app/c/ask-for-help-using-penpot/6)
|
- [Ask the Community](https://community.penpot.app/c/ask-for-help-using-penpot/6)
|
||||||
- [Troubleshooting](https://community.penpot.app/c/technical/8)
|
- [Troubleshooting](https://community.penpot.app/c/technical/8)
|
||||||
- [Help us Improve Penpot](https://community.penpot.app/c/help-us-improve-penpot/7)
|
- [Help us Improve Penpot](https://community.penpot.app/c/help-us-improve-penpot/7)
|
||||||
|
- [#MadeWithPenpot](https://community.penpot.app/c/madewithpenpot/9)
|
||||||
- [Events and Announcements](https://community.penpot.app/c/announcements/5)
|
- [Events and Announcements](https://community.penpot.app/c/announcements/5)
|
||||||
|
- [Inside Penpot](https://community.penpot.app/c/inside-penpot/21)
|
||||||
- [Penpot in your language](https://community.penpot.app/c/penpot-in-your-language/12)
|
- [Penpot in your language](https://community.penpot.app/c/penpot-in-your-language/12)
|
||||||
- [Education](https://community.penpot.app/c/education/28)
|
- [Design and Code Essentials](https://community.penpot.app/c/design-and-code-essentials/22)
|
||||||
|
|
||||||
<img width="100%" alt="Pentpot Community" src="https://github.com/user-attachments/assets/4b2a4360-12b5-4994-bd45-641449f86c4e" />
|
|
||||||
|
|
||||||
### Code of Conduct ###
|
<br />
|
||||||
|
|
||||||
Anyone who contributes to Penpot, whether through code, in the community, or at an event, must adhere to the
|
<p align="center">
|
||||||
[code of conduct](https://help.penpot.app/contributing-guide/coc/) and foster a positive and safe environment.
|
<img src="https://github.com/penpot/penpot/assets/5446186/6ac62220-a16c-46c9-ab21-d24ae357ed03" alt="Community" style="width: 65%;">
|
||||||
|
</p>
|
||||||
|
<br />
|
||||||
|
|
||||||
### Contributing ###
|
## Contributing ##
|
||||||
|
|
||||||
Any contribution will make a difference to improve Penpot. How can you get involved?
|
Any contribution will make a difference to improve Penpot. How can you get involved?
|
||||||
|
|
||||||
Choose your way:
|
Choose your way:
|
||||||
|
|
||||||
- Create and [share Libraries & Templates](https://penpot.app/libraries-templates.html) that will be helpful for the community.
|
- Create and [share Libraries & Templates](https://penpot.app/libraries-templates.html) that will be helpful for the community
|
||||||
- Invite your [team to join](https://design.penpot.app/#/auth/register).
|
- Invite your [team to join](https://design.penpot.app/#/auth/register)
|
||||||
- Give this repo a star and follow us on Social Media: [Mastodon](https://fosstodon.org/@penpot/), [Youtube](https://www.youtube.com/c/Penpot), [Instagram](https://instagram.com/penpot.app), [Linkedin](https://www.linkedin.com/company/penpotdesign), [Peertube](https://peertube.kaleidos.net/a/penpot_app), [X](https://twitter.com/penpotapp) and [BlueSky](https://bsky.app/profile/penpot.app).
|
- Star this repo and follow us on Social Media: [Mastodon](https://fosstodon.org/@penpot/), [Youtube](https://www.youtube.com/c/Penpot), [Instagram](https://instagram.com/penpot.app), [Linkedin](https://www.linkedin.com/company/penpotdesign), [Peertube](https://peertube.kaleidos.net/a/penpot_app) and [X](https://twitter.com/penpotapp).
|
||||||
- Participate in the [Community](https://community.penpot.app/) space by asking and answering questions; reacting to others’ articles; opening your own conversations and following along on decisions affecting the project.
|
- Participate in the [Community](https://community.penpot.app/) space by asking and answering questions; reacting to others’ articles; opening your own conversations and following along on decisions affecting the project.
|
||||||
- Report bugs with our easy [guide for bugs hunting](https://help.penpot.app/contributing-guide/reporting-bugs/) or [GitHub issues](https://github.com/penpot/penpot/issues).
|
- Report bugs with our easy [guide for bugs hunting](https://help.penpot.app/contributing-guide/reporting-bugs/) or [GitHub issues](https://github.com/penpot/penpot/issues)
|
||||||
- Become a [translator](https://help.penpot.app/contributing-guide/translations).
|
- Become a [translator](https://help.penpot.app/contributing-guide/translations)
|
||||||
- Give feedback: [Email us](mailto:support@penpot.app).
|
- Give feedback: [Email us](mailto:support@penpot.app)
|
||||||
- **Contribute to Penpot's code:** [Watch this video](https://www.youtube.com/watch?v=TpN0osiY-8k) by Alejandro Alonso, CIO and developer at Penpot, where he gives us a hands-on demo of how to use Penpot’s repository and make changes in both front and back end.
|
- **Contribute to Penpot's code:** [Watch this video](https://www.youtube.com/watch?v=TpN0osiY-8k) by Alejandro Alonso, CIO and developer at Penpot, where he gives us a hands-on demo of how to use Penpot’s repository and make changes in both front and back end
|
||||||
|
|
||||||
To find (almost) everything you need to know on how to contribute to Penpot, refer to the [contributing guide](https://help.penpot.app/contributing-guide/).
|
To find (almost) everything you need to know on how to contribute to Penpot, refer to the [contributing guide](https://help.penpot.app/contributing-guide/).
|
||||||
|
|
||||||
<img width="100%" alt="Penpot hub" src="https://github.com/user-attachments/assets/0abc02f0-625c-45ab-ad81-4927bec7a055" />
|
<br />
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<img src="https://github.com/penpot/penpot/assets/5446186/fea18923-dc06-49be-86ad-c3496a7956e6" alt="Libraries and templates" style="width: 65%;">
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<br />
|
||||||
|
|
||||||
## Resources ##
|
## Resources ##
|
||||||
|
|
||||||
@ -150,8 +162,6 @@ You can ask and answer questions, have open-ended conversations, and follow alon
|
|||||||
|
|
||||||
📚 [Dev Diaries](https://penpot.app/dev-diaries.html)
|
📚 [Dev Diaries](https://penpot.app/dev-diaries.html)
|
||||||
|
|
||||||
🧑🏫 [UI Design Course](https://penpot.app/courses/)
|
|
||||||
|
|
||||||
|
|
||||||
## License ##
|
## License ##
|
||||||
|
|
||||||
|
|||||||
28
SECURITY.md
28
SECURITY.md
@ -2,30 +2,4 @@
|
|||||||
|
|
||||||
## Reporting a Vulnerability
|
## Reporting a Vulnerability
|
||||||
|
|
||||||
We take the security of this project seriously. If you have discovered
|
Please report security issues to `support@penpot.app`
|
||||||
a security vulnerability, please do **not** open a public issue.
|
|
||||||
|
|
||||||
Please report vulnerabilities via email to: **[support@penpot.app]**
|
|
||||||
|
|
||||||
|
|
||||||
### What to include:
|
|
||||||
|
|
||||||
* A brief description of the vulnerability.
|
|
||||||
* Steps to reproduce the issue.
|
|
||||||
* Potential impact if exploited.
|
|
||||||
|
|
||||||
We appreciate your patience and your commitment to **responsible disclosure**.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Security Contributors
|
|
||||||
|
|
||||||
We are incredibly grateful to the following individuals and
|
|
||||||
organizations for their help in keeping this project safe.
|
|
||||||
|
|
||||||
* **Ali Maharramli** – for identifying critical path traversal vulnerability
|
|
||||||
|
|
||||||
|
|
||||||
> **Note:** This list is a work in progress. If you have contributed
|
|
||||||
> to the security of this project and would like to be recognized (or
|
|
||||||
> prefer to remain anonymous), please let us know.
|
|
||||||
7
backend/.gitignore
vendored
Normal file
7
backend/.gitignore
vendored
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
.pnp.*
|
||||||
|
.yarn/*
|
||||||
|
!.yarn/patches
|
||||||
|
!.yarn/plugins
|
||||||
|
!.yarn/releases
|
||||||
|
!.yarn/sdks
|
||||||
|
!.yarn/versions
|
||||||
@ -1,259 +0,0 @@
|
|||||||
# Penpot Backend – Agent Instructions
|
|
||||||
|
|
||||||
Clojure backend (RPC) service running on the JVM.
|
|
||||||
|
|
||||||
Uses Integrant for dependency injection, PostgreSQL for storage, and
|
|
||||||
Redis for messaging/caching.
|
|
||||||
|
|
||||||
## General Guidelines
|
|
||||||
|
|
||||||
To ensure consistency across the Penpot JVM stack, all contributions must adhere
|
|
||||||
to these criteria:
|
|
||||||
|
|
||||||
### 1. Testing & Validation
|
|
||||||
|
|
||||||
* **Coverage:** If code is added or modified in `src/`, corresponding
|
|
||||||
tests in `test/backend_tests/` must be added or updated.
|
|
||||||
|
|
||||||
* **Execution:**
|
|
||||||
* **Isolated:** Run `clojure -M:dev:test --focus backend-tests.my-ns-test` for the specific test namespace.
|
|
||||||
* **Regression:** Run `clojure -M:dev:test` to ensure the suite passes without regressions in related functional areas.
|
|
||||||
|
|
||||||
### 2. Code Quality & Formatting
|
|
||||||
|
|
||||||
* **Linting:** All code must pass `clj-kondo` checks (run `pnpm run lint:clj`)
|
|
||||||
* **Formatting:** All the code must pass the formatting check (run `pnpm run
|
|
||||||
check-fmt`). Use `pnpm run fmt` to fix formatting issues. Avoid "dirty"
|
|
||||||
diffs caused by unrelated whitespace changes.
|
|
||||||
* **Type Hinting:** Use explicit JVM type hints (e.g., `^String`, `^long`) in
|
|
||||||
performance-critical paths to avoid reflection overhead.
|
|
||||||
|
|
||||||
## Code Conventions
|
|
||||||
|
|
||||||
### Namespace Overview
|
|
||||||
|
|
||||||
The source is located under `src` directory and this is a general overview of
|
|
||||||
namespaces structure:
|
|
||||||
|
|
||||||
- `app.rpc.commands.*` – RPC command implementations (`auth`, `files`, `teams`, etc.)
|
|
||||||
- `app.http.*` – HTTP routes and middleware
|
|
||||||
- `app.db.*` – Database layer
|
|
||||||
- `app.tasks.*` – Background job tasks
|
|
||||||
- `app.main` – Integrant system setup and entrypoint
|
|
||||||
- `app.loggers` – Internal loggers (auditlog, mattermost, etc.) (not to be confused with `app.common.logging`)
|
|
||||||
|
|
||||||
### RPC
|
|
||||||
|
|
||||||
The RPC methods are implemented using a multimethod-like structure via the
|
|
||||||
`app.util.services` namespace. The main RPC methods are collected under
|
|
||||||
`app.rpc.commands` namespace and exposed under `/api/rpc/command/<cmd-name>`.
|
|
||||||
|
|
||||||
The RPC method accepts POST and GET requests indistinctly and uses the `Accept`
|
|
||||||
header to negotiate the response encoding (which can be Transit — the default —
|
|
||||||
or plain JSON). It also accepts Transit (default) or JSON as input, which should
|
|
||||||
be indicated using the `Content-Type` header.
|
|
||||||
|
|
||||||
The main convention is: use `get-` prefix on RPC name when we want READ
|
|
||||||
operation.
|
|
||||||
|
|
||||||
Example of RPC method definition:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
(sv/defmethod ::my-command
|
|
||||||
{::rpc/auth true ;; requires auth
|
|
||||||
::doc/added "1.18"
|
|
||||||
::sm/params [:map ...] ;; malli input schema
|
|
||||||
::sm/result [:map ...]} ;; malli output schema
|
|
||||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
|
||||||
;; return a plain map or throw
|
|
||||||
{:id (uuid/next)})
|
|
||||||
```
|
|
||||||
|
|
||||||
Look under `src/app/rpc/commands/*.clj` to see more examples.
|
|
||||||
|
|
||||||
### Tests
|
|
||||||
|
|
||||||
Test namespaces match `.*-test$` under `test/`. Config is in `tests.edn`.
|
|
||||||
|
|
||||||
|
|
||||||
### Integrant System
|
|
||||||
|
|
||||||
The `src/app/main.clj` declares the system map. Each key is a component; values
|
|
||||||
are config maps with `::ig/ref` for dependencies. Components implement
|
|
||||||
`ig/init-key` / `ig/halt-key!`.
|
|
||||||
|
|
||||||
|
|
||||||
### Connecting to the Database
|
|
||||||
|
|
||||||
Two PostgreSQL databases are used in this environment:
|
|
||||||
|
|
||||||
| Database | Purpose | Connection string |
|
|
||||||
|---------------|--------------------|----------------------------------------------------|
|
|
||||||
| `penpot` | Development / app | `postgresql://penpot:penpot@postgres/penpot` |
|
|
||||||
| `penpot_test` | Test suite | `postgresql://penpot:penpot@postgres/penpot_test` |
|
|
||||||
|
|
||||||
**Interactive psql session:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# development DB
|
|
||||||
psql "postgresql://penpot:penpot@postgres/penpot"
|
|
||||||
|
|
||||||
# test DB
|
|
||||||
psql "postgresql://penpot:penpot@postgres/penpot_test"
|
|
||||||
```
|
|
||||||
|
|
||||||
**One-shot query (non-interactive):**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
psql "postgresql://penpot:penpot@postgres/penpot" -c "SELECT id, name FROM team LIMIT 5;"
|
|
||||||
```
|
|
||||||
|
|
||||||
**Useful psql meta-commands:**
|
|
||||||
|
|
||||||
```
|
|
||||||
\dt -- list all tables
|
|
||||||
\d <table> -- describe a table (columns, types, constraints)
|
|
||||||
\di -- list indexes
|
|
||||||
\q -- quit
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Migrations table:** Applied migrations are tracked in the `migrations` table
|
|
||||||
> with columns `module`, `step`, and `created_at`. When renaming a migration
|
|
||||||
> logical name, update this table in both databases to match the new name;
|
|
||||||
> otherwise the runner will attempt to re-apply the migration on next startup.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Example: fix a renamed migration entry in the test DB
|
|
||||||
psql "postgresql://penpot:penpot@postgres/penpot_test" \
|
|
||||||
-c "UPDATE migrations SET step = 'new-name' WHERE step = 'old-name';"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Database Access (Clojure)
|
|
||||||
|
|
||||||
`app.db` wraps next.jdbc. Queries use a SQL builder that auto-converts kebab-case ↔ snake_case.
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
;; Query helpers
|
|
||||||
(db/get cfg-or-pool :table {:id id}) ; fetch one row (throws if missing)
|
|
||||||
(db/get* cfg-or-pool :table {:id id}) ; fetch one row (returns nil)
|
|
||||||
(db/query cfg-or-pool :table {:team-id team-id}) ; fetch multiple rows
|
|
||||||
(db/insert! cfg-or-pool :table {:name "x" :team-id id}) ; insert
|
|
||||||
(db/update! cfg-or-pool :table {:name "y"} {:id id}) ; update
|
|
||||||
(db/delete! cfg-or-pool :table {:id id}) ; delete
|
|
||||||
|
|
||||||
;; Run multiple statements/queries on single connection
|
|
||||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
|
||||||
(db/insert! conn :table row1)
|
|
||||||
(db/insert! conn :table row2))
|
|
||||||
|
|
||||||
|
|
||||||
;; Transactions
|
|
||||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
|
||||||
(db/insert! conn :table row)))
|
|
||||||
```
|
|
||||||
|
|
||||||
Almost all methods in the `app.db` namespace accept `pool`, `conn`, or
|
|
||||||
`cfg` as params.
|
|
||||||
|
|
||||||
Migrations live in `src/app/migrations/` as numbered SQL files. They run automatically on startup.
|
|
||||||
|
|
||||||
|
|
||||||
### Error Handling
|
|
||||||
|
|
||||||
The exception helpers are defined on Common module, and are available under
|
|
||||||
`app.common.exceptions` namespace.
|
|
||||||
|
|
||||||
Example of raising an exception:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
(ex/raise :type :not-found
|
|
||||||
:code :object-not-found
|
|
||||||
:hint "File does not exist"
|
|
||||||
:file-id id)
|
|
||||||
```
|
|
||||||
|
|
||||||
Common types: `:not-found`, `:validation`, `:authorization`, `:conflict`, `:internal`.
|
|
||||||
|
|
||||||
|
|
||||||
### Performance Macros (`app.common.data.macros`)
|
|
||||||
|
|
||||||
Always prefer these macros over their `clojure.core` equivalents — they provide
|
|
||||||
optimized implementations:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
(dm/select-keys m [:a :b]) ;; faster than core/select-keys
|
|
||||||
(dm/get-in obj [:a :b :c]) ;; faster than core/get-in
|
|
||||||
(dm/str "a" "b" "c") ;; string concatenation
|
|
||||||
```
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
`src/app/config.clj` reads `PENPOT_*` environment variables, validated with
|
|
||||||
Malli. Access anywhere via `(cf/get :smtp-host)`. Feature flags: `(cf/flags
|
|
||||||
:enable-smtp)`.
|
|
||||||
|
|
||||||
|
|
||||||
### Background Tasks
|
|
||||||
|
|
||||||
Background tasks live in `src/app/tasks/`. Each task is an Integrant component
|
|
||||||
that exposes a `::handler` key and follows this three-method pattern:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
(defmethod ig/assert-key ::handler ;; validate config at startup
|
|
||||||
[_ params]
|
|
||||||
(assert (db/pool? (::db/pool params)) "expected a valid database pool"))
|
|
||||||
|
|
||||||
(defmethod ig/expand-key ::handler ;; inject defaults before init
|
|
||||||
[k v]
|
|
||||||
{k (assoc v ::my-option default-value)})
|
|
||||||
|
|
||||||
(defmethod ig/init-key ::handler ;; return the task fn
|
|
||||||
[_ cfg]
|
|
||||||
(fn [_task] ;; receives the task row from the worker
|
|
||||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
|
||||||
;; … do work …
|
|
||||||
))))
|
|
||||||
```
|
|
||||||
|
|
||||||
**Wiring a new task** requires two changes in `src/app/main.clj`:
|
|
||||||
|
|
||||||
1. **Handler config** – add an entry in `system-config` with the dependencies:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
:app.tasks.my-task/handler
|
|
||||||
{::db/pool (ig/ref ::db/pool)}
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Registry + cron** – register the handler name and schedule it:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
;; in ::wrk/registry ::wrk/tasks map:
|
|
||||||
:my-task (ig/ref :app.tasks.my-task/handler)
|
|
||||||
|
|
||||||
;; in worker-config ::wrk/cron ::wrk/entries vector:
|
|
||||||
{:cron #penpot/cron "0 0 0 * * ?" ;; daily at midnight
|
|
||||||
:task :my-task}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Useful cron patterns** (Quartz format — six fields: s m h dom mon dow):
|
|
||||||
|
|
||||||
| Expression | Meaning |
|
|
||||||
|------------------------------|--------------------|
|
|
||||||
| `"0 0 0 * * ?"` | Daily at midnight |
|
|
||||||
| `"0 0 */6 * * ?"` | Every 6 hours |
|
|
||||||
| `"0 */5 * * * ?"` | Every 5 minutes |
|
|
||||||
|
|
||||||
**Time helpers** (`app.common.time`):
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
(ct/now) ;; current instant
|
|
||||||
(ct/duration {:hours 1}) ;; java.time.Duration
|
|
||||||
(ct/minus (ct/now) some-duration) ;; subtract duration from instant
|
|
||||||
```
|
|
||||||
|
|
||||||
`db/interval` converts a `Duration` (or millis / string) to a PostgreSQL
|
|
||||||
interval object suitable for use in SQL queries:
|
|
||||||
|
|
||||||
```clojure
|
|
||||||
(db/interval (ct/duration {:hours 1})) ;; → PGInterval "3600.0 seconds"
|
|
||||||
```
|
|
||||||
@ -3,10 +3,10 @@
|
|||||||
|
|
||||||
:deps
|
:deps
|
||||||
{penpot/common {:local/root "../common"}
|
{penpot/common {:local/root "../common"}
|
||||||
org.clojure/clojure {:mvn/version "1.12.4"}
|
org.clojure/clojure {:mvn/version "1.12.0"}
|
||||||
org.clojure/tools.namespace {:mvn/version "1.5.0"}
|
org.clojure/tools.namespace {:mvn/version "1.5.0"}
|
||||||
|
|
||||||
com.github.luben/zstd-jni {:mvn/version "1.5.7-4"}
|
com.github.luben/zstd-jni {:mvn/version "1.5.6-9"}
|
||||||
|
|
||||||
io.prometheus/simpleclient {:mvn/version "0.16.0"}
|
io.prometheus/simpleclient {:mvn/version "0.16.0"}
|
||||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
|
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
|
||||||
@ -17,41 +17,34 @@
|
|||||||
|
|
||||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
||||||
|
|
||||||
io.lettuce/lettuce-core {:mvn/version "6.8.1.RELEASE"}
|
io.lettuce/lettuce-core {:mvn/version "6.5.2.RELEASE"}
|
||||||
;; Minimal dependencies required by lettuce, we need to include them
|
|
||||||
;; explicitly because clojure dependency management does not support
|
|
||||||
;; yet the BOM format.
|
|
||||||
io.micrometer/micrometer-core {:mvn/version "1.14.2"}
|
|
||||||
io.micrometer/micrometer-observation {:mvn/version "1.14.2"}
|
|
||||||
|
|
||||||
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
||||||
com.google.guava/guava {:mvn/version "33.4.8-jre"}
|
|
||||||
|
|
||||||
funcool/yetti
|
funcool/yetti
|
||||||
{:git/tag "v11.9"
|
{:git/tag "v11.4"
|
||||||
:git/sha "5fad7a9"
|
:git/sha "ce50d42"
|
||||||
:git/url "https://github.com/funcool/yetti.git"
|
:git/url "https://github.com/funcool/yetti.git"
|
||||||
:exclusions [org.slf4j/slf4j-api]}
|
:exclusions [org.slf4j/slf4j-api]}
|
||||||
|
|
||||||
com.github.seancorfield/next.jdbc
|
com.github.seancorfield/next.jdbc
|
||||||
{:mvn/version "1.3.1070"}
|
{:mvn/version "1.3.994"}
|
||||||
|
metosin/reitit-core {:mvn/version "0.7.2"}
|
||||||
|
nrepl/nrepl {:mvn/version "1.3.1"}
|
||||||
|
cider/cider-nrepl {:mvn/version "0.52.0"}
|
||||||
|
|
||||||
metosin/reitit-core {:mvn/version "0.9.1"}
|
org.postgresql/postgresql {:mvn/version "42.7.5"}
|
||||||
nrepl/nrepl {:mvn/version "1.4.0"}
|
org.xerial/sqlite-jdbc {:mvn/version "3.48.0.0"}
|
||||||
|
|
||||||
org.postgresql/postgresql {:mvn/version "42.7.9"}
|
com.zaxxer/HikariCP {:mvn/version "6.2.1"}
|
||||||
org.xerial/sqlite-jdbc {:mvn/version "3.50.3.0"}
|
|
||||||
|
|
||||||
com.zaxxer/HikariCP {:mvn/version "7.0.2"}
|
|
||||||
|
|
||||||
io.whitfin/siphash {:mvn/version "2.0.0"}
|
io.whitfin/siphash {:mvn/version "2.0.0"}
|
||||||
|
|
||||||
buddy/buddy-hashers {:mvn/version "2.0.167"}
|
buddy/buddy-hashers {:mvn/version "2.0.167"}
|
||||||
buddy/buddy-sign {:mvn/version "3.6.1-359"}
|
buddy/buddy-sign {:mvn/version "3.6.1-359"}
|
||||||
|
|
||||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.3"}
|
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.0"}
|
||||||
|
|
||||||
org.jsoup/jsoup {:mvn/version "1.21.2"}
|
org.jsoup/jsoup {:mvn/version "1.18.3"}
|
||||||
org.im4java/im4java
|
org.im4java/im4java
|
||||||
{:git/tag "1.4.0-penpot-2"
|
{:git/tag "1.4.0-penpot-2"
|
||||||
:git/sha "e2b3e16"
|
:git/sha "e2b3e16"
|
||||||
@ -61,12 +54,12 @@
|
|||||||
|
|
||||||
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
|
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
|
||||||
|
|
||||||
dawran6/emoji {:mvn/version "0.2.0"}
|
dawran6/emoji {:mvn/version "0.1.5"}
|
||||||
markdown-clj/markdown-clj {:mvn/version "1.12.4"}
|
markdown-clj/markdown-clj {:mvn/version "1.12.2"}
|
||||||
|
|
||||||
;; Pretty Print specs
|
;; Pretty Print specs
|
||||||
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
||||||
software.amazon.awssdk/s3 {:mvn/version "2.41.21"}}
|
software.amazon.awssdk/s3 {:mvn/version "2.28.26"}}
|
||||||
|
|
||||||
:paths ["src" "resources" "target/classes"]
|
:paths ["src" "resources" "target/classes"]
|
||||||
:aliases
|
:aliases
|
||||||
@ -81,14 +74,12 @@
|
|||||||
|
|
||||||
:build
|
:build
|
||||||
{:extra-deps
|
{:extra-deps
|
||||||
{io.github.clojure/tools.build {:mvn/version "0.10.10"}}
|
{io.github.clojure/tools.build {:git/tag "v0.10.6" :git/sha "52cf7d6"}}
|
||||||
:ns-default build}
|
:ns-default build}
|
||||||
|
|
||||||
:test
|
:test
|
||||||
{:main-opts ["-m" "kaocha.runner"]
|
{:main-opts ["-m" "kaocha.runner"]
|
||||||
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"]
|
||||||
"--sun-misc-unsafe-memory-access=allow"
|
|
||||||
"--enable-native-access=ALL-UNNAMED"]
|
|
||||||
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
|
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
|
||||||
|
|
||||||
:outdated
|
:outdated
|
||||||
@ -97,8 +88,8 @@
|
|||||||
|
|
||||||
:jmx-remote
|
:jmx-remote
|
||||||
{:jvm-opts ["-Dcom.sun.management.jmxremote"
|
{:jvm-opts ["-Dcom.sun.management.jmxremote"
|
||||||
"-Dcom.sun.management.jmxremote.port=9000"
|
"-Dcom.sun.management.jmxremote.port=9090"
|
||||||
"-Dcom.sun.management.jmxremote.rmi.port=9000"
|
"-Dcom.sun.management.jmxremote.rmi.port=9090"
|
||||||
"-Dcom.sun.management.jmxremote.local.only=false"
|
"-Dcom.sun.management.jmxremote.local.only=false"
|
||||||
"-Dcom.sun.management.jmxremote.authenticate=false"
|
"-Dcom.sun.management.jmxremote.authenticate=false"
|
||||||
"-Dcom.sun.management.jmxremote.ssl=false"
|
"-Dcom.sun.management.jmxremote.ssl=false"
|
||||||
|
|||||||
@ -6,14 +6,12 @@
|
|||||||
|
|
||||||
(ns user
|
(ns user
|
||||||
(:require
|
(:require
|
||||||
[app.binfile.common :as bfc]
|
|
||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.debug :as debug]
|
[app.common.debug :as debug]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.files.helpers :as cfh]
|
[app.common.files.helpers :as cfh]
|
||||||
[app.common.fressian :as fres]
|
[app.common.fressian :as fres]
|
||||||
[app.common.geom.matrix :as gmt]
|
[app.common.geom.matrix :as gmt]
|
||||||
[app.common.json :as json]
|
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.perf :as perf]
|
[app.common.perf :as perf]
|
||||||
[app.common.pprint :as pp]
|
[app.common.pprint :as pp]
|
||||||
@ -21,22 +19,20 @@
|
|||||||
[app.common.schema.desc-js-like :as smdj]
|
[app.common.schema.desc-js-like :as smdj]
|
||||||
[app.common.schema.desc-native :as smdn]
|
[app.common.schema.desc-native :as smdn]
|
||||||
[app.common.schema.generators :as sg]
|
[app.common.schema.generators :as sg]
|
||||||
[app.common.schema.openapi :as oapi]
|
|
||||||
[app.common.spec :as us]
|
[app.common.spec :as us]
|
||||||
[app.common.time :as ct]
|
[app.common.json :as json]
|
||||||
[app.common.transit :as t]
|
[app.common.transit :as t]
|
||||||
[app.common.types.file :as ctf]
|
[app.common.types.file :as ctf]
|
||||||
[app.common.uuid :as uuid]
|
[app.common.uuid :as uuid]
|
||||||
[app.common.uri :as u]
|
|
||||||
[app.config :as cf]
|
[app.config :as cf]
|
||||||
[app.db :as db]
|
[app.db :as db]
|
||||||
[app.main :as main]
|
[app.main :as main]
|
||||||
[app.srepl.helpers :as h]
|
[app.srepl.helpers :as srepl.helpers]
|
||||||
[app.srepl.main :refer :all]
|
[app.srepl.main :as srepl]
|
||||||
[app.util.blob :as blob]
|
[app.util.blob :as blob]
|
||||||
|
[app.util.time :as dt]
|
||||||
[clj-async-profiler.core :as prof]
|
[clj-async-profiler.core :as prof]
|
||||||
[clojure.contrib.humanize :as hum]
|
[clojure.contrib.humanize :as hum]
|
||||||
[clojure.datafy :refer [datafy]]
|
|
||||||
[clojure.java.io :as io]
|
[clojure.java.io :as io]
|
||||||
[clojure.pprint :refer [pprint print-table]]
|
[clojure.pprint :refer [pprint print-table]]
|
||||||
[clojure.repl :refer :all]
|
[clojure.repl :refer :all]
|
||||||
|
|||||||
@ -4,7 +4,7 @@
|
|||||||
"license": "MPL-2.0",
|
"license": "MPL-2.0",
|
||||||
"author": "Kaleidos INC",
|
"author": "Kaleidos INC",
|
||||||
"private": true,
|
"private": true,
|
||||||
"packageManager": "pnpm@10.31.0+sha512.e3927388bfaa8078ceb79b748ffc1e8274e84d75163e67bc22e06c0d3aed43dd153151cbf11d7f8301ff4acb98c68bdc5cadf6989532801ffafe3b3e4a63c268",
|
"packageManager": "yarn@4.8.1+sha512.bc946f2a022d7a1a38adfc15b36a66a3807a67629789496c3714dd1703d2e6c6b1c69ff9ec3b43141ac7a1dd853b7685638eb0074300386a59c18df351ef8ff6",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/penpot/penpot"
|
"url": "https://github.com/penpot/penpot"
|
||||||
@ -19,9 +19,8 @@
|
|||||||
"ws": "^8.17.0"
|
"ws": "^8.17.0"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"lint": "clj-kondo --parallel --lint ../common/src src/",
|
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
|
||||||
"check-fmt": "cljfmt check --parallel=true src/ test/",
|
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
|
||||||
"fmt": "cljfmt fix --parallel=true src/ test/",
|
"lint:clj": "clj-kondo --parallel --lint src/"
|
||||||
"test": "clojure -M:dev:test"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
306
backend/pnpm-lock.yaml
generated
306
backend/pnpm-lock.yaml
generated
@ -1,306 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
dependencies:
|
|
||||||
luxon:
|
|
||||||
specifier: ^3.4.4
|
|
||||||
version: 3.7.2
|
|
||||||
sax:
|
|
||||||
specifier: ^1.4.1
|
|
||||||
version: 1.4.3
|
|
||||||
devDependencies:
|
|
||||||
nodemon:
|
|
||||||
specifier: ^3.1.2
|
|
||||||
version: 3.1.11
|
|
||||||
source-map-support:
|
|
||||||
specifier: ^0.5.21
|
|
||||||
version: 0.5.21
|
|
||||||
ws:
|
|
||||||
specifier: ^8.17.0
|
|
||||||
version: 8.18.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
anymatch@3.1.3:
|
|
||||||
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
|
|
||||||
engines: {node: '>= 8'}
|
|
||||||
|
|
||||||
balanced-match@1.0.2:
|
|
||||||
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
|
|
||||||
|
|
||||||
binary-extensions@2.3.0:
|
|
||||||
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
|
|
||||||
engines: {node: '>=8'}
|
|
||||||
|
|
||||||
brace-expansion@1.1.12:
|
|
||||||
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
|
|
||||||
|
|
||||||
braces@3.0.3:
|
|
||||||
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
|
|
||||||
engines: {node: '>=8'}
|
|
||||||
|
|
||||||
buffer-from@1.1.2:
|
|
||||||
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
|
|
||||||
|
|
||||||
chokidar@3.6.0:
|
|
||||||
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
|
|
||||||
engines: {node: '>= 8.10.0'}
|
|
||||||
|
|
||||||
concat-map@0.0.1:
|
|
||||||
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
|
|
||||||
|
|
||||||
debug@4.4.3:
|
|
||||||
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
|
|
||||||
engines: {node: '>=6.0'}
|
|
||||||
peerDependencies:
|
|
||||||
supports-color: '*'
|
|
||||||
peerDependenciesMeta:
|
|
||||||
supports-color:
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
fill-range@7.1.1:
|
|
||||||
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
|
|
||||||
engines: {node: '>=8'}
|
|
||||||
|
|
||||||
fsevents@2.3.3:
|
|
||||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
|
||||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
glob-parent@5.1.2:
|
|
||||||
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
|
|
||||||
engines: {node: '>= 6'}
|
|
||||||
|
|
||||||
has-flag@3.0.0:
|
|
||||||
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
|
|
||||||
engines: {node: '>=4'}
|
|
||||||
|
|
||||||
ignore-by-default@1.0.1:
|
|
||||||
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
|
|
||||||
|
|
||||||
is-binary-path@2.1.0:
|
|
||||||
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
|
|
||||||
engines: {node: '>=8'}
|
|
||||||
|
|
||||||
is-extglob@2.1.1:
|
|
||||||
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
|
|
||||||
engines: {node: '>=0.10.0'}
|
|
||||||
|
|
||||||
is-glob@4.0.3:
|
|
||||||
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
|
|
||||||
engines: {node: '>=0.10.0'}
|
|
||||||
|
|
||||||
is-number@7.0.0:
|
|
||||||
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
|
|
||||||
engines: {node: '>=0.12.0'}
|
|
||||||
|
|
||||||
luxon@3.7.2:
|
|
||||||
resolution: {integrity: sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==}
|
|
||||||
engines: {node: '>=12'}
|
|
||||||
|
|
||||||
minimatch@3.1.2:
|
|
||||||
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
|
|
||||||
|
|
||||||
ms@2.1.3:
|
|
||||||
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
|
|
||||||
|
|
||||||
nodemon@3.1.11:
|
|
||||||
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
|
|
||||||
engines: {node: '>=10'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
normalize-path@3.0.0:
|
|
||||||
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
|
|
||||||
engines: {node: '>=0.10.0'}
|
|
||||||
|
|
||||||
picomatch@2.3.1:
|
|
||||||
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
|
|
||||||
engines: {node: '>=8.6'}
|
|
||||||
|
|
||||||
pstree.remy@1.1.8:
|
|
||||||
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
|
|
||||||
|
|
||||||
readdirp@3.6.0:
|
|
||||||
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
|
|
||||||
engines: {node: '>=8.10.0'}
|
|
||||||
|
|
||||||
sax@1.4.3:
|
|
||||||
resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==}
|
|
||||||
|
|
||||||
semver@7.7.3:
|
|
||||||
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
|
|
||||||
engines: {node: '>=10'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
simple-update-notifier@2.0.0:
|
|
||||||
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
|
|
||||||
engines: {node: '>=10'}
|
|
||||||
|
|
||||||
source-map-support@0.5.21:
|
|
||||||
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
|
|
||||||
|
|
||||||
source-map@0.6.1:
|
|
||||||
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
|
|
||||||
engines: {node: '>=0.10.0'}
|
|
||||||
|
|
||||||
supports-color@5.5.0:
|
|
||||||
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
|
|
||||||
engines: {node: '>=4'}
|
|
||||||
|
|
||||||
to-regex-range@5.0.1:
|
|
||||||
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
|
|
||||||
engines: {node: '>=8.0'}
|
|
||||||
|
|
||||||
touch@3.1.1:
|
|
||||||
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undefsafe@2.0.5:
|
|
||||||
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
|
|
||||||
|
|
||||||
ws@8.18.3:
|
|
||||||
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
|
|
||||||
engines: {node: '>=10.0.0'}
|
|
||||||
peerDependencies:
|
|
||||||
bufferutil: ^4.0.1
|
|
||||||
utf-8-validate: '>=5.0.2'
|
|
||||||
peerDependenciesMeta:
|
|
||||||
bufferutil:
|
|
||||||
optional: true
|
|
||||||
utf-8-validate:
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
anymatch@3.1.3:
|
|
||||||
dependencies:
|
|
||||||
normalize-path: 3.0.0
|
|
||||||
picomatch: 2.3.1
|
|
||||||
|
|
||||||
balanced-match@1.0.2: {}
|
|
||||||
|
|
||||||
binary-extensions@2.3.0: {}
|
|
||||||
|
|
||||||
brace-expansion@1.1.12:
|
|
||||||
dependencies:
|
|
||||||
balanced-match: 1.0.2
|
|
||||||
concat-map: 0.0.1
|
|
||||||
|
|
||||||
braces@3.0.3:
|
|
||||||
dependencies:
|
|
||||||
fill-range: 7.1.1
|
|
||||||
|
|
||||||
buffer-from@1.1.2: {}
|
|
||||||
|
|
||||||
chokidar@3.6.0:
|
|
||||||
dependencies:
|
|
||||||
anymatch: 3.1.3
|
|
||||||
braces: 3.0.3
|
|
||||||
glob-parent: 5.1.2
|
|
||||||
is-binary-path: 2.1.0
|
|
||||||
is-glob: 4.0.3
|
|
||||||
normalize-path: 3.0.0
|
|
||||||
readdirp: 3.6.0
|
|
||||||
optionalDependencies:
|
|
||||||
fsevents: 2.3.3
|
|
||||||
|
|
||||||
concat-map@0.0.1: {}
|
|
||||||
|
|
||||||
debug@4.4.3(supports-color@5.5.0):
|
|
||||||
dependencies:
|
|
||||||
ms: 2.1.3
|
|
||||||
optionalDependencies:
|
|
||||||
supports-color: 5.5.0
|
|
||||||
|
|
||||||
fill-range@7.1.1:
|
|
||||||
dependencies:
|
|
||||||
to-regex-range: 5.0.1
|
|
||||||
|
|
||||||
fsevents@2.3.3:
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
glob-parent@5.1.2:
|
|
||||||
dependencies:
|
|
||||||
is-glob: 4.0.3
|
|
||||||
|
|
||||||
has-flag@3.0.0: {}
|
|
||||||
|
|
||||||
ignore-by-default@1.0.1: {}
|
|
||||||
|
|
||||||
is-binary-path@2.1.0:
|
|
||||||
dependencies:
|
|
||||||
binary-extensions: 2.3.0
|
|
||||||
|
|
||||||
is-extglob@2.1.1: {}
|
|
||||||
|
|
||||||
is-glob@4.0.3:
|
|
||||||
dependencies:
|
|
||||||
is-extglob: 2.1.1
|
|
||||||
|
|
||||||
is-number@7.0.0: {}
|
|
||||||
|
|
||||||
luxon@3.7.2: {}
|
|
||||||
|
|
||||||
minimatch@3.1.2:
|
|
||||||
dependencies:
|
|
||||||
brace-expansion: 1.1.12
|
|
||||||
|
|
||||||
ms@2.1.3: {}
|
|
||||||
|
|
||||||
nodemon@3.1.11:
|
|
||||||
dependencies:
|
|
||||||
chokidar: 3.6.0
|
|
||||||
debug: 4.4.3(supports-color@5.5.0)
|
|
||||||
ignore-by-default: 1.0.1
|
|
||||||
minimatch: 3.1.2
|
|
||||||
pstree.remy: 1.1.8
|
|
||||||
semver: 7.7.3
|
|
||||||
simple-update-notifier: 2.0.0
|
|
||||||
supports-color: 5.5.0
|
|
||||||
touch: 3.1.1
|
|
||||||
undefsafe: 2.0.5
|
|
||||||
|
|
||||||
normalize-path@3.0.0: {}
|
|
||||||
|
|
||||||
picomatch@2.3.1: {}
|
|
||||||
|
|
||||||
pstree.remy@1.1.8: {}
|
|
||||||
|
|
||||||
readdirp@3.6.0:
|
|
||||||
dependencies:
|
|
||||||
picomatch: 2.3.1
|
|
||||||
|
|
||||||
sax@1.4.3: {}
|
|
||||||
|
|
||||||
semver@7.7.3: {}
|
|
||||||
|
|
||||||
simple-update-notifier@2.0.0:
|
|
||||||
dependencies:
|
|
||||||
semver: 7.7.3
|
|
||||||
|
|
||||||
source-map-support@0.5.21:
|
|
||||||
dependencies:
|
|
||||||
buffer-from: 1.1.2
|
|
||||||
source-map: 0.6.1
|
|
||||||
|
|
||||||
source-map@0.6.1: {}
|
|
||||||
|
|
||||||
supports-color@5.5.0:
|
|
||||||
dependencies:
|
|
||||||
has-flag: 3.0.0
|
|
||||||
|
|
||||||
to-regex-range@5.0.1:
|
|
||||||
dependencies:
|
|
||||||
is-number: 7.0.0
|
|
||||||
|
|
||||||
touch@3.1.1: {}
|
|
||||||
|
|
||||||
undefsafe@2.0.5: {}
|
|
||||||
|
|
||||||
ws@8.18.3: {}
|
|
||||||
@ -193,7 +193,7 @@
|
|||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||||
<div
|
<div
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||||
Click the link below to confirm the change.</div>
|
Click to the link below to confirm the change:</div>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
@ -217,7 +217,8 @@
|
|||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||||
<div
|
<div
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||||
If you did not request this change, consider changing your password for security reasons.</div>
|
If you received this email by mistake, please consider changing your password for security
|
||||||
|
reasons.</div>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
|
|||||||
@ -2,11 +2,12 @@ Hello {{name|abbreviate:25}}!
|
|||||||
|
|
||||||
We received a request to change your current email to {{ pending-email }}.
|
We received a request to change your current email to {{ pending-email }}.
|
||||||
|
|
||||||
Click the link below to confirm the change.
|
Click to the link below to confirm the change:
|
||||||
|
|
||||||
{{ public-uri }}/#/auth/verify-token?token={{token}}
|
{{ public-uri }}/#/auth/verify-token?token={{token}}
|
||||||
|
|
||||||
If you did not request this change, consider changing your password for security reasons.
|
If you received this email by mistake, please consider changing your password
|
||||||
|
for security reasons.
|
||||||
|
|
||||||
Enjoy!
|
Enjoy!
|
||||||
The Penpot team.
|
The Penpot team.
|
||||||
|
|||||||
@ -8,41 +8,38 @@
|
|||||||
<body>
|
<body>
|
||||||
<p>
|
<p>
|
||||||
<strong>Feedback from:</strong><br />
|
<strong>Feedback from:</strong><br />
|
||||||
<span>
|
{% if profile %}
|
||||||
<span>Name: </span>
|
<span>
|
||||||
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
|
<span>Name: </span>
|
||||||
</span>
|
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
|
||||||
<br />
|
</span>
|
||||||
<span>
|
<br />
|
||||||
<span>Email: </span>
|
|
||||||
<span>{{profile.email}}</span>
|
<span>
|
||||||
</span>
|
<span>Email: </span>
|
||||||
<br />
|
<span>{{profile.email}}</span>
|
||||||
<span>
|
</span>
|
||||||
<span>ID: </span>
|
<br />
|
||||||
<span><code>{{profile.id}}</code></span>
|
|
||||||
</span>
|
<span>
|
||||||
|
<span>ID: </span>
|
||||||
|
<span><code>{{profile.id}}</code></span>
|
||||||
|
</span>
|
||||||
|
{% else %}
|
||||||
|
<span>
|
||||||
|
<span>Email: </span>
|
||||||
|
<span>{{profile.email}}</span>
|
||||||
|
</span>
|
||||||
|
{% endif %}
|
||||||
</p>
|
</p>
|
||||||
<p>
|
<p>
|
||||||
<strong>Subject:</strong><br />
|
<strong>Subject:</strong><br />
|
||||||
<span>{{feedback-subject|abbreviate:300}}</span>
|
<span>{{subject|abbreviate:300}}</span>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p>
|
|
||||||
<strong>Type:</strong><br />
|
|
||||||
<span>{{feedback-type|abbreviate:300}}</span>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
{% if feedback-error-href %}
|
|
||||||
<p>
|
|
||||||
<strong>Error HREF:</strong><br />
|
|
||||||
<span>{{feedback-error-href|abbreviate:500}}</span>
|
|
||||||
</p>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<strong>Message:</strong><br />
|
<strong>Message:</strong><br />
|
||||||
{{feedback-content|linebreaks-br}}
|
{{content|linebreaks-br|safe}}
|
||||||
</p>
|
</p>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
@ -1 +1 @@
|
|||||||
[PENPOT FEEDBACK]: {{feedback-subject}}
|
[PENPOT FEEDBACK]: {{subject}}
|
||||||
|
|||||||
@ -1,11 +1,9 @@
|
|||||||
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
|
{% if profile %}
|
||||||
Subject: {{feedback-subject}}
|
Feedback profile: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
|
||||||
Type: {{feedback-type}}
|
{% else %}
|
||||||
|
Feedback from: {{email}}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
{% if feedback-error-href %}
|
Subject: {{subject}}
|
||||||
HREF: {{feedback-error-href}}
|
|
||||||
{% endif -%}
|
|
||||||
|
|
||||||
Message:
|
{{content}}
|
||||||
|
|
||||||
{{feedback-content}}
|
|
||||||
|
|||||||
@ -1,264 +0,0 @@
|
|||||||
<!doctype html>
|
|
||||||
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml"
|
|
||||||
xmlns:o="urn:schemas-microsoft-com:office:office">
|
|
||||||
|
|
||||||
<head>
|
|
||||||
<title>
|
|
||||||
</title>
|
|
||||||
<!--[if !mso]><!-- -->
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<!--<![endif]-->
|
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
<style type="text/css">
|
|
||||||
#outlook a {
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
body {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
-webkit-text-size-adjust: 100%;
|
|
||||||
-ms-text-size-adjust: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
table,
|
|
||||||
td {
|
|
||||||
border-collapse: collapse;
|
|
||||||
mso-table-lspace: 0pt;
|
|
||||||
mso-table-rspace: 0pt;
|
|
||||||
}
|
|
||||||
|
|
||||||
img {
|
|
||||||
border: 0;
|
|
||||||
height: auto;
|
|
||||||
line-height: 100%;
|
|
||||||
outline: none;
|
|
||||||
text-decoration: none;
|
|
||||||
-ms-interpolation-mode: bicubic;
|
|
||||||
}
|
|
||||||
|
|
||||||
p {
|
|
||||||
display: block;
|
|
||||||
margin: 13px 0;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<!--[if mso]>
|
|
||||||
<xml>
|
|
||||||
<o:OfficeDocumentSettings>
|
|
||||||
<o:AllowPNG/>
|
|
||||||
<o:PixelsPerInch>96</o:PixelsPerInch>
|
|
||||||
</o:OfficeDocumentSettings>
|
|
||||||
</xml>
|
|
||||||
<![endif]-->
|
|
||||||
<!--[if lte mso 11]>
|
|
||||||
<style type="text/css">
|
|
||||||
.mj-outlook-group-fix { width:100% !important; }
|
|
||||||
</style>
|
|
||||||
<![endif]-->
|
|
||||||
<!--[if !mso]><!-->
|
|
||||||
<link href="https://fonts.googleapis.com/css?family=Source%20Sans%20Pro" rel="stylesheet" type="text/css">
|
|
||||||
<style type="text/css">
|
|
||||||
@import url(https://fonts.googleapis.com/css?family=Source%20Sans%20Pro);
|
|
||||||
</style>
|
|
||||||
<!--<![endif]-->
|
|
||||||
<style type="text/css">
|
|
||||||
@media only screen and (min-width:480px) {
|
|
||||||
.mj-column-per-100 {
|
|
||||||
width: 100% !important;
|
|
||||||
max-width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.mj-column-px-425 {
|
|
||||||
width: 425px !important;
|
|
||||||
max-width: 425px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<style type="text/css">
|
|
||||||
@media only screen and (max-width:480px) {
|
|
||||||
table.mj-full-width-mobile {
|
|
||||||
width: 100% !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
td.mj-full-width-mobile {
|
|
||||||
width: auto !important;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body style="background-color:#E5E5E5;">
|
|
||||||
<div style="background-color:#E5E5E5;">
|
|
||||||
<!--[if mso | IE]>
|
|
||||||
<table
|
|
||||||
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
|
|
||||||
>
|
|
||||||
<tr>
|
|
||||||
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
|
|
||||||
<![endif]-->
|
|
||||||
<div style="margin:0px auto;max-width:600px;">
|
|
||||||
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation" style="width:100%;">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="direction:ltr;font-size:0px;padding:0;text-align:center;">
|
|
||||||
<!--[if mso | IE]>
|
|
||||||
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
|
|
||||||
|
|
||||||
<tr>
|
|
||||||
|
|
||||||
<td
|
|
||||||
class="" style="vertical-align:top;width:600px;"
|
|
||||||
>
|
|
||||||
<![endif]-->
|
|
||||||
<div class="mj-column-per-100 mj-outlook-group-fix"
|
|
||||||
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
|
|
||||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
|
|
||||||
width="100%">
|
|
||||||
<tr>
|
|
||||||
<td align="left" style="font-size:0px;padding:16px;word-break:break-word;">
|
|
||||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
|
|
||||||
style="border-collapse:collapse;border-spacing:0px;">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="width:97px;">
|
|
||||||
<img height="32" src="{{ public-uri }}/images/email/uxbox-title.png"
|
|
||||||
style="border:0;display:block;outline:none;text-decoration:none;height:32px;width:100%;font-size:13px;"
|
|
||||||
width="97" />
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
<!--[if mso | IE]>
|
|
||||||
</td>
|
|
||||||
|
|
||||||
</tr>
|
|
||||||
|
|
||||||
</table>
|
|
||||||
<![endif]-->
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
<!--[if mso | IE]>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
<table
|
|
||||||
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
|
|
||||||
>
|
|
||||||
<tr>
|
|
||||||
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
|
|
||||||
<![endif]-->
|
|
||||||
<div style="background:#FFFFFF;background-color:#FFFFFF;margin:0px auto;max-width:600px;">
|
|
||||||
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation"
|
|
||||||
style="background:#FFFFFF;background-color:#FFFFFF;width:100%;">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="direction:ltr;font-size:0px;padding:20px 0;text-align:center;">
|
|
||||||
<!--[if mso | IE]>
|
|
||||||
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
|
|
||||||
|
|
||||||
<tr>
|
|
||||||
|
|
||||||
<td
|
|
||||||
class="" style="vertical-align:top;width:600px;"
|
|
||||||
>
|
|
||||||
<![endif]-->
|
|
||||||
<div class="mj-column-per-100 mj-outlook-group-fix"
|
|
||||||
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
|
|
||||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
|
|
||||||
width="100%">
|
|
||||||
<tr>
|
|
||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
|
||||||
<div
|
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
|
||||||
Hi{% if user-name %} {{ user-name|abbreviate:25 }}{% endif %},
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
|
||||||
<div
|
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
|
||||||
<b>{{invited-by|abbreviate:25}}</b> sent you an invitation to join the organization:
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
|
||||||
<div
|
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
|
||||||
<table role="presentation" cellpadding="0" cellspacing="0" border="0" width="20" height="20" style="display:inline-block;vertical-align:middle;">
|
|
||||||
<tr>
|
|
||||||
<td width="20" height="20" align="center" valign="middle"
|
|
||||||
background="{{organization-logo}}"
|
|
||||||
style="width:20px;height:20px;text-align:center;font-weight:bold;font-size:9px;line-height:20px;color:#ffffff;background-size:cover;background-position:center;background-repeat:no-repeat;border-radius: 50%;color:black">
|
|
||||||
{% if organization-initials %}{{organization-initials}}{% endif %}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
<span style="display:inline-block; vertical-align: middle;padding-left:5px;height:20px;line-height: 20px;">
|
|
||||||
“{{ organization-name|abbreviate:25 }}”
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td align="center" vertical-align="middle"
|
|
||||||
style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
|
||||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
|
|
||||||
style="border-collapse:separate;line-height:100%;">
|
|
||||||
<tr>
|
|
||||||
<td align="center" bgcolor="#6911d4" role="presentation"
|
|
||||||
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
|
|
||||||
valign="middle">
|
|
||||||
<a href="{{ public-uri }}/#/auth/verify-token?token={{token}}"
|
|
||||||
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
|
|
||||||
target="_blank"> ACCEPT INVITE </a>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
|
||||||
<div
|
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
|
||||||
Enjoy!</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
|
||||||
<div
|
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
|
||||||
The Penpot team.</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
<!--[if mso | IE]>
|
|
||||||
</td>
|
|
||||||
|
|
||||||
</tr>
|
|
||||||
|
|
||||||
</table>
|
|
||||||
<![endif]-->
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{% include "app/email/includes/footer.html" %}
|
|
||||||
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
|
|
||||||
</html>
|
|
||||||
@ -1 +0,0 @@
|
|||||||
{{invited-by|abbreviate:25}} has invited you to join the organization “{{ organization-name|abbreviate:25 }}”
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
Hello!
|
|
||||||
|
|
||||||
{{invited-by|abbreviate:25}} has invited you to join the organization “{{ organization-name|abbreviate:25 }}”.
|
|
||||||
|
|
||||||
Accept invitation using this link:
|
|
||||||
|
|
||||||
{{ public-uri }}/#/auth/verify-token?token={{token}}
|
|
||||||
|
|
||||||
Enjoy!
|
|
||||||
The Penpot team.
|
|
||||||
@ -186,8 +186,7 @@
|
|||||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||||
<div
|
<div
|
||||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”{% if organization %}
|
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”.</div>
|
||||||
part of the organization “{{ organization|abbreviate:25 }}”{% endif %}.</div>
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
@ -241,4 +240,4 @@
|
|||||||
</div>
|
</div>
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
@ -1 +1 @@
|
|||||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”
|
Invitation to join {{team}}
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
Hello!
|
Hello!
|
||||||
|
|
||||||
{{invited-by|abbreviate:25}} has invited you to join the team "{{ team|abbreviate:25 }}"{% if organization %}, part of the organization "{{ organization|abbreviate:25 }}"{% endif %}.
|
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”.
|
||||||
|
|
||||||
Accept invitation using this link:
|
Accept invitation using this link:
|
||||||
|
|
||||||
|
|||||||
@ -13,7 +13,7 @@ This will automatically include {{requested-by|abbreviate:25}} in the team, so t
|
|||||||
|
|
||||||
Click the link below to provide team access:
|
Click the link below to provide team access:
|
||||||
|
|
||||||
{{ public-uri }}/#/dashboard/members?team-id={{team-id}}&invite-email={{requested-by-email|urlescape}}
|
{{ public-uri }}/#/dashboard/members?team-id{{team-id}}&invite-email={{requested-by-email|urlescape}}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -1,10 +1,4 @@
|
|||||||
[{:id "tokens-starter-kit"
|
[{:id "wireframing-kit"
|
||||||
:name "Design tokens starter kit"
|
|
||||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
|
|
||||||
{:id "penpot-design-system"
|
|
||||||
:name "Penpot Design System | Pencil"
|
|
||||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Pencil-Penpot-Design-System.penpot"}
|
|
||||||
{:id "wireframing-kit"
|
|
||||||
:name "Wireframe library"
|
:name "Wireframe library"
|
||||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
|
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
|
||||||
{:id "prototype-examples"
|
{:id "prototype-examples"
|
||||||
@ -13,6 +7,9 @@
|
|||||||
{:id "plants-app"
|
{:id "plants-app"
|
||||||
:name "UI mockup example"
|
:name "UI mockup example"
|
||||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
|
:file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
|
||||||
|
{:id "penpot-design-system"
|
||||||
|
:name "Design system example"
|
||||||
|
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Penpot%20-%20Design%20System%20v2.1.penpot"}
|
||||||
{:id "tutorial-for-beginners"
|
{:id "tutorial-for-beginners"
|
||||||
:name "Tutorial for beginners"
|
:name "Tutorial for beginners"
|
||||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}
|
:file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}
|
||||||
|
|||||||
@ -4,7 +4,7 @@
|
|||||||
<meta charset="utf-8" />
|
<meta charset="utf-8" />
|
||||||
<meta name="robots" content="noindex,nofollow">
|
<meta name="robots" content="noindex,nofollow">
|
||||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||||
<title>{{label|upper}} API Documentation</title>
|
<title>Builtin API Documentation - Penpot</title>
|
||||||
|
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
@ -19,7 +19,7 @@
|
|||||||
<body>
|
<body>
|
||||||
<main>
|
<main>
|
||||||
<header>
|
<header>
|
||||||
<h1>{{label|upper}}: API Documentation (v{{version}})</h1>
|
<h1>Penpot API Documentation (v{{version}})</h1>
|
||||||
<small class="menu">
|
<small class="menu">
|
||||||
[
|
[
|
||||||
<nav>
|
<nav>
|
||||||
@ -31,10 +31,9 @@
|
|||||||
</header>
|
</header>
|
||||||
<section class="doc-content">
|
<section class="doc-content">
|
||||||
<h2>INTRODUCTION</h2>
|
<h2>INTRODUCTION</h2>
|
||||||
<p>This documentation is intended to be a general overview of
|
<p>This documentation is intended to be a general overview of the penpot RPC API.
|
||||||
the {{label}} API. If you prefer, you can
|
If you prefer, you can use <a href="/api/openapi.json">OpenAPI</a>
|
||||||
use <a href="{{openapi}}">Swagger/OpenAPI</a> as
|
and/or <a href="/api/openapi">SwaggerUI</a> as alternative.</p>
|
||||||
alternative.</p>
|
|
||||||
|
|
||||||
<h2>GENERAL NOTES</h2>
|
<h2>GENERAL NOTES</h2>
|
||||||
|
|
||||||
@ -44,7 +43,7 @@
|
|||||||
that starts with <b>get-</b> in the name, can use GET HTTP
|
that starts with <b>get-</b> in the name, can use GET HTTP
|
||||||
method which in many cases benefits from the HTTP cache.</p>
|
method which in many cases benefits from the HTTP cache.</p>
|
||||||
|
|
||||||
{% block auth-section %}
|
|
||||||
<h3>Authentication</h3>
|
<h3>Authentication</h3>
|
||||||
<p>The penpot backend right now offers two way for authenticate the request:
|
<p>The penpot backend right now offers two way for authenticate the request:
|
||||||
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the
|
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the
|
||||||
@ -57,10 +56,9 @@
|
|||||||
<p>The access token can be obtained on the appropriate section on profile settings
|
<p>The access token can be obtained on the appropriate section on profile settings
|
||||||
and it should be provided using <b>`Authorization`</b> header with <b>`Token
|
and it should be provided using <b>`Authorization`</b> header with <b>`Token
|
||||||
<token-string>`</b> value.</p>
|
<token-string>`</b> value.</p>
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
<h3>Content Negotiation</h3>
|
<h3>Content Negotiation</h3>
|
||||||
<p>This API operates indistinctly with: <b>`application/json`</b>
|
<p>The penpot API by default operates indistinctly with: <b>`application/json`</b>
|
||||||
and <b>`application/transit+json`</b> content types. You should specify the
|
and <b>`application/transit+json`</b> content types. You should specify the
|
||||||
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
|
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
|
||||||
by default.</p>
|
by default.</p>
|
||||||
@ -77,16 +75,13 @@
|
|||||||
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
|
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
|
||||||
API</a></p>
|
API</a></p>
|
||||||
|
|
||||||
{% block limits-section %}
|
|
||||||
<h3>Limits</h3>
|
<h3>Limits</h3>
|
||||||
<p>The rate limit work per user basis (this means that different api keys share
|
<p>The rate limit work per user basis (this means that different api keys share
|
||||||
the same rate limit). For now the limits are not documented because we are
|
the same rate limit). For now the limits are not documented because we are
|
||||||
studying and analyzing the data. As a general rule, it should not be abused, if an
|
studying and analyzing the data. As a general rule, it should not be abused, if an
|
||||||
abusive use is detected, we will proceed to block the user's access to the
|
abusive use is detected, we will proceed to block the user's access to the
|
||||||
API.</p>
|
API.</p>
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block webhooks-section %}
|
|
||||||
<h3>Webhooks</h3>
|
<h3>Webhooks</h3>
|
||||||
<p>All methods that emit webhook events are marked with flag <b>WEBHOOK</b>, the
|
<p>All methods that emit webhook events are marked with flag <b>WEBHOOK</b>, the
|
||||||
data structure defined on each method represents the <i>payload</i> of the
|
data structure defined on each method represents the <i>payload</i> of the
|
||||||
@ -102,11 +97,9 @@
|
|||||||
"profileId": "db601c95-045f-808b-8002-361312e63531"
|
"profileId": "db601c95-045f-808b-8002-361312e63531"
|
||||||
}
|
}
|
||||||
</pre>
|
</pre>
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
</section>
|
</section>
|
||||||
<section class="rpc-doc-content">
|
<section class="rpc-doc-content">
|
||||||
<h2>METHODS REFERENCE:</h2>
|
<h2>RPC METHODS REFERENCE:</h2>
|
||||||
<ul class="rpc-items">
|
<ul class="rpc-items">
|
||||||
{% for item in methods %}
|
{% for item in methods %}
|
||||||
{% include "app/templates/api-doc-entry.tmpl" with item=item %}
|
{% include "app/templates/api-doc-entry.tmpl" with item=item %}
|
||||||
|
|||||||
@ -5,6 +5,7 @@
|
|||||||
<meta name="robots" content="noindex,nofollow">
|
<meta name="robots" content="noindex,nofollow">
|
||||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||||
<title>{% block title %}{% endblock %}</title>
|
<title>{% block title %}{% endblock %}</title>
|
||||||
|
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=JetBrains+Mono">
|
||||||
<style>
|
<style>
|
||||||
{% include "app/templates/styles.css" %}
|
{% include "app/templates/styles.css" %}
|
||||||
</style>
|
</style>
|
||||||
|
|||||||
@ -12,62 +12,43 @@ Debug Main Page
|
|||||||
</nav>
|
</nav>
|
||||||
<main class="dashboard">
|
<main class="dashboard">
|
||||||
<section class="widget">
|
<section class="widget">
|
||||||
|
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<legend>CURRENT PROFILE</legend>
|
<legend>Error reports</legend>
|
||||||
<desc>
|
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
|
||||||
<p>
|
|
||||||
Name: <b>{{profile.fullname}}</b> <br />
|
|
||||||
Email: <b>{{profile.email}}</b>
|
|
||||||
</p>
|
|
||||||
</desc>
|
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<legend>VIRTUAL CLOCK</legend>
|
<legend>Download file data:</legend>
|
||||||
|
<desc>Given an FILE-ID, downloads the file data as file. The file data is encoded using transit.</desc>
|
||||||
<desc>
|
<form method="get" action="/dbg/file/data">
|
||||||
<p><b>IMPORTANT:</b> The virtual clock is profile based and only affects the currently logged-in profile.</p>
|
|
||||||
<p>
|
|
||||||
CURRENT CLOCK: <b>{{current-clock}}</b>
|
|
||||||
<br />
|
|
||||||
CURRENT OFFSET: <b>{{current-offset}}</b>
|
|
||||||
<br />
|
|
||||||
CURRENT TIME: <b>{{current-time}}</b>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<p>Examples: 3h, -7h, 24h (allowed suffixes: h, s)</p>
|
|
||||||
</desc>
|
|
||||||
|
|
||||||
<form method="post" action="/dbg/actions/set-virtual-clock">
|
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<input type="text" name="offset" placeholder="3h" value="" />
|
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<label for="force-verify">Are you sure?</label>
|
<input type="submit" name="download" value="Download" />
|
||||||
<input id="force-verify" type="checkbox" name="force" />
|
<input type="submit" name="clone" value="Clone" />
|
||||||
<br />
|
|
||||||
<small>
|
|
||||||
This is a just a security double check for prevent non intentional submits.
|
|
||||||
</small>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="row">
|
|
||||||
<input type="submit" name="submit" value="Submit" />
|
|
||||||
<input type="submit" name="reset" value="Reset" />
|
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<legend>ERROR REPORTS</legend>
|
<legend>Upload File Data:</legend>
|
||||||
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
|
<desc>Create a new file on your draft projects using the file downloaded from the previous section.</desc>
|
||||||
|
<form method="post" enctype="multipart/form-data" action="/dbg/file/data">
|
||||||
|
<div class="row">
|
||||||
|
<input type="file" name="file" value="" />
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<label>Import with same id?</label>
|
||||||
|
<input type="checkbox" name="reuseid" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<input type="submit" value="Upload" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
</section>
|
|
||||||
|
|
||||||
|
|
||||||
<section class="widget">
|
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<legend>Profile Management</legend>
|
<legend>Profile Management</legend>
|
||||||
<form method="post" action="/dbg/actions/resend-email-verification">
|
<form method="post" action="/dbg/actions/resend-email-verification">
|
||||||
@ -95,102 +76,11 @@ Debug Main Page
|
|||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
|
||||||
|
|
||||||
<fieldset>
|
|
||||||
<legend>Feature Flags for Team</legend>
|
|
||||||
<desc>Add a feature flag to a team</desc>
|
|
||||||
<form method="post" action="/dbg/actions/handle-team-features">
|
|
||||||
<div class="row">
|
|
||||||
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
|
|
||||||
</div>
|
|
||||||
<div class="row">
|
|
||||||
<select type="text" style="width:100px" name="feature">
|
|
||||||
{% for feature in supported-features %}
|
|
||||||
<option value="{{feature}}">{{feature}}</option>
|
|
||||||
{% endfor %}
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="row">
|
|
||||||
<select style="width:100px" name="action">
|
|
||||||
<option value="">Action...</option>
|
|
||||||
<option value="show">Show</option>
|
|
||||||
<option value="enable">Enable</option>
|
|
||||||
<option value="disable">Disable</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="row">
|
|
||||||
<label for="check-feature">Skip feature check</label>
|
|
||||||
<input id="check-feature" type="checkbox" name="skip-check" />
|
|
||||||
<br />
|
|
||||||
<small>
|
|
||||||
Do not check if the feature is supported
|
|
||||||
</small>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="row">
|
|
||||||
<label for="force-version">Are you sure?</label>
|
|
||||||
<input id="force-version" type="checkbox" name="force" />
|
|
||||||
<br />
|
|
||||||
<small>
|
|
||||||
This is a just a security double check for prevent non intentional submits.
|
|
||||||
</small>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="row">
|
|
||||||
<input type="submit" value="Submit" />
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</fieldset>
|
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
|
|
||||||
<section class="widget">
|
|
||||||
|
|
||||||
<fieldset>
|
|
||||||
<legend>Download RAW file data:</legend>
|
|
||||||
<desc>Given an FILE-ID, downloads the file AS-IS (no validation
|
|
||||||
checks, just exports the file data and related objects in raw)
|
|
||||||
|
|
||||||
<br/>
|
|
||||||
<br/>
|
|
||||||
<b>WARNING: this operation does not performs any checks</b>
|
|
||||||
</desc>
|
|
||||||
<form method="get" action="/dbg/actions/file-raw-export-import">
|
|
||||||
<div class="row">
|
|
||||||
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
|
||||||
</div>
|
|
||||||
<div class="row">
|
|
||||||
<input type="submit" name="download" value="Download" />
|
|
||||||
<input type="submit" name="clone" value="Clone" />
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</fieldset>
|
|
||||||
|
|
||||||
<fieldset>
|
|
||||||
<legend>Upload File Data:</legend>
|
|
||||||
<desc>Create a new file on your draft projects using the file downloaded from the previous section.
|
|
||||||
<br/>
|
|
||||||
<br/>
|
|
||||||
<b>WARNING: this operation does not performs any checks</b>
|
|
||||||
</desc>
|
|
||||||
<form method="post" enctype="multipart/form-data" action="/dbg/actions/file-raw-export-import">
|
|
||||||
<div class="row">
|
|
||||||
<input type="file" name="file" value="" />
|
|
||||||
</div>
|
|
||||||
<div class="row">
|
|
||||||
<label>Import with same id?</label>
|
|
||||||
<input type="checkbox" name="reuseid" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="row">
|
|
||||||
<input type="submit" value="Upload" />
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</fieldset>
|
|
||||||
</section>
|
|
||||||
<section class="widget">
|
<section class="widget">
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<legend>Export binfile:</legend>
|
<legend>Export binfile:</legend>
|
||||||
@ -198,7 +88,7 @@ Debug Main Page
|
|||||||
the related libraries in a single custom formatted binary
|
the related libraries in a single custom formatted binary
|
||||||
file.</desc>
|
file.</desc>
|
||||||
|
|
||||||
<form method="get" action="/dbg/actions/file-export">
|
<form method="get" action="/dbg/file/export">
|
||||||
<div class="row set-of-inputs">
|
<div class="row set-of-inputs">
|
||||||
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
|
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
|
||||||
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
|
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
|
||||||
@ -226,7 +116,7 @@ Debug Main Page
|
|||||||
<legend>Import binfile:</legend>
|
<legend>Import binfile:</legend>
|
||||||
<desc>Import penpot file in binary format.</desc>
|
<desc>Import penpot file in binary format.</desc>
|
||||||
|
|
||||||
<form method="post" enctype="multipart/form-data" action="/dbg/actions/file-import">
|
<form method="post" enctype="multipart/form-data" action="/dbg/file/import">
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<input type="file" name="file" value="" />
|
<input type="file" name="file" value="" />
|
||||||
</div>
|
</div>
|
||||||
@ -237,5 +127,107 @@ Debug Main Page
|
|||||||
</form>
|
</form>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
|
<section class="widget">
|
||||||
|
<fieldset>
|
||||||
|
<legend>Reset file version</legend>
|
||||||
|
<desc>Allows reset file data version to a specific number/</desc>
|
||||||
|
|
||||||
|
<form method="post" action="/dbg/actions/reset-file-version">
|
||||||
|
<div class="row">
|
||||||
|
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<input type="number" style="width:100px" name="version" placeholder="version" value="32" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<label for="force-version">Are you sure?</label>
|
||||||
|
<input id="force-version" type="checkbox" name="force" />
|
||||||
|
<br />
|
||||||
|
<small>
|
||||||
|
This is a just a security double check for prevent non intentional submits.
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<input type="submit" value="Submit" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</fieldset>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="widget">
|
||||||
|
<h2>Feature Flags</h2>
|
||||||
|
<fieldset>
|
||||||
|
<legend>Enable</legend>
|
||||||
|
<desc>Add a feature flag to a team</desc>
|
||||||
|
<form method="post" action="/dbg/actions/add-team-feature">
|
||||||
|
<div class="row">
|
||||||
|
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<input type="text" style="width:100px" name="feature" placeholder="feature" value="" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<label for="check-feature">Skip feature check</label>
|
||||||
|
<input id="check-feature" type="checkbox" name="skip-check" />
|
||||||
|
<br />
|
||||||
|
<small>
|
||||||
|
Do not check if the feature is supported
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<label for="force-version">Are you sure?</label>
|
||||||
|
<input id="force-version" type="checkbox" name="force" />
|
||||||
|
<br />
|
||||||
|
<small>
|
||||||
|
This is a just a security double check for prevent non intentional submits.
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<input type="submit" value="Submit" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</fieldset>
|
||||||
|
<fieldset>
|
||||||
|
<legend>Disable</legend>
|
||||||
|
<desc>Remove a feature flag from a team</desc>
|
||||||
|
<form method="post" action="/dbg/actions/remove-team-feature">
|
||||||
|
<div class="row">
|
||||||
|
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<input type="text" style="width:100px" name="feature" placeholder="feature" value="" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<label for="check-feature">Skip feature check</label>
|
||||||
|
<input id="check-feature" type="checkbox" name="skip-check" />
|
||||||
|
<br />
|
||||||
|
<small>
|
||||||
|
Do not check if the feature is supported
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<label for="force-version">Are you sure?</label>
|
||||||
|
<input id="force-version" type="checkbox" name="force" />
|
||||||
|
<br />
|
||||||
|
<small>
|
||||||
|
This is a just a security double check for prevent non intentional submits.
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
<input type="submit" value="Submit" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</fieldset>
|
||||||
|
</section>
|
||||||
</main>
|
</main>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@ -5,26 +5,21 @@ penpot - error list
|
|||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<nav>
|
<nav>
|
||||||
<div class="title">
|
<div class="title">
|
||||||
<a href="/dbg"> [BACK]</a>
|
<h1>Error reports (last 200)</h1>
|
||||||
<h1>Error reports (last 300)</h1>
|
</div>
|
||||||
|
</nav>
|
||||||
<a class="{% if version = 3 %}strong{% endif %}" href="?version=3">[BACKEND ERRORS]</a>
|
<main class="horizontal-list">
|
||||||
<a class="{% if version = 4 %}strong{% endif %}" href="?version=4">[FRONTEND ERRORS]</a>
|
<ul>
|
||||||
<a class="{% if version = 5 %}strong{% endif %}" href="?version=5">[RLIMIT REPORTS]</a>
|
{% for item in items %}
|
||||||
</div>
|
<li>
|
||||||
</nav>
|
<a class="date" href="/dbg/error/{{item.id}}">{{item.created-at}}</a>
|
||||||
<main class="horizontal-list">
|
<a class="hint" href="/dbg/error/{{item.id}}">
|
||||||
<ul>
|
<span class="title">{{item.hint|abbreviate:150}}</span>
|
||||||
{% for item in items %}
|
</a>
|
||||||
<li>
|
</li>
|
||||||
<a class="date" href="/dbg/error/{{item.id}}">{{item.created-at}}</a>
|
{% endfor %}
|
||||||
<a class="hint" href="/dbg/error/{{item.id}}">
|
</ul>
|
||||||
<span class="title">{{item.hint|abbreviate:150}}</span>
|
</main>
|
||||||
</a>
|
|
||||||
</li>
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
</main>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@ -6,7 +6,7 @@ Report: {{hint|abbreviate:150}} - {{id}} - Penpot Error Report (v3)
|
|||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<nav>
|
<nav>
|
||||||
<div>[<a href="/dbg/error?version={{version}}">⮜</a>]</div>
|
<div>[<a href="/dbg/error">⮜</a>]</div>
|
||||||
<div>[<a href="#head">head</a>]</div>
|
<div>[<a href="#head">head</a>]</div>
|
||||||
<div>[<a href="#props">props</a>]</div>
|
<div>[<a href="#props">props</a>]</div>
|
||||||
<div>[<a href="#context">context</a>]</div>
|
<div>[<a href="#context">context</a>]</div>
|
||||||
|
|||||||
@ -1,46 +0,0 @@
|
|||||||
{% extends "app/templates/base.tmpl" %}
|
|
||||||
|
|
||||||
{% block title %}
|
|
||||||
Report: {{hint|abbreviate:150}} - {{id}} - Penpot Error Report (v4)
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block content %}
|
|
||||||
<nav>
|
|
||||||
<div>[<a href="/dbg/error?version={{version}}">⮜</a>]</div>
|
|
||||||
<div>[<a href="#head">head</a>]</div>
|
|
||||||
<div>[<a href="#context">context</a>]</div>
|
|
||||||
{% if report %}
|
|
||||||
<div>[<a href="#report">report</a>]</div>
|
|
||||||
{% endif %}
|
|
||||||
</nav>
|
|
||||||
<main>
|
|
||||||
<div class="table">
|
|
||||||
<div class="table-row multiline">
|
|
||||||
<div id="head" class="table-key">HEAD</div>
|
|
||||||
<div class="table-val">
|
|
||||||
<h1><span class="not-important">Hint:</span> <br/> {{hint}}</h1>
|
|
||||||
<h2><span class="not-important">Reported at:</span> <br/> {{created-at}}</h2>
|
|
||||||
<h2><span class="not-important">Origin:</span> <br/> {{origin}}</h2>
|
|
||||||
<h2><span class="not-important">HREF:</span> <br/> {{href}}</h2>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="table-row multiline">
|
|
||||||
<div id="context" class="table-key">CONTEXT: </div>
|
|
||||||
|
|
||||||
<div class="table-val">
|
|
||||||
<pre>{{context}}</pre>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{% if report %}
|
|
||||||
<div class="table-row multiline">
|
|
||||||
<div id="report" class="table-key">REPORT:</div>
|
|
||||||
<div class="table-val">
|
|
||||||
<pre>{{report}}</pre>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
{% endblock %}
|
|
||||||
@ -1,40 +0,0 @@
|
|||||||
{% extends "app/templates/base.tmpl" %}
|
|
||||||
|
|
||||||
{% block title %}
|
|
||||||
Report: {{hint|abbreviate:150}} - {{id}} - Penpot Rate Limit Report
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block content %}
|
|
||||||
<nav>
|
|
||||||
<div>[<a href="/dbg/error?version={{version}}">⮜</a>]</div>
|
|
||||||
<div>[<a href="#head">head</a>]</div>
|
|
||||||
<div>[<a href="#context">context</a>]</div>
|
|
||||||
<div>[<a href="#result">result</a>]</div>
|
|
||||||
</nav>
|
|
||||||
<main>
|
|
||||||
<div class="table">
|
|
||||||
<div class="table-row multiline">
|
|
||||||
<div id="head" class="table-key">HEAD:</div>
|
|
||||||
<div class="table-val">
|
|
||||||
<h1><span class="not-important">Hint:</span> <br/> {{hint}}</h1>
|
|
||||||
<h2><span class="not-important">Reported at:</span> <br/> {{created-at}}</h2>
|
|
||||||
<h2><span class="not-important">Report ID:</span> <br/> {{id}}</h2>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="table-row multiline">
|
|
||||||
<div id="context" class="table-key">CONTEXT: </div>
|
|
||||||
<div class="table-val">
|
|
||||||
<pre>{{context}}</pre>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="table-row multiline">
|
|
||||||
<div id="result" class="table-key">RESULT: </div>
|
|
||||||
<div class="table-val">
|
|
||||||
<pre>{{result}}</pre>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
{% endblock %}
|
|
||||||
@ -1 +0,0 @@
|
|||||||
{% extends "app/templates/api-doc.tmpl" %}
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
{% extends "app/templates/api-doc.tmpl" %}
|
|
||||||
|
|
||||||
{% block auth-section %}
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block limits-section %}
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block webhooks-section %}
|
|
||||||
{% endblock %}
|
|
||||||
@ -7,7 +7,7 @@
|
|||||||
name="description"
|
name="description"
|
||||||
content="SwaggerUI"
|
content="SwaggerUI"
|
||||||
/>
|
/>
|
||||||
<title>{{label|upper}} API</title>
|
<title>PENPOT Swagger UI</title>
|
||||||
<style>{{swagger-css|safe}}</style>
|
<style>{{swagger-css|safe}}</style>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
@ -16,7 +16,7 @@
|
|||||||
<script>
|
<script>
|
||||||
window.onload = () => {
|
window.onload = () => {
|
||||||
window.ui = SwaggerUIBundle({
|
window.ui = SwaggerUIBundle({
|
||||||
url: '{{uri}}',
|
url: '{{public-uri}}/api/openapi.json',
|
||||||
dom_id: '#swagger-ui',
|
dom_id: '#swagger-ui',
|
||||||
presets: [
|
presets: [
|
||||||
SwaggerUIBundle.presets.apis,
|
SwaggerUIBundle.presets.apis,
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
* {
|
* {
|
||||||
font-family: monospace;
|
font-family: "JetBrains Mono", monospace;
|
||||||
font-size: 12px;
|
font-size: 12px;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -36,10 +36,6 @@ small {
|
|||||||
color: #888;
|
color: #888;
|
||||||
}
|
}
|
||||||
|
|
||||||
.strong {
|
|
||||||
font-weight: 900;
|
|
||||||
}
|
|
||||||
|
|
||||||
.not-important {
|
.not-important {
|
||||||
color: #888;
|
color: #888;
|
||||||
font-weight: 200;
|
font-weight: 200;
|
||||||
@ -61,26 +57,14 @@ nav {
|
|||||||
|
|
||||||
nav > .title {
|
nav > .title {
|
||||||
display: flex;
|
display: flex;
|
||||||
|
justify-content: center;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
|
|
||||||
nav > .title > a {
|
|
||||||
color: black;
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
nav > .title > a.strong {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
nav > .title > h1 {
|
nav > .title > h1 {
|
||||||
|
padding: 0px;
|
||||||
margin: 0px;
|
margin: 0px;
|
||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
display: block;
|
|
||||||
}
|
|
||||||
|
|
||||||
nav > .title > * {
|
|
||||||
padding: 0px 6px;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
nav > div {
|
nav > div {
|
||||||
|
|||||||
@ -25,7 +25,8 @@
|
|||||||
<Logger name="app.storage.tmp" level="info" />
|
<Logger name="app.storage.tmp" level="info" />
|
||||||
<Logger name="app.worker" level="trace" />
|
<Logger name="app.worker" level="trace" />
|
||||||
<Logger name="app.msgbus" level="info" />
|
<Logger name="app.msgbus" level="info" />
|
||||||
<Logger name="app.http" level="info" />
|
<Logger name="app.http.websocket" level="info" />
|
||||||
|
<Logger name="app.http.sse" level="info" />
|
||||||
<Logger name="app.util.websocket" level="info" />
|
<Logger name="app.util.websocket" level="info" />
|
||||||
<Logger name="app.redis" level="info" />
|
<Logger name="app.redis" level="info" />
|
||||||
<Logger name="app.rpc.rlimit" level="info" />
|
<Logger name="app.rpc.rlimit" level="info" />
|
||||||
|
|||||||
@ -25,7 +25,8 @@
|
|||||||
<Logger name="app.storage.tmp" level="info" />
|
<Logger name="app.storage.tmp" level="info" />
|
||||||
<Logger name="app.worker" level="trace" />
|
<Logger name="app.worker" level="trace" />
|
||||||
<Logger name="app.msgbus" level="info" />
|
<Logger name="app.msgbus" level="info" />
|
||||||
<Logger name="app.http" level="info" />
|
<Logger name="app.http.websocket" level="info" />
|
||||||
|
<Logger name="app.http.sse" level="info" />
|
||||||
<Logger name="app.util.websocket" level="info" />
|
<Logger name="app.util.websocket" level="info" />
|
||||||
<Logger name="app.redis" level="info" />
|
<Logger name="app.redis" level="info" />
|
||||||
<Logger name="app.rpc.rlimit" level="info" />
|
<Logger name="app.rpc.rlimit" level="info" />
|
||||||
|
|||||||
@ -3,9 +3,9 @@
|
|||||||
{:default
|
{:default
|
||||||
[[:default :window "200000/h"]]
|
[[:default :window "200000/h"]]
|
||||||
|
|
||||||
;; #{:main/get-teams}
|
;; #{:command/get-teams}
|
||||||
;; [[:burst :bucket "5/5/5s"]]
|
;; [[:burst :bucket "5/5/5s"]]
|
||||||
|
|
||||||
;; #{:main/get-profile}
|
;; #{:command/get-profile}
|
||||||
;; [[:burst :bucket "60/60/1m"]]
|
;; [[:burst :bucket "60/60/1m"]]
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,97 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
export PENPOT_NITRATE_SHARED_KEY=super-secret-nitrate-api-key
|
|
||||||
export PENPOT_EXPORTER_SHARED_KEY=super-secret-exporter-api-key
|
|
||||||
export PENPOT_NEXUS_SHARED_KEY=super-secret-nexus-api-key
|
|
||||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
|
||||||
|
|
||||||
# DEPRECATED: only used for subscriptions
|
|
||||||
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
|
|
||||||
|
|
||||||
export PENPOT_HOST=devenv
|
|
||||||
export PENPOT_PUBLIC_URI=https://localhost:3449
|
|
||||||
|
|
||||||
export PENPOT_FLAGS="\
|
|
||||||
$PENPOT_FLAGS \
|
|
||||||
enable-login-with-password \
|
|
||||||
disable-login-with-ldap \
|
|
||||||
disable-login-with-oidc \
|
|
||||||
disable-login-with-google \
|
|
||||||
disable-login-with-github \
|
|
||||||
disable-login-with-gitlab \
|
|
||||||
disable-telemetry \
|
|
||||||
enable-backend-worker \
|
|
||||||
enable-backend-asserts \
|
|
||||||
disable-feature-fdata-pointer-map \
|
|
||||||
enable-feature-fdata-objects-map \
|
|
||||||
enable-audit-log \
|
|
||||||
enable-transit-readable-response \
|
|
||||||
enable-demo-users \
|
|
||||||
enable-user-feedback \
|
|
||||||
disable-secure-session-cookies \
|
|
||||||
enable-smtp \
|
|
||||||
enable-prepl-server \
|
|
||||||
enable-urepl-server \
|
|
||||||
enable-rpc-climit \
|
|
||||||
enable-rpc-rlimit \
|
|
||||||
enable-quotes \
|
|
||||||
enable-soft-rpc-rlimit \
|
|
||||||
enable-auto-file-snapshot \
|
|
||||||
enable-webhooks \
|
|
||||||
enable-access-tokens \
|
|
||||||
disable-tiered-file-data-storage \
|
|
||||||
enable-file-validation \
|
|
||||||
enable-file-schema-validation \
|
|
||||||
enable-redis-cache \
|
|
||||||
enable-subscriptions";
|
|
||||||
|
|
||||||
# Uncomment for nexus integration testing
|
|
||||||
# export PENPOT_FLAGS="$PENPOT_FLAGS enable-audit-log-archive";
|
|
||||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit";
|
|
||||||
|
|
||||||
# Default deletion delay for devenv
|
|
||||||
export PENPOT_DELETION_DELAY="24h"
|
|
||||||
|
|
||||||
# Setup default upload media file size to 100MiB
|
|
||||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
|
||||||
|
|
||||||
# Setup default multipart upload size to 300MiB
|
|
||||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
|
||||||
|
|
||||||
export PENPOT_USER_FEEDBACK_DESTINATION="support@example.com"
|
|
||||||
|
|
||||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
|
||||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
|
||||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
|
||||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
|
||||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
|
||||||
|
|
||||||
export PENPOT_NITRATE_BACKEND_URI=http://localhost:3000/control-center
|
|
||||||
|
|
||||||
export JAVA_OPTS="\
|
|
||||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
|
||||||
-Djdk.attach.allowAttachSelf \
|
|
||||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
|
||||||
-Djdk.tracePinnedThreads=full \
|
|
||||||
-Dim4java.useV7=true \
|
|
||||||
-XX:+UnlockExperimentalVMOptions \
|
|
||||||
-XX:+UseShenandoahGC \
|
|
||||||
-XX:+UseCompactObjectHeaders \
|
|
||||||
-XX:ShenandoahGCMode=generational \
|
|
||||||
-XX:-OmitStackTraceInFastThrow \
|
|
||||||
--sun-misc-unsafe-memory-access=allow \
|
|
||||||
--enable-preview \
|
|
||||||
--enable-native-access=ALL-UNNAMED";
|
|
||||||
|
|
||||||
function setup_minio() {
|
|
||||||
# Initialize MINIO config
|
|
||||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
|
||||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
|
||||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
|
||||||
if [ "$?" = "1" ]; then
|
|
||||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
|
||||||
fi
|
|
||||||
mc mb penpot-s3/penpot -p -q
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@ -35,35 +35,40 @@ def get_prepl_conninfo():
|
|||||||
|
|
||||||
return host, port
|
return host, port
|
||||||
|
|
||||||
def send(data):
|
def send_eval(expr):
|
||||||
host, port = get_prepl_conninfo()
|
host, port = get_prepl_conninfo()
|
||||||
with socket.create_connection((host, port)) as s:
|
|
||||||
f = s.makefile(mode="rw")
|
|
||||||
|
|
||||||
json.dump(data, f)
|
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
||||||
f.write("\n")
|
s.connect((host, port))
|
||||||
f.flush()
|
s.send(expr.encode("utf-8"))
|
||||||
|
s.send(b":repl/quit\n\n")
|
||||||
|
|
||||||
while True:
|
with s.makefile() as f:
|
||||||
line = f.readline()
|
while True:
|
||||||
result = json.loads(line)
|
line = f.readline()
|
||||||
tag = result.get("tag", None)
|
result = json.loads(line)
|
||||||
|
tag = result.get("tag", None)
|
||||||
|
if tag == "ret":
|
||||||
|
return result.get("val", None), result.get("exception", None)
|
||||||
|
elif tag == "out":
|
||||||
|
print(result.get("val"), end="")
|
||||||
|
else:
|
||||||
|
raise RuntimeError("unexpected response from PREPL")
|
||||||
|
|
||||||
if tag == "ret":
|
def encode(val):
|
||||||
return result.get("val", None), result.get("err", None)
|
return json.dumps(json.dumps(val))
|
||||||
elif tag == "out":
|
|
||||||
print(result.get("val"), end="")
|
|
||||||
else:
|
|
||||||
raise RuntimeError("unexpected response from PREPL")
|
|
||||||
|
|
||||||
def print_error(error):
|
def print_error(res):
|
||||||
print("ERR:", error["hint"])
|
for error in res["via"]:
|
||||||
|
print("ERR:", error["message"])
|
||||||
|
break
|
||||||
|
|
||||||
def run_cmd(params):
|
def run_cmd(params):
|
||||||
try:
|
try:
|
||||||
res, err = send(params)
|
expr = "(app.srepl.cli/exec {})".format(encode(params))
|
||||||
if err:
|
res, failed = send_eval(expr)
|
||||||
print_error(err)
|
if failed:
|
||||||
|
print_error(res)
|
||||||
sys.exit(-1)
|
sys.exit(-1)
|
||||||
|
|
||||||
return res
|
return res
|
||||||
@ -71,27 +76,19 @@ def run_cmd(params):
|
|||||||
print("EXC:", str(cause))
|
print("EXC:", str(cause))
|
||||||
sys.exit(-2)
|
sys.exit(-2)
|
||||||
|
|
||||||
def create_profile(fullname, email, password, skip_tutorial=False, skip_walkthrough=False):
|
def create_profile(fullname, email, password):
|
||||||
props = {}
|
|
||||||
if skip_tutorial:
|
|
||||||
props["viewed-tutorial?"] = True
|
|
||||||
if skip_walkthrough:
|
|
||||||
props["viewed-walkthrough?"] = True
|
|
||||||
|
|
||||||
params = {
|
params = {
|
||||||
"cmd": "create-profile",
|
"cmd": "create-profile",
|
||||||
"params": {
|
"params": {
|
||||||
"fullname": fullname,
|
"fullname": fullname,
|
||||||
"email": email,
|
"email": email,
|
||||||
"password": password,
|
"password": password
|
||||||
**props
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
res = run_cmd(params)
|
res = run_cmd(params)
|
||||||
print(f"Created: {res['email']} / {res['id']}")
|
print(f"Created: {res['email']} / {res['id']}")
|
||||||
|
|
||||||
|
|
||||||
def update_profile(email, fullname, password, is_active):
|
def update_profile(email, fullname, password, is_active):
|
||||||
params = {
|
params = {
|
||||||
"cmd": "update-profile",
|
"cmd": "update-profile",
|
||||||
@ -99,7 +96,7 @@ def update_profile(email, fullname, password, is_active):
|
|||||||
"email": email,
|
"email": email,
|
||||||
"fullname": fullname,
|
"fullname": fullname,
|
||||||
"password": password,
|
"password": password,
|
||||||
"isActive": is_active
|
"is_active": is_active
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -141,7 +138,7 @@ def derive_password(password):
|
|||||||
params = {
|
params = {
|
||||||
"cmd": "derive-password",
|
"cmd": "derive-password",
|
||||||
"params": {
|
"params": {
|
||||||
"password": password
|
"password": password,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -178,8 +175,6 @@ parser.add_argument("-n", "--fullname", help="fullname", action="store")
|
|||||||
parser.add_argument("-e", "--email", help="email", action="store")
|
parser.add_argument("-e", "--email", help="email", action="store")
|
||||||
parser.add_argument("-p", "--password", help="password", action="store")
|
parser.add_argument("-p", "--password", help="password", action="store")
|
||||||
parser.add_argument("-c", "--connect", help="connect to PREPL", action="store", default="tcp://localhost:6063")
|
parser.add_argument("-c", "--connect", help="connect to PREPL", action="store", default="tcp://localhost:6063")
|
||||||
parser.add_argument("--skip-tutorial", help="mark tutorial as viewed", action="store_true")
|
|
||||||
parser.add_argument("--skip-walkthrough", help="mark walkthrough as viewed", action="store_true")
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
|||||||
@ -1,17 +1,109 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
SCRIPT_DIR=$(dirname $0);
|
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||||
source $SCRIPT_DIR/_env;
|
export PENPOT_HOST=devenv
|
||||||
|
export PENPOT_FLAGS="\
|
||||||
|
$PENPOT_FLAGS \
|
||||||
|
enable-login-with-ldap \
|
||||||
|
enable-login-with-password
|
||||||
|
enable-login-with-oidc \
|
||||||
|
enable-login-with-google \
|
||||||
|
enable-login-with-github \
|
||||||
|
enable-login-with-gitlab \
|
||||||
|
enable-backend-worker \
|
||||||
|
enable-backend-asserts \
|
||||||
|
enable-feature-fdata-pointer-map \
|
||||||
|
enable-feature-fdata-objects-map \
|
||||||
|
enable-audit-log \
|
||||||
|
enable-transit-readable-response \
|
||||||
|
enable-demo-users \
|
||||||
|
disable-secure-session-cookies \
|
||||||
|
enable-smtp \
|
||||||
|
enable-prepl-server \
|
||||||
|
enable-urepl-server \
|
||||||
|
enable-rpc-climit \
|
||||||
|
enable-rpc-rlimit \
|
||||||
|
enable-quotes \
|
||||||
|
enable-soft-rpc-rlimit \
|
||||||
|
enable-auto-file-snapshot \
|
||||||
|
enable-webhooks \
|
||||||
|
enable-access-tokens \
|
||||||
|
enable-tiered-file-data-storage \
|
||||||
|
enable-file-validation \
|
||||||
|
enable-file-schema-validation";
|
||||||
|
|
||||||
if [ -f $SCRIPT_DIR/_env.local ]; then
|
# Default deletion delay for devenv
|
||||||
source $SCRIPT_DIR/_env.local;
|
export PENPOT_DELETION_DELAY="24h"
|
||||||
fi
|
|
||||||
|
# Setup default upload media file size to 100MiB
|
||||||
|
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||||
|
|
||||||
|
# Setup default multipart upload size to 300MiB
|
||||||
|
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||||
|
|
||||||
|
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||||
|
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||||
|
# export PENPOT_DATABASE_PASSWORD="penpot"
|
||||||
|
# export PENPOT_DATABASE_READONLY=true
|
||||||
|
|
||||||
|
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
|
||||||
|
# export PENPOT_DATABASE_USERNAME="penpot_pre"
|
||||||
|
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
|
||||||
|
|
||||||
|
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
|
||||||
|
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
|
||||||
|
|
||||||
# Initialize MINIO config
|
# Initialize MINIO config
|
||||||
setup_minio;
|
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||||
|
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||||
|
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||||
|
if [ "$?" = "1" ]; then
|
||||||
|
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||||
|
fi
|
||||||
|
mc mb penpot-s3/penpot -p -q
|
||||||
|
|
||||||
|
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||||
|
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||||
|
|
||||||
|
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||||
|
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||||
|
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||||
|
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY="assets"
|
||||||
|
|
||||||
|
export JAVA_OPTS="--enable-preview \
|
||||||
|
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||||
|
-Djdk.attach.allowAttachSelf \
|
||||||
|
-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||||
|
-Djdk.tracePinnedThreads=full \
|
||||||
|
-XX:+EnableDynamicAgentLoading \
|
||||||
|
-XX:-OmitStackTraceInFastThrow \
|
||||||
|
-XX:+UnlockDiagnosticVMOptions \
|
||||||
|
-XX:+DebugNonSafepoints";
|
||||||
|
|
||||||
export JAVA_OPTS="$JAVA_OPTS -Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
|
||||||
export OPTIONS="-A:jmx-remote -A:dev"
|
export OPTIONS="-A:jmx-remote -A:dev"
|
||||||
|
|
||||||
|
# Setup HEAP
|
||||||
|
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||||
|
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
|
||||||
|
|
||||||
|
# Increase virtual thread pool size
|
||||||
|
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||||
|
|
||||||
|
# Disable C2 Compiler
|
||||||
|
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||||
|
|
||||||
|
# Disable all compilers
|
||||||
|
# export OPTIONS="$OPTIONS -J-Xint"
|
||||||
|
|
||||||
|
# Setup GC
|
||||||
|
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||||
|
|
||||||
|
# Setup GC
|
||||||
|
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
|
||||||
|
|
||||||
|
# Enable ImageMagick v7.x support
|
||||||
|
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||||
|
|
||||||
export OPTIONS_EVAL="nil"
|
export OPTIONS_EVAL="nil"
|
||||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||||
|
|
||||||
|
|||||||
48
backend/scripts/repl-test
Executable file
48
backend/scripts/repl-test
Executable file
@ -0,0 +1,48 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
source /home/penpot/environ
|
||||||
|
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
|
||||||
|
|
||||||
|
export OPTIONS="
|
||||||
|
-A:jmx-remote -A:dev \
|
||||||
|
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||||
|
-J-Djdk.attach.allowAttachSelf \
|
||||||
|
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
|
||||||
|
-J-XX:-OmitStackTraceInFastThrow \
|
||||||
|
-J-XX:+UnlockDiagnosticVMOptions \
|
||||||
|
-J-XX:+DebugNonSafepoints \
|
||||||
|
-J-Djdk.tracePinnedThreads=full \
|
||||||
|
-J-XX:+UseTransparentHugePages \
|
||||||
|
-J-XX:ReservedCodeCacheSize=1g \
|
||||||
|
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||||
|
-J--enable-preview";
|
||||||
|
|
||||||
|
# Setup HEAP
|
||||||
|
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
|
||||||
|
|
||||||
|
export PENPOT_HTTP_SERVER_IO_THREADS=2
|
||||||
|
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
|
||||||
|
|
||||||
|
# Increase virtual thread pool size
|
||||||
|
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||||
|
|
||||||
|
# Disable C2 Compiler
|
||||||
|
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||||
|
|
||||||
|
# Disable all compilers
|
||||||
|
# export OPTIONS="$OPTIONS -J-Xint"
|
||||||
|
|
||||||
|
# Setup GC
|
||||||
|
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
|
||||||
|
|
||||||
|
# Setup GC
|
||||||
|
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
|
||||||
|
|
||||||
|
# Enable ImageMagick v7.x support
|
||||||
|
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||||
|
|
||||||
|
export OPTIONS_EVAL="nil"
|
||||||
|
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||||
|
|
||||||
|
set -ex
|
||||||
|
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main
|
||||||
@ -1,18 +1,44 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
SCRIPT_DIR=$(dirname $0);
|
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||||
|
export PENPOT_HOST=devenv
|
||||||
|
export PENPOT_FLAGS="\
|
||||||
|
$PENPOT_FLAGS \
|
||||||
|
enable-backend-asserts \
|
||||||
|
enable-feature-fdata-pointer-map \
|
||||||
|
enable-feature-fdata-objects-map \
|
||||||
|
enable-file-snapshot \
|
||||||
|
enable-tiered-file-data-storage";
|
||||||
|
|
||||||
source $SCRIPT_DIR/_env;
|
export JAVA_OPTS="
|
||||||
|
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||||
|
-Djdk.attach.allowAttachSelf \
|
||||||
|
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||||
|
-XX:+EnableDynamicAgentLoading \
|
||||||
|
-XX:-OmitStackTraceInFastThrow \
|
||||||
|
-XX:+UnlockDiagnosticVMOptions \
|
||||||
|
-XX:+DebugNonSafepoints";
|
||||||
|
|
||||||
if [ -f $SCRIPT_DIR/_env.local ]; then
|
export CLOJURE_OPTIONS="-A:dev"
|
||||||
source $SCRIPT_DIR/_env.local;
|
|
||||||
fi
|
|
||||||
|
|
||||||
export OPTIONS="-A:dev"
|
|
||||||
|
# Default deletion delay for devenv
|
||||||
|
export PENPOT_DELETION_DELAY="24h"
|
||||||
|
|
||||||
|
# Setup default upload media file size to 100MiB
|
||||||
|
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||||
|
|
||||||
|
# Setup default multipart upload size to 300MiB
|
||||||
|
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||||
|
|
||||||
|
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||||
|
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||||
|
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||||
|
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||||
|
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||||
|
|
||||||
entrypoint=${1:-app.main};
|
entrypoint=${1:-app.main};
|
||||||
|
|
||||||
shift 1;
|
shift 1;
|
||||||
set -ex
|
set -ex
|
||||||
|
|
||||||
exec clojure $OPTIONS -A:dev -M -m $entrypoint "$@";
|
clojure $CLOJURE_OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||||
|
|||||||
@ -18,9 +18,9 @@ if [ -f ./environ ]; then
|
|||||||
source ./environ
|
source ./environ
|
||||||
fi
|
fi
|
||||||
|
|
||||||
export JAVA_OPTS="-Dim4java.useV7=true -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow --sun-misc-unsafe-memory-access=allow --enable-native-access=ALL-UNNAMED --enable-preview $JVM_OPTS $JAVA_OPTS"
|
export JVM_OPTS="-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow --enable-preview $JVM_OPTS"
|
||||||
|
|
||||||
ENTRYPOINT=${1:-app.main};
|
ENTRYPOINT=${1:-app.main};
|
||||||
|
|
||||||
set -ex
|
set -ex
|
||||||
exec $JAVA_CMD $JAVA_OPTS -jar penpot.jar -m $ENTRYPOINT
|
exec $JAVA_CMD $JVM_OPTS -jar penpot.jar -m $ENTRYPOINT
|
||||||
|
|||||||
@ -1,15 +1,70 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
SCRIPT_DIR=$(dirname $0);
|
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||||
source $SCRIPT_DIR/_env;
|
export PENPOT_HOST=devenv
|
||||||
|
export PENPOT_FLAGS="\
|
||||||
|
$PENPOT_FLAGS \
|
||||||
|
enable-prepl-server \
|
||||||
|
enable-urepl-server \
|
||||||
|
enable-nrepl-server \
|
||||||
|
enable-webhooks \
|
||||||
|
enable-backend-asserts \
|
||||||
|
enable-audit-log \
|
||||||
|
enable-login-with-ldap \
|
||||||
|
enable-transit-readable-response \
|
||||||
|
enable-demo-users \
|
||||||
|
enable-feature-fdata-pointer-map \
|
||||||
|
enable-feature-fdata-objects-map \
|
||||||
|
disable-secure-session-cookies \
|
||||||
|
enable-rpc-climit \
|
||||||
|
enable-smtp \
|
||||||
|
enable-quotes \
|
||||||
|
enable-file-snapshot \
|
||||||
|
enable-access-tokens \
|
||||||
|
enable-tiered-file-data-storage \
|
||||||
|
enable-file-validation \
|
||||||
|
enable-file-schema-validation";
|
||||||
|
|
||||||
if [ -f $SCRIPT_DIR/_env.local ]; then
|
export OPTIONS="
|
||||||
source $SCRIPT_DIR/_env.local;
|
-A:jmx-remote -A:dev \
|
||||||
fi
|
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||||
|
-J-Djdk.attach.allowAttachSelf \
|
||||||
|
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||||
|
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||||
|
-J-XX:+EnableDynamicAgentLoading \
|
||||||
|
-J-XX:-OmitStackTraceInFastThrow \
|
||||||
|
-J-XX:+UnlockDiagnosticVMOptions \
|
||||||
|
-J-XX:+DebugNonSafepoints"
|
||||||
|
|
||||||
|
# Default deletion delay for devenv
|
||||||
|
export PENPOT_DELETION_DELAY="24h"
|
||||||
|
|
||||||
|
# Setup default upload media file size to 100MiB
|
||||||
|
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||||
|
|
||||||
|
# Setup default multipart upload size to 300MiB
|
||||||
|
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||||
|
|
||||||
|
# Enable ImageMagick v7.x support
|
||||||
|
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||||
|
|
||||||
# Initialize MINIO config
|
# Initialize MINIO config
|
||||||
setup_minio;
|
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||||
|
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||||
|
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||||
|
if [ "$?" = "1" ]; then
|
||||||
|
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||||
|
fi
|
||||||
|
mc mb penpot-s3/penpot -p -q
|
||||||
|
|
||||||
|
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||||
|
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||||
|
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||||
|
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||||
|
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||||
|
|
||||||
|
entrypoint=${1:-app.main};
|
||||||
|
|
||||||
shift 1;
|
|
||||||
set -ex
|
set -ex
|
||||||
exec clojure -A:jmx-remote -A:dev -M -m app.main "$@";
|
|
||||||
|
clojure $OPTIONS -A:dev -M -m $entrypoint;
|
||||||
|
|||||||
@ -111,7 +111,7 @@
|
|||||||
[:host {:optional true} :string]
|
[:host {:optional true} :string]
|
||||||
[:port {:optional true} ::sm/int]
|
[:port {:optional true} ::sm/int]
|
||||||
[:bind-dn {:optional true} :string]
|
[:bind-dn {:optional true} :string]
|
||||||
[:bind-password {:optional true} :string]
|
[:bind-passwor {:optional true} :string]
|
||||||
[:query {:optional true} :string]
|
[:query {:optional true} :string]
|
||||||
[:base-dn {:optional true} :string]
|
[:base-dn {:optional true} :string]
|
||||||
[:attrs-email {:optional true} :string]
|
[:attrs-email {:optional true} :string]
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@ -1,131 +0,0 @@
|
|||||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
|
||||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
||||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
||||||
;;
|
|
||||||
;; Copyright (c) KALEIDOS INC
|
|
||||||
|
|
||||||
(ns app.binfile.cleaner
|
|
||||||
"A collection of helpers for perform cleaning of artifacts; mainly
|
|
||||||
for recently imported shapes."
|
|
||||||
(:require
|
|
||||||
[app.common.data :as d]
|
|
||||||
[app.common.types.shape :as cts]
|
|
||||||
[app.common.uuid :as uuid]))
|
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
;; PRE DECODE
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
|
|
||||||
(defn- pre-clean-bool-content
|
|
||||||
[shape]
|
|
||||||
(if-let [content (get shape :bool-content)]
|
|
||||||
(-> shape
|
|
||||||
(assoc :content content)
|
|
||||||
(dissoc :bool-content))
|
|
||||||
shape))
|
|
||||||
|
|
||||||
(defn- pre-clean-shadow-color
|
|
||||||
[shape]
|
|
||||||
(d/update-when shape :shadow
|
|
||||||
(fn [shadows]
|
|
||||||
(mapv (fn [shadow]
|
|
||||||
(update shadow :color
|
|
||||||
(fn [color]
|
|
||||||
(let [ref-id (get color :id)
|
|
||||||
ref-file (get color :file-id)]
|
|
||||||
(-> (d/without-qualified color)
|
|
||||||
(select-keys [:opacity :color :gradient :image :ref-id :ref-file])
|
|
||||||
(cond-> ref-id
|
|
||||||
(assoc :ref-id ref-id))
|
|
||||||
(cond-> ref-file
|
|
||||||
(assoc :ref-file ref-file)))))))
|
|
||||||
shadows))))
|
|
||||||
|
|
||||||
(defn clean-shape-pre-decode
|
|
||||||
"Applies a pre-decode phase migration to the shape"
|
|
||||||
[shape]
|
|
||||||
(cond-> shape
|
|
||||||
(= "bool" (:type shape))
|
|
||||||
(pre-clean-bool-content)
|
|
||||||
|
|
||||||
(contains? shape :shadow)
|
|
||||||
(pre-clean-shadow-color)))
|
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
;; POST DECODE
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
|
|
||||||
(defn- fix-shape-shadow-color
|
|
||||||
"Some shapes can come with invalid `id` property on shadow colors
|
|
||||||
caused by incorrect uuid parsing bug that should be already fixed;
|
|
||||||
this function removes the invalid id from the data structure."
|
|
||||||
[shape]
|
|
||||||
(let [fix-color
|
|
||||||
(fn [{:keys [id] :as color}]
|
|
||||||
(if (uuid? id)
|
|
||||||
color
|
|
||||||
(if (and (string? id)
|
|
||||||
(re-matches uuid/regex id))
|
|
||||||
(assoc color :id (uuid/uuid id))
|
|
||||||
(dissoc color :id))))
|
|
||||||
|
|
||||||
fix-shadow
|
|
||||||
(fn [shadow]
|
|
||||||
(d/update-when shadow :color fix-color))
|
|
||||||
|
|
||||||
xform
|
|
||||||
(map fix-shadow)]
|
|
||||||
|
|
||||||
(d/update-when shape :shadow
|
|
||||||
(fn [shadows]
|
|
||||||
(into [] xform shadows)))))
|
|
||||||
|
|
||||||
(defn- fix-root-shape
|
|
||||||
"Ensure all root objects are well formed shapes"
|
|
||||||
[shape]
|
|
||||||
(if (= (:id shape) uuid/zero)
|
|
||||||
(-> shape
|
|
||||||
(assoc :parent-id uuid/zero)
|
|
||||||
(assoc :frame-id uuid/zero)
|
|
||||||
;; We explicitly dissoc them and let the shape-setup
|
|
||||||
;; to regenerate it with valid values.
|
|
||||||
(dissoc :selrect)
|
|
||||||
(dissoc :points)
|
|
||||||
(cts/setup-shape))
|
|
||||||
shape))
|
|
||||||
|
|
||||||
(defn- fix-legacy-flex-dir
|
|
||||||
"This operation is only relevant to old data and it is fixed just
|
|
||||||
for convenience."
|
|
||||||
[shape]
|
|
||||||
(d/update-when shape :layout-flex-dir
|
|
||||||
(fn [dir]
|
|
||||||
(case dir
|
|
||||||
:reverse-row :row-reverse
|
|
||||||
:reverse-column :column-reverse
|
|
||||||
dir))))
|
|
||||||
|
|
||||||
(defn clean-shape-post-decode
|
|
||||||
"A shape procesor that expected to be executed after schema decoding
|
|
||||||
process but before validation."
|
|
||||||
[shape]
|
|
||||||
(-> shape
|
|
||||||
(fix-shape-shadow-color)
|
|
||||||
(fix-root-shape)
|
|
||||||
(fix-legacy-flex-dir)))
|
|
||||||
|
|
||||||
(defn- fix-container
|
|
||||||
[container]
|
|
||||||
(-> container
|
|
||||||
;; Remove possible `nil` keys on objects
|
|
||||||
(d/update-when :objects dissoc nil)
|
|
||||||
(d/update-when :objects d/update-vals clean-shape-post-decode)))
|
|
||||||
|
|
||||||
(defn clean-file
|
|
||||||
[file & {:as _opts}]
|
|
||||||
(update file :data
|
|
||||||
(fn [data]
|
|
||||||
(-> data
|
|
||||||
(d/update-when :pages-index d/update-vals fix-container)
|
|
||||||
(d/update-when :components d/update-vals fix-container)
|
|
||||||
(d/without-nils)))))
|
|
||||||
@ -15,21 +15,19 @@
|
|||||||
[app.common.files.migrations :as fmg]
|
[app.common.files.migrations :as fmg]
|
||||||
[app.common.files.validate :as fval]
|
[app.common.files.validate :as fval]
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.schema :as sm]
|
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.types.file :as ctf]
|
[app.common.types.file :as ctf]
|
||||||
[app.common.uuid :as uuid]
|
[app.common.uuid :as uuid]
|
||||||
[app.common.weak :as weak]
|
|
||||||
[app.config :as cf]
|
[app.config :as cf]
|
||||||
[app.db :as db]
|
[app.db :as db]
|
||||||
[app.db.sql :as sql]
|
[app.db.sql :as sql]
|
||||||
[app.features.fdata :as fdata]
|
[app.features.fdata :as feat.fdata]
|
||||||
[app.features.file-migrations :as fmigr]
|
[app.features.file-migrations :as feat.fmigr]
|
||||||
[app.loggers.audit :as-alias audit]
|
[app.loggers.audit :as-alias audit]
|
||||||
[app.loggers.webhooks :as-alias webhooks]
|
[app.loggers.webhooks :as-alias webhooks]
|
||||||
[app.storage :as sto]
|
[app.storage :as sto]
|
||||||
[app.util.blob :as blob]
|
[app.util.blob :as blob]
|
||||||
[app.util.pointer-map :as pmap]
|
[app.util.pointer-map :as pmap]
|
||||||
|
[app.util.time :as dt]
|
||||||
[app.worker :as-alias wrk]
|
[app.worker :as-alias wrk]
|
||||||
[clojure.set :as set]
|
[clojure.set :as set]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
@ -40,7 +38,6 @@
|
|||||||
|
|
||||||
(def ^:dynamic *state* nil)
|
(def ^:dynamic *state* nil)
|
||||||
(def ^:dynamic *options* nil)
|
(def ^:dynamic *options* nil)
|
||||||
(def ^:dynamic *reference-file* nil)
|
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
;; DEFAULTS
|
;; DEFAULTS
|
||||||
@ -57,11 +54,15 @@
|
|||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
(declare get-resolved-file-libraries)
|
|
||||||
(declare update-file!)
|
|
||||||
|
|
||||||
(def file-attrs
|
(def file-attrs
|
||||||
(sm/keys ctf/schema:file))
|
#{:id
|
||||||
|
:name
|
||||||
|
:migrations
|
||||||
|
:features
|
||||||
|
:project-id
|
||||||
|
:is-shared
|
||||||
|
:version
|
||||||
|
:data})
|
||||||
|
|
||||||
(defn parse-file-format
|
(defn parse-file-format
|
||||||
[template]
|
[template]
|
||||||
@ -141,176 +142,29 @@
|
|||||||
([index coll attr]
|
([index coll attr]
|
||||||
(reduce #(index-object %1 %2 attr) index coll)))
|
(reduce #(index-object %1 %2 attr) index coll)))
|
||||||
|
|
||||||
(defn- decode-row-features
|
(defn decode-row
|
||||||
[{:keys [features] :as row}]
|
"A generic decode row helper"
|
||||||
(when row
|
[{:keys [data features] :as row}]
|
||||||
(cond-> row
|
(cond-> row
|
||||||
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
|
features (assoc :features (db/decode-pgarray features #{}))
|
||||||
|
data (assoc :data (blob/decode data))))
|
||||||
|
|
||||||
(def sql:get-minimal-file
|
(defn decode-file
|
||||||
"SELECT f.id,
|
"A general purpose file decoding function that resolves all external
|
||||||
f.revn,
|
pointers, run migrations and return plain vanilla file map"
|
||||||
f.modified_at,
|
[cfg {:keys [id] :as file}]
|
||||||
f.deleted_at
|
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||||
FROM file AS f
|
(let [file (->> file
|
||||||
WHERE f.id = ?")
|
(feat.fmigr/resolve-applied-migrations cfg)
|
||||||
|
(feat.fdata/resolve-file-data cfg))]
|
||||||
|
|
||||||
(defn get-minimal-file
|
(-> file
|
||||||
[cfg id & {:as opts}]
|
(update :features db/decode-pgarray #{})
|
||||||
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
|
(update :data blob/decode)
|
||||||
|
(update :data feat.fdata/process-pointers deref)
|
||||||
(def sql:files-with-data
|
(update :data feat.fdata/process-objects (partial into {}))
|
||||||
"SELECT f.id,
|
(update :data assoc :id id)
|
||||||
f.project_id,
|
(fmg/migrate-file)))))
|
||||||
f.created_at,
|
|
||||||
f.modified_at,
|
|
||||||
f.deleted_at,
|
|
||||||
f.name,
|
|
||||||
f.is_shared,
|
|
||||||
f.has_media_trimmed,
|
|
||||||
f.revn,
|
|
||||||
f.data AS legacy_data,
|
|
||||||
f.ignore_sync_until,
|
|
||||||
f.comment_thread_seqn,
|
|
||||||
f.features,
|
|
||||||
f.version,
|
|
||||||
f.vern,
|
|
||||||
p.team_id,
|
|
||||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
|
||||||
fd.metadata AS metadata,
|
|
||||||
fd.data AS data
|
|
||||||
FROM file AS f
|
|
||||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
|
||||||
INNER JOIN project AS p ON (p.id = f.project_id)")
|
|
||||||
|
|
||||||
(def sql:get-file
|
|
||||||
(str sql:files-with-data " WHERE f.id = ?"))
|
|
||||||
|
|
||||||
(def sql:get-file-without-data
|
|
||||||
(str "WITH files AS (" sql:files-with-data ")"
|
|
||||||
"SELECT f.id,
|
|
||||||
f.project_id,
|
|
||||||
f.created_at,
|
|
||||||
f.modified_at,
|
|
||||||
f.deleted_at,
|
|
||||||
f.name,
|
|
||||||
f.is_shared,
|
|
||||||
f.has_media_trimmed,
|
|
||||||
f.revn,
|
|
||||||
f.ignore_sync_until,
|
|
||||||
f.comment_thread_seqn,
|
|
||||||
f.features,
|
|
||||||
f.version,
|
|
||||||
f.vern,
|
|
||||||
f.team_id
|
|
||||||
FROM files AS f
|
|
||||||
WHERE f.id = ?"))
|
|
||||||
|
|
||||||
(defn- migrate-file
|
|
||||||
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
|
|
||||||
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)
|
|
||||||
pmap/*tracked* (pmap/create-tracked)]
|
|
||||||
(let [libs (delay (get-resolved-file-libraries cfg file))
|
|
||||||
;; For avoid unnecesary overhead of creating multiple
|
|
||||||
;; pointers and handly internally with objects map in their
|
|
||||||
;; worst case (when probably all shapes and all pointers
|
|
||||||
;; will be readed in any case), we just realize/resolve them
|
|
||||||
;; before applying the migration to the file.
|
|
||||||
file (-> (fdata/realize cfg file)
|
|
||||||
(fmg/migrate-file libs))]
|
|
||||||
|
|
||||||
(if (or read-only? (db/read-only? conn))
|
|
||||||
file
|
|
||||||
(do ;; When file is migrated, we break the rule of no
|
|
||||||
;; perform mutations on get operations and update the
|
|
||||||
;; file with all migrations applied
|
|
||||||
(update-file! cfg file)
|
|
||||||
(fmigr/resolve-applied-migrations cfg file))))))
|
|
||||||
|
|
||||||
(defn- get-file*
|
|
||||||
[{:keys [::db/conn] :as cfg} id
|
|
||||||
{:keys [migrate?
|
|
||||||
realize?
|
|
||||||
decode?
|
|
||||||
skip-locked?
|
|
||||||
include-deleted?
|
|
||||||
load-data?
|
|
||||||
throw-if-not-exists?
|
|
||||||
lock-for-update?
|
|
||||||
lock-for-share?]
|
|
||||||
:or {lock-for-update? false
|
|
||||||
lock-for-share? false
|
|
||||||
load-data? true
|
|
||||||
migrate? true
|
|
||||||
decode? true
|
|
||||||
include-deleted? false
|
|
||||||
throw-if-not-exists? true
|
|
||||||
realize? false}
|
|
||||||
:as options}]
|
|
||||||
|
|
||||||
(assert (db/connection? conn) "expected cfg with valid connection")
|
|
||||||
(when (and (not load-data?)
|
|
||||||
(or lock-for-share? lock-for-share? skip-locked?))
|
|
||||||
(throw (IllegalArgumentException. "locking is incompatible when `load-data?` is false")))
|
|
||||||
|
|
||||||
(let [sql
|
|
||||||
(if load-data?
|
|
||||||
sql:get-file
|
|
||||||
sql:get-file-without-data)
|
|
||||||
|
|
||||||
sql
|
|
||||||
(cond
|
|
||||||
lock-for-update?
|
|
||||||
(str sql " FOR UPDATE of f")
|
|
||||||
|
|
||||||
lock-for-share?
|
|
||||||
(str sql " FOR SHARE of f")
|
|
||||||
|
|
||||||
:else
|
|
||||||
sql)
|
|
||||||
|
|
||||||
sql
|
|
||||||
(if skip-locked?
|
|
||||||
(str sql " SKIP LOCKED")
|
|
||||||
sql)
|
|
||||||
|
|
||||||
file
|
|
||||||
(db/get-with-sql conn [sql id]
|
|
||||||
{::db/throw-if-not-exists false
|
|
||||||
::db/remove-deleted (not include-deleted?)})
|
|
||||||
|
|
||||||
file
|
|
||||||
(-> file
|
|
||||||
(d/update-when :features db/decode-pgarray #{})
|
|
||||||
(d/update-when :metadata fdata/decode-metadata))]
|
|
||||||
|
|
||||||
(if file
|
|
||||||
(if load-data?
|
|
||||||
(let [file
|
|
||||||
(->> file
|
|
||||||
(fmigr/resolve-applied-migrations cfg)
|
|
||||||
(fdata/resolve-file-data cfg))
|
|
||||||
|
|
||||||
will-migrate?
|
|
||||||
(and migrate? (fmg/need-migration? file))]
|
|
||||||
|
|
||||||
(if decode?
|
|
||||||
(cond->> (fdata/decode-file-data cfg file)
|
|
||||||
(and realize? (not will-migrate?))
|
|
||||||
(fdata/realize cfg)
|
|
||||||
|
|
||||||
will-migrate?
|
|
||||||
(migrate-file cfg options))
|
|
||||||
|
|
||||||
file))
|
|
||||||
file)
|
|
||||||
|
|
||||||
(when-not (or skip-locked? (not throw-if-not-exists?))
|
|
||||||
(ex/raise :type :not-found
|
|
||||||
:code :object-not-found
|
|
||||||
:hint "database object not found"
|
|
||||||
:table :file
|
|
||||||
:file-id id)))))
|
|
||||||
|
|
||||||
(defn get-file
|
(defn get-file
|
||||||
"Get file, resolve all features and apply migrations.
|
"Get file, resolve all features and apply migrations.
|
||||||
@ -319,7 +173,10 @@
|
|||||||
operations on file, because it removes the ovehead of lazy fetching
|
operations on file, because it removes the ovehead of lazy fetching
|
||||||
and decoding."
|
and decoding."
|
||||||
[cfg file-id & {:as opts}]
|
[cfg file-id & {:as opts}]
|
||||||
(db/run! cfg get-file* file-id opts))
|
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||||
|
(some->> (db/get* conn :file {:id file-id}
|
||||||
|
(assoc opts ::db/remove-deleted false))
|
||||||
|
(decode-file cfg)))))
|
||||||
|
|
||||||
(defn clean-file-features
|
(defn clean-file-features
|
||||||
[file]
|
[file]
|
||||||
@ -331,81 +188,6 @@
|
|||||||
(set/difference cfeat/backend-only-features))
|
(set/difference cfeat/backend-only-features))
|
||||||
#{}))))
|
#{}))))
|
||||||
|
|
||||||
(defn check-file-exists
|
|
||||||
[cfg id & {:keys [include-deleted?]
|
|
||||||
:or {include-deleted? false}
|
|
||||||
:as options}]
|
|
||||||
(db/get-with-sql cfg [sql:get-minimal-file id]
|
|
||||||
{:db/remove-deleted (not include-deleted?)}))
|
|
||||||
|
|
||||||
(def ^:private sql:file-permissions
|
|
||||||
"select fpr.is_owner,
|
|
||||||
fpr.is_admin,
|
|
||||||
fpr.can_edit
|
|
||||||
from file_profile_rel as fpr
|
|
||||||
inner join file as f on (f.id = fpr.file_id)
|
|
||||||
where fpr.file_id = ?
|
|
||||||
and fpr.profile_id = ?
|
|
||||||
union all
|
|
||||||
select tpr.is_owner,
|
|
||||||
tpr.is_admin,
|
|
||||||
tpr.can_edit
|
|
||||||
from team_profile_rel as tpr
|
|
||||||
inner join project as p on (p.team_id = tpr.team_id)
|
|
||||||
inner join file as f on (p.id = f.project_id)
|
|
||||||
where f.id = ?
|
|
||||||
and tpr.profile_id = ?
|
|
||||||
union all
|
|
||||||
select ppr.is_owner,
|
|
||||||
ppr.is_admin,
|
|
||||||
ppr.can_edit
|
|
||||||
from project_profile_rel as ppr
|
|
||||||
inner join file as f on (f.project_id = ppr.project_id)
|
|
||||||
where f.id = ?
|
|
||||||
and ppr.profile_id = ?")
|
|
||||||
|
|
||||||
(defn- get-file-permissions*
|
|
||||||
[conn profile-id file-id]
|
|
||||||
(when (and profile-id file-id)
|
|
||||||
(db/exec! conn [sql:file-permissions
|
|
||||||
file-id profile-id
|
|
||||||
file-id profile-id
|
|
||||||
file-id profile-id])))
|
|
||||||
|
|
||||||
(defn get-file-permissions
|
|
||||||
([conn profile-id file-id]
|
|
||||||
(let [rows (get-file-permissions* conn profile-id file-id)
|
|
||||||
is-owner (boolean (some :is-owner rows))
|
|
||||||
is-admin (boolean (some :is-admin rows))
|
|
||||||
can-edit (boolean (some :can-edit rows))]
|
|
||||||
(when (seq rows)
|
|
||||||
{:type :membership
|
|
||||||
:is-owner is-owner
|
|
||||||
:is-admin (or is-owner is-admin)
|
|
||||||
:can-edit (or is-owner is-admin can-edit)
|
|
||||||
:can-read true
|
|
||||||
:is-logged (some? profile-id)})))
|
|
||||||
|
|
||||||
([conn profile-id file-id share-id]
|
|
||||||
(let [perms (get-file-permissions conn profile-id file-id)
|
|
||||||
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
|
|
||||||
(dissoc :flags)
|
|
||||||
(update :pages db/decode-pgarray #{}))]
|
|
||||||
|
|
||||||
;; NOTE: in a future when share-link becomes more powerful and
|
|
||||||
;; will allow us specify which parts of the app is available, we
|
|
||||||
;; will probably need to tweak this function in order to expose
|
|
||||||
;; this flags to the frontend.
|
|
||||||
(cond
|
|
||||||
(some? perms) perms
|
|
||||||
(some? ldata) {:type :share-link
|
|
||||||
:can-read true
|
|
||||||
:pages (:pages ldata)
|
|
||||||
:is-logged (some? profile-id)
|
|
||||||
:who-comment (:who-comment ldata)
|
|
||||||
:who-inspect (:who-inspect ldata)}))))
|
|
||||||
|
|
||||||
|
|
||||||
(defn get-project
|
(defn get-project
|
||||||
[cfg project-id]
|
[cfg project-id]
|
||||||
(db/get cfg :project {:id project-id}))
|
(db/get cfg :project {:id project-id}))
|
||||||
@ -418,12 +200,12 @@
|
|||||||
(let [conn (db/get-connection cfg)
|
(let [conn (db/get-connection cfg)
|
||||||
ids (db/create-array conn "uuid" ids)]
|
ids (db/create-array conn "uuid" ids)]
|
||||||
(->> (db/exec! conn [sql:get-teams ids])
|
(->> (db/exec! conn [sql:get-teams ids])
|
||||||
(map decode-row-features))))
|
(map decode-row))))
|
||||||
|
|
||||||
(defn get-team
|
(defn get-team
|
||||||
[cfg team-id]
|
[cfg team-id]
|
||||||
(-> (db/get cfg :team {:id team-id})
|
(-> (db/get cfg :team {:id team-id})
|
||||||
(decode-row-features)))
|
(decode-row)))
|
||||||
|
|
||||||
(defn get-fonts
|
(defn get-fonts
|
||||||
[cfg team-id]
|
[cfg team-id]
|
||||||
@ -515,6 +297,7 @@
|
|||||||
(do
|
(do
|
||||||
(l/trc :hint "lookup index"
|
(l/trc :hint "lookup index"
|
||||||
:file-id (str file-id)
|
:file-id (str file-id)
|
||||||
|
:snap-id (str (:snapshot-id file))
|
||||||
:id (str id)
|
:id (str id)
|
||||||
:result (str (get mobj :id)))
|
:result (str (get mobj :id)))
|
||||||
(get mobj :id))
|
(get mobj :id))
|
||||||
@ -531,6 +314,7 @@
|
|||||||
(doseq [[old-id item] missing-index]
|
(doseq [[old-id item] missing-index]
|
||||||
(l/dbg :hint "create missing references"
|
(l/dbg :hint "create missing references"
|
||||||
:file-id (str file-id)
|
:file-id (str file-id)
|
||||||
|
:snap-id (str (:snapshot-id file))
|
||||||
:old-id (str old-id)
|
:old-id (str old-id)
|
||||||
:id (str (:id item)))
|
:id (str (:id item)))
|
||||||
(db/insert! conn :file-media-object item
|
(db/insert! conn :file-media-object item
|
||||||
@ -541,16 +325,12 @@
|
|||||||
(def sql:get-file-media
|
(def sql:get-file-media
|
||||||
"SELECT * FROM file_media_object WHERE id = ANY(?)")
|
"SELECT * FROM file_media_object WHERE id = ANY(?)")
|
||||||
|
|
||||||
(defn get-file-media*
|
|
||||||
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
|
|
||||||
(let [used (cfh/collect-used-media data)
|
|
||||||
used (db/create-array conn "uuid" used)]
|
|
||||||
(->> (db/exec! conn [sql:get-file-media used])
|
|
||||||
(mapv (fn [row] (assoc row :file-id id))))))
|
|
||||||
|
|
||||||
(defn get-file-media
|
(defn get-file-media
|
||||||
[cfg file]
|
[cfg {:keys [data] :as file}]
|
||||||
(db/run! cfg get-file-media* file))
|
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||||
|
(let [used (cfh/collect-used-media data)
|
||||||
|
used (db/create-array conn "uuid" used)]
|
||||||
|
(db/exec! conn [sql:get-file-media used])))))
|
||||||
|
|
||||||
(def ^:private sql:get-team-files-ids
|
(def ^:private sql:get-team-files-ids
|
||||||
"SELECT f.id FROM file AS f
|
"SELECT f.id FROM file AS f
|
||||||
@ -625,7 +405,7 @@
|
|||||||
[cfg data file-id]
|
[cfg data file-id]
|
||||||
(let [library-ids (get-libraries cfg [file-id])]
|
(let [library-ids (get-libraries cfg [file-id])]
|
||||||
(reduce (fn [data library-id]
|
(reduce (fn [data library-id]
|
||||||
(if-let [library (get-file cfg library-id :include-deleted? true)]
|
(if-let [library (get-file cfg library-id)]
|
||||||
(ctf/absorb-assets data (:data library))
|
(ctf/absorb-assets data (:data library))
|
||||||
data))
|
data))
|
||||||
data
|
data
|
||||||
@ -637,146 +417,94 @@
|
|||||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
|
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
|
||||||
|
|
||||||
(defn invalidate-thumbnails
|
|
||||||
[cfg file-id]
|
|
||||||
(let [storage (sto/resolve cfg)
|
|
||||||
|
|
||||||
sql-1
|
|
||||||
(str "update file_tagged_object_thumbnail "
|
|
||||||
" set deleted_at = now() "
|
|
||||||
" where file_id=? returning media_id")
|
|
||||||
|
|
||||||
sql-2
|
|
||||||
(str "update file_thumbnail "
|
|
||||||
" set deleted_at = now() "
|
|
||||||
" where file_id=? returning media_id")]
|
|
||||||
|
|
||||||
(run! #(sto/touch-object! storage %)
|
|
||||||
(sequence
|
|
||||||
(keep :media-id)
|
|
||||||
(concat
|
|
||||||
(db/exec! cfg [sql-1 file-id])
|
|
||||||
(db/exec! cfg [sql-2 file-id]))))))
|
|
||||||
|
|
||||||
(defn process-file
|
(defn process-file
|
||||||
[cfg {:keys [id] :as file}]
|
[{:keys [id] :as file}]
|
||||||
(let [libs (delay (get-resolved-file-libraries cfg file))]
|
(-> file
|
||||||
(-> file
|
(update :data (fn [fdata]
|
||||||
(update :data (fn [fdata]
|
(-> fdata
|
||||||
(-> fdata
|
(assoc :id id)
|
||||||
(assoc :id id)
|
(dissoc :recent-colors))))
|
||||||
(dissoc :recent-colors))))
|
(fmg/migrate-file)
|
||||||
(update :data (fn [fdata]
|
(update :data (fn [fdata]
|
||||||
(-> fdata
|
(-> fdata
|
||||||
(update :pages-index relink-shapes)
|
(update :pages-index relink-shapes)
|
||||||
(update :components relink-shapes)
|
(update :components relink-shapes)
|
||||||
(update :media relink-media)
|
(update :media relink-media)
|
||||||
(update :colors relink-colors)
|
(update :colors relink-colors)
|
||||||
(d/without-nils))))
|
(d/without-nils))))))
|
||||||
(fmg/migrate-file libs)
|
|
||||||
|
|
||||||
;; NOTE: this is necessary because when we just creating a new
|
(defn encode-file
|
||||||
;; file from imported artifact or cloned file there are no
|
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||||
;; migrations registered on the database, so we need to persist
|
(let [file (if (contains? (:features file) "fdata/objects-map")
|
||||||
;; all of them, not only the applied
|
(feat.fdata/enable-objects-map file)
|
||||||
(vary-meta dissoc ::fmg/migrated))))
|
|
||||||
|
|
||||||
(defn- encode-file
|
|
||||||
[cfg {:keys [id features] :as file}]
|
|
||||||
(let [file (if (and (contains? features "fdata/objects-map")
|
|
||||||
(:data file))
|
|
||||||
(fdata/enable-objects-map file)
|
|
||||||
file)
|
file)
|
||||||
|
|
||||||
file (if (and (contains? features "fdata/pointer-map")
|
file (if (contains? (:features file) "fdata/pointer-map")
|
||||||
(:data file))
|
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||||
|
(let [file (feat.fdata/enable-pointer-map file)]
|
||||||
(binding [pmap/*tracked* (pmap/create-tracked :inherit true)]
|
(feat.fdata/persist-pointers! cfg id)
|
||||||
(let [file (fdata/enable-pointer-map file)]
|
|
||||||
(fdata/persist-pointers! cfg id)
|
|
||||||
file))
|
file))
|
||||||
file)]
|
file)]
|
||||||
|
|
||||||
(-> file
|
(-> file
|
||||||
(d/update-when :features into-array)
|
(update :features db/encode-pgarray conn "text")
|
||||||
(d/update-when :data blob/encode))))
|
(update :data blob/encode))))
|
||||||
|
|
||||||
(defn- file->params
|
(defn get-params-from-file
|
||||||
[file]
|
[file]
|
||||||
(-> (select-keys file file-attrs)
|
(let [params {:has-media-trimmed (:has-media-trimmed file)
|
||||||
(assoc :data nil)
|
:ignore-sync-until (:ignore-sync-until file)
|
||||||
(dissoc :team-id)
|
:project-id (:project-id file)
|
||||||
(dissoc :migrations)))
|
:features (:features file)
|
||||||
|
:name (:name file)
|
||||||
|
:is-shared (:is-shared file)
|
||||||
|
:version (:version file)
|
||||||
|
:data (:data file)
|
||||||
|
:id (:id file)
|
||||||
|
:deleted-at (:deleted-at file)
|
||||||
|
:created-at (:created-at file)
|
||||||
|
:modified-at (:modified-at file)
|
||||||
|
:revn (:revn file)
|
||||||
|
:vern (:vern file)}]
|
||||||
|
|
||||||
(defn- file->file-data-params
|
(-> (d/without-nils params)
|
||||||
[{:keys [id] :as file} & {:as opts}]
|
(assoc :data-backend nil)
|
||||||
(let [created-at (or (:created-at file) (ct/now))
|
(assoc :data-ref-id nil))))
|
||||||
modified-at (or (:modified-at file) created-at)]
|
|
||||||
(d/without-nils
|
|
||||||
{:id id
|
|
||||||
:type "main"
|
|
||||||
:file-id id
|
|
||||||
:data (:data file)
|
|
||||||
:metadata (:metadata file)
|
|
||||||
:created-at created-at
|
|
||||||
:modified-at modified-at})))
|
|
||||||
|
|
||||||
(defn insert-file!
|
(defn insert-file!
|
||||||
"Insert a new file into the database table. Expectes a not-encoded file.
|
"Insert a new file into the database table"
|
||||||
Returns nil."
|
|
||||||
[{:keys [::db/conn] :as cfg} file & {:as opts}]
|
[{:keys [::db/conn] :as cfg} file & {:as opts}]
|
||||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
(feat.fmigr/upsert-migrations! conn file)
|
||||||
|
(let [params (-> (encode-file cfg file)
|
||||||
(when (:migrations file)
|
(get-params-from-file))]
|
||||||
(fmigr/upsert-migrations! conn file))
|
(db/insert! conn :file params opts)))
|
||||||
|
|
||||||
(let [file (encode-file cfg file)]
|
|
||||||
(db/insert! conn :file
|
|
||||||
(file->params file)
|
|
||||||
(assoc opts ::db/return-keys false))
|
|
||||||
|
|
||||||
(->> (file->file-data-params file)
|
|
||||||
(fdata/upsert! cfg))
|
|
||||||
|
|
||||||
nil))
|
|
||||||
|
|
||||||
(defn update-file!
|
(defn update-file!
|
||||||
"Update an existing file on the database. Expects not encoded file."
|
"Update an existing file on the database."
|
||||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
|
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file} & {:as opts}]
|
||||||
|
(let [file (encode-file cfg file)
|
||||||
|
params (-> (get-params-from-file file)
|
||||||
|
(dissoc :id))]
|
||||||
|
|
||||||
(if (::reset-migrations? opts false)
|
;; If file was already offloaded, we touch the underlying storage
|
||||||
(fmigr/reset-migrations! conn file)
|
;; object for properly trigger storage-gc-touched task
|
||||||
(fmigr/upsert-migrations! conn file))
|
(when (feat.fdata/offloaded? file)
|
||||||
|
(some->> (:data-ref-id file) (sto/touch-object! storage)))
|
||||||
|
|
||||||
(let [file
|
(feat.fmigr/upsert-migrations! conn file)
|
||||||
(encode-file cfg file)
|
(db/update! conn :file params {:id id} opts)))
|
||||||
|
|
||||||
file-params
|
|
||||||
(file->params (dissoc file :id))
|
|
||||||
|
|
||||||
file-data-params
|
|
||||||
(file->file-data-params file)]
|
|
||||||
|
|
||||||
(db/update! conn :file file-params
|
|
||||||
{:id id}
|
|
||||||
{::db/return-keys false})
|
|
||||||
|
|
||||||
(fdata/upsert! cfg file-data-params)
|
|
||||||
nil))
|
|
||||||
|
|
||||||
(defn save-file!
|
(defn save-file!
|
||||||
"Applies all the final validations and perist the file, binfile
|
"Applies all the final validations and perist the file, binfile
|
||||||
specific, should not be used outside of binfile domain.
|
specific, should not be used outside of binfile domain"
|
||||||
Returns nil"
|
|
||||||
[{:keys [::timestamp] :as cfg} file & {:as opts}]
|
[{:keys [::timestamp] :as cfg} file & {:as opts}]
|
||||||
|
|
||||||
(assert (ct/inst? timestamp) "expected valid timestamp")
|
(assert (dt/instant? timestamp) "expected valid timestamp")
|
||||||
|
|
||||||
(let [file (-> file
|
(let [file (-> file
|
||||||
(assoc :created-at timestamp)
|
(assoc :created-at timestamp)
|
||||||
(assoc :modified-at timestamp)
|
(assoc :modified-at timestamp)
|
||||||
(cond-> (not (::overwrite cfg))
|
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
|
||||||
(assoc :ignore-sync-until (ct/plus timestamp (ct/duration {:seconds 5}))))
|
|
||||||
(update :features
|
(update :features
|
||||||
(fn [features]
|
(fn [features]
|
||||||
(-> (::features cfg #{})
|
(-> (::features cfg #{})
|
||||||
@ -793,62 +521,4 @@
|
|||||||
(when (ex/exception? result)
|
(when (ex/exception? result)
|
||||||
(l/error :hint "file schema validation error" :cause result))))
|
(l/error :hint "file schema validation error" :cause result))))
|
||||||
|
|
||||||
(if (::overwrite cfg)
|
(insert-file! cfg file opts)))
|
||||||
(update-file! cfg file (assoc opts ::reset-migrations? true))
|
|
||||||
(insert-file! cfg file opts))))
|
|
||||||
|
|
||||||
(def ^:private sql:get-file-libraries
|
|
||||||
"WITH RECURSIVE libs AS (
|
|
||||||
SELECT fl.*, flr.synced_at
|
|
||||||
FROM file AS fl
|
|
||||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
|
||||||
WHERE flr.file_id = ?::uuid
|
|
||||||
UNION
|
|
||||||
SELECT fl.*, flr.synced_at
|
|
||||||
FROM file AS fl
|
|
||||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
|
||||||
JOIN libs AS l ON (flr.file_id = l.id)
|
|
||||||
)
|
|
||||||
SELECT l.id,
|
|
||||||
l.features,
|
|
||||||
l.project_id,
|
|
||||||
p.team_id,
|
|
||||||
l.created_at,
|
|
||||||
l.modified_at,
|
|
||||||
l.deleted_at,
|
|
||||||
l.name,
|
|
||||||
l.revn,
|
|
||||||
l.vern,
|
|
||||||
l.synced_at,
|
|
||||||
l.is_shared,
|
|
||||||
l.version
|
|
||||||
FROM libs AS l
|
|
||||||
INNER JOIN project AS p ON (p.id = l.project_id)
|
|
||||||
WHERE l.deleted_at IS NULL;")
|
|
||||||
|
|
||||||
(defn get-file-libraries
|
|
||||||
[conn file-id]
|
|
||||||
(into []
|
|
||||||
(comp
|
|
||||||
;; FIXME: :is-indirect set to false to all rows looks
|
|
||||||
;; completly useless
|
|
||||||
(map #(assoc % :is-indirect false))
|
|
||||||
(map decode-row-features))
|
|
||||||
(db/exec! conn [sql:get-file-libraries file-id])))
|
|
||||||
|
|
||||||
(defn get-resolved-file-libraries
|
|
||||||
"Get all file libraries including itself. Returns an instance of
|
|
||||||
LoadableWeakValueMap that allows do not have strong references to
|
|
||||||
the loaded libraries and reduce possible memory pressure on having
|
|
||||||
all this libraries loaded at same time on processing file validation
|
|
||||||
or file migration.
|
|
||||||
|
|
||||||
This still requires at least one library at time to be loaded while
|
|
||||||
access to it is performed, but it improves considerable not having
|
|
||||||
the need of loading all the libraries at the same time."
|
|
||||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
|
||||||
(let [library-ids (->> (get-file-libraries conn (:id file))
|
|
||||||
(map :id)
|
|
||||||
(cons (:id file)))
|
|
||||||
load-fn #(get-file cfg % :migrate? false)]
|
|
||||||
(weak/loadable-weak-value-map library-ids load-fn {id file})))
|
|
||||||
|
|||||||
@ -10,6 +10,7 @@
|
|||||||
[app.binfile.common :as bfc]
|
[app.binfile.common :as bfc]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.features :as cfeat]
|
[app.common.features :as cfeat]
|
||||||
|
[app.features.components-v2 :as feat.compv2]
|
||||||
[clojure.set :as set]
|
[clojure.set :as set]
|
||||||
[cuerdas.core :as str]))
|
[cuerdas.core :as str]))
|
||||||
|
|
||||||
@ -27,15 +28,22 @@
|
|||||||
|
|
||||||
(defn apply-pending-migrations!
|
(defn apply-pending-migrations!
|
||||||
"Apply alredy registered pending migrations to files"
|
"Apply alredy registered pending migrations to files"
|
||||||
[_cfg]
|
[cfg]
|
||||||
(doseq [[feature _file-id] (-> bfc/*state* deref :pending-to-migrate)]
|
(doseq [[feature file-id] (-> bfc/*state* deref :pending-to-migrate)]
|
||||||
(case feature
|
(case feature
|
||||||
"components/v2"
|
"components/v2"
|
||||||
nil
|
(feat.compv2/migrate-file! cfg file-id
|
||||||
|
:validate? (::validate cfg true)
|
||||||
|
:skip-on-graphic-error? true)
|
||||||
|
|
||||||
"fdata/shape-data-type"
|
"fdata/shape-data-type"
|
||||||
nil
|
nil
|
||||||
|
|
||||||
|
;; There is no migration needed, but we don't want to allow
|
||||||
|
;; copy paste nor import of variant files into no-variant teams
|
||||||
|
"variants/v1"
|
||||||
|
nil
|
||||||
|
|
||||||
(ex/raise :type :internal
|
(ex/raise :type :internal
|
||||||
:code :no-migration-defined
|
:code :no-migration-defined
|
||||||
:hint (str/ffmt "no migation for feature '%' on file importation" feature)
|
:hint (str/ffmt "no migation for feature '%' on file importation" feature)
|
||||||
|
|||||||
@ -17,7 +17,6 @@
|
|||||||
[app.common.fressian :as fres]
|
[app.common.fressian :as fres]
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.spec :as us]
|
[app.common.spec :as us]
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.types.file :as ctf]
|
[app.common.types.file :as ctf]
|
||||||
[app.common.uuid :as uuid]
|
[app.common.uuid :as uuid]
|
||||||
[app.config :as cf]
|
[app.config :as cf]
|
||||||
@ -31,6 +30,7 @@
|
|||||||
[app.storage.tmp :as tmp]
|
[app.storage.tmp :as tmp]
|
||||||
[app.tasks.file-gc]
|
[app.tasks.file-gc]
|
||||||
[app.util.events :as events]
|
[app.util.events :as events]
|
||||||
|
[app.util.time :as dt]
|
||||||
[app.worker :as-alias wrk]
|
[app.worker :as-alias wrk]
|
||||||
[clojure.java.io :as jio]
|
[clojure.java.io :as jio]
|
||||||
[clojure.set :as set]
|
[clojure.set :as set]
|
||||||
@ -40,8 +40,8 @@
|
|||||||
[promesa.util :as pu]
|
[promesa.util :as pu]
|
||||||
[yetti.adapter :as yt])
|
[yetti.adapter :as yt])
|
||||||
(:import
|
(:import
|
||||||
com.github.luben.zstd.ZstdInputStream
|
|
||||||
com.github.luben.zstd.ZstdIOException
|
com.github.luben.zstd.ZstdIOException
|
||||||
|
com.github.luben.zstd.ZstdInputStream
|
||||||
com.github.luben.zstd.ZstdOutputStream
|
com.github.luben.zstd.ZstdOutputStream
|
||||||
java.io.DataInputStream
|
java.io.DataInputStream
|
||||||
java.io.DataOutputStream
|
java.io.DataOutputStream
|
||||||
@ -346,7 +346,7 @@
|
|||||||
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
||||||
(mapv #(dissoc % :file-id)))
|
(mapv #(dissoc % :file-id)))
|
||||||
|
|
||||||
file (cond-> (bfc/get-file cfg file-id :realize? true)
|
file (cond-> (bfc/get-file cfg file-id)
|
||||||
detach?
|
detach?
|
||||||
(-> (ctf/detach-external-references file-id)
|
(-> (ctf/detach-external-references file-id)
|
||||||
(dissoc :libraries))
|
(dissoc :libraries))
|
||||||
@ -434,7 +434,7 @@
|
|||||||
(defn read-import!
|
(defn read-import!
|
||||||
"Do the importation of the specified resource in penpot custom binary
|
"Do the importation of the specified resource in penpot custom binary
|
||||||
format."
|
format."
|
||||||
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (ct/now)} :as options}]
|
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
|
||||||
|
|
||||||
(dm/assert!
|
(dm/assert!
|
||||||
"expected input stream"
|
"expected input stream"
|
||||||
@ -442,7 +442,7 @@
|
|||||||
|
|
||||||
(dm/assert!
|
(dm/assert!
|
||||||
"expected valid instant"
|
"expected valid instant"
|
||||||
(ct/inst? timestamp))
|
(dt/instant? timestamp))
|
||||||
|
|
||||||
(let [version (read-header! input)]
|
(let [version (read-header! input)]
|
||||||
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
|
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
|
||||||
@ -551,8 +551,8 @@
|
|||||||
(cond-> (and (= idx 0) (some? name))
|
(cond-> (and (= idx 0) (some? name))
|
||||||
(assoc :name name))
|
(assoc :name name))
|
||||||
(assoc :project-id project-id)
|
(assoc :project-id project-id)
|
||||||
(dissoc :thumbnails))
|
(dissoc :thumbnails)
|
||||||
file (bfc/process-file system file)]
|
(bfc/process-file))]
|
||||||
|
|
||||||
;; All features that are enabled and requires explicit migration are
|
;; All features that are enabled and requires explicit migration are
|
||||||
;; added to the state for a posterior migration step.
|
;; added to the state for a posterior migration step.
|
||||||
@ -682,7 +682,7 @@
|
|||||||
(io/coercible? output))
|
(io/coercible? output))
|
||||||
|
|
||||||
(let [id (uuid/next)
|
(let [id (uuid/next)
|
||||||
tp (ct/tpoint)
|
tp (dt/tpoint)
|
||||||
ab (volatile! false)
|
ab (volatile! false)
|
||||||
cs (volatile! nil)]
|
cs (volatile! nil)]
|
||||||
(try
|
(try
|
||||||
@ -720,7 +720,7 @@
|
|||||||
(satisfies? jio/IOFactory input))
|
(satisfies? jio/IOFactory input))
|
||||||
|
|
||||||
(let [id (uuid/next)
|
(let [id (uuid/next)
|
||||||
tp (ct/tpoint)
|
tp (dt/tpoint)
|
||||||
cs (volatile! nil)]
|
cs (volatile! nil)]
|
||||||
|
|
||||||
(l/info :hint "import: started" :id (str id))
|
(l/info :hint "import: started" :id (str id))
|
||||||
@ -742,6 +742,6 @@
|
|||||||
(finally
|
(finally
|
||||||
(l/info :hint "import: terminated"
|
(l/info :hint "import: terminated"
|
||||||
:id (str id)
|
:id (str id)
|
||||||
:elapsed (ct/format-duration (tp))
|
:elapsed (dt/format-duration (tp))
|
||||||
:error? (some? @cs))))))
|
:error? (some? @cs))))))
|
||||||
|
|
||||||
|
|||||||
@ -13,7 +13,6 @@
|
|||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.features :as cfeat]
|
[app.common.features :as cfeat]
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.transit :as t]
|
[app.common.transit :as t]
|
||||||
[app.common.uuid :as uuid]
|
[app.common.uuid :as uuid]
|
||||||
[app.config :as cf]
|
[app.config :as cf]
|
||||||
@ -24,6 +23,7 @@
|
|||||||
[app.storage :as sto]
|
[app.storage :as sto]
|
||||||
[app.storage.tmp :as tmp]
|
[app.storage.tmp :as tmp]
|
||||||
[app.util.events :as events]
|
[app.util.events :as events]
|
||||||
|
[app.util.time :as dt]
|
||||||
[app.worker :as-alias wrk]
|
[app.worker :as-alias wrk]
|
||||||
[clojure.set :as set]
|
[clojure.set :as set]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
@ -153,7 +153,7 @@
|
|||||||
|
|
||||||
(defn- write-file!
|
(defn- write-file!
|
||||||
[cfg file-id]
|
[cfg file-id]
|
||||||
(let [file (bfc/get-file cfg file-id :realize? true)
|
(let [file (bfc/get-file cfg file-id)
|
||||||
thumbs (bfc/get-file-object-thumbnails cfg file-id)
|
thumbs (bfc/get-file-object-thumbnails cfg file-id)
|
||||||
media (bfc/get-file-media cfg file)
|
media (bfc/get-file-media cfg file)
|
||||||
rels (bfc/get-files-rels cfg #{file-id})]
|
rels (bfc/get-files-rels cfg #{file-id})]
|
||||||
@ -281,8 +281,8 @@
|
|||||||
|
|
||||||
(let [file (-> (read-obj cfg :file file-id)
|
(let [file (-> (read-obj cfg :file file-id)
|
||||||
(update :id bfc/lookup-index)
|
(update :id bfc/lookup-index)
|
||||||
(update :project-id bfc/lookup-index))
|
(update :project-id bfc/lookup-index)
|
||||||
file (bfc/process-file cfg file)]
|
(bfc/process-file))]
|
||||||
|
|
||||||
(events/tap :progress
|
(events/tap :progress
|
||||||
{:op :import
|
{:op :import
|
||||||
@ -344,7 +344,7 @@
|
|||||||
(defn export-team!
|
(defn export-team!
|
||||||
[cfg team-id]
|
[cfg team-id]
|
||||||
(let [id (uuid/next)
|
(let [id (uuid/next)
|
||||||
tp (ct/tpoint)
|
tp (dt/tpoint)
|
||||||
cfg (create-database cfg)]
|
cfg (create-database cfg)]
|
||||||
|
|
||||||
(l/inf :hint "start"
|
(l/inf :hint "start"
|
||||||
@ -378,15 +378,15 @@
|
|||||||
(l/inf :hint "end"
|
(l/inf :hint "end"
|
||||||
:operation "export"
|
:operation "export"
|
||||||
:id (str id)
|
:id (str id)
|
||||||
:elapsed (ct/format-duration elapsed)))))))
|
:elapsed (dt/format-duration elapsed)))))))
|
||||||
|
|
||||||
(defn import-team!
|
(defn import-team!
|
||||||
[cfg path]
|
[cfg path]
|
||||||
(let [id (uuid/next)
|
(let [id (uuid/next)
|
||||||
tp (ct/tpoint)
|
tp (dt/tpoint)
|
||||||
|
|
||||||
cfg (-> (create-database cfg path)
|
cfg (-> (create-database cfg path)
|
||||||
(assoc ::bfc/timestamp (ct/now)))]
|
(assoc ::bfc/timestamp (dt/now)))]
|
||||||
|
|
||||||
(l/inf :hint "start"
|
(l/inf :hint "start"
|
||||||
:operation "import"
|
:operation "import"
|
||||||
@ -434,4 +434,4 @@
|
|||||||
(l/inf :hint "end"
|
(l/inf :hint "end"
|
||||||
:operation "import"
|
:operation "import"
|
||||||
:id (str id)
|
:id (str id)
|
||||||
:elapsed (ct/format-duration elapsed)))))))
|
:elapsed (dt/format-duration elapsed)))))))
|
||||||
|
|||||||
@ -8,26 +8,23 @@
|
|||||||
"A ZIP based binary file exportation"
|
"A ZIP based binary file exportation"
|
||||||
(:refer-clojure :exclude [read])
|
(:refer-clojure :exclude [read])
|
||||||
(:require
|
(:require
|
||||||
[app.binfile.cleaner :as bfl]
|
|
||||||
[app.binfile.common :as bfc]
|
[app.binfile.common :as bfc]
|
||||||
[app.binfile.migrations :as bfm]
|
[app.binfile.migrations :as bfm]
|
||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
|
[app.common.data.macros :as dm]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.features :as cfeat]
|
[app.common.features :as cfeat]
|
||||||
[app.common.files.migrations :as-alias fmg]
|
|
||||||
[app.common.json :as json]
|
[app.common.json :as json]
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.media :as cmedia]
|
|
||||||
[app.common.schema :as sm]
|
[app.common.schema :as sm]
|
||||||
[app.common.thumbnails :as cth]
|
[app.common.thumbnails :as cth]
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.types.color :as ctcl]
|
[app.common.types.color :as ctcl]
|
||||||
[app.common.types.component :as ctc]
|
[app.common.types.component :as ctc]
|
||||||
[app.common.types.file :as ctf]
|
[app.common.types.file :as ctf]
|
||||||
[app.common.types.page :as ctp]
|
[app.common.types.page :as ctp]
|
||||||
[app.common.types.plugins :as ctpg]
|
[app.common.types.plugins :as ctpg]
|
||||||
[app.common.types.shape :as cts]
|
[app.common.types.shape :as cts]
|
||||||
[app.common.types.tokens-lib :as ctob]
|
[app.common.types.tokens-lib :as cto]
|
||||||
[app.common.types.typography :as cty]
|
[app.common.types.typography :as cty]
|
||||||
[app.common.uuid :as uuid]
|
[app.common.uuid :as uuid]
|
||||||
[app.config :as cf]
|
[app.config :as cf]
|
||||||
@ -36,15 +33,14 @@
|
|||||||
[app.storage :as sto]
|
[app.storage :as sto]
|
||||||
[app.storage.impl :as sto.impl]
|
[app.storage.impl :as sto.impl]
|
||||||
[app.util.events :as events]
|
[app.util.events :as events]
|
||||||
|
[app.util.time :as dt]
|
||||||
[clojure.java.io :as jio]
|
[clojure.java.io :as jio]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
[datoteka.fs :as fs]
|
[datoteka.fs :as fs]
|
||||||
[datoteka.io :as io])
|
[datoteka.io :as io])
|
||||||
(:import
|
(:import
|
||||||
java.io.File
|
|
||||||
java.io.InputStream
|
java.io.InputStream
|
||||||
java.io.OutputStreamWriter
|
java.io.OutputStreamWriter
|
||||||
java.lang.AutoCloseable
|
|
||||||
java.util.zip.ZipEntry
|
java.util.zip.ZipEntry
|
||||||
java.util.zip.ZipFile
|
java.util.zip.ZipFile
|
||||||
java.util.zip.ZipOutputStream))
|
java.util.zip.ZipOutputStream))
|
||||||
@ -55,7 +51,7 @@
|
|||||||
[:map {:title "Manifest"}
|
[:map {:title "Manifest"}
|
||||||
[:version ::sm/int]
|
[:version ::sm/int]
|
||||||
[:type :string]
|
[:type :string]
|
||||||
[:referer {:optional true} :string]
|
|
||||||
[:generated-by {:optional true} :string]
|
[:generated-by {:optional true} :string]
|
||||||
|
|
||||||
[:files
|
[:files
|
||||||
@ -75,7 +71,7 @@
|
|||||||
[:size ::sm/int]
|
[:size ::sm/int]
|
||||||
[:content-type :string]
|
[:content-type :string]
|
||||||
[:bucket [::sm/one-of {:format :string} sto/valid-buckets]]
|
[:bucket [::sm/one-of {:format :string} sto/valid-buckets]]
|
||||||
[:hash {:optional true} :string]])
|
[:hash :string]])
|
||||||
|
|
||||||
(def ^:private schema:file-thumbnail
|
(def ^:private schema:file-thumbnail
|
||||||
[:map {:title "FileThumbnail"}
|
[:map {:title "FileThumbnail"}
|
||||||
@ -90,40 +86,34 @@
|
|||||||
ctf/schema:file
|
ctf/schema:file
|
||||||
[:map [:options {:optional true} ctf/schema:options]]])
|
[:map [:options {:optional true} ctf/schema:options]]])
|
||||||
|
|
||||||
;; --- HELPERS
|
|
||||||
|
|
||||||
(defn- default-now
|
|
||||||
[o]
|
|
||||||
(or o (ct/now)))
|
|
||||||
|
|
||||||
;; --- ENCODERS
|
;; --- ENCODERS
|
||||||
|
|
||||||
(def encode-file
|
(def encode-file
|
||||||
(sm/encoder schema:file sm/json-transformer))
|
(sm/encoder schema:file sm/json-transformer))
|
||||||
|
|
||||||
(def encode-page
|
(def encode-page
|
||||||
(sm/encoder ctp/schema:page sm/json-transformer))
|
(sm/encoder ::ctp/page sm/json-transformer))
|
||||||
|
|
||||||
(def encode-shape
|
(def encode-shape
|
||||||
(sm/encoder cts/schema:shape sm/json-transformer))
|
(sm/encoder ::cts/shape sm/json-transformer))
|
||||||
|
|
||||||
(def encode-media
|
(def encode-media
|
||||||
(sm/encoder ctf/schema:media sm/json-transformer))
|
(sm/encoder ::ctf/media sm/json-transformer))
|
||||||
|
|
||||||
(def encode-component
|
(def encode-component
|
||||||
(sm/encoder ctc/schema:component sm/json-transformer))
|
(sm/encoder ::ctc/component sm/json-transformer))
|
||||||
|
|
||||||
(def encode-color
|
(def encode-color
|
||||||
(sm/encoder ctcl/schema:library-color sm/json-transformer))
|
(sm/encoder ::ctcl/color sm/json-transformer))
|
||||||
|
|
||||||
(def encode-typography
|
(def encode-typography
|
||||||
(sm/encoder cty/schema:typography sm/json-transformer))
|
(sm/encoder ::cty/typography sm/json-transformer))
|
||||||
|
|
||||||
(def encode-tokens-lib
|
(def encode-tokens-lib
|
||||||
(sm/encoder ctob/schema:tokens-lib sm/json-transformer))
|
(sm/encoder ::cto/tokens-lib sm/json-transformer))
|
||||||
|
|
||||||
(def encode-plugin-data
|
(def encode-plugin-data
|
||||||
(sm/encoder ctpg/schema:plugin-data sm/json-transformer))
|
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
|
||||||
|
|
||||||
(def encode-storage-object
|
(def encode-storage-object
|
||||||
(sm/encoder schema:storage-object sm/json-transformer))
|
(sm/encoder schema:storage-object sm/json-transformer))
|
||||||
@ -137,31 +127,31 @@
|
|||||||
(sm/decoder schema:manifest sm/json-transformer))
|
(sm/decoder schema:manifest sm/json-transformer))
|
||||||
|
|
||||||
(def decode-media
|
(def decode-media
|
||||||
(sm/decoder ctf/schema:media sm/json-transformer))
|
(sm/decoder ::ctf/media sm/json-transformer))
|
||||||
|
|
||||||
(def decode-component
|
(def decode-component
|
||||||
(sm/decoder ctc/schema:component sm/json-transformer))
|
(sm/decoder ::ctc/component sm/json-transformer))
|
||||||
|
|
||||||
(def decode-color
|
(def decode-color
|
||||||
(sm/decoder ctcl/schema:library-color sm/json-transformer))
|
(sm/decoder ::ctcl/color sm/json-transformer))
|
||||||
|
|
||||||
(def decode-file
|
(def decode-file
|
||||||
(sm/decoder schema:file sm/json-transformer))
|
(sm/decoder schema:file sm/json-transformer))
|
||||||
|
|
||||||
(def decode-page
|
(def decode-page
|
||||||
(sm/decoder ctp/schema:page sm/json-transformer))
|
(sm/decoder ::ctp/page sm/json-transformer))
|
||||||
|
|
||||||
(def decode-shape
|
(def decode-shape
|
||||||
(sm/decoder cts/schema:shape sm/json-transformer))
|
(sm/decoder ::cts/shape sm/json-transformer))
|
||||||
|
|
||||||
(def decode-typography
|
(def decode-typography
|
||||||
(sm/decoder cty/schema:typography sm/json-transformer))
|
(sm/decoder ::cty/typography sm/json-transformer))
|
||||||
|
|
||||||
(def decode-tokens-lib
|
(def decode-tokens-lib
|
||||||
(sm/decoder ctob/schema:tokens-lib sm/json-transformer))
|
(sm/decoder ::cto/tokens-lib sm/json-transformer))
|
||||||
|
|
||||||
(def decode-plugin-data
|
(def decode-plugin-data
|
||||||
(sm/decoder ctpg/schema:plugin-data sm/json-transformer))
|
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
|
||||||
|
|
||||||
(def decode-storage-object
|
(def decode-storage-object
|
||||||
(sm/decoder schema:storage-object sm/json-transformer))
|
(sm/decoder schema:storage-object sm/json-transformer))
|
||||||
@ -175,31 +165,31 @@
|
|||||||
(sm/check-fn schema:manifest))
|
(sm/check-fn schema:manifest))
|
||||||
|
|
||||||
(def validate-file
|
(def validate-file
|
||||||
(sm/check-fn ctf/schema:file))
|
(sm/check-fn ::ctf/file))
|
||||||
|
|
||||||
(def validate-page
|
(def validate-page
|
||||||
(sm/check-fn ctp/schema:page))
|
(sm/check-fn ::ctp/page))
|
||||||
|
|
||||||
(def validate-shape
|
(def validate-shape
|
||||||
(sm/check-fn cts/schema:shape))
|
(sm/check-fn ::cts/shape))
|
||||||
|
|
||||||
(def validate-media
|
(def validate-media
|
||||||
(sm/check-fn ctf/schema:media))
|
(sm/check-fn ::ctf/media))
|
||||||
|
|
||||||
(def validate-color
|
(def validate-color
|
||||||
(sm/check-fn ctcl/schema:library-color))
|
(sm/check-fn ::ctcl/color))
|
||||||
|
|
||||||
(def validate-component
|
(def validate-component
|
||||||
(sm/check-fn ctc/schema:component))
|
(sm/check-fn ::ctc/component))
|
||||||
|
|
||||||
(def validate-typography
|
(def validate-typography
|
||||||
(sm/check-fn cty/schema:typography))
|
(sm/check-fn ::cty/typography))
|
||||||
|
|
||||||
(def validate-tokens-lib
|
(def validate-tokens-lib
|
||||||
(sm/check-fn ctob/schema:tokens-lib))
|
(sm/check-fn ::cto/tokens-lib))
|
||||||
|
|
||||||
(def validate-plugin-data
|
(def validate-plugin-data
|
||||||
(sm/check-fn ctpg/schema:plugin-data))
|
(sm/check-fn ::ctpg/plugin-data))
|
||||||
|
|
||||||
(def validate-storage-object
|
(def validate-storage-object
|
||||||
(sm/check-fn schema:storage-object))
|
(sm/check-fn schema:storage-object))
|
||||||
@ -224,12 +214,9 @@
|
|||||||
(throw (IllegalArgumentException.
|
(throw (IllegalArgumentException.
|
||||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||||
|
|
||||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||||
(db/tx-run! cfg (fn [cfg]
|
(db/tx-run! cfg (fn [cfg]
|
||||||
(cond-> (bfc/get-file cfg file-id
|
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
|
||||||
{:realize? true
|
|
||||||
:include-deleted? true
|
|
||||||
:lock-for-update? true})
|
|
||||||
detach?
|
detach?
|
||||||
(-> (ctf/detach-external-references file-id)
|
(-> (ctf/detach-external-references file-id)
|
||||||
(dissoc :libraries))
|
(dissoc :libraries))
|
||||||
@ -240,13 +227,27 @@
|
|||||||
:always
|
:always
|
||||||
(bfc/clean-file-features))))))
|
(bfc/clean-file-features))))))
|
||||||
|
|
||||||
|
(defn- resolve-extension
|
||||||
|
[mtype]
|
||||||
|
(case mtype
|
||||||
|
"image/png" ".png"
|
||||||
|
"image/jpeg" ".jpg"
|
||||||
|
"image/gif" ".gif"
|
||||||
|
"image/svg+xml" ".svg"
|
||||||
|
"image/webp" ".webp"
|
||||||
|
"font/woff" ".woff"
|
||||||
|
"font/woff2" ".woff2"
|
||||||
|
"font/ttf" ".ttf"
|
||||||
|
"font/otf" ".otf"
|
||||||
|
"application/octet-stream" ".bin"))
|
||||||
|
|
||||||
(defn- export-storage-objects
|
(defn- export-storage-objects
|
||||||
[{:keys [::output] :as cfg}]
|
[{:keys [::output] :as cfg}]
|
||||||
(let [storage (sto/resolve cfg)]
|
(let [storage (sto/resolve cfg)]
|
||||||
(doseq [id (-> bfc/*state* deref :storage-objects not-empty)]
|
(doseq [id (-> bfc/*state* deref :storage-objects not-empty)]
|
||||||
(let [sobject (sto/get-object storage id)
|
(let [sobject (sto/get-object storage id)
|
||||||
smeta (meta sobject)
|
smeta (meta sobject)
|
||||||
ext (cmedia/mtype->extension (:content-type smeta))
|
ext (resolve-extension (:content-type smeta))
|
||||||
path (str "objects/" id ".json")
|
path (str "objects/" id ".json")
|
||||||
params (-> (meta sobject)
|
params (-> (meta sobject)
|
||||||
(assoc :id (:id sobject))
|
(assoc :id (:id sobject))
|
||||||
@ -255,12 +256,10 @@
|
|||||||
|
|
||||||
(write-entry! output path params)
|
(write-entry! output path params)
|
||||||
|
|
||||||
(events/tap :progress {:section :storage-object :id id})
|
|
||||||
|
|
||||||
(with-open [input (sto/get-object-data storage sobject)]
|
(with-open [input (sto/get-object-data storage sobject)]
|
||||||
(.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
|
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
|
||||||
(io/copy input output :size (:size sobject))
|
(io/copy input output :size (:size sobject))
|
||||||
(.closeEntry ^ZipOutputStream output))))))
|
(.closeEntry output))))))
|
||||||
|
|
||||||
(defn- export-file
|
(defn- export-file
|
||||||
[{:keys [::file-id ::output] :as cfg}]
|
[{:keys [::file-id ::output] :as cfg}]
|
||||||
@ -281,8 +280,6 @@
|
|||||||
|
|
||||||
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
|
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
|
||||||
|
|
||||||
(events/tap :progress {:section :file :id file-id})
|
|
||||||
|
|
||||||
(vswap! bfc/*state* update :files assoc file-id
|
(vswap! bfc/*state* update :files assoc file-id
|
||||||
{:id file-id
|
{:id file-id
|
||||||
:name (:name file)
|
:name (:name file)
|
||||||
@ -290,13 +287,13 @@
|
|||||||
|
|
||||||
(let [file (cond-> (select-keys file bfc/file-attrs)
|
(let [file (cond-> (select-keys file bfc/file-attrs)
|
||||||
(:options data)
|
(:options data)
|
||||||
(assoc :options (:options data)))
|
(assoc :options (:options data))
|
||||||
|
|
||||||
file (-> file
|
:always
|
||||||
(dissoc :data)
|
(dissoc :data)
|
||||||
(dissoc :deleted-at)
|
|
||||||
(encode-file))
|
|
||||||
|
|
||||||
|
:always
|
||||||
|
(encode-file))
|
||||||
path (str "files/" file-id ".json")]
|
path (str "files/" file-id ".json")]
|
||||||
(write-entry! output path file))
|
(write-entry! output path file))
|
||||||
|
|
||||||
@ -354,8 +351,7 @@
|
|||||||
typography (encode-typography object)]
|
typography (encode-typography object)]
|
||||||
(write-entry! output path typography)))
|
(write-entry! output path typography)))
|
||||||
|
|
||||||
(when (and tokens-lib
|
(when tokens-lib
|
||||||
(not (ctob/empty-lib? tokens-lib)))
|
|
||||||
(let [path (str "files/" file-id "/tokens.json")
|
(let [path (str "files/" file-id "/tokens.json")
|
||||||
encoded-tokens (encode-tokens-lib tokens-lib)]
|
encoded-tokens (encode-tokens-lib tokens-lib)]
|
||||||
(write-entry! output path encoded-tokens)))))
|
(write-entry! output path encoded-tokens)))))
|
||||||
@ -382,7 +378,6 @@
|
|||||||
params {:type "penpot/export-files"
|
params {:type "penpot/export-files"
|
||||||
:version 1
|
:version 1
|
||||||
:generated-by (str "penpot/" (:full cf/version))
|
:generated-by (str "penpot/" (:full cf/version))
|
||||||
:refer "penpot"
|
|
||||||
:files (vec (vals files))
|
:files (vec (vals files))
|
||||||
:relations rels}]
|
:relations rels}]
|
||||||
(write-entry! output "manifest.json" params))))
|
(write-entry! output "manifest.json" params))))
|
||||||
@ -455,7 +450,7 @@
|
|||||||
(defn- read-manifest
|
(defn- read-manifest
|
||||||
[^ZipFile input]
|
[^ZipFile input]
|
||||||
(let [entry (get-zip-entry input "manifest.json")]
|
(let [entry (get-zip-entry input "manifest.json")]
|
||||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
(with-open [reader (zip-entry-reader input entry)]
|
||||||
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
|
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
|
||||||
(decode-manifest manifest)))))
|
(decode-manifest manifest)))))
|
||||||
|
|
||||||
@ -545,27 +540,24 @@
|
|||||||
|
|
||||||
(defn- read-entry
|
(defn- read-entry
|
||||||
[^ZipFile input entry]
|
[^ZipFile input entry]
|
||||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
(with-open [reader (zip-entry-reader input entry)]
|
||||||
(json/read reader :key-fn json/read-kebab-key)))
|
(json/read reader :key-fn json/read-kebab-key)))
|
||||||
|
|
||||||
(defn- read-plain-entry
|
(defn- read-plain-entry
|
||||||
[^ZipFile input entry]
|
[^ZipFile input entry]
|
||||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
(with-open [reader (zip-entry-reader input entry)]
|
||||||
(json/read reader)))
|
(json/read reader)))
|
||||||
|
|
||||||
(defn- read-file
|
(defn- read-file
|
||||||
[{:keys [::bfc/input ::bfc/timestamp]} file-id]
|
[{:keys [::bfc/input ::file-id]}]
|
||||||
(let [path (str "files/" file-id ".json")
|
(let [path (str "files/" file-id ".json")
|
||||||
entry (get-zip-entry input path)]
|
entry (get-zip-entry input path)]
|
||||||
(-> (read-entry input entry)
|
(-> (read-entry input entry)
|
||||||
(decode-file)
|
(decode-file)
|
||||||
(update :revn d/nilv 1)
|
|
||||||
(update :created-at d/nilv timestamp)
|
|
||||||
(update :modified-at d/nilv timestamp)
|
|
||||||
(validate-file))))
|
(validate-file))))
|
||||||
|
|
||||||
(defn- read-file-plugin-data
|
(defn- read-file-plugin-data
|
||||||
[{:keys [::bfc/input]} file-id]
|
[{:keys [::bfc/input ::file-id]}]
|
||||||
(let [path (str "files/" file-id "/plugin-data.json")
|
(let [path (str "files/" file-id "/plugin-data.json")
|
||||||
entry (get-zip-entry* input path)]
|
entry (get-zip-entry* input path)]
|
||||||
(some->> entry
|
(some->> entry
|
||||||
@ -574,19 +566,13 @@
|
|||||||
(validate-plugin-data))))
|
(validate-plugin-data))))
|
||||||
|
|
||||||
(defn- read-file-media
|
(defn- read-file-media
|
||||||
[{:keys [::bfc/input ::entries]} file-id]
|
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||||
(->> (keep (match-media-entry-fn file-id) entries)
|
(->> (keep (match-media-entry-fn file-id) entries)
|
||||||
(reduce (fn [result {:keys [id entry]}]
|
(reduce (fn [result {:keys [id entry]}]
|
||||||
(let [object (->> (read-entry input entry)
|
(let [object (->> (read-entry input entry)
|
||||||
(decode-media)
|
(decode-media)
|
||||||
(validate-media))
|
(validate-media))
|
||||||
object (-> object
|
object (assoc object :file-id file-id)]
|
||||||
(assoc :file-id file-id)
|
|
||||||
(update :created-at default-now)
|
|
||||||
;; FIXME: this is set default to true for
|
|
||||||
;; setting a value, this prop is no longer
|
|
||||||
;; relevant;
|
|
||||||
(assoc :is-local true))]
|
|
||||||
(if (= id (:id object))
|
(if (= id (:id object))
|
||||||
(conj result object)
|
(conj result object)
|
||||||
result)))
|
result)))
|
||||||
@ -594,7 +580,7 @@
|
|||||||
(not-empty)))
|
(not-empty)))
|
||||||
|
|
||||||
(defn- read-file-colors
|
(defn- read-file-colors
|
||||||
[{:keys [::bfc/input ::entries]} file-id]
|
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||||
(->> (keep (match-color-entry-fn file-id) entries)
|
(->> (keep (match-color-entry-fn file-id) entries)
|
||||||
(reduce (fn [result {:keys [id entry]}]
|
(reduce (fn [result {:keys [id entry]}]
|
||||||
(let [object (->> (read-entry input entry)
|
(let [object (->> (read-entry input entry)
|
||||||
@ -607,38 +593,20 @@
|
|||||||
(not-empty)))
|
(not-empty)))
|
||||||
|
|
||||||
(defn- read-file-components
|
(defn- read-file-components
|
||||||
[{:keys [::bfc/input ::entries]} file-id]
|
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||||
(let [clean-component-post-decode
|
(->> (keep (match-component-entry-fn file-id) entries)
|
||||||
(fn [component]
|
(reduce (fn [result {:keys [id entry]}]
|
||||||
(d/update-when component :objects
|
(let [object (->> (read-entry input entry)
|
||||||
(fn [objects]
|
(decode-component)
|
||||||
(reduce-kv (fn [objects id shape]
|
(validate-component))]
|
||||||
(assoc objects id (bfl/clean-shape-post-decode shape)))
|
(if (= id (:id object))
|
||||||
objects
|
(assoc result id object)
|
||||||
objects))))
|
result)))
|
||||||
clean-component-pre-decode
|
{})
|
||||||
(fn [component]
|
(not-empty)))
|
||||||
(d/update-when component :objects
|
|
||||||
(fn [objects]
|
|
||||||
(reduce-kv (fn [objects id shape]
|
|
||||||
(assoc objects id (bfl/clean-shape-pre-decode shape)))
|
|
||||||
objects
|
|
||||||
objects))))]
|
|
||||||
|
|
||||||
(->> (keep (match-component-entry-fn file-id) entries)
|
|
||||||
(reduce (fn [result {:keys [id entry]}]
|
|
||||||
(let [object (->> (read-entry input entry)
|
|
||||||
(clean-component-pre-decode)
|
|
||||||
(decode-component)
|
|
||||||
(clean-component-post-decode))]
|
|
||||||
(if (= id (:id object))
|
|
||||||
(assoc result id object)
|
|
||||||
result)))
|
|
||||||
{})
|
|
||||||
(not-empty))))
|
|
||||||
|
|
||||||
(defn- read-file-typographies
|
(defn- read-file-typographies
|
||||||
[{:keys [::bfc/input ::entries]} file-id]
|
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||||
(->> (keep (match-typography-entry-fn file-id) entries)
|
(->> (keep (match-typography-entry-fn file-id) entries)
|
||||||
(reduce (fn [result {:keys [id entry]}]
|
(reduce (fn [result {:keys [id entry]}]
|
||||||
(let [object (->> (read-entry input entry)
|
(let [object (->> (read-entry input entry)
|
||||||
@ -651,20 +619,19 @@
|
|||||||
(not-empty)))
|
(not-empty)))
|
||||||
|
|
||||||
(defn- read-file-tokens-lib
|
(defn- read-file-tokens-lib
|
||||||
[{:keys [::bfc/input ::entries]} file-id]
|
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||||
(when-let [entry (d/seek (match-tokens-lib-entry-fn file-id) entries)]
|
(when-let [entry (d/seek (match-tokens-lib-entry-fn file-id) entries)]
|
||||||
(->> (read-plain-entry input entry)
|
(->> (read-plain-entry input entry)
|
||||||
(decode-tokens-lib)
|
(decode-tokens-lib)
|
||||||
(validate-tokens-lib))))
|
(validate-tokens-lib))))
|
||||||
|
|
||||||
(defn- read-file-shapes
|
(defn- read-file-shapes
|
||||||
[{:keys [::bfc/input ::entries] :as cfg} file-id page-id]
|
[{:keys [::bfc/input ::file-id ::page-id ::entries] :as cfg}]
|
||||||
(->> (keep (match-shape-entry-fn file-id page-id) entries)
|
(->> (keep (match-shape-entry-fn file-id page-id) entries)
|
||||||
(reduce (fn [result {:keys [id entry]}]
|
(reduce (fn [result {:keys [id entry]}]
|
||||||
(let [object (->> (read-entry input entry)
|
(let [object (->> (read-entry input entry)
|
||||||
(bfl/clean-shape-pre-decode)
|
|
||||||
(decode-shape)
|
(decode-shape)
|
||||||
(bfl/clean-shape-post-decode))]
|
(validate-shape))]
|
||||||
(if (= id (:id object))
|
(if (= id (:id object))
|
||||||
(assoc result id object)
|
(assoc result id object)
|
||||||
result)))
|
result)))
|
||||||
@ -672,14 +639,15 @@
|
|||||||
(not-empty)))
|
(not-empty)))
|
||||||
|
|
||||||
(defn- read-file-pages
|
(defn- read-file-pages
|
||||||
[{:keys [::bfc/input ::entries] :as cfg} file-id]
|
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
|
||||||
(->> (keep (match-page-entry-fn file-id) entries)
|
(->> (keep (match-page-entry-fn file-id) entries)
|
||||||
(keep (fn [{:keys [id entry]}]
|
(keep (fn [{:keys [id entry]}]
|
||||||
(let [page (->> (read-entry input entry)
|
(let [page (->> (read-entry input entry)
|
||||||
(decode-page))
|
(decode-page))
|
||||||
page (dissoc page :options)]
|
page (dissoc page :options)]
|
||||||
(when (= id (:id page))
|
(when (= id (:id page))
|
||||||
(let [objects (read-file-shapes cfg file-id id)]
|
(let [objects (-> (assoc cfg ::page-id id)
|
||||||
|
(read-file-shapes))]
|
||||||
(assoc page :objects objects))))))
|
(assoc page :objects objects))))))
|
||||||
(sort-by :index)
|
(sort-by :index)
|
||||||
(reduce (fn [result {:keys [id] :as page}]
|
(reduce (fn [result {:keys [id] :as page}]
|
||||||
@ -687,7 +655,7 @@
|
|||||||
(d/ordered-map))))
|
(d/ordered-map))))
|
||||||
|
|
||||||
(defn- read-file-thumbnails
|
(defn- read-file-thumbnails
|
||||||
[{:keys [::bfc/input ::entries] :as cfg} file-id]
|
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
|
||||||
(->> (keep (match-thumbnail-entry-fn file-id) entries)
|
(->> (keep (match-thumbnail-entry-fn file-id) entries)
|
||||||
(reduce (fn [result {:keys [page-id frame-id tag entry]}]
|
(reduce (fn [result {:keys [page-id frame-id tag entry]}]
|
||||||
(let [object (->> (read-entry input entry)
|
(let [object (->> (read-entry input entry)
|
||||||
@ -702,13 +670,14 @@
|
|||||||
(not-empty)))
|
(not-empty)))
|
||||||
|
|
||||||
(defn- read-file-data
|
(defn- read-file-data
|
||||||
[cfg file-id]
|
[cfg]
|
||||||
(let [colors (read-file-colors cfg file-id)
|
(let [colors (read-file-colors cfg)
|
||||||
typographies (read-file-typographies cfg file-id)
|
typographies (read-file-typographies cfg)
|
||||||
tokens-lib (read-file-tokens-lib cfg file-id)
|
tokens-lib (read-file-tokens-lib cfg)
|
||||||
components (read-file-components cfg file-id)
|
components (read-file-components cfg)
|
||||||
plugin-data (read-file-plugin-data cfg file-id)
|
plugin-data (read-file-plugin-data cfg)
|
||||||
pages (read-file-pages cfg file-id)]
|
pages (read-file-pages cfg)]
|
||||||
|
|
||||||
{:pages (-> pages keys vec)
|
{:pages (-> pages keys vec)
|
||||||
:pages-index (into {} pages)
|
:pages-index (into {} pages)
|
||||||
:colors colors
|
:colors colors
|
||||||
@ -718,11 +687,11 @@
|
|||||||
:plugin-data plugin-data}))
|
:plugin-data plugin-data}))
|
||||||
|
|
||||||
(defn- import-file
|
(defn- import-file
|
||||||
[{:keys [::db/conn ::bfc/project-id] :as cfg} {file-id :id file-name :name}]
|
[{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
|
||||||
(let [file-id' (bfc/lookup-index file-id)
|
(let [file-id' (bfc/lookup-index file-id)
|
||||||
file (read-file cfg file-id)
|
file (read-file cfg)
|
||||||
media (read-file-media cfg file-id)
|
media (read-file-media cfg)
|
||||||
thumbnails (read-file-thumbnails cfg file-id)]
|
thumbnails (read-file-thumbnails cfg)]
|
||||||
|
|
||||||
(l/dbg :hint "processing file"
|
(l/dbg :hint "processing file"
|
||||||
:id (str file-id')
|
:id (str file-id')
|
||||||
@ -731,50 +700,28 @@
|
|||||||
:version (:version file)
|
:version (:version file)
|
||||||
::l/sync? true)
|
::l/sync? true)
|
||||||
|
|
||||||
(vswap! bfc/*state* update :index bfc/update-index media :id)
|
(events/tap :progress {:section :file :name file-name})
|
||||||
|
|
||||||
(events/tap :progress {:section :media :file-id file-id})
|
(when media
|
||||||
|
;; Update index with media
|
||||||
|
(l/dbg :hint "update media index"
|
||||||
|
:file-id (str file-id')
|
||||||
|
:total (count media)
|
||||||
|
::l/sync? true)
|
||||||
|
|
||||||
(doseq [item media]
|
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||||
(let [params (-> item
|
(vswap! bfc/*state* update :media into media))
|
||||||
(update :id bfc/lookup-index)
|
|
||||||
(assoc :file-id file-id')
|
|
||||||
(d/update-when :media-id bfc/lookup-index)
|
|
||||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
|
||||||
|
|
||||||
(l/dbg :hint "inserting media object"
|
(when thumbnails
|
||||||
:file-id (str file-id')
|
(l/dbg :hint "update thumbnails index"
|
||||||
:id (str (:id params))
|
:file-id (str file-id')
|
||||||
:media-id (str (:media-id params))
|
:total (count thumbnails)
|
||||||
:thumbnail-id (str (:thumbnail-id params))
|
::l/sync? true)
|
||||||
:old-id (str (:id item))
|
|
||||||
::l/sync? true)
|
|
||||||
|
|
||||||
(db/insert! conn :file-media-object params
|
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
|
||||||
::db/on-conflict-do-nothing? (::bfc/overwrite cfg))))
|
(vswap! bfc/*state* update :thumbnails into thumbnails))
|
||||||
|
|
||||||
(events/tap :progress {:section :thumbnails :file-id file-id})
|
(let [data (-> (read-file-data cfg)
|
||||||
|
|
||||||
(doseq [item thumbnails]
|
|
||||||
(let [media-id (bfc/lookup-index (:media-id item))
|
|
||||||
object-id (-> (assoc item :file-id file-id')
|
|
||||||
(cth/fmt-object-id))
|
|
||||||
params {:file-id file-id'
|
|
||||||
:object-id object-id
|
|
||||||
:tag (:tag item)
|
|
||||||
:media-id media-id}]
|
|
||||||
|
|
||||||
(l/dbg :hint "inserting object thumbnail"
|
|
||||||
:file-id (str file-id')
|
|
||||||
:media-id (str media-id)
|
|
||||||
::l/sync? true)
|
|
||||||
|
|
||||||
(db/insert! conn :file-tagged-object-thumbnail params
|
|
||||||
::db/on-conflict-do-nothing? true)))
|
|
||||||
|
|
||||||
(events/tap :progress {:section :file :file-id file-id})
|
|
||||||
|
|
||||||
(let [data (-> (read-file-data cfg file-id)
|
|
||||||
(d/without-nils)
|
(d/without-nils)
|
||||||
(assoc :id file-id')
|
(assoc :id file-id')
|
||||||
(cond-> (:options file)
|
(cond-> (:options file)
|
||||||
@ -785,13 +732,12 @@
|
|||||||
(assoc :data data)
|
(assoc :data data)
|
||||||
(assoc :name file-name)
|
(assoc :name file-name)
|
||||||
(assoc :project-id project-id)
|
(assoc :project-id project-id)
|
||||||
(dissoc :options))
|
(dissoc :options)
|
||||||
|
(bfc/process-file))]
|
||||||
|
|
||||||
file (bfc/process-file cfg file)
|
|
||||||
file (ctf/check-file file)]
|
|
||||||
|
|
||||||
(bfm/register-pending-migrations! cfg file)
|
(bfm/register-pending-migrations! cfg file)
|
||||||
(bfc/save-file! cfg file)
|
(bfc/save-file! cfg file ::db/return-keys false)
|
||||||
|
|
||||||
file-id')))
|
file-id')))
|
||||||
|
|
||||||
@ -821,111 +767,107 @@
|
|||||||
entries (keep (match-storage-entry-fn) entries)]
|
entries (keep (match-storage-entry-fn) entries)]
|
||||||
|
|
||||||
(doseq [{:keys [id entry]} entries]
|
(doseq [{:keys [id entry]} entries]
|
||||||
(let [object (-> (read-entry input entry)
|
(let [object (->> (read-entry input entry)
|
||||||
(decode-storage-object)
|
(decode-storage-object)
|
||||||
(update :bucket d/nilv sto/default-bucket)
|
(validate-storage-object))]
|
||||||
(validate-storage-object))
|
|
||||||
|
|
||||||
ext (cmedia/mtype->extension (:content-type object))
|
(when (not= id (:id object))
|
||||||
path (str "objects/" id ext)
|
|
||||||
content (->> path
|
|
||||||
(get-zip-entry input)
|
|
||||||
(zip-entry-storage-content input))]
|
|
||||||
|
|
||||||
(when (not= (:size object) (sto/get-size content))
|
|
||||||
(ex/raise :type :validation
|
(ex/raise :type :validation
|
||||||
:code :inconsistent-penpot-file
|
:code :inconsistent-penpot-file
|
||||||
:hint "found corrupted storage object: size does not match"
|
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
|
||||||
:path path
|
:expected-id (str id)
|
||||||
:expected-size (:size object)
|
:found-id (str (:id object))))
|
||||||
:found-size (sto/get-size content)))
|
|
||||||
|
|
||||||
(when-let [hash (get object :hash)]
|
(let [ext (resolve-extension (:content-type object))
|
||||||
(when (not= hash (sto/get-hash content))
|
path (str "objects/" id ext)
|
||||||
|
content (->> path
|
||||||
|
(get-zip-entry input)
|
||||||
|
(zip-entry-storage-content input))]
|
||||||
|
|
||||||
|
(when (not= (:size object) (sto/get-size content))
|
||||||
|
(ex/raise :type :validation
|
||||||
|
:code :inconsistent-penpot-file
|
||||||
|
:hint "found corrupted storage object: size does not match"
|
||||||
|
:path path
|
||||||
|
:expected-size (:size object)
|
||||||
|
:found-size (sto/get-size content)))
|
||||||
|
|
||||||
|
(when (not= (:hash object) (sto/get-hash content))
|
||||||
(ex/raise :type :validation
|
(ex/raise :type :validation
|
||||||
:code :inconsistent-penpot-file
|
:code :inconsistent-penpot-file
|
||||||
:hint "found corrupted storage object: hash does not match"
|
:hint "found corrupted storage object: hash does not match"
|
||||||
:path path
|
:path path
|
||||||
:expected-hash (:hash object)
|
:expected-hash (:hash object)
|
||||||
:found-hash (sto/get-hash content))))
|
:found-hash (sto/get-hash content)))
|
||||||
|
|
||||||
(let [params (-> object
|
(let [params (-> object
|
||||||
(dissoc :id :size)
|
(dissoc :id :size)
|
||||||
(assoc ::sto/content content)
|
(assoc ::sto/content content)
|
||||||
(assoc ::sto/deduplicate? true)
|
(assoc ::sto/deduplicate? true)
|
||||||
(assoc ::sto/touched-at timestamp))
|
(assoc ::sto/touched-at timestamp))
|
||||||
sobject (sto/put-object! storage params)]
|
sobject (sto/put-object! storage params)]
|
||||||
|
|
||||||
(l/dbg :hint "persisted storage object"
|
(l/dbg :hint "persisted storage object"
|
||||||
:id (str (:id sobject))
|
:id (str (:id sobject))
|
||||||
:prev-id (str id)
|
:prev-id (str id)
|
||||||
:bucket (:bucket params)
|
:bucket (:bucket params)
|
||||||
::l/sync? true)
|
::l/sync? true)
|
||||||
|
|
||||||
(vswap! bfc/*state* update :index assoc id (:id sobject)))))))
|
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
|
||||||
|
|
||||||
(defn- import-files*
|
(defn- import-file-media
|
||||||
[{:keys [::manifest] :as cfg}]
|
[{:keys [::db/conn] :as cfg}]
|
||||||
(bfc/disable-database-timeouts! cfg)
|
(events/tap :progress {:section :media})
|
||||||
|
|
||||||
(vswap! bfc/*state* update :index bfc/update-index (:files manifest) :id)
|
(doseq [item (:media @bfc/*state*)]
|
||||||
|
(let [params (-> item
|
||||||
|
(update :id bfc/lookup-index)
|
||||||
|
(update :file-id bfc/lookup-index)
|
||||||
|
(d/update-when :media-id bfc/lookup-index)
|
||||||
|
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||||
|
|
||||||
(import-storage-objects cfg)
|
(l/dbg :hint "inserting file media object"
|
||||||
|
:old-id (str (:id item))
|
||||||
|
:id (str (:id params))
|
||||||
|
:file-id (str (:file-id params))
|
||||||
|
::l/sync? true)
|
||||||
|
|
||||||
(let [files (get manifest :files)
|
(db/insert! conn :file-media-object params))))
|
||||||
result (reduce (fn [result file]
|
|
||||||
(let [name' (get file :name)
|
|
||||||
file (assoc file :name name')]
|
|
||||||
(conj result (import-file cfg file))))
|
|
||||||
[]
|
|
||||||
files)]
|
|
||||||
|
|
||||||
(import-file-relations cfg)
|
(defn- import-file-thumbnails
|
||||||
(bfm/apply-pending-migrations! cfg)
|
[{:keys [::db/conn] :as cfg}]
|
||||||
|
(events/tap :progress {:section :thumbnails})
|
||||||
|
(doseq [item (:thumbnails @bfc/*state*)]
|
||||||
|
(let [file-id (bfc/lookup-index (:file-id item))
|
||||||
|
media-id (bfc/lookup-index (:media-id item))
|
||||||
|
object-id (-> (assoc item :file-id file-id)
|
||||||
|
(cth/fmt-object-id))
|
||||||
|
params {:file-id file-id
|
||||||
|
:object-id object-id
|
||||||
|
:tag (:tag item)
|
||||||
|
:media-id media-id}]
|
||||||
|
|
||||||
result))
|
(l/dbg :hint "inserting file object thumbnail"
|
||||||
|
:file-id (str file-id)
|
||||||
|
:media-id (str media-id)
|
||||||
|
::l/sync? true)
|
||||||
|
|
||||||
(defn- import-file-and-overwrite*
|
(db/insert! conn :file-tagged-object-thumbnail params))))
|
||||||
[{:keys [::manifest ::bfc/file-id] :as cfg}]
|
|
||||||
|
|
||||||
(when (not= 1 (count (:files manifest)))
|
|
||||||
(ex/raise :type :validation
|
|
||||||
:code :invalid-condition
|
|
||||||
:hint "unable to perform in-place update with binfile containing more than 1 file"
|
|
||||||
:manifest manifest))
|
|
||||||
|
|
||||||
(bfc/disable-database-timeouts! cfg)
|
|
||||||
|
|
||||||
(let [ref-file (bfc/get-minimal-file cfg file-id ::db/for-update true)
|
|
||||||
file (first (get manifest :files))
|
|
||||||
cfg (assoc cfg ::bfc/overwrite true)]
|
|
||||||
|
|
||||||
(vswap! bfc/*state* update :index assoc (:id file) file-id)
|
|
||||||
|
|
||||||
(binding [bfc/*options* cfg
|
|
||||||
bfc/*reference-file* ref-file]
|
|
||||||
|
|
||||||
(import-storage-objects cfg)
|
|
||||||
(import-file cfg file)
|
|
||||||
|
|
||||||
(bfc/invalidate-thumbnails cfg file-id)
|
|
||||||
(bfm/apply-pending-migrations! cfg)
|
|
||||||
|
|
||||||
[file-id])))
|
|
||||||
|
|
||||||
(defn- import-files
|
(defn- import-files
|
||||||
[{:keys [::bfc/timestamp ::bfc/input] :or {timestamp (ct/now)} :as cfg}]
|
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]
|
||||||
|
|
||||||
(assert (instance? ZipFile input) "expected zip file")
|
(dm/assert!
|
||||||
(assert (ct/inst? timestamp) "expected valid instant")
|
"expected zip file"
|
||||||
|
(instance? ZipFile input))
|
||||||
|
|
||||||
|
(dm/assert!
|
||||||
|
"expected valid instant"
|
||||||
|
(dt/instant? timestamp))
|
||||||
|
|
||||||
(let [manifest (-> (read-manifest input)
|
(let [manifest (-> (read-manifest input)
|
||||||
(validate-manifest))
|
(validate-manifest))
|
||||||
entries (read-zip-entries input)
|
entries (read-zip-entries input)]
|
||||||
cfg (-> cfg
|
|
||||||
(assoc ::entries entries)
|
|
||||||
(assoc ::manifest manifest)
|
|
||||||
(assoc ::bfc/timestamp timestamp))]
|
|
||||||
|
|
||||||
(when-not (= "penpot/export-files" (:type manifest))
|
(when-not (= "penpot/export-files" (:type manifest))
|
||||||
(ex/raise :type :validation
|
(ex/raise :type :validation
|
||||||
@ -948,10 +890,35 @@
|
|||||||
|
|
||||||
(events/tap :progress {:section :manifest})
|
(events/tap :progress {:section :manifest})
|
||||||
|
|
||||||
(binding [bfc/*state* (volatile! {:media [] :index {}})]
|
(let [index (bfc/update-index (map :id (:files manifest)))
|
||||||
(if (::bfc/file-id cfg)
|
state {:media [] :index index}
|
||||||
(db/tx-run! cfg import-file-and-overwrite*)
|
cfg (-> cfg
|
||||||
(db/tx-run! cfg import-files*)))))
|
(assoc ::entries entries)
|
||||||
|
(assoc ::manifest manifest)
|
||||||
|
(assoc ::bfc/timestamp timestamp))]
|
||||||
|
|
||||||
|
(binding [bfc/*state* (volatile! state)]
|
||||||
|
(db/tx-run! cfg (fn [cfg]
|
||||||
|
(bfc/disable-database-timeouts! cfg)
|
||||||
|
(let [ids (->> (:files manifest)
|
||||||
|
(reduce (fn [result {:keys [id] :as file}]
|
||||||
|
(let [name' (get file :name)
|
||||||
|
name' (if (map? name)
|
||||||
|
(get name id)
|
||||||
|
name')]
|
||||||
|
(conj result (-> cfg
|
||||||
|
(assoc ::file-id id)
|
||||||
|
(assoc ::file-name name')
|
||||||
|
(import-file)))))
|
||||||
|
[]))]
|
||||||
|
(import-file-relations cfg)
|
||||||
|
(import-storage-objects cfg)
|
||||||
|
(import-file-media cfg)
|
||||||
|
(import-file-thumbnails cfg)
|
||||||
|
|
||||||
|
(bfm/apply-pending-migrations! cfg)
|
||||||
|
|
||||||
|
ids)))))))
|
||||||
|
|
||||||
;; --- PUBLIC API
|
;; --- PUBLIC API
|
||||||
|
|
||||||
@ -968,23 +935,24 @@
|
|||||||
|
|
||||||
[{:keys [::bfc/ids] :as cfg} output]
|
[{:keys [::bfc/ids] :as cfg} output]
|
||||||
|
|
||||||
(assert
|
(dm/assert!
|
||||||
(and (set? ids) (every? uuid? ids))
|
"expected a set of uuid's for `::bfc/ids` parameter"
|
||||||
"expected a set of uuid's for `::bfc/ids` parameter")
|
(and (set? ids)
|
||||||
|
(every? uuid? ids)))
|
||||||
|
|
||||||
(assert
|
(dm/assert!
|
||||||
(satisfies? jio/IOFactory output)
|
"expected instance of jio/IOFactory for `input`"
|
||||||
"expected instance of jio/IOFactory for `input`")
|
(satisfies? jio/IOFactory output))
|
||||||
|
|
||||||
(let [id (uuid/next)
|
(let [id (uuid/next)
|
||||||
tp (ct/tpoint)
|
tp (dt/tpoint)
|
||||||
ab (volatile! false)
|
ab (volatile! false)
|
||||||
cs (volatile! nil)]
|
cs (volatile! nil)]
|
||||||
(try
|
(try
|
||||||
(l/info :hint "start exportation" :export-id (str id))
|
(l/info :hint "start exportation" :export-id (str id))
|
||||||
(binding [bfc/*state* (volatile! (bfc/initial-state))]
|
(binding [bfc/*state* (volatile! (bfc/initial-state))]
|
||||||
(with-open [^AutoCloseable output (io/output-stream output)]
|
(with-open [output (io/output-stream output)]
|
||||||
(with-open [^AutoCloseable output (ZipOutputStream. output)]
|
(with-open [output (ZipOutputStream. output)]
|
||||||
(let [cfg (assoc cfg ::output output)]
|
(let [cfg (assoc cfg ::output output)]
|
||||||
(export-files cfg)
|
(export-files cfg)
|
||||||
(export-storage-objects cfg)))))
|
(export-storage-objects cfg)))))
|
||||||
@ -1013,22 +981,22 @@
|
|||||||
(defn import-files!
|
(defn import-files!
|
||||||
[{:keys [::bfc/input] :as cfg}]
|
[{:keys [::bfc/input] :as cfg}]
|
||||||
|
|
||||||
(assert
|
(dm/assert!
|
||||||
|
"expected valid profile-id and project-id on `cfg`"
|
||||||
(and (uuid? (::bfc/profile-id cfg))
|
(and (uuid? (::bfc/profile-id cfg))
|
||||||
(uuid? (::bfc/project-id cfg)))
|
(uuid? (::bfc/project-id cfg))))
|
||||||
"expected valid profile-id and project-id on `cfg`")
|
|
||||||
|
|
||||||
(assert
|
(dm/assert!
|
||||||
(io/coercible? input)
|
"expected instance of jio/IOFactory for `input`"
|
||||||
"expected instance of jio/IOFactory for `input`")
|
(io/coercible? input))
|
||||||
|
|
||||||
(let [id (uuid/next)
|
(let [id (uuid/next)
|
||||||
tp (ct/tpoint)
|
tp (dt/tpoint)
|
||||||
cs (volatile! nil)]
|
cs (volatile! nil)]
|
||||||
|
|
||||||
(l/info :hint "import: started" :id (str id))
|
(l/info :hint "import: started" :id (str id))
|
||||||
(try
|
(try
|
||||||
(with-open [input (ZipFile. ^File (fs/file input))]
|
(with-open [input (ZipFile. (fs/file input))]
|
||||||
(import-files (assoc cfg ::bfc/input input)))
|
(import-files (assoc cfg ::bfc/input input)))
|
||||||
|
|
||||||
(catch Throwable cause
|
(catch Throwable cause
|
||||||
@ -1038,11 +1006,5 @@
|
|||||||
(finally
|
(finally
|
||||||
(l/info :hint "import: terminated"
|
(l/info :hint "import: terminated"
|
||||||
:id (str id)
|
:id (str id)
|
||||||
:elapsed (ct/format-duration (tp))
|
:elapsed (dt/format-duration (tp))
|
||||||
:error? (some? @cs))))))
|
:error? (some? @cs))))))
|
||||||
|
|
||||||
(defn get-manifest
|
|
||||||
[path]
|
|
||||||
(with-open [^AutoCloseable input (ZipFile. ^File (fs/file path))]
|
|
||||||
(-> (read-manifest input)
|
|
||||||
(validate-manifest))))
|
|
||||||
|
|||||||
@ -5,16 +5,17 @@
|
|||||||
;; Copyright (c) KALEIDOS INC
|
;; Copyright (c) KALEIDOS INC
|
||||||
|
|
||||||
(ns app.config
|
(ns app.config
|
||||||
|
"A configuration management."
|
||||||
(:refer-clojure :exclude [get])
|
(:refer-clojure :exclude [get])
|
||||||
(:require
|
(:require
|
||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.flags :as flags]
|
[app.common.flags :as flags]
|
||||||
[app.common.schema :as sm]
|
[app.common.schema :as sm]
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.uri :as u]
|
[app.common.uri :as u]
|
||||||
[app.common.version :as v]
|
[app.common.version :as v]
|
||||||
[app.util.overrides]
|
[app.util.overrides]
|
||||||
|
[app.util.time :as dt]
|
||||||
[clojure.core :as c]
|
[clojure.core :as c]
|
||||||
[clojure.java.io :as io]
|
[clojure.java.io :as io]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
@ -46,27 +47,22 @@
|
|||||||
:auto-file-snapshot-timeout "3h"
|
:auto-file-snapshot-timeout "3h"
|
||||||
|
|
||||||
:public-uri "http://localhost:3449"
|
:public-uri "http://localhost:3449"
|
||||||
|
|
||||||
:host "localhost"
|
:host "localhost"
|
||||||
:tenant "default"
|
:tenant "default"
|
||||||
|
|
||||||
:redis-uri "redis://redis/0"
|
:redis-uri "redis://redis/0"
|
||||||
|
|
||||||
:file-data-backend "legacy-db"
|
|
||||||
|
|
||||||
:objects-storage-backend "fs"
|
:objects-storage-backend "fs"
|
||||||
:objects-storage-fs-directory "assets"
|
:objects-storage-fs-directory "assets"
|
||||||
|
|
||||||
:auth-token-cookie-name "auth-token"
|
|
||||||
|
|
||||||
:assets-path "/internal/assets/"
|
:assets-path "/internal/assets/"
|
||||||
:smtp-default-reply-to "Penpot <no-reply@example.com>"
|
:smtp-default-reply-to "Penpot <no-reply@example.com>"
|
||||||
:smtp-default-from "Penpot <no-reply@example.com>"
|
:smtp-default-from "Penpot <no-reply@example.com>"
|
||||||
|
|
||||||
:profile-complaint-max-age (ct/duration {:days 7})
|
:profile-complaint-max-age (dt/duration {:days 7})
|
||||||
:profile-complaint-threshold 2
|
:profile-complaint-threshold 2
|
||||||
|
|
||||||
:profile-bounce-max-age (ct/duration {:days 7})
|
:profile-bounce-max-age (dt/duration {:days 7})
|
||||||
:profile-bounce-threshold 10
|
:profile-bounce-threshold 10
|
||||||
|
|
||||||
:telemetry-uri "https://telemetry.penpot.app/"
|
:telemetry-uri "https://telemetry.penpot.app/"
|
||||||
@ -82,10 +78,7 @@
|
|||||||
:initial-project-skey "initial-project"
|
:initial-project-skey "initial-project"
|
||||||
|
|
||||||
;; time to avoid email sending after profile modification
|
;; time to avoid email sending after profile modification
|
||||||
:email-verify-threshold "15m"
|
:email-verify-threshold "15m"})
|
||||||
|
|
||||||
:quotes-upload-sessions-per-profile 5
|
|
||||||
:quotes-upload-chunks-per-session 20})
|
|
||||||
|
|
||||||
(def schema:config
|
(def schema:config
|
||||||
(do #_sm/optional-keys
|
(do #_sm/optional-keys
|
||||||
@ -95,29 +88,24 @@
|
|||||||
[:secret-key {:optional true} :string]
|
[:secret-key {:optional true} :string]
|
||||||
|
|
||||||
[:tenant {:optional false} :string]
|
[:tenant {:optional false} :string]
|
||||||
[:public-uri {:optional false} ::sm/uri]
|
[:public-uri {:optional false} :string]
|
||||||
[:host {:optional false} :string]
|
[:host {:optional false} :string]
|
||||||
|
|
||||||
[:http-server-port {:optional true} ::sm/int]
|
[:http-server-port {:optional true} ::sm/int]
|
||||||
[:http-server-host {:optional true} :string]
|
[:http-server-host {:optional true} :string]
|
||||||
[:http-server-max-body-size {:optional true} ::sm/int]
|
[:http-server-max-body-size {:optional true} ::sm/int]
|
||||||
|
[:http-server-max-multipart-body-size {:optional true} ::sm/int]
|
||||||
[:http-server-io-threads {:optional true} ::sm/int]
|
[:http-server-io-threads {:optional true} ::sm/int]
|
||||||
[:http-server-max-worker-threads {:optional true} ::sm/int]
|
[:http-server-worker-threads {:optional true} ::sm/int]
|
||||||
|
|
||||||
[:exporter-shared-key {:optional true} :string]
|
|
||||||
[:nitrate-shared-key {:optional true} :string]
|
|
||||||
[:nexus-shared-key {:optional true} :string]
|
|
||||||
[:management-api-key {:optional true} :string]
|
|
||||||
|
|
||||||
[:telemetry-uri {:optional true} :string]
|
[:telemetry-uri {:optional true} :string]
|
||||||
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
|
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
|
||||||
|
|
||||||
[:auto-file-snapshot-every {:optional true} ::sm/int]
|
[:auto-file-snapshot-every {:optional true} ::sm/int]
|
||||||
[:auto-file-snapshot-timeout {:optional true} ::ct/duration]
|
[:auto-file-snapshot-timeout {:optional true} ::dt/duration]
|
||||||
|
|
||||||
[:media-max-file-size {:optional true} ::sm/int]
|
[:media-max-file-size {:optional true} ::sm/int]
|
||||||
[:deletion-delay {:optional true} ::ct/duration]
|
[:deletion-delay {:optional true} ::dt/duration] ;; REVIEW
|
||||||
[:file-clean-delay {:optional true} ::ct/duration]
|
|
||||||
[:telemetry-enabled {:optional true} ::sm/boolean]
|
[:telemetry-enabled {:optional true} ::sm/boolean]
|
||||||
[:default-blob-version {:optional true} ::sm/int]
|
[:default-blob-version {:optional true} ::sm/int]
|
||||||
[:allow-demo-users {:optional true} ::sm/boolean]
|
[:allow-demo-users {:optional true} ::sm/boolean]
|
||||||
@ -157,14 +145,13 @@
|
|||||||
[:quotes-snapshots-per-team {:optional true} ::sm/int]
|
[:quotes-snapshots-per-team {:optional true} ::sm/int]
|
||||||
[:quotes-team-access-requests-per-team {:optional true} ::sm/int]
|
[:quotes-team-access-requests-per-team {:optional true} ::sm/int]
|
||||||
[:quotes-team-access-requests-per-requester {:optional true} ::sm/int]
|
[:quotes-team-access-requests-per-requester {:optional true} ::sm/int]
|
||||||
[:quotes-upload-sessions-per-profile {:optional true} ::sm/int]
|
|
||||||
[:quotes-upload-chunks-per-session {:optional true} ::sm/int]
|
|
||||||
|
|
||||||
|
[:auth-data-cookie-domain {:optional true} :string]
|
||||||
[:auth-token-cookie-name {:optional true} :string]
|
[:auth-token-cookie-name {:optional true} :string]
|
||||||
[:auth-token-cookie-max-age {:optional true} ::ct/duration]
|
[:auth-token-cookie-max-age {:optional true} ::dt/duration]
|
||||||
|
|
||||||
[:registration-domain-whitelist {:optional true} [::sm/set :string]]
|
[:registration-domain-whitelist {:optional true} [::sm/set :string]]
|
||||||
[:email-verify-threshold {:optional true} ::ct/duration]
|
[:email-verify-threshold {:optional true} ::dt/duration]
|
||||||
|
|
||||||
[:github-client-id {:optional true} :string]
|
[:github-client-id {:optional true} :string]
|
||||||
[:github-client-secret {:optional true} :string]
|
[:github-client-secret {:optional true} :string]
|
||||||
@ -174,7 +161,7 @@
|
|||||||
[:google-client-id {:optional true} :string]
|
[:google-client-id {:optional true} :string]
|
||||||
[:google-client-secret {:optional true} :string]
|
[:google-client-secret {:optional true} :string]
|
||||||
[:oidc-client-id {:optional true} :string]
|
[:oidc-client-id {:optional true} :string]
|
||||||
[:oidc-user-info-source {:optional true} [:enum "auto" "userinfo" "token"]]
|
[:oidc-user-info-source {:optional true} :keyword]
|
||||||
[:oidc-client-secret {:optional true} :string]
|
[:oidc-client-secret {:optional true} :string]
|
||||||
[:oidc-base-uri {:optional true} :string]
|
[:oidc-base-uri {:optional true} :string]
|
||||||
[:oidc-token-uri {:optional true} :string]
|
[:oidc-token-uri {:optional true} :string]
|
||||||
@ -199,9 +186,9 @@
|
|||||||
[:ldap-starttls {:optional true} ::sm/boolean]
|
[:ldap-starttls {:optional true} ::sm/boolean]
|
||||||
[:ldap-user-query {:optional true} :string]
|
[:ldap-user-query {:optional true} :string]
|
||||||
|
|
||||||
[:profile-bounce-max-age {:optional true} ::ct/duration]
|
[:profile-bounce-max-age {:optional true} ::dt/duration]
|
||||||
[:profile-bounce-threshold {:optional true} ::sm/int]
|
[:profile-bounce-threshold {:optional true} ::sm/int]
|
||||||
[:profile-complaint-max-age {:optional true} ::ct/duration]
|
[:profile-complaint-max-age {:optional true} ::dt/duration]
|
||||||
[:profile-complaint-threshold {:optional true} ::sm/int]
|
[:profile-complaint-threshold {:optional true} ::sm/int]
|
||||||
|
|
||||||
[:redis-uri {:optional true} ::sm/uri]
|
[:redis-uri {:optional true} ::sm/uri]
|
||||||
@ -223,29 +210,24 @@
|
|||||||
[:prepl-host {:optional true} :string]
|
[:prepl-host {:optional true} :string]
|
||||||
[:prepl-port {:optional true} ::sm/int]
|
[:prepl-port {:optional true} ::sm/int]
|
||||||
|
|
||||||
[:file-data-backend {:optional true} [:enum "db" "legacy-db" "storage"]]
|
|
||||||
|
|
||||||
[:media-directory {:optional true} :string] ;; REVIEW
|
[:media-directory {:optional true} :string] ;; REVIEW
|
||||||
[:media-uri {:optional true} :string]
|
[:media-uri {:optional true} :string]
|
||||||
[:assets-path {:optional true} :string]
|
[:assets-path {:optional true} :string]
|
||||||
|
|
||||||
[:netty-io-threads {:optional true} ::sm/int]
|
;; Legacy, will be removed in 2.5
|
||||||
[:executor-threads {:optional true} ::sm/int]
|
|
||||||
|
|
||||||
[:nitrate-backend-uri {:optional true} ::sm/uri]
|
|
||||||
|
|
||||||
;; DEPRECATED
|
|
||||||
[:assets-storage-backend {:optional true} :keyword]
|
[:assets-storage-backend {:optional true} :keyword]
|
||||||
[:storage-assets-fs-directory {:optional true} :string]
|
[:storage-assets-fs-directory {:optional true} :string]
|
||||||
[:storage-assets-s3-bucket {:optional true} :string]
|
[:storage-assets-s3-bucket {:optional true} :string]
|
||||||
[:storage-assets-s3-region {:optional true} :keyword]
|
[:storage-assets-s3-region {:optional true} :keyword]
|
||||||
[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
|
[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
|
||||||
|
[:storage-assets-s3-io-threads {:optional true} ::sm/int]
|
||||||
|
|
||||||
[:objects-storage-backend {:optional true} :keyword]
|
[:objects-storage-backend {:optional true} :keyword]
|
||||||
[:objects-storage-fs-directory {:optional true} :string]
|
[:objects-storage-fs-directory {:optional true} :string]
|
||||||
[:objects-storage-s3-bucket {:optional true} :string]
|
[:objects-storage-s3-bucket {:optional true} :string]
|
||||||
[:objects-storage-s3-region {:optional true} :keyword]
|
[:objects-storage-s3-region {:optional true} :keyword]
|
||||||
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]]))
|
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]
|
||||||
|
[:objects-storage-s3-io-threads {:optional true} ::sm/int]]))
|
||||||
|
|
||||||
(defn- parse-flags
|
(defn- parse-flags
|
||||||
[config]
|
[config]
|
||||||
@ -316,12 +298,7 @@
|
|||||||
(defn get-deletion-delay
|
(defn get-deletion-delay
|
||||||
[]
|
[]
|
||||||
(or (c/get config :deletion-delay)
|
(or (c/get config :deletion-delay)
|
||||||
(ct/duration {:days 7})))
|
(dt/duration {:days 7})))
|
||||||
|
|
||||||
(defn get-file-clean-delay
|
|
||||||
[]
|
|
||||||
(or (c/get config :file-clean-delay)
|
|
||||||
(ct/duration {:days 2})))
|
|
||||||
|
|
||||||
(defn get
|
(defn get
|
||||||
"A configuration getter. Helps code be more testable."
|
"A configuration getter. Helps code be more testable."
|
||||||
@ -330,9 +307,5 @@
|
|||||||
([key default]
|
([key default]
|
||||||
(c/get config key default)))
|
(c/get config key default)))
|
||||||
|
|
||||||
(defn logging-context
|
|
||||||
[]
|
|
||||||
{:backend/version (:full version)})
|
|
||||||
|
|
||||||
;; Set value for all new threads bindings.
|
;; Set value for all new threads bindings.
|
||||||
(alter-var-root #'*assert* (constantly (contains? flags :backend-asserts)))
|
(alter-var-root #'*assert* (constantly (contains? flags :backend-asserts)))
|
||||||
|
|||||||
@ -10,20 +10,19 @@
|
|||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.geom.point :as gpt]
|
[app.common.geom.point :as gpt]
|
||||||
[app.common.json :as json]
|
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.schema :as sm]
|
[app.common.schema :as sm]
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.transit :as t]
|
[app.common.transit :as t]
|
||||||
[app.common.uuid :as uuid]
|
[app.common.uuid :as uuid]
|
||||||
[app.db.sql :as sql]
|
[app.db.sql :as sql]
|
||||||
[app.metrics :as mtx]
|
[app.metrics :as mtx]
|
||||||
|
[app.util.json :as json]
|
||||||
|
[app.util.time :as dt]
|
||||||
[clojure.java.io :as io]
|
[clojure.java.io :as io]
|
||||||
[clojure.set :as set]
|
[clojure.set :as set]
|
||||||
[integrant.core :as ig]
|
[integrant.core :as ig]
|
||||||
[next.jdbc :as jdbc]
|
[next.jdbc :as jdbc]
|
||||||
[next.jdbc.date-time :as jdbc-dt]
|
[next.jdbc.date-time :as jdbc-dt]
|
||||||
[next.jdbc.prepare :as jdbc.prepare]
|
|
||||||
[next.jdbc.transaction])
|
[next.jdbc.transaction])
|
||||||
(:import
|
(:import
|
||||||
com.zaxxer.hikari.HikariConfig
|
com.zaxxer.hikari.HikariConfig
|
||||||
@ -34,18 +33,15 @@
|
|||||||
java.io.InputStream
|
java.io.InputStream
|
||||||
java.io.OutputStream
|
java.io.OutputStream
|
||||||
java.sql.Connection
|
java.sql.Connection
|
||||||
java.sql.PreparedStatement
|
|
||||||
java.sql.Savepoint
|
java.sql.Savepoint
|
||||||
|
org.postgresql.PGConnection
|
||||||
org.postgresql.geometric.PGpoint
|
org.postgresql.geometric.PGpoint
|
||||||
org.postgresql.jdbc.PgArray
|
org.postgresql.jdbc.PgArray
|
||||||
org.postgresql.largeobject.LargeObject
|
org.postgresql.largeobject.LargeObject
|
||||||
org.postgresql.largeobject.LargeObjectManager
|
org.postgresql.largeobject.LargeObjectManager
|
||||||
org.postgresql.PGConnection
|
|
||||||
org.postgresql.util.PGInterval
|
org.postgresql.util.PGInterval
|
||||||
org.postgresql.util.PGobject))
|
org.postgresql.util.PGobject))
|
||||||
|
|
||||||
(def ^:dynamic *conn* nil)
|
|
||||||
|
|
||||||
(declare open)
|
(declare open)
|
||||||
(declare create-pool)
|
(declare create-pool)
|
||||||
|
|
||||||
@ -298,7 +294,7 @@
|
|||||||
(defn insert!
|
(defn insert!
|
||||||
"A helper that builds an insert sql statement and executes it. By
|
"A helper that builds an insert sql statement and executes it. By
|
||||||
default returns the inserted row with all the field; you can delimit
|
default returns the inserted row with all the field; you can delimit
|
||||||
the returned columns with the `::sql/columns` option."
|
the returned columns with the `::columns` option."
|
||||||
[ds table params & {:as opts}]
|
[ds table params & {:as opts}]
|
||||||
(let [conn (get-connectable ds)
|
(let [conn (get-connectable ds)
|
||||||
sql (sql/insert table params opts)
|
sql (sql/insert table params opts)
|
||||||
@ -379,7 +375,9 @@
|
|||||||
|
|
||||||
(defn is-row-deleted?
|
(defn is-row-deleted?
|
||||||
[{:keys [deleted-at]}]
|
[{:keys [deleted-at]}]
|
||||||
(some? deleted-at))
|
(and (dt/instant? deleted-at)
|
||||||
|
(< (inst-ms deleted-at)
|
||||||
|
(inst-ms (dt/now)))))
|
||||||
|
|
||||||
(defn get*
|
(defn get*
|
||||||
"Retrieve a single row from database that matches a simple filters. Do
|
"Retrieve a single row from database that matches a simple filters. Do
|
||||||
@ -404,23 +402,6 @@
|
|||||||
:hint "database object not found"))
|
:hint "database object not found"))
|
||||||
row))
|
row))
|
||||||
|
|
||||||
(defn get-with-sql
|
|
||||||
[ds sql & {:as opts}]
|
|
||||||
(let [rows
|
|
||||||
(cond->> (exec! ds sql opts)
|
|
||||||
(::remove-deleted opts true)
|
|
||||||
(remove is-row-deleted?)
|
|
||||||
|
|
||||||
:always
|
|
||||||
(not-empty))]
|
|
||||||
|
|
||||||
(when (and (not rows) (::throw-if-not-exists opts true))
|
|
||||||
(ex/raise :type :not-found
|
|
||||||
:code :object-not-found
|
|
||||||
:hint "database object not found"))
|
|
||||||
|
|
||||||
(first rows)))
|
|
||||||
|
|
||||||
(def ^:private default-plan-opts
|
(def ^:private default-plan-opts
|
||||||
(-> default-opts
|
(-> default-opts
|
||||||
(assoc :fetch-size 1000)
|
(assoc :fetch-size 1000)
|
||||||
@ -575,10 +556,10 @@
|
|||||||
[system f & params]
|
[system f & params]
|
||||||
(cond
|
(cond
|
||||||
(connection? system)
|
(connection? system)
|
||||||
(apply run! {::conn system} f params)
|
(run! {::conn system} f)
|
||||||
|
|
||||||
(pool? system)
|
(pool? system)
|
||||||
(apply run! {::pool system} f params)
|
(run! {::pool system} f)
|
||||||
|
|
||||||
(::conn system)
|
(::conn system)
|
||||||
(apply f system params)
|
(apply f system params)
|
||||||
@ -602,7 +583,7 @@
|
|||||||
(string? o)
|
(string? o)
|
||||||
(pginterval o)
|
(pginterval o)
|
||||||
|
|
||||||
(ct/duration? o)
|
(dt/duration? o)
|
||||||
(interval (inst-ms o))
|
(interval (inst-ms o))
|
||||||
|
|
||||||
:else
|
:else
|
||||||
@ -616,7 +597,7 @@
|
|||||||
val (.getValue o)]
|
val (.getValue o)]
|
||||||
(if (or (= typ "json")
|
(if (or (= typ "json")
|
||||||
(= typ "jsonb"))
|
(= typ "jsonb"))
|
||||||
(json/decode val :key-fn keyword)
|
(json/decode val)
|
||||||
val))))
|
val))))
|
||||||
|
|
||||||
(defn decode-transit-pgobject
|
(defn decode-transit-pgobject
|
||||||
@ -657,7 +638,7 @@
|
|||||||
(when data
|
(when data
|
||||||
(doto (org.postgresql.util.PGobject.)
|
(doto (org.postgresql.util.PGobject.)
|
||||||
(.setType "jsonb")
|
(.setType "jsonb")
|
||||||
(.setValue (json/encode data)))))
|
(.setValue (json/encode-str data)))))
|
||||||
|
|
||||||
;; --- Locks
|
;; --- Locks
|
||||||
|
|
||||||
@ -703,14 +684,3 @@
|
|||||||
[cause]
|
[cause]
|
||||||
(and (sql-exception? cause)
|
(and (sql-exception? cause)
|
||||||
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
|
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
|
||||||
|
|
||||||
(defn duplicate-key-error?
|
|
||||||
[cause]
|
|
||||||
(and (sql-exception? cause)
|
|
||||||
(= "23505" (.getSQLState ^java.sql.SQLException cause))))
|
|
||||||
|
|
||||||
|
|
||||||
(extend-protocol jdbc.prepare/SettableParameter
|
|
||||||
clojure.lang.Keyword
|
|
||||||
(set-parameter [^clojure.lang.Keyword v ^PreparedStatement s ^long i]
|
|
||||||
(.setObject s i ^String (d/name v))))
|
|
||||||
|
|||||||
@ -53,15 +53,8 @@
|
|||||||
opts (cond-> opts
|
opts (cond-> opts
|
||||||
(::order-by opts) (assoc :order-by (::order-by opts))
|
(::order-by opts) (assoc :order-by (::order-by opts))
|
||||||
(::columns opts) (assoc :columns (::columns opts))
|
(::columns opts) (assoc :columns (::columns opts))
|
||||||
|
(::for-update opts) (assoc :suffix "FOR UPDATE")
|
||||||
(or (::db/for-update opts)
|
(::for-share opts) (assoc :suffix "FOR SHARE"))]
|
||||||
(::for-update opts))
|
|
||||||
(assoc :suffix "FOR UPDATE")
|
|
||||||
|
|
||||||
(or (::db/for-share opts)
|
|
||||||
(::for-share opts))
|
|
||||||
(assoc :suffix "FOR SHARE"))]
|
|
||||||
|
|
||||||
(sql/for-query table where-params opts))))
|
(sql/for-query table where-params opts))))
|
||||||
|
|
||||||
(defn update
|
(defn update
|
||||||
|
|||||||
@ -7,7 +7,6 @@
|
|||||||
(ns app.email
|
(ns app.email
|
||||||
"Main api for send emails."
|
"Main api for send emails."
|
||||||
(:require
|
(:require
|
||||||
[app.common.data :as d]
|
|
||||||
[app.common.data.macros :as dm]
|
[app.common.data.macros :as dm]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
@ -22,13 +21,13 @@
|
|||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
[integrant.core :as ig])
|
[integrant.core :as ig])
|
||||||
(:import
|
(:import
|
||||||
|
jakarta.mail.Message$RecipientType
|
||||||
|
jakarta.mail.Session
|
||||||
|
jakarta.mail.Transport
|
||||||
jakarta.mail.internet.InternetAddress
|
jakarta.mail.internet.InternetAddress
|
||||||
jakarta.mail.internet.MimeBodyPart
|
jakarta.mail.internet.MimeBodyPart
|
||||||
jakarta.mail.internet.MimeMessage
|
jakarta.mail.internet.MimeMessage
|
||||||
jakarta.mail.internet.MimeMultipart
|
jakarta.mail.internet.MimeMultipart
|
||||||
jakarta.mail.Message$RecipientType
|
|
||||||
jakarta.mail.Session
|
|
||||||
jakarta.mail.Transport
|
|
||||||
java.util.Properties))
|
java.util.Properties))
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
@ -94,42 +93,36 @@
|
|||||||
headers)))
|
headers)))
|
||||||
|
|
||||||
(defn- assign-body
|
(defn- assign-body
|
||||||
[^MimeMessage mmsg {:keys [body charset attachments] :or {charset "utf-8"}}]
|
[^MimeMessage mmsg {:keys [body charset] :or {charset "utf-8"}}]
|
||||||
(let [mixed-mpart (MimeMultipart. "mixed")]
|
(let [mpart (MimeMultipart. "mixed")]
|
||||||
(cond
|
(cond
|
||||||
(string? body)
|
(string? body)
|
||||||
(let [text-part (MimeBodyPart.)]
|
(let [bpart (MimeBodyPart.)]
|
||||||
(.setText text-part ^String body ^String charset)
|
(.setContent bpart ^String body (str "text/plain; charset=" charset))
|
||||||
(.addBodyPart mixed-mpart text-part))
|
(.addBodyPart mpart bpart))
|
||||||
|
|
||||||
|
(vector? body)
|
||||||
|
(let [mmp (MimeMultipart. "alternative")
|
||||||
|
mbp (MimeBodyPart.)]
|
||||||
|
(.addBodyPart mpart mbp)
|
||||||
|
(.setContent mbp mmp)
|
||||||
|
(doseq [item body]
|
||||||
|
(let [mbp (MimeBodyPart.)]
|
||||||
|
(.setContent mbp
|
||||||
|
^String (:content item)
|
||||||
|
^String (str (:type item "text/plain") "; charset=" charset))
|
||||||
|
(.addBodyPart mmp mbp))))
|
||||||
|
|
||||||
(map? body)
|
(map? body)
|
||||||
(let [content-part (MimeBodyPart.)
|
(let [bpart (MimeBodyPart.)]
|
||||||
alternative-mpart (MimeMultipart. "alternative")]
|
(.setContent bpart
|
||||||
|
^String (:content body)
|
||||||
(when-let [content (get body "text/plain")]
|
^String (str (:type body "text/plain") "; charset=" charset))
|
||||||
(let [text-part (MimeBodyPart.)]
|
(.addBodyPart mpart bpart))
|
||||||
(.setText text-part ^String content ^String charset)
|
|
||||||
(.addBodyPart alternative-mpart text-part)))
|
|
||||||
|
|
||||||
(when-let [content (get body "text/html")]
|
|
||||||
(let [html-part (MimeBodyPart.)]
|
|
||||||
(.setContent html-part ^String content
|
|
||||||
(str "text/html; charset=" charset))
|
|
||||||
(.addBodyPart alternative-mpart html-part)))
|
|
||||||
|
|
||||||
(.setContent content-part alternative-mpart)
|
|
||||||
(.addBodyPart mixed-mpart content-part))
|
|
||||||
|
|
||||||
:else
|
:else
|
||||||
(throw (IllegalArgumentException. "invalid email body provided")))
|
(throw (ex-info "Unsupported type" {:body body})))
|
||||||
|
(.setContent mmsg mpart)
|
||||||
(doseq [[name content] attachments]
|
|
||||||
(let [attachment-part (MimeBodyPart.)]
|
|
||||||
(.setFileName attachment-part ^String name)
|
|
||||||
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))
|
|
||||||
(.addBodyPart mixed-mpart attachment-part)))
|
|
||||||
|
|
||||||
(.setContent mmsg mixed-mpart)
|
|
||||||
mmsg))
|
mmsg))
|
||||||
|
|
||||||
(defn- opts->props
|
(defn- opts->props
|
||||||
@ -217,26 +210,24 @@
|
|||||||
(ex/raise :type :internal
|
(ex/raise :type :internal
|
||||||
:code :missing-email-templates))
|
:code :missing-email-templates))
|
||||||
{:subject subj
|
{:subject subj
|
||||||
:body (d/without-nils
|
:body (into
|
||||||
{"text/plain" text
|
[{:type "text/plain"
|
||||||
"text/html" html})}))
|
:content text}]
|
||||||
|
(when html
|
||||||
|
[{:type "text/html"
|
||||||
|
:content html}]))}))
|
||||||
|
|
||||||
(def ^:private schema:params
|
(def ^:private schema:context
|
||||||
[:map {:title "Email Params"}
|
[:map
|
||||||
[:to [:or ::sm/email [::sm/vec ::sm/email]]]
|
[:to [:or ::sm/email [::sm/vec ::sm/email]]]
|
||||||
[:reply-to {:optional true} ::sm/email]
|
[:reply-to {:optional true} ::sm/email]
|
||||||
[:from {:optional true} ::sm/email]
|
[:from {:optional true} ::sm/email]
|
||||||
[:lang {:optional true} ::sm/text]
|
[:lang {:optional true} ::sm/text]
|
||||||
[:subject {:optional true} ::sm/text]
|
|
||||||
[:priority {:optional true} [:enum :high :low]]
|
[:priority {:optional true} [:enum :high :low]]
|
||||||
[:extra-data {:optional true} ::sm/text]
|
[:extra-data {:optional true} ::sm/text]])
|
||||||
[:body {:optional true}
|
|
||||||
[:or :string [:map-of :string :string]]]
|
|
||||||
[:attachments {:optional true}
|
|
||||||
[:map-of :string :string]]])
|
|
||||||
|
|
||||||
(def ^:private check-params
|
(def ^:private check-context
|
||||||
(sm/check-fn schema:params))
|
(sm/check-fn schema:context))
|
||||||
|
|
||||||
(defn template-factory
|
(defn template-factory
|
||||||
[& {:keys [id schema]}]
|
[& {:keys [id schema]}]
|
||||||
@ -244,9 +235,9 @@
|
|||||||
(let [check-fn (if schema
|
(let [check-fn (if schema
|
||||||
(sm/check-fn schema)
|
(sm/check-fn schema)
|
||||||
(constantly nil))]
|
(constantly nil))]
|
||||||
(fn [params]
|
(fn [context]
|
||||||
(let [params (-> params check-params check-fn)
|
(let [context (-> context check-context check-fn)
|
||||||
email (build-email-template id params)]
|
email (build-email-template id context)]
|
||||||
(when-not email
|
(when-not email
|
||||||
(ex/raise :type :internal
|
(ex/raise :type :internal
|
||||||
:code :email-template-does-not-exists
|
:code :email-template-does-not-exists
|
||||||
@ -254,40 +245,35 @@
|
|||||||
:template-id id))
|
:template-id id))
|
||||||
|
|
||||||
(cond-> (assoc email :id (name id))
|
(cond-> (assoc email :id (name id))
|
||||||
(:extra-data params)
|
(:extra-data context)
|
||||||
(assoc :extra-data (:extra-data params))
|
(assoc :extra-data (:extra-data context))
|
||||||
|
|
||||||
(seq (:attachments params))
|
(:from context)
|
||||||
(assoc :attachments (:attachments params))
|
(assoc :from (:from context))
|
||||||
|
|
||||||
(:from params)
|
(:reply-to context)
|
||||||
(assoc :from (:from params))
|
(assoc :reply-to (:reply-to context))
|
||||||
|
|
||||||
(:reply-to params)
|
(:to context)
|
||||||
(assoc :reply-to (:reply-to params))
|
(assoc :to (:to context)))))))
|
||||||
|
|
||||||
(:to params)
|
|
||||||
(assoc :to (:to params)))))))
|
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
;; PUBLIC HIGH-LEVEL API
|
;; PUBLIC HIGH-LEVEL API
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
(defn render
|
(defn render
|
||||||
[email-factory params]
|
[email-factory context]
|
||||||
(email-factory params))
|
(email-factory context))
|
||||||
|
|
||||||
(defn send!
|
(defn send!
|
||||||
"Schedule an already defined email to be sent using asynchronously
|
"Schedule an already defined email to be sent using asynchronously
|
||||||
using worker task."
|
using worker task."
|
||||||
[{:keys [::conn ::factory] :as params}]
|
[{:keys [::conn ::factory] :as context}]
|
||||||
(assert (db/connectable? conn) "expected a valid database connection or pool")
|
(assert (db/connectable? conn) "expected a valid database connection or pool")
|
||||||
|
|
||||||
(let [email (if factory
|
(let [email (if factory
|
||||||
(factory params)
|
(factory context)
|
||||||
(-> params
|
(dissoc context ::conn))]
|
||||||
(dissoc params)
|
|
||||||
(check-params)))]
|
|
||||||
(wrk/submit! {::wrk/task :sendmail
|
(wrk/submit! {::wrk/task :sendmail
|
||||||
::wrk/delay 0
|
::wrk/delay 0
|
||||||
::wrk/max-retries 4
|
::wrk/max-retries 4
|
||||||
@ -357,10 +343,8 @@
|
|||||||
|
|
||||||
(def ^:private schema:feedback
|
(def ^:private schema:feedback
|
||||||
[:map
|
[:map
|
||||||
[:feedback-subject ::sm/text]
|
[:subject ::sm/text]
|
||||||
[:feedback-type ::sm/text]
|
[:content ::sm/text]])
|
||||||
[:feedback-content ::sm/text]
|
|
||||||
[:profile :map]])
|
|
||||||
|
|
||||||
(def user-feedback
|
(def user-feedback
|
||||||
"A profile feedback email."
|
"A profile feedback email."
|
||||||
@ -412,21 +396,6 @@
|
|||||||
:id ::invite-to-team
|
:id ::invite-to-team
|
||||||
:schema schema:invite-to-team))
|
:schema schema:invite-to-team))
|
||||||
|
|
||||||
(def ^:private schema:invite-to-org
|
|
||||||
[:map
|
|
||||||
[:invited-by ::sm/text]
|
|
||||||
[:organization-name ::sm/text]
|
|
||||||
[:organization-initials [:maybe :string]]
|
|
||||||
[:organization-logo ::sm/uri]
|
|
||||||
[:user-name [:maybe ::sm/text]]
|
|
||||||
[:token ::sm/text]])
|
|
||||||
|
|
||||||
(def invite-to-org
|
|
||||||
"Org member invitation email."
|
|
||||||
(template-factory
|
|
||||||
:id ::invite-to-org
|
|
||||||
:schema schema:invite-to-org))
|
|
||||||
|
|
||||||
(def ^:private schema:join-team
|
(def ^:private schema:join-team
|
||||||
[:map
|
[:map
|
||||||
[:invited-by ::sm/text]
|
[:invited-by ::sm/text]
|
||||||
|
|||||||
@ -36,18 +36,10 @@
|
|||||||
:cause cause)))))
|
:cause cause)))))
|
||||||
|
|
||||||
(defn contains?
|
(defn contains?
|
||||||
"Check if email is in the blacklist. Also matches subdomains: if
|
"Check if email is in the blacklist."
|
||||||
'somedomain.com' is blacklisted, 'xxx@foo.somedomain.com' will also
|
|
||||||
be rejected."
|
|
||||||
[{:keys [::email/blacklist]} email]
|
[{:keys [::email/blacklist]} email]
|
||||||
(let [[_ domain] (str/split email "@" 2)
|
(let [[_ domain] (str/split email "@" 2)]
|
||||||
parts (str/split (str/lower domain) #"\.")]
|
(c/contains? blacklist (str/lower domain))))
|
||||||
(loop [parts parts]
|
|
||||||
(if (empty? parts)
|
|
||||||
false
|
|
||||||
(if (c/contains? blacklist (str/join "." parts))
|
|
||||||
true
|
|
||||||
(recur (rest parts)))))))
|
|
||||||
|
|
||||||
(defn enabled?
|
(defn enabled?
|
||||||
"Check if the blacklist is enabled"
|
"Check if the blacklist is enabled"
|
||||||
|
|||||||
1850
backend/src/app/features/components_v2.clj
Normal file
1850
backend/src/app/features/components_v2.clj
Normal file
File diff suppressed because it is too large
Load Diff
@ -10,21 +10,42 @@
|
|||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
[app.common.schema :as sm]
|
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.types.objects-map :as omap]
|
|
||||||
[app.config :as cf]
|
|
||||||
[app.db :as db]
|
[app.db :as db]
|
||||||
[app.db.sql :as-alias sql]
|
[app.db.sql :as-alias sql]
|
||||||
[app.storage :as sto]
|
[app.storage :as sto]
|
||||||
[app.util.blob :as blob]
|
[app.util.blob :as blob]
|
||||||
[app.util.objects-map :as omap.legacy]
|
[app.util.objects-map :as omap]
|
||||||
[app.util.pointer-map :as pmap]))
|
[app.util.pointer-map :as pmap]))
|
||||||
|
|
||||||
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
;; OFFLOAD
|
||||||
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
|
(defn offloaded?
|
||||||
|
[file]
|
||||||
|
(= "objects-storage" (:data-backend file)))
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
;; OBJECTS-MAP
|
;; OBJECTS-MAP
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
|
(defn enable-objects-map
|
||||||
|
[file]
|
||||||
|
(let [update-page
|
||||||
|
(fn [page]
|
||||||
|
(if (and (pmap/pointer-map? page)
|
||||||
|
(not (pmap/loaded? page)))
|
||||||
|
page
|
||||||
|
(update page :objects omap/wrap)))
|
||||||
|
|
||||||
|
update-data
|
||||||
|
(fn [fdata]
|
||||||
|
(update fdata :pages-index d/update-vals update-page))]
|
||||||
|
|
||||||
|
(-> file
|
||||||
|
(update :data update-data)
|
||||||
|
(update :features conj "fdata/objects-map"))))
|
||||||
|
|
||||||
(defn process-objects
|
(defn process-objects
|
||||||
"Apply a function to all objects-map on the file. Usualy used for convert
|
"Apply a function to all objects-map on the file. Usualy used for convert
|
||||||
the objects-map instances to plain maps"
|
the objects-map instances to plain maps"
|
||||||
@ -34,237 +55,35 @@
|
|||||||
(fn [page]
|
(fn [page]
|
||||||
(update page :objects
|
(update page :objects
|
||||||
(fn [objects]
|
(fn [objects]
|
||||||
(if (or (omap/objects-map? objects)
|
(if (omap/objects-map? objects)
|
||||||
(omap.legacy/objects-map? objects))
|
|
||||||
(update-fn objects)
|
(update-fn objects)
|
||||||
objects)))))
|
objects)))))
|
||||||
fdata))
|
fdata))
|
||||||
|
|
||||||
|
|
||||||
(defn realize-objects
|
|
||||||
"Process a file and remove all instances of objects map realizing them
|
|
||||||
to a plain data. Used in operation where is more efficient have the
|
|
||||||
whole file loaded in memory or we going to persist it in an
|
|
||||||
alterantive storage."
|
|
||||||
[_cfg file]
|
|
||||||
(update file :data process-objects (partial into {})))
|
|
||||||
|
|
||||||
(defn enable-objects-map
|
|
||||||
[file & _opts]
|
|
||||||
(let [update-page
|
|
||||||
(fn [page]
|
|
||||||
(update page :objects omap/wrap))
|
|
||||||
|
|
||||||
update-data
|
|
||||||
(fn [fdata]
|
|
||||||
(update fdata :pages-index d/update-vals update-page))]
|
|
||||||
|
|
||||||
(-> file
|
|
||||||
(update :data update-data)
|
|
||||||
(update :features conj "fdata/objects-map"))))
|
|
||||||
|
|
||||||
(defn disable-objects-map
|
|
||||||
[file & _opts]
|
|
||||||
(let [update-page
|
|
||||||
(fn [page]
|
|
||||||
(update page :objects #(into {} %)))
|
|
||||||
|
|
||||||
update-data
|
|
||||||
(fn [fdata]
|
|
||||||
(update fdata :pages-index d/update-vals update-page))]
|
|
||||||
|
|
||||||
(-> file
|
|
||||||
(update :data update-data)
|
|
||||||
(update :features disj "fdata/objects-map"))))
|
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
;; STORAGE
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
|
|
||||||
(defmulti resolve-file-data
|
|
||||||
(fn [_cfg file] (get file :backend "legacy-db")))
|
|
||||||
|
|
||||||
(defmethod resolve-file-data "legacy-db"
|
|
||||||
[_cfg {:keys [legacy-data] :as file}]
|
|
||||||
(-> file
|
|
||||||
(assoc :data legacy-data)
|
|
||||||
(dissoc :legacy-data)))
|
|
||||||
|
|
||||||
(defmethod resolve-file-data "db"
|
|
||||||
[_cfg file]
|
|
||||||
(dissoc file :legacy-data))
|
|
||||||
|
|
||||||
(defmethod resolve-file-data "storage"
|
|
||||||
[cfg {:keys [metadata] :as file}]
|
|
||||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)
|
|
||||||
ref-id (:storage-ref-id metadata)
|
|
||||||
data (->> (sto/get-object storage ref-id)
|
|
||||||
(sto/get-object-bytes storage))]
|
|
||||||
(-> file
|
|
||||||
(assoc :data data)
|
|
||||||
(dissoc :legacy-data))))
|
|
||||||
|
|
||||||
(defn decode-file-data
|
|
||||||
[_cfg {:keys [data] :as file}]
|
|
||||||
(cond-> file
|
|
||||||
(bytes? data)
|
|
||||||
(assoc :data (blob/decode data))))
|
|
||||||
|
|
||||||
(def ^:private sql:insert-file-data
|
|
||||||
"INSERT INTO file_data (file_id, id, created_at, modified_at, deleted_at,
|
|
||||||
type, backend, metadata, data)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")
|
|
||||||
|
|
||||||
(def ^:private sql:upsert-file-data
|
|
||||||
(str sql:insert-file-data
|
|
||||||
" ON CONFLICT (file_id, id)
|
|
||||||
DO UPDATE SET modified_at=?,
|
|
||||||
deleted_at=?,
|
|
||||||
backend=?,
|
|
||||||
metadata=?,
|
|
||||||
data=?"))
|
|
||||||
|
|
||||||
(defn- upsert-in-database
|
|
||||||
[cfg {:keys [id file-id created-at modified-at deleted-at type backend data metadata]}]
|
|
||||||
(let [created-at (or created-at (ct/now))
|
|
||||||
metadata (some-> metadata db/json)
|
|
||||||
modified-at (or modified-at created-at)]
|
|
||||||
|
|
||||||
(db/exec-one! cfg [sql:upsert-file-data
|
|
||||||
file-id id
|
|
||||||
created-at
|
|
||||||
modified-at
|
|
||||||
deleted-at
|
|
||||||
type
|
|
||||||
backend
|
|
||||||
metadata
|
|
||||||
data
|
|
||||||
modified-at
|
|
||||||
deleted-at
|
|
||||||
backend
|
|
||||||
metadata
|
|
||||||
data])))
|
|
||||||
|
|
||||||
(defn- handle-persistence
|
|
||||||
[cfg {:keys [type backend id file-id data] :as params}]
|
|
||||||
|
|
||||||
(cond
|
|
||||||
(= backend "storage")
|
|
||||||
(let [storage (sto/resolve cfg)
|
|
||||||
content (sto/content data)
|
|
||||||
sobject (sto/put-object! storage
|
|
||||||
{::sto/content content
|
|
||||||
::sto/touch true
|
|
||||||
:bucket "file-data"
|
|
||||||
:content-type "application/octet-stream"
|
|
||||||
:file-id file-id
|
|
||||||
:id id})
|
|
||||||
metadata {:storage-ref-id (:id sobject)}
|
|
||||||
params (-> params
|
|
||||||
(assoc :metadata metadata)
|
|
||||||
(assoc :data nil))]
|
|
||||||
(upsert-in-database cfg params))
|
|
||||||
|
|
||||||
(= backend "db")
|
|
||||||
(->> (dissoc params :metadata)
|
|
||||||
(upsert-in-database cfg))
|
|
||||||
|
|
||||||
(= backend "legacy-db")
|
|
||||||
(cond
|
|
||||||
(= type "main")
|
|
||||||
(do
|
|
||||||
(db/delete! cfg :file-data
|
|
||||||
{:id id :file-id file-id :type "main"}
|
|
||||||
{::db/return-keys false})
|
|
||||||
(db/update! cfg :file
|
|
||||||
{:data data}
|
|
||||||
{:id file-id}
|
|
||||||
{::db/return-keys false}))
|
|
||||||
|
|
||||||
(= type "snapshot")
|
|
||||||
(do
|
|
||||||
(db/delete! cfg :file-data
|
|
||||||
{:id id :file-id file-id :type "snapshot"}
|
|
||||||
{::db/return-keys false})
|
|
||||||
(db/update! cfg :file-change
|
|
||||||
{:data data}
|
|
||||||
{:file-id file-id :id id}
|
|
||||||
{::db/return-keys false}))
|
|
||||||
|
|
||||||
(= type "fragment")
|
|
||||||
(upsert-in-database cfg
|
|
||||||
(-> (dissoc params :metadata)
|
|
||||||
(assoc :backend "db")))
|
|
||||||
|
|
||||||
:else
|
|
||||||
(throw (RuntimeException. "not implemented")))
|
|
||||||
|
|
||||||
:else
|
|
||||||
(throw (IllegalArgumentException.
|
|
||||||
(str "backend '" backend "' not supported")))))
|
|
||||||
|
|
||||||
(defn process-metadata
|
|
||||||
[cfg metadata]
|
|
||||||
(when-let [storage-id (:storage-ref-id metadata)]
|
|
||||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
|
|
||||||
(sto/touch-object! storage storage-id))))
|
|
||||||
|
|
||||||
(defn- default-backend
|
|
||||||
[backend]
|
|
||||||
(or backend (cf/get :file-data-backend)))
|
|
||||||
|
|
||||||
(def ^:private schema:metadata
|
|
||||||
[:map {:title "Metadata"}
|
|
||||||
[:storage-ref-id {:optional true} ::sm/uuid]])
|
|
||||||
|
|
||||||
(def decode-metadata-with-schema
|
|
||||||
(sm/decoder schema:metadata sm/json-transformer))
|
|
||||||
|
|
||||||
(defn decode-metadata
|
|
||||||
[metadata]
|
|
||||||
(some-> metadata
|
|
||||||
(db/decode-json-pgobject)
|
|
||||||
(decode-metadata-with-schema)))
|
|
||||||
|
|
||||||
(def ^:private schema:update-params
|
|
||||||
[:map {:closed true}
|
|
||||||
[:id ::sm/uuid]
|
|
||||||
[:type [:enum "main" "snapshot" "fragment"]]
|
|
||||||
[:file-id ::sm/uuid]
|
|
||||||
[:backend {:optional true} [:enum "db" "legacy-db" "storage"]]
|
|
||||||
[:metadata {:optional true} [:maybe schema:metadata]]
|
|
||||||
[:data {:optional true} bytes?]
|
|
||||||
[:created-at {:optional true} ::ct/inst]
|
|
||||||
[:modified-at {:optional true} [:maybe ::ct/inst]]
|
|
||||||
[:deleted-at {:optional true} [:maybe ::ct/inst]]])
|
|
||||||
|
|
||||||
(def ^:private check-update-params
|
|
||||||
(sm/check-fn schema:update-params :hint "invalid params received for update"))
|
|
||||||
|
|
||||||
(defn upsert!
|
|
||||||
"Create or update file data"
|
|
||||||
[cfg params & {:as opts}]
|
|
||||||
(let [params (-> (check-update-params params)
|
|
||||||
(update :backend default-backend))]
|
|
||||||
|
|
||||||
(some->> (:metadata params)
|
|
||||||
(process-metadata cfg))
|
|
||||||
|
|
||||||
(-> (handle-persistence cfg params)
|
|
||||||
(db/get-update-count)
|
|
||||||
(pos?))))
|
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
;; POINTER-MAP
|
;; POINTER-MAP
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
|
(defn get-file-data
|
||||||
|
"Get file data given a file instance."
|
||||||
|
[system file]
|
||||||
|
(if (offloaded? file)
|
||||||
|
(let [storage (sto/resolve system ::db/reuse-conn true)]
|
||||||
|
(->> (sto/get-object storage (:data-ref-id file))
|
||||||
|
(sto/get-object-bytes storage)))
|
||||||
|
(:data file)))
|
||||||
|
|
||||||
|
(defn resolve-file-data
|
||||||
|
[system file]
|
||||||
|
(let [data (get-file-data system file)]
|
||||||
|
(assoc file :data data)))
|
||||||
|
|
||||||
(defn load-pointer
|
(defn load-pointer
|
||||||
"A database loader pointer helper"
|
"A database loader pointer helper"
|
||||||
[cfg file-id id]
|
[system file-id id]
|
||||||
(let [fragment (some-> (db/get* cfg :file-data
|
(let [fragment (db/get* system :file-data-fragment
|
||||||
{:id id :file-id file-id :type "fragment"}
|
{:id id :file-id file-id}
|
||||||
{::sql/columns [:data :backend :id :metadata]})
|
{::sql/columns [:data :data-backend :data-ref-id :id]})]
|
||||||
(update :metadata decode-metadata))]
|
|
||||||
|
|
||||||
(l/trc :hint "load pointer"
|
(l/trc :hint "load pointer"
|
||||||
:file-id (str file-id)
|
:file-id (str file-id)
|
||||||
@ -278,21 +97,22 @@
|
|||||||
:file-id file-id
|
:file-id file-id
|
||||||
:fragment-id id))
|
:fragment-id id))
|
||||||
|
|
||||||
(-> (resolve-file-data cfg fragment)
|
(let [data (get-file-data system fragment)]
|
||||||
(get :data)
|
;; FIXME: conditional thread scheduling for decoding big objects
|
||||||
(blob/decode))))
|
(blob/decode data))))
|
||||||
|
|
||||||
(defn persist-pointers!
|
(defn persist-pointers!
|
||||||
"Persist all currently tracked pointer objects"
|
"Persist all currently tracked pointer objects"
|
||||||
[cfg file-id]
|
[system file-id]
|
||||||
(doseq [[id item] @pmap/*tracked*]
|
(let [conn (db/get-connection system)]
|
||||||
(when (pmap/modified? item)
|
(doseq [[id item] @pmap/*tracked*]
|
||||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
(when (pmap/modified? item)
|
||||||
(let [content (-> item deref blob/encode)]
|
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||||
(upsert! cfg {:id id
|
(let [content (-> item deref blob/encode)]
|
||||||
:file-id file-id
|
(db/insert! conn :file-data-fragment
|
||||||
:type "fragment"
|
{:id id
|
||||||
:data content})))))
|
:file-id file-id
|
||||||
|
:data content}))))))
|
||||||
|
|
||||||
(defn process-pointers
|
(defn process-pointers
|
||||||
"Apply a function to all pointers on the file. Usuly used for
|
"Apply a function to all pointers on the file. Usuly used for
|
||||||
@ -306,14 +126,6 @@
|
|||||||
(d/update-vals update-fn')
|
(d/update-vals update-fn')
|
||||||
(update :pages-index d/update-vals update-fn'))))
|
(update :pages-index d/update-vals update-fn'))))
|
||||||
|
|
||||||
(defn realize-pointers
|
|
||||||
"Process a file and remove all instances of pointers realizing them to
|
|
||||||
a plain data. Used in operation where is more efficient have the
|
|
||||||
whole file loaded in memory."
|
|
||||||
[cfg {:keys [id] :as file}]
|
|
||||||
(binding [pmap/*load-fn* (partial load-pointer cfg id)]
|
|
||||||
(update file :data process-pointers deref)))
|
|
||||||
|
|
||||||
(defn get-used-pointer-ids
|
(defn get-used-pointer-ids
|
||||||
"Given a file, return all pointer ids used in the data."
|
"Given a file, return all pointer ids used in the data."
|
||||||
[fdata]
|
[fdata]
|
||||||
@ -324,21 +136,10 @@
|
|||||||
|
|
||||||
(defn enable-pointer-map
|
(defn enable-pointer-map
|
||||||
"Enable the fdata/pointer-map feature on the file."
|
"Enable the fdata/pointer-map feature on the file."
|
||||||
[file & _opts]
|
[file]
|
||||||
(-> file
|
(-> file
|
||||||
(update :data (fn [fdata]
|
(update :data (fn [fdata]
|
||||||
(-> fdata
|
(-> fdata
|
||||||
(update :pages-index d/update-vals pmap/wrap)
|
(update :pages-index d/update-vals pmap/wrap)
|
||||||
(d/update-when :components pmap/wrap))))
|
(d/update-when :components pmap/wrap))))
|
||||||
(update :features conj "fdata/pointer-map")))
|
(update :features conj "fdata/pointer-map")))
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
;; GENERAL PURPOSE HELPERS
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
||||||
|
|
||||||
(defn realize
|
|
||||||
"A helper that combines realize-pointers and realize-objects"
|
|
||||||
[cfg file]
|
|
||||||
(->> file
|
|
||||||
(realize-pointers cfg)
|
|
||||||
(realize-objects cfg)))
|
|
||||||
|
|||||||
@ -8,7 +8,6 @@
|
|||||||
"Backend specific code for file migrations. Implemented as permanent feature of files."
|
"Backend specific code for file migrations. Implemented as permanent feature of files."
|
||||||
(:require
|
(:require
|
||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.exceptions :as ex]
|
|
||||||
[app.common.files.migrations :as fmg :refer [xf:map-name]]
|
[app.common.files.migrations :as fmg :refer [xf:map-name]]
|
||||||
[app.db :as db]
|
[app.db :as db]
|
||||||
[app.db.sql :as-alias sql]))
|
[app.db.sql :as-alias sql]))
|
||||||
@ -27,27 +26,14 @@
|
|||||||
(defn upsert-migrations!
|
(defn upsert-migrations!
|
||||||
"Persist or update file migrations. Return the updated/inserted number
|
"Persist or update file migrations. Return the updated/inserted number
|
||||||
of rows"
|
of rows"
|
||||||
[cfg {:keys [id] :as file}]
|
[conn {:keys [id] :as file}]
|
||||||
(let [conn (db/get-connection cfg)
|
(let [migrations (or (-> file meta ::fmg/migrated)
|
||||||
migrations (or (-> file meta ::fmg/migrated)
|
(-> file :migrations not-empty)
|
||||||
(-> file :migrations))
|
fmg/available-migrations)
|
||||||
columns [:file-id :name]
|
columns [:file-id :name]
|
||||||
rows (->> migrations
|
rows (mapv (fn [name] [id name]) migrations)]
|
||||||
(mapv (fn [name] [id name]))
|
|
||||||
(not-empty))]
|
|
||||||
|
|
||||||
(when-not rows
|
|
||||||
(ex/raise :type :internal
|
|
||||||
:code :missing-migrations
|
|
||||||
:hint "no migrations available on file"))
|
|
||||||
|
|
||||||
(-> (db/insert-many! conn :file-migration columns rows
|
(-> (db/insert-many! conn :file-migration columns rows
|
||||||
{::db/return-keys false
|
{::db/return-keys false
|
||||||
::sql/on-conflict-do-nothing true})
|
::sql/on-conflict-do-nothing true})
|
||||||
(db/get-update-count))))
|
(db/get-update-count))))
|
||||||
|
|
||||||
(defn reset-migrations!
|
|
||||||
"Replace file migrations"
|
|
||||||
[cfg {:keys [id] :as file}]
|
|
||||||
(db/delete! cfg :file-migration {:file-id id})
|
|
||||||
(upsert-migrations! cfg file))
|
|
||||||
|
|||||||
@ -1,447 +0,0 @@
|
|||||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
|
||||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
||||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
||||||
;;
|
|
||||||
;; Copyright (c) KALEIDOS INC
|
|
||||||
|
|
||||||
(ns app.features.file-snapshots
|
|
||||||
(:require
|
|
||||||
[app.binfile.common :as bfc]
|
|
||||||
[app.common.data :as d]
|
|
||||||
[app.common.exceptions :as ex]
|
|
||||||
[app.common.features :as-alias cfeat]
|
|
||||||
[app.common.files.migrations :as fmg]
|
|
||||||
[app.common.logging :as l]
|
|
||||||
[app.common.schema :as sm]
|
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.common.uuid :as uuid]
|
|
||||||
[app.config :as cf]
|
|
||||||
[app.db :as db]
|
|
||||||
[app.db.sql :as-alias sql]
|
|
||||||
[app.features.fdata :as fdata]
|
|
||||||
[app.storage :as sto]
|
|
||||||
[app.util.blob :as blob]
|
|
||||||
[app.worker :as wrk]
|
|
||||||
[cuerdas.core :as str]))
|
|
||||||
|
|
||||||
(def sql:snapshots
|
|
||||||
"SELECT c.id,
|
|
||||||
c.label,
|
|
||||||
c.created_at,
|
|
||||||
c.updated_at AS modified_at,
|
|
||||||
c.deleted_at,
|
|
||||||
c.profile_id,
|
|
||||||
c.created_by,
|
|
||||||
c.locked_by,
|
|
||||||
c.revn,
|
|
||||||
c.features,
|
|
||||||
c.migrations,
|
|
||||||
c.version,
|
|
||||||
c.file_id,
|
|
||||||
c.data AS legacy_data,
|
|
||||||
fd.data AS data,
|
|
||||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
|
||||||
fd.metadata AS metadata
|
|
||||||
FROM file_change AS c
|
|
||||||
LEFT JOIN file_data AS fd ON (fd.file_id = c.file_id
|
|
||||||
AND fd.id = c.id
|
|
||||||
AND fd.type = 'snapshot')
|
|
||||||
WHERE c.label IS NOT NULL")
|
|
||||||
|
|
||||||
(defn- decode-snapshot
|
|
||||||
[snapshot]
|
|
||||||
(some-> snapshot
|
|
||||||
(-> (d/update-when :metadata fdata/decode-metadata)
|
|
||||||
(d/update-when :migrations db/decode-pgarray [])
|
|
||||||
(d/update-when :features db/decode-pgarray #{}))))
|
|
||||||
|
|
||||||
(def ^:private sql:get-minimal-file
|
|
||||||
"SELECT f.id,
|
|
||||||
f.revn,
|
|
||||||
f.modified_at,
|
|
||||||
f.deleted_at,
|
|
||||||
fd.backend AS backend,
|
|
||||||
fd.metadata AS metadata
|
|
||||||
FROM file AS f
|
|
||||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
|
||||||
WHERE f.id = ?")
|
|
||||||
|
|
||||||
(defn- get-minimal-file
|
|
||||||
[cfg id & {:as opts}]
|
|
||||||
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
|
|
||||||
(d/update-when :metadata fdata/decode-metadata)))
|
|
||||||
|
|
||||||
(def ^:private sql:get-snapshot-without-data
|
|
||||||
(str "WITH snapshots AS (" sql:snapshots ")"
|
|
||||||
"SELECT c.id,
|
|
||||||
c.label,
|
|
||||||
c.revn,
|
|
||||||
c.created_at,
|
|
||||||
c.modified_at,
|
|
||||||
c.deleted_at,
|
|
||||||
c.profile_id,
|
|
||||||
c.created_by,
|
|
||||||
c.locked_by,
|
|
||||||
c.features,
|
|
||||||
c.metadata,
|
|
||||||
c.migrations,
|
|
||||||
c.version,
|
|
||||||
c.file_id
|
|
||||||
FROM snapshots AS c
|
|
||||||
WHERE c.id = ?
|
|
||||||
AND CASE WHEN c.created_by = 'user'
|
|
||||||
THEN c.deleted_at IS NULL
|
|
||||||
WHEN c.created_by = 'system'
|
|
||||||
THEN c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz
|
|
||||||
END"))
|
|
||||||
|
|
||||||
(defn get-minimal-snapshot
|
|
||||||
[cfg snapshot-id]
|
|
||||||
(let [now (ct/now)]
|
|
||||||
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id now]
|
|
||||||
{::db/remove-deleted false})
|
|
||||||
(decode-snapshot))))
|
|
||||||
|
|
||||||
(def ^:private sql:get-snapshot
|
|
||||||
(str sql:snapshots
|
|
||||||
" AND c.file_id = ?
|
|
||||||
AND c.id = ?
|
|
||||||
AND CASE WHEN c.created_by = 'user'
|
|
||||||
THEN (c.deleted_at IS NULL)
|
|
||||||
WHEN c.created_by = 'system'
|
|
||||||
THEN (c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz)
|
|
||||||
END"))
|
|
||||||
|
|
||||||
(defn get-snapshot-data
|
|
||||||
"Get a fully decoded snapshot for read-only preview or restoration.
|
|
||||||
Returns the snapshot map with decoded :data field."
|
|
||||||
[cfg file-id snapshot-id]
|
|
||||||
(let [now (ct/now)]
|
|
||||||
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id now]
|
|
||||||
{::db/remove-deleted false})
|
|
||||||
(decode-snapshot)
|
|
||||||
(fdata/resolve-file-data cfg)
|
|
||||||
(fdata/decode-file-data cfg))))
|
|
||||||
|
|
||||||
(def ^:private sql:get-visible-snapshots
|
|
||||||
(str "WITH "
|
|
||||||
"snapshots1 AS ( " sql:snapshots "),"
|
|
||||||
"snapshots2 AS (
|
|
||||||
SELECT c.id,
|
|
||||||
c.label,
|
|
||||||
c.revn,
|
|
||||||
c.version,
|
|
||||||
c.created_at,
|
|
||||||
c.modified_at,
|
|
||||||
c.created_by,
|
|
||||||
c.locked_by,
|
|
||||||
c.profile_id,
|
|
||||||
c.deleted_at
|
|
||||||
FROM snapshots1 AS c
|
|
||||||
WHERE c.file_id = ?
|
|
||||||
ORDER BY c.created_at DESC
|
|
||||||
), snapshots3 AS (
|
|
||||||
(SELECT * FROM snapshots2
|
|
||||||
WHERE created_by = 'system'
|
|
||||||
AND (deleted_at IS NULL OR
|
|
||||||
deleted_at >= ?::timestamptz)
|
|
||||||
LIMIT 500)
|
|
||||||
UNION ALL
|
|
||||||
(SELECT * FROM snapshots2
|
|
||||||
WHERE created_by = 'user'
|
|
||||||
AND deleted_at IS NULL
|
|
||||||
LIMIT 500)
|
|
||||||
)
|
|
||||||
SELECT * FROM snapshots3;"))
|
|
||||||
|
|
||||||
(defn get-visible-snapshots
|
|
||||||
"Return a list of snapshots fecheable from the API, it has a limited
|
|
||||||
set of fields and applies big but safe limits over all available
|
|
||||||
snapshots. It return a ordered vector by the snapshot date of
|
|
||||||
creation."
|
|
||||||
[cfg file-id]
|
|
||||||
(let [now (ct/now)]
|
|
||||||
(->> (db/exec! cfg [sql:get-visible-snapshots file-id now])
|
|
||||||
(mapv decode-snapshot))))
|
|
||||||
|
|
||||||
(def ^:private schema:decoded-file
|
|
||||||
[:map {:title "DecodedFile"}
|
|
||||||
[:id ::sm/uuid]
|
|
||||||
[:revn :int]
|
|
||||||
[:vern :int]
|
|
||||||
[:data :map]
|
|
||||||
[:version :int]
|
|
||||||
[:features ::cfeat/features]
|
|
||||||
[:migrations [::sm/set :string]]])
|
|
||||||
|
|
||||||
(def ^:private schema:snapshot
|
|
||||||
[:map {:title "Snapshot"}
|
|
||||||
[:id ::sm/uuid]
|
|
||||||
[:revn [::sm/int {:min 0}]]
|
|
||||||
[:version [::sm/int {:min 0}]]
|
|
||||||
[:features ::cfeat/features]
|
|
||||||
[:migrations [::sm/set ::sm/text]]
|
|
||||||
[:profile-id {:optional true} ::sm/uuid]
|
|
||||||
[:label ::sm/text]
|
|
||||||
[:file-id ::sm/uuid]
|
|
||||||
[:created-by [:enum "system" "user" "admin"]]
|
|
||||||
[:deleted-at {:optional true} ::ct/inst]
|
|
||||||
[:modified-at ::ct/inst]
|
|
||||||
[:created-at ::ct/inst]])
|
|
||||||
|
|
||||||
(def ^:private check-snapshot
|
|
||||||
(sm/check-fn schema:snapshot))
|
|
||||||
|
|
||||||
(def ^:private check-decoded-file
|
|
||||||
(sm/check-fn schema:decoded-file))
|
|
||||||
|
|
||||||
(defn- generate-snapshot-label
|
|
||||||
[]
|
|
||||||
(let [ts (-> (ct/now)
|
|
||||||
(ct/format-inst)
|
|
||||||
(str/replace #"[T:\.]" "-")
|
|
||||||
(str/rtrim "Z"))]
|
|
||||||
(str "snapshot-" ts)))
|
|
||||||
|
|
||||||
(def ^:private schema:create-params
|
|
||||||
[:map {:title "SnapshotCreateParams"}
|
|
||||||
[:profile-id ::sm/uuid]
|
|
||||||
[:created-by {:optional true} [:enum "user" "system"]]
|
|
||||||
[:label {:optional true} ::sm/text]
|
|
||||||
[:session-id {:optional true} ::sm/uuid]
|
|
||||||
[:modified-at {:optional true} ::ct/inst]
|
|
||||||
[:deleted-at {:optional true} ::ct/inst]])
|
|
||||||
|
|
||||||
(def ^:private check-create-params
|
|
||||||
(sm/check-fn schema:create-params))
|
|
||||||
|
|
||||||
(defn create!
|
|
||||||
"Create a file snapshot; expects a non-encoded file"
|
|
||||||
[cfg file & {:as params}]
|
|
||||||
(let [{:keys [label created-by deleted-at profile-id session-id]}
|
|
||||||
(check-create-params params)
|
|
||||||
|
|
||||||
file
|
|
||||||
(check-decoded-file file)
|
|
||||||
|
|
||||||
created-by
|
|
||||||
(or created-by "system")
|
|
||||||
|
|
||||||
snapshot-id
|
|
||||||
(uuid/next)
|
|
||||||
|
|
||||||
created-at
|
|
||||||
(ct/now)
|
|
||||||
|
|
||||||
deleted-at
|
|
||||||
(or deleted-at
|
|
||||||
(if (= created-by "system")
|
|
||||||
(ct/in-future (cf/get-deletion-delay))
|
|
||||||
nil))
|
|
||||||
|
|
||||||
label
|
|
||||||
(or label (generate-snapshot-label))
|
|
||||||
|
|
||||||
snapshot
|
|
||||||
(cond-> {:id snapshot-id
|
|
||||||
:revn (:revn file)
|
|
||||||
:version (:version file)
|
|
||||||
:file-id (:id file)
|
|
||||||
:features (:features file)
|
|
||||||
:migrations (:migrations file)
|
|
||||||
:label label
|
|
||||||
:created-at created-at
|
|
||||||
:modified-at created-at
|
|
||||||
:created-by created-by}
|
|
||||||
|
|
||||||
deleted-at
|
|
||||||
(assoc :deleted-at deleted-at)
|
|
||||||
|
|
||||||
:always
|
|
||||||
(check-snapshot))]
|
|
||||||
|
|
||||||
(db/insert! cfg :file-change
|
|
||||||
(-> snapshot
|
|
||||||
(update :features into-array)
|
|
||||||
(update :migrations into-array)
|
|
||||||
(assoc :updated-at created-at)
|
|
||||||
(assoc :profile-id profile-id)
|
|
||||||
(assoc :session-id session-id)
|
|
||||||
(dissoc :modified-at))
|
|
||||||
{::db/return-keys false})
|
|
||||||
|
|
||||||
(fdata/upsert! cfg
|
|
||||||
{:id snapshot-id
|
|
||||||
:file-id (:id file)
|
|
||||||
:type "snapshot"
|
|
||||||
:data (blob/encode (:data file))
|
|
||||||
:created-at created-at
|
|
||||||
:deleted-at deleted-at})
|
|
||||||
|
|
||||||
snapshot))
|
|
||||||
|
|
||||||
(def ^:private schema:update-params
|
|
||||||
[:map {:title "SnapshotUpdateParams"}
|
|
||||||
[:id ::sm/uuid]
|
|
||||||
[:file-id ::sm/uuid]
|
|
||||||
[:label ::sm/text]
|
|
||||||
[:modified-at {:optional true} ::ct/inst]])
|
|
||||||
|
|
||||||
(def ^:private check-update-params
|
|
||||||
(sm/check-fn schema:update-params))
|
|
||||||
|
|
||||||
(defn update!
|
|
||||||
[cfg params]
|
|
||||||
|
|
||||||
(let [{:keys [id file-id label modified-at]}
|
|
||||||
(check-update-params params)
|
|
||||||
|
|
||||||
modified-at
|
|
||||||
(or modified-at (ct/now))]
|
|
||||||
|
|
||||||
(db/update! cfg :file-data
|
|
||||||
{:deleted-at nil
|
|
||||||
:modified-at modified-at}
|
|
||||||
{:file-id file-id
|
|
||||||
:id id
|
|
||||||
:type "snapshot"}
|
|
||||||
{::db/return-keys false})
|
|
||||||
|
|
||||||
(-> (db/update! cfg :file-change
|
|
||||||
{:label label
|
|
||||||
:created-by "user"
|
|
||||||
:updated-at modified-at
|
|
||||||
:deleted-at nil}
|
|
||||||
{:file-id file-id
|
|
||||||
:id id}
|
|
||||||
{::db/return-keys false})
|
|
||||||
(db/get-update-count)
|
|
||||||
(pos?))))
|
|
||||||
|
|
||||||
(defn restore!
|
|
||||||
[{:keys [::db/conn] :as cfg} file-id snapshot-id]
|
|
||||||
(let [file (get-minimal-file conn file-id {::db/for-update true})
|
|
||||||
vern (rand-int Integer/MAX_VALUE)
|
|
||||||
|
|
||||||
storage
|
|
||||||
(sto/resolve cfg {::db/reuse-conn true})
|
|
||||||
|
|
||||||
snapshot
|
|
||||||
(get-snapshot-data cfg file-id snapshot-id)]
|
|
||||||
|
|
||||||
(when-not snapshot
|
|
||||||
(ex/raise :type :not-found
|
|
||||||
:code :snapshot-not-found
|
|
||||||
:hint "unable to find snapshot with the provided label"
|
|
||||||
:snapshot-id snapshot-id
|
|
||||||
:file-id file-id))
|
|
||||||
|
|
||||||
(when-not (:data snapshot)
|
|
||||||
(ex/raise :type :internal
|
|
||||||
:code :snapshot-without-data
|
|
||||||
:hint "snapshot has no data"
|
|
||||||
:label (:label snapshot)
|
|
||||||
:file-id file-id))
|
|
||||||
|
|
||||||
(let [;; If the snapshot has applied migrations stored, we reuse
|
|
||||||
;; them, if not, we take a safest set of migrations as
|
|
||||||
;; starting point. This is because, at the time of
|
|
||||||
;; implementing snapshots, migrations were not taken into
|
|
||||||
;; account so we need to make this backward compatible in
|
|
||||||
;; some way.
|
|
||||||
migrations
|
|
||||||
(or (:migrations snapshot)
|
|
||||||
(fmg/generate-migrations-from-version 67))
|
|
||||||
|
|
||||||
file
|
|
||||||
(-> file
|
|
||||||
(update :revn inc)
|
|
||||||
(assoc :migrations migrations)
|
|
||||||
(assoc :data (:data snapshot))
|
|
||||||
(assoc :vern vern)
|
|
||||||
(assoc :version (:version snapshot))
|
|
||||||
(assoc :has-media-trimmed false)
|
|
||||||
(assoc :modified-at (:modified-at snapshot))
|
|
||||||
(assoc :features (:features snapshot)))]
|
|
||||||
|
|
||||||
(l/dbg :hint "restoring snapshot"
|
|
||||||
:file-id (str file-id)
|
|
||||||
:label (:label snapshot)
|
|
||||||
:snapshot-id (str (:id snapshot)))
|
|
||||||
|
|
||||||
;; In the same way, on reseting the file data, we need to restore
|
|
||||||
;; the applied migrations on the moment of taking the snapshot
|
|
||||||
(bfc/update-file! cfg file ::bfc/reset-migrations? true)
|
|
||||||
|
|
||||||
;; FIXME: this should be separated functions, we should not have
|
|
||||||
;; inline sql here.
|
|
||||||
|
|
||||||
;; clean object thumbnails
|
|
||||||
(let [sql (str "update file_tagged_object_thumbnail "
|
|
||||||
" set deleted_at = now() "
|
|
||||||
" where file_id=? returning media_id")
|
|
||||||
res (db/exec! conn [sql file-id])]
|
|
||||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
|
||||||
(sto/touch-object! storage media-id)))
|
|
||||||
|
|
||||||
;; clean file thumbnails
|
|
||||||
(let [sql (str "update file_thumbnail "
|
|
||||||
" set deleted_at = now() "
|
|
||||||
" where file_id=? returning media_id")
|
|
||||||
res (db/exec! conn [sql file-id])]
|
|
||||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
|
||||||
(sto/touch-object! storage media-id)))
|
|
||||||
|
|
||||||
vern)))
|
|
||||||
|
|
||||||
(defn delete!
|
|
||||||
[cfg & {:keys [id file-id deleted-at]}]
|
|
||||||
(assert (uuid? id) "missing id")
|
|
||||||
(assert (uuid? file-id) "missing file-id")
|
|
||||||
(assert (ct/inst? deleted-at) "missing deleted-at")
|
|
||||||
|
|
||||||
(wrk/submit! {::db/conn (db/get-connection cfg)
|
|
||||||
::wrk/task :delete-object
|
|
||||||
::wrk/params {:object :snapshot
|
|
||||||
:deleted-at deleted-at
|
|
||||||
:file-id file-id
|
|
||||||
:id id}})
|
|
||||||
(db/update! cfg :file-change
|
|
||||||
{:deleted-at deleted-at}
|
|
||||||
{:id id :file-id file-id}
|
|
||||||
{::db/return-keys false})
|
|
||||||
true)
|
|
||||||
|
|
||||||
(def ^:private sql:get-snapshots
|
|
||||||
(str sql:snapshots " AND c.file_id = ?"))
|
|
||||||
|
|
||||||
(defn lock-by!
|
|
||||||
[conn id profile-id]
|
|
||||||
(-> (db/update! conn :file-change
|
|
||||||
{:locked-by profile-id}
|
|
||||||
{:id id}
|
|
||||||
{::db/return-keys false})
|
|
||||||
(db/get-update-count)
|
|
||||||
(pos?)))
|
|
||||||
|
|
||||||
(defn unlock!
|
|
||||||
[conn id]
|
|
||||||
(-> (db/update! conn :file-change
|
|
||||||
{:locked-by nil}
|
|
||||||
{:id id}
|
|
||||||
{::db/return-keys false})
|
|
||||||
(db/get-update-count)
|
|
||||||
(pos?)))
|
|
||||||
|
|
||||||
(defn reduce-snapshots
|
|
||||||
"Process the file snapshots using efficient reduction; the file
|
|
||||||
reduction comes with all snapshots, including maked as deleted"
|
|
||||||
[cfg file-id xform f init]
|
|
||||||
(let [conn (db/get-connection cfg)
|
|
||||||
xform (comp
|
|
||||||
(map (partial fdata/resolve-file-data cfg))
|
|
||||||
(map (partial fdata/decode-file-data cfg))
|
|
||||||
xform)]
|
|
||||||
|
|
||||||
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
|
|
||||||
(transduce xform f init))))
|
|
||||||
@ -1,32 +0,0 @@
|
|||||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
|
||||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
||||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
||||||
;;
|
|
||||||
;; Copyright (c) KALEIDOS INC
|
|
||||||
|
|
||||||
(ns app.features.logical-deletion
|
|
||||||
"A code related to handle logical deletion mechanism"
|
|
||||||
(:require
|
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.config :as cf]))
|
|
||||||
|
|
||||||
(def ^:private canceled-status
|
|
||||||
#{"canceled" "unpaid"})
|
|
||||||
|
|
||||||
(defn get-deletion-delay
|
|
||||||
"Calculate the next deleted-at for a resource (file, team, etc) in function
|
|
||||||
of team settings"
|
|
||||||
[team]
|
|
||||||
(if-let [{:keys [type status]} (get team :subscription)]
|
|
||||||
(cond
|
|
||||||
(and (= "unlimited" type) (not (contains? canceled-status status)))
|
|
||||||
(ct/duration {:days 30})
|
|
||||||
|
|
||||||
(and (= "enterprise" type) (not (contains? canceled-status status)))
|
|
||||||
(ct/duration {:days 90})
|
|
||||||
|
|
||||||
:else
|
|
||||||
(cf/get-deletion-delay))
|
|
||||||
|
|
||||||
(cf/get-deletion-delay)))
|
|
||||||
|
|
||||||
@ -17,16 +17,16 @@
|
|||||||
[app.http.awsns :as-alias awsns]
|
[app.http.awsns :as-alias awsns]
|
||||||
[app.http.debug :as-alias debug]
|
[app.http.debug :as-alias debug]
|
||||||
[app.http.errors :as errors]
|
[app.http.errors :as errors]
|
||||||
[app.http.management :as mgmt]
|
|
||||||
[app.http.middleware :as mw]
|
[app.http.middleware :as mw]
|
||||||
[app.http.security :as sec]
|
|
||||||
[app.http.session :as session]
|
[app.http.session :as session]
|
||||||
[app.http.websocket :as-alias ws]
|
[app.http.websocket :as-alias ws]
|
||||||
[app.main :as-alias main]
|
[app.main :as-alias main]
|
||||||
[app.metrics :as mtx]
|
[app.metrics :as mtx]
|
||||||
[app.rpc :as-alias rpc]
|
[app.rpc :as-alias rpc]
|
||||||
|
[app.rpc.doc :as-alias rpc.doc]
|
||||||
[app.setup :as-alias setup]
|
[app.setup :as-alias setup]
|
||||||
[integrant.core :as ig]
|
[integrant.core :as ig]
|
||||||
|
[promesa.exec :as px]
|
||||||
[reitit.core :as r]
|
[reitit.core :as r]
|
||||||
[reitit.middleware :as rr]
|
[reitit.middleware :as rr]
|
||||||
[yetti.adapter :as yt]
|
[yetti.adapter :as yt]
|
||||||
@ -42,8 +42,8 @@
|
|||||||
(def default-params
|
(def default-params
|
||||||
{::port 6060
|
{::port 6060
|
||||||
::host "0.0.0.0"
|
::host "0.0.0.0"
|
||||||
::max-body-size 367001600 ; default 350 MiB
|
::max-body-size 31457280 ; default 30 MiB
|
||||||
})
|
::max-multipart-body-size 367001600}) ; default 350 MiB
|
||||||
|
|
||||||
(defmethod ig/expand-key ::server
|
(defmethod ig/expand-key ::server
|
||||||
[k v]
|
[k v]
|
||||||
@ -53,9 +53,8 @@
|
|||||||
[:map
|
[:map
|
||||||
[::port ::sm/int]
|
[::port ::sm/int]
|
||||||
[::host ::sm/text]
|
[::host ::sm/text]
|
||||||
[::io-threads {:optional true} ::sm/int]
|
|
||||||
[::max-worker-threads {:optional true} ::sm/int]
|
|
||||||
[::max-body-size {:optional true} ::sm/int]
|
[::max-body-size {:optional true} ::sm/int]
|
||||||
|
[::max-multipart-body-size {:optional true} ::sm/int]
|
||||||
[::router {:optional true} [:fn r/router?]]
|
[::router {:optional true} [:fn r/router?]]
|
||||||
[::handler {:optional true} ::sm/fn]])
|
[::handler {:optional true} ::sm/fn]])
|
||||||
|
|
||||||
@ -64,41 +63,30 @@
|
|||||||
(assert (sm/check schema:server-params params)))
|
(assert (sm/check schema:server-params params)))
|
||||||
|
|
||||||
(defmethod ig/init-key ::server
|
(defmethod ig/init-key ::server
|
||||||
[_ {:keys [::handler ::router ::host ::port ::mtx/metrics] :as cfg}]
|
[_ {:keys [::handler ::router ::host ::port] :as cfg}]
|
||||||
(l/info :hint "starting http server" :port port :host host)
|
(l/info :hint "starting http server" :port port :host host)
|
||||||
(let [on-dispatch
|
(let [options {:http/port port
|
||||||
(fn [_ start-at-ns]
|
:http/host host
|
||||||
(let [timing (- (System/nanoTime) start-at-ns)
|
:http/max-body-size (::max-body-size cfg)
|
||||||
timing (int (/ timing 1000000))]
|
:http/max-multipart-body-size (::max-multipart-body-size cfg)
|
||||||
(mtx/run! metrics
|
:xnio/io-threads (or (::io-threads cfg)
|
||||||
:id :http-server-dispatch-timing
|
(max 3 (px/get-available-processors)))
|
||||||
:val timing)))
|
:xnio/dispatch :virtual
|
||||||
|
:ring/compat :ring2
|
||||||
|
:socket/backlog 4069}
|
||||||
|
|
||||||
options
|
handler (cond
|
||||||
{:http/port port
|
(some? router)
|
||||||
:http/host host
|
(router-handler router)
|
||||||
:http/max-body-size (::max-body-size cfg)
|
|
||||||
:http/max-multipart-body-size (::max-body-size cfg)
|
|
||||||
:xnio/direct-buffers false
|
|
||||||
:xnio/io-threads (::io-threads cfg)
|
|
||||||
:xnio/max-worker-threads (::max-worker-threads cfg)
|
|
||||||
:ring/compat :ring2
|
|
||||||
:events/on-dispatch on-dispatch
|
|
||||||
:socket/backlog 4069}
|
|
||||||
|
|
||||||
handler
|
(some? handler)
|
||||||
(cond
|
handler
|
||||||
(some? router)
|
|
||||||
(router-handler router)
|
|
||||||
|
|
||||||
(some? handler)
|
:else
|
||||||
handler
|
(throw (UnsupportedOperationException. "handler or router are required")))
|
||||||
|
|
||||||
:else
|
options (d/without-nils options)
|
||||||
(throw (UnsupportedOperationException. "handler or router are required")))
|
server (yt/server handler options)]
|
||||||
|
|
||||||
server
|
|
||||||
(yt/server handler (d/without-nils options))]
|
|
||||||
|
|
||||||
(assoc cfg ::server (yt/start! server))))
|
(assoc cfg ::server (yt/start! server))))
|
||||||
|
|
||||||
@ -147,12 +135,12 @@
|
|||||||
[:map
|
[:map
|
||||||
[::ws/routes schema:routes]
|
[::ws/routes schema:routes]
|
||||||
[::rpc/routes schema:routes]
|
[::rpc/routes schema:routes]
|
||||||
|
[::rpc.doc/routes schema:routes]
|
||||||
[::oidc/routes schema:routes]
|
[::oidc/routes schema:routes]
|
||||||
[::assets/routes schema:routes]
|
[::assets/routes schema:routes]
|
||||||
[::debug/routes schema:routes]
|
[::debug/routes schema:routes]
|
||||||
[::mtx/routes schema:routes]
|
[::mtx/routes schema:routes]
|
||||||
[::awsns/routes schema:routes]
|
[::awsns/routes schema:routes]
|
||||||
[::mgmt/routes schema:routes]
|
|
||||||
::session/manager
|
::session/manager
|
||||||
::setup/props
|
::setup/props
|
||||||
::db/pool])
|
::db/pool])
|
||||||
@ -165,12 +153,10 @@
|
|||||||
[_ cfg]
|
[_ cfg]
|
||||||
(rr/router
|
(rr/router
|
||||||
[["" {:middleware [[mw/server-timing]
|
[["" {:middleware [[mw/server-timing]
|
||||||
[sec/sec-fetch-metadata]
|
|
||||||
[mw/params]
|
[mw/params]
|
||||||
[mw/format-response]
|
[mw/format-response]
|
||||||
[mw/auth {:bearer (partial session/decode-token cfg)
|
[session/soft-auth cfg]
|
||||||
:cookie (partial session/decode-token cfg)
|
[actoken/soft-auth cfg]
|
||||||
:token (partial actoken/decode-token cfg)}]
|
|
||||||
[mw/parse-request]
|
[mw/parse-request]
|
||||||
[mw/errors errors/handle]
|
[mw/errors errors/handle]
|
||||||
[mw/restrict-methods]]}
|
[mw/restrict-methods]]}
|
||||||
@ -182,9 +168,9 @@
|
|||||||
["/webhooks"
|
["/webhooks"
|
||||||
(::awsns/routes cfg)]
|
(::awsns/routes cfg)]
|
||||||
|
|
||||||
["/management"
|
|
||||||
(::mgmt/routes cfg)]
|
|
||||||
|
|
||||||
(::ws/routes cfg)
|
(::ws/routes cfg)
|
||||||
(::oidc/routes cfg)
|
|
||||||
(::rpc/routes cfg)]]))
|
["/api" {:middleware [[mw/cors]]}
|
||||||
|
(::oidc/routes cfg)
|
||||||
|
(::rpc.doc/routes cfg)
|
||||||
|
(::rpc/routes cfg)]]]))
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user