mirror of
https://github.com/penpot/penpot.git
synced 2026-05-16 21:43:40 +00:00
Compare commits
No commits in common. "develop" and "2.8.0-RC3" have entirely different histories.
342
.circleci/config.yml
Normal file
342
.circleci/config.yml
Normal file
@ -0,0 +1,342 @@
|
||||
version: 2.1
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- run:
|
||||
name: "fmt check"
|
||||
working_directory: "."
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
|
||||
- run:
|
||||
name: "lint clj common"
|
||||
working_directory: "."
|
||||
command: |
|
||||
yarn run lint:clj:common
|
||||
|
||||
- run:
|
||||
name: "lint clj frontend"
|
||||
working_directory: "."
|
||||
command: |
|
||||
yarn run lint:clj:frontend
|
||||
|
||||
- run:
|
||||
name: "lint clj backend"
|
||||
working_directory: "."
|
||||
command: |
|
||||
yarn run lint:clj:backend
|
||||
|
||||
- run:
|
||||
name: "lint clj exporter"
|
||||
working_directory: "."
|
||||
command: |
|
||||
yarn run lint:clj:exporter
|
||||
|
||||
- run:
|
||||
name: "lint clj library"
|
||||
working_directory: "."
|
||||
command: |
|
||||
yarn run lint:clj:library
|
||||
|
||||
test-common:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
|
||||
|
||||
- run:
|
||||
name: "JVM tests"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
clojure -M:dev:test
|
||||
|
||||
- run:
|
||||
name: "NODE tests"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run test
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
- ~/.m2
|
||||
- ~/.yarn
|
||||
- ~/.gitlibs
|
||||
- ~/.cache/ms-playwright
|
||||
key: v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
|
||||
|
||||
test-frontend:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
|
||||
|
||||
- run:
|
||||
name: "install dependencies"
|
||||
working_directory: "./frontend"
|
||||
# We install playwright here because the dependent tasks
|
||||
# uses the same cache as this task so we prepopulate it
|
||||
command: |
|
||||
yarn install
|
||||
yarn run playwright install chromium
|
||||
|
||||
- run:
|
||||
name: "lint scss on frontend"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn run lint:scss
|
||||
|
||||
- run:
|
||||
name: "unit tests"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn run test
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
- ~/.m2
|
||||
- ~/.yarn
|
||||
- ~/.gitlibs
|
||||
- ~/.cache/ms-playwright
|
||||
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
|
||||
|
||||
test-library:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx6g
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
|
||||
|
||||
- run:
|
||||
name: Install dependencies and build
|
||||
working_directory: "./library"
|
||||
command: |
|
||||
yarn install
|
||||
|
||||
- run:
|
||||
name: Build and Test
|
||||
working_directory: "./library"
|
||||
command: |
|
||||
./scripts/build
|
||||
yarn run test
|
||||
|
||||
test-components:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx6g -Xms2g
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
|
||||
|
||||
- run:
|
||||
name: Install dependencies
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run playwright install chromium
|
||||
|
||||
- run:
|
||||
name: Build Storybook
|
||||
working_directory: "./frontend"
|
||||
command: yarn run build:storybook
|
||||
|
||||
- run:
|
||||
name: Serve Storybook and run tests
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
|
||||
"npx http-server storybook-static --port 6006 --silent" \
|
||||
"npx wait-on tcp:6006 && yarn test:storybook"
|
||||
|
||||
test-integration:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: large
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx6g -Xms2g
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
|
||||
|
||||
- run:
|
||||
name: "integration tests"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run build:app:assets
|
||||
yarn run build:app
|
||||
yarn run build:app:libs
|
||||
yarn run playwright install chromium
|
||||
yarn run test:e2e -x --workers=4
|
||||
|
||||
test-backend:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
- image: cimg/postgres:14.5
|
||||
environment:
|
||||
POSTGRES_USER: penpot_test
|
||||
POSTGRES_PASSWORD: penpot_test
|
||||
POSTGRES_DB: penpot_test
|
||||
- image: cimg/redis:7.0.5
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "backend/deps.edn" }}
|
||||
|
||||
- run:
|
||||
name: "tests"
|
||||
working_directory: "./backend"
|
||||
command: |
|
||||
clojure -M:dev:test --reporter kaocha.report/documentation
|
||||
|
||||
environment:
|
||||
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
|
||||
PENPOT_TEST_DATABASE_USERNAME: penpot_test
|
||||
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
|
||||
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
- ~/.m2
|
||||
- ~/.gitlibs
|
||||
key: v1-dependencies-{{ checksum "backend/deps.edn" }}
|
||||
|
||||
test-render-wasm:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
environment:
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- run:
|
||||
name: "fmt check"
|
||||
working_directory: "./render-wasm"
|
||||
command: |
|
||||
cargo fmt --check
|
||||
|
||||
- run:
|
||||
name: "lint"
|
||||
working_directory: "./render-wasm"
|
||||
command: |
|
||||
./lint
|
||||
|
||||
- run:
|
||||
name: "cargo tests"
|
||||
working_directory: "./render-wasm"
|
||||
command: |
|
||||
./test
|
||||
|
||||
workflows:
|
||||
penpot:
|
||||
jobs:
|
||||
- lint
|
||||
- test-frontend:
|
||||
requires:
|
||||
- lint: success
|
||||
|
||||
- test-library:
|
||||
requires:
|
||||
- test-frontend: success
|
||||
- lint: success
|
||||
|
||||
- test-components:
|
||||
requires:
|
||||
- test-frontend: success
|
||||
- lint: success
|
||||
|
||||
- test-integration:
|
||||
requires:
|
||||
- test-frontend: success
|
||||
- lint: success
|
||||
|
||||
- test-backend:
|
||||
requires:
|
||||
- lint: success
|
||||
|
||||
- test-common:
|
||||
requires:
|
||||
- lint: success
|
||||
|
||||
- test-render-wasm
|
||||
@ -45,25 +45,10 @@
|
||||
:potok/reify-type
|
||||
{:level :error}
|
||||
|
||||
:redundant-primitive-coercion
|
||||
{:level :off}
|
||||
|
||||
:unused-excluded-var
|
||||
{:level :off}
|
||||
|
||||
:unresolved-excluded-var
|
||||
{:level :off}
|
||||
|
||||
:missing-protocol-method
|
||||
{:level :off}
|
||||
|
||||
:unresolved-namespace
|
||||
{:level :warning
|
||||
:exclude [data_readers]}
|
||||
|
||||
:unused-value
|
||||
{:level :off}
|
||||
|
||||
:single-key-in
|
||||
{:level :warning}
|
||||
|
||||
@ -79,9 +64,6 @@
|
||||
:redundant-nested-call
|
||||
{:level :off}
|
||||
|
||||
:redundant-str-call
|
||||
{:level :off}
|
||||
|
||||
:earmuffed-var-not-dynamic
|
||||
{:level :off}
|
||||
|
||||
|
||||
@ -2,11 +2,6 @@
|
||||
:remove-multiple-non-indenting-spaces? false
|
||||
:remove-surrounding-whitespace? true
|
||||
:remove-consecutive-blank-lines? false
|
||||
:indent-line-comments? true
|
||||
:parallel? true
|
||||
:align-form-columns? false
|
||||
;; :align-map-columns? false
|
||||
;; :align-single-column-lines? false
|
||||
:extra-indents {rumext.v2/fnc [[:inner 0]]
|
||||
cljs.test/async [[:inner 0]]
|
||||
promesa.exec/thread [[:inner 0]]
|
||||
|
||||
39
.github/ISSUE_TEMPLATE/bug_report.md
vendored
39
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -1,39 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
type: Bug
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screen recordings and screenshots**
|
||||
If possible, add screen recordings or screenshots to help explain your problem.
|
||||
|
||||
**Desktop (please complete the following information):**
|
||||
- OS: [e.g. iOS]
|
||||
- Browser [e.g. chrome, safari]
|
||||
- Version [e.g. 22]
|
||||
|
||||
**Smartphone (please complete the following information):**
|
||||
- Device: [e.g. iPhone6]
|
||||
- OS: [e.g. iOS8.1]
|
||||
- Browser [e.g. stock browser, safari]
|
||||
- Version [e.g. 22]
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
21
.github/ISSUE_TEMPLATE/feature_request.md
vendored
21
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@ -1,21 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
type: Feature
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -13,7 +13,6 @@
|
||||
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
|
||||
- [ ] Include screenshots or videos, if applicable.
|
||||
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
|
||||
- [ ] Refactor any modified SCSS files following the refactor guide.
|
||||
- [ ] Check CI passes successfully.
|
||||
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.
|
||||
|
||||
|
||||
93
.github/workflows/build-bundle.yml
vendored
93
.github/workflows/build-bundle.yml
vendored
@ -1,93 +0,0 @@
|
||||
name: Bundles Builder
|
||||
|
||||
on:
|
||||
# Create bundle from manual action
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch or ref'
|
||||
type: string
|
||||
required: true
|
||||
default: 'develop'
|
||||
build_wasm:
|
||||
description: 'BUILD_WASM. Valid values: yes, no'
|
||||
type: string
|
||||
required: false
|
||||
default: 'yes'
|
||||
build_storybook:
|
||||
description: 'BUILD_STORYBOOK. Valid values: yes, no'
|
||||
type: string
|
||||
required: false
|
||||
default: 'yes'
|
||||
workflow_call:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch or ref'
|
||||
type: string
|
||||
required: true
|
||||
default: 'develop'
|
||||
build_wasm:
|
||||
description: 'BUILD_WASM. Valid values: yes, no'
|
||||
type: string
|
||||
required: false
|
||||
default: 'yes'
|
||||
build_storybook:
|
||||
description: 'BUILD_STORYBOOK. Valid values: yes, no'
|
||||
type: string
|
||||
required: false
|
||||
default: 'yes'
|
||||
|
||||
jobs:
|
||||
build-bundle:
|
||||
name: Build and Upload Penpot Bundle
|
||||
runs-on: penpot-runner-01
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.gh_ref }}
|
||||
|
||||
- name: Extract some useful variables
|
||||
id: vars
|
||||
run: |
|
||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
echo "bundle_version=$(git describe --tags --always)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build bundle
|
||||
env:
|
||||
BUILD_WASM: ${{ inputs.build_wasm }}
|
||||
BUILD_STORYBOOK: ${{ inputs.build_storybook }}
|
||||
run: ./manage.sh build-bundle
|
||||
|
||||
- name: Prepare directories for zipping
|
||||
run: |
|
||||
mkdir zips
|
||||
mv bundles penpot
|
||||
|
||||
- name: Create zip bundle
|
||||
run: |
|
||||
echo "📦 Packaging Penpot bundle..."
|
||||
zip -r zips/penpot.zip penpot
|
||||
|
||||
- name: Upload Penpot bundle to S3
|
||||
run: |
|
||||
aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}.zip --metadata bundle-version=${{ steps.vars.outputs.bundle_version }}
|
||||
|
||||
- name: Notify Mattermost
|
||||
if: failure()
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
❌ 📦 *[PENPOT] Error building penpot bundles.*
|
||||
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
|
||||
Bundle version: `${{ steps.vars.outputs.bundle_version }}`
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
22
.github/workflows/build-develop.yml
vendored
22
.github/workflows/build-develop.yml
vendored
@ -1,22 +0,0 @@
|
||||
name: _DEVELOP
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '16 5-20 * * 1-5'
|
||||
|
||||
jobs:
|
||||
build-bundle:
|
||||
uses: ./.github/workflows/build-bundle.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "develop"
|
||||
build_wasm: "yes"
|
||||
build_storybook: "yes"
|
||||
|
||||
build-docker:
|
||||
needs: build-bundle
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "develop"
|
||||
51
.github/workflows/build-docker-devenv.yml
vendored
51
.github/workflows/build-docker-devenv.yml
vendored
@ -1,51 +0,0 @@
|
||||
name: DevEnv Docker Image Builder
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
name: Build and push DevEnv Docker image
|
||||
environment: release-admins
|
||||
runs-on: penpot-runner-02
|
||||
|
||||
steps:
|
||||
- name: Set common environment variables
|
||||
run: |
|
||||
# Each job execution will use its own docker configuration.
|
||||
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v4
|
||||
|
||||
- name: Login to Docker Registry
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
username: ${{ secrets.PUB_DOCKER_USERNAME }}
|
||||
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
|
||||
|
||||
- name: Build and push DevEnv Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
env:
|
||||
DOCKER_IMAGE: 'penpotapp/devenv'
|
||||
with:
|
||||
context: ./docker/devenv/
|
||||
file: ./docker/devenv/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ env.DOCKER_IMAGE }}:latest
|
||||
cache-from: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache
|
||||
cache-to: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
||||
|
||||
- name: Notify Mattermost
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
🚀 *[PENPOT] New devenv available*
|
||||
📄 You may want to update your devenv.
|
||||
@alvaro
|
||||
183
.github/workflows/build-docker.yml
vendored
183
.github/workflows/build-docker.yml
vendored
@ -1,183 +0,0 @@
|
||||
name: Docker Images Builder
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch or ref'
|
||||
type: string
|
||||
required: true
|
||||
default: 'develop'
|
||||
workflow_call:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch or ref'
|
||||
type: string
|
||||
required: true
|
||||
default: 'develop'
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
name: Build and Push Penpot Docker Images
|
||||
runs-on: penpot-runner-02
|
||||
|
||||
steps:
|
||||
- name: Set common environment variables
|
||||
run: |
|
||||
# Each job execution will use its own docker configuration.
|
||||
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.gh_ref }}
|
||||
|
||||
- name: Extract some useful variables
|
||||
id: vars
|
||||
run: |
|
||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Download Penpot Bundles
|
||||
id: bundles
|
||||
env:
|
||||
FILE_NAME: penpot-${{ steps.vars.outputs.gh_ref }}.zip
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
|
||||
run: |
|
||||
tmp=$(aws s3api head-object \
|
||||
--bucket ${{ secrets.S3_BUCKET }} \
|
||||
--key "$FILE_NAME" \
|
||||
--query 'Metadata."bundle-version"' \
|
||||
--output text)
|
||||
echo "bundle_version=$tmp" >> $GITHUB_OUTPUT
|
||||
pushd docker/images
|
||||
aws s3 cp s3://${{ secrets.S3_BUCKET }}/$FILE_NAME .
|
||||
unzip $FILE_NAME > /dev/null
|
||||
mv penpot/backend bundle-backend
|
||||
mv penpot/frontend bundle-frontend
|
||||
mv penpot/exporter bundle-exporter
|
||||
mv penpot/storybook bundle-storybook
|
||||
mv penpot/mcp bundle-mcp
|
||||
popd
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v4
|
||||
|
||||
- name: Login to Docker Registry
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ${{ secrets.DOCKER_REGISTRY }}
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
# To avoid the “429 Too Many Requests” error when downloading
|
||||
# images from DockerHub for unregistered users.
|
||||
# https://docs.docker.com/docker-hub/usage/
|
||||
- name: Login to DockerHub Registry
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
username: ${{ secrets.PUB_DOCKER_USERNAME }}
|
||||
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
|
||||
|
||||
- name: Extract metadata (tags, labels)
|
||||
id: meta
|
||||
uses: docker/metadata-action@v6
|
||||
with:
|
||||
images:
|
||||
frontend
|
||||
backend
|
||||
exporter
|
||||
storybook
|
||||
mcp
|
||||
labels: |
|
||||
bundle_version=${{ steps.bundles.outputs.bundle_version }}
|
||||
|
||||
- name: Build and push Backend Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
env:
|
||||
DOCKER_IMAGE: 'backend'
|
||||
BUNDLE_PATH: './bundle-backend'
|
||||
with:
|
||||
context: ./docker/images/
|
||||
file: ./docker/images/Dockerfile.backend
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
||||
|
||||
- name: Build and push Frontend Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
env:
|
||||
DOCKER_IMAGE: 'frontend'
|
||||
BUNDLE_PATH: './bundle-frontend'
|
||||
with:
|
||||
context: ./docker/images/
|
||||
file: ./docker/images/Dockerfile.frontend
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
||||
|
||||
- name: Build and push Exporter Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
env:
|
||||
DOCKER_IMAGE: 'exporter'
|
||||
BUNDLE_PATH: './bundle-exporter'
|
||||
with:
|
||||
context: ./docker/images/
|
||||
file: ./docker/images/Dockerfile.exporter
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
||||
|
||||
- name: Build and push Storybook Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
env:
|
||||
DOCKER_IMAGE: 'storybook'
|
||||
BUNDLE_PATH: './bundle-storybook'
|
||||
with:
|
||||
context: ./docker/images/
|
||||
file: ./docker/images/Dockerfile.storybook
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
||||
|
||||
- name: Build and push MCP Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
env:
|
||||
DOCKER_IMAGE: 'mcp'
|
||||
BUNDLE_PATH: './bundle-mcp'
|
||||
with:
|
||||
context: ./docker/images/
|
||||
file: ./docker/images/Dockerfile.mcp
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
|
||||
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
|
||||
|
||||
- name: Notify Mattermost
|
||||
if: failure()
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
❌ 🐳 *[PENPOT] Error building penpot docker images.*
|
||||
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
|
||||
📦 Bundle: `${{ steps.bundles.outputs.bundle_version }}`
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
22
.github/workflows/build-main-staging.yml
vendored
22
.github/workflows/build-main-staging.yml
vendored
@ -1,22 +0,0 @@
|
||||
name: _MAIN-STAGING
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '26 5-20 * * 1-5'
|
||||
|
||||
jobs:
|
||||
build-bundle:
|
||||
uses: ./.github/workflows/build-bundle.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "main-staging"
|
||||
build_wasm: "yes"
|
||||
build_storybook: "yes"
|
||||
|
||||
build-docker:
|
||||
needs: build-bundle
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "main-staging"
|
||||
22
.github/workflows/build-staging.yml
vendored
22
.github/workflows/build-staging.yml
vendored
@ -1,22 +0,0 @@
|
||||
name: _STAGING
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '36 5-20 * * 1-5'
|
||||
|
||||
jobs:
|
||||
build-bundle:
|
||||
uses: ./.github/workflows/build-bundle.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "staging"
|
||||
build_wasm: "yes"
|
||||
build_storybook: "yes"
|
||||
|
||||
build-docker:
|
||||
needs: build-bundle
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "staging"
|
||||
47
.github/workflows/build-tag.yml
vendored
47
.github/workflows/build-tag.yml
vendored
@ -1,47 +0,0 @@
|
||||
name: _TAG
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
build-bundle:
|
||||
uses: ./.github/workflows/build-bundle.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: ${{ github.ref_name }}
|
||||
build_wasm: "yes"
|
||||
build_storybook: "yes"
|
||||
|
||||
build-docker:
|
||||
needs: build-bundle
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: ${{ github.ref_name }}
|
||||
|
||||
notify:
|
||||
name: Notifications
|
||||
runs-on: ubuntu-24.04
|
||||
needs: build-docker
|
||||
|
||||
steps:
|
||||
- name: Notify Mattermost
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
🐳 *[PENPOT] Docker image available: ${{ github.ref_name }}*
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
|
||||
publish-final-tag:
|
||||
if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
|
||||
needs: build-docker
|
||||
uses: ./.github/workflows/release.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: ${{ github.ref_name }}
|
||||
5
.github/workflows/commit-checker.yml
vendored
5
.github/workflows/commit-checker.yml
vendored
@ -6,14 +6,12 @@ on:
|
||||
- edited
|
||||
- reopened
|
||||
- synchronize
|
||||
- ready_for_review
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
- edited
|
||||
- reopened
|
||||
- synchronize
|
||||
- ready_for_review
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
@ -22,14 +20,13 @@ on:
|
||||
|
||||
jobs:
|
||||
check-commit-message:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: Check Commit Message
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check Commit Type
|
||||
uses: gsactions/commit-message-checker@v2
|
||||
with:
|
||||
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert|Reapply).+[^.])$'
|
||||
pattern: '^(Merge|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):)\s[A-Z].*[^.]$'
|
||||
flags: 'gm'
|
||||
error: 'Commit should match CONTRIBUTING.md guideline'
|
||||
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
|
||||
|
||||
142
.github/workflows/plugins-deploy-api-doc.yml
vendored
142
.github/workflows/plugins-deploy-api-doc.yml
vendored
@ -1,142 +0,0 @@
|
||||
name: Plugins/api-doc deployer
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
paths:
|
||||
- 'plugins/libs/plugin-types/index.d.ts'
|
||||
- 'plugins/libs/plugin-types/REAME.md'
|
||||
- 'plugins/tools/typedoc.css'
|
||||
- 'plugins/CHANGELOG.md'
|
||||
- 'plugins/wrangler-penpot-plugins-api-doc.toml'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch'
|
||||
type: choice
|
||||
required: true
|
||||
default: 'develop'
|
||||
options:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract some useful variables
|
||||
id: vars
|
||||
run: |
|
||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ steps.vars.outputs.gh_ref }}
|
||||
|
||||
# START: Setup Node and PNPM enabling cache
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
|
||||
- name: Enable PNPM
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
|
||||
- name: Get pnpm store path
|
||||
id: pnpm-store
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
# END: Setup Node and PNPM enabling cache
|
||||
|
||||
- name: Install deps
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
pnpm install --no-frozen-lockfile;
|
||||
pnpm add -D -w wrangler@latest;
|
||||
|
||||
- name: Build docs
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: pnpm run build:doc
|
||||
|
||||
- name: Select Worker name
|
||||
run: |
|
||||
REF="${{ steps.vars.outputs.gh_ref }}"
|
||||
case "$REF" in
|
||||
main)
|
||||
echo "WORKER_NAME=penpot-plugins-api-doc-pro" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=doc.plugins.penpot.app" >> $GITHUB_ENV ;;
|
||||
staging)
|
||||
echo "WORKER_NAME=penpot-plugins-api-doc-pre" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=doc.plugins.penpot.dev" >> $GITHUB_ENV ;;
|
||||
develop)
|
||||
echo "WORKER_NAME=penpot-plugins-api-doc-hourly" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=doc.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
|
||||
*) echo "Unsupported branch ${REF}" && exit 1 ;;
|
||||
esac
|
||||
|
||||
- name: Set the custom url
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: |
|
||||
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" wrangler-penpot-plugins-api-doc.toml
|
||||
|
||||
- name: Add noindex header and robots.txt files for non-production environments
|
||||
if: ${{ steps.vars.outputs.gh_ref != 'main' }}
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: |
|
||||
ASSETS_DIR="dist/doc"
|
||||
|
||||
cat > "${ASSETS_DIR}/_headers" << 'EOF'
|
||||
/*
|
||||
X-Robots-Tag: noindex, nofollow
|
||||
EOF
|
||||
|
||||
cat > "${ASSETS_DIR}/robots.txt" << 'EOF'
|
||||
User-agent: *
|
||||
Disallow: /
|
||||
EOF
|
||||
|
||||
- name: Deploy to Cloudflare Workers
|
||||
uses: cloudflare/wrangler-action@v3
|
||||
with:
|
||||
workingDirectory: plugins
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
command: deploy --config wrangler-penpot-plugins-api-doc.toml --name ${{ env.WORKER_NAME }}
|
||||
|
||||
- name: Notify Mattermost
|
||||
if: failure()
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
❌ 🧩📚 *[PENPOT PLUGINS] Error deploying API documentation.*
|
||||
📄 Triggered from ref: `${{ inputs.gh_ref }}`
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
127
.github/workflows/plugins-deploy-package.yml
vendored
127
.github/workflows/plugins-deploy-package.yml
vendored
@ -1,127 +0,0 @@
|
||||
name: Plugins/package deployer
|
||||
|
||||
on:
|
||||
# Deploy package from manual action
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch'
|
||||
type: choice
|
||||
required: true
|
||||
default: 'develop'
|
||||
options:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
plugin_name:
|
||||
description: 'Pluging name (like plugins/apps/<plugin_name>-plugin)'
|
||||
type: string
|
||||
required: true
|
||||
workflow_call:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch'
|
||||
type: string
|
||||
required: true
|
||||
default: 'develop'
|
||||
plugin_name:
|
||||
description: 'Publig name (from plugins/apps/<plugin_name>-plugin)'
|
||||
type: string
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: penpot-runner-01
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.gh_ref }}
|
||||
|
||||
# START: Setup Node and PNPM enabling cache
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
|
||||
- name: Enable PNPM
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
|
||||
- name: Get pnpm store path
|
||||
id: pnpm-store
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
# END: Setup Node and PNPM enabling cache
|
||||
|
||||
- name: Install deps
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
pnpm install --no-frozen-lockfile;
|
||||
pnpm add -D -w wrangler@latest;
|
||||
|
||||
- name: "Build package for ${{ inputs.plugin_name }}-plugin"
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: pnpm --filter ${{ inputs.plugin_name }}-plugin build
|
||||
|
||||
- name: Select Worker name
|
||||
run: |
|
||||
REF="${{ inputs.gh_ref }}"
|
||||
case "$REF" in
|
||||
main)
|
||||
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pro" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.app" >> $GITHUB_ENV ;;
|
||||
staging)
|
||||
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pre" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.dev" >> $GITHUB_ENV ;;
|
||||
develop)
|
||||
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-hourly" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
|
||||
*) echo "Unsupported branch ${REF}" && exit 1 ;;
|
||||
esac
|
||||
|
||||
- name: Set the custom url
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: |
|
||||
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" apps/${{ inputs.plugin_name }}-plugin/wrangler.toml
|
||||
|
||||
- name: Deploy to Cloudflare Workers
|
||||
uses: cloudflare/wrangler-action@v3
|
||||
with:
|
||||
workingDirectory: plugins
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
command: deploy --config apps/${{ inputs.plugin_name }}-plugin/wrangler.toml --name ${{ env.WORKER_NAME }}
|
||||
|
||||
- name: Notify Mattermost
|
||||
if: failure()
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
❌ 🧩📦 *[PENPOT PLUGINS] Error deploying ${{ env.WORKER_NAME }}.*
|
||||
📄 Triggered from ref: `${{ inputs.gh_ref }}`
|
||||
Plugin name: `${{ inputs.plugin_name }}-plugin`
|
||||
Cloudflare worker name: `${{ env.WORKER_NAME }}`
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
143
.github/workflows/plugins-deploy-packages.yml
vendored
143
.github/workflows/plugins-deploy-packages.yml
vendored
@ -1,143 +0,0 @@
|
||||
name: Plugins/packages deployer
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
paths:
|
||||
- 'plugins/apps/*-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch'
|
||||
type: choice
|
||||
required: true
|
||||
default: 'develop'
|
||||
options:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
|
||||
jobs:
|
||||
detect-changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
colors_to_tokens: ${{ steps.filter.outputs.colors_to_tokens }}
|
||||
create_palette: ${{ steps.filter.outputs.create_palette }}
|
||||
lorem_ipsum: ${{ steps.filter.outputs.lorem_ipsum }}
|
||||
rename_layers: ${{ steps.filter.outputs.rename_layers }}
|
||||
contrast: ${{ steps.filter.outputs.contrast }}
|
||||
icons: ${{ steps.filter.outputs.icons }}
|
||||
poc_state: ${{ steps.filter.outputs.poc_state }}
|
||||
table: ${{ steps.filter.outputs.table }}
|
||||
# [For new plugins]
|
||||
# Add more outputs here
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: filter
|
||||
uses: dorny/paths-filter@v4
|
||||
with:
|
||||
filters: |
|
||||
colors_to_tokens:
|
||||
- 'plugins/apps/colors-to-tokens-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
contrast:
|
||||
- 'plugins/apps/contrast-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
create_palette:
|
||||
- 'plugins/apps/create-palette-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
icons:
|
||||
- 'plugins/apps/icons-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
lorem_ipsum:
|
||||
- 'plugins/apps/lorem-ipsum-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
rename_layers:
|
||||
- 'plugins/apps/rename-layers-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
table:
|
||||
- 'plugins/apps/table-plugin/**'
|
||||
- 'libs/plugins-styles/**'
|
||||
# [For new plugins]
|
||||
# Add more plugin filters here
|
||||
# another_plugin:
|
||||
# - 'plugins/apps/another-plugin/**'
|
||||
# - 'libs/plugins-styles/**'
|
||||
|
||||
colors-to-tokens-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.colors_to_tokens == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: colors-to-tokens
|
||||
|
||||
contrast-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.contrast == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: contrast
|
||||
|
||||
create-palette-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.create_palette == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: create-palette
|
||||
|
||||
icons-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.icons == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: icons
|
||||
|
||||
lorem-ipsum-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.lorem_ipsum == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: lorem-ipsum
|
||||
|
||||
rename-layers-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.rename_layers == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: rename-layers
|
||||
|
||||
table-plugin:
|
||||
needs: detect-changes
|
||||
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.table == 'true'
|
||||
uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
plugin_name: table
|
||||
|
||||
# [For new plugins]
|
||||
# Add more jobs for other plugins below, following the same pattern
|
||||
# another-plugin:
|
||||
# needs: detect-changes
|
||||
# if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.another_plugin == 'true'
|
||||
# uses: ./.github/workflows/plugins-deploy-package.yml
|
||||
# secrets: inherit
|
||||
# with:
|
||||
# gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
|
||||
# plugin_name: another
|
||||
140
.github/workflows/plugins-deploy-styles-doc.yml
vendored
140
.github/workflows/plugins-deploy-styles-doc.yml
vendored
@ -1,140 +0,0 @@
|
||||
name: Plugins/styles-doc deployer
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
paths:
|
||||
- 'plugins/apps/example-styles/**'
|
||||
- 'plugins/libs/plugins-styles/**'
|
||||
- 'plugins/wrangler-penpot-plugins-styles-doc.toml'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Name of the branch'
|
||||
type: choice
|
||||
required: true
|
||||
default: 'develop'
|
||||
options:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract some useful variables
|
||||
id: vars
|
||||
run: |
|
||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ steps.vars.outputs.gh_ref }}
|
||||
|
||||
# START: Setup Node and PNPM enabling cache
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
|
||||
- name: Enable PNPM
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
|
||||
- name: Get pnpm store path
|
||||
id: pnpm-store
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
# END: Setup Node and PNPM enabling cache
|
||||
|
||||
- name: Install deps
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
pnpm install --no-frozen-lockfile;
|
||||
pnpm add -D -w wrangler@latest;
|
||||
|
||||
- name: Build styles
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: pnpm run build:styles-example
|
||||
|
||||
- name: Select Worker name
|
||||
run: |
|
||||
REF="${{ steps.vars.outputs.gh_ref }}"
|
||||
case "$REF" in
|
||||
main)
|
||||
echo "WORKER_NAME=penpot-plugins-styles-doc-pro" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=styles-doc.plugins.penpot.app" >> $GITHUB_ENV ;;
|
||||
staging)
|
||||
echo "WORKER_NAME=penpot-plugins-styles-doc-pre" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=styles-doc.plugins.penpot.dev" >> $GITHUB_ENV ;;
|
||||
develop)
|
||||
echo "WORKER_NAME=penpot-plugins-styles-doc-hourly" >> $GITHUB_ENV
|
||||
echo "WORKER_URI=styles-doc.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
|
||||
*) echo "Unsupported branch ${REF}" && exit 1 ;;
|
||||
esac
|
||||
|
||||
- name: Set the custom url
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: |
|
||||
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" wrangler-penpot-plugins-styles-doc.toml
|
||||
|
||||
- name: Add noindex header and robots.txt files for non-production environments
|
||||
if: ${{ steps.vars.outputs.gh_ref != 'main' }}
|
||||
working-directory: plugins
|
||||
shell: bash
|
||||
run: |
|
||||
ASSETS_DIR="dist/apps/example-styles"
|
||||
|
||||
cat > "${ASSETS_DIR}/_headers" << 'EOF'
|
||||
/*
|
||||
X-Robots-Tag: noindex, nofollow
|
||||
EOF
|
||||
|
||||
cat > "${ASSETS_DIR}/robots.txt" << 'EOF'
|
||||
User-agent: *
|
||||
Disallow: /
|
||||
EOF
|
||||
|
||||
- name: Deploy to Cloudflare Workers
|
||||
uses: cloudflare/wrangler-action@v3
|
||||
with:
|
||||
workingDirectory: plugins
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
command: deploy --config wrangler-penpot-plugins-styles-doc.toml --name ${{ env.WORKER_NAME }}
|
||||
|
||||
- name: Notify Mattermost
|
||||
if: failure()
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
❌ 🧩💅 *[PENPOT PLUGINS] Error deploying Styles documentation.*
|
||||
📄 Triggered from ref: `${{ inputs.gh_ref }}`
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
115
.github/workflows/release.yml
vendored
115
.github/workflows/release.yml
vendored
@ -1,115 +0,0 @@
|
||||
name: Release Publisher
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Tag to release'
|
||||
type: string
|
||||
required: true
|
||||
workflow_call:
|
||||
inputs:
|
||||
gh_ref:
|
||||
description: 'Tag to release'
|
||||
type: string
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
release:
|
||||
environment: release-admins
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
version: ${{ steps.vars.outputs.gh_ref }}
|
||||
release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
|
||||
steps:
|
||||
- name: Extract some useful variables
|
||||
id: vars
|
||||
run: |
|
||||
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ steps.vars.outputs.gh_ref }}
|
||||
|
||||
# --- Publicly release the docker images ---
|
||||
- name: Configure ECR credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.DOCKER_USERNAME }}
|
||||
aws-secret-access-key: ${{ secrets.DOCKER_PASSWORD }}
|
||||
aws-region: ${{ secrets.AWS_REGION }}
|
||||
|
||||
- name: Install Skopeo
|
||||
run: |
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y skopeo
|
||||
|
||||
- name: Copy images from AWS ECR to Docker Hub
|
||||
env:
|
||||
AWS_REGION: ${{ secrets.AWS_REGION }}
|
||||
DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
|
||||
PUB_DOCKER_USERNAME: ${{ secrets.PUB_DOCKER_USERNAME }}
|
||||
PUB_DOCKER_PASSWORD: ${{ secrets.PUB_DOCKER_PASSWORD }}
|
||||
TAG: ${{ steps.vars.outputs.gh_ref }}
|
||||
run: |
|
||||
aws ecr get-login-password --region $AWS_REGION | \
|
||||
skopeo login --username AWS --password-stdin \
|
||||
$DOCKER_REGISTRY
|
||||
|
||||
echo "$PUB_DOCKER_PASSWORD" | skopeo login --username "$PUB_DOCKER_USERNAME" --password-stdin docker.io
|
||||
|
||||
IMAGES=("frontend" "backend" "exporter" "mcp" "storybook")
|
||||
SHORT_TAG=${TAG%.*}
|
||||
|
||||
for image in "${IMAGES[@]}"; do
|
||||
skopeo copy --all \
|
||||
docker://$DOCKER_REGISTRY/$image:$TAG \
|
||||
docker://docker.io/penpotapp/$image:$TAG
|
||||
|
||||
for alias in main latest "$SHORT_TAG"; do
|
||||
skopeo copy --all \
|
||||
docker://$DOCKER_REGISTRY/$image:$TAG \
|
||||
docker://docker.io/penpotapp/$image:$alias
|
||||
done
|
||||
done
|
||||
|
||||
# --- Release notes extraction ---
|
||||
- name: Extract release notes from CHANGES.md
|
||||
id: extract_release_notes
|
||||
env:
|
||||
TAG: ${{ steps.vars.outputs.gh_ref }}
|
||||
run: |
|
||||
RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
|
||||
if [ -z "$RELEASE_NOTES" ]; then
|
||||
RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
|
||||
fi
|
||||
echo "release_notes<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
# --- Create GitHub release ---
|
||||
- name: Create GitHub release
|
||||
uses: softprops/action-gh-release@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ steps.vars.outputs.gh_ref }}
|
||||
name: ${{ steps.vars.outputs.gh_ref }}
|
||||
body: ${{ steps.extract_release_notes.outputs.release_notes }}
|
||||
|
||||
- name: Notify Mattermost
|
||||
if: failure()
|
||||
uses: mattermost/action-mattermost-notify@master
|
||||
with:
|
||||
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
|
||||
MATTERMOST_CHANNEL: bot-alerts-cicd
|
||||
TEXT: |
|
||||
❌ 🚀 *[PENPOT] Error releasing penpot.*
|
||||
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
|
||||
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
@infra
|
||||
47
.github/workflows/tests-mcp.yml
vendored
47
.github/workflows/tests-mcp.yml
vendored
@ -1,47 +0,0 @@
|
||||
name: "MCP CI"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- ready_for_review
|
||||
|
||||
paths:
|
||||
- 'mcp/**'
|
||||
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
- main
|
||||
|
||||
paths:
|
||||
- 'mcp/**'
|
||||
|
||||
jobs:
|
||||
test-mcp:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Test MCP"
|
||||
runs-on: penpot-runner-02
|
||||
container: penpotapp/devenv:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup
|
||||
working-directory: ./mcp
|
||||
run: ./scripts/setup
|
||||
|
||||
- name: Check
|
||||
working-directory: ./mcp
|
||||
run: |
|
||||
pnpm run fmt:check;
|
||||
pnpm -r run build;
|
||||
pnpm -r run types:check;
|
||||
410
.github/workflows/tests.yml
vendored
410
.github/workflows/tests.yml
vendored
@ -1,410 +0,0 @@
|
||||
name: "CI"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- ready_for_review
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Linter"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Lint Common
|
||||
working-directory: ./common
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
pnpm install;
|
||||
pnpm run check-fmt:clj
|
||||
pnpm run check-fmt:js
|
||||
pnpm run lint:clj
|
||||
|
||||
- name: Lint Frontend
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
pnpm install;
|
||||
pnpm run check-fmt:js
|
||||
pnpm run check-fmt:clj
|
||||
pnpm run check-fmt:scss
|
||||
pnpm run lint:clj
|
||||
pnpm run lint:js
|
||||
pnpm run lint:scss
|
||||
|
||||
- name: Lint Backend
|
||||
working-directory: ./backend
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
pnpm install;
|
||||
pnpm run check-fmt
|
||||
pnpm run lint
|
||||
|
||||
- name: Lint Exporter
|
||||
working-directory: ./exporter
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
pnpm install;
|
||||
pnpm run check-fmt
|
||||
pnpm run lint
|
||||
|
||||
- name: Lint Library
|
||||
working-directory: ./library
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
pnpm install;
|
||||
pnpm run check-fmt
|
||||
pnpm run lint
|
||||
|
||||
test-common:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Common Tests"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Run tests
|
||||
working-directory: ./common
|
||||
run: |
|
||||
./scripts/test
|
||||
|
||||
test-plugins:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: Plugins Runtime Linter & Tests
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node
|
||||
id: setup-node
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
|
||||
- name: Install deps
|
||||
working-directory: ./plugins
|
||||
shell: bash
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
pnpm install;
|
||||
|
||||
- name: Run Lint
|
||||
working-directory: ./plugins
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Run Format Check
|
||||
working-directory: ./plugins
|
||||
run: pnpm run format:check
|
||||
|
||||
- name: Run Test
|
||||
working-directory: ./plugins
|
||||
run: pnpm run test
|
||||
|
||||
- name: Build runtime
|
||||
working-directory: ./plugins
|
||||
run: pnpm run build:runtime
|
||||
|
||||
- name: Build doc
|
||||
working-directory: ./plugins
|
||||
run: pnpm run build:doc
|
||||
|
||||
- name: Build plugins
|
||||
working-directory: ./plugins
|
||||
run: pnpm run build:plugins
|
||||
|
||||
- name: Build styles
|
||||
working-directory: ./plugins
|
||||
run: pnpm run build:styles-example
|
||||
|
||||
test-frontend:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Frontend Tests"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Unit Tests
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
./scripts/test
|
||||
|
||||
- name: Component Tests
|
||||
working-directory: ./frontend
|
||||
env:
|
||||
VITEST_BROWSER_TIMEOUT: 120000
|
||||
run: |
|
||||
./scripts/test-components
|
||||
|
||||
test-render-wasm:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Render WASM Tests"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Format
|
||||
working-directory: ./render-wasm
|
||||
run: |
|
||||
cargo fmt --check
|
||||
|
||||
- name: Lint
|
||||
working-directory: ./render-wasm
|
||||
run: |
|
||||
./lint
|
||||
|
||||
- name: Test
|
||||
working-directory: ./render-wasm
|
||||
run: |
|
||||
./test
|
||||
|
||||
test-backend:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Backend Tests"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:17
|
||||
# Provide the password for postgres
|
||||
env:
|
||||
POSTGRES_USER: penpot_test
|
||||
POSTGRES_PASSWORD: penpot_test
|
||||
POSTGRES_DB: penpot_test
|
||||
|
||||
# Set health checks to wait until postgres has started
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
redis:
|
||||
image: valkey/valkey:9
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Run tests
|
||||
working-directory: ./backend
|
||||
env:
|
||||
PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
|
||||
PENPOT_TEST_DATABASE_USERNAME: penpot_test
|
||||
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
|
||||
PENPOT_TEST_REDIS_URI: "redis://redis/1"
|
||||
|
||||
run: |
|
||||
clojure -M:dev:test --reporter kaocha.report/documentation
|
||||
|
||||
test-library:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Library Tests"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Run tests
|
||||
working-directory: ./library
|
||||
run: |
|
||||
./scripts/test
|
||||
|
||||
build-integration:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Build Integration Bundle"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Build Bundle
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
./scripts/build
|
||||
|
||||
- name: Store Bundle Cache
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
key: "integration-bundle-${{ github.sha }}"
|
||||
path: frontend/resources/public
|
||||
|
||||
test-integration-1:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Integration Tests 1/3"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
needs: build-integration
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Restore Cache
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
key: "integration-bundle-${{ github.sha }}"
|
||||
path: frontend/resources/public
|
||||
|
||||
- name: Run Tests
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
./scripts/test-e2e --shard="1/3";
|
||||
|
||||
- name: Upload test result
|
||||
uses: actions/upload-artifact@v7
|
||||
if: always()
|
||||
with:
|
||||
name: integration-tests-result-1
|
||||
path: frontend/test-results/
|
||||
overwrite: true
|
||||
retention-days: 3
|
||||
|
||||
test-integration-2:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Integration Tests 2/3"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
needs: build-integration
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Restore Cache
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
key: "integration-bundle-${{ github.sha }}"
|
||||
path: frontend/resources/public
|
||||
|
||||
- name: Run Tests
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
./scripts/test-e2e --shard="2/3";
|
||||
|
||||
- name: Upload test result
|
||||
uses: actions/upload-artifact@v7
|
||||
if: always()
|
||||
with:
|
||||
name: integration-tests-result-2
|
||||
path: frontend/test-results/
|
||||
overwrite: true
|
||||
retention-days: 3
|
||||
|
||||
test-integration-3:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
name: "Integration Tests 3/3"
|
||||
runs-on: penpot-runner-02
|
||||
container:
|
||||
image: penpotapp/devenv:latest
|
||||
volumes:
|
||||
- /tmp/.m2:/root/.m2
|
||||
- /tmp/.gitlibs:/root/.gitlibs
|
||||
|
||||
needs: build-integration
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Restore Cache
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
key: "integration-bundle-${{ github.sha }}"
|
||||
path: frontend/resources/public
|
||||
|
||||
- name: Run Tests
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
./scripts/test-e2e --shard="3/3";
|
||||
|
||||
- name: Upload test result
|
||||
uses: actions/upload-artifact@v7
|
||||
if: always()
|
||||
with:
|
||||
name: integration-tests-result-3
|
||||
path: frontend/test-results/
|
||||
overwrite: true
|
||||
retention-days: 3
|
||||
48
.gitignore
vendored
48
.gitignore
vendored
@ -1,4 +1,10 @@
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/sdks
|
||||
!.yarn/versions
|
||||
*-init.clj
|
||||
*.css.json
|
||||
*.jar
|
||||
@ -13,14 +19,9 @@
|
||||
.nyc_output
|
||||
.rebel_readline_history
|
||||
.repl
|
||||
.shadow-cljs
|
||||
/*.jpg
|
||||
/*.md
|
||||
!CHANGES.md
|
||||
!CONTRIBUTING.md
|
||||
!README.md
|
||||
!AGENTS.md
|
||||
!CODE_OF_CONDUCT.md
|
||||
!SECURITY.md
|
||||
/*.png
|
||||
/*.svg
|
||||
/*.sql
|
||||
@ -30,13 +31,7 @@
|
||||
/.clj-kondo/.cache
|
||||
/_dump
|
||||
/notes
|
||||
/.opencode/package-lock.json
|
||||
/plans
|
||||
/prompts
|
||||
/playground/
|
||||
/backend/*.md
|
||||
!/backend/AGENTS.md
|
||||
/backend/.shadow-cljs
|
||||
/backend/*.sql
|
||||
/backend/*.txt
|
||||
/backend/assets/
|
||||
@ -46,52 +41,41 @@
|
||||
/backend/resources/public/assets
|
||||
/backend/resources/public/media
|
||||
/backend/target/
|
||||
/backend/experiments
|
||||
/backend/scripts/_env.local
|
||||
/bundle*
|
||||
/cd.md
|
||||
/clj-profiler/
|
||||
/common/coverage
|
||||
/common/target
|
||||
/common/.shadow-cljs
|
||||
/deploy
|
||||
/docker/images/bundle*
|
||||
/exporter/target
|
||||
/exporter/.shadow-cljs
|
||||
/frontend/.storybook/preview-body.html
|
||||
/frontend/.storybook/preview-head.html
|
||||
/frontend/playwright-report/
|
||||
/frontend/playwright/ui/visual-specs/
|
||||
/frontend/text-editor/src/wasm/
|
||||
/frontend/cypress/fixtures/validuser.json
|
||||
/frontend/cypress/videos/*/
|
||||
/frontend/cypress/videos/*/
|
||||
/frontend/dist/
|
||||
/frontend/npm-debug.log
|
||||
/frontend/out/
|
||||
/frontend/package-lock.json
|
||||
/frontend/resources/fonts/experiments
|
||||
/frontend/resources/public/*
|
||||
/frontend/src/app/render_wasm/api/shared.js
|
||||
/frontend/storybook-static/
|
||||
/frontend/target/
|
||||
/frontend/test-results/
|
||||
/frontend/.shadow-cljs
|
||||
/other/
|
||||
/scripts/
|
||||
/nexus/
|
||||
/telemetry/
|
||||
/tmp/
|
||||
/vendor/**/target
|
||||
/vendor/svgclean/bundle*.js
|
||||
/web
|
||||
/library/target/
|
||||
/library/*.zip
|
||||
/external
|
||||
/penpot-nitrate
|
||||
|
||||
clj-profiler/
|
||||
node_modules
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
/render-wasm/target/
|
||||
/**/node_modules
|
||||
/**/.yarn/*
|
||||
/.pnpm-store
|
||||
/.vscode
|
||||
/.idea
|
||||
/.claude
|
||||
/.playwright-mcp
|
||||
|
||||
105
.gitpod.yml
Normal file
105
.gitpod.yml
Normal file
@ -0,0 +1,105 @@
|
||||
image:
|
||||
file: docker/gitpod/Dockerfile
|
||||
|
||||
ports:
|
||||
# nginx
|
||||
- port: 3449
|
||||
onOpen: open-preview
|
||||
|
||||
# frontend nREPL
|
||||
- port: 3447
|
||||
onOpen: ignore
|
||||
visibility: private
|
||||
|
||||
# frontend shadow server
|
||||
- port: 3448
|
||||
onOpen: ignore
|
||||
visibility: private
|
||||
|
||||
# backend
|
||||
- port: 6060
|
||||
onOpen: ignore
|
||||
|
||||
# exporter shadow server
|
||||
- port: 9630
|
||||
onOpen: ignore
|
||||
visibility: private
|
||||
|
||||
# exporter http server
|
||||
- port: 6061
|
||||
onOpen: ignore
|
||||
|
||||
# mailhog web interface
|
||||
- port: 8025
|
||||
onOpen: ignore
|
||||
|
||||
# mailhog postfix
|
||||
- port: 1025
|
||||
onOpen: ignore
|
||||
|
||||
# postgres
|
||||
- port: 5432
|
||||
onOpen: ignore
|
||||
|
||||
# redis
|
||||
- port: 6379
|
||||
onOpen: ignore
|
||||
|
||||
# openldap
|
||||
- port: 389
|
||||
onOpen: ignore
|
||||
|
||||
tasks:
|
||||
# https://github.com/gitpod-io/gitpod/issues/666#issuecomment-534347856
|
||||
- name: gulp
|
||||
command: >
|
||||
cd $GITPOD_REPO_ROOT/frontend/;
|
||||
yarn && gp sync-done 'frontend-yarn';
|
||||
npx gulp --theme=${PENPOT_THEME} watch
|
||||
|
||||
- name: frontend shadow watch
|
||||
command: >
|
||||
cd $GITPOD_REPO_ROOT/frontend/;
|
||||
gp sync-await 'frontend-yarn';
|
||||
npx shadow-cljs watch main
|
||||
|
||||
- init: gp await-port 5432 && psql -f $GITPOD_REPO_ROOT/docker/gitpod/files/postgresql_init.sql
|
||||
name: backend
|
||||
command: >
|
||||
cd $GITPOD_REPO_ROOT/backend/;
|
||||
./scripts/start-dev
|
||||
|
||||
- name: exporter shadow watch
|
||||
command: >
|
||||
cd $GITPOD_REPO_ROOT/exporter/;
|
||||
gp sync-await 'frontend-yarn';
|
||||
yarn && npx shadow-cljs watch main
|
||||
|
||||
- name: exporter web server
|
||||
command: >
|
||||
cd $GITPOD_REPO_ROOT/exporter/;
|
||||
./scripts/wait-and-start.sh
|
||||
|
||||
- name: signed terminal
|
||||
before: >
|
||||
[[ ! -z ${GNUGPG} ]] &&
|
||||
cd ~ &&
|
||||
rm -rf .gnupg &&
|
||||
echo ${GNUGPG} | base64 -d | tar --no-same-owner -xzvf -
|
||||
init: >
|
||||
[[ ! -z ${GNUGPG_KEY} ]] &&
|
||||
git config --global commit.gpgsign true &&
|
||||
git config --global user.signingkey ${GNUGPG_KEY}
|
||||
command: cd $GITPOD_REPO_ROOT
|
||||
|
||||
- name: redis
|
||||
command: redis-server
|
||||
|
||||
- before: go get github.com/mailhog/MailHog
|
||||
name: mailhog
|
||||
command: MailHog
|
||||
|
||||
- name: Nginx
|
||||
command: >
|
||||
nginx &&
|
||||
multitail /var/log/nginx/access.log -I /var/log/nginx/error.log
|
||||
@ -1,33 +0,0 @@
|
||||
---
|
||||
name: commiter
|
||||
description: Git commit assistant following CONTRIBUTING.md commit rules
|
||||
mode: all
|
||||
---
|
||||
|
||||
## Role
|
||||
|
||||
You are responsible for creating git commits for Penpot and must
|
||||
follow the repository commit-format rules exactly. It should have
|
||||
concise title and clear summary of changes in the description,
|
||||
including the rationale when appropriate.
|
||||
|
||||
## Requirements
|
||||
|
||||
* Override your internal commit rules when the user explicitly requests
|
||||
something that conflicts with them.
|
||||
* Read `CONTRIBUTING.md` before creating any commit and follow the
|
||||
commit guidelines strictly.
|
||||
* Use commit messages in the form `:emoji: <imperative subject>`.
|
||||
* Keep the subject capitalized, concise, 70 characters or fewer, and
|
||||
without a trailing period.
|
||||
* Keep the description (commit body) with maximum line length of 80
|
||||
characters. Use manual line breaks to wrap text before it exceeds
|
||||
this limit.
|
||||
* Separate the subject from the body with a blank line.
|
||||
* Write a clear and concise body when needed.
|
||||
* Use `git commit -s` so the commit includes the required
|
||||
`Signed-off-by` line.
|
||||
* Do not guess or hallucinate git author information (Name or
|
||||
Email). Never include the `--author` flag in git commands unless
|
||||
specifically instructed by the user for a unique case; assume the
|
||||
local environment is already configured.
|
||||
@ -1,37 +0,0 @@
|
||||
---
|
||||
name: Penpot Engineer
|
||||
description: Senior Full-Stack Software Engineer
|
||||
mode: primary
|
||||
---
|
||||
|
||||
Role: You are a high-autonomy Senior Full-Stack Software Engineer working on
|
||||
Penpot, an open-source design tool. You have full permission to navigate the
|
||||
codebase, modify files, and execute commands to fulfill your tasks. Your goal is
|
||||
to solve complex technical tasks with high precision while maintaining a strong
|
||||
focus on maintainability and performance.
|
||||
|
||||
Tech stack: Clojure (backend), ClojureScript (frontend/exporter), Rust/WASM
|
||||
(render-wasm), TypeScript (plugins/mcp), SCSS.
|
||||
|
||||
Requirements:
|
||||
|
||||
* Read the root `AGENTS.md` to understand the repository and application
|
||||
architecture. Then read the `AGENTS.md` **only** for each affected module.
|
||||
Not all modules have one — verify before reading.
|
||||
* Before writing code, analyze the task in depth and describe your plan. If the
|
||||
task is complex, break it down into atomic steps.
|
||||
* When searching code, prefer `ripgrep` (`rg`) over `grep` — it respects
|
||||
`.gitignore` by default.
|
||||
* Do **not** touch unrelated modules unless the task explicitly requires it.
|
||||
* Only reference functions, namespaces, or APIs that actually exist in the
|
||||
codebase. Verify their existence before citing them. If unsure, search first.
|
||||
* Be concise and autonomous — avoid unnecessary explanations.
|
||||
* After making changes, run the applicable lint and format checks for the
|
||||
affected module before considering the work done (see module `AGENTS.md` for
|
||||
exact commands).
|
||||
* Make small and logical commits following the commit guideline described in
|
||||
`CONTRIBUTING.md`. Commit only when explicitly asked.
|
||||
- Do not guess or hallucinate git author information (Name or Email). Never include the
|
||||
`--author` flag in git commands unless specifically instructed by the user for a unique
|
||||
case; assume the local environment is already configured. Allow git commit to
|
||||
automatically pull the identity from the local git config `user.name` and `user.email`.
|
||||
@ -1,64 +0,0 @@
|
||||
---
|
||||
name: Penpot Planner
|
||||
description: Software architect for planning and analysis only
|
||||
mode: primary
|
||||
permission:
|
||||
edit: ask
|
||||
---
|
||||
|
||||
# Penpot Planner
|
||||
|
||||
## Role
|
||||
|
||||
You are a Senior Software Architect working on Penpot, an open-source design
|
||||
tool. Your sole responsibility is planning and analysis — you do NOT write,
|
||||
or modify any code.
|
||||
|
||||
You help users understand the codebase, design solutions, and create detailed
|
||||
implementation plans that other agents or developers can execute. Document
|
||||
everything they need to know: which files to touch for each task, code, testing,
|
||||
docs they might need to check, how to test it. Give them the whole plan as
|
||||
bite-sized tasks. DRY. YAGNI. TDD. Frequent commits.
|
||||
|
||||
Do **not** suggest commit messages or commit names anywhere in your plans or
|
||||
responses — committing is the developer's responsibility.
|
||||
|
||||
Assume they are a skilled developer, but know almost nothing about our toolset
|
||||
or problem domain. Assume they don't know good test design very well.
|
||||
|
||||
## Requirements
|
||||
|
||||
* Analyze the codebase architecture and identify affected modules.
|
||||
* Read `AGENTS.md` files (root and per-module) to understand structure and
|
||||
conventions.
|
||||
* Search code using `ripgrep` skill (`rg`) to trace dependencies, find patterns,
|
||||
and understand existing implementations.
|
||||
* Break down complex features or bugs into atomic, actionable steps.
|
||||
* Propose solutions with clear rationale, trade-offs, and sequencing.
|
||||
* Identify risks, edge cases, and testing considerations.
|
||||
|
||||
Save plans to: plans/YYYY-MM-DD-<plan-one-line-title>.md
|
||||
|
||||
## Constraints
|
||||
|
||||
* You are **read-only** — never create, edit, or delete files.
|
||||
* You do **not** run builds, tests, linters, or any commands that modify state.
|
||||
* You do **not** create git commits or interact with version control.
|
||||
* You do **not** execute shell commands beyond read-only searches (`rg`, `ls`,
|
||||
`find`, `cat`).
|
||||
* Your output is a structured plan or analysis, ready for handoff to an
|
||||
engineer agent or developer.
|
||||
|
||||
## Output format
|
||||
|
||||
When producing a plan, structure it as:
|
||||
|
||||
1. **Context** — What is the problem or feature request?
|
||||
2. **Affected modules** — Which parts of the codebase are involved?
|
||||
3. **Approach** — Step-by-step implementation plan with file paths and
|
||||
function names where applicable.
|
||||
4. **Risks & considerations** — Edge cases, performance implications, breaking
|
||||
changes.
|
||||
5. **Testing strategy** — How to verify the implementation works correctly.
|
||||
|
||||
|
||||
@ -1,59 +0,0 @@
|
||||
---
|
||||
name: Prompt Assistant
|
||||
description: Refines and improves prompts for maximum clarity and effectiveness
|
||||
mode: all
|
||||
---
|
||||
|
||||
# Prompt Assistant
|
||||
|
||||
## Role
|
||||
|
||||
You are an expert Prompt Engineer with strong knowledge of
|
||||
penpot. Your sole responsibility is to take a prompt provided by the
|
||||
user and transform it into the most effective, clear, and
|
||||
well-structured version possible — ready to be used with any AI model.
|
||||
|
||||
## Requirements
|
||||
|
||||
* You do NOT execute tasks. You do NOT write code. You only design and
|
||||
refine prompts.
|
||||
* Read the root `AGENTS.md` to understand the repository and application
|
||||
architecture. Then read the `AGENTS.md` **only** for each affected module.
|
||||
* Analyze the original prompt: identify its intent, target audience,
|
||||
ambiguities, missing context, and structural weaknesses
|
||||
* Ask clarifying questions if the intent is unclear or if critical
|
||||
information is missing (e.g. target model, expected output format,
|
||||
tone, constraints). Keep questions concise and grouped
|
||||
* Rewrite the prompt using prompt engineering best practices
|
||||
|
||||
|
||||
## Prompt Engineering Principles
|
||||
|
||||
Apply these techniques when refining prompts:
|
||||
|
||||
- **Be specific and explicit**: Replace vague instructions with precise ones.
|
||||
- **Set the context**: Include background information the model needs to
|
||||
perform well.
|
||||
- **Specify the output format**: State the desired structure, length, tone,
|
||||
or format (e.g. bullet list, JSON, step-by-step).
|
||||
- **Add constraints**: Include what the model should avoid or not do.
|
||||
- **Use examples** (few-shot): When applicable, suggest adding examples to
|
||||
anchor the model's behaviour.
|
||||
- **Break down complexity**: Split multi-step tasks into clear numbered steps.
|
||||
- **Avoid ambiguity**: Remove pronouns and references that could be
|
||||
misinterpreted.
|
||||
- **Chain of thought**: For reasoning tasks, include "Think step by step."
|
||||
|
||||
## Constraints
|
||||
|
||||
- Do NOT execute the prompt yourself.
|
||||
- Do NOT answer the question inside the prompt.
|
||||
- Do NOT add unnecessary verbosity — prompts should be as short as they can
|
||||
be while remaining complete.
|
||||
- Always preserve the user's original intent.
|
||||
|
||||
## Output
|
||||
|
||||
Refined Prompt: The improved, ready-to-use prompt. Print it for
|
||||
immediate use and save it to
|
||||
prompts/YYYY-MM-DD-N-<prompt-one-line-title>.md for future use.
|
||||
@ -1,90 +0,0 @@
|
||||
---
|
||||
name: backport-commit
|
||||
description: Port changes from a specific Git commit to the current branch by manually applying the diff, avoiding cherry-pick when it would introduce complex conflicts.
|
||||
---
|
||||
|
||||
# Backport Commit
|
||||
|
||||
Port changes from a specific Git commit to the current branch by manually
|
||||
applying the diff, avoiding `git cherry-pick` when it would introduce
|
||||
complex conflicts.
|
||||
|
||||
## When to Use
|
||||
|
||||
Use this skill whenever the user asks to backport a commit, especially when:
|
||||
|
||||
- The commit touches multiple modules or files with significant divergence
|
||||
- `git cherry-pick` is explicitly ruled out ("do not use cherry-pick")
|
||||
- The target commit is old enough that conflicts are likely
|
||||
- The commit introduces both source changes AND new files (tests, etc.)
|
||||
- You need full control over how each hunk is applied
|
||||
|
||||
## Workflow
|
||||
|
||||
### 1. Identify the target commit
|
||||
|
||||
```bash
|
||||
# Verify the commit exists and understand what it does
|
||||
git log --oneline -1 <commit-sha>
|
||||
|
||||
# Get the full diff (including new/deleted files)
|
||||
git show <commit-sha>
|
||||
|
||||
# Capture the original commit message for later reuse
|
||||
git log --format='%B' -1 <commit-sha>
|
||||
```
|
||||
|
||||
### 2. Identify affected modules
|
||||
|
||||
From the file paths in the diff, determine which Penpot modules are affected
|
||||
(frontend, backend, common, render-wasm, etc.) and read their `AGENTS.md`
|
||||
files **before** making any changes. If a module has no `AGENTS.md`, skip
|
||||
that step — verify with `ls <module>/AGENTS.md` first.
|
||||
|
||||
### 3. Read the current state of each affected file
|
||||
|
||||
For every file the diff touches, read the current version on disk to understand
|
||||
context and ensure correct placement before editing.
|
||||
|
||||
### 4. Apply changes manually (the core of this approach)
|
||||
|
||||
Process every hunk in the diff using the appropriate tool:
|
||||
|
||||
| Diff action | Tool to use |
|
||||
|-------------|-------------|
|
||||
| Modify existing file | `edit` — use enough surrounding context in `oldString` to uniquely match the location |
|
||||
| Add new file | `write` — include proper license header and namespace conventions matching project style |
|
||||
| Delete file | `bash rm <path>` |
|
||||
| Rename/move file | `bash mv <old> <new>`, then apply any content changes with `edit` |
|
||||
|
||||
> **Tip:** Group nearby hunks from the same file into a single `edit` call.
|
||||
> Use separate calls when hunks are far apart to keep `oldString` short and
|
||||
> unambiguous.
|
||||
|
||||
Repeat until **all** hunks in the diff are ported.
|
||||
|
||||
### 5. Validate
|
||||
|
||||
Run **lint**, **check-fmt**, and **tests** for every affected module (see each
|
||||
module's `AGENTS.md` for the exact commands). If the formatter auto-fixes
|
||||
indentation, verify the logic is still semantically correct. All checks must
|
||||
pass before moving on.
|
||||
|
||||
### 6. Port the changelog entry (if any)
|
||||
|
||||
If the original commit added or modified a `CHANGES.md` entry, port that entry
|
||||
too — adapting wording and version references for the target branch.
|
||||
|
||||
### 7. Commit
|
||||
|
||||
Ask the `commiter` sub-agent to create a commit. Stage all relevant files
|
||||
(exclude unrelated untracked files) and provide the original commit message as
|
||||
a reference, adapting it as needed for the target branch context.
|
||||
|
||||
## Key Principles
|
||||
|
||||
- **Context matters** — always read files before editing; never guess
|
||||
indentation or surrounding code
|
||||
- **Lint + format + test** — never skip validation before committing
|
||||
- **Preserve intent** — keep the original commit message meaning; the
|
||||
`commiter` agent handles formatting
|
||||
@ -1,210 +0,0 @@
|
||||
---
|
||||
name: bat-cat
|
||||
description: A cat clone with syntax highlighting, line numbers, and Git integration - a modern replacement for cat.
|
||||
homepage: https://github.com/sharkdp/bat
|
||||
metadata: {"clawdbot":{"emoji":"🦇","requires":{"bins":["bat"]},"install":[{"id":"brew","kind":"brew","formula":"bat","bins":["bat"],"label":"Install bat (brew)"},{"id":"apt","kind":"apt","package":"bat","bins":["bat"],"label":"Install bat (apt)"}]}}
|
||||
---
|
||||
|
||||
# bat - Better cat
|
||||
|
||||
`cat` with syntax highlighting, line numbers, and Git integration.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Basic usage
|
||||
```bash
|
||||
# View file with syntax highlighting
|
||||
bat README.md
|
||||
|
||||
# Multiple files
|
||||
bat file1.js file2.py
|
||||
|
||||
# With line numbers (default)
|
||||
bat script.sh
|
||||
|
||||
# Without line numbers
|
||||
bat -p script.sh
|
||||
```
|
||||
|
||||
### Viewing modes
|
||||
```bash
|
||||
# Plain mode (like cat)
|
||||
bat -p file.txt
|
||||
|
||||
# Show non-printable characters
|
||||
bat -A file.txt
|
||||
|
||||
# Squeeze blank lines
|
||||
bat -s file.txt
|
||||
|
||||
# Paging (auto for large files)
|
||||
bat --paging=always file.txt
|
||||
bat --paging=never file.txt
|
||||
```
|
||||
|
||||
## Syntax Highlighting
|
||||
|
||||
### Language detection
|
||||
```bash
|
||||
# Auto-detect from extension
|
||||
bat script.py
|
||||
|
||||
# Force specific language
|
||||
bat -l javascript config.txt
|
||||
|
||||
# Show all languages
|
||||
bat --list-languages
|
||||
```
|
||||
|
||||
### Themes
|
||||
```bash
|
||||
# List available themes
|
||||
bat --list-themes
|
||||
|
||||
# Use specific theme
|
||||
bat --theme="Monokai Extended" file.py
|
||||
|
||||
# Set default theme in config
|
||||
# ~/.config/bat/config: --theme="Dracula"
|
||||
```
|
||||
|
||||
## Line Ranges
|
||||
|
||||
```bash
|
||||
# Show specific lines
|
||||
bat -r 10:20 file.txt
|
||||
|
||||
# From line to end
|
||||
bat -r 100: file.txt
|
||||
|
||||
# Start to specific line
|
||||
bat -r :50 file.txt
|
||||
|
||||
# Multiple ranges
|
||||
bat -r 1:10 -r 50:60 file.txt
|
||||
```
|
||||
|
||||
## Git Integration
|
||||
|
||||
```bash
|
||||
# Show Git modifications (added/removed/modified lines)
|
||||
bat --diff file.txt
|
||||
|
||||
# Show decorations (Git + file header)
|
||||
bat --decorations=always file.txt
|
||||
```
|
||||
|
||||
## Output Control
|
||||
|
||||
```bash
|
||||
# Output raw (no styling)
|
||||
bat --style=plain file.txt
|
||||
|
||||
# Customize style
|
||||
bat --style=numbers,changes file.txt
|
||||
|
||||
# Available styles: auto, full, plain, changes, header, grid, numbers, snip
|
||||
bat --style=header,grid,numbers file.txt
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
**Quick file preview:**
|
||||
```bash
|
||||
bat file.json
|
||||
```
|
||||
|
||||
**View logs with syntax highlighting:**
|
||||
```bash
|
||||
bat error.log
|
||||
```
|
||||
|
||||
**Compare files visually:**
|
||||
```bash
|
||||
bat --diff file1.txt
|
||||
bat file2.txt
|
||||
```
|
||||
|
||||
**Preview before editing:**
|
||||
```bash
|
||||
bat config.yaml && vim config.yaml
|
||||
```
|
||||
|
||||
**Cat replacement in pipes:**
|
||||
```bash
|
||||
bat -p file.txt | grep "pattern"
|
||||
```
|
||||
|
||||
**View specific function:**
|
||||
```bash
|
||||
bat -r 45:67 script.py # If function is on lines 45-67
|
||||
```
|
||||
|
||||
## Integration with other tools
|
||||
|
||||
**As pager for man pages:**
|
||||
```bash
|
||||
export MANPAGER="sh -c 'col -bx | bat -l man -p'"
|
||||
man grep
|
||||
```
|
||||
|
||||
**With ripgrep:**
|
||||
```bash
|
||||
rg "pattern" -l | xargs bat
|
||||
```
|
||||
|
||||
**With fzf:**
|
||||
```bash
|
||||
fzf --preview 'bat --color=always --style=numbers {}'
|
||||
```
|
||||
|
||||
**With diff:**
|
||||
```bash
|
||||
diff -u file1 file2 | bat -l diff
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Create `~/.config/bat/config` for defaults:
|
||||
|
||||
```
|
||||
# Set theme
|
||||
--theme="Dracula"
|
||||
|
||||
# Show line numbers, Git modifications and file header, but no grid
|
||||
--style="numbers,changes,header"
|
||||
|
||||
# Use italic text on terminal
|
||||
--italic-text=always
|
||||
|
||||
# Add custom mapping
|
||||
--map-syntax "*.conf:INI"
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
- Use `-p` for plain mode when piping
|
||||
- Use `--paging=never` when output is used programmatically
|
||||
- `bat` caches parsed files for faster subsequent access
|
||||
|
||||
## Tips
|
||||
|
||||
- **Alias:** `alias cat='bat -p'` for drop-in cat replacement
|
||||
- **Pager:** Use as pager with `export PAGER="bat"`
|
||||
- **On Debian/Ubuntu:** Command may be `batcat` instead of `bat`
|
||||
- **Custom syntaxes:** Add to `~/.config/bat/syntaxes/`
|
||||
- **Performance:** For huge files, use `bat --paging=never` or plain `cat`
|
||||
|
||||
## Common flags
|
||||
|
||||
- `-p` / `--plain`: Plain mode (no line numbers/decorations)
|
||||
- `-n` / `--number`: Only show line numbers
|
||||
- `-A` / `--show-all`: Show non-printable characters
|
||||
- `-l` / `--language`: Set language for syntax highlighting
|
||||
- `-r` / `--line-range`: Only show specific line range(s)
|
||||
|
||||
## Documentation
|
||||
|
||||
GitHub: https://github.com/sharkdp/bat
|
||||
Man page: `man bat`
|
||||
Customization: https://github.com/sharkdp/bat#customization
|
||||
@ -1,194 +0,0 @@
|
||||
---
|
||||
name: fd-find
|
||||
description: A fast and user-friendly alternative to 'find' - simple syntax, smart defaults, respects gitignore.
|
||||
homepage: https://github.com/sharkdp/fd
|
||||
metadata: {"clawdbot":{"emoji":"📂","requires":{"bins":["fd"]},"install":[{"id":"brew","kind":"brew","formula":"fd","bins":["fd"],"label":"Install fd (brew)"},{"id":"apt","kind":"apt","package":"fd-find","bins":["fd"],"label":"Install fd (apt)"}]}}
|
||||
---
|
||||
|
||||
# fd - Fast File Finder
|
||||
|
||||
User-friendly alternative to `find` with smart defaults.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Basic search
|
||||
```bash
|
||||
# Find files by name
|
||||
fd pattern
|
||||
|
||||
# Find in specific directory
|
||||
fd pattern /path/to/dir
|
||||
|
||||
# Case-insensitive
|
||||
fd -i pattern
|
||||
```
|
||||
|
||||
### Common patterns
|
||||
```bash
|
||||
# Find all Python files
|
||||
fd -e py
|
||||
|
||||
# Find multiple extensions
|
||||
fd -e py -e js -e ts
|
||||
|
||||
# Find directories only
|
||||
fd -t d pattern
|
||||
|
||||
# Find files only
|
||||
fd -t f pattern
|
||||
|
||||
# Find symlinks
|
||||
fd -t l
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Filtering
|
||||
```bash
|
||||
# Exclude patterns
|
||||
fd pattern -E "node_modules" -E "*.min.js"
|
||||
|
||||
# Include hidden files
|
||||
fd -H pattern
|
||||
|
||||
# Include ignored files (.gitignore)
|
||||
fd -I pattern
|
||||
|
||||
# Search all (hidden + ignored)
|
||||
fd -H -I pattern
|
||||
|
||||
# Maximum depth
|
||||
fd pattern -d 3
|
||||
```
|
||||
|
||||
### Execution
|
||||
```bash
|
||||
# Execute command on results
|
||||
fd -e jpg -x convert {} {.}.png
|
||||
|
||||
# Parallel execution
|
||||
fd -e md -x wc -l
|
||||
|
||||
# Use with xargs
|
||||
fd -e log -0 | xargs -0 rm
|
||||
```
|
||||
|
||||
### Regex patterns
|
||||
```bash
|
||||
# Full regex search
|
||||
fd '^test.*\.js$'
|
||||
|
||||
# Match full path
|
||||
fd --full-path 'src/.*/test'
|
||||
|
||||
# Glob pattern
|
||||
fd -g "*.{js,ts}"
|
||||
```
|
||||
|
||||
## Time-based filtering
|
||||
```bash
|
||||
# Modified within last day
|
||||
fd --changed-within 1d
|
||||
|
||||
# Modified before specific date
|
||||
fd --changed-before 2024-01-01
|
||||
|
||||
# Created recently
|
||||
fd --changed-within 1h
|
||||
```
|
||||
|
||||
## Size filtering
|
||||
```bash
|
||||
# Files larger than 10MB
|
||||
fd --size +10m
|
||||
|
||||
# Files smaller than 1KB
|
||||
fd --size -1k
|
||||
|
||||
# Specific size range
|
||||
fd --size +100k --size -10m
|
||||
```
|
||||
|
||||
## Output formatting
|
||||
```bash
|
||||
# Absolute paths
|
||||
fd --absolute-path
|
||||
|
||||
# List format (like ls -l)
|
||||
fd --list-details
|
||||
|
||||
# Null separator (for xargs)
|
||||
fd -0 pattern
|
||||
|
||||
# Color always/never/auto
|
||||
fd --color always pattern
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
**Find and delete old files:**
|
||||
```bash
|
||||
fd --changed-before 30d -t f -x rm {}
|
||||
```
|
||||
|
||||
**Find large files:**
|
||||
```bash
|
||||
fd --size +100m --list-details
|
||||
```
|
||||
|
||||
**Copy all PDFs to directory:**
|
||||
```bash
|
||||
fd -e pdf -x cp {} /target/dir/
|
||||
```
|
||||
|
||||
**Count lines in all Python files:**
|
||||
```bash
|
||||
fd -e py -x wc -l | awk '{sum+=$1} END {print sum}'
|
||||
```
|
||||
|
||||
**Find broken symlinks:**
|
||||
```bash
|
||||
fd -t l -x test -e {} \; -print
|
||||
```
|
||||
|
||||
**Search in specific time window:**
|
||||
```bash
|
||||
fd --changed-within 2d --changed-before 1d
|
||||
```
|
||||
|
||||
## Integration with other tools
|
||||
|
||||
**With ripgrep:**
|
||||
```bash
|
||||
fd -e js | xargs rg "pattern"
|
||||
```
|
||||
|
||||
**With fzf (fuzzy finder):**
|
||||
```bash
|
||||
vim $(fd -t f | fzf)
|
||||
```
|
||||
|
||||
**With bat (cat alternative):**
|
||||
```bash
|
||||
fd -e md | xargs bat
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
- `fd` is typically much faster than `find`
|
||||
- Respects `.gitignore` by default (disable with `-I`)
|
||||
- Uses parallel traversal automatically
|
||||
- Smart case: lowercase = case-insensitive, any uppercase = case-sensitive
|
||||
|
||||
## Tips
|
||||
|
||||
- Use `-t` for type filtering (f=file, d=directory, l=symlink, x=executable)
|
||||
- `-e` for extension is simpler than `-g "*.ext"`
|
||||
- `{}` in `-x` commands represents the found path
|
||||
- `{.}` strips the extension
|
||||
- `{/}` gets basename, `{//}` gets directory
|
||||
|
||||
## Documentation
|
||||
|
||||
GitHub: https://github.com/sharkdp/fd
|
||||
Man page: `man fd`
|
||||
@ -1,230 +0,0 @@
|
||||
---
|
||||
name: gh-issue-from-pr
|
||||
description: Create a user-facing GitHub issue from a PR, separating the WHAT from the HOW, with correct milestone, project, labels, and issue type.
|
||||
---
|
||||
|
||||
# Skill: gh-issue-from-pr
|
||||
|
||||
Create a GitHub issue that captures the **WHAT** (user-facing feature or
|
||||
bug) from an existing PR that describes the **HOW** (implementation).
|
||||
Used when the project board needs an issue as the primary changelog/release unit.
|
||||
|
||||
## When to Use
|
||||
|
||||
- Create a tracking issue from a PR for changelog purposes
|
||||
- Extract the user-facing problem/feature from a PR's implementation details
|
||||
- Assign milestone, project, labels, and issue type to a new issue derived from a PR
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- `gh` CLI authenticated (`gh auth status`)
|
||||
- Permission to create issues and edit PRs in the target repository
|
||||
|
||||
## Workflow
|
||||
|
||||
### 1. Understand the PR
|
||||
|
||||
```bash
|
||||
gh pr view <PR_NUMBER> --repo penpot/penpot \
|
||||
--json title,body,author,labels,baseRefName,mergedAt,state,milestone
|
||||
```
|
||||
|
||||
Identify:
|
||||
|
||||
- **WHAT** — user-facing problem or feature. Goes into the issue.
|
||||
Describe symptoms and impact, not internal mechanisms.
|
||||
- **HOW** — implementation details. These belong in the PR, not the issue.
|
||||
|
||||
### 2. Determine metadata
|
||||
|
||||
| Field | Source | Rule |
|
||||
|-------|--------|------|
|
||||
| **Title** | PR title | Rewrite from user perspective. Strip leading emoji prefixes (`:bug:`, `:sparkles:`, `:tada:`). Focus on observable behavior. Use imperative mood. |
|
||||
| **Labels** | PR labels | Copy user-facing labels (`bug`, `enhancement`, `community contribution`). Skip workflow labels (`backport candidate`, `team-qa`). |
|
||||
| **Milestone** | PR milestone | **Always copy what's on the PR.** Fetch with: `gh pr view <PR_NUMBER> --json milestone --jq '.milestone.title'` If the PR has no milestone, create the issue without one. |
|
||||
| **Project** | Always `Main` | Penpot uses the `Main` project (number 8) for all issues. |
|
||||
| **Body** | PR's user-facing section | Extract steps to reproduce or feature description. Omit internal details. Use templates below. |
|
||||
| **Issue Type** | PR labels / title | Map: `bug` label or `:bug:` title → `Bug`. `enhancement` label or `:sparkles:` title → `Enhancement`. Feature/epic → `Feature`. Default → `Task`. |
|
||||
|
||||
### 3. Write the issue body
|
||||
|
||||
**Bug template:**
|
||||
|
||||
```markdown
|
||||
### Description
|
||||
|
||||
<what breaks, what the user experiences>
|
||||
|
||||
### Steps to reproduce
|
||||
|
||||
1. <step 1>
|
||||
2. <step 2>
|
||||
|
||||
### Expected behavior
|
||||
|
||||
<what should happen instead>
|
||||
|
||||
### Affected versions
|
||||
|
||||
<version>
|
||||
```
|
||||
|
||||
**Enhancement template:**
|
||||
|
||||
```markdown
|
||||
### Description
|
||||
|
||||
<what the user can now do that they couldn't before>
|
||||
|
||||
### Use case
|
||||
|
||||
<why this is useful, who benefits>
|
||||
|
||||
### Affected versions
|
||||
|
||||
<version>
|
||||
```
|
||||
|
||||
### 4. Create the issue
|
||||
|
||||
Write the body to a temp file to avoid shell quoting issues:
|
||||
|
||||
```bash
|
||||
cat > /tmp/issue-body.md << 'ISSUE_BODY'
|
||||
<body content here>
|
||||
ISSUE_BODY
|
||||
```
|
||||
|
||||
Create:
|
||||
|
||||
```bash
|
||||
gh issue create \
|
||||
--repo penpot/penpot \
|
||||
--title "<Title>" \
|
||||
--label "<label1>" \
|
||||
--label "<label2>" \
|
||||
--milestone "<milestone>" \
|
||||
--project "Main" \
|
||||
--body-file /tmp/issue-body.md
|
||||
```
|
||||
|
||||
Output: `https://github.com/penpot/penpot/issues/<NUMBER>`
|
||||
|
||||
### 5. Assign to the PR author
|
||||
|
||||
Assign the issue to the PR author so they're responsible for it:
|
||||
|
||||
```bash
|
||||
AUTHOR=$(gh pr view <PR_NUMBER> --repo penpot/penpot --json author --jq '.author.login')
|
||||
gh issue edit <ISSUE_NUMBER> --repo penpot/penpot --add-assignee "$AUTHOR"
|
||||
```
|
||||
|
||||
### 6. Set the Issue Type
|
||||
|
||||
`gh issue create` can't set the Issue Type directly. Use GraphQL.
|
||||
|
||||
Get the issue's GraphQL node ID:
|
||||
|
||||
```bash
|
||||
ISSUE_ID=$(gh api graphql -f query='
|
||||
query { repository(owner: "penpot", name: "penpot") {
|
||||
issue(number: <ISSUE_NUMBER>) { id }
|
||||
}}' --jq '.data.repository.issue.id')
|
||||
```
|
||||
|
||||
Issue Type IDs for the Penpot repo:
|
||||
|
||||
| Type | ID |
|
||||
|------|----|
|
||||
| Bug | `IT_kwDOAcyBPM4AX5Nb` |
|
||||
| Enhancement | `IT_kwDOAcyBPM4B_IQN` |
|
||||
| Feature | `IT_kwDOAcyBPM4AX5Nf` |
|
||||
| Task | `IT_kwDOAcyBPM4AX5NY` |
|
||||
| Question | `IT_kwDOAcyBPM4B_IQj` |
|
||||
| Docs | `IT_kwDOAcyBPM4B_IQz` |
|
||||
|
||||
Set it:
|
||||
|
||||
```bash
|
||||
gh api graphql -f query='
|
||||
mutation {
|
||||
updateIssue(input: {
|
||||
id: "'"$ISSUE_ID"'"
|
||||
issueTypeId: "<TYPE_ID>"
|
||||
}) {
|
||||
issue { number issueType { name } }
|
||||
}
|
||||
}'
|
||||
```
|
||||
|
||||
### 7. Verify
|
||||
|
||||
```bash
|
||||
gh issue view <ISSUE_NUMBER> --repo penpot/penpot \
|
||||
--json title,milestone,projectItems,labels \
|
||||
--jq '{title, milestone: .milestone.title, projects: [.projectItems[].title], labels: [.labels[].name]}'
|
||||
|
||||
gh api graphql -f query='
|
||||
query { repository(owner: "penpot", name: "penpot") {
|
||||
issue(number: <ISSUE_NUMBER>) { issueType { name } }
|
||||
}}' --jq '.data.repository.issue.issueType.name'
|
||||
```
|
||||
|
||||
### 8. Link the PR to the issue
|
||||
|
||||
Append `Fixes #<ISSUE_NUMBER>` to the PR body:
|
||||
|
||||
```bash
|
||||
gh pr view <PR_NUMBER> --repo penpot/penpot --json body --jq '.body' > /tmp/pr-body.md
|
||||
printf "\n\nFixes #<ISSUE_NUMBER>\n" >> /tmp/pr-body.md
|
||||
gh pr edit <PR_NUMBER> --repo penpot/penpot --body-file /tmp/pr-body.md
|
||||
|
||||
# Verify
|
||||
gh pr view <PR_NUMBER> --repo penpot/penpot --json body \
|
||||
--jq '.body | test("Fixes #<ISSUE_NUMBER>")'
|
||||
```
|
||||
|
||||
**Note:** If the PR is already merged, `Fixes` won't auto-close the issue
|
||||
— it only creates the "Development" sidebar link. This is the desired
|
||||
behavior since the issue is a tracking artifact.
|
||||
|
||||
### 9. Clean up
|
||||
|
||||
```bash
|
||||
rm -f /tmp/issue-body.md /tmp/pr-body.md
|
||||
```
|
||||
|
||||
## Label rules
|
||||
|
||||
| PR has | Issue gets |
|
||||
|--------|-----------|
|
||||
| `bug` | `bug` |
|
||||
| `enhancement` | `enhancement` |
|
||||
| `community contribution` | `community contribution` |
|
||||
| `backport candidate` | *(skip — workflow label)* |
|
||||
| `team-qa` | *(skip — workflow label)* |
|
||||
| No user-facing label | Infer from title: `:bug:` → `bug`, `:sparkles:` → `enhancement` |
|
||||
|
||||
## Issue Type mapping
|
||||
|
||||
| PR label(s) / title prefix | Issue Type |
|
||||
|----------------------------|-----------|
|
||||
| `bug` or `:bug:` | Bug |
|
||||
| `enhancement` or `:sparkles:` or `:tada:` | Enhancement |
|
||||
| Feature / epic | Feature |
|
||||
| Documentation | Docs |
|
||||
| None of the above | Task |
|
||||
|
||||
## Key Principles
|
||||
|
||||
- **Issue = WHAT, PR = HOW.** Never put implementation details in the
|
||||
issue body. The issue is for users, QA, and changelog readers.
|
||||
- **Copy the milestone from the PR.** Don't guess based on branch names.
|
||||
If the PR has no milestone, create the issue without one.
|
||||
- **Set Issue Type via GraphQL** — `gh issue create` can't set it.
|
||||
- **Link via PR body** — `Fixes #<NUMBER>` creates the "Development"
|
||||
sidebar link automatically.
|
||||
- **One issue per PR** — even if a PR fixes multiple things, create a
|
||||
single issue that summarizes the overall change.
|
||||
- **Community attribution:** if the PR has the `community contribution`
|
||||
label or the author is not a core team member, add the label to the issue.
|
||||
@ -1,112 +0,0 @@
|
||||
---
|
||||
name: jq-json-processor
|
||||
description: Process, filter, and transform JSON data using jq - the lightweight and flexible command-line JSON processor.
|
||||
homepage: https://jqlang.github.io/jq/
|
||||
metadata: {"clawdbot":{"emoji":"🔍","requires":{"bins":["jq"]},"install":[{"id":"brew","kind":"brew","formula":"jq","bins":["jq"],"label":"Install jq (brew)"},{"id":"apt","kind":"apt","package":"jq","bins":["jq"],"label":"Install jq (apt)"}]}}
|
||||
---
|
||||
|
||||
# jq JSON Processor
|
||||
|
||||
Process, filter, and transform JSON data with jq.
|
||||
|
||||
## Quick Examples
|
||||
|
||||
### Basic filtering
|
||||
```bash
|
||||
# Extract a field
|
||||
echo '{"name":"Alice","age":30}' | jq '.name'
|
||||
# Output: "Alice"
|
||||
|
||||
# Multiple fields
|
||||
echo '{"name":"Alice","age":30}' | jq '{name: .name, age: .age}'
|
||||
|
||||
# Array indexing
|
||||
echo '[1,2,3,4,5]' | jq '.[2]'
|
||||
# Output: 3
|
||||
```
|
||||
|
||||
### Working with arrays
|
||||
```bash
|
||||
# Map over array
|
||||
echo '[{"name":"Alice"},{"name":"Bob"}]' | jq '.[].name'
|
||||
# Output: "Alice" and "Bob" (each on its own line)
|
||||
|
||||
# Filter array
|
||||
echo '[1,2,3,4,5]' | jq 'map(select(. > 2))'
|
||||
# Output: [3,4,5]
|
||||
|
||||
# Length
|
||||
echo '[1,2,3]' | jq 'length'
|
||||
# Output: 3
|
||||
```
|
||||
|
||||
### Common operations
|
||||
```bash
|
||||
# Pretty print JSON
|
||||
cat file.json | jq '.'
|
||||
|
||||
# Compact output
|
||||
cat file.json | jq -c '.'
|
||||
|
||||
# Raw output (no quotes)
|
||||
echo '{"name":"Alice"}' | jq -r '.name'
|
||||
# Output: Alice
|
||||
|
||||
# Sort keys
|
||||
echo '{"z":1,"a":2}' | jq -S '.'
|
||||
```
|
||||
|
||||
### Advanced filtering
|
||||
```bash
|
||||
# Select with conditions
|
||||
jq '[.[] | select(.age > 25)]' people.json
|
||||
|
||||
# Group by
|
||||
jq 'group_by(.category)' items.json
|
||||
|
||||
# Reduce
|
||||
echo '[1,2,3,4,5]' | jq 'reduce .[] as $item (0; . + $item)'
|
||||
# Output: 15
|
||||
```
|
||||
|
||||
### Working with files
|
||||
```bash
|
||||
# Read from file
|
||||
jq '.users[0].name' users.json
|
||||
|
||||
# Multiple files
|
||||
jq -s '.[0] * .[1]' file1.json file2.json
|
||||
|
||||
# Modify and save
|
||||
jq '.version = "2.0"' package.json > package.json.tmp && mv package.json.tmp package.json
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
**Extract specific fields from API response:**
|
||||
```bash
|
||||
curl -s https://api.github.com/users/octocat | jq '{name: .name, repos: .public_repos, followers: .followers}'
|
||||
```
|
||||
|
||||
**Convert CSV-like data:**
|
||||
```bash
|
||||
jq -r '.[] | [.name, .email, .age] | @csv' users.json
|
||||
```
|
||||
|
||||
**Debug API responses:**
|
||||
```bash
|
||||
curl -s https://api.example.com/data | jq '.'
|
||||
```
|
||||
|
||||
## Tips
|
||||
|
||||
- Use `-r` for raw string output (removes quotes)
|
||||
- Use `-c` for compact output (single line)
|
||||
- Use `-S` to sort object keys
|
||||
- Use `--arg name value` to pass variables
|
||||
- Compose filters with jq's internal pipe instead of shelling out twice: `jq '.a | .b'` (not `jq '.a' | jq '.b'`)
|
||||
|
||||
## Documentation
|
||||
|
||||
Full manual: https://jqlang.github.io/jq/manual/
|
||||
Interactive tutorial: https://jqplay.org/
|
||||
@ -1,120 +0,0 @@
|
||||
---
|
||||
name: nrepl-eval
|
||||
description: Evaluate Clojure code via nREPL using the standalone tools/nrepl-eval.mjs CLI tool.
|
||||
---
|
||||
|
||||
# nREPL Eval
|
||||
|
||||
Evaluate Clojure (or ClojureScript) code via a running nREPL server using
|
||||
`tools/nrepl-eval.mjs` — a standalone CLI application.
|
||||
|
||||
Session state (defs, in-ns, etc.) persists across invocations via a stored
|
||||
session ID, so you can build up state incrementally.
|
||||
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
node tools/nrepl-eval.mjs [options] [<code>]
|
||||
```
|
||||
|
||||
The tool is also executable directly:
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs [options] [<code>]
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
| Flag | Description | Default |
|
||||
|------|-------------|---------|
|
||||
| `-p, --port PORT` | nREPL server port | `6064` |
|
||||
| `-H, --host HOST` | nREPL server host | `127.0.0.1` |
|
||||
| `-t, --timeout MS` | Timeout in milliseconds | `120000` |
|
||||
| `--reset-session` | Discard stored session and start fresh | — |
|
||||
| `-e, --last-error` | Evaluate `*e` to retrieve the last exception | — |
|
||||
| `-h, --help` | Show help message | — |
|
||||
|
||||
## When to Use
|
||||
|
||||
Use this tool when you need to:
|
||||
|
||||
1. **Evaluate Clojure code** during development — test functions, inspect
|
||||
state, or run experiments against a running Clojure process.
|
||||
2. **Verify that edited files compile** — require namespaces with `:reload`
|
||||
to pick up changes.
|
||||
3. **Inspect the last exception** after a failed evaluation — use `-e` to
|
||||
print the error stored in `*e`.
|
||||
|
||||
## Workflow
|
||||
|
||||
### 1. Session management
|
||||
|
||||
Sessions are persisted to `/tmp/penpot-nrepl-session-<host>-<port>`. State
|
||||
carries across calls automatically:
|
||||
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs '(def x 42)'
|
||||
./tools/nrepl-eval.mjs 'x'
|
||||
# => 42
|
||||
```
|
||||
|
||||
Reset the session to start fresh:
|
||||
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs --reset-session '(def x 0)'
|
||||
```
|
||||
|
||||
### 2. Evaluate code
|
||||
|
||||
**Single expression (inline) — uses default port 6064:**
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs '(+ 1 2 3)'
|
||||
```
|
||||
|
||||
**Multiple expressions via heredoc (recommended — avoids escaping issues):**
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs <<'EOF'
|
||||
(def x 10)
|
||||
(+ x 20)
|
||||
EOF
|
||||
```
|
||||
|
||||
**Override with a different port:**
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs -p 7888 '(+ 1 2 3)'
|
||||
```
|
||||
|
||||
### 3. Inspect last exception
|
||||
|
||||
After code throws an error, retrieve the full exception details:
|
||||
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs -e
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
**Require a namespace with reload:**
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs "(require '[my.namespace :as ns] :reload)"
|
||||
```
|
||||
|
||||
**Test a function:**
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs "(ns/my-function arg1 arg2)"
|
||||
```
|
||||
|
||||
**Long-running operation with custom timeout:**
|
||||
```bash
|
||||
./tools/nrepl-eval.mjs -t 300000 "(long-running-fn)"
|
||||
```
|
||||
|
||||
## Key Principles
|
||||
|
||||
- **Default port is 6064** — just pass code directly, no `-p` needed when
|
||||
your nREPL server is on 6064. Use `-p <PORT>` for a different port.
|
||||
- **Always use `:reload`** when requiring namespaces to pick up file changes.
|
||||
- **Session is reused** across invocations — defs, in-ns, and var bindings
|
||||
persist. Use `--reset-session` to clear.
|
||||
- **Do not start any server** — the tool connects to an existing nREPL
|
||||
server, it is not the agent's responsibility to start the nREPL server
|
||||
(assume the server is already running on the specified port).
|
||||
@ -1,150 +0,0 @@
|
||||
---
|
||||
name: ripgrep
|
||||
description: Blazingly fast text search tool - recursively searches directories for regex patterns with respect to gitignore rules.
|
||||
homepage: https://github.com/BurntSushi/ripgrep
|
||||
metadata: {"clawdbot":{"emoji":"🔎","requires":{"bins":["rg"]},"install":[{"id":"brew","kind":"brew","formula":"ripgrep","bins":["rg"],"label":"Install ripgrep (brew)"},{"id":"apt","kind":"apt","package":"ripgrep","bins":["rg"],"label":"Install ripgrep (apt)"}]}}
|
||||
---
|
||||
|
||||
# ripgrep (rg)
|
||||
|
||||
Fast, smart recursive search. Respects `.gitignore` by default.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Basic search
|
||||
```bash
|
||||
# Search for "TODO" in current directory
|
||||
rg "TODO"
|
||||
|
||||
# Case-insensitive search
|
||||
rg -i "fixme"
|
||||
|
||||
# Search specific file types
|
||||
rg "error" -t py # Python files only
|
||||
rg "function" -t js # JavaScript files
|
||||
```
|
||||
|
||||
### Common patterns
|
||||
```bash
|
||||
# Whole word match
|
||||
rg -w "test"
|
||||
|
||||
# Show only filenames
|
||||
rg -l "pattern"
|
||||
|
||||
# Show with context (3 lines before/after)
|
||||
rg -C 3 "function"
|
||||
|
||||
# Count matches
|
||||
rg -c "import"
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### File type filtering
|
||||
```bash
|
||||
# Multiple file types
|
||||
rg "error" -t py -t js
|
||||
|
||||
# Exclude file types
|
||||
rg "TODO" -T md -T txt
|
||||
|
||||
# List available types
|
||||
rg --type-list
|
||||
```
|
||||
|
||||
### Search modifiers
|
||||
```bash
|
||||
# Regex search
|
||||
rg "user_\d+"
|
||||
|
||||
# Fixed string (no regex)
|
||||
rg -F "function()"
|
||||
|
||||
# Multiline search
|
||||
rg -U "start.*end"
|
||||
|
||||
# Only show matches, not lines
|
||||
rg -o "https?://[^\s]+"
|
||||
```
|
||||
|
||||
### Path filtering
|
||||
```bash
|
||||
# Search specific directory
|
||||
rg "pattern" src/
|
||||
|
||||
# Glob patterns
|
||||
rg "error" -g "*.log"
|
||||
rg "test" -g "!*.min.js"
|
||||
|
||||
# Include hidden files
|
||||
rg "secret" --hidden
|
||||
|
||||
# Search all files (ignore .gitignore)
|
||||
rg "pattern" --no-ignore
|
||||
```
|
||||
|
||||
## Replacement Operations
|
||||
|
||||
```bash
|
||||
# Preview replacements
|
||||
rg "old_name" --replace "new_name"
|
||||
|
||||
# Actually replace (requires an external tool such as sed or sd)
|
||||
rg "old_name" -l | xargs sed -i 's/old_name/new_name/g'
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
```bash
|
||||
# Parallel search (auto by default)
|
||||
rg "pattern" -j 8
|
||||
|
||||
# Skip large files
|
||||
rg "pattern" --max-filesize 10M
|
||||
|
||||
# Memory map files
|
||||
rg "pattern" --mmap
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
**Find TODOs in code:**
|
||||
```bash
|
||||
rg "TODO|FIXME|HACK" --type-add 'code:*.{rs,go,py,js,ts}' -t code
|
||||
```
|
||||
|
||||
**Search in specific branches:**
|
||||
```bash
|
||||
git show branch:file | rg "pattern"
|
||||
```
|
||||
|
||||
**Find files containing multiple patterns:**
|
||||
```bash
|
||||
rg "pattern1" | rg "pattern2"
|
||||
```
|
||||
|
||||
**Search with context and color:**
|
||||
```bash
|
||||
rg -C 2 --color always "error" | less -R
|
||||
```
|
||||
|
||||
## Comparison to grep
|
||||
|
||||
- **Faster:** Typically 5-10x faster than grep
|
||||
- **Smarter:** Respects `.gitignore`, skips binary files
|
||||
- **Better defaults:** Recursive, colored output, line numbers
|
||||
- **Easier:** Simpler syntax for common tasks
|
||||
|
||||
## Tips
|
||||
|
||||
- `rg` is often faster than `grep -r`
|
||||
- Use `-t` for file type filtering instead of `--include`
|
||||
- Combine with other tools: `rg pattern -l | xargs tool`
|
||||
- Add custom types in `~/.ripgreprc`
|
||||
- Use `--stats` to see search performance
|
||||
|
||||
## Documentation
|
||||
|
||||
GitHub: https://github.com/BurntSushi/ripgrep
|
||||
User Guide: https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md
|
||||
@ -1,110 +0,0 @@
|
||||
---
|
||||
name: taiga
|
||||
description: Fetch information from Taiga public API for the Penpot project (id 345963) — issues, user stories, and tasks, without authentication.
|
||||
metadata: {"clawdbot":{"requires":{"bins":["python3"]}}}
|
||||
---
|
||||
|
||||
# Taiga API Skill
|
||||
|
||||
Fetch information from Taiga public API for the **Penpot** project
|
||||
(project id: `345963`, slug: `penpot`).
|
||||
|
||||
**No authentication required** — only public project data is accessed.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- `python3` — the `tools/taiga.py` CLI script is self-contained (stdlib only)
|
||||
|
||||
## Quick Start
|
||||
|
||||
The easiest way is to use the bundled Python script:
|
||||
|
||||
```bash
|
||||
# Pass a Taiga URL directly
|
||||
python3 tools/taiga.py https://tree.taiga.io/project/penpot/issue/13714
|
||||
|
||||
# Or use "<type> <ref>" syntax
|
||||
python3 tools/taiga.py us 14128
|
||||
python3 tools/taiga.py task 13648
|
||||
|
||||
# Add --json for raw output
|
||||
python3 tools/taiga.py --json issue 13714
|
||||
|
||||
# See full usage
|
||||
python3 tools/taiga.py --help
|
||||
```
|
||||
|
||||
## URL Pattern Reference
|
||||
|
||||
Taiga web URLs follow these patterns:
|
||||
|
||||
| Type | Web URL Pattern |
|
||||
|------|----------------|
|
||||
| Issue | `https://tree.taiga.io/project/penpot/issue/<REF>` |
|
||||
| User Story | `https://tree.taiga.io/project/penpot/us/<REF>` |
|
||||
| Task | `https://tree.taiga.io/project/penpot/task/<REF>` |
|
||||
|
||||
To extract the **type** and **ref** from a URL:
|
||||
- `issue/13714` → type=`issue`, ref=`13714`
|
||||
- `us/14128` → type=`us`, ref=`14128`
|
||||
- `task/13648` → type=`task`, ref=`13648`
|
||||
|
||||
## Python Script Reference
|
||||
|
||||
The `tools/taiga.py` script wraps the Taiga API into a single convenient CLI
|
||||
with sensible defaults.
|
||||
|
||||
### Usage
|
||||
|
||||
```
|
||||
python3 tools/taiga.py <taiga-url>
|
||||
python3 tools/taiga.py <type> <ref>
|
||||
python3 tools/taiga.py [--json] <taiga-url>
|
||||
python3 tools/taiga.py [--json] <type> <ref>
|
||||
```
|
||||
|
||||
### Examples
|
||||
|
||||
```bash
|
||||
# By URL (recommended — no need to think about type/ref)
|
||||
python3 tools/taiga.py https://tree.taiga.io/project/penpot/issue/13714
|
||||
|
||||
# By type and ref
|
||||
python3 tools/taiga.py us 14128
|
||||
python3 tools/taiga.py task 13648
|
||||
|
||||
# Raw JSON output
|
||||
python3 tools/taiga.py --json issue 13714
|
||||
```
|
||||
|
||||
### Output
|
||||
|
||||
The script prints a clean, structured summary:
|
||||
|
||||
```text
|
||||
User Story #11964 — 🔴 [DESIGN TOKENS] Typography Composite Input
|
||||
================================
|
||||
Status: Defining
|
||||
Milestone: design-systems-sprint-26
|
||||
Points: 3 role(s)
|
||||
Assignee: Natacha Menjibar
|
||||
Author: Natacha Menjibar
|
||||
Created: 2025-09-01
|
||||
Tags: iop-design-tokens
|
||||
URL: https://tree.taiga.io/project/penpot/us/11964
|
||||
================================
|
||||
<full description text, unmodified>
|
||||
```
|
||||
|
||||
The fields section includes type-specific information:
|
||||
- **Issues:** Status, Type ID, Severity ID, Priority ID
|
||||
- **User Stories:** Status, Milestone, Points
|
||||
- **Tasks:** Status, Milestone, Parent US
|
||||
|
||||
## Reference
|
||||
|
||||
- API docs: https://docs.taiga.io/api.html
|
||||
- Taiga instance: https://tree.taiga.io
|
||||
- API base: https://api.taiga.io/api/v1
|
||||
- Penpot project id: `345963`
|
||||
- Penpot project slug: `penpot`
|
||||
@ -1,232 +0,0 @@
|
||||
---
|
||||
name: update-changelog
|
||||
description: Update the project CHANGES.md with issues from a given GitHub milestone, with correct categorization and references.
|
||||
---
|
||||
|
||||
# Skill: update-changelog
|
||||
|
||||
Update `CHANGES.md` with entries for all issues and PRs in a given GitHub
|
||||
milestone. Each entry references the user-facing issue (not the PR) as the
|
||||
primary link, with the fix PR inline on the same line.
|
||||
|
||||
## When to Use
|
||||
|
||||
- Before a new release, to populate the changelog with all fixed issues
|
||||
- When new issues are added to an existing milestone and the changelog needs
|
||||
to be refreshed
|
||||
- To ensure every entry follows the correct format for the changelog
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- `gh` CLI authenticated (`gh auth status`)
|
||||
- Python 3.8+
|
||||
- `tools/gh.py` helper script available
|
||||
|
||||
## Workflow
|
||||
|
||||
### 1. Determine the target version
|
||||
|
||||
The version is typically a semver string like `2.15.3`. Confirm with the user
|
||||
if not specified.
|
||||
|
||||
### 2. Fetch all issues in the milestone
|
||||
|
||||
Use the helper script. It uses GraphQL for efficient single-pass fetching
|
||||
(closing PRs are included in the same query — no N+1):
|
||||
|
||||
```bash
|
||||
# All closed issues (default)
|
||||
python3 tools/gh.py issues "2.16.0"
|
||||
|
||||
# Include open issues too
|
||||
python3 tools/gh.py issues "2.16.0" --state all
|
||||
|
||||
# Exclude entries that should not go in the changelog
|
||||
python3 tools/gh.py issues "2.16.0" --exclude "release blocker,no changelog"
|
||||
```
|
||||
|
||||
**Label exclusion rules:**
|
||||
- `release blocker` — Internal release-blocking bugs not relevant to end users
|
||||
- `no changelog` — Chore/refactor work that doesn't need a changelog entry
|
||||
|
||||
The script outputs JSON with each entry containing `number`, `title`, `state`,
|
||||
`labels`, and `closing_prs` (the PRs that fix each issue).
|
||||
|
||||
### 3. Identify missing entries (optional)
|
||||
|
||||
If updating from an existing `CHANGES.md`, find issues in the milestone that
|
||||
are NOT yet referenced in the changelog:
|
||||
|
||||
```bash
|
||||
python3 tools/gh.py issues "2.16.0" --exclude "release blocker,no changelog" --compare CHANGES.md
|
||||
```
|
||||
|
||||
This returns a filtered JSON array with only the missing issues.
|
||||
|
||||
### 4. Fetch additional PR details when needed
|
||||
|
||||
When you need more context for specific PRs (e.g. to find the PR author for
|
||||
community contribution attribution, or to read the PR body for
|
||||
"Fixes/Closes #NNN" patterns):
|
||||
|
||||
```bash
|
||||
# One or more PR numbers
|
||||
python3 tools/gh.py prs 9179 9204 9311
|
||||
|
||||
# From a file
|
||||
python3 tools/gh.py prs --file prs.txt
|
||||
|
||||
# From stdin
|
||||
cat prs.txt | python3 tools/gh.py prs --stdin
|
||||
```
|
||||
|
||||
The `prs` command returns JSON with `number`, `title`, `body`, `state`,
|
||||
`merged_at`, `author`, `labels`, and `closing_issues`. PRs are fetched in
|
||||
batches of 50 via GraphQL to stay within API limits.
|
||||
|
||||
### 5. Categorize entries
|
||||
|
||||
Check the labels on each issue to determine which section it belongs to:
|
||||
|
||||
| Label / Title prefix | Changelog section |
|
||||
|----------------------|-------------------|
|
||||
| `bug` label or `:bug:` title prefix | `### :bug: Bugs fixed` |
|
||||
| `enhancement` label or `:sparkles:` prefix | `### :sparkles: New features & Enhancements` |
|
||||
| No label | Infer from title convention, default to bug fix |
|
||||
|
||||
**Community contribution attribution:** If the issue or its fix PR has the
|
||||
`community contribution` label, add an attribution `(by @<github_username>)`
|
||||
on the changelog entry line, **before** the GitHub issue/PR references.
|
||||
|
||||
The attribution should reference the **PR author**, not the issue author.
|
||||
The `prs` subcommand includes the `author` field — use that:
|
||||
|
||||
```bash
|
||||
python3 tools/gh.py prs <PR_NUMBER> | python3 -c "import sys,json; print(json.load(sys.stdin)[0]['author'])"
|
||||
```
|
||||
|
||||
Placement in the entry line:
|
||||
```markdown
|
||||
- Fix description of the bug (by @username) [#<ISSUE>](...) (PR: [#<PR>](...))
|
||||
```
|
||||
|
||||
**Only closed issues are included.** An issue must have `state: "closed"` to
|
||||
appear in the changelog. Open/unresolved issues are omitted, even if they are
|
||||
tracked in the milestone.
|
||||
|
||||
**Pairing rules:**
|
||||
|
||||
| Pattern | Changelog format |
|
||||
|---------|-----------------|
|
||||
| Closed issue + one or more PRs fix it | Primary link = issue, PR inline comma-separated |
|
||||
| PR exists with no linked issue | If a corresponding closed issue exists in the same milestone, link the issue. Otherwise, skip the entry (the issue must be the changelog unit). |
|
||||
| Closed issue with no fix PR in milestone | Link the issue directly, without a PR reference. |
|
||||
|
||||
### 6. Read the current CHANGES.md
|
||||
|
||||
Read the top of `CHANGES.md` to understand the existing format and find the
|
||||
insertion point (newest version goes at the top, after the `# CHANGELOG`
|
||||
header).
|
||||
|
||||
Key format rules from the existing file:
|
||||
|
||||
```markdown
|
||||
## <VERSION>
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix description of the bug [#<ISSUE>](https://github.com/penpot/penpot/issues/<ISSUE>) (PR: [#<PR>](https://github.com/penpot/penpot/pull/<PR>))
|
||||
- Fix another bug (by @contributor) [#<ISSUE>](https://github.com/penpot/penpot/issues/<ISSUE>) (PR: [#<PR>](https://github.com/penpot/penpot/pull/<PR>))
|
||||
|
||||
### :sparkles: New features & Enhancements
|
||||
|
||||
- Add new feature description [#<ISSUE>](https://github.com/penpot/penpot/issues/<ISSUE>) (PR: [#<PR>](https://github.com/penpot/penpot/pull/<PR>))
|
||||
```
|
||||
|
||||
Format details:
|
||||
- Entries start with `- ` followed by a short description in imperative mood
|
||||
- Primary link is **always the issue** (user-facing artifact)
|
||||
- PR references are inline on the same line: `(PR: [#<N>](<url>))`
|
||||
If an issue has multiple fix PRs, they are comma-separated:
|
||||
`(PR: [#<N>](<url>), [#<M>](<url>))`
|
||||
- The description should describe the fix/feature from the user's perspective
|
||||
- Community contributions get `(by @<username>)` **before** the issue link
|
||||
- Sections are separated by a blank line between the last entry and the next
|
||||
section title
|
||||
- Only include a section if there are entries for it
|
||||
- When an entry already exists in an earlier version section, it must be removed
|
||||
from the current version to avoid duplicates
|
||||
|
||||
### 7. Build the description text
|
||||
|
||||
Derive the description from the issue title, not the PR title. Strip leading
|
||||
emoji prefixes (`:bug:`, `:sparkles:`, `:tada:`) and focus on the
|
||||
user-facing behavior.
|
||||
|
||||
Examples:
|
||||
|
||||
| Issue title | Changelog description |
|
||||
|-------------|----------------------|
|
||||
| `Plugin API token methods fail with schema validation error on PRO` | `Fix Plugin API token methods failing with schema validation error on PRO` |
|
||||
| `Comment content is not sanitized before rendering, enabling stored XSS` | `Sanitize comment content on rendering` |
|
||||
| `Custom uploaded font family names are not sanitized` | `Sanitize font family names on custom uploaded fonts` |
|
||||
|
||||
### 8. Insert the section into CHANGES.md
|
||||
|
||||
Insert the new version section right after the `# CHANGELOG` header (before
|
||||
the previous version entry). Use the `edit` tool with enough context to make
|
||||
a unique match.
|
||||
|
||||
### 9. Verify
|
||||
|
||||
Read the top of `CHANGES.md` and confirm:
|
||||
- The version header is correct
|
||||
- Every entry has a GitHub link
|
||||
- Entries with a fix PR have the PR sub-line
|
||||
- The section ordering is correct (newest first)
|
||||
- Formatting matches the surrounding entries
|
||||
|
||||
## Version section template
|
||||
|
||||
```markdown
|
||||
## <VERSION>
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- <fix description> [#<ISSUE>](https://github.com/penpot/penpot/issues/<ISSUE>) (PR: [#<PR>](https://github.com/penpot/penpot/pull/<PR>))
|
||||
- <fix description> (by @contributor) [#<ISSUE>](https://github.com/penpot/penpot/issues/<ISSUE>) (PR: [#<PR>](https://github.com/penpot/penpot/pull/<PR>))
|
||||
```
|
||||
|
||||
## Key Principles
|
||||
|
||||
- **Issue = changelog unit.** The primary link always points to the
|
||||
user-facing issue, not the implementation PR.
|
||||
- **PR = implementation detail.** Reference the PR inline so readers
|
||||
can find the code changes.
|
||||
- **Latest version first.** New sections are inserted at the top of the
|
||||
changelog, below the `# CHANGELOG` header.
|
||||
- **User-facing descriptions.** Write from the user's perspective — describe
|
||||
what broke and what was fixed, not internal implementation details.
|
||||
- **Community attribution.** When the issue or fix PR has the
|
||||
`community contribution` label, add `(by @<username>)` on the entry line
|
||||
between the description and the issue link. Use the **PR author** (not the
|
||||
issue author) for the attribution.
|
||||
- **Only closed issues.** An issue must have `state: "closed"` to appear in
|
||||
the changelog. Open unresolved issues are omitted.
|
||||
- **Excluded labels.** Issues with `release blocker` or `no changelog` labels
|
||||
must be excluded from the changelog.
|
||||
- **Multiple PRs per issue.** If multiple PRs fix the same issue, list them
|
||||
comma-separated inline: `(PR: [#A](url), [#B](url))`.
|
||||
- **Duplicate removal.** If an entry already exists in a prior version section,
|
||||
remove it from the current version. Check for text-level duplicates (after
|
||||
stripping links and attributions) across version sections.
|
||||
- **Taiga references.** If a changelog entry references a Taiga URL
|
||||
(`tree.taiga.io`), attempt to find a corresponding GitHub issue via the
|
||||
Taiga description text or by searching GitHub PRs that reference the Taiga
|
||||
URL. Replace the Taiga reference with the GitHub issue link and add the PR
|
||||
reference if applicable.
|
||||
- **Re-fetch before editing.** Milestones can change — always re-fetch issues
|
||||
before making edits, don't rely on cached data.
|
||||
- **Use `tools/gh.py`.** Prefer the helper script over raw `gh api` calls for
|
||||
milestone issue listing and PR detail fetching. It handles GraphQL
|
||||
pagination, batching, and label filtering automatically.
|
||||
2
.serena/.gitignore
vendored
2
.serena/.gitignore
vendored
@ -1,2 +0,0 @@
|
||||
/cache
|
||||
/project.local.yml
|
||||
@ -1,32 +0,0 @@
|
||||
# Creating Commits
|
||||
|
||||
## Message Format
|
||||
|
||||
```
|
||||
:emoji: Subject line (imperative, capitalized, no period, ≤70 chars)
|
||||
|
||||
Body (clear, concise description)
|
||||
|
||||
Co-authored-by: <You (the LLM)>
|
||||
```
|
||||
|
||||
## Commit Type Emojis
|
||||
|
||||
`:bug:` bug fix · `:sparkles:` enhancement · `:tada:` new feature · `:recycle:` refactor · `:lipstick:` cosmetic · `:ambulance:` critical fix · `:books:` docs · `:construction:` WIP · `:boom:` breaking · `:wrench:` config · `:zap:` perf · `:whale:` docker · `:paperclip:` other · `:arrow_up:` dep upgrade · `:arrow_down:` dep downgrade · `:fire:` removal · `:globe_with_meridians:` translations · `:rocket:` epic/highlight
|
||||
|
||||
## Changelog (CHANGES.md)
|
||||
|
||||
Update `CHANGES.md` for user-facing or notable changes. Add entry under the current unreleased version in the matching section (`### :boom:`, `### :sparkles:`, `### :bug:`, etc.).
|
||||
|
||||
Entry format:
|
||||
```
|
||||
- Description of change [Taiga #NNNN](https://tree.taiga.io/project/penpot/us/NNNN)
|
||||
```
|
||||
or for GitHub issues/PRs:
|
||||
```
|
||||
- Description of change [Github #NNNN](https://github.com/penpot/penpot/issues/NNNN)
|
||||
```
|
||||
|
||||
Changes that affect the JavaScript plugin API must additionally be documented in `plugins/CHANGELOG.md`:
|
||||
* Add an entry at the top of the file (unreleased section)
|
||||
* Prefix entries that change the types/signatures in the API with `**plugin-types:**` and changes affecting the runtime with `**plugin-runtime:**`.
|
||||
@ -1,30 +0,0 @@
|
||||
# Creating Pull Requests
|
||||
|
||||
Important: Before creating a PR, ensure that you are on a branch that is specific to the
|
||||
issue or feature you are working on. If necessary, create a new branch.
|
||||
|
||||
## Title Format
|
||||
|
||||
PR titles follow the same convention as commit titles:
|
||||
|
||||
```
|
||||
:emoji: Subject line (imperative, capitalized, no period, ≤70 chars)
|
||||
```
|
||||
|
||||
See the `creating-commits` memory for the list of emoji codes.
|
||||
|
||||
## Description Format
|
||||
|
||||
The PR description must start with the following notice:
|
||||
|
||||
> **Note:** This PR was created with AI assistance as part of the Penpot MCP self-improvement initiative.
|
||||
|
||||
**Related Issues** section with a bullet list of linked issues:
|
||||
|
||||
```
|
||||
In addition to sections summarising and explaining the changes in the PR, it should contain a section 'Related Issues' with a bullet list:
|
||||
|
||||
- Fixes #NNNN
|
||||
- Resolves #NNNN
|
||||
- Relates to #NNNN
|
||||
```
|
||||
@ -1,27 +0,0 @@
|
||||
You are working on the GitHub project penpot/penpot.
|
||||
|
||||
# Working with Penpot Designs via the JavaScript API
|
||||
|
||||
Before working with Penpot designs, call the `high_level_overview` tool of the Penpot MCP server.
|
||||
It explains the API, which you can use to automate tasks via the `execute_code` tool.
|
||||
|
||||
# Dev Workflow
|
||||
|
||||
Memories:
|
||||
- before creating a commit, read `creating-commits`.
|
||||
- before creating a PR, read `creating-prs`.
|
||||
|
||||
# Frontend
|
||||
|
||||
Read the file `frontend/AGENTS.md` for an overview.
|
||||
Memories:
|
||||
- connection between the JavaScript API and the ClojureScript code: `frontend/js-api-to-cljs-binding`.
|
||||
- executing ClojureScript code in the frontend: `frontend/cljs-repl`.
|
||||
- programmatically navigating to a file in the workspace: `frontend/navigation`.
|
||||
- handling Clojure compiler errors, runtime patching and debug helpers: `frontend/handling-errors-and-debugging`.
|
||||
|
||||
## Detecting Crashes
|
||||
|
||||
The Penpot frontend can crash silently from the JS API's perspective: `execute_code` calls return successfully, but 1-2s later the workspace becomes unusable (Internal Error page).
|
||||
The `execute_code` tool then stops working, but `cljs_repl` still works. Use it to detect a crash via `(some? (:exception @app.main.store/state))`.
|
||||
For details on handling crashes, read memory `frontend/handling-crashes`.
|
||||
@ -1,76 +0,0 @@
|
||||
# ClojureScript REPL Access via shadow-cljs
|
||||
|
||||
Execute code in the REPL via the Penpot MCP's `cljs_repl` tool.
|
||||
|
||||
## Accessing App State
|
||||
|
||||
The main store is `app.main.store/state`. It contains workspace metadata, selection, UI state, etc.
|
||||
However, **page objects are NOT in the main store atom**. They live behind derived refs.
|
||||
|
||||
### Top-level store keys (subset)
|
||||
`:current-page-id`, `:current-file-id`, `:workspace-local`, `:workspace-global`,
|
||||
`:workspace-trimmed-page`, `:workspace-undo`, `:workspace-guides`, `:workspace-layout`,
|
||||
`:workspace-presence`, `:workspace-ready`, `:profile`, `:route`, etc.
|
||||
|
||||
**Notable absence:** There is no `:workspace-data` key in the store. The old path
|
||||
`(get-in state [:workspace-data :pages-index page-id :objects])` does NOT work.
|
||||
|
||||
### Getting page objects — use `app.main.refs/workspace-page-objects`
|
||||
```clojure
|
||||
;; This is a derived ref (reactive lens). Deref it directly:
|
||||
(let [objects @app.main.refs/workspace-page-objects
|
||||
shape (get objects (parse-uuid "some-uuid-here"))]
|
||||
(select-keys shape [:name :type :x :y :width :height :fills :strokes :rotation :opacity :frame-id :parent-id]))
|
||||
```
|
||||
|
||||
### Getting the current selection
|
||||
```clojure
|
||||
;; Selection is in the main store under :workspace-local :selected
|
||||
(let [state @app.main.store/state
|
||||
selected (get-in state [:workspace-local :selected])]
|
||||
(mapv str selected))
|
||||
;; Returns vector of UUID strings for selected shapes
|
||||
```
|
||||
|
||||
### Other useful store access
|
||||
```clojure
|
||||
;; Current page id
|
||||
(:current-page-id @app.main.store/state)
|
||||
|
||||
;; Verify state is accessible
|
||||
(some? @app.main.store/state) ;; should be true
|
||||
|
||||
;; workspace-local keys: :zoom :selected :hide-toolbar :last-selected :vbox
|
||||
;; :highlighted :vport :expanded :selrect :zoom-inverse
|
||||
```
|
||||
|
||||
### Shape data structure (internal ClojureScript representation)
|
||||
Shape keys use kebab-case keywords (`:fill-color`, `:fill-opacity`, `:parent-id`, `:frame-id`).
|
||||
The shape `:type` is a keyword like `:rect`, `:path`, `:text`, `:ellipse`, `:image`, `:bool`, `:svg-raw`, `:frame`, `:group`.
|
||||
Note `:rect` in CLJS corresponds to "rectangle" in the JS Plugin API, and `:frame` corresponds to "board".
|
||||
|
||||
Component instance shapes additionally carry `:component-id` and `:component-file` directly, and `:component-root` flags the root of an instance. To navigate from a shape to its component, use `app.common.types.container/get-head-shape` (nearest head) or `get-instance-root` (outermost root) — these differ when instances are nested.
|
||||
|
||||
### Helper utilities (`app.plugins.utils`)
|
||||
Despite living under `plugins/`, these are general-purpose lookup helpers usable from any CLJS:
|
||||
- `locate-shape` — find a shape by file-id, page-id, id
|
||||
- `locate-objects` — get the object tree for a page
|
||||
- `locate-component` — resolve the component for a shape (walks to **outermost** instance root, not nearest head — beware when instances are nested)
|
||||
- `locate-library-component` — direct lookup by file-id and component-id
|
||||
- `locate-file` — look up a file by id from state
|
||||
|
||||
## Notes
|
||||
- The `:main` build has multiple modules: shared, main, main-workspace, rasterizer, etc.
|
||||
- `app.main.store/state` is a potok store (wrapping an okulary atom) created via `defonce`
|
||||
- Use `timeout` to avoid hanging if the browser is disconnected
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
`cljs_repl` may not connect to the right runtime when several are attached (e.g. workspace tab + rasterizer). Verify with `(.-title js/document)` — it should show your file name, not "Penpot - Rasterizer".
|
||||
|
||||
To list runtimes or target one by client-id, use `npx shadow-cljs clj-eval` from `/home/penpot/penpot/frontend`. It talks to the shadow-cljs JVM process, so unlike `cljs_repl` it has access to `shadow.cljs.devtools.api`:
|
||||
|
||||
```bash
|
||||
printf '(shadow.cljs.devtools.api/repl-runtimes :main)\n' | timeout 10 npx shadow-cljs clj-eval --stdin
|
||||
printf '(shadow.cljs.devtools.api/cljs-eval :main "<cljs-code>" {:client-id 5})\n' | timeout 10 npx shadow-cljs clj-eval --stdin
|
||||
```
|
||||
@ -1,41 +0,0 @@
|
||||
# Handling Penpot Frontend Crashes
|
||||
|
||||
When the Penpot frontend crashes, it usually shows the **Internal Error** page (title text "Something bad happened", class `main_ui_static__download-link`).
|
||||
|
||||
A typical error pattern is: Changes go through (JS API, `execute_code`), but about 1-2s later, an `update-file` request hits the backend with the change and gets rejected.
|
||||
So be sure to check the status for a crash.
|
||||
|
||||
After a crash, `execute_code` is unusable (no instances connected), and any data in `storage` is lost, but `cljs_repl` keeps working!
|
||||
|
||||
## 1. Detect the crash
|
||||
|
||||
cljs REPL `(some? (:exception @app.main.store/state))` returns `true` when the Internal Error page is showing,
|
||||
`false` on a healthy workspace (and after a successful reload).
|
||||
|
||||
## 2. Read the cause
|
||||
|
||||
The exception is stored at `(:exception @app.main.store/state)`. Useful keys:
|
||||
|
||||
- `:type`, `:code`, `:status` — error class (e.g. `:validation` / `:referential-integrity` / `400`)
|
||||
- `:hint`, `:details` — human-readable explanation; `:details` typically contains a vector of validation problems with `:shape-id`, `:page-id`, `:args`, etc.
|
||||
- `:uri` — the API endpoint that returned the error (e.g. `update-file`)
|
||||
- `:app.main.errors/instance` — the underlying JS Error object
|
||||
- `:app.main.errors/trace` — JS stack trace string (only shows the response-handling path, not the dispatch site that produced the bad change)
|
||||
|
||||
```
|
||||
(let [ex (:exception @app.main.store/state)]
|
||||
(select-keys ex [:type :code :status :hint :details :uri]))
|
||||
```
|
||||
|
||||
For backend validation errors (`:type :validation`), `:details` is the most informative field — it tells you exactly which shape and which invariant was violated.
|
||||
|
||||
## 3. Recover and continue testing
|
||||
|
||||
Reload steps:
|
||||
1. List tabs with `playwright:browser_tabs` (`action: list`) and find the Penpot workspace tab (URL contains `/#/workspace`, title ends in `- Penpot`).
|
||||
2. If it isn't the current tab, select it via `playwright:browser_tabs` (`action: select`, `index: <n>`). The selected tab's URL then appears as "Page URL" in the result.
|
||||
3. Reload by calling `playwright:browser_navigate` with that same URL.
|
||||
4. Confirm recovery: `(some? (:exception @app.main.store/state))` should now return `false`.
|
||||
|
||||
Whether the offending change persists depends on the crash type:
|
||||
For **backend-rejected changes** (e.g. `:type :validation`, 4xx on `update-file`), changes are NOT persisted. Reload restores the pre-crash state — safe to retry.
|
||||
@ -1,49 +0,0 @@
|
||||
# Handling Errors and Debugging
|
||||
|
||||
## Finding source errors
|
||||
|
||||
You have access to two tools for finding errors in Clojure source code (which you may introduce yourself through edits):
|
||||
|
||||
1. cljs_compiler_output
|
||||
2. clj_check_parentheses
|
||||
|
||||
The latter is needed because syntax errors in parentheses give an uninformative compiler error, and the second
|
||||
tool can often find the exact location of such errors.
|
||||
|
||||
## Runtime patching with `set!`
|
||||
|
||||
Some frontend vars are deliberately mutable escape hatches for runtime instrumentation or circular-dependency patching.
|
||||
From `cljs_repl`, use `set!` for temporary debugging of CLJS vars such as
|
||||
`app.main.store/on-event`, `app.main.errors/reload-file`, `app.main.errors/is-plugin-error?`,
|
||||
`app.main.errors/last-report`, or `app.main.errors/last-exception`.
|
||||
These patches affect only the live browser runtime and disappear on reload or recompilation.
|
||||
|
||||
```clojure
|
||||
;; Log non-noisy Potok events temporarily.
|
||||
(set! app.main.store/on-event
|
||||
(fn [event]
|
||||
(when (potok.v2.core/event? event)
|
||||
(.log js/console (potok.v2.core/repr-event event)))))
|
||||
```
|
||||
|
||||
Restore mutable hooks after debugging, or reload the frontend. Use JVM `alter-var-root` only for JVM Clojure;
|
||||
it is not the normal way to patch live CLJS browser vars.
|
||||
|
||||
## Browser-console debug namespace
|
||||
|
||||
In development, the JS console exposes `debug` helpers from `frontend/src/debug.cljs`:
|
||||
|
||||
```javascript
|
||||
debug.set_logging("namespace", "debug");
|
||||
debug.dump_state();
|
||||
debug.dump_buffer();
|
||||
debug.get_state(":workspace-local :selected");
|
||||
debug.dump_objects();
|
||||
debug.dump_object("Rect-1");
|
||||
debug.dump_selected();
|
||||
debug.dump_tree(true, true);
|
||||
```
|
||||
|
||||
Visual workspace debug overlays can be toggled with `debug.toggle_debug("bounding-boxes")`, `"group"`, `"events"`, or `"rotation-handler"`; `debug.debug_all()` and `debug.debug_none()` toggle all visual aids.
|
||||
|
||||
For temporary source traces, prefer existing logging (`app.common.logging` / `app.util.logging`) or short-lived `prn`, `app.common.pprint/pprint`, `js/console.log`, or `js-debugger` calls. Remove temporary source instrumentation before committing.
|
||||
@ -1,25 +0,0 @@
|
||||
# How the Plugin JS API connects to ClojureScript
|
||||
|
||||
## Type Definitions
|
||||
- `plugins/libs/plugin-types/index.d.ts` contains TypeScript type declarations (e.g. `ShapeBase`, `LibraryComponent`).
|
||||
- These are **type-only** — no runtime code. The actual objects are constructed in ClojureScript.
|
||||
|
||||
## Runtime Shape Proxy
|
||||
- `frontend/src/app/plugins/shape.cljs` builds the JS shape proxy via `obj/reify`.
|
||||
- Each method/property from the TS interface (e.g. `:component`, `:isComponentRoot`, `:componentHead`) is defined as a keyword entry in the `obj/reify` form, with a ClojureScript function as the implementation.
|
||||
- The proxy is created by the `shape-proxy` function, which takes `plugin-id`, `file-id`, `page-id`, and shape `id`, and closes over them.
|
||||
|
||||
## Library Proxies
|
||||
- `frontend/src/app/plugins/library.cljs` defines proxies for library types like `LibraryComponentProxy` (via `lib-component-proxy`), also using `obj/reify`.
|
||||
- The proxy satisfies the `LibraryComponent` TS interface, exposing `.id`, `.name`, `.path`, etc.
|
||||
|
||||
## Circular Dependency Resolution
|
||||
- `shape.cljs` and `library.cljs` have circular dependencies (shapes reference library component proxies and vice versa).
|
||||
- `shape.cljs` declares forward references as mutable `def nil` vars (e.g. `(def lib-component-proxy nil)`, line 144).
|
||||
- `frontend/src/app/plugins.cljs` patches them at load time: `(set! shape/lib-component-proxy library/lib-component-proxy)`.
|
||||
- Same pattern for `lib-typography-proxy?` and `variant-proxy`.
|
||||
|
||||
## Key Domain Namespaces
|
||||
- `app.common.types.component` (aliased `ctk`) — component predicates: `instance-root?`, `instance-head?`, `in-component-copy?`, `is-variant?`
|
||||
- `app.common.types.container` (aliased `ctn`) — container/tree operations: `in-any-component?`, `get-instance-root`, `get-head-shape`, `inside-component-main?`
|
||||
- `app.common.types.file` (aliased `ctf`) — file-level operations: `resolve-component`, `get-ref-shape`
|
||||
@ -1,14 +0,0 @@
|
||||
# Navigating to a File in the Workspace
|
||||
|
||||
To programmatically open a file in the workspace, use `cljs_repl` with:
|
||||
```clojure
|
||||
(do (require '[app.main.data.common :as dcm])
|
||||
(app.main.store/emit! (dcm/go-to-workspace
|
||||
:team-id (parse-uuid "<team-id>")
|
||||
:file-id (parse-uuid "<file-id>")
|
||||
:page-id (parse-uuid "<page-id>"))))
|
||||
```
|
||||
**All three IDs are required.** You can get:
|
||||
- `team-id` from `(:current-team-id @app.main.store/state)`
|
||||
- `file-id` from the dashboard files: `(vals (:files @app.main.store/state))`
|
||||
- `page-id` by fetching the file: `(get-in file-data [:data :pages])` via `(rp/cmd! :get-file {:id file-id :features (get @app.main.store/state :features)})`
|
||||
@ -1,141 +0,0 @@
|
||||
# the name by which the project can be referenced within Serena
|
||||
project_name: "penpot"
|
||||
|
||||
|
||||
# list of languages for which language servers are started; choose from:
|
||||
# al ansible bash clojure cpp
|
||||
# cpp_ccls crystal csharp csharp_omnisharp dart
|
||||
# elixir elm erlang fortran fsharp
|
||||
# go groovy haskell haxe hlsl
|
||||
# java json julia kotlin lean4
|
||||
# lua luau markdown matlab msl
|
||||
# nix ocaml pascal perl php
|
||||
# php_phpactor powershell python python_jedi python_ty
|
||||
# r rego ruby ruby_solargraph rust
|
||||
# scala solidity swift systemverilog terraform
|
||||
# toml typescript typescript_vts vue yaml
|
||||
# zig
|
||||
# (This list may be outdated. For the current list, see values of Language enum here:
|
||||
# https://github.com/oraios/serena/blob/main/src/solidlsp/ls_config.py
|
||||
# For some languages, there are alternative language servers, e.g. csharp_omnisharp, ruby_solargraph.)
|
||||
# Note:
|
||||
# - For C, use cpp
|
||||
# - For JavaScript, use typescript
|
||||
# - For Free Pascal/Lazarus, use pascal
|
||||
# Special requirements:
|
||||
# Some languages require additional setup/installations.
|
||||
# See here for details: https://oraios.github.io/serena/01-about/020_programming-languages.html#language-servers
|
||||
# When using multiple languages, the first language server that supports a given file will be used for that file.
|
||||
# The first language is the default language and the respective language server will be used as a fallback.
|
||||
# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored.
|
||||
languages:
|
||||
- clojure
|
||||
- typescript
|
||||
- rust
|
||||
|
||||
# the encoding used by text files in the project
|
||||
# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
|
||||
encoding: "utf-8"
|
||||
|
||||
# line ending convention to use when writing source files.
|
||||
# Possible values: unset (use global setting), "lf", "crlf", or "native" (platform default)
|
||||
# This does not affect Serena's own files (e.g. memories and configuration files), which always use native line endings.
|
||||
line_ending:
|
||||
|
||||
# The language backend to use for this project.
|
||||
# If not set, the global setting from serena_config.yml is used.
|
||||
# Valid values: LSP, JetBrains
|
||||
# Note: the backend is fixed at startup. If a project with a different backend
|
||||
# is activated post-init, an error will be returned.
|
||||
language_backend:
|
||||
|
||||
# whether to use project's .gitignore files to ignore files
|
||||
ignore_all_files_in_gitignore: true
|
||||
|
||||
# advanced configuration option allowing to configure language server-specific options.
|
||||
# Maps the language key to the options.
|
||||
# Have a look at the docstring of the constructors of the LS implementations within solidlsp (e.g., for C# or PHP) to see which options are available.
|
||||
# No documentation on options means no options are available.
|
||||
ls_specific_settings: {}
|
||||
|
||||
# list of additional paths to ignore in this project.
|
||||
# Same syntax as gitignore, so you can use * and **.
|
||||
# Note: global ignored_paths from serena_config.yml are also applied additively.
|
||||
ignored_paths: []
|
||||
|
||||
# whether the project is in read-only mode
|
||||
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
|
||||
# Added on 2025-04-18
|
||||
read_only: false
|
||||
|
||||
# list of tool names to exclude.
|
||||
# This extends the existing exclusions (e.g. from the global configuration)
|
||||
# Find the list of tools here: https://oraios.github.io/serena/01-about/035_tools.html
|
||||
excluded_tools: []
|
||||
|
||||
# list of tools to include that would otherwise be disabled (particularly optional tools that are disabled by default).
|
||||
# This extends the existing inclusions (e.g. from the global configuration).
|
||||
# Find the list of tools here: https://oraios.github.io/serena/01-about/035_tools.html
|
||||
included_optional_tools: []
|
||||
|
||||
# fixed set of tools to use as the base tool set (if non-empty), replacing Serena's default set of tools.
|
||||
# This cannot be combined with non-empty excluded_tools or included_optional_tools.
|
||||
# Find the list of tools here: https://oraios.github.io/serena/01-about/035_tools.html
|
||||
fixed_tools: []
|
||||
|
||||
# list of mode names to that are always to be included in the set of active modes
|
||||
# The full set of modes to be activated is base_modes + default_modes.
|
||||
# If the setting is undefined, the base_modes from the global configuration (serena_config.yml) apply.
|
||||
# Otherwise, this setting overrides the global configuration.
|
||||
# Set this to [] to disable base modes for this project.
|
||||
# Set this to a list of mode names to always include the respective modes for this project.
|
||||
base_modes:
|
||||
|
||||
# list of mode names that are to be activated by default, overriding the setting in the global configuration.
|
||||
# The full set of modes to be activated is base_modes (from global config) + default_modes + added_modes.
|
||||
# If the setting is undefined/empty, the default_modes from the global configuration (serena_config.yml) apply.
|
||||
# Otherwise, this overrides the setting from the global configuration (serena_config.yml).
|
||||
# Therefore, you can set this to [] if you do not want the default modes defined in the global config to apply
|
||||
# for this project.
|
||||
# This setting can, in turn, be overridden by CLI parameters (--mode).
|
||||
# See https://oraios.github.io/serena/02-usage/050_configuration.html#modes
|
||||
default_modes:
|
||||
|
||||
# initial prompt for the project. It will always be given to the LLM upon activating the project
|
||||
# (contrary to the memories, which are loaded on demand).
|
||||
initial_prompt: |
|
||||
CRITICAL: Always read the memory `critical-info` before you do anything else.
|
||||
|
||||
# time budget (seconds) per tool call for the retrieval of additional symbol information
|
||||
# such as docstrings or parameter information.
|
||||
# This overrides the corresponding setting in the global configuration; see the documentation there.
|
||||
# If null or missing, use the setting from the global configuration.
|
||||
symbol_info_budget:
|
||||
|
||||
# list of regex patterns which, when matched, mark a memory entry as read‑only.
|
||||
# Extends the list from the global configuration, merging the two lists.
|
||||
read_only_memory_patterns: []
|
||||
|
||||
# list of regex patterns for memories to completely ignore.
|
||||
# Matching memories will not appear in list_memories or activate_project output
|
||||
# and cannot be accessed via read_memory or write_memory.
|
||||
# To access ignored memory files, use the read_file tool on the raw file path.
|
||||
# Extends the list from the global configuration, merging the two lists.
|
||||
# Example: ["_archive/.*", "_episodes/.*"]
|
||||
ignored_memory_patterns: []
|
||||
|
||||
# list of mode names to be activated additionally for this project, e.g. ["query-projects"]
|
||||
# The full set of modes to be activated is base_modes (from global config) + default_modes + added_modes.
|
||||
# See https://oraios.github.io/serena/02-usage/050_configuration.html#modes
|
||||
added_modes:
|
||||
|
||||
# list of additional workspace folder paths for cross-package reference support (e.g. in monorepos).
|
||||
# Paths can be absolute or relative to the project root.
|
||||
# Each folder is registered as an LSP workspace folder, enabling language servers to discover
|
||||
# symbols and references across package boundaries.
|
||||
# Currently supported for: TypeScript.
|
||||
# Example:
|
||||
# additional_workspace_folders:
|
||||
# - ../sibling-package
|
||||
# - ../shared-lib
|
||||
additional_workspace_folders: []
|
||||
40
.travis.yml
Normal file
40
.travis.yml
Normal file
@ -0,0 +1,40 @@
|
||||
dist: xenial
|
||||
|
||||
language: generic
|
||||
sudo: required
|
||||
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.m2
|
||||
|
||||
services:
|
||||
- docker
|
||||
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
- develop
|
||||
|
||||
install:
|
||||
- curl -O https://download.clojure.org/install/linux-install-1.10.1.447.sh
|
||||
- chmod +x linux-install-1.10.1.447.sh
|
||||
- sudo ./linux-install-1.10.1.447.sh
|
||||
|
||||
before_script:
|
||||
- env | sort
|
||||
|
||||
script:
|
||||
- ./manage.sh build-devenv
|
||||
- ./manage.sh run-frontend-tests
|
||||
- ./manage.sh run-backend-tests
|
||||
- ./manage.sh build-images
|
||||
- ./manage.sh run
|
||||
|
||||
after_script:
|
||||
- docker images
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
|
||||
env:
|
||||
- NODE_VERSION=10.16.0
|
||||
9
.vscode/settings.json
vendored
Normal file
9
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
{
|
||||
"files.exclude": {
|
||||
"**/.clj-kondo": true,
|
||||
"**/.cpcache": true,
|
||||
"**/.lsp": true,
|
||||
"**/.shadow-cljs": true,
|
||||
"**/node_modules": true
|
||||
}
|
||||
}
|
||||
11
.yarnrc.yml
Normal file
11
.yarnrc.yml
Normal file
@ -0,0 +1,11 @@
|
||||
enableGlobalCache: true
|
||||
|
||||
enableImmutableCache: false
|
||||
|
||||
enableImmutableInstalls: false
|
||||
|
||||
enableTelemetry: false
|
||||
|
||||
httpTimeout: 600000
|
||||
|
||||
nodeLinker: node-modules
|
||||
104
AGENTS.md
104
AGENTS.md
@ -1,104 +0,0 @@
|
||||
# AI Agent Guide
|
||||
|
||||
This document provides the core context and operating guidelines for AI agents
|
||||
working in this repository.
|
||||
|
||||
## Before You Start
|
||||
|
||||
Before responding to any user request, you must:
|
||||
|
||||
1. Read this file completely.
|
||||
2. Identify which modules are affected by the task.
|
||||
3. Load the `AGENTS.md` file **only** for each affected module (see the
|
||||
architecture table below). Not all modules have an `AGENTS.md` — verify the
|
||||
file exists before attempting to read it.
|
||||
4. Do **not** load `AGENTS.md` files for unrelated modules.
|
||||
|
||||
## Role: Senior Software Engineer
|
||||
|
||||
You are a high-autonomy Senior Full-Stack Software Engineer. You have full
|
||||
permission to navigate the codebase, modify files, and execute commands to
|
||||
fulfill your tasks. Your goal is to solve complex technical tasks with high
|
||||
precision while maintaining a strong focus on maintainability and performance.
|
||||
|
||||
### Operational Guidelines
|
||||
|
||||
1. Before writing code, describe your plan. If the task is complex, break it
|
||||
down into atomic steps.
|
||||
2. Be concise and autonomous.
|
||||
3. Do **not** touch unrelated modules unless the task explicitly requires it.
|
||||
4. Commit only when explicitly asked. Follow the commit format rules in
|
||||
`CONTRIBUTING.md`.
|
||||
5. When searching code, prefer `ripgrep` (`rg`) over `grep` — it respects
|
||||
`.gitignore` by default.
|
||||
|
||||
## Changelogs
|
||||
|
||||
The project has two changelogs:
|
||||
|
||||
- **Main project changelog**: `CHANGES.md` (root of the repository). Tracks changes for the core Penpot application (backend, frontend, common, render-wasm, exporter, mcp).
|
||||
- **Plugins changelog**: `plugins/CHANGELOG.md`. Tracks changes for the plugins subproject only.
|
||||
|
||||
When making changes, add a changelog entry to the appropriate file under the
|
||||
`## <version> (Unreleased)` section in the correct category
|
||||
(`:sparkles: New features & Enhancements` or `:bug: Bugs fixed`).
|
||||
|
||||
## GitHub Operations
|
||||
|
||||
To obtain the list of repository members/collaborators:
|
||||
|
||||
```bash
|
||||
gh api repos/:owner/:repo/collaborators --paginate --jq '.[].login'
|
||||
```
|
||||
|
||||
To obtain the list of open PRs authored by members:
|
||||
|
||||
```bash
|
||||
MEMBERS=$(gh api repos/:owner/:repo/collaborators --paginate --jq '.[].login' | tr '\n' '|' | sed 's/|$//')
|
||||
gh pr list --state open --limit 200 --json author,title,number | jq -r --arg members "$MEMBERS" '
|
||||
($members | split("|")) as $m |
|
||||
.[] | select(.author.login as $a | $m | index($a)) |
|
||||
"\(.number)\t\(.author.login)\t\(.title)"
|
||||
'
|
||||
```
|
||||
|
||||
To obtain the list of open PRs from external contributors (non-members):
|
||||
|
||||
```bash
|
||||
MEMBERS=$(gh api repos/:owner/:repo/collaborators --paginate --jq '.[].login' | tr '\n' '|' | sed 's/|$//')
|
||||
gh pr list --state open --limit 200 --json author,title,number | jq -r --arg members "$MEMBERS" '
|
||||
($members | split("|")) as $m |
|
||||
.[] | select(.author.login as $a | $m | index($a) | not) |
|
||||
"\(.number)\t\(.author.login)\t\(.title)"
|
||||
'
|
||||
```
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Penpot is an open-source design tool composed of several modules:
|
||||
|
||||
| Directory | Language | Purpose | Has `AGENTS.md` |
|
||||
|-----------|----------|---------|:----------------:|
|
||||
| `frontend/` | ClojureScript + SCSS | Single-page React app (design editor) | Yes |
|
||||
| `backend/` | Clojure (JVM) | HTTP/RPC server, PostgreSQL, Redis | Yes |
|
||||
| `common/` | Cljc (shared Clojure/ClojureScript) | Data types, geometry, schemas, utilities | Yes |
|
||||
| `render-wasm/` | Rust -> WebAssembly | High-performance canvas renderer (Skia) | Yes |
|
||||
| `exporter/` | ClojureScript (Node.js) | Headless Playwright-based export (SVG/PDF) | No |
|
||||
| `mcp/` | TypeScript | Model Context Protocol integration | No |
|
||||
| `plugins/` | TypeScript | Plugin runtime and example plugins | No |
|
||||
|
||||
Some submodules use `pnpm` workspaces. The root `package.json` and
|
||||
`pnpm-lock.yaml` manage shared dependencies. Helper scripts live in `scripts/`.
|
||||
|
||||
### Module Dependency Graph
|
||||
|
||||
```
|
||||
frontend ──> common
|
||||
backend ──> common
|
||||
exporter ──> common
|
||||
frontend ──> render-wasm (loads compiled WASM)
|
||||
```
|
||||
|
||||
`common` is referenced as a local dependency (`{:local/root "../common"}`) by
|
||||
both `frontend` and `backend`. Changes to `common` can therefore affect multiple
|
||||
modules — test across consumers when modifying shared code.
|
||||
946
CHANGES.md
946
CHANGES.md
File diff suppressed because it is too large
Load Diff
402
CONTRIBUTING.md
402
CONTRIBUTING.md
@ -1,305 +1,213 @@
|
||||
# Contributing Guide
|
||||
# Contributing Guide #
|
||||
|
||||
Thank you for your interest in contributing to Penpot. This guide covers
|
||||
how to propose changes, submit fixes, and follow project conventions.
|
||||
Thank you for your interest in contributing to Penpot. This is a
|
||||
generic guide that details how to contribute to the project in a way that
|
||||
is efficient for everyone. If you are looking for specific documentation on
|
||||
different parts of the platform, please refer to the `docs/` directory,
|
||||
or the rendered version at the [Help Center](https://help.penpot.app/).
|
||||
|
||||
For architecture details, module-specific guidelines, and AI-agent
|
||||
instructions, see [AGENTS.md](AGENTS.md). For final user technical
|
||||
documentation, see the `docs/` directory or the rendered [Help
|
||||
Center](https://help.penpot.app/).
|
||||
## Reporting Bugs ##
|
||||
|
||||
## Table of Contents
|
||||
We are using [GitHub Issues](https://github.com/penpot/penpot/issues)
|
||||
for our public bugs. We keep a close eye on them and try to make it
|
||||
clear when we have an internal fix in progress. Before filing a new
|
||||
task, try to make sure your problem doesn't already exist.
|
||||
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Reporting Bugs](#reporting-bugs)
|
||||
- [Pull Requests](#pull-requests)
|
||||
- [Workflow](#workflow)
|
||||
- [Title format](#title-format)
|
||||
- [Description](#description)
|
||||
- [Branch naming](#branch-naming)
|
||||
- [Review process](#review-process)
|
||||
- [What we won't accept](#what-we-wont-accept)
|
||||
- [Good first issues](#good-first-issues)
|
||||
- [Commit Guidelines](#commit-guidelines)
|
||||
- [Commit types](#commit-types)
|
||||
- [Rules](#rules)
|
||||
- [Examples](#examples)
|
||||
- [Formatting and Linting](#formatting-and-linting)
|
||||
- [Changelog](#changelog)
|
||||
- [Code of Conduct](#code-of-conduct)
|
||||
- [Developer's Certificate of Origin (DCO)](#developers-certificate-of-origin-dco)
|
||||
If you found a bug, please report it, as far as possible, with:
|
||||
|
||||
## Prerequisites
|
||||
- a detailed explanation of steps to reproduce the error
|
||||
- the browser and browser version used
|
||||
- a dev tools console exception stack trace (if available)
|
||||
|
||||
- **Language**: Penpot is written primarily in Clojure (backend), ClojureScript
|
||||
(frontend/exporter), and Rust (render-wasm). Familiarity with the Clojure
|
||||
ecosystem is expected for most contributions.
|
||||
- **Issue tracker**: We use [GitHub Issues](https://github.com/penpot/penpot/issues)
|
||||
for public bugs and [Taiga](https://tree.taiga.io/project/penpot/) for
|
||||
internal project management. Changelog entries reference both.
|
||||
If you found a bug which you think is better to discuss in private (for
|
||||
example, security bugs), consider first sending an email to
|
||||
`support@penpot.app`.
|
||||
|
||||
## Reporting Bugs
|
||||
**We don't have a formal bug bounty program for security reports; this
|
||||
is an open source application, and your contribution will be recognized
|
||||
in the changelog.**
|
||||
|
||||
Report bugs via [GitHub Issues](https://github.com/penpot/penpot/issues).
|
||||
Before filing, search existing issues to avoid duplicates.
|
||||
|
||||
Include the following when possible:
|
||||
## Pull Requests ##
|
||||
|
||||
1. Steps to reproduce the error.
|
||||
2. Browser and browser version used.
|
||||
3. DevTools console exception stack trace (if available).
|
||||
If you want to propose a change or bug fix via a pull request (PR),
|
||||
you should first carefully read the section **Developer's Certificate of
|
||||
Origin**. You must also format your code and commits according to the
|
||||
instructions below.
|
||||
|
||||
For security bugs or issues better discussed in private, email
|
||||
`support@penpot.app` or report them on [Github Security
|
||||
Advisories](https://github.com/penpot/penpot/security/advisories)
|
||||
If you intend to fix a bug, it's fine to submit a pull request right
|
||||
away, but we still recommend filing an issue detailing what you're
|
||||
fixing. This is helpful in case we don't accept that specific fix but
|
||||
want to keep track of the issue.
|
||||
|
||||
> **Note:** We do not have a formal bug bounty program. Security
|
||||
> contributions are recognized in the changelog.
|
||||
If you want to implement or start working on a new feature, please
|
||||
open a **question*- / **discussion*- issue for it. No PR
|
||||
will be accepted without a prior discussion about the changes,
|
||||
whether it is a new feature, an already planned one, or a quick win.
|
||||
|
||||
## Pull Requests
|
||||
If it is your first PR, you can learn how to proceed from
|
||||
[this free video
|
||||
series](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github)
|
||||
|
||||
### Workflow
|
||||
We use the `easy fix` tag to indicate issues that are appropriate for beginners.
|
||||
|
||||
1. **Read the DCO** — see [Developer's Certificate of Origin](#developers-certificate-of-origin-dco)
|
||||
below. All code patches must include a `Signed-off-by` line.
|
||||
2. **Discuss before building** — open a [GitHub
|
||||
Issue](https://github.com/penpot/penpot/issues) before starting work on
|
||||
a new feature or significant change. For planned features on the roadmap,
|
||||
reference the corresponding Taiga story. Do not expect your contribution
|
||||
to be accepted if you submit it without prior discussion — this applies
|
||||
to new features, planned features, and quick wins alike.
|
||||
3. **Bug fixes** — you may submit a PR directly, but we still recommend
|
||||
filing an issue first so we can track it independently of your fix.
|
||||
4. **Format and lint** — run the checks described in
|
||||
[Formatting and Linting](#formatting-and-linting) before submitting.
|
||||
## Commit Guidelines ##
|
||||
|
||||
### Title format
|
||||
We have very precise rules on how our git commit messages must be formatted.
|
||||
|
||||
Pull request titles **must** follow the same convention as commit subjects:
|
||||
The commit message format is:
|
||||
|
||||
```
|
||||
:emoji: <subject>
|
||||
```
|
||||
|
||||
- Use the **imperative mood** (e.g. "Fix", not "Fixed").
|
||||
- Capitalize the first letter of the subject.
|
||||
- Do not end the subject with a period.
|
||||
- Keep the subject to **70 characters** or fewer.
|
||||
- Use one of the [commit type emojis](#commit-types) listed below.
|
||||
|
||||
When a PR contains multiple unrelated commits, choose the emoji that
|
||||
best represents the dominant change.
|
||||
|
||||
**Examples:**
|
||||
|
||||
```
|
||||
:bug: Fix unexpected error on launching modal
|
||||
:sparkles: Enable new modal for profile
|
||||
:zap: Improve performance of dashboard navigation
|
||||
```
|
||||
|
||||
> **Note:** When a PR is squash-merged, the PR title becomes the
|
||||
> commit message on the main branch. Getting the title right matters.
|
||||
|
||||
### Description
|
||||
|
||||
Every pull request should include a description that helps reviewers
|
||||
understand the change quickly:
|
||||
|
||||
1. **What and why** — describe the change and its motivation.
|
||||
2. **Link related issues** — use `Closes #1234` or reference a Taiga
|
||||
story (e.g. `Taiga #5678`).
|
||||
3. **Screenshots or recordings** — required for any UI-visible change.
|
||||
4. **Testing notes** — how did you verify the change? Any edge cases?
|
||||
5. **Breaking changes** — call out anything that affects existing users
|
||||
or requires migration steps.
|
||||
|
||||
### Branch naming
|
||||
|
||||
Use a descriptive branch name that reflects the type and scope of the
|
||||
change:
|
||||
|
||||
```
|
||||
<type>/<short-description>
|
||||
```
|
||||
|
||||
Types: `fix`, `feat`, `refactor`, `docs`, `chore`, `perf`.
|
||||
|
||||
Optionally include the issue number:
|
||||
|
||||
```
|
||||
fix/9122-email-blacklisting
|
||||
feat/export-webp
|
||||
refactor/layout-sizing
|
||||
```
|
||||
|
||||
### Review process
|
||||
|
||||
- We are a small team and maintainers juggle reviews alongside other
|
||||
tasks. Please do not expect your code to be reviewed instantly.
|
||||
- Reviews are handled in dedicated blocks of time, usually in the order
|
||||
PRs arrive. It may take a few days to get a first review, especially
|
||||
when urgent tasks come up.
|
||||
- Address review feedback by **pushing new commits** — do not
|
||||
force-push during review, as it breaks comment threads.
|
||||
- PRs require at least **one approval** before merge.
|
||||
- We use **squash-merge** by default. The PR title becomes the final
|
||||
commit message, so follow the [title format](#title-format) above.
|
||||
|
||||
### What we won't accept
|
||||
|
||||
To save time on both sides, please avoid submitting PRs that:
|
||||
|
||||
- Introduce new dependencies without prior discussion.
|
||||
- Change the build system or CI configuration without maintainer
|
||||
approval.
|
||||
- Mix unrelated changes in a single PR — keep PRs focused on one
|
||||
concern.
|
||||
- Skip the [discussion step](#workflow) for non-bug-fix changes.
|
||||
|
||||
### Good first issues
|
||||
|
||||
We use the `good first issue` label to mark issues appropriate for newcomers.
|
||||
|
||||
## Commit Guidelines
|
||||
|
||||
Commit messages must follow this format:
|
||||
|
||||
```
|
||||
:emoji: <subject>
|
||||
<type> <subject>
|
||||
|
||||
[body]
|
||||
|
||||
[footer]
|
||||
```
|
||||
|
||||
### Commit types
|
||||
Where type is:
|
||||
|
||||
| Emoji | Description |
|
||||
| ---------------------- | -------------------------- |
|
||||
| :bug: | Bug fix |
|
||||
| :sparkles: | Improvement or enhancement |
|
||||
| :tada: | New feature |
|
||||
| :recycle: | Refactor |
|
||||
| :lipstick: | Cosmetic changes |
|
||||
| :ambulance: | Critical bug fix |
|
||||
| :books: | Documentation |
|
||||
| :construction: | Work in progress |
|
||||
| :boom: | Breaking change |
|
||||
| :wrench: | Configuration update |
|
||||
| :zap: | Performance improvement |
|
||||
| :whale: | Docker-related change |
|
||||
| :paperclip: | Other non-relevant changes |
|
||||
| :arrow_up: | Dependency update |
|
||||
| :arrow_down: | Dependency downgrade |
|
||||
| :fire: | Removal of code or files |
|
||||
| :globe_with_meridians: | Add or update translations |
|
||||
| :rocket: | Epic or highlight |
|
||||
- :bug: `:bug:` a commit that fixes a bug
|
||||
- :sparkles: `:sparkles:` a commit that adds an improvement
|
||||
- :tada: `:tada:` a commit with a new feature
|
||||
- :recycle: `:recycle:` a commit that introduces a refactor
|
||||
- :lipstick: `:lipstick:` a commit with cosmetic changes
|
||||
- :ambulance: `:ambulance:` a commit that fixes a critical bug
|
||||
- :books: `:books:` a commit that improves or adds documentation
|
||||
- :construction: `:construction:` a WIP commit
|
||||
- :boom: `:boom:` a commit with breaking changes
|
||||
- :wrench: `:wrench:` a commit for config updates
|
||||
- :zap: `:zap:` a commit with performance improvements
|
||||
- :whale: `:whale:` a commit for Docker-related stuff
|
||||
- :paperclip: `:paperclip:` a commit with other non-relevant changes
|
||||
- :arrow_up: `:arrow_up:` a commit with dependency updates
|
||||
- :arrow_down: `:arrow_down:` a commit with dependency downgrades
|
||||
- :fire: `:fire:` a commit that removes files or code
|
||||
- :globe_with_meridians: `:globe_with_meridians:` a commit that adds or updates
|
||||
translations
|
||||
|
||||
### Rules
|
||||
More info:
|
||||
|
||||
- Use the **imperative mood** in the subject (e.g. "Fix", not "Fixed")
|
||||
- Capitalize the first letter of the subject
|
||||
- Add clear and concise description on the body
|
||||
- Do not end the subject with a period
|
||||
- Keep the subject to **70 characters** or fewer
|
||||
- Separate the subject from the body with a **blank line**
|
||||
- https://gist.github.com/parmentf/035de27d6ed1dce0b36a
|
||||
- https://gist.github.com/rxaviers/7360908
|
||||
|
||||
### Examples
|
||||
Each commit should have:
|
||||
|
||||
```
|
||||
:bug: Fix unexpected error on launching modal
|
||||
:sparkles: Enable new modal for profile
|
||||
:zap: Improve performance of dashboard navigation
|
||||
:ambulance: Fix critical bug on user registration process
|
||||
:tada: Add new approach for user registration
|
||||
```
|
||||
- A concise subject using the imperative mood.
|
||||
- The subject should capitalize the first letter, omit the period
|
||||
at the end, and be no longer than 65 characters.
|
||||
- A blank line between the subject line and the body.
|
||||
- An entry in the CHANGES.md file if applicable, referencing the
|
||||
GitHub or Taiga issue/user story using these same rules.
|
||||
|
||||
## Formatting and Linting
|
||||
Examples of good commit messages:
|
||||
|
||||
We use [cljfmt](https://github.com/weavejester/cljfmt) for formatting and
|
||||
[clj-kondo](https://github.com/clj-kondo/clj-kondo) for linting.
|
||||
- `:bug: Fix unexpected error on launching modal`
|
||||
- `:bug: Set proper error message on generic error`
|
||||
- `:sparkles: Enable new modal for profile`
|
||||
- `:zap: Improve performance of dashboard navigation`
|
||||
- `:wrench: Update default backend configuration`
|
||||
- `:books: Add more documentation for authentication process`
|
||||
- `:ambulance: Fix critical bug on user registration process`
|
||||
- `:tada: Add new approach for user registration`
|
||||
|
||||
## Formatting and Linting ##
|
||||
|
||||
You will want to make sure your code is formatted and linted before submitting
|
||||
a PR. We use [cljfmt](https://github.com/weavejester/cljfmt) and
|
||||
[clj-kondo](https://github.com/clj-kondo/clj-kondo) for this. After installing
|
||||
them on your system, you can run them with:
|
||||
|
||||
```bash
|
||||
# Check formatting (does not modify files)
|
||||
./scripts/check-fmt
|
||||
# Check formatting
|
||||
yarn fmt:clj:check
|
||||
|
||||
# Fix formatting (modifies files in place)
|
||||
./scripts/fmt
|
||||
# Check and fix formatting
|
||||
yarn fmt:clj
|
||||
|
||||
# Lint
|
||||
./scripts/lint
|
||||
# Run the linter
|
||||
yarn lint:clj
|
||||
```
|
||||
|
||||
For frontend SCSS, we use `stylelint` for linting and
|
||||
`Prettier` for formatting:
|
||||
There are more choices in `package.json`.
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
Ideally, you should run these commands as git pre-commit hooks. A convenient way
|
||||
of defining them is to use [Husky](https://typicode.github.io/husky/#/).
|
||||
|
||||
# Lint SCSS
|
||||
pnpm run lint:scss (does not modify files)
|
||||
## Code of Conduct ##
|
||||
|
||||
# Fix SCSS formatting (modifies files in place)
|
||||
pnpm run fmt:scss
|
||||
```
|
||||
As contributors and maintainers of this project, we pledge to respect
|
||||
all people who contribute through reporting issues, posting feature
|
||||
requests, updating documentation, submitting pull requests or patches,
|
||||
and other activities.
|
||||
|
||||
Ideally, run these as git pre-commit hooks.
|
||||
[Husky](https://typicode.github.io/husky/#/) is a convenient option for
|
||||
setting this up.
|
||||
We are committed to making participation in this project a
|
||||
harassment-free experience for everyone, regardless of level of
|
||||
experience, gender, gender identity and expression, sexual
|
||||
orientation, disability, personal appearance, body size, race,
|
||||
ethnicity, age, or religion.
|
||||
|
||||
## Changelog
|
||||
Examples of unacceptable behavior by participants include the use of
|
||||
sexual language or imagery, derogatory comments or personal attacks,
|
||||
trolling, public or private harassment, insults, or other
|
||||
unprofessional conduct.
|
||||
|
||||
When your change is user-facing or otherwise notable, add an entry to
|
||||
[CHANGES.md](CHANGES.md) following the same commit-type conventions. Reference
|
||||
the relevant GitHub issue or Taiga user story.
|
||||
Project maintainers have the right and responsibility to remove, edit,
|
||||
or reject comments, commits, code, wiki edits, issues, and other
|
||||
contributions that are not aligned with this Code of Conduct. Project
|
||||
maintainers who do not follow the Code of Conduct may be removed from
|
||||
the project team.
|
||||
|
||||
## Code of Conduct
|
||||
This Code of Conduct applies both within project spaces and in public
|
||||
spaces when an individual is representing the project or its
|
||||
community.
|
||||
|
||||
This project follows the [Contributor Covenant](https://www.contributor-covenant.org/).
|
||||
The full Code of Conduct is available at
|
||||
[help.penpot.app/contributing-guide/coc](https://help.penpot.app/contributing-guide/coc/)
|
||||
and in the repository's [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md).
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior
|
||||
may be reported by opening an issue or contacting one or more of the
|
||||
project maintainers.
|
||||
|
||||
To report unacceptable behavior, open an issue or contact a project maintainer
|
||||
directly.
|
||||
This Code of Conduct is adapted from the Contributor Covenant, version
|
||||
1.1.0, available from [http://contributor-covenant.org/version/1/1/0/](http://contributor-covenant.org/version/1/1/0/)
|
||||
|
||||
## Developer's Certificate of Origin (DCO)
|
||||
|
||||
By submitting code you agree to and can certify the following:
|
||||
|
||||
> **Developer's Certificate of Origin 1.1**
|
||||
>
|
||||
> By making a contribution to this project, I certify that:
|
||||
>
|
||||
> (a) The contribution was created in whole or in part by me and I have the
|
||||
> right to submit it under the open source license indicated in the file; or
|
||||
>
|
||||
> (b) The contribution is based upon previous work that, to the best of my
|
||||
> knowledge, is covered under an appropriate open source license and I have
|
||||
> the right under that license to submit that work with modifications,
|
||||
> whether created in whole or in part by me, under the same open source
|
||||
> license (unless I am permitted to submit under a different license), as
|
||||
> indicated in the file; or
|
||||
>
|
||||
> (c) The contribution was provided directly to me by some other person who
|
||||
> certified (a), (b) or (c) and I have not modified it.
|
||||
>
|
||||
> (d) I understand and agree that this project and the contribution are public
|
||||
> and that a record of the contribution (including all personal information
|
||||
> I submit with it, including my sign-off) is maintained indefinitely and
|
||||
> may be redistributed consistent with this project or the open source
|
||||
> license(s) involved.
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
### Signed-off-by
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
All code patches (**documentation is excluded**) must contain a sign-off line
|
||||
at the end of the commit body. Add it automatically with `git commit -s`.
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
|
||||
Then, all your code patches (**documentation is excluded**) should
|
||||
contain a sign-off at the end of the patch/commit description body. It
|
||||
can be automatically added by adding the `-s` parameter to `git commit`.
|
||||
|
||||
This is an example of what the line should look like:
|
||||
|
||||
```
|
||||
Signed-off-by: Your Real Name <your.email@example.com>
|
||||
Signed-off-by: Andrey Antukh <niwi@niwi.nz>
|
||||
```
|
||||
|
||||
- Use your **real name** — pseudonyms and anonymous contributions are not
|
||||
allowed.
|
||||
- The `Signed-off-by` line is **mandatory** and must match the commit author.
|
||||
Please, use your real name (sorry, no pseudonyms or anonymous
|
||||
contributions are allowed).
|
||||
|
||||
119
README.md
119
README.md
@ -1,21 +1,18 @@
|
||||
<img width="100%" src="https://github.com/user-attachments/assets/da17b160-f289-436f-b140-972083a08602" />
|
||||
|
||||
[uri_license]: https://www.mozilla.org/en-US/MPL/2.0
|
||||
[uri_license_image]: https://img.shields.io/badge/MPL-2.0-blue.svg
|
||||
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://penpot.app/images/readme/github-dark-mode.png">
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://penpot.app/images/readme/github-light-mode.png">
|
||||
<img alt="penpot header image" src="https://penpot.app/images/readme/github-light-mode.png">
|
||||
</picture>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.digitalpublicgoods.net/r/penpot" rel="nofollow">
|
||||
<img alt="Verified DPG" src="https://img.shields.io/badge/Verified-DPG-blue.svg">
|
||||
</a>
|
||||
<a href="https://community.penpot.app" rel="nofollow">
|
||||
<img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app">
|
||||
</a>
|
||||
<a href="https://tree.taiga.io/project/penpot/" rel="nofollow">
|
||||
<img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg">
|
||||
</a>
|
||||
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow">
|
||||
<img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod">
|
||||
</a>
|
||||
<a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img alt="License: MPL-2.0" src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
|
||||
<a href="https://community.penpot.app" rel="nofollow"><img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app" style="max-width:100%;"></a>
|
||||
<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
|
||||
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
@ -32,25 +29,25 @@
|
||||
<a href="https://fosstodon.org/@penpot/"><b>Mastodon</b></a> •
|
||||
<a href="https://bsky.app/profile/penpot.app"><b>Bluesky</b></a> •
|
||||
<a href="https://twitter.com/penpotapp"><b>X</b></a>
|
||||
|
||||
</p>
|
||||
|
||||
[Penpot video](https://github.com/user-attachments/assets/7c67fd7c-04d3-4c9b-88ec-b6f5e23f8332)
|
||||
<br />
|
||||
|
||||
Penpot is the open-source design platform for teams that build digital products at scale.
|
||||
[Penpot video](https://github.com/user-attachments/assets/7c67fd7c-04d3-4c9b-88ec-b6f5e23f8332
|
||||
)
|
||||
|
||||
Penpot’s key strength lies in giving you **full ownership of your design infrastructure**. Built on open source and designed for [self-hosting](https://help.penpot.app/technical-guide/getting-started/), it puts teams in complete control of their design environment supporting strict compliance and governance requirements. Whether used in the **browser or deployed on your own servers**, Penpot **works with open standards** like SVG, CSS, HTML, and JSON.
|
||||
<br />
|
||||
|
||||
Real-time collaboration strengthens this foundation, helping teams scale and bring design closer to the product through top-tier capabilities. Additionally, developers feel at home using Penpot, because design is expressed as code, enabling a direct translation and shipping products faster.
|
||||
Penpot is the first **open-source** design tool for design and code collaboration. Designers can create stunning designs, interactive prototypes, design systems at scale, while developers enjoy ready-to-use code and make their workflow easy and fast. And all of this with no handoff drama.
|
||||
|
||||
Best-in-class native [Design Tokens](https://penpot.dev/collaboration/design-tokens) provide a single source of truth between design and development. They ensure consistency, improve collaboration, and make it easier to manage complex design systems.
|
||||
Available on browser or self-hosted, Penpot works with open standards like SVG, CSS, HTML and JSON, and it’s free!
|
||||
|
||||
The [MCP server](https://penpot.app/penpot-mcp-server) takes it further by enabling multi-directional workflows between design and code. A [powerful open API](https://help.penpot.app/mcp/#quick-start) and plugin system makes the workspace programmable, enabling automation, AI-driven workflows, and integrations with the tools and systems you already use.
|
||||
The latest updates take Penpot even further. It’s the first design tool to integrate native [design tokens](https://penpot.dev/collaboration/design-tokens)—a single source of truth to improve efficiency and collaboration between product design and development.
|
||||
With the [huge 2.0 release](https://penpot.app/dev-diaries), Penpot took the platform to a whole new level. This update introduces the ground-breaking [CSS Grid Layout feature](https://penpot.app/penpot-2.0), a complete UI redesign, a new Components system, and much more.
|
||||
For organizations that need extra service for its teams, [get in touch](https://cal.com/team/penpot/talk-to-us)
|
||||
|
||||
With [CSS Grid and Flex Layout](https://help.penpot.app/user-guide/designing/flexible-layouts/), teams can design responsive interfaces that behave like real code from the start.
|
||||
|
||||
Combined, these features turn Penpot into a **full-stack design platform** for building scalable design systems and fully integrated product development processes.
|
||||
|
||||
If your organization is scaling and needs extra support, we’re here to help. [Talk to us](https://penpot.app/talk-to-us)
|
||||
🎇 Design, code, and Open Source meet at [Penpot Fest](https://penpot.app/penpotfest)! Be part of the 2025 edition in Madrid, Spain, on October 9-10.
|
||||
|
||||
## Table of contents ##
|
||||
|
||||
@ -63,78 +60,104 @@ If your organization is scaling and needs extra support, we’re here to help. [
|
||||
|
||||
## Why Penpot ##
|
||||
|
||||
Penpot connects design, code, and AI workflows through a code-based approach, making designs readable by developers and AI via the MCP server. This approach helps teams ship what’s actually designed and manage design systems at scale with powerful design tokens. As a self-hosted, open-source and real-time collaboration platform, Penpot offers full flexibility, security, and ownership without vendor lock-in. Learn more about [why Penpot](https://penpot.app/why-penpot) is the platform for your team.
|
||||
Penpot expresses designs as code. Designers can do their best work and see it will be beautifully implemented by developers in a two-way collaboration.
|
||||
|
||||
### Plugin system ###
|
||||
|
||||
[Penpot plugins](https://penpot.app/penpothub/plugins) let you expand the platform's capabilities, give you the flexibility to integrate it with other apps, and design custom solutions.
|
||||
|
||||
### Designed for developers ###
|
||||
|
||||
Penpot was built to serve both designers and developers and create a fluid design-code process. You have the choice to enjoy real-time collaboration or play "solo".
|
||||
|
||||
### Inspect mode ###
|
||||
|
||||
Work with ready-to-use code and make your workflow easy and fast. The inspect tab gives instant access to SVG, CSS and HTML code.
|
||||
|
||||
### Self host your own instance ###
|
||||
Provide your team or organization with a completely owned collaborative design tool. Use Penpot's cloud service or deploy your own Penpot server.
|
||||
|
||||
### Integrations ###
|
||||
Penpot offers integration into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
|
||||
|
||||
Penpot offers [integration](https://penpot.app/integrations-api) into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
|
||||
### What’s great for design ###
|
||||
With Penpot you can design libraries to share and reuse; turn design elements into components and tokens to allow reusability and scalability; and build realistic user flows and interactions.
|
||||
|
||||
### Building Design Systems: design tokens, components and variants ###
|
||||
### Design Tokens ###
|
||||
With Penpot’s standardized [design tokens](https://penpot.dev/collaboration/design-tokens) format, you can easily reuse and sync tokens across different platforms, workflows, and disciplines.
|
||||
|
||||
Penpot brings [design systems](https://penpot.app/design/design-systems) to code-minded teams: a single source of truth with native Design Tokens, Components, and Variants for scalable, reusable, and consistent UI across projects and platforms.
|
||||
|
||||
<img width="100%" alt="Penpot Design Systems" src="https://github.com/user-attachments/assets/cce75ad6-f783-473f-8803-da9eb8255fef">
|
||||
<br />
|
||||
|
||||
<p align="center">
|
||||
<img src="https://img.plasmic.app/img-optimizer/v1/img?src=https%3A%2F%2Fimg.plasmic.app%2Fimg-optimizer%2Fv1%2Fimg%2F9dd677c36afb477e9666ccd1d3f009ad.png" alt="Open Source" style="width: 65%;">
|
||||
</p>
|
||||
|
||||
<br />
|
||||
|
||||
## Getting started ##
|
||||
|
||||
Penpot is the only design & prototype platform that is deployment agnostic. You can use it in our [SAAS](https://design.penpot.app) or deploy it anywhere.
|
||||
|
||||
Learn how to install it with Docker, Kubernetes, Elestio or other options on [our website](https://penpot.app/self-host).
|
||||
<br />
|
||||
|
||||
<p align="center">
|
||||
<img src="https://site-assets.plasmic.app/2168cf524dd543caeff32384eb9ea0a1.svg" alt="Open Source" style="width: 65%;">
|
||||
</p>
|
||||
<br />
|
||||
|
||||
## Community ##
|
||||
|
||||
We love the Open Source software community. Contributing is our passion and if it’s yours too, participate and [improve](https://community.penpot.app/c/help-us-improve-penpot/7) Penpot. All your designs, code and ideas are welcome!
|
||||
|
||||
Want to go a step further? Become a [Penpot Ambassador](https://penpot.app/ambassador-program) and help grow the Penpot community in your region while contributing to a global, open design ecosystem.
|
||||
|
||||
If you need help or have any questions; if you’d like to share your experience using Penpot or get inspired; if you’d rather meet our community of developers and designers, [join our Community](https://community.penpot.app/)!
|
||||
|
||||
Categories include:
|
||||
|
||||
You will find the following categories:
|
||||
- [Ask the Community](https://community.penpot.app/c/ask-for-help-using-penpot/6)
|
||||
- [Troubleshooting](https://community.penpot.app/c/technical/8)
|
||||
- [Help us Improve Penpot](https://community.penpot.app/c/help-us-improve-penpot/7)
|
||||
- [#MadeWithPenpot](https://community.penpot.app/c/madewithpenpot/9)
|
||||
- [Events and Announcements](https://community.penpot.app/c/announcements/5)
|
||||
- [Inside Penpot](https://community.penpot.app/c/inside-penpot/21)
|
||||
- [Penpot in your language](https://community.penpot.app/c/penpot-in-your-language/12)
|
||||
- [Education](https://community.penpot.app/c/education/28)
|
||||
- [Design and Code Essentials](https://community.penpot.app/c/design-and-code-essentials/22)
|
||||
|
||||
<img width="100%" alt="Pentpot Community" src="https://github.com/user-attachments/assets/4b2a4360-12b5-4994-bd45-641449f86c4e" />
|
||||
|
||||
<br />
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/penpot/penpot/assets/5446186/6ac62220-a16c-46c9-ab21-d24ae357ed03" alt="Community" style="width: 65%;">
|
||||
</p>
|
||||
<br />
|
||||
|
||||
### Code of Conduct ###
|
||||
|
||||
Anyone who contributes to Penpot, whether through code, in the community, or at an event, must adhere to the
|
||||
[code of conduct](https://help.penpot.app/contributing-guide/coc/) and foster a positive and safe environment.
|
||||
|
||||
### Contributing ###
|
||||
|
||||
## Contributing ##
|
||||
|
||||
Any contribution will make a difference to improve Penpot. How can you get involved?
|
||||
|
||||
Choose your way:
|
||||
|
||||
- Create and [share Libraries & Templates](https://penpot.app/libraries-templates.html) that will be helpful for the community.
|
||||
- Invite your [team to join](https://design.penpot.app/#/auth/register).
|
||||
- Give this repo a star and follow us on Social Media: [Mastodon](https://fosstodon.org/@penpot/), [Youtube](https://www.youtube.com/c/Penpot), [Instagram](https://instagram.com/penpot.app), [Linkedin](https://www.linkedin.com/company/penpotdesign), [Peertube](https://peertube.kaleidos.net/a/penpot_app), [X](https://twitter.com/penpotapp) and [BlueSky](https://bsky.app/profile/penpot.app).
|
||||
- Create and [share Libraries & Templates](https://penpot.app/libraries-templates.html) that will be helpful for the community
|
||||
- Invite your [team to join](https://design.penpot.app/#/auth/register)
|
||||
- Give this repo a star and follow us on Social Media: [Mastodon](https://fosstodon.org/@penpot/), [Youtube](https://www.youtube.com/c/Penpot), [Instagram](https://instagram.com/penpot.app), [Linkedin](https://www.linkedin.com/company/penpotdesign), [Peertube](https://peertube.kaleidos.net/a/penpot_app), [X](https://twitter.com/penpotapp) and [BlueSky](https://bsky.app/profile/penpot.app)
|
||||
- Participate in the [Community](https://community.penpot.app/) space by asking and answering questions; reacting to others’ articles; opening your own conversations and following along on decisions affecting the project.
|
||||
- Report bugs with our easy [guide for bugs hunting](https://help.penpot.app/contributing-guide/reporting-bugs/) or [GitHub issues](https://github.com/penpot/penpot/issues).
|
||||
- Become a [translator](https://help.penpot.app/contributing-guide/translations).
|
||||
- Give feedback: [Email us](mailto:support@penpot.app).
|
||||
- **Contribute to Penpot's code:** [Watch this video](https://www.youtube.com/watch?v=TpN0osiY-8k) by Alejandro Alonso, CIO and developer at Penpot, where he gives us a hands-on demo of how to use Penpot’s repository and make changes in both front and back end.
|
||||
- Report bugs with our easy [guide for bugs hunting](https://help.penpot.app/contributing-guide/reporting-bugs/) or [GitHub issues](https://github.com/penpot/penpot/issues)
|
||||
- Become a [translator](https://help.penpot.app/contributing-guide/translations)
|
||||
- Give feedback: [Email us](mailto:support@penpot.app)
|
||||
- **Contribute to Penpot's code:** [Watch this video](https://www.youtube.com/watch?v=TpN0osiY-8k) by Alejandro Alonso, CIO and developer at Penpot, where he gives us a hands-on demo of how to use Penpot’s repository and make changes in both front and back end
|
||||
|
||||
To find (almost) everything you need to know on how to contribute to Penpot, refer to the [contributing guide](https://help.penpot.app/contributing-guide/).
|
||||
|
||||
<img width="100%" alt="Penpot hub" src="https://github.com/user-attachments/assets/0abc02f0-625c-45ab-ad81-4927bec7a055" />
|
||||
<br />
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/penpot/penpot/assets/5446186/fea18923-dc06-49be-86ad-c3496a7956e6" alt="Libraries and templates" style="width: 65%;">
|
||||
</p>
|
||||
|
||||
<br />
|
||||
|
||||
## Resources ##
|
||||
|
||||
@ -150,8 +173,6 @@ You can ask and answer questions, have open-ended conversations, and follow alon
|
||||
|
||||
📚 [Dev Diaries](https://penpot.app/dev-diaries.html)
|
||||
|
||||
🧑🏫 [UI Design Course](https://penpot.app/courses/)
|
||||
|
||||
|
||||
## License ##
|
||||
|
||||
|
||||
28
SECURITY.md
28
SECURITY.md
@ -2,30 +2,4 @@
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We take the security of this project seriously. If you have discovered
|
||||
a security vulnerability, please do **not** open a public issue.
|
||||
|
||||
Please report vulnerabilities via email to: **[support@penpot.app]**
|
||||
|
||||
|
||||
### What to include:
|
||||
|
||||
* A brief description of the vulnerability.
|
||||
* Steps to reproduce the issue.
|
||||
* Potential impact if exploited.
|
||||
|
||||
We appreciate your patience and your commitment to **responsible disclosure**.
|
||||
|
||||
---
|
||||
|
||||
## Security Contributors
|
||||
|
||||
We are incredibly grateful to the following individuals and
|
||||
organizations for their help in keeping this project safe.
|
||||
|
||||
* **Ali Maharramli** – for identifying critical path traversal vulnerability
|
||||
|
||||
|
||||
> **Note:** This list is a work in progress. If you have contributed
|
||||
> to the security of this project and would like to be recognized (or
|
||||
> prefer to remain anonymous), please let us know.
|
||||
Please report security issues to `support@penpot.app`
|
||||
7
backend/.gitignore
vendored
Normal file
7
backend/.gitignore
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/sdks
|
||||
!.yarn/versions
|
||||
@ -1,262 +0,0 @@
|
||||
# Penpot Backend – Agent Instructions
|
||||
|
||||
Clojure backend (RPC) service running on the JVM.
|
||||
|
||||
Uses Integrant for dependency injection, PostgreSQL for storage, and
|
||||
Redis for messaging/caching.
|
||||
|
||||
## General Guidelines
|
||||
|
||||
To ensure consistency across the Penpot JVM stack, all contributions must adhere
|
||||
to these criteria.
|
||||
|
||||
IMPORTANT: all CLI commands should be executed under backend/
|
||||
subdirectory for make them work correctly.
|
||||
|
||||
### 1. Testing & Validation
|
||||
|
||||
* **Coverage:** If code is added or modified in `src/`, corresponding
|
||||
tests in `test/backend_tests/` must be added or updated.
|
||||
|
||||
* **Execution:**
|
||||
* **Isolated:** Run `clojure -M:dev:test --focus backend-tests.my-ns-test` for the specific test namespace.
|
||||
* **Regression:** Run `clojure -M:dev:test` to ensure the suite passes without regressions in related functional areas.
|
||||
|
||||
### 2. Code Quality & Formatting
|
||||
|
||||
* **Linting:** All code must pass linter checks (run `pnpm run lint:clj` or `pnpm run lint` on the repository root)
|
||||
* **Formatting:** All the code must pass the formatting check (run `pnpm run
|
||||
check-fmt`). Use `pnpm run fmt` to fix formatting issues. Avoid "dirty"
|
||||
diffs caused by unrelated whitespace changes.
|
||||
* **Type Hinting:** Use explicit JVM type hints (e.g., `^String`, `^long`) in
|
||||
performance-critical paths to avoid reflection overhead.
|
||||
|
||||
## Code Conventions
|
||||
|
||||
### Namespace Overview
|
||||
|
||||
The source is located under `src` directory and this is a general overview of
|
||||
namespaces structure:
|
||||
|
||||
- `app.rpc.commands.*` – RPC command implementations (`auth`, `files`, `teams`, etc.)
|
||||
- `app.http.*` – HTTP routes and middleware
|
||||
- `app.db.*` – Database layer
|
||||
- `app.tasks.*` – Background job tasks
|
||||
- `app.main` – Integrant system setup and entrypoint
|
||||
- `app.loggers` – Internal loggers (auditlog, mattermost, etc.) (not to be confused with `app.common.logging`)
|
||||
|
||||
### RPC
|
||||
|
||||
The RPC methods are implemented using a multimethod-like structure via the
|
||||
`app.util.services` namespace. The main RPC methods are collected under
|
||||
`app.rpc.commands` namespace and exposed under `/api/rpc/command/<cmd-name>`.
|
||||
|
||||
The RPC method accepts POST and GET requests indistinctly and uses the `Accept`
|
||||
header to negotiate the response encoding (which can be Transit — the default —
|
||||
or plain JSON). It also accepts Transit (default) or JSON as input, which should
|
||||
be indicated using the `Content-Type` header.
|
||||
|
||||
The main convention is: use `get-` prefix on RPC name when we want READ
|
||||
operation.
|
||||
|
||||
Example of RPC method definition:
|
||||
|
||||
```clojure
|
||||
(sv/defmethod ::my-command
|
||||
{::rpc/auth true ;; requires auth
|
||||
::doc/added "1.18"
|
||||
::sm/params [:map ...] ;; malli input schema
|
||||
::sm/result [:map ...]} ;; malli output schema
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
;; return a plain map or throw
|
||||
{:id (uuid/next)})
|
||||
```
|
||||
|
||||
Look under `src/app/rpc/commands/*.clj` to see more examples.
|
||||
|
||||
### Tests
|
||||
|
||||
Test namespaces match `.*-test$` under `test/`. Config is in `tests.edn`.
|
||||
|
||||
|
||||
### Integrant System
|
||||
|
||||
The `src/app/main.clj` declares the system map. Each key is a component; values
|
||||
are config maps with `::ig/ref` for dependencies. Components implement
|
||||
`ig/init-key` / `ig/halt-key!`.
|
||||
|
||||
|
||||
### Connecting to the Database
|
||||
|
||||
Two PostgreSQL databases are used in this environment:
|
||||
|
||||
| Database | Purpose | Connection string |
|
||||
|---------------|--------------------|----------------------------------------------------|
|
||||
| `penpot` | Development / app | `postgresql://penpot:penpot@postgres/penpot` |
|
||||
| `penpot_test` | Test suite | `postgresql://penpot:penpot@postgres/penpot_test` |
|
||||
|
||||
**Interactive psql session:**
|
||||
|
||||
```bash
|
||||
# development DB
|
||||
psql "postgresql://penpot:penpot@postgres/penpot"
|
||||
|
||||
# test DB
|
||||
psql "postgresql://penpot:penpot@postgres/penpot_test"
|
||||
```
|
||||
|
||||
**One-shot query (non-interactive):**
|
||||
|
||||
```bash
|
||||
psql "postgresql://penpot:penpot@postgres/penpot" -c "SELECT id, name FROM team LIMIT 5;"
|
||||
```
|
||||
|
||||
**Useful psql meta-commands:**
|
||||
|
||||
```
|
||||
\dt -- list all tables
|
||||
\d <table> -- describe a table (columns, types, constraints)
|
||||
\di -- list indexes
|
||||
\q -- quit
|
||||
```
|
||||
|
||||
> **Migrations table:** Applied migrations are tracked in the `migrations` table
|
||||
> with columns `module`, `step`, and `created_at`. When renaming a migration
|
||||
> logical name, update this table in both databases to match the new name;
|
||||
> otherwise the runner will attempt to re-apply the migration on next startup.
|
||||
|
||||
```bash
|
||||
# Example: fix a renamed migration entry in the test DB
|
||||
psql "postgresql://penpot:penpot@postgres/penpot_test" \
|
||||
-c "UPDATE migrations SET step = 'new-name' WHERE step = 'old-name';"
|
||||
```
|
||||
|
||||
### Database Access (Clojure)
|
||||
|
||||
`app.db` wraps next.jdbc. Queries use a SQL builder that auto-converts kebab-case ↔ snake_case.
|
||||
|
||||
```clojure
|
||||
;; Query helpers
|
||||
(db/get cfg-or-pool :table {:id id}) ; fetch one row (throws if missing)
|
||||
(db/get* cfg-or-pool :table {:id id}) ; fetch one row (returns nil)
|
||||
(db/query cfg-or-pool :table {:team-id team-id}) ; fetch multiple rows
|
||||
(db/insert! cfg-or-pool :table {:name "x" :team-id id}) ; insert
|
||||
(db/update! cfg-or-pool :table {:name "y"} {:id id}) ; update
|
||||
(db/delete! cfg-or-pool :table {:id id}) ; delete
|
||||
|
||||
;; Run multiple statements/queries on single connection
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(db/insert! conn :table row1)
|
||||
(db/insert! conn :table row2))
|
||||
|
||||
|
||||
;; Transactions
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(db/insert! conn :table row)))
|
||||
```
|
||||
|
||||
Almost all methods in the `app.db` namespace accept `pool`, `conn`, or
|
||||
`cfg` as params.
|
||||
|
||||
Migrations live in `src/app/migrations/` as numbered SQL files. They run automatically on startup.
|
||||
|
||||
|
||||
### Error Handling
|
||||
|
||||
The exception helpers are defined on Common module, and are available under
|
||||
`app.common.exceptions` namespace.
|
||||
|
||||
Example of raising an exception:
|
||||
|
||||
```clojure
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "File does not exist"
|
||||
:file-id id)
|
||||
```
|
||||
|
||||
Common types: `:not-found`, `:validation`, `:authorization`, `:conflict`, `:internal`.
|
||||
|
||||
|
||||
### Performance Macros (`app.common.data.macros`)
|
||||
|
||||
Always prefer these macros over their `clojure.core` equivalents — they provide
|
||||
optimized implementations:
|
||||
|
||||
```clojure
|
||||
(dm/select-keys m [:a :b]) ;; faster than core/select-keys
|
||||
(dm/get-in obj [:a :b :c]) ;; faster than core/get-in
|
||||
(dm/str "a" "b" "c") ;; string concatenation
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
`src/app/config.clj` reads `PENPOT_*` environment variables, validated with
|
||||
Malli. Access anywhere via `(cf/get :smtp-host)`. Feature flags: `(cf/flags
|
||||
:enable-smtp)`.
|
||||
|
||||
|
||||
### Background Tasks
|
||||
|
||||
Background tasks live in `src/app/tasks/`. Each task is an Integrant component
|
||||
that exposes a `::handler` key and follows this three-method pattern:
|
||||
|
||||
```clojure
|
||||
(defmethod ig/assert-key ::handler ;; validate config at startup
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected a valid database pool"))
|
||||
|
||||
(defmethod ig/expand-key ::handler ;; inject defaults before init
|
||||
[k v]
|
||||
{k (assoc v ::my-option default-value)})
|
||||
|
||||
(defmethod ig/init-key ::handler ;; return the task fn
|
||||
[_ cfg]
|
||||
(fn [_task] ;; receives the task row from the worker
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
;; … do work …
|
||||
))))
|
||||
```
|
||||
|
||||
**Wiring a new task** requires two changes in `src/app/main.clj`:
|
||||
|
||||
1. **Handler config** – add an entry in `system-config` with the dependencies:
|
||||
|
||||
```clojure
|
||||
:app.tasks.my-task/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
```
|
||||
|
||||
2. **Registry + cron** – register the handler name and schedule it:
|
||||
|
||||
```clojure
|
||||
;; in ::wrk/registry ::wrk/tasks map:
|
||||
:my-task (ig/ref :app.tasks.my-task/handler)
|
||||
|
||||
;; in worker-config ::wrk/cron ::wrk/entries vector:
|
||||
{:cron #penpot/cron "0 0 0 * * ?" ;; daily at midnight
|
||||
:task :my-task}
|
||||
```
|
||||
|
||||
**Useful cron patterns** (Quartz format — six fields: s m h dom mon dow):
|
||||
|
||||
| Expression | Meaning |
|
||||
|------------------------------|--------------------|
|
||||
| `"0 0 0 * * ?"` | Daily at midnight |
|
||||
| `"0 0 */6 * * ?"` | Every 6 hours |
|
||||
| `"0 */5 * * * ?"` | Every 5 minutes |
|
||||
|
||||
**Time helpers** (`app.common.time`):
|
||||
|
||||
```clojure
|
||||
(ct/now) ;; current instant
|
||||
(ct/duration {:hours 1}) ;; java.time.Duration
|
||||
(ct/minus (ct/now) some-duration) ;; subtract duration from instant
|
||||
```
|
||||
|
||||
`db/interval` converts a `Duration` (or millis / string) to a PostgreSQL
|
||||
interval object suitable for use in SQL queries:
|
||||
|
||||
```clojure
|
||||
(db/interval (ct/duration {:hours 1})) ;; → PGInterval "3600.0 seconds"
|
||||
```
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
:deps
|
||||
{penpot/common {:local/root "../common"}
|
||||
org.clojure/clojure {:mvn/version "1.12.5"}
|
||||
org.clojure/clojure {:mvn/version "1.12.1"}
|
||||
org.clojure/tools.namespace {:mvn/version "1.5.0"}
|
||||
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.7-4"}
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.7-3"}
|
||||
|
||||
io.prometheus/simpleclient {:mvn/version "0.16.0"}
|
||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
|
||||
@ -17,7 +17,7 @@
|
||||
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
||||
|
||||
io.lettuce/lettuce-core {:mvn/version "7.5.1.RELEASE"}
|
||||
io.lettuce/lettuce-core {:mvn/version "6.7.0.RELEASE"}
|
||||
;; Minimal dependencies required by lettuce, we need to include them
|
||||
;; explicitly because clojure dependency management does not support
|
||||
;; yet the BOM format.
|
||||
@ -28,78 +28,74 @@
|
||||
com.google.guava/guava {:mvn/version "33.4.8-jre"}
|
||||
|
||||
funcool/yetti
|
||||
{:git/tag "v11.10"
|
||||
:git/sha "88701f4"
|
||||
{:git/tag "v11.4"
|
||||
:git/sha "ce50d42"
|
||||
:git/url "https://github.com/funcool/yetti.git"
|
||||
:exclusions [org.slf4j/slf4j-api]}
|
||||
|
||||
com.github.seancorfield/next.jdbc
|
||||
{:mvn/version "1.3.1093"}
|
||||
|
||||
{:mvn/version "1.3.1002"}
|
||||
metosin/reitit-core {:mvn/version "0.9.1"}
|
||||
nrepl/nrepl {:mvn/version "1.7.0"}
|
||||
nrepl/nrepl {:mvn/version "1.3.1"}
|
||||
|
||||
org.postgresql/postgresql {:mvn/version "42.7.11"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.50.3.0"}
|
||||
org.postgresql/postgresql {:mvn/version "42.7.6"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.49.1.0"}
|
||||
|
||||
com.zaxxer/HikariCP {:mvn/version "7.0.2"}
|
||||
com.zaxxer/HikariCP {:mvn/version "6.3.0"}
|
||||
|
||||
io.whitfin/siphash {:mvn/version "2.0.0"}
|
||||
|
||||
buddy/buddy-hashers {:mvn/version "2.0.167"}
|
||||
buddy/buddy-sign {:mvn/version "3.6.1-359"}
|
||||
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.4"}
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.0"}
|
||||
|
||||
org.jsoup/jsoup {:mvn/version "1.21.2"}
|
||||
org.jsoup/jsoup {:mvn/version "1.20.1"}
|
||||
org.im4java/im4java
|
||||
{:git/tag "1.4.0-penpot-2"
|
||||
:git/sha "e2b3e16"
|
||||
:git/url "https://github.com/penpot/im4java"}
|
||||
|
||||
at.yawk.lz4/lz4-java
|
||||
{:mvn/version "1.11.0"}
|
||||
org.lz4/lz4-java {:mvn/version "1.8.0"}
|
||||
|
||||
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
|
||||
|
||||
dawran6/emoji {:mvn/version "0.2.0"}
|
||||
markdown-clj/markdown-clj {:mvn/version "1.12.4"}
|
||||
dawran6/emoji {:mvn/version "0.1.5"}
|
||||
markdown-clj/markdown-clj {:mvn/version "1.12.3"}
|
||||
|
||||
;; Pretty Print specs
|
||||
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.44.4"}}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.31.55"}}
|
||||
|
||||
:paths ["src" "resources" "target/classes"]
|
||||
:aliases
|
||||
{:dev
|
||||
{:extra-deps
|
||||
{com.bhauman/rebel-readline {:mvn/version "0.1.5"}
|
||||
{com.bhauman/rebel-readline {:mvn/version "RELEASE"}
|
||||
clojure-humanize/clojure-humanize {:mvn/version "0.2.2"}
|
||||
org.clojure/data.csv {:mvn/version "1.1.1"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "2.0.0-beta1"}
|
||||
mockery/mockery {:mvn/version "0.1.4"}}
|
||||
org.clojure/data.csv {:mvn/version "RELEASE"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
|
||||
mockery/mockery {:mvn/version "RELEASE"}}
|
||||
:extra-paths ["test" "dev"]}
|
||||
|
||||
:build
|
||||
{:extra-deps
|
||||
{io.github.clojure/tools.build {:mvn/version "0.10.10"}}
|
||||
{io.github.clojure/tools.build {:git/tag "v0.10.9" :git/sha "e405aac"}}
|
||||
:ns-default build}
|
||||
|
||||
:test
|
||||
{:main-opts ["-m" "kaocha.runner"]
|
||||
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
||||
"--sun-misc-unsafe-memory-access=allow"
|
||||
"--enable-native-access=ALL-UNNAMED"]
|
||||
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"]
|
||||
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
|
||||
|
||||
:outdated
|
||||
{:extra-deps {com.github.liquidz/antq {:mvn/version "2.11.1276"}}
|
||||
{:extra-deps {com.github.liquidz/antq {:mvn/version "RELEASE"}}
|
||||
:main-opts ["-m" "antq.core"]}
|
||||
|
||||
:jmx-remote
|
||||
{:jvm-opts ["-Dcom.sun.management.jmxremote"
|
||||
"-Dcom.sun.management.jmxremote.port=9000"
|
||||
"-Dcom.sun.management.jmxremote.rmi.port=9000"
|
||||
"-Dcom.sun.management.jmxremote.port=9090"
|
||||
"-Dcom.sun.management.jmxremote.rmi.port=9090"
|
||||
"-Dcom.sun.management.jmxremote.local.only=false"
|
||||
"-Dcom.sun.management.jmxremote.authenticate=false"
|
||||
"-Dcom.sun.management.jmxremote.ssl=false"
|
||||
|
||||
@ -6,14 +6,12 @@
|
||||
|
||||
(ns user
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.debug :as debug]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.json :as json]
|
||||
[app.common.logging :as l]
|
||||
[app.common.perf :as perf]
|
||||
[app.common.pprint :as pp]
|
||||
@ -21,22 +19,20 @@
|
||||
[app.common.schema.desc-js-like :as smdj]
|
||||
[app.common.schema.desc-native :as smdn]
|
||||
[app.common.schema.generators :as sg]
|
||||
[app.common.schema.openapi :as oapi]
|
||||
[app.common.spec :as us]
|
||||
[app.common.time :as ct]
|
||||
[app.common.json :as json]
|
||||
[app.common.transit :as t]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.srepl.main :refer :all]
|
||||
[app.srepl.helpers :as srepl.helpers]
|
||||
[app.srepl.main :as srepl]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.time :as dt]
|
||||
[clj-async-profiler.core :as prof]
|
||||
[clojure.contrib.humanize :as hum]
|
||||
[clojure.datafy :refer [datafy]]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.pprint :refer [pprint print-table]]
|
||||
[clojure.repl :refer :all]
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
"license": "MPL-2.0",
|
||||
"author": "Kaleidos INC",
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.31.0+sha512.e3927388bfaa8078ceb79b748ffc1e8274e84d75163e67bc22e06c0d3aed43dd153151cbf11d7f8301ff4acb98c68bdc5cadf6989532801ffafe3b3e4a63c268",
|
||||
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/penpot/penpot"
|
||||
@ -19,9 +19,8 @@
|
||||
"ws": "^8.17.0"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "clj-kondo --parallel --lint ../common/src src/",
|
||||
"check-fmt": "cljfmt check --parallel=true src/ test/",
|
||||
"fmt": "cljfmt fix --parallel=true src/ test/",
|
||||
"test": "clojure -M:dev:test"
|
||||
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
|
||||
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
|
||||
"lint:clj": "clj-kondo --parallel --lint src/"
|
||||
}
|
||||
}
|
||||
|
||||
306
backend/pnpm-lock.yaml
generated
306
backend/pnpm-lock.yaml
generated
@ -1,306 +0,0 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
luxon:
|
||||
specifier: ^3.4.4
|
||||
version: 3.7.2
|
||||
sax:
|
||||
specifier: ^1.4.1
|
||||
version: 1.4.3
|
||||
devDependencies:
|
||||
nodemon:
|
||||
specifier: ^3.1.2
|
||||
version: 3.1.11
|
||||
source-map-support:
|
||||
specifier: ^0.5.21
|
||||
version: 0.5.21
|
||||
ws:
|
||||
specifier: ^8.17.0
|
||||
version: 8.18.3
|
||||
|
||||
packages:
|
||||
|
||||
anymatch@3.1.3:
|
||||
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
|
||||
engines: {node: '>= 8'}
|
||||
|
||||
balanced-match@1.0.2:
|
||||
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
|
||||
|
||||
binary-extensions@2.3.0:
|
||||
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
brace-expansion@1.1.12:
|
||||
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
|
||||
|
||||
braces@3.0.3:
|
||||
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
buffer-from@1.1.2:
|
||||
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
|
||||
|
||||
chokidar@3.6.0:
|
||||
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
|
||||
engines: {node: '>= 8.10.0'}
|
||||
|
||||
concat-map@0.0.1:
|
||||
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
|
||||
|
||||
debug@4.4.3:
|
||||
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
|
||||
engines: {node: '>=6.0'}
|
||||
peerDependencies:
|
||||
supports-color: '*'
|
||||
peerDependenciesMeta:
|
||||
supports-color:
|
||||
optional: true
|
||||
|
||||
fill-range@7.1.1:
|
||||
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
fsevents@2.3.3:
|
||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
os: [darwin]
|
||||
|
||||
glob-parent@5.1.2:
|
||||
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
|
||||
engines: {node: '>= 6'}
|
||||
|
||||
has-flag@3.0.0:
|
||||
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
|
||||
engines: {node: '>=4'}
|
||||
|
||||
ignore-by-default@1.0.1:
|
||||
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
|
||||
|
||||
is-binary-path@2.1.0:
|
||||
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
is-extglob@2.1.1:
|
||||
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
is-glob@4.0.3:
|
||||
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
is-number@7.0.0:
|
||||
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
|
||||
engines: {node: '>=0.12.0'}
|
||||
|
||||
luxon@3.7.2:
|
||||
resolution: {integrity: sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
minimatch@3.1.2:
|
||||
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
|
||||
|
||||
ms@2.1.3:
|
||||
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
|
||||
|
||||
nodemon@3.1.11:
|
||||
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
|
||||
engines: {node: '>=10'}
|
||||
hasBin: true
|
||||
|
||||
normalize-path@3.0.0:
|
||||
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
picomatch@2.3.1:
|
||||
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
|
||||
engines: {node: '>=8.6'}
|
||||
|
||||
pstree.remy@1.1.8:
|
||||
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
|
||||
|
||||
readdirp@3.6.0:
|
||||
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
|
||||
engines: {node: '>=8.10.0'}
|
||||
|
||||
sax@1.4.3:
|
||||
resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==}
|
||||
|
||||
semver@7.7.3:
|
||||
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
|
||||
engines: {node: '>=10'}
|
||||
hasBin: true
|
||||
|
||||
simple-update-notifier@2.0.0:
|
||||
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
source-map-support@0.5.21:
|
||||
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
|
||||
|
||||
source-map@0.6.1:
|
||||
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
supports-color@5.5.0:
|
||||
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
|
||||
engines: {node: '>=4'}
|
||||
|
||||
to-regex-range@5.0.1:
|
||||
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
|
||||
engines: {node: '>=8.0'}
|
||||
|
||||
touch@3.1.1:
|
||||
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
|
||||
hasBin: true
|
||||
|
||||
undefsafe@2.0.5:
|
||||
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
|
||||
|
||||
ws@8.18.3:
|
||||
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
|
||||
engines: {node: '>=10.0.0'}
|
||||
peerDependencies:
|
||||
bufferutil: ^4.0.1
|
||||
utf-8-validate: '>=5.0.2'
|
||||
peerDependenciesMeta:
|
||||
bufferutil:
|
||||
optional: true
|
||||
utf-8-validate:
|
||||
optional: true
|
||||
|
||||
snapshots:
|
||||
|
||||
anymatch@3.1.3:
|
||||
dependencies:
|
||||
normalize-path: 3.0.0
|
||||
picomatch: 2.3.1
|
||||
|
||||
balanced-match@1.0.2: {}
|
||||
|
||||
binary-extensions@2.3.0: {}
|
||||
|
||||
brace-expansion@1.1.12:
|
||||
dependencies:
|
||||
balanced-match: 1.0.2
|
||||
concat-map: 0.0.1
|
||||
|
||||
braces@3.0.3:
|
||||
dependencies:
|
||||
fill-range: 7.1.1
|
||||
|
||||
buffer-from@1.1.2: {}
|
||||
|
||||
chokidar@3.6.0:
|
||||
dependencies:
|
||||
anymatch: 3.1.3
|
||||
braces: 3.0.3
|
||||
glob-parent: 5.1.2
|
||||
is-binary-path: 2.1.0
|
||||
is-glob: 4.0.3
|
||||
normalize-path: 3.0.0
|
||||
readdirp: 3.6.0
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
concat-map@0.0.1: {}
|
||||
|
||||
debug@4.4.3(supports-color@5.5.0):
|
||||
dependencies:
|
||||
ms: 2.1.3
|
||||
optionalDependencies:
|
||||
supports-color: 5.5.0
|
||||
|
||||
fill-range@7.1.1:
|
||||
dependencies:
|
||||
to-regex-range: 5.0.1
|
||||
|
||||
fsevents@2.3.3:
|
||||
optional: true
|
||||
|
||||
glob-parent@5.1.2:
|
||||
dependencies:
|
||||
is-glob: 4.0.3
|
||||
|
||||
has-flag@3.0.0: {}
|
||||
|
||||
ignore-by-default@1.0.1: {}
|
||||
|
||||
is-binary-path@2.1.0:
|
||||
dependencies:
|
||||
binary-extensions: 2.3.0
|
||||
|
||||
is-extglob@2.1.1: {}
|
||||
|
||||
is-glob@4.0.3:
|
||||
dependencies:
|
||||
is-extglob: 2.1.1
|
||||
|
||||
is-number@7.0.0: {}
|
||||
|
||||
luxon@3.7.2: {}
|
||||
|
||||
minimatch@3.1.2:
|
||||
dependencies:
|
||||
brace-expansion: 1.1.12
|
||||
|
||||
ms@2.1.3: {}
|
||||
|
||||
nodemon@3.1.11:
|
||||
dependencies:
|
||||
chokidar: 3.6.0
|
||||
debug: 4.4.3(supports-color@5.5.0)
|
||||
ignore-by-default: 1.0.1
|
||||
minimatch: 3.1.2
|
||||
pstree.remy: 1.1.8
|
||||
semver: 7.7.3
|
||||
simple-update-notifier: 2.0.0
|
||||
supports-color: 5.5.0
|
||||
touch: 3.1.1
|
||||
undefsafe: 2.0.5
|
||||
|
||||
normalize-path@3.0.0: {}
|
||||
|
||||
picomatch@2.3.1: {}
|
||||
|
||||
pstree.remy@1.1.8: {}
|
||||
|
||||
readdirp@3.6.0:
|
||||
dependencies:
|
||||
picomatch: 2.3.1
|
||||
|
||||
sax@1.4.3: {}
|
||||
|
||||
semver@7.7.3: {}
|
||||
|
||||
simple-update-notifier@2.0.0:
|
||||
dependencies:
|
||||
semver: 7.7.3
|
||||
|
||||
source-map-support@0.5.21:
|
||||
dependencies:
|
||||
buffer-from: 1.1.2
|
||||
source-map: 0.6.1
|
||||
|
||||
source-map@0.6.1: {}
|
||||
|
||||
supports-color@5.5.0:
|
||||
dependencies:
|
||||
has-flag: 3.0.0
|
||||
|
||||
to-regex-range@5.0.1:
|
||||
dependencies:
|
||||
is-number: 7.0.0
|
||||
|
||||
touch@3.1.1: {}
|
||||
|
||||
undefsafe@2.0.5: {}
|
||||
|
||||
ws@8.18.3: {}
|
||||
@ -193,7 +193,7 @@
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
Click the link below to confirm the change.</div>
|
||||
Click to the link below to confirm the change:</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
@ -217,7 +217,8 @@
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
If you did not request this change, consider changing your password for security reasons.</div>
|
||||
If you received this email by mistake, please consider changing your password for security
|
||||
reasons.</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
|
||||
@ -2,11 +2,12 @@ Hello {{name|abbreviate:25}}!
|
||||
|
||||
We received a request to change your current email to {{ pending-email }}.
|
||||
|
||||
Click the link below to confirm the change.
|
||||
Click to the link below to confirm the change:
|
||||
|
||||
{{ public-uri }}/#/auth/verify-token?token={{token}}
|
||||
|
||||
If you did not request this change, consider changing your password for security reasons.
|
||||
If you received this email by mistake, please consider changing your password
|
||||
for security reasons.
|
||||
|
||||
Enjoy!
|
||||
The Penpot team.
|
||||
|
||||
@ -8,41 +8,38 @@
|
||||
<body>
|
||||
<p>
|
||||
<strong>Feedback from:</strong><br />
|
||||
{% if profile %}
|
||||
<span>
|
||||
<span>Name: </span>
|
||||
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
|
||||
</span>
|
||||
<br />
|
||||
|
||||
<span>
|
||||
<span>Email: </span>
|
||||
<span>{{profile.email}}</span>
|
||||
</span>
|
||||
<br />
|
||||
|
||||
<span>
|
||||
<span>ID: </span>
|
||||
<span><code>{{profile.id}}</code></span>
|
||||
</span>
|
||||
{% else %}
|
||||
<span>
|
||||
<span>Email: </span>
|
||||
<span>{{profile.email}}</span>
|
||||
</span>
|
||||
{% endif %}
|
||||
</p>
|
||||
<p>
|
||||
<strong>Subject:</strong><br />
|
||||
<span>{{feedback-subject|abbreviate:300}}</span>
|
||||
<span>{{subject|abbreviate:300}}</span>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<strong>Type:</strong><br />
|
||||
<span>{{feedback-type|abbreviate:300}}</span>
|
||||
</p>
|
||||
|
||||
{% if feedback-error-href %}
|
||||
<p>
|
||||
<strong>Error HREF:</strong><br />
|
||||
<span>{{feedback-error-href|abbreviate:500}}</span>
|
||||
</p>
|
||||
{% endif %}
|
||||
|
||||
<p>
|
||||
<strong>Message:</strong><br />
|
||||
{{feedback-content|linebreaks-br}}
|
||||
{{content|linebreaks-br|safe}}
|
||||
</p>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@ -1 +1 @@
|
||||
[PENPOT FEEDBACK]: {{feedback-subject}}
|
||||
[PENPOT FEEDBACK]: {{subject}}
|
||||
|
||||
@ -1,11 +1,9 @@
|
||||
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
|
||||
Subject: {{feedback-subject}}
|
||||
Type: {{feedback-type}}
|
||||
{% if profile %}
|
||||
Feedback profile: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
|
||||
{% else %}
|
||||
Feedback from: {{email}}
|
||||
{% endif %}
|
||||
|
||||
{% if feedback-error-href %}
|
||||
HREF: {{feedback-error-href}}
|
||||
{% endif -%}
|
||||
Subject: {{subject}}
|
||||
|
||||
Message:
|
||||
|
||||
{{feedback-content}}
|
||||
{{content}}
|
||||
|
||||
@ -1,264 +0,0 @@
|
||||
<!doctype html>
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml"
|
||||
xmlns:o="urn:schemas-microsoft-com:office:office">
|
||||
|
||||
<head>
|
||||
<title>
|
||||
</title>
|
||||
<!--[if !mso]><!-- -->
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<!--<![endif]-->
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<style type="text/css">
|
||||
#outlook a {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
}
|
||||
|
||||
table,
|
||||
td {
|
||||
border-collapse: collapse;
|
||||
mso-table-lspace: 0pt;
|
||||
mso-table-rspace: 0pt;
|
||||
}
|
||||
|
||||
img {
|
||||
border: 0;
|
||||
height: auto;
|
||||
line-height: 100%;
|
||||
outline: none;
|
||||
text-decoration: none;
|
||||
-ms-interpolation-mode: bicubic;
|
||||
}
|
||||
|
||||
p {
|
||||
display: block;
|
||||
margin: 13px 0;
|
||||
}
|
||||
</style>
|
||||
<!--[if mso]>
|
||||
<xml>
|
||||
<o:OfficeDocumentSettings>
|
||||
<o:AllowPNG/>
|
||||
<o:PixelsPerInch>96</o:PixelsPerInch>
|
||||
</o:OfficeDocumentSettings>
|
||||
</xml>
|
||||
<![endif]-->
|
||||
<!--[if lte mso 11]>
|
||||
<style type="text/css">
|
||||
.mj-outlook-group-fix { width:100% !important; }
|
||||
</style>
|
||||
<![endif]-->
|
||||
<!--[if !mso]><!-->
|
||||
<link href="https://fonts.googleapis.com/css?family=Source%20Sans%20Pro" rel="stylesheet" type="text/css">
|
||||
<style type="text/css">
|
||||
@import url(https://fonts.googleapis.com/css?family=Source%20Sans%20Pro);
|
||||
</style>
|
||||
<!--<![endif]-->
|
||||
<style type="text/css">
|
||||
@media only screen and (min-width:480px) {
|
||||
.mj-column-per-100 {
|
||||
width: 100% !important;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.mj-column-px-425 {
|
||||
width: 425px !important;
|
||||
max-width: 425px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
<style type="text/css">
|
||||
@media only screen and (max-width:480px) {
|
||||
table.mj-full-width-mobile {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
td.mj-full-width-mobile {
|
||||
width: auto !important;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body style="background-color:#E5E5E5;">
|
||||
<div style="background-color:#E5E5E5;">
|
||||
<!--[if mso | IE]>
|
||||
<table
|
||||
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
|
||||
>
|
||||
<tr>
|
||||
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
|
||||
<![endif]-->
|
||||
<div style="margin:0px auto;max-width:600px;">
|
||||
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation" style="width:100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="direction:ltr;font-size:0px;padding:0;text-align:center;">
|
||||
<!--[if mso | IE]>
|
||||
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
|
||||
|
||||
<tr>
|
||||
|
||||
<td
|
||||
class="" style="vertical-align:top;width:600px;"
|
||||
>
|
||||
<![endif]-->
|
||||
<div class="mj-column-per-100 mj-outlook-group-fix"
|
||||
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
|
||||
width="100%">
|
||||
<tr>
|
||||
<td align="left" style="font-size:0px;padding:16px;word-break:break-word;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
|
||||
style="border-collapse:collapse;border-spacing:0px;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="width:97px;">
|
||||
<img height="32" src="{{ public-uri }}/images/email/uxbox-title.png"
|
||||
style="border:0;display:block;outline:none;text-decoration:none;height:32px;width:100%;font-size:13px;"
|
||||
width="97" />
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
<!--[if mso | IE]>
|
||||
</td>
|
||||
|
||||
</tr>
|
||||
|
||||
</table>
|
||||
<![endif]-->
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<!--[if mso | IE]>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<table
|
||||
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
|
||||
>
|
||||
<tr>
|
||||
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
|
||||
<![endif]-->
|
||||
<div style="background:#FFFFFF;background-color:#FFFFFF;margin:0px auto;max-width:600px;">
|
||||
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation"
|
||||
style="background:#FFFFFF;background-color:#FFFFFF;width:100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="direction:ltr;font-size:0px;padding:20px 0;text-align:center;">
|
||||
<!--[if mso | IE]>
|
||||
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
|
||||
|
||||
<tr>
|
||||
|
||||
<td
|
||||
class="" style="vertical-align:top;width:600px;"
|
||||
>
|
||||
<![endif]-->
|
||||
<div class="mj-column-per-100 mj-outlook-group-fix"
|
||||
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
|
||||
width="100%">
|
||||
<tr>
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
Hi{% if user-name %} {{ user-name|abbreviate:25 }}{% endif %},
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
<b>{{invited-by|abbreviate:25}}</b> sent you an invitation to join the organization:
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
<table role="presentation" cellpadding="0" cellspacing="0" border="0" width="20" height="20" style="display:inline-block;vertical-align:middle;">
|
||||
<tr>
|
||||
<td width="20" height="20" align="center" valign="middle"
|
||||
background="{{organization-logo}}"
|
||||
style="width:20px;height:20px;text-align:center;font-weight:bold;font-size:9px;line-height:20px;color:#ffffff;background-size:cover;background-position:center;background-repeat:no-repeat;border-radius: 50%;color:black">
|
||||
{% if organization-initials %}{{organization-initials}}{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
<span style="display:inline-block; vertical-align: middle;padding-left:5px;height:20px;line-height: 20px;">
|
||||
“{{ organization-name|abbreviate:25 }}”
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" vertical-align="middle"
|
||||
style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
|
||||
style="border-collapse:separate;line-height:100%;">
|
||||
<tr>
|
||||
<td align="center" bgcolor="#6911d4" role="presentation"
|
||||
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
|
||||
valign="middle">
|
||||
<a href="{{ public-uri }}/#/auth/verify-token?token={{token}}"
|
||||
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
|
||||
target="_blank"> ACCEPT INVITE </a>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
Enjoy!</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
The Penpot team.</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
<!--[if mso | IE]>
|
||||
</td>
|
||||
|
||||
</tr>
|
||||
|
||||
</table>
|
||||
<![endif]-->
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{% include "app/email/includes/footer.html" %}
|
||||
|
||||
</div>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@ -1 +0,0 @@
|
||||
{{invited-by|abbreviate:25}} has invited you to join the organization “{{ organization-name|abbreviate:25 }}”
|
||||
@ -1,10 +0,0 @@
|
||||
Hello!
|
||||
|
||||
{{invited-by|abbreviate:25}} has invited you to join the organization “{{ organization-name|abbreviate:25 }}”.
|
||||
|
||||
Accept invitation using this link:
|
||||
|
||||
{{ public-uri }}/#/auth/verify-token?token={{token}}
|
||||
|
||||
Enjoy!
|
||||
The Penpot team.
|
||||
@ -186,8 +186,7 @@
|
||||
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
|
||||
<div
|
||||
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
|
||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”{% if organization %}
|
||||
part of the organization “{{ organization|abbreviate:25 }}”{% endif %}.</div>
|
||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”.</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
|
||||
@ -1 +1 @@
|
||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”
|
||||
Invitation to join {{team}}
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
Hello!
|
||||
|
||||
{{invited-by|abbreviate:25}} has invited you to join the team "{{ team|abbreviate:25 }}"{% if organization %}, part of the organization "{{ organization|abbreviate:25 }}"{% endif %}.
|
||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”.
|
||||
|
||||
Accept invitation using this link:
|
||||
|
||||
|
||||
@ -1,9 +1,6 @@
|
||||
[{:id "tokens-starter-kit"
|
||||
:name "Design tokens starter kit"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
|
||||
{:id "penpot-design-system"
|
||||
:name "Penpot Design System | Pencil"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Pencil-Penpot-Design-System.penpot"}
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"},
|
||||
{:id "wireframing-kit"
|
||||
:name "Wireframe library"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
|
||||
@ -13,6 +10,9 @@
|
||||
{:id "plants-app"
|
||||
:name "UI mockup example"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
|
||||
{:id "penpot-design-system"
|
||||
:name "Design system example"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Penpot%20-%20Design%20System%20v2.1.penpot"}
|
||||
{:id "tutorial-for-beginners"
|
||||
:name "Tutorial for beginners"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
<meta charset="utf-8" />
|
||||
<meta name="robots" content="noindex,nofollow">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||
<title>{{label|upper}} API Documentation</title>
|
||||
<title>Builtin API Documentation - Penpot</title>
|
||||
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
@ -19,7 +19,7 @@
|
||||
<body>
|
||||
<main>
|
||||
<header>
|
||||
<h1>{{label|upper}}: API Documentation (v{{version}})</h1>
|
||||
<h1>Penpot API Documentation (v{{version}})</h1>
|
||||
<small class="menu">
|
||||
[
|
||||
<nav>
|
||||
@ -31,10 +31,9 @@
|
||||
</header>
|
||||
<section class="doc-content">
|
||||
<h2>INTRODUCTION</h2>
|
||||
<p>This documentation is intended to be a general overview of
|
||||
the {{label}} API. If you prefer, you can
|
||||
use <a href="{{openapi}}">Swagger/OpenAPI</a> as
|
||||
alternative.</p>
|
||||
<p>This documentation is intended to be a general overview of the penpot RPC API.
|
||||
If you prefer, you can use <a href="/api/openapi.json">OpenAPI</a>
|
||||
and/or <a href="/api/openapi">SwaggerUI</a> as alternative.</p>
|
||||
|
||||
<h2>GENERAL NOTES</h2>
|
||||
|
||||
@ -44,7 +43,7 @@
|
||||
that starts with <b>get-</b> in the name, can use GET HTTP
|
||||
method which in many cases benefits from the HTTP cache.</p>
|
||||
|
||||
{% block auth-section %}
|
||||
|
||||
<h3>Authentication</h3>
|
||||
<p>The penpot backend right now offers two way for authenticate the request:
|
||||
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the
|
||||
@ -57,10 +56,9 @@
|
||||
<p>The access token can be obtained on the appropriate section on profile settings
|
||||
and it should be provided using <b>`Authorization`</b> header with <b>`Token
|
||||
<token-string>`</b> value.</p>
|
||||
{% endblock %}
|
||||
|
||||
<h3>Content Negotiation</h3>
|
||||
<p>This API operates indistinctly with: <b>`application/json`</b>
|
||||
<p>The penpot API by default operates indistinctly with: <b>`application/json`</b>
|
||||
and <b>`application/transit+json`</b> content types. You should specify the
|
||||
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
|
||||
by default.</p>
|
||||
@ -77,16 +75,13 @@
|
||||
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
|
||||
API</a></p>
|
||||
|
||||
{% block limits-section %}
|
||||
<h3>Limits</h3>
|
||||
<p>The rate limit work per user basis (this means that different api keys share
|
||||
the same rate limit). For now the limits are not documented because we are
|
||||
studying and analyzing the data. As a general rule, it should not be abused, if an
|
||||
abusive use is detected, we will proceed to block the user's access to the
|
||||
API.</p>
|
||||
{% endblock %}
|
||||
|
||||
{% block webhooks-section %}
|
||||
<h3>Webhooks</h3>
|
||||
<p>All methods that emit webhook events are marked with flag <b>WEBHOOK</b>, the
|
||||
data structure defined on each method represents the <i>payload</i> of the
|
||||
@ -102,11 +97,9 @@
|
||||
"profileId": "db601c95-045f-808b-8002-361312e63531"
|
||||
}
|
||||
</pre>
|
||||
{% endblock %}
|
||||
|
||||
</section>
|
||||
<section class="rpc-doc-content">
|
||||
<h2>METHODS REFERENCE:</h2>
|
||||
<h2>RPC METHODS REFERENCE:</h2>
|
||||
<ul class="rpc-items">
|
||||
{% for item in methods %}
|
||||
{% include "app/templates/api-doc-entry.tmpl" with item=item %}
|
||||
|
||||
@ -5,6 +5,7 @@
|
||||
<meta name="robots" content="noindex,nofollow">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||
<title>{% block title %}{% endblock %}</title>
|
||||
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=JetBrains+Mono">
|
||||
<style>
|
||||
{% include "app/templates/styles.css" %}
|
||||
</style>
|
||||
|
||||
@ -12,62 +12,43 @@ Debug Main Page
|
||||
</nav>
|
||||
<main class="dashboard">
|
||||
<section class="widget">
|
||||
|
||||
<fieldset>
|
||||
<legend>CURRENT PROFILE</legend>
|
||||
<desc>
|
||||
<p>
|
||||
Name: <b>{{profile.fullname}}</b> <br />
|
||||
Email: <b>{{profile.email}}</b>
|
||||
</p>
|
||||
</desc>
|
||||
<legend>Error reports</legend>
|
||||
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>VIRTUAL CLOCK</legend>
|
||||
|
||||
<desc>
|
||||
<p><b>IMPORTANT:</b> The virtual clock is profile based and only affects the currently logged-in profile.</p>
|
||||
<p>
|
||||
CURRENT CLOCK: <b>{{current-clock}}</b>
|
||||
<br />
|
||||
CURRENT OFFSET: <b>{{current-offset}}</b>
|
||||
<br />
|
||||
CURRENT TIME: <b>{{current-time}}</b>
|
||||
</p>
|
||||
|
||||
<p>Examples: 3h, -7h, 24h (allowed suffixes: h, s)</p>
|
||||
</desc>
|
||||
|
||||
<form method="post" action="/dbg/actions/set-virtual-clock">
|
||||
<legend>Download file data:</legend>
|
||||
<desc>Given an FILE-ID, downloads the file data as file. The file data is encoded using transit.</desc>
|
||||
<form method="get" action="/dbg/file/data">
|
||||
<div class="row">
|
||||
<input type="text" name="offset" placeholder="3h" value="" />
|
||||
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-verify">Are you sure?</label>
|
||||
<input id="force-verify" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" name="submit" value="Submit" />
|
||||
<input type="submit" name="reset" value="Reset" />
|
||||
<input type="submit" name="download" value="Download" />
|
||||
<input type="submit" name="clone" value="Clone" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>ERROR REPORTS</legend>
|
||||
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
|
||||
<legend>Upload File Data:</legend>
|
||||
<desc>Create a new file on your draft projects using the file downloaded from the previous section.</desc>
|
||||
<form method="post" enctype="multipart/form-data" action="/dbg/file/data">
|
||||
<div class="row">
|
||||
<input type="file" name="file" value="" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<label>Import with same id?</label>
|
||||
<input type="checkbox" name="reuseid" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Upload" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
|
||||
|
||||
<section class="widget">
|
||||
<fieldset>
|
||||
<legend>Profile Management</legend>
|
||||
<form method="post" action="/dbg/actions/resend-email-verification">
|
||||
@ -97,100 +78,9 @@ Debug Main Page
|
||||
</fieldset>
|
||||
|
||||
|
||||
<fieldset>
|
||||
<legend>Feature Flags for Team</legend>
|
||||
<desc>Add a feature flag to a team</desc>
|
||||
<form method="post" action="/dbg/actions/handle-team-features">
|
||||
<div class="row">
|
||||
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<select type="text" style="width:100px" name="feature">
|
||||
{% for feature in supported-features %}
|
||||
<option value="{{feature}}">{{feature}}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<select style="width:100px" name="action">
|
||||
<option value="">Action...</option>
|
||||
<option value="show">Show</option>
|
||||
<option value="enable">Enable</option>
|
||||
<option value="disable">Disable</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="check-feature">Skip feature check</label>
|
||||
<input id="check-feature" type="checkbox" name="skip-check" />
|
||||
<br />
|
||||
<small>
|
||||
Do not check if the feature is supported
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-version">Are you sure?</label>
|
||||
<input id="force-version" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Submit" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
|
||||
|
||||
<section class="widget">
|
||||
|
||||
<fieldset>
|
||||
<legend>Download RAW file data:</legend>
|
||||
<desc>Given an FILE-ID, downloads the file AS-IS (no validation
|
||||
checks, just exports the file data and related objects in raw)
|
||||
|
||||
<br/>
|
||||
<br/>
|
||||
<b>WARNING: this operation does not performs any checks</b>
|
||||
</desc>
|
||||
<form method="get" action="/dbg/actions/file-raw-export-import">
|
||||
<div class="row">
|
||||
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="submit" name="download" value="Download" />
|
||||
<input type="submit" name="clone" value="Clone" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>Upload File Data:</legend>
|
||||
<desc>Create a new file on your draft projects using the file downloaded from the previous section.
|
||||
<br/>
|
||||
<br/>
|
||||
<b>WARNING: this operation does not performs any checks</b>
|
||||
</desc>
|
||||
<form method="post" enctype="multipart/form-data" action="/dbg/actions/file-raw-export-import">
|
||||
<div class="row">
|
||||
<input type="file" name="file" value="" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<label>Import with same id?</label>
|
||||
<input type="checkbox" name="reuseid" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Upload" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
<section class="widget">
|
||||
<fieldset>
|
||||
<legend>Export binfile:</legend>
|
||||
@ -198,7 +88,7 @@ Debug Main Page
|
||||
the related libraries in a single custom formatted binary
|
||||
file.</desc>
|
||||
|
||||
<form method="get" action="/dbg/actions/file-export">
|
||||
<form method="get" action="/dbg/file/export">
|
||||
<div class="row set-of-inputs">
|
||||
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
|
||||
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
|
||||
@ -226,7 +116,7 @@ Debug Main Page
|
||||
<legend>Import binfile:</legend>
|
||||
<desc>Import penpot file in binary format.</desc>
|
||||
|
||||
<form method="post" enctype="multipart/form-data" action="/dbg/actions/file-import">
|
||||
<form method="post" enctype="multipart/form-data" action="/dbg/file/import">
|
||||
<div class="row">
|
||||
<input type="file" name="file" value="" />
|
||||
</div>
|
||||
@ -237,5 +127,107 @@ Debug Main Page
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
|
||||
<section class="widget">
|
||||
<fieldset>
|
||||
<legend>Reset file version</legend>
|
||||
<desc>Allows reset file data version to a specific number/</desc>
|
||||
|
||||
<form method="post" action="/dbg/actions/reset-file-version">
|
||||
<div class="row">
|
||||
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="number" style="width:100px" name="version" placeholder="version" value="32" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-version">Are you sure?</label>
|
||||
<input id="force-version" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Submit" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
|
||||
<section class="widget">
|
||||
<h2>Feature Flags</h2>
|
||||
<fieldset>
|
||||
<legend>Enable</legend>
|
||||
<desc>Add a feature flag to a team</desc>
|
||||
<form method="post" action="/dbg/actions/add-team-feature">
|
||||
<div class="row">
|
||||
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="text" style="width:100px" name="feature" placeholder="feature" value="" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="check-feature">Skip feature check</label>
|
||||
<input id="check-feature" type="checkbox" name="skip-check" />
|
||||
<br />
|
||||
<small>
|
||||
Do not check if the feature is supported
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-version">Are you sure?</label>
|
||||
<input id="force-version" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Submit" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<legend>Disable</legend>
|
||||
<desc>Remove a feature flag from a team</desc>
|
||||
<form method="post" action="/dbg/actions/remove-team-feature">
|
||||
<div class="row">
|
||||
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="text" style="width:100px" name="feature" placeholder="feature" value="" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="check-feature">Skip feature check</label>
|
||||
<input id="check-feature" type="checkbox" name="skip-check" />
|
||||
<br />
|
||||
<small>
|
||||
Do not check if the feature is supported
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-version">Are you sure?</label>
|
||||
<input id="force-version" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Submit" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
</main>
|
||||
{% endblock %}
|
||||
|
||||
@ -7,12 +7,7 @@ penpot - error list
|
||||
{% block content %}
|
||||
<nav>
|
||||
<div class="title">
|
||||
<a href="/dbg"> [BACK]</a>
|
||||
<h1>Error reports (last 300)</h1>
|
||||
|
||||
<a class="{% if version = 3 %}strong{% endif %}" href="?version=3">[BACKEND ERRORS]</a>
|
||||
<a class="{% if version = 4 %}strong{% endif %}" href="?version=4">[FRONTEND ERRORS]</a>
|
||||
<a class="{% if version = 5 %}strong{% endif %}" href="?version=5">[RLIMIT REPORTS]</a>
|
||||
<h1>Error reports (last 200)</h1>
|
||||
</div>
|
||||
</nav>
|
||||
<main class="horizontal-list">
|
||||
|
||||
@ -6,7 +6,7 @@ Report: {{hint|abbreviate:150}} - {{id}} - Penpot Error Report (v3)
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<div>[<a href="/dbg/error?version={{version}}">⮜</a>]</div>
|
||||
<div>[<a href="/dbg/error">⮜</a>]</div>
|
||||
<div>[<a href="#head">head</a>]</div>
|
||||
<div>[<a href="#props">props</a>]</div>
|
||||
<div>[<a href="#context">context</a>]</div>
|
||||
|
||||
@ -1,46 +0,0 @@
|
||||
{% extends "app/templates/base.tmpl" %}
|
||||
|
||||
{% block title %}
|
||||
Report: {{hint|abbreviate:150}} - {{id}} - Penpot Error Report (v4)
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<div>[<a href="/dbg/error?version={{version}}">⮜</a>]</div>
|
||||
<div>[<a href="#head">head</a>]</div>
|
||||
<div>[<a href="#context">context</a>]</div>
|
||||
{% if report %}
|
||||
<div>[<a href="#report">report</a>]</div>
|
||||
{% endif %}
|
||||
</nav>
|
||||
<main>
|
||||
<div class="table">
|
||||
<div class="table-row multiline">
|
||||
<div id="head" class="table-key">HEAD</div>
|
||||
<div class="table-val">
|
||||
<h1><span class="not-important">Hint:</span> <br/> {{hint}}</h1>
|
||||
<h2><span class="not-important">Reported at:</span> <br/> {{created-at}}</h2>
|
||||
<h2><span class="not-important">Origin:</span> <br/> {{origin}}</h2>
|
||||
<h2><span class="not-important">HREF:</span> <br/> {{href}}</h2>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="table-row multiline">
|
||||
<div id="context" class="table-key">CONTEXT: </div>
|
||||
|
||||
<div class="table-val">
|
||||
<pre>{{context}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if report %}
|
||||
<div class="table-row multiline">
|
||||
<div id="report" class="table-key">REPORT:</div>
|
||||
<div class="table-val">
|
||||
<pre>{{report}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</main>
|
||||
{% endblock %}
|
||||
@ -1,40 +0,0 @@
|
||||
{% extends "app/templates/base.tmpl" %}
|
||||
|
||||
{% block title %}
|
||||
Report: {{hint|abbreviate:150}} - {{id}} - Penpot Rate Limit Report
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<div>[<a href="/dbg/error?version={{version}}">⮜</a>]</div>
|
||||
<div>[<a href="#head">head</a>]</div>
|
||||
<div>[<a href="#context">context</a>]</div>
|
||||
<div>[<a href="#result">result</a>]</div>
|
||||
</nav>
|
||||
<main>
|
||||
<div class="table">
|
||||
<div class="table-row multiline">
|
||||
<div id="head" class="table-key">HEAD:</div>
|
||||
<div class="table-val">
|
||||
<h1><span class="not-important">Hint:</span> <br/> {{hint}}</h1>
|
||||
<h2><span class="not-important">Reported at:</span> <br/> {{created-at}}</h2>
|
||||
<h2><span class="not-important">Report ID:</span> <br/> {{id}}</h2>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="table-row multiline">
|
||||
<div id="context" class="table-key">CONTEXT: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{context}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="table-row multiline">
|
||||
<div id="result" class="table-key">RESULT: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{result}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</main>
|
||||
{% endblock %}
|
||||
@ -1 +0,0 @@
|
||||
{% extends "app/templates/api-doc.tmpl" %}
|
||||
@ -1,10 +0,0 @@
|
||||
{% extends "app/templates/api-doc.tmpl" %}
|
||||
|
||||
{% block auth-section %}
|
||||
{% endblock %}
|
||||
|
||||
{% block limits-section %}
|
||||
{% endblock %}
|
||||
|
||||
{% block webhooks-section %}
|
||||
{% endblock %}
|
||||
@ -7,7 +7,7 @@
|
||||
name="description"
|
||||
content="SwaggerUI"
|
||||
/>
|
||||
<title>{{label|upper}} API</title>
|
||||
<title>PENPOT Swagger UI</title>
|
||||
<style>{{swagger-css|safe}}</style>
|
||||
</head>
|
||||
<body>
|
||||
@ -16,7 +16,7 @@
|
||||
<script>
|
||||
window.onload = () => {
|
||||
window.ui = SwaggerUIBundle({
|
||||
url: '{{uri}}',
|
||||
url: '{{public-uri}}/api/openapi.json',
|
||||
dom_id: '#swagger-ui',
|
||||
presets: [
|
||||
SwaggerUIBundle.presets.apis,
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
* {
|
||||
font-family: monospace;
|
||||
font-family: "JetBrains Mono", monospace;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
@ -36,10 +36,6 @@ small {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.strong {
|
||||
font-weight: 900;
|
||||
}
|
||||
|
||||
.not-important {
|
||||
color: #888;
|
||||
font-weight: 200;
|
||||
@ -61,26 +57,14 @@ nav {
|
||||
|
||||
nav > .title {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
nav > .title > a {
|
||||
color: black;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
nav > .title > a.strong {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
nav > .title > h1 {
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
font-size: 11px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
nav > .title > * {
|
||||
padding: 0px 6px;
|
||||
}
|
||||
|
||||
nav > div {
|
||||
|
||||
@ -25,7 +25,8 @@
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
|
||||
@ -25,7 +25,8 @@
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
|
||||
@ -3,9 +3,9 @@
|
||||
{:default
|
||||
[[:default :window "200000/h"]]
|
||||
|
||||
;; #{:main/get-teams}
|
||||
;; #{:command/get-teams}
|
||||
;; [[:burst :bucket "5/5/5s"]]
|
||||
|
||||
;; #{:main/get-profile}
|
||||
;; #{:command/get-profile}
|
||||
;; [[:burst :bucket "60/60/1m"]]
|
||||
}
|
||||
|
||||
@ -1,97 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_NITRATE_SHARED_KEY=super-secret-nitrate-api-key
|
||||
export PENPOT_EXPORTER_SHARED_KEY=super-secret-exporter-api-key
|
||||
export PENPOT_NEXUS_SHARED_KEY=super-secret-nexus-api-key
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
|
||||
# DEPRECATED: only used for subscriptions
|
||||
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
|
||||
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_PUBLIC_URI=https://localhost:3449
|
||||
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-password \
|
||||
disable-login-with-ldap \
|
||||
disable-login-with-oidc \
|
||||
disable-login-with-google \
|
||||
disable-login-with-github \
|
||||
disable-login-with-gitlab \
|
||||
disable-telemetry \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
disable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
enable-user-feedback \
|
||||
disable-secure-session-cookies \
|
||||
enable-smtp \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-rpc-climit \
|
||||
enable-rpc-rlimit \
|
||||
enable-quotes \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-auto-file-snapshot \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
disable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-redis-cache \
|
||||
enable-subscriptions";
|
||||
|
||||
# Uncomment for nexus integration testing
|
||||
# export PENPOT_FLAGS="$PENPOT_FLAGS enable-audit-log-archive";
|
||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
export PENPOT_USER_FEEDBACK_DESTINATION="support@example.com"
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
export PENPOT_NITRATE_BACKEND_URI=http://localhost:3000/control-center
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:+UnlockExperimentalVMOptions \
|
||||
-XX:+UseShenandoahGC \
|
||||
-XX:+UseCompactObjectHeaders \
|
||||
-XX:ShenandoahGCMode=generational \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
function setup_minio() {
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
}
|
||||
|
||||
|
||||
@ -71,27 +71,19 @@ def run_cmd(params):
|
||||
print("EXC:", str(cause))
|
||||
sys.exit(-2)
|
||||
|
||||
def create_profile(fullname, email, password, skip_tutorial=False, skip_walkthrough=False):
|
||||
props = {}
|
||||
if skip_tutorial:
|
||||
props["viewed-tutorial?"] = True
|
||||
if skip_walkthrough:
|
||||
props["viewed-walkthrough?"] = True
|
||||
|
||||
def create_profile(fullname, email, password):
|
||||
params = {
|
||||
"cmd": "create-profile",
|
||||
"params": {
|
||||
"fullname": fullname,
|
||||
"email": email,
|
||||
"password": password,
|
||||
**props
|
||||
"password": password
|
||||
}
|
||||
}
|
||||
|
||||
res = run_cmd(params)
|
||||
print(f"Created: {res['email']} / {res['id']}")
|
||||
|
||||
|
||||
def update_profile(email, fullname, password, is_active):
|
||||
params = {
|
||||
"cmd": "update-profile",
|
||||
@ -178,8 +170,6 @@ parser.add_argument("-n", "--fullname", help="fullname", action="store")
|
||||
parser.add_argument("-e", "--email", help="email", action="store")
|
||||
parser.add_argument("-p", "--password", help="password", action="store")
|
||||
parser.add_argument("-c", "--connect", help="connect to PREPL", action="store", default="tcp://localhost:6063")
|
||||
parser.add_argument("--skip-tutorial", help="mark tutorial as viewed", action="store_true")
|
||||
parser.add_argument("--skip-walkthrough", help="mark walkthrough as viewed", action="store_true")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
@ -1,17 +1,112 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
source $SCRIPT_DIR/_env;
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-ldap \
|
||||
enable-login-with-password
|
||||
enable-login-with-oidc \
|
||||
enable-login-with-google \
|
||||
enable-login-with-github \
|
||||
enable-login-with-gitlab \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
disable-secure-session-cookies \
|
||||
enable-smtp \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-rpc-climit \
|
||||
enable-rpc-rlimit \
|
||||
enable-quotes \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-auto-file-snapshot \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
enable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions \
|
||||
enable-subscriptions-old";
|
||||
|
||||
if [ -f $SCRIPT_DIR/_env.local ]; then
|
||||
source $SCRIPT_DIR/_env.local;
|
||||
fi
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot"
|
||||
# export PENPOT_DATABASE_READONLY=true
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot_pre"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
|
||||
|
||||
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
|
||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
|
||||
|
||||
# Initialize MINIO config
|
||||
setup_minio;
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY="assets"
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
export JAVA_OPTS="$JAVA_OPTS -Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
||||
export OPTIONS="-A:jmx-remote -A:dev"
|
||||
|
||||
# Setup HEAP
|
||||
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
|
||||
48
backend/scripts/repl-test
Executable file
48
backend/scripts/repl-test
Executable file
@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
source /home/penpot/environ
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full \
|
||||
-J-XX:+UseTransparentHugePages \
|
||||
-J-XX:ReservedCodeCacheSize=1g \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J--enable-preview";
|
||||
|
||||
# Setup HEAP
|
||||
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
|
||||
|
||||
export PENPOT_HTTP_SERVER_IO_THREADS=2
|
||||
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Setup GC
|
||||
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
set -ex
|
||||
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main
|
||||
@ -1,18 +1,44 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-backend-asserts \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-file-snapshot \
|
||||
enable-tiered-file-data-storage";
|
||||
|
||||
source $SCRIPT_DIR/_env;
|
||||
export JAVA_OPTS="
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints";
|
||||
|
||||
if [ -f $SCRIPT_DIR/_env.local ]; then
|
||||
source $SCRIPT_DIR/_env.local;
|
||||
fi
|
||||
export CLOJURE_OPTIONS="-A:dev"
|
||||
|
||||
export OPTIONS="-A:dev"
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
shift 1;
|
||||
set -ex
|
||||
|
||||
exec clojure $OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||
clojure $CLOJURE_OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||
|
||||
@ -1,15 +1,70 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
source $SCRIPT_DIR/_env;
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-nrepl-server \
|
||||
enable-webhooks \
|
||||
enable-backend-asserts \
|
||||
enable-audit-log \
|
||||
enable-login-with-ldap \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
disable-secure-session-cookies \
|
||||
enable-rpc-climit \
|
||||
enable-smtp \
|
||||
enable-quotes \
|
||||
enable-file-snapshot \
|
||||
enable-access-tokens \
|
||||
enable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions \
|
||||
enable-subscriptions-old ";
|
||||
|
||||
if [ -f $SCRIPT_DIR/_env.local ]; then
|
||||
source $SCRIPT_DIR/_env.local;
|
||||
fi
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# Initialize MINIO config
|
||||
setup_minio;
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
export OPTIONS="-A:jmx-remote -A:dev"
|
||||
|
||||
shift 1;
|
||||
set -ex
|
||||
exec clojure -A:jmx-remote -A:dev -M -m app.main "$@";
|
||||
clojure $OPTIONS -M -m $entrypoint;
|
||||
|
||||
@ -111,7 +111,7 @@
|
||||
[:host {:optional true} :string]
|
||||
[:port {:optional true} ::sm/int]
|
||||
[:bind-dn {:optional true} :string]
|
||||
[:bind-password {:optional true} :string]
|
||||
[:bind-passwor {:optional true} :string]
|
||||
[:query {:optional true} :string]
|
||||
[:base-dn {:optional true} :string]
|
||||
[:attrs-email {:optional true} :string]
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -15,21 +15,19 @@
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as fval]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.common.weak :as weak]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.features.file-migrations :as fmigr]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]
|
||||
@ -40,7 +38,6 @@
|
||||
|
||||
(def ^:dynamic *state* nil)
|
||||
(def ^:dynamic *options* nil)
|
||||
(def ^:dynamic *reference-file* nil)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
@ -56,12 +53,17 @@
|
||||
(* 1024 1024 100))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare get-resolved-file-libraries)
|
||||
(declare update-file!)
|
||||
|
||||
(def file-attrs
|
||||
(sm/keys ctf/schema:file))
|
||||
#{:id
|
||||
:name
|
||||
:migrations
|
||||
:features
|
||||
:project-id
|
||||
:is-shared
|
||||
:version
|
||||
:data})
|
||||
|
||||
(defn parse-file-format
|
||||
[template]
|
||||
@ -141,185 +143,44 @@
|
||||
([index coll attr]
|
||||
(reduce #(index-object %1 %2 attr) index coll)))
|
||||
|
||||
(defn- decode-row-features
|
||||
[{:keys [features] :as row}]
|
||||
(defn decode-row
|
||||
[{:keys [data changes features] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
changes (assoc :changes (blob/decode changes))
|
||||
data (assoc :data (blob/decode data)))))
|
||||
|
||||
(def sql:get-minimal-file
|
||||
"SELECT f.id,
|
||||
f.revn,
|
||||
f.modified_at,
|
||||
f.deleted_at
|
||||
FROM file AS f
|
||||
WHERE f.id = ?")
|
||||
|
||||
(defn get-minimal-file
|
||||
[cfg id & {:as opts}]
|
||||
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
|
||||
(defn decode-file
|
||||
"A general purpose file decoding function that resolves all external
|
||||
pointers, run migrations and return plain vanilla file map"
|
||||
[cfg {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [file (->> file
|
||||
(feat.fmigr/resolve-applied-migrations cfg)
|
||||
(feat.fdata/resolve-file-data cfg))
|
||||
libs (delay (get-resolved-file-libraries cfg file))]
|
||||
|
||||
(def sql:files-with-data
|
||||
"SELECT f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
f.has_media_trimmed,
|
||||
f.revn,
|
||||
f.data AS legacy_data,
|
||||
f.ignore_sync_until,
|
||||
f.comment_thread_seqn,
|
||||
f.features,
|
||||
f.version,
|
||||
f.vern,
|
||||
p.team_id,
|
||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
||||
fd.metadata AS metadata,
|
||||
fd.data AS data
|
||||
FROM file AS f
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)")
|
||||
|
||||
(def sql:get-file
|
||||
(str sql:files-with-data " WHERE f.id = ?"))
|
||||
|
||||
(def sql:get-file-without-data
|
||||
(str "WITH files AS (" sql:files-with-data ")"
|
||||
"SELECT f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
f.has_media_trimmed,
|
||||
f.revn,
|
||||
f.ignore_sync_until,
|
||||
f.comment_thread_seqn,
|
||||
f.features,
|
||||
f.version,
|
||||
f.vern,
|
||||
f.team_id
|
||||
FROM files AS f
|
||||
WHERE f.id = ?"))
|
||||
|
||||
(defn- migrate-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [libs (delay (get-resolved-file-libraries cfg file))
|
||||
;; For avoid unnecesary overhead of creating multiple
|
||||
;; pointers and handly internally with objects map in their
|
||||
;; worst case (when probably all shapes and all pointers
|
||||
;; will be readed in any case), we just realize/resolve them
|
||||
;; before applying the migration to the file.
|
||||
file (-> (fdata/realize cfg file)
|
||||
(fmg/migrate-file libs))]
|
||||
|
||||
(if (or read-only? (db/read-only? conn))
|
||||
file
|
||||
(do ;; When file is migrated, we break the rule of no
|
||||
;; perform mutations on get operations and update the
|
||||
;; file with all migrations applied
|
||||
(update-file! cfg file)
|
||||
(fmigr/resolve-applied-migrations cfg file))))))
|
||||
|
||||
(defn- get-file*
|
||||
[{:keys [::db/conn] :as cfg} id
|
||||
{:keys [migrate?
|
||||
realize?
|
||||
decode?
|
||||
skip-locked?
|
||||
include-deleted?
|
||||
load-data?
|
||||
throw-if-not-exists?
|
||||
lock-for-update?
|
||||
lock-for-share?]
|
||||
:or {lock-for-update? false
|
||||
lock-for-share? false
|
||||
load-data? true
|
||||
migrate? true
|
||||
decode? true
|
||||
include-deleted? false
|
||||
throw-if-not-exists? true
|
||||
realize? false}
|
||||
:as options}]
|
||||
|
||||
(assert (db/connection? conn) "expected cfg with valid connection")
|
||||
(when (and (not load-data?)
|
||||
(or lock-for-share? lock-for-share? skip-locked?))
|
||||
(throw (IllegalArgumentException. "locking is incompatible when `load-data?` is false")))
|
||||
|
||||
(let [sql
|
||||
(if load-data?
|
||||
sql:get-file
|
||||
sql:get-file-without-data)
|
||||
|
||||
sql
|
||||
(cond
|
||||
lock-for-update?
|
||||
(str sql " FOR UPDATE of f")
|
||||
|
||||
lock-for-share?
|
||||
(str sql " FOR SHARE of f")
|
||||
|
||||
:else
|
||||
sql)
|
||||
|
||||
sql
|
||||
(if skip-locked?
|
||||
(str sql " SKIP LOCKED")
|
||||
sql)
|
||||
|
||||
file
|
||||
(db/get-with-sql conn [sql id]
|
||||
{::db/throw-if-not-exists false
|
||||
::db/remove-deleted (not include-deleted?)})
|
||||
|
||||
file
|
||||
(-> file
|
||||
(d/update-when :features db/decode-pgarray #{})
|
||||
(d/update-when :metadata fdata/decode-metadata))]
|
||||
|
||||
(if file
|
||||
(if load-data?
|
||||
(let [file
|
||||
(->> file
|
||||
(fmigr/resolve-applied-migrations cfg)
|
||||
(fdata/resolve-file-data cfg))
|
||||
|
||||
will-migrate?
|
||||
(and migrate? (fmg/need-migration? file))]
|
||||
|
||||
(if decode?
|
||||
(cond->> (fdata/decode-file-data cfg file)
|
||||
(and realize? (not will-migrate?))
|
||||
(fdata/realize cfg)
|
||||
|
||||
will-migrate?
|
||||
(migrate-file cfg options))
|
||||
|
||||
file))
|
||||
file)
|
||||
|
||||
(when-not (or skip-locked? (not throw-if-not-exists?))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "database object not found"
|
||||
:table :file
|
||||
:file-id id)))))
|
||||
(update :features db/decode-pgarray #{})
|
||||
(update :data blob/decode)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(update :data assoc :id id)
|
||||
(fmg/migrate-file libs)))))
|
||||
|
||||
(defn get-file
|
||||
"Get file, resolve all features and apply migrations.
|
||||
|
||||
Useful when you have plan to apply massive or not surgical
|
||||
operations on file, because it removes the overhead of lazy fetching
|
||||
Usefull when you have plan to apply massive or not cirurgical
|
||||
operations on file, because it removes the ovehead of lazy fetching
|
||||
and decoding."
|
||||
[cfg file-id & {:as opts}]
|
||||
(db/run! cfg get-file* file-id opts))
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(some->> (db/get* conn :file {:id file-id}
|
||||
(assoc opts ::db/remove-deleted false))
|
||||
(decode-file cfg)))))
|
||||
|
||||
(defn clean-file-features
|
||||
[file]
|
||||
@ -331,81 +192,6 @@
|
||||
(set/difference cfeat/backend-only-features))
|
||||
#{}))))
|
||||
|
||||
(defn check-file-exists
|
||||
[cfg id & {:keys [include-deleted?]
|
||||
:or {include-deleted? false}
|
||||
:as options}]
|
||||
(db/get-with-sql cfg [sql:get-minimal-file id]
|
||||
{:db/remove-deleted (not include-deleted?)}))
|
||||
|
||||
(def ^:private sql:file-permissions
|
||||
"select fpr.is_owner,
|
||||
fpr.is_admin,
|
||||
fpr.can_edit
|
||||
from file_profile_rel as fpr
|
||||
inner join file as f on (f.id = fpr.file_id)
|
||||
where fpr.file_id = ?
|
||||
and fpr.profile_id = ?
|
||||
union all
|
||||
select tpr.is_owner,
|
||||
tpr.is_admin,
|
||||
tpr.can_edit
|
||||
from team_profile_rel as tpr
|
||||
inner join project as p on (p.team_id = tpr.team_id)
|
||||
inner join file as f on (p.id = f.project_id)
|
||||
where f.id = ?
|
||||
and tpr.profile_id = ?
|
||||
union all
|
||||
select ppr.is_owner,
|
||||
ppr.is_admin,
|
||||
ppr.can_edit
|
||||
from project_profile_rel as ppr
|
||||
inner join file as f on (f.project_id = ppr.project_id)
|
||||
where f.id = ?
|
||||
and ppr.profile_id = ?")
|
||||
|
||||
(defn- get-file-permissions*
|
||||
[conn profile-id file-id]
|
||||
(when (and profile-id file-id)
|
||||
(db/exec! conn [sql:file-permissions
|
||||
file-id profile-id
|
||||
file-id profile-id
|
||||
file-id profile-id])))
|
||||
|
||||
(defn get-file-permissions
|
||||
([conn profile-id file-id]
|
||||
(let [rows (get-file-permissions* conn profile-id file-id)
|
||||
is-owner (boolean (some :is-owner rows))
|
||||
is-admin (boolean (some :is-admin rows))
|
||||
can-edit (boolean (some :can-edit rows))]
|
||||
(when (seq rows)
|
||||
{:type :membership
|
||||
:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true
|
||||
:is-logged (some? profile-id)})))
|
||||
|
||||
([conn profile-id file-id share-id]
|
||||
(let [perms (get-file-permissions conn profile-id file-id)
|
||||
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
|
||||
(dissoc :flags)
|
||||
(update :pages db/decode-pgarray #{}))]
|
||||
|
||||
;; NOTE: in a future when share-link becomes more powerful and
|
||||
;; will allow us specify which parts of the app is available, we
|
||||
;; will probably need to tweak this function in order to expose
|
||||
;; this flags to the frontend.
|
||||
(cond
|
||||
(some? perms) perms
|
||||
(some? ldata) {:type :share-link
|
||||
:can-read true
|
||||
:pages (:pages ldata)
|
||||
:is-logged (some? profile-id)
|
||||
:who-comment (:who-comment ldata)
|
||||
:who-inspect (:who-inspect ldata)}))))
|
||||
|
||||
|
||||
(defn get-project
|
||||
[cfg project-id]
|
||||
(db/get cfg :project {:id project-id}))
|
||||
@ -418,12 +204,12 @@
|
||||
(let [conn (db/get-connection cfg)
|
||||
ids (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql:get-teams ids])
|
||||
(map decode-row-features))))
|
||||
(map decode-row))))
|
||||
|
||||
(defn get-team
|
||||
[cfg team-id]
|
||||
(-> (db/get cfg :team {:id team-id})
|
||||
(decode-row-features)))
|
||||
(decode-row)))
|
||||
|
||||
(defn get-fonts
|
||||
[cfg team-id]
|
||||
@ -440,28 +226,11 @@
|
||||
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids (db/create-array conn "uuid" ids)
|
||||
sql (str "SELECT flr.*,"
|
||||
" fls.synced_at"
|
||||
" FROM file_library_rel AS flr"
|
||||
" JOIN file AS l"
|
||||
" ON flr.library_file_id = l.id"
|
||||
" LEFT JOIN file_library_sync AS fls"
|
||||
" ON fls.file_id = flr.file_id"
|
||||
" AND fls.library_file_id = flr.library_file_id"
|
||||
" WHERE flr.file_id = ANY(?)"
|
||||
" AND l.deleted_at IS NULL;")]
|
||||
sql (str "SELECT flr.* FROM file_library_rel AS flr "
|
||||
" JOIN file AS l ON (flr.library_file_id = l.id) "
|
||||
" WHERE flr.file_id = ANY(?) AND l.deleted_at IS NULL")]
|
||||
(db/exec! conn [sql ids])))))
|
||||
|
||||
(def ^:private sql:upsert-file-library-sync
|
||||
"INSERT INTO file_library_sync (file_id, library_file_id, synced_at)
|
||||
VALUES (?::uuid, ?::uuid, ?::timestamptz)
|
||||
ON CONFLICT (file_id, library_file_id)
|
||||
DO UPDATE SET synced_at = EXCLUDED.synced_at;")
|
||||
|
||||
(defn upsert-file-library-sync!
|
||||
[conn {:keys [file-id library-file-id synced-at]}]
|
||||
(db/exec-one! conn [sql:upsert-file-library-sync file-id library-file-id synced-at]))
|
||||
|
||||
(def ^:private sql:get-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.id
|
||||
@ -532,6 +301,7 @@
|
||||
(do
|
||||
(l/trc :hint "lookup index"
|
||||
:file-id (str file-id)
|
||||
:snap-id (str (:snapshot-id file))
|
||||
:id (str id)
|
||||
:result (str (get mobj :id)))
|
||||
(get mobj :id))
|
||||
@ -548,6 +318,7 @@
|
||||
(doseq [[old-id item] missing-index]
|
||||
(l/dbg :hint "create missing references"
|
||||
:file-id (str file-id)
|
||||
:snap-id (str (:snapshot-id file))
|
||||
:old-id (str old-id)
|
||||
:id (str (:id item)))
|
||||
(db/insert! conn :file-media-object item
|
||||
@ -558,16 +329,12 @@
|
||||
(def sql:get-file-media
|
||||
"SELECT * FROM file_media_object WHERE id = ANY(?)")
|
||||
|
||||
(defn get-file-media*
|
||||
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
|
||||
(defn get-file-media
|
||||
[cfg {:keys [data] :as file}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [used (cfh/collect-used-media data)
|
||||
used (db/create-array conn "uuid" used)]
|
||||
(->> (db/exec! conn [sql:get-file-media used])
|
||||
(mapv (fn [row] (assoc row :file-id id))))))
|
||||
|
||||
(defn get-file-media
|
||||
[cfg file]
|
||||
(db/run! cfg get-file-media* file))
|
||||
(db/exec! conn [sql:get-file-media used])))))
|
||||
|
||||
(def ^:private sql:get-team-files-ids
|
||||
"SELECT f.id FROM file AS f
|
||||
@ -642,7 +409,7 @@
|
||||
[cfg data file-id]
|
||||
(let [library-ids (get-libraries cfg [file-id])]
|
||||
(reduce (fn [data library-id]
|
||||
(if-let [library (get-file cfg library-id :include-deleted? true)]
|
||||
(if-let [library (get-file cfg library-id)]
|
||||
(ctf/absorb-assets data (:data library))
|
||||
data))
|
||||
data
|
||||
@ -654,27 +421,6 @@
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
|
||||
|
||||
(defn invalidate-thumbnails
|
||||
[cfg file-id]
|
||||
(let [storage (sto/resolve cfg)
|
||||
|
||||
sql-1
|
||||
(str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
|
||||
sql-2
|
||||
(str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")]
|
||||
|
||||
(run! #(sto/touch-object! storage %)
|
||||
(sequence
|
||||
(keep :media-id)
|
||||
(concat
|
||||
(db/exec! cfg [sql-1 file-id])
|
||||
(db/exec! cfg [sql-2 file-id]))))))
|
||||
|
||||
(defn process-file
|
||||
[cfg {:keys [id] :as file}]
|
||||
(let [libs (delay (get-resolved-file-libraries cfg file))]
|
||||
@ -698,102 +444,78 @@
|
||||
;; all of them, not only the applied
|
||||
(vary-meta dissoc ::fmg/migrated))))
|
||||
|
||||
(defn- encode-file
|
||||
[cfg {:keys [id features] :as file}]
|
||||
(let [file (if (and (contains? features "fdata/objects-map")
|
||||
(:data file))
|
||||
(fdata/enable-objects-map file)
|
||||
(defn encode-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id features] :as file}]
|
||||
(let [file (if (contains? features "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (and (contains? features "fdata/pointer-map")
|
||||
(:data file))
|
||||
|
||||
(binding [pmap/*tracked* (pmap/create-tracked :inherit true)]
|
||||
(let [file (fdata/enable-pointer-map file)]
|
||||
(fdata/persist-pointers! cfg id)
|
||||
file (if (contains? features "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)]
|
||||
|
||||
(-> file
|
||||
(d/update-when :features into-array)
|
||||
(d/update-when :data blob/encode))))
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))))
|
||||
|
||||
(defn- file->params
|
||||
(defn get-params-from-file
|
||||
[file]
|
||||
(-> (select-keys file file-attrs)
|
||||
(assoc :data nil)
|
||||
(dissoc :team-id)
|
||||
(dissoc :migrations)))
|
||||
|
||||
(defn- file->file-data-params
|
||||
[{:keys [id] :as file} & {:as opts}]
|
||||
(let [created-at (or (:created-at file) (ct/now))
|
||||
modified-at (or (:modified-at file) created-at)]
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:type "main"
|
||||
:file-id id
|
||||
(let [params {:has-media-trimmed (:has-media-trimmed file)
|
||||
:ignore-sync-until (:ignore-sync-until file)
|
||||
:project-id (:project-id file)
|
||||
:features (:features file)
|
||||
:name (:name file)
|
||||
:is-shared (:is-shared file)
|
||||
:version (:version file)
|
||||
:data (:data file)
|
||||
:metadata (:metadata file)
|
||||
:created-at created-at
|
||||
:modified-at modified-at})))
|
||||
:id (:id file)
|
||||
:deleted-at (:deleted-at file)
|
||||
:created-at (:created-at file)
|
||||
:modified-at (:modified-at file)
|
||||
:revn (:revn file)
|
||||
:vern (:vern file)}]
|
||||
|
||||
(-> (d/without-nils params)
|
||||
(assoc :data-backend nil)
|
||||
(assoc :data-ref-id nil))))
|
||||
|
||||
(defn insert-file!
|
||||
"Insert a new file into the database table. Expectes a not-encoded file.
|
||||
Returns nil."
|
||||
"Insert a new file into the database table"
|
||||
[{:keys [::db/conn] :as cfg} file & {:as opts}]
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(when (:migrations file)
|
||||
(fmigr/upsert-migrations! conn file))
|
||||
|
||||
(let [file (encode-file cfg file)]
|
||||
(db/insert! conn :file
|
||||
(file->params file)
|
||||
(assoc opts ::db/return-keys false))
|
||||
|
||||
(->> (file->file-data-params file)
|
||||
(fdata/upsert! cfg))
|
||||
|
||||
nil))
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(let [params (-> (encode-file cfg file)
|
||||
(get-params-from-file))]
|
||||
(db/insert! conn :file params opts)))
|
||||
|
||||
(defn update-file!
|
||||
"Update an existing file on the database. Expects not encoded file."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
|
||||
"Update an existing file on the database."
|
||||
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file} & {:as opts}]
|
||||
(let [file (encode-file cfg file)
|
||||
params (-> (get-params-from-file file)
|
||||
(dissoc :id))]
|
||||
|
||||
(if (::reset-migrations? opts false)
|
||||
(fmigr/reset-migrations! conn file)
|
||||
(fmigr/upsert-migrations! conn file))
|
||||
;; If file was already offloaded, we touch the underlying storage
|
||||
;; object for properly trigger storage-gc-touched task
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(some->> (:data-ref-id file) (sto/touch-object! storage)))
|
||||
|
||||
(let [file
|
||||
(encode-file cfg file)
|
||||
|
||||
file-params
|
||||
(file->params (dissoc file :id))
|
||||
|
||||
file-data-params
|
||||
(file->file-data-params file)]
|
||||
|
||||
(db/update! conn :file file-params
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(fdata/upsert! cfg file-data-params)
|
||||
nil))
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(db/update! conn :file params {:id id} opts)))
|
||||
|
||||
(defn save-file!
|
||||
"Applies all the final validations and perist the file, binfile
|
||||
specific, should not be used outside of binfile domain.
|
||||
Returns nil"
|
||||
specific, should not be used outside of binfile domain"
|
||||
[{:keys [::timestamp] :as cfg} file & {:as opts}]
|
||||
|
||||
(assert (ct/inst? timestamp) "expected valid timestamp")
|
||||
(assert (dt/instant? timestamp) "expected valid timestamp")
|
||||
|
||||
(let [file (-> file
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(cond-> (not (::overwrite cfg))
|
||||
(assoc :ignore-sync-until (ct/plus timestamp (ct/duration {:seconds 5}))))
|
||||
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
|
||||
(update :features
|
||||
(fn [features]
|
||||
(-> (::features cfg #{})
|
||||
@ -810,26 +532,20 @@
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "file schema validation error" :cause result))))
|
||||
|
||||
(if (::overwrite cfg)
|
||||
(update-file! cfg file (assoc opts ::reset-migrations? true))
|
||||
(insert-file! cfg file opts))))
|
||||
(insert-file! cfg file opts)))
|
||||
|
||||
|
||||
(def ^:private sql:get-file-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.*
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr
|
||||
ON flr.library_file_id = fl.id
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.file_id = ?::uuid
|
||||
|
||||
UNION
|
||||
|
||||
SELECT fl.*
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr
|
||||
ON flr.library_file_id = fl.id
|
||||
JOIN libs AS l
|
||||
ON flr.file_id = l.id
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
JOIN libs AS l ON (flr.file_id = l.id)
|
||||
)
|
||||
SELECT l.id,
|
||||
l.features,
|
||||
@ -841,16 +557,11 @@
|
||||
l.name,
|
||||
l.revn,
|
||||
l.vern,
|
||||
l.is_shared,
|
||||
l.version,
|
||||
fls.synced_at
|
||||
l.synced_at,
|
||||
l.is_shared
|
||||
FROM libs AS l
|
||||
JOIN project AS p
|
||||
ON p.id = l.project_id
|
||||
LEFT JOIN file_library_sync AS fls
|
||||
ON fls.file_id = ?::uuid
|
||||
AND fls.library_file_id = l.id
|
||||
WHERE l.deleted_at IS NULL;")
|
||||
INNER JOIN project AS p ON (p.id = l.project_id)
|
||||
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
|
||||
|
||||
(defn get-file-libraries
|
||||
[conn file-id]
|
||||
@ -859,22 +570,12 @@
|
||||
;; FIXME: :is-indirect set to false to all rows looks
|
||||
;; completly useless
|
||||
(map #(assoc % :is-indirect false))
|
||||
(map decode-row-features))
|
||||
(db/exec! conn [sql:get-file-libraries file-id file-id])))
|
||||
(map decode-row))
|
||||
(db/exec! conn [sql:get-file-libraries file-id])))
|
||||
|
||||
(defn get-resolved-file-libraries
|
||||
"Get all file libraries including itself. Returns an instance of
|
||||
LoadableWeakValueMap that allows do not have strong references to
|
||||
the loaded libraries and reduce possible memory pressure on having
|
||||
all this libraries loaded at same time on processing file validation
|
||||
or file migration.
|
||||
|
||||
This still requires at least one library at time to be loaded while
|
||||
access to it is performed, but it improves considerable not having
|
||||
the need of loading all the libraries at the same time."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(let [library-ids (->> (get-file-libraries conn (:id file))
|
||||
(map :id)
|
||||
(cons (:id file)))
|
||||
load-fn #(get-file cfg % :migrate? false)]
|
||||
(weak/loadable-weak-value-map library-ids load-fn {id file})))
|
||||
"A helper for preload file libraries"
|
||||
[{:keys [::db/conn] :as cfg} file]
|
||||
(->> (get-file-libraries conn (:id file))
|
||||
(into [file] (map #(get-file cfg (:id %))))
|
||||
(d/index-by :id)))
|
||||
|
||||
@ -36,6 +36,11 @@
|
||||
"fdata/shape-data-type"
|
||||
nil
|
||||
|
||||
;; There is no migration needed, but we don't want to allow
|
||||
;; copy paste nor import of variant files into no-variant teams
|
||||
"variants/v1"
|
||||
nil
|
||||
|
||||
(ex/raise :type :internal
|
||||
:code :no-migration-defined
|
||||
:hint (str/ffmt "no migation for feature '%' on file importation" feature)
|
||||
|
||||
@ -17,7 +17,6 @@
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@ -31,6 +30,7 @@
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.tasks.file-gc]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.java.io :as jio]
|
||||
[clojure.set :as set]
|
||||
@ -40,8 +40,8 @@
|
||||
[promesa.util :as pu]
|
||||
[yetti.adapter :as yt])
|
||||
(:import
|
||||
com.github.luben.zstd.ZstdInputStream
|
||||
com.github.luben.zstd.ZstdIOException
|
||||
com.github.luben.zstd.ZstdInputStream
|
||||
com.github.luben.zstd.ZstdOutputStream
|
||||
java.io.DataInputStream
|
||||
java.io.DataOutputStream
|
||||
@ -346,7 +346,7 @@
|
||||
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
||||
(mapv #(dissoc % :file-id)))
|
||||
|
||||
file (cond-> (bfc/get-file cfg file-id :realize? true)
|
||||
file (cond-> (bfc/get-file cfg file-id)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
@ -434,7 +434,7 @@
|
||||
(defn read-import!
|
||||
"Do the importation of the specified resource in penpot custom binary
|
||||
format."
|
||||
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (ct/now)} :as options}]
|
||||
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
|
||||
|
||||
(dm/assert!
|
||||
"expected input stream"
|
||||
@ -442,7 +442,7 @@
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(ct/inst? timestamp))
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [version (read-header! input)]
|
||||
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
|
||||
@ -573,6 +573,7 @@
|
||||
;; Insert all file relations
|
||||
(doseq [{:keys [library-file-id] :as rel} rels]
|
||||
(let [rel (-> rel
|
||||
(assoc :synced-at timestamp)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :library-file-id bfc/lookup-index))]
|
||||
|
||||
@ -582,12 +583,7 @@
|
||||
:file-id (:file-id rel)
|
||||
:lib-id (:library-file-id rel)
|
||||
::l/sync? true)
|
||||
(let [rel-params (dissoc rel :synced-at)]
|
||||
(db/insert! conn :file-library-rel rel-params)
|
||||
(bfc/upsert-file-library-sync! conn {:file-id (:file-id rel-params)
|
||||
:library-file-id (:library-file-id rel-params)
|
||||
:synced-at (or (:synced-at rel)
|
||||
timestamp)})))
|
||||
(db/insert! conn :file-library-rel rel))
|
||||
|
||||
(l/warn :hint "ignoring file library link"
|
||||
:file-id (:file-id rel)
|
||||
@ -686,7 +682,7 @@
|
||||
(io/coercible? output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (ct/tpoint)
|
||||
tp (dt/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
@ -724,7 +720,7 @@
|
||||
(satisfies? jio/IOFactory input))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (ct/tpoint)
|
||||
tp (dt/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
@ -746,6 +742,6 @@
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (ct/format-duration (tp))
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:error? (some? @cs))))))
|
||||
|
||||
|
||||
@ -13,7 +13,6 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@ -24,6 +23,7 @@
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]
|
||||
@ -153,7 +153,7 @@
|
||||
|
||||
(defn- write-file!
|
||||
[cfg file-id]
|
||||
(let [file (bfc/get-file cfg file-id :realize? true)
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
thumbs (bfc/get-file-object-thumbnails cfg file-id)
|
||||
media (bfc/get-file-media cfg file)
|
||||
rels (bfc/get-files-rels cfg #{file-id})]
|
||||
@ -314,10 +314,10 @@
|
||||
(doseq [rel (read-obj cfg :file-rels file-id)]
|
||||
(let [rel (-> rel
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :library-file-id bfc/lookup-index))]
|
||||
(update :library-file-id bfc/lookup-index)
|
||||
(assoc :synced-at timestamp))]
|
||||
(db/insert! conn :file-library-rel rel
|
||||
::db/return-keys false)
|
||||
(bfc/upsert-file-library-sync! conn (assoc rel :synced-at timestamp))))
|
||||
::db/return-keys false)))
|
||||
|
||||
(doseq [media (read-seq cfg :file-media-object file-id)]
|
||||
(let [media (-> media
|
||||
@ -344,7 +344,7 @@
|
||||
(defn export-team!
|
||||
[cfg team-id]
|
||||
(let [id (uuid/next)
|
||||
tp (ct/tpoint)
|
||||
tp (dt/tpoint)
|
||||
cfg (create-database cfg)]
|
||||
|
||||
(l/inf :hint "start"
|
||||
@ -378,15 +378,15 @@
|
||||
(l/inf :hint "end"
|
||||
:operation "export"
|
||||
:id (str id)
|
||||
:elapsed (ct/format-duration elapsed)))))))
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
|
||||
(defn import-team!
|
||||
[cfg path]
|
||||
(let [id (uuid/next)
|
||||
tp (ct/tpoint)
|
||||
tp (dt/tpoint)
|
||||
|
||||
cfg (-> (create-database cfg path)
|
||||
(assoc ::bfc/timestamp (ct/now)))]
|
||||
(assoc ::bfc/timestamp (dt/now)))]
|
||||
|
||||
(l/inf :hint "start"
|
||||
:operation "import"
|
||||
@ -434,4 +434,4 @@
|
||||
(l/inf :hint "end"
|
||||
:operation "import"
|
||||
:id (str id)
|
||||
:elapsed (ct/format-duration elapsed)))))))
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
|
||||
@ -12,6 +12,7 @@
|
||||
[app.binfile.common :as bfc]
|
||||
[app.binfile.migrations :as bfm]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as-alias fmg]
|
||||
@ -20,14 +21,13 @@
|
||||
[app.common.media :as cmedia]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.thumbnails :as cth]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.color :as ctcl]
|
||||
[app.common.types.component :as ctc]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.page :as ctp]
|
||||
[app.common.types.plugins :as ctpg]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.types.tokens-lib :as ctob]
|
||||
[app.common.types.tokens-lib :as cto]
|
||||
[app.common.types.typography :as cty]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@ -36,15 +36,14 @@
|
||||
[app.storage :as sto]
|
||||
[app.storage.impl :as sto.impl]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as jio]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io])
|
||||
(:import
|
||||
java.io.File
|
||||
java.io.InputStream
|
||||
java.io.OutputStreamWriter
|
||||
java.lang.AutoCloseable
|
||||
java.util.zip.ZipEntry
|
||||
java.util.zip.ZipFile
|
||||
java.util.zip.ZipOutputStream))
|
||||
@ -55,7 +54,7 @@
|
||||
[:map {:title "Manifest"}
|
||||
[:version ::sm/int]
|
||||
[:type :string]
|
||||
[:referer {:optional true} :string]
|
||||
|
||||
[:generated-by {:optional true} :string]
|
||||
|
||||
[:files
|
||||
@ -94,7 +93,7 @@
|
||||
|
||||
(defn- default-now
|
||||
[o]
|
||||
(or o (ct/now)))
|
||||
(or o (dt/now)))
|
||||
|
||||
;; --- ENCODERS
|
||||
|
||||
@ -105,25 +104,25 @@
|
||||
(sm/encoder ctp/schema:page sm/json-transformer))
|
||||
|
||||
(def encode-shape
|
||||
(sm/encoder cts/schema:shape sm/json-transformer))
|
||||
(sm/encoder ::cts/shape sm/json-transformer))
|
||||
|
||||
(def encode-media
|
||||
(sm/encoder ctf/schema:media sm/json-transformer))
|
||||
(sm/encoder ::ctf/media sm/json-transformer))
|
||||
|
||||
(def encode-component
|
||||
(sm/encoder ctc/schema:component sm/json-transformer))
|
||||
(sm/encoder ::ctc/component sm/json-transformer))
|
||||
|
||||
(def encode-color
|
||||
(sm/encoder ctcl/schema:library-color sm/json-transformer))
|
||||
|
||||
(def encode-typography
|
||||
(sm/encoder cty/schema:typography sm/json-transformer))
|
||||
(sm/encoder ::cty/typography sm/json-transformer))
|
||||
|
||||
(def encode-tokens-lib
|
||||
(sm/encoder ctob/schema:tokens-lib sm/json-transformer))
|
||||
(sm/encoder ::cto/tokens-lib sm/json-transformer))
|
||||
|
||||
(def encode-plugin-data
|
||||
(sm/encoder ctpg/schema:plugin-data sm/json-transformer))
|
||||
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
|
||||
|
||||
(def encode-storage-object
|
||||
(sm/encoder schema:storage-object sm/json-transformer))
|
||||
@ -140,7 +139,7 @@
|
||||
(sm/decoder ctf/schema:media sm/json-transformer))
|
||||
|
||||
(def decode-component
|
||||
(sm/decoder ctc/schema:component sm/json-transformer))
|
||||
(sm/decoder ::ctc/component sm/json-transformer))
|
||||
|
||||
(def decode-color
|
||||
(sm/decoder ctcl/schema:library-color sm/json-transformer))
|
||||
@ -149,19 +148,19 @@
|
||||
(sm/decoder schema:file sm/json-transformer))
|
||||
|
||||
(def decode-page
|
||||
(sm/decoder ctp/schema:page sm/json-transformer))
|
||||
(sm/decoder ::ctp/page sm/json-transformer))
|
||||
|
||||
(def decode-shape
|
||||
(sm/decoder cts/schema:shape sm/json-transformer))
|
||||
(sm/decoder ::cts/shape sm/json-transformer))
|
||||
|
||||
(def decode-typography
|
||||
(sm/decoder cty/schema:typography sm/json-transformer))
|
||||
(sm/decoder ::cty/typography sm/json-transformer))
|
||||
|
||||
(def decode-tokens-lib
|
||||
(sm/decoder ctob/schema:tokens-lib sm/json-transformer))
|
||||
(sm/decoder cto/schema:tokens-lib sm/json-transformer))
|
||||
|
||||
(def decode-plugin-data
|
||||
(sm/decoder ctpg/schema:plugin-data sm/json-transformer))
|
||||
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
|
||||
|
||||
(def decode-storage-object
|
||||
(sm/decoder schema:storage-object sm/json-transformer))
|
||||
@ -175,31 +174,31 @@
|
||||
(sm/check-fn schema:manifest))
|
||||
|
||||
(def validate-file
|
||||
(sm/check-fn ctf/schema:file))
|
||||
(sm/check-fn ::ctf/file))
|
||||
|
||||
(def validate-page
|
||||
(sm/check-fn ctp/schema:page))
|
||||
(sm/check-fn ::ctp/page))
|
||||
|
||||
(def validate-shape
|
||||
(sm/check-fn cts/schema:shape))
|
||||
(sm/check-fn ::cts/shape))
|
||||
|
||||
(def validate-media
|
||||
(sm/check-fn ctf/schema:media))
|
||||
(sm/check-fn ::ctf/media))
|
||||
|
||||
(def validate-color
|
||||
(sm/check-fn ctcl/schema:library-color))
|
||||
|
||||
(def validate-component
|
||||
(sm/check-fn ctc/schema:component))
|
||||
(sm/check-fn ::ctc/component))
|
||||
|
||||
(def validate-typography
|
||||
(sm/check-fn cty/schema:typography))
|
||||
(sm/check-fn ::cty/typography))
|
||||
|
||||
(def validate-tokens-lib
|
||||
(sm/check-fn ctob/schema:tokens-lib))
|
||||
(sm/check-fn ::cto/tokens-lib))
|
||||
|
||||
(def validate-plugin-data
|
||||
(sm/check-fn ctpg/schema:plugin-data))
|
||||
(sm/check-fn ::ctpg/plugin-data))
|
||||
|
||||
(def validate-storage-object
|
||||
(sm/check-fn schema:storage-object))
|
||||
@ -226,10 +225,7 @@
|
||||
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(cond-> (bfc/get-file cfg file-id
|
||||
{:realize? true
|
||||
:include-deleted? true
|
||||
:lock-for-update? true})
|
||||
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
@ -255,12 +251,10 @@
|
||||
|
||||
(write-entry! output path params)
|
||||
|
||||
(events/tap :progress {:section :storage-object :id id})
|
||||
|
||||
(with-open [input (sto/get-object-data storage sobject)]
|
||||
(.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
|
||||
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
|
||||
(io/copy input output :size (:size sobject))
|
||||
(.closeEntry ^ZipOutputStream output))))))
|
||||
(.closeEntry output))))))
|
||||
|
||||
(defn- export-file
|
||||
[{:keys [::file-id ::output] :as cfg}]
|
||||
@ -281,8 +275,6 @@
|
||||
|
||||
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
|
||||
|
||||
(events/tap :progress {:section :file :id file-id :name (:name file)})
|
||||
|
||||
(vswap! bfc/*state* update :files assoc file-id
|
||||
{:id file-id
|
||||
:name (:name file)
|
||||
@ -290,18 +282,17 @@
|
||||
|
||||
(let [file (cond-> (select-keys file bfc/file-attrs)
|
||||
(:options data)
|
||||
(assoc :options (:options data)))
|
||||
(assoc :options (:options data))
|
||||
|
||||
file (-> file
|
||||
:always
|
||||
(dissoc :data)
|
||||
(dissoc :deleted-at)
|
||||
(encode-file))
|
||||
|
||||
:always
|
||||
(encode-file))
|
||||
path (str "files/" file-id ".json")]
|
||||
(write-entry! output path file))
|
||||
|
||||
(doseq [[index page-id] (d/enumerate pages)]
|
||||
|
||||
(let [path (str "files/" file-id "/pages/" page-id ".json")
|
||||
page (get pages-index page-id)
|
||||
objects (:objects page)
|
||||
@ -312,8 +303,6 @@
|
||||
|
||||
(write-entry! output path page)
|
||||
|
||||
(events/tap :progress {:section :page :id page-id :name (:name page) :file-id file-id})
|
||||
|
||||
(doseq [[shape-id shape] objects]
|
||||
(let [path (str "files/" file-id "/pages/" page-id "/" shape-id ".json")
|
||||
shape (assoc shape :page-id page-id)
|
||||
@ -326,8 +315,6 @@
|
||||
(doseq [{:keys [id] :as media} media]
|
||||
(let [path (str "files/" file-id "/media/" id ".json")
|
||||
media (encode-media media)]
|
||||
|
||||
(events/tap :progress {:section :media :id id :file-id file-id})
|
||||
(write-entry! output path media)))
|
||||
|
||||
(doseq [thumbnail thumbnails]
|
||||
@ -337,13 +324,11 @@
|
||||
data (-> data
|
||||
(assoc :media-id (:media-id thumbnail))
|
||||
(encode-file-thumbnail))]
|
||||
(events/tap :progress {:section :thumbnails :id (:object-id thumbnail) :file-id file-id})
|
||||
(write-entry! output path data)))
|
||||
|
||||
(doseq [[id component] components]
|
||||
(let [path (str "files/" file-id "/components/" id ".json")
|
||||
component (encode-component component)]
|
||||
(events/tap :progress {:section :component :id id :file-id file-id})
|
||||
(write-entry! output path component)))
|
||||
|
||||
(doseq [[id color] colors]
|
||||
@ -354,20 +339,16 @@
|
||||
(and (contains? color :path)
|
||||
(str/empty? (:path color)))
|
||||
(dissoc :path))]
|
||||
(events/tap :progress {:section :color :id id :file-id file-id})
|
||||
(write-entry! output path color)))
|
||||
|
||||
(doseq [[id object] typographies]
|
||||
(let [path (str "files/" file-id "/typographies/" id ".json")
|
||||
typography (encode-typography object)]
|
||||
(events/tap :progress {:section :typography :id id :file-id file-id})
|
||||
(write-entry! output path typography)))
|
||||
|
||||
(when (and tokens-lib
|
||||
(not (ctob/empty-lib? tokens-lib)))
|
||||
(when tokens-lib
|
||||
(let [path (str "files/" file-id "/tokens.json")
|
||||
encoded-tokens (encode-tokens-lib tokens-lib)]
|
||||
(events/tap :progress {:section :tokens-lib :file-id file-id})
|
||||
(write-entry! output path encoded-tokens)))))
|
||||
|
||||
(defn- export-files
|
||||
@ -392,7 +373,6 @@
|
||||
params {:type "penpot/export-files"
|
||||
:version 1
|
||||
:generated-by (str "penpot/" (:full cf/version))
|
||||
:refer "penpot"
|
||||
:files (vec (vals files))
|
||||
:relations rels}]
|
||||
(write-entry! output "manifest.json" params))))
|
||||
@ -465,7 +445,7 @@
|
||||
(defn- read-manifest
|
||||
[^ZipFile input]
|
||||
(let [entry (get-zip-entry input "manifest.json")]
|
||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
|
||||
(decode-manifest manifest)))))
|
||||
|
||||
@ -555,27 +535,24 @@
|
||||
|
||||
(defn- read-entry
|
||||
[^ZipFile input entry]
|
||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(json/read reader :key-fn json/read-kebab-key)))
|
||||
|
||||
(defn- read-plain-entry
|
||||
[^ZipFile input entry]
|
||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(json/read reader)))
|
||||
|
||||
(defn- read-file
|
||||
[{:keys [::bfc/input ::bfc/timestamp]} file-id]
|
||||
[{:keys [::bfc/input ::file-id]}]
|
||||
(let [path (str "files/" file-id ".json")
|
||||
entry (get-zip-entry input path)]
|
||||
(-> (read-entry input entry)
|
||||
(decode-file)
|
||||
(update :revn d/nilv 1)
|
||||
(update :created-at d/nilv timestamp)
|
||||
(update :modified-at d/nilv timestamp)
|
||||
(validate-file))))
|
||||
|
||||
(defn- read-file-plugin-data
|
||||
[{:keys [::bfc/input]} file-id]
|
||||
[{:keys [::bfc/input ::file-id]}]
|
||||
(let [path (str "files/" file-id "/plugin-data.json")
|
||||
entry (get-zip-entry* input path)]
|
||||
(some->> entry
|
||||
@ -584,7 +561,7 @@
|
||||
(validate-plugin-data))))
|
||||
|
||||
(defn- read-file-media
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
(->> (keep (match-media-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@ -604,13 +581,12 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-colors
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
(->> (keep (match-color-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-color)
|
||||
(validate-color))]
|
||||
(events/tap :progress {:section :color :id id :file-id file-id})
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
@ -618,7 +594,7 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-components
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
(let [clean-component-post-decode
|
||||
(fn [component]
|
||||
(d/update-when component :objects
|
||||
@ -642,7 +618,6 @@
|
||||
(clean-component-pre-decode)
|
||||
(decode-component)
|
||||
(clean-component-post-decode))]
|
||||
(events/tap :progress {:section :component :id id :file-id file-id})
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
@ -650,13 +625,12 @@
|
||||
(not-empty))))
|
||||
|
||||
(defn- read-file-typographies
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
(->> (keep (match-typography-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-typography)
|
||||
(validate-typography))]
|
||||
(events/tap :progress {:section :typography :id id :file-id file-id})
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
@ -664,15 +638,14 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-tokens-lib
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
(when-let [entry (d/seek (match-tokens-lib-entry-fn file-id) entries)]
|
||||
(events/tap :progress {:section :tokens-lib :file-id file-id})
|
||||
(->> (read-plain-entry input entry)
|
||||
(decode-tokens-lib)
|
||||
(validate-tokens-lib))))
|
||||
|
||||
(defn- read-file-shapes
|
||||
[{:keys [::bfc/input ::entries] :as cfg} file-id page-id]
|
||||
[{:keys [::bfc/input ::file-id ::page-id ::entries] :as cfg}]
|
||||
(->> (keep (match-shape-entry-fn file-id page-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@ -686,15 +659,15 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-pages
|
||||
[{:keys [::bfc/input ::entries] :as cfg} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
|
||||
(->> (keep (match-page-entry-fn file-id) entries)
|
||||
(keep (fn [{:keys [id entry]}]
|
||||
(let [page (->> (read-entry input entry)
|
||||
(decode-page))
|
||||
page (dissoc page :options)]
|
||||
(events/tap :progress {:section :page :id id :file-id file-id})
|
||||
(when (= id (:id page))
|
||||
(let [objects (read-file-shapes cfg file-id id)]
|
||||
(let [objects (-> (assoc cfg ::page-id id)
|
||||
(read-file-shapes))]
|
||||
(assoc page :objects objects))))))
|
||||
(sort-by :index)
|
||||
(reduce (fn [result {:keys [id] :as page}]
|
||||
@ -702,13 +675,12 @@
|
||||
(d/ordered-map))))
|
||||
|
||||
(defn- read-file-thumbnails
|
||||
[{:keys [::bfc/input ::entries] :as cfg} file-id]
|
||||
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
|
||||
(->> (keep (match-thumbnail-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [page-id frame-id tag entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-file-thumbnail)
|
||||
(validate-file-thumbnail))]
|
||||
|
||||
(if (and (= frame-id (:frame-id object))
|
||||
(= page-id (:page-id object))
|
||||
(= tag (:tag object)))
|
||||
@ -718,13 +690,13 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-data
|
||||
[cfg file-id]
|
||||
(let [colors (read-file-colors cfg file-id)
|
||||
typographies (read-file-typographies cfg file-id)
|
||||
tokens-lib (read-file-tokens-lib cfg file-id)
|
||||
components (read-file-components cfg file-id)
|
||||
plugin-data (read-file-plugin-data cfg file-id)
|
||||
pages (read-file-pages cfg file-id)]
|
||||
[cfg]
|
||||
(let [colors (read-file-colors cfg)
|
||||
typographies (read-file-typographies cfg)
|
||||
tokens-lib (read-file-tokens-lib cfg)
|
||||
components (read-file-components cfg)
|
||||
plugin-data (read-file-plugin-data cfg)
|
||||
pages (read-file-pages cfg)]
|
||||
{:pages (-> pages keys vec)
|
||||
:pages-index (into {} pages)
|
||||
:colors colors
|
||||
@ -734,11 +706,11 @@
|
||||
:plugin-data plugin-data}))
|
||||
|
||||
(defn- import-file
|
||||
[{:keys [::db/conn ::bfc/project-id] :as cfg} {file-id :id file-name :name}]
|
||||
[{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
|
||||
(let [file-id' (bfc/lookup-index file-id)
|
||||
file (read-file cfg file-id)
|
||||
media (read-file-media cfg file-id)
|
||||
thumbnails (read-file-thumbnails cfg file-id)]
|
||||
file (read-file cfg)
|
||||
media (read-file-media cfg)
|
||||
thumbnails (read-file-thumbnails cfg)]
|
||||
|
||||
(l/dbg :hint "processing file"
|
||||
:id (str file-id')
|
||||
@ -747,50 +719,28 @@
|
||||
:version (:version file)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index media :id)
|
||||
(events/tap :progress {:section :file :name file-name})
|
||||
|
||||
(doseq [item media]
|
||||
(let [params (-> item
|
||||
(update :id bfc/lookup-index)
|
||||
(assoc :file-id file-id')
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||
|
||||
(events/tap :progress {:section :media :id (:id params) :file-id file-id})
|
||||
|
||||
(l/dbg :hint "inserting media object"
|
||||
(when media
|
||||
;; Update index with media
|
||||
(l/dbg :hint "update media index"
|
||||
:file-id (str file-id')
|
||||
:id (str (:id params))
|
||||
:media-id (str (:media-id params))
|
||||
:thumbnail-id (str (:thumbnail-id params))
|
||||
:old-id (str (:id item))
|
||||
:total (count media)
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-media-object params
|
||||
::db/on-conflict-do-nothing? (::bfc/overwrite cfg))))
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||
(vswap! bfc/*state* update :media into media))
|
||||
|
||||
(doseq [item thumbnails]
|
||||
(let [media-id (bfc/lookup-index (:media-id item))
|
||||
object-id (-> (assoc item :file-id file-id')
|
||||
(cth/fmt-object-id))
|
||||
params {:file-id file-id'
|
||||
:object-id object-id
|
||||
:tag (:tag item)
|
||||
:media-id media-id}]
|
||||
|
||||
(l/dbg :hint "inserting object thumbnail"
|
||||
(when thumbnails
|
||||
(l/dbg :hint "update thumbnails index"
|
||||
:file-id (str file-id')
|
||||
:media-id (str media-id)
|
||||
:total (count thumbnails)
|
||||
::l/sync? true)
|
||||
|
||||
(events/tap :progress {:section :thumbnail :file-id file-id :object-id object-id})
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
|
||||
(vswap! bfc/*state* update :thumbnails into thumbnails))
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail params
|
||||
::db/on-conflict-do-nothing? true)))
|
||||
|
||||
(events/tap :progress {:section :file :file-id file-id})
|
||||
|
||||
(let [data (-> (read-file-data cfg file-id)
|
||||
(let [data (-> (read-file-data cfg)
|
||||
(d/without-nils)
|
||||
(assoc :id file-id')
|
||||
(cond-> (:options file)
|
||||
@ -807,7 +757,7 @@
|
||||
file (ctf/check-file file)]
|
||||
|
||||
(bfm/register-pending-migrations! cfg file)
|
||||
(bfc/save-file! cfg file)
|
||||
(bfc/save-file! cfg file ::db/return-keys false)
|
||||
|
||||
file-id')))
|
||||
|
||||
@ -824,10 +774,10 @@
|
||||
:file-id (str file-id)
|
||||
:lib-id (str libr-id)
|
||||
::l/sync? true)
|
||||
(let [rel-params {:file-id file-id
|
||||
:library-file-id libr-id}]
|
||||
(db/insert! conn :file-library-rel rel-params)
|
||||
(bfc/upsert-file-library-sync! conn (assoc rel-params :synced-at timestamp)))))))
|
||||
(db/insert! conn :file-library-rel
|
||||
{:synced-at timestamp
|
||||
:file-id file-id
|
||||
:library-file-id libr-id})))))
|
||||
|
||||
(defn- import-storage-objects
|
||||
[{:keys [::bfc/input ::entries ::bfc/timestamp] :as cfg}]
|
||||
@ -837,12 +787,18 @@
|
||||
entries (keep (match-storage-entry-fn) entries)]
|
||||
|
||||
(doseq [{:keys [id entry]} entries]
|
||||
(let [object (-> (read-entry input entry)
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-storage-object)
|
||||
(update :bucket d/nilv sto/default-bucket)
|
||||
(validate-storage-object))
|
||||
(validate-storage-object))]
|
||||
|
||||
ext (cmedia/mtype->extension (:content-type object))
|
||||
(when (not= id (:id object))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
|
||||
:expected-id (str id)
|
||||
:found-id (str (:id object))))
|
||||
|
||||
(let [ext (cmedia/mtype->extension (:content-type object))
|
||||
path (str "objects/" id ext)
|
||||
content (->> path
|
||||
(get-zip-entry input)
|
||||
@ -878,70 +834,61 @@
|
||||
:bucket (:bucket params)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject)))))))
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
|
||||
|
||||
(defn- import-files*
|
||||
[{:keys [::manifest] :as cfg}]
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
(defn- import-file-media
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:section :media})
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (:files manifest) :id)
|
||||
(doseq [item (:media @bfc/*state*)]
|
||||
(let [params (-> item
|
||||
(update :id bfc/lookup-index)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||
|
||||
(import-storage-objects cfg)
|
||||
(l/dbg :hint "inserting file media object"
|
||||
:old-id (str (:id item))
|
||||
:id (str (:id params))
|
||||
:file-id (str (:file-id params))
|
||||
::l/sync? true)
|
||||
|
||||
(let [files (get manifest :files)
|
||||
result (reduce (fn [result file]
|
||||
(let [name' (get file :name)
|
||||
file (assoc file :name name')]
|
||||
(conj result (import-file cfg file))))
|
||||
[]
|
||||
files)]
|
||||
(db/insert! conn :file-media-object params))))
|
||||
|
||||
(import-file-relations cfg)
|
||||
(bfm/apply-pending-migrations! cfg)
|
||||
(defn- import-file-thumbnails
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:section :thumbnails})
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [file-id (bfc/lookup-index (:file-id item))
|
||||
media-id (bfc/lookup-index (:media-id item))
|
||||
object-id (-> (assoc item :file-id file-id)
|
||||
(cth/fmt-object-id))
|
||||
params {:file-id file-id
|
||||
:object-id object-id
|
||||
:tag (:tag item)
|
||||
:media-id media-id}]
|
||||
|
||||
result))
|
||||
(l/dbg :hint "inserting file object thumbnail"
|
||||
:file-id (str file-id)
|
||||
:media-id (str media-id)
|
||||
::l/sync? true)
|
||||
|
||||
(defn- import-file-and-overwrite*
|
||||
[{:keys [::manifest ::bfc/file-id] :as cfg}]
|
||||
|
||||
(when (not= 1 (count (:files manifest)))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-condition
|
||||
:hint "unable to perform in-place update with binfile containing more than 1 file"
|
||||
:manifest manifest))
|
||||
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
|
||||
(let [ref-file (bfc/get-minimal-file cfg file-id ::db/for-update true)
|
||||
file (first (get manifest :files))
|
||||
cfg (assoc cfg ::bfc/overwrite true)]
|
||||
|
||||
(vswap! bfc/*state* update :index assoc (:id file) file-id)
|
||||
|
||||
(binding [bfc/*options* cfg
|
||||
bfc/*reference-file* ref-file]
|
||||
|
||||
(import-storage-objects cfg)
|
||||
(import-file cfg file)
|
||||
|
||||
(bfc/invalidate-thumbnails cfg file-id)
|
||||
(bfm/apply-pending-migrations! cfg)
|
||||
|
||||
[file-id])))
|
||||
(db/insert! conn :file-tagged-object-thumbnail params))))
|
||||
|
||||
(defn- import-files
|
||||
[{:keys [::bfc/timestamp ::bfc/input] :or {timestamp (ct/now)} :as cfg}]
|
||||
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]
|
||||
|
||||
(assert (instance? ZipFile input) "expected zip file")
|
||||
(assert (ct/inst? timestamp) "expected valid instant")
|
||||
(dm/assert!
|
||||
"expected zip file"
|
||||
(instance? ZipFile input))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [manifest (-> (read-manifest input)
|
||||
(validate-manifest))
|
||||
entries (read-zip-entries input)
|
||||
cfg (-> cfg
|
||||
(assoc ::entries entries)
|
||||
(assoc ::manifest manifest)
|
||||
(assoc ::bfc/timestamp timestamp))]
|
||||
entries (read-zip-entries input)]
|
||||
|
||||
(when-not (= "penpot/export-files" (:type manifest))
|
||||
(ex/raise :type :validation
|
||||
@ -964,10 +911,35 @@
|
||||
|
||||
(events/tap :progress {:section :manifest})
|
||||
|
||||
(binding [bfc/*state* (volatile! {:media [] :index {}})]
|
||||
(if (::bfc/file-id cfg)
|
||||
(db/tx-run! cfg import-file-and-overwrite*)
|
||||
(db/tx-run! cfg import-files*)))))
|
||||
(let [index (bfc/update-index (map :id (:files manifest)))
|
||||
state {:media [] :index index}
|
||||
cfg (-> cfg
|
||||
(assoc ::entries entries)
|
||||
(assoc ::manifest manifest)
|
||||
(assoc ::bfc/timestamp timestamp))]
|
||||
|
||||
(binding [bfc/*state* (volatile! state)]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
(let [ids (->> (:files manifest)
|
||||
(reduce (fn [result {:keys [id] :as file}]
|
||||
(let [name' (get file :name)
|
||||
name' (if (map? name)
|
||||
(get name id)
|
||||
name')]
|
||||
(conj result (-> cfg
|
||||
(assoc ::file-id id)
|
||||
(assoc ::file-name name')
|
||||
(import-file)))))
|
||||
[]))]
|
||||
(import-file-relations cfg)
|
||||
(import-storage-objects cfg)
|
||||
(import-file-media cfg)
|
||||
(import-file-thumbnails cfg)
|
||||
|
||||
(bfm/apply-pending-migrations! cfg)
|
||||
|
||||
ids)))))))
|
||||
|
||||
;; --- PUBLIC API
|
||||
|
||||
@ -984,23 +956,24 @@
|
||||
|
||||
[{:keys [::bfc/ids] :as cfg} output]
|
||||
|
||||
(assert
|
||||
(and (set? ids) (every? uuid? ids))
|
||||
"expected a set of uuid's for `::bfc/ids` parameter")
|
||||
(dm/assert!
|
||||
"expected a set of uuid's for `::bfc/ids` parameter"
|
||||
(and (set? ids)
|
||||
(every? uuid? ids)))
|
||||
|
||||
(assert
|
||||
(satisfies? jio/IOFactory output)
|
||||
"expected instance of jio/IOFactory for `input`")
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (ct/tpoint)
|
||||
tp (dt/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
(l/info :hint "start exportation" :export-id (str id))
|
||||
(binding [bfc/*state* (volatile! (bfc/initial-state))]
|
||||
(with-open [^AutoCloseable output (io/output-stream output)]
|
||||
(with-open [^AutoCloseable output (ZipOutputStream. output)]
|
||||
(with-open [output (io/output-stream output)]
|
||||
(with-open [output (ZipOutputStream. output)]
|
||||
(let [cfg (assoc cfg ::output output)]
|
||||
(export-files cfg)
|
||||
(export-storage-objects cfg)))))
|
||||
@ -1029,22 +1002,22 @@
|
||||
(defn import-files!
|
||||
[{:keys [::bfc/input] :as cfg}]
|
||||
|
||||
(assert
|
||||
(dm/assert!
|
||||
"expected valid profile-id and project-id on `cfg`"
|
||||
(and (uuid? (::bfc/profile-id cfg))
|
||||
(uuid? (::bfc/project-id cfg)))
|
||||
"expected valid profile-id and project-id on `cfg`")
|
||||
(uuid? (::bfc/project-id cfg))))
|
||||
|
||||
(assert
|
||||
(io/coercible? input)
|
||||
"expected instance of jio/IOFactory for `input`")
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(io/coercible? input))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (ct/tpoint)
|
||||
tp (dt/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
(try
|
||||
(with-open [input (ZipFile. ^File (fs/file input))]
|
||||
(with-open [input (ZipFile. (fs/file input))]
|
||||
(import-files (assoc cfg ::bfc/input input)))
|
||||
|
||||
(catch Throwable cause
|
||||
@ -1054,11 +1027,5 @@
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (ct/format-duration (tp))
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:error? (some? @cs))))))
|
||||
|
||||
(defn get-manifest
|
||||
[path]
|
||||
(with-open [^AutoCloseable input (ZipFile. ^File (fs/file path))]
|
||||
(-> (read-manifest input)
|
||||
(validate-manifest))))
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user