Compare commits

277 Commits
feature/ho ... master

SHA1:
adbba165a6 d837b6f4ef c700df21c1 9b6eabced0 7d2df472a6 ff49aa4da3 d94706dd54 40efc5bea9 3318b6d62f abd8c8fc95
fd38c9a98b 1123cc8e19 47f07ad059 6871cb70c8 57aa057198 19d79af2db a61e416bb5 06da0b1e92 c13261154f 77f4adf134
0a42ee66b0 6416c2c32a 4ab054c502 016440fd4b cb06c6f6e1 276c5da9f5 84a8191de9 6fc2b27fcf c981844975 fc505d7f85
5382012065 42bcd0adf1 6910d565c7 8e3fff2ca1 5ebcce6742 35ade80ab1 c2e4633b6c 9e9f6d37d2 9a134fc2ba b104e22c7a
1d22c1e9c1 82ee7b2a4c c091b8908a 6d6a87fe5b e1adffa72b 09ac8b5463 cd8177e0ae 012b35d2f8 5ef86cd090 6e424a5541
d800394eb9 7e2a6604dd 709bfdd734 f42aed52ab c37270f61f 24d737d5b4 4bc4f62720 0b60ccb92b c413936f1c 9400eedc06
80b4014588 4ae34b4d02 48add9dc89 7d32cac40a 989647852e 816b4823d2 a97a8f0aaf e2080865b9 ad7086886c 8e08fed4ee
4c5e5f20ee 708684ccc1 c61323257b 06fe4034f0 b011e73f64 44eb6c45f2 4499ca1d34 09b9a73637 f455bdf433 f03bc7ec18
648ff7322f 00b188d9ff db576c7add 3dc36ac0af 2e1e603daf 6f716134dd 0fb195b958 4636320e3b 4c05627d1f 985bc4020a
46215d4372 934d17b994 83825d26e9 a3d7ed7ceb cac79dea01 22ab86d2d6 f98d6a4ae3 c017ed1680 562b248198 611db1f4a9
8bb2ab6321 26f3d34ac3 759bed5c14 eb643e3adb 19debc5181 7fa2f598a1 01f8337dcb edb6e9b044 3fb3b2f3de b074535366
b59175d13f e5f8e69d89 574b3c92b4 47503e6c98 c7090252ff 730dd71ac5 452dd4996d 1957d38907 6c0c7085c9 cdc73fe379
e1d7cfd5e8 92dd2833d1 d98eb758b5 a3973583f8 fcbbe2979f 311db051fb 46683cbc4e 784ecca724 1c72796657 7b7e9bc66a
e32047ffcc fa24b10607 ad066be1c8 20e8f4453c f739252479 b27a88676d e16d0d211b 38336fc566 1b9dc9b4ba ccc2dc426c
29bb07d2cb 67fbd9d9c8 bac88530c1 b9bfa7b90c 61a5a021cf 713a06e999 e44acbebf7 3626f8cc6c 207bd5dac7 3d7ac29ee1
2379128240 bd4908d7c8 083ab2c6a6 34180710cf 8887cb7a26 8a4525cb80 05c5042985 2213fc70f5 06a05223b5 143e77fdfc
8acbfdbeb7 73ec78a7da 46cd6febae c16635ea71 29e4ec3027 c7a1fd04a4 c4ddb99710 9494e08e8f 86cbb01bbf a6562f4017
01606746c1 558353ee99 19b66de202 5c17c711b1 61d388372b 98ee9148fc aaa77fce4e cc7b5378e1 4a4fc6c25f 1cb1c932dc
9bcd24fbd6 12c8cb1982 67bda4f479 f89a097732 fb26b20a42 e0f402c25a b2bc4e81f3 5170ab1e8d 83dc4f3e07 eb2e39acef
b9f2fdaf9d 26e178542a 3c33cedcca 6e4b6a4e6c 70b846c733 216cadc50f b0fe26d31b f070cdbd1f 8384d08c26 a94d5c0efa
d84ddc9834 3b6095bcf5 99180c44e4 32eda02f32 37d6dd41fd 0ba22e894f 5ce2129406 95e27e2655 452dfeb56d 80acbe974d
4fb0a1e712 39d2e303f1 decb1ed860 a830c53218 16d5eecd74 f1849ef17f bc9d3c9575 10f303bc63 037eaea85b 101ac458f5
a4176e1dd6 a3813d64dc bb160d345e 3cf36a79cc 01c3c74b9a 56011315db 610d7b8e45 6f4eb40f91 58334f6ab9 e50679fd0d
cee6f7b507 2514966acd 00a3164f4a 2a34984f86 33e7e8dfd0 a1aa8b21e2 d9b82fad23 ba8a436cb4 05aa933b9a e17831904d
21f5019e1c b2e720e66c 4c959d62b7 c5fb8f1fe0 973409cc8f 33faf622c8 44a96b1fde 26ef312557 2eb99ffb20 ca39675055
a31a579d24 32a8a24a0f 0613f8a0ca a23477301e d654094671 93a0526a7c 5bf1ff715f c6e7d68016 13a01e7c56 3b830417a2
245659807a 2f26419ac7 67df644858 ca6b3a67a3 f2c708cff2 2f26cea790 30c5612cfb cbb749d77c 1cfe84b8b8 30ca844513
c611b2b080 9dc5f5d993 393006d306 ea0fcf3951 cbff89bdea 93740b7c37 4cc63380f2
.envrc (new file, 1 line), @@ -0,0 +1 @@

```
use nix
```
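The new `.envrc` tells direnv to load the repository's Nix shell whenever you enter the directory. A minimal sketch of how a contributor would opt in, assuming direnv is hooked into the shell, Nix is installed, and the repository provides a `shell.nix` or `default.nix` for `use nix` to load (neither file is part of this diff):

```sh
# Hypothetical one-time setup inside the checked-out repository.
cd Docker-DocumentServer
direnv allow    # trust .envrc so that `use nix` is allowed to run
# Entering the directory now loads the Nix shell; leaving it unloads it.
```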
.gitea/workflows/4testing-build.yml.arch (new file, 174 lines), @@ -0,0 +1,174 @@

```yaml
### This workflow sets up an instance, then builds and pushes images ###
name: 4testing multiarch-build
run-name: >-
  Build #${{ inputs.build }} [
  ${{ inputs.amd64 && 'AMD64' || '-' }}
  ${{ inputs.arm64 && 'ARM64' || '-' }}
  ] [
  ${{ inputs.community && 'CE' || '-' }}
  ${{ inputs.developer && 'DE' || '-' }}
  ${{ inputs.enterprise && 'EE' || '-' }}
  ]

on:
  workflow_dispatch:
    inputs:
      build:
        description: 'Build number (ex. 45)'
        type: string
        required: true
      amd64:
        type: boolean
        description: 'Build AMD64'
        default: true
      arm64:
        type: boolean
        description: 'Build ARM64'
        default: true
      community:
        type: boolean
        description: 'Build Community Edition'
        default: true
      enterprise:
        type: boolean
        description: 'Build Enterprise Edition'
        default: true
      developer:
        type: boolean
        description: 'Build Developer Edition'
        default: true

env:
  COMPANY_NAME: "onlyoffice"
  PRODUCT_NAME: "documentserver"

jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - id: matrix
        run: |
          set -ex

          BRANCH_NAME=${GITHUB_REF#refs/heads/}
          if ! [[ $BRANCH_NAME == develop || $BRANCH_NAME =~ hotfix || $BRANCH_NAME =~ release ]]; then
            echo "Wrong branch."
            exit 1
          fi

          [ ${{ github.event.inputs.amd64 }} = true ] && PLATFORMS+=("amd64")
          [ ${{ github.event.inputs.arm64 }} = true ] && PLATFORMS+=("arm64")
          if [ -z ${PLATFORMS} ]; then
            echo "None of the platforms are selected."
            exit 1
          fi

          [ ${{ github.event.inputs.community }} = true ] && EDITIONS+=("community")
          [ ${{ github.event.inputs.enterprise }} = true ] && EDITIONS+=("enterprise")
          [ ${{ github.event.inputs.developer }} = true ] && EDITIONS+=("developer")
          if [ -z ${EDITIONS} ]; then
            echo "None of the editions are selected."
            exit 1
          fi
          echo "editions=$(jq -n -c --arg s "${EDITIONS[*]}" '($s|split(" "))')" >> $GITHUB_OUTPUT
    outputs:
      editions: ${{ steps.matrix.outputs.editions }}

  build:
    name: "Build ${{ matrix.image }}-${{ matrix.edition }}"
    runs-on: ubuntu-latest
    needs: prepare
    strategy:
      fail-fast: false
      matrix:
        image: ["documentserver"]
        edition: ${{ fromJSON(needs.prepare.outputs.editions) }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Build 4testing
        id: build-ds
        run: |
          set -eux

          ### ==>> At this step build variable declaration ###

          case ${{ matrix.edition }} in
            community)
              PRODUCT_EDITION=""
              ;;
            enterprise)
              PRODUCT_EDITION="-ee"
              ;;
            developer)
              PRODUCT_EDITION="-de"
              ;;
          esac

          [ ${{ github.event.inputs.amd64 }} = true ] && PLATFORMS+=("amd64")
          [ ${{ github.event.inputs.arm64 }} = true ] && PLATFORMS+=("arm64")
          PLATFORM=$(echo ${PLATFORMS[*]/#/linux/} | tr ' ' ',')

          BRANCH_NAME=${GITHUB_REF#refs/heads/}
          if [ $BRANCH_NAME = develop ]; then
            BUILD_CHANNEL=nightly
            PRODUCT_VERSION=99.99.99
          elif [[ $BRANCH_NAME =~ hotfix || $BRANCH_NAME =~ release ]]; then
            BUILD_CHANNEL=test
            PRODUCT_VERSION=${BRANCH_NAME#*/v}
          fi
          BUILD_NUMBER=${{ github.event.inputs.build }}

          export PRODUCT_EDITION
          export PACKAGE_VERSION=${PRODUCT_VERSION}-${BUILD_NUMBER}
          export PACKAGE_BASEURL=${{ secrets.REPO_BASEURL }}
          export BUILD_CHANNEL
          export PLATFORM
          export DOCKERFILE=Dockerfile
          export PREFIX_NAME=4testing-
          export TAG=${PRODUCT_VERSION}.${BUILD_NUMBER}

          ### ==>> Build and push images at this step ###

          docker buildx bake -f docker-bake.hcl ${{ matrix.image }} --push
          echo "DONE: Build success"

          ### Set output for Zap scanner
          ### NOTE: Output will be used only in release/hotfix branches

          echo "version=${TAG}" >> "$GITHUB_OUTPUT"
          echo "branch=${BRANCH_NAME}" >> "$GITHUB_OUTPUT"
        shell: bash

      # Run scanner only when edition is community
      # and branch hit release/ or hotfix/
      - name: Trigger zap manually
        if: >-
          matrix.edition == 'community' &&
          (startsWith(steps.build-ds.outputs.branch, 'release/') ||
          startsWith(steps.build-ds.outputs.branch, 'hotfix/'))
        env:
          VERSION: ${{ steps.build-ds.outputs.version }}
          BRANCH: ${{ steps.build-ds.outputs.branch }}
          GITHUB_TOKEN: ${{ secrets.TOKEN }}
        run: |
          gh workflow run zap-ds.yaml \
            --repo ${{ github.repository }} \
            -f branch=${BRANCH} \
            -f version=${VERSION}
        shell: bash
```
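The workflow above only runs on manual dispatch with a build number plus platform and edition toggles. A sketch of how such a dispatch might look from the command line, assuming the `gh` CLI is authenticated against the hosting repository and the workflow file is active under its name without the `.arch` suffix (both assumptions; the repository path below is illustrative):

```sh
# Illustrative dispatch: build number 45, both platforms, Community Edition only.
gh workflow run 4testing-build.yml \
  --repo ONLYOFFICE/Docker-DocumentServer \
  -f build=45 \
  -f amd64=true -f arm64=true \
  -f community=true -f enterprise=false -f developer=false
```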
.gitea/workflows/build.yml (new file, 66 lines), @@ -0,0 +1,66 @@

```yaml
name: Build

on:
  schedule:
    - cron: "0 0 * * 1"
  push:
    tags:
      - 'manual_build'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Get upstream repository
        run: |
          git clone https://github.com/ONLYOFFICE/DocumentServer/ upstream

      - name: Extract tags and labels for Docker
        id: meta
        run: |
          export VERSION="$(cd upstream && git describe --tags --abbrev=0 | cut -d 'v' -f2-)"
          echo "Version: ${VERSION}"
          echo "tag=${VERSION}" >> $GITHUB_OUTPUT
        shell: bash

      - name: Check if the image was already built
        id: check-build-status
        run: |
          echo ${{ steps.meta.outputs.tag }}
          if curl --silent -f -lSL "https://hub.docker.com/v2/repositories/jiriks74/onlyoffice-documentserver/tags/$(echo ${{ steps.meta.outputs.tag }})" > /dev/null; then echo "Already exists" && exit 1; else echo "Doesn't exist" && exit 0; fi

      - name: Set tag in Dockerfile
        run: |
          sed -i "s/ARG PACKAGE_VERSION=/ARG PACKAGE_VERSION=$(echo ${{ steps.meta.outputs.tag }} | cut -d ':' -f2)/g" Dockerfile

      - name: Update repositories
        run: sudo apt update
      - name: Install Docker
        uses: awalsh128/cache-apt-pkgs-action@latest
        with:
          packages: docker.io

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        with:
          platforms: linux/amd64,linux/arm64

      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push image
        uses: docker/build-push-action@v2
        with:
          push: true
          tags: jiriks74/onlyoffice-documentserver:${{ steps.meta.outputs.tag }},jiriks74/onlyoffice-documentserver:latest
          platforms: linux/amd64,linux/arm64
          context: .
          file: ./Dockerfile
```
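Besides the weekly cron schedule, this workflow is triggered by pushing the `manual_build` tag. A minimal sketch of forcing a one-off run that way (assumes push access to the fork):

```sh
# Re-point the manual_build tag at the current commit and push it,
# which matches the `push: tags: ['manual_build']` trigger above.
git tag -f manual_build
git push -f origin manual_build
```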
.gitea/workflows/cron-rebuild-trigger.yml.arch (new file, 22 lines), @@ -0,0 +1,22 @@

```yaml
---
name: Trigger 4testing rebuild

run-name: "Weekly 4testing rebuild trigger"

on:
  schedule:
    # Run every Saturday at 10 p.m.
    - cron: '00 22 * * 6'

jobs:
  trigger-rebuild:
    name: "trigger-rebuild"
    runs-on: "ubuntu-latest"
    steps:
      - name: Rebuild 4testing manually
        env:
          GITHUB_TOKEN: ${{ secrets.TOKEN }}
        run: |
          gh workflow run rebuild.yml \
            --repo ONLYOFFICE/Docker-DocumentServer \
            -f repo=4test
```
.gitea/workflows/rebuild.yml.arch (new file, 224 lines), @@ -0,0 +1,224 @@

```yaml
---
name: Rebuild Docker-Documentserver

run-name: >
  Rebuild DocumentServer with secure updates for repo: ${{ github.event.inputs.repo }}

on:
  workflow_dispatch:
    inputs:
      repo:
        type: choice
        description: Please, choose upload repo..
        options:
          - '4test'
          - 'stable'

permissions:
  # All other permissions are set to none
  contents: read
  # Technically read access while waiting for images should be more than enough. However,
  # there is a bug in GitHub Actions/Packages and in case private repositories are used, you get a permission
  # denied error when attempting to just pull private image, changing the token permission to write solves the
  # issue. This is not dangerous, because if it is for "ONLYOFFICE/Docker-DocumentServer", only maintainers can use ds-rebuild.yaml
  # If it is for a fork, then the token is read-only anyway.
  packages: read

env:
  COMPANY_NAME: "onlyoffice"
  PRODUCT_NAME: "documentserver"
  REGISTRY_URL: "https://hub.docker.com/v2/repositories"

jobs:
  rebuild-info:
    name: "Rebuild-info"
    runs-on: "ubuntu-22.04"
    env:
      REPO_INPUTS: ${{ github.event.inputs.repo }}
      EVENT: ${{ github.event_name }}
    outputs:
      stable-versions: ${{ steps.selective-checks.outputs.stable-versions }}
      ucs-versions: ${{ steps.selective-checks.outputs.ucs-versions }}
      minor-tags: ${{ steps.selective-checks.outputs.minor-tags }}
      ucs-rebuild-condition: ${{ steps.selective-checks.outputs.ucs-rebuild-condition }}
      prefix-name: ${{ steps.selective-checks.outputs.prefix-name }}
      repo: ${{ steps.selective-checks.outputs.repo }}
    steps:
      - name: Selective checks
        id: selective-checks
        run: |
          set -e

          REPO=${REPO_INPUTS:-"4test"}

          if [ "${REPO}" == "stable" ]; then
            UCS_REBUILD=true
            UCS_VERSIONS=($(curl -s -H -X ${REGISTRY_URL}/${COMPANY_NAME}/${PRODUCT_NAME}-ucs/tags/?page_size=100 | \
              jq -r '.results|.[]|.name' | grep -oxE '[0-9]{1,}.[0-9]{1,}.[0-9]{1,}.1' || true))
            echo "ucs-versions=$(jq -c -n '$ARGS.positional' --args "${UCS_VERSIONS[@]}")" >> "$GITHUB_OUTPUT"
          elif [ "${REPO}" == "4test" ]; then
            UCS_REBUILD=false
            PREFIX_NAME=4testing-
          fi

          STABLE_VERSIONS=($(curl -s -H -X ${REGISTRY_URL}/${COMPANY_NAME}/${PRODUCT_NAME}/tags/?page_size=100 | \
            jq -r '.results|.[]|.name' | grep -oxE '[0-9]{1,}.[0-9]{1,}.[0-9]{1,}.1' || true))

          # When rebuilding stable versions of the document server,
          # it is necessary to determine the version from which the
          # minor x.x tag will need to be pushed.

          VERSIONS=(${STABLE_VERSIONS[@]})
          for i in {1..10}; do
            if [ -z "${VERSIONS}" ]; then
              break
            else
              TEMPLATE=${VERSIONS[0]%.*.*}
              TEMPLATE_MINOR=$(printf -- '%s\n' "${VERSIONS[@]}" | grep -o -m 1 "${VERSIONS[0]%.*.*}.[0-9].[0-9]")
              MINOR_TAGS+=(${TEMPLATE_MINOR%.*})

              for v in ${MINOR_TAGS[@]}; do
                VERSIONS=(${VERSIONS[@]//${v%.*}.*.*})
              done
            fi
          done

          echo "Stable releases that will be rebuilt"
          echo "--------------------------------------"
          echo "${STABLE_VERSIONS[@]}"
          echo
          echo
          echo "Ucs releases that will be rebuilt"
          echo "-----------------------------------"
          echo "${UCS_VERSIONS[@]}"

          echo "stable-versions=$(jq -c -n '$ARGS.positional' --args "${STABLE_VERSIONS[@]}")" >> "$GITHUB_OUTPUT"
          echo "minor-tags=${MINOR_TAGS[@]}" >> "$GITHUB_OUTPUT"
          echo "ucs-rebuild-condition=${UCS_REBUILD}" >> "$GITHUB_OUTPUT"
          echo "prefix-name=${PREFIX_NAME}" >> "$GITHUB_OUTPUT"
          echo "repo=${REPO}" >> "$GITHUB_OUTPUT"
        shell: bash

  re-build-stable:
    name: "Rebuild stable:${{ matrix.version }} ${{ matrix.edition }}"
    needs: [rebuild-info]
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        type: ["stable"]
        edition: ["", "-ee", "-de"]
        version: ${{fromJSON(needs.rebuild-info.outputs.stable-versions)}}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      # Determines the new build number based
      # on data from the hub.docker registry
      - name: Declare release number
        id: release-number
        env:
          REBUILD_VERSION: ${{ matrix.version }}
        run: |
          MINOR_VERSION=${REBUILD_VERSION%.*}
          LAST_RELEASE=$(curl -s -H -X ${REGISTRY_URL}/${COMPANY_NAME}/${PRODUCT_NAME}/tags/?page_size=100 \
            | jq -r '.results|.[]|.name' | grep -Eo -m1 "${MINOR_VERSION}.[0-9]{1,}")
          LAST_RELEASE=${LAST_RELEASE#*.*.*.}
          echo "release-number=$((LAST_RELEASE+1))" >> "$GITHUB_OUTPUT"
        shell: bash
      # Note: Rebuilding images with an
      # extra layer to update security and
      # all dependencies. Update tags got +1 to previous release.
      - name: Re-build documentserver-stable
        env:
          MINOR_TAGS_ST: ${{ needs.rebuild-info.outputs.minor-tags }}
          VERSION: ${{ matrix.version }}
          RELEASE_NUMBER: ${{ steps.release-number.outputs.release-number }}
          PREFIX_NAME: ${{ needs.rebuild-info.outputs.prefix-name }}
          REPO: ${{ needs.rebuild-info.outputs.repo }}
          PRODUCT_EDITION: ${{ matrix.edition }}
        run: |
          set -eux
          export PULL_TAG=${VERSION}
          export TAG=${VERSION%.*}.${RELEASE_NUMBER}
          export SHORTER_TAG=${VERSION%.*}
          export SHORTEST_TAG=${VERSION%.*.*}

          if [ "${REPO}" == "stable" ]; then
            MINOR_TAGS=(${MINOR_TAGS_ST})
            for v in ${MINOR_TAGS[@]}; do
              if [ "${SHORTER_TAG}" == "${v}" ]; then
                export PUSH_MAJOR="true"
              fi
            done
            if [ "${SHORTER_TAG}" == "${MINOR_TAGS[0]}" ]; then
              export LATEST="true"
            fi
          fi
          docker buildx bake -f docker-bake.hcl documentserver-stable-rebuild --push
        shell: bash

  re-build-ucs:
    name: "Rebuild ucs: ${{ matrix.version }} ${{ matrix.edition }}"
    if: needs.rebuild-info.outputs.ucs-rebuild-condition == 'true'
    needs: [rebuild-info]
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        type: ["ucs"]
        edition: ["", "-ee"]
        version: ${{fromJSON(needs.rebuild-info.outputs.ucs-versions)}}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      # Determines the new build number based
      # on data from the hub.docker registry
      - name: Declare release number
        id: release-number
        env:
          REBUILD_VERSION: ${{ matrix.version }}
        run: |
          MINOR_VERSION=${REBUILD_VERSION%.*}
          LAST_RELEASE=$(curl -s -H -X ${REGISTRY_URL}/${COMPANY_NAME}/${PRODUCT_NAME}/tags/?page_size=100 \
            | jq -r '.results|.[]|.name' | grep -Eo -m1 "${MINOR_VERSION}.[0-9]{1,}")
          LAST_RELEASE=${LAST_RELEASE#*.*.*.}
          echo "release-number=$((LAST_RELEASE+1))" >> "$GITHUB_OUTPUT"
        shell: bash
      # Note: Rebuilding images with an
      # extra layer to update security and
      # all dependencies. Update tags +1 to previous release.
      - name: Re-build documentserver-ucs
        env:
          VERSION: ${{ matrix.version }}
          RELEASE_NUMBER: ${{ steps.release-number.outputs.release-number }}
          PRODUCT_EDITION: ${{ matrix.edition }}
        run: |
          set -eux
          export PULL_TAG=${VERSION}
          export TAG=${VERSION%.*}.${RELEASE_NUMBER}
          export SHORTER_TAG=${VERSION%.*}
          export SHORTEST_TAG=${VERSION%.*.*}

          export UCS_REBUILD=true
          export UCS_PREFIX=-ucs

          docker buildx bake -f docker-bake.hcl documentserver-stable-rebuild --push
        shell: bash
```
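Both rebuild jobs derive every tag they push from a single `x.y.z.n` version string using bash suffix-stripping expansions. A standalone sketch of that derivation with made-up values (`7.5.1.1` and release number `2` are illustrative only):

```sh
#!/usr/bin/env bash
# Illustrative values; the workflow takes VERSION from the matrix
# and RELEASE_NUMBER from the Docker Hub tag query above.
VERSION=7.5.1.1
RELEASE_NUMBER=2

PULL_TAG=${VERSION}                      # 7.5.1.1 - existing tag that gets pulled and rebuilt
TAG=${VERSION%.*}.${RELEASE_NUMBER}      # 7.5.1.2 - new full tag with the bumped release number
SHORTER_TAG=${VERSION%.*}                # 7.5.1   - minor tag
SHORTEST_TAG=${VERSION%.*.*}             # 7.5     - major tag
echo "$PULL_TAG $TAG $SHORTER_TAG $SHORTEST_TAG"
```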
.gitea/workflows/stable-build.yml.arch (new file, 137 lines), @@ -0,0 +1,137 @@

```yaml
### This workflow sets up an instance, then builds and pushes images ###
name: Multi-arch build stable
run-name: ${{ inputs.tag }} (${{ inputs.release_number }})

on:
  workflow_dispatch:
    inputs:
      tag:
        description: 'Tag for release (ex. 1.2.3.45)'
        type: string
        required: true
      release_number:
        description: 'Sequence number of the release (ex. x.x.x.<number>)'
        type: string
        required: true
        default: '1'

env:
  COMPANY_NAME: "onlyoffice"
  PRODUCT_NAME: "documentserver"

jobs:
  build:
    name: "Release image: DocumentServer${{ matrix.edition }}"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        images: ["documentserver-stable"]
        edition: ["", "-ee", "-de"]
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Build documentserver-release
        run: |
          set -eux
          VERSION=${{ github.event.inputs.tag }}
          RELEASE_NUMBER=${{ github.event.inputs.release_number }}
          PRODUCT_EDITION=${{ matrix.edition }}
          TESTING_IMAGE=${COMPANY_NAME}/4testing-${PRODUCT_NAME}${PRODUCT_EDITION}
          export PRODUCT_EDITION
          export PULL_TAG=${VERSION}
          export TAG=${VERSION%.*}.${RELEASE_NUMBER}
          export SHORTER_TAG=${VERSION%.*}
          export SHORTEST_TAG=${VERSION%.*.*}
          docker buildx bake -f docker-bake.hcl ${{ matrix.images }} --push
          echo "DONE: Build success >> exit with 0"
          exit 0
        shell: bash

  build-nonexample:
    name: "Release image: DocumentServer${{ matrix.edition }}-nonExample"
    runs-on: ubuntu-latest
    needs: [build]
    if: ${{ false }}
    strategy:
      fail-fast: false
      matrix:
        images: ["documentserver-nonexample"]
        edition: ["", "-ee", "-de"]
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: build image
        run: |
          set -eux
          VERSION=${{ github.event.inputs.tag }}
          RELEASE_NUMBER=${{ github.event.inputs.release_number }}
          export PULL_TAG=${VERSION%.*}.${RELEASE_NUMBER}
          export PRODUCT_EDITION=${{ matrix.edition }}
          export TAG=${VERSION%.*}.${RELEASE_NUMBER}
          docker buildx bake -f docker-bake.hcl ${{ matrix.images }} --push
        shell: bash

  build-ucs-ubuntu20:
    name: "Release image: DocumentServer${{ matrix.edition }}-ucs"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        edition: ["", "-ee"]
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: build UCS
        run: |
          set -eux
          VERSION=${{ github.event.inputs.tag }}
          RELEASE_NUMBER=${{ github.event.inputs.release_number }}
          export PRODUCT_EDITION=${{ matrix.edition }}
          export PACKAGE_BASEURL=${{ secrets.REPO_BASEURL }}
          export DOCKERFILE=Dockerfile
          export BASE_IMAGE=ubuntu:20.04
          export PG_VERSION=12
          export TAG=${VERSION%.*}.${RELEASE_NUMBER}
          export PACKAGE_VERSION=$( echo ${VERSION} | sed -E 's/(.*)\./\1-/')
          docker buildx bake -f docker-bake.hcl documentserver-ucs --push
        shell: bash
```
.gitea/workflows/zap-ds.yaml.arch (new file, 70 lines), @@ -0,0 +1,70 @@

```yaml
---
name: Scanning DocumentServer with ZAP

run-name: >
  ZAP DocumentServer ver: ${{ github.event.inputs.version }} from branch: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Set DocumentServer version that will be deployed'
        type: string
        required: true
      branch:
        description: 'The branch from which the scan will be performed'
        type: string
        required: true

jobs:
  zap:
    name: "Zap scanning DocumentServer"
    runs-on: ubuntu-latest
    permissions:
      issues: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Run DS
        id: run-ds
        env:
          TAG: ${{ github.event.inputs.version }}
        run: |
          # Create ssl certs
          openssl genrsa -out tls.key 2048
          openssl req -new -key tls.key -out tls.csr -subj "/C=RU/ST=NizhObl/L=NizhNov/O=RK-Tech/OU=TestUnit/CN=TestName"
          openssl x509 -req -days 365 -in tls.csr -signkey tls.key -out tls.crt
          openssl dhparam -out dhparam.pem 2048
          sudo mkdir -p /app/onlyoffice/DocumentServer/data/certs
          sudo cp ./tls.key /app/onlyoffice/DocumentServer/data/certs/
          sudo cp ./tls.crt /app/onlyoffice/DocumentServer/data/certs/
          sudo cp ./dhparam.pem /app/onlyoffice/DocumentServer/data/certs/
          sudo chmod 400 /app/onlyoffice/DocumentServer/data/certs/tls.key
          rm ./tls.key ./tls.crt ./dhparam.pem

          # Run Ds with enabled ssl
          export CONTAINER_NAME="documentserver"
          sudo docker run -itd \
            --name ${CONTAINER_NAME} \
            -p 80:80 \
            -p 443:443 \
            -v /app/onlyoffice/DocumentServer/data:/var/www/onlyoffice/Data \
            onlyoffice/4testing-documentserver:${TAG}
          sleep 60
          sudo docker exec ${CONTAINER_NAME} sudo supervisorctl start ds:example
          LOCAL_IP=$(hostname -I | awk '{print $1}')
          echo "local-ip=${LOCAL_IP}" >> "$GITHUB_OUTPUT"

      # Scan DocumentServer with ZAP.
      # NOTE: A full scan takes a lot of time.
      # If you want to make the scan faster (but less accurate), remove the `cmd_options` field.
      # -j means the scan uses the AJAX Spider; with this spider the scan takes approximately an hour.
      # Without any cmd options the default spider is used and the scan takes approximately 10-15 minutes.
      - name: ZAP Scan
        uses: zaproxy/action-full-scan@v0.8.0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          docker_name: 'ghcr.io/zaproxy/zaproxy:stable'
          target: 'https://${{ steps.run-ds.outputs.local-ip }}/'
          allow_issue_writing: false
          cmd_options: '-j'
```
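The `Run DS` step above waits a fixed 60 seconds before starting the example service. A hedged alternative is to poll until the server answers; the sketch below assumes the Document Server image exposes the usual `/healthcheck` endpoint on port 80, which this diff does not itself confirm:

```sh
# Poll the container instead of sleeping a fixed time.
# Assumes http://localhost/healthcheck returns "true" once the services are up.
for i in $(seq 1 30); do
  curl -sf http://localhost/healthcheck && break
  sleep 10
done
```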
.gitignore (vendored, new file, 2 lines), @@ -0,0 +1,2 @@

```
# Nix, direnv
.direnv
```
.travis.yml (32 changed lines)

```diff
@@ -32,23 +32,23 @@ env:
       SSL_KEY_PATH: /var/www/onlyoffice/Data/certs/mycert.key
 
+  # postgresql 16
+  - config: postgres.yml
+    POSTGRES_VERSION: 16
+
+  # postgresql 15
+  - config: postgres.yml
+    POSTGRES_VERSION: 15
+
+  # postgresql 14
+  - config: postgres.yml
+    POSTGRES_VERSION: 14
+
+  # postgresql 13
+  - config: postgres.yml
+    POSTGRES_VERSION: 13
+
   # postgresql 12
-  - config: postgres.yml
-    POSTGRES_VERSION: 12
-
-  # postgresql 11
-  - config: postgres.yml
-    POSTGRES_VERSION: 11
-
-  # postgresql 10
-  - config: postgres.yml
-    POSTGRES_VERSION: 10
-
-  # postgresql 9
-  - config: postgres.yml
-    POSTGRES_VERSION: 9
-
-  # postgresql 9.5
   - config: postgres.yml
 
   # postgresql custom values
```
Dockerfile (39 changed lines)

```diff
@@ -1,14 +1,17 @@
-FROM ubuntu:20.04
-LABEL maintainer Ascensio System SIA <support@onlyoffice.com>
+ARG BASE_IMAGE=ubuntu:22.04
+
+FROM ${BASE_IMAGE} as documentserver
+LABEL maintainer Jiří Štefka <jiri.stefka.js@gmail.com>
+
+ARG PG_VERSION=14
 
-ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 DEBIAN_FRONTEND=noninteractive PG_VERSION=12
+ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 DEBIAN_FRONTEND=noninteractive PG_VERSION=${PG_VERSION}
 
 ARG ONLYOFFICE_VALUE=onlyoffice
 
 RUN echo "#!/bin/sh\nexit 0" > /usr/sbin/policy-rc.d && \
     apt-get -y update && \
-    apt-get -yq install wget apt-transport-https gnupg locales && \
-    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 0x8320ca65cb2de8e5 && \
+    apt-get -yq install wget apt-transport-https gnupg locales lsb-release && \
     locale-gen en_US.UTF-8 && \
     echo ttf-mscorefonts-installer msttcorefonts/accepted-mscorefonts-eula select true | debconf-set-selections && \
     apt-get -yq install \
@@ -16,8 +19,8 @@ RUN echo "#!/bin/sh\nexit 0" > /usr/sbin/policy-rc.d && \
         apt-utils \
         bomstrip \
         certbot \
+        cron \
         curl \
-        gconf-service \
         htop \
         libasound2 \
         libboost-regex-dev \
@@ -54,9 +57,8 @@ RUN echo "#!/bin/sh\nexit 0" > /usr/sbin/policy-rc.d && \
     sed 's|\(application\/zip.*\)|\1\n application\/wasm wasm;|' -i /etc/nginx/mime.types && \
     pg_conftool $PG_VERSION main set listen_addresses 'localhost' && \
     service postgresql restart && \
-    sudo -u postgres psql -c "CREATE DATABASE $ONLYOFFICE_VALUE;" && \
     sudo -u postgres psql -c "CREATE USER $ONLYOFFICE_VALUE WITH password '$ONLYOFFICE_VALUE';" && \
-    sudo -u postgres psql -c "GRANT ALL privileges ON DATABASE $ONLYOFFICE_VALUE TO $ONLYOFFICE_VALUE;" && \
+    sudo -u postgres psql -c "CREATE DATABASE $ONLYOFFICE_VALUE OWNER $ONLYOFFICE_VALUE;" && \
     service postgresql stop && \
     service redis-server stop && \
     service rabbitmq-server stop && \
@@ -64,26 +66,35 @@ RUN echo "#!/bin/sh\nexit 0" > /usr/sbin/policy-rc.d && \
     service nginx stop && \
     rm -rf /var/lib/apt/lists/*
 
-COPY config /app/ds/setup/config/
+COPY config/supervisor/supervisor /etc/init.d/
+COPY config/supervisor/ds/*.conf /etc/supervisor/conf.d/
 COPY run-document-server.sh /app/ds/run-document-server.sh
 
 EXPOSE 80 443
 
 ARG COMPANY_NAME=onlyoffice
 ARG PRODUCT_NAME=documentserver
-ARG PACKAGE_URL="http://download.onlyoffice.com/install/documentserver/linux/${COMPANY_NAME}-${PRODUCT_NAME}_amd64.deb"
+ARG PRODUCT_EDITION=
+ARG PACKAGE_VERSION=
+ARG TARGETARCH
+ARG PACKAGE_BASEURL="http://download.onlyoffice.com/install/documentserver/linux"
 
 ENV COMPANY_NAME=$COMPANY_NAME \
-    PRODUCT_NAME=$PRODUCT_NAME
+    PRODUCT_NAME=$PRODUCT_NAME \
+    PRODUCT_EDITION=$PRODUCT_EDITION \
+    DS_DOCKER_INSTALLATION=true
 
-RUN wget -q -P /tmp "$PACKAGE_URL" && \
+RUN PACKAGE_FILE="${COMPANY_NAME}-${PRODUCT_NAME}${PRODUCT_EDITION}${PACKAGE_VERSION:+_$PACKAGE_VERSION}_${TARGETARCH:-$(dpkg --print-architecture)}.deb" && \
+    wget -q -P /tmp "$PACKAGE_BASEURL/$PACKAGE_FILE" && \
     apt-get -y update && \
     service postgresql start && \
-    apt-get -yq install /tmp/$(basename "$PACKAGE_URL") && \
+    apt-get -yq install /tmp/$PACKAGE_FILE && \
     service postgresql stop && \
+    chmod 755 /etc/init.d/supervisor && \
+    sed "s/COMPANY_NAME/${COMPANY_NAME}/g" -i /etc/supervisor/conf.d/*.conf && \
     service supervisor stop && \
     chmod 755 /app/ds/*.sh && \
-    rm -f /tmp/$(basename "$PACKAGE_URL") && \
+    rm -f /tmp/$PACKAGE_FILE && \
     rm -rf /var/log/$COMPANY_NAME && \
     rm -rf /var/lib/apt/lists/*
```
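With `PACKAGE_URL` replaced by `PACKAGE_BASEURL`, `PACKAGE_VERSION`, `PRODUCT_EDITION` and `TARGETARCH` build arguments, a local build can be parameterised per edition and architecture. A sketch of a manual invocation (tag name and values are illustrative; the Makefile change below passes the same arguments):

```sh
# Build a Community Edition amd64 image from the default download location.
# Leaving PACKAGE_VERSION empty makes the Dockerfile use the unversioned package name.
docker build \
  --build-arg COMPANY_NAME=onlyoffice \
  --build-arg PRODUCT_NAME=documentserver \
  --build-arg PRODUCT_EDITION= \
  --build-arg PACKAGE_VERSION= \
  --build-arg TARGETARCH=amd64 \
  -t onlyoffice-documentserver:local .
```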
Makefile (85 changed lines)

```diff
@@ -1,75 +1,66 @@
 COMPANY_NAME ?= ONLYOFFICE
 GIT_BRANCH ?= develop
-PRODUCT_NAME ?= DocumentServer
+PRODUCT_NAME ?= documentserver
+PRODUCT_EDITION ?=
 PRODUCT_VERSION ?= 0.0.0
 BUILD_NUMBER ?= 0
+BUILD_CHANNEL ?= nightly
 ONLYOFFICE_VALUE ?= onlyoffice
-S3_BUCKET ?= repo-doc-onlyoffice-com
-RELEASE_BRANCH ?= unstable
 
 COMPANY_NAME_LOW = $(shell echo $(COMPANY_NAME) | tr A-Z a-z)
-PRODUCT_NAME_LOW = $(shell echo $(PRODUCT_NAME) | tr A-Z a-z)
-COMPANY_NAME_LOW_ESCAPED = $(subst -,,$(COMPANY_NAME_LOW))
 
-PACKAGE_NAME := $(COMPANY_NAME_LOW)-$(PRODUCT_NAME_LOW)
-PACKAGE_VERSION := $(PRODUCT_VERSION)-$(BUILD_NUMBER)
-PACKAGE_URL := http://$(S3_BUCKET).s3.amazonaws.com/$(COMPANY_NAME_LOW)/$(RELEASE_BRANCH)/ubuntu/$(PACKAGE_NAME)_$(PACKAGE_VERSION)_amd64.deb
+PACKAGE_NAME := $(COMPANY_NAME_LOW)-$(PRODUCT_NAME)$(PRODUCT_EDITION)
+PACKAGE_VERSION ?= $(PRODUCT_VERSION)-$(BUILD_NUMBER)~stretch
+PACKAGE_BASEURL ?= https://s3.eu-west-1.amazonaws.com/repo-doc-onlyoffice-com/server/linux/debian
 
-UPDATE_LATEST := false
-ifneq (,$(findstring develop,$(GIT_BRANCH)))
-DOCKER_TAG += $(subst -,.,$(PACKAGE_VERSION))
-DOCKER_TAGS += latest
-else ifneq (,$(findstring release,$(GIT_BRANCH)))
-DOCKER_TAG += $(subst -,.,$(PACKAGE_VERSION))
-else ifneq (,$(findstring hotfix,$(GIT_BRANCH)))
-DOCKER_TAG += $(subst -,.,$(PACKAGE_VERSION))
+ifeq ($(BUILD_CHANNEL),$(filter $(BUILD_CHANNEL),nightly test))
+DOCKER_TAG := $(PRODUCT_VERSION).$(BUILD_NUMBER)
 else
-DOCKER_TAG += $(subst -,.,$(PACKAGE_VERSION))-$(subst /,-,$(GIT_BRANCH))
+DOCKER_TAG := $(PRODUCT_VERSION).$(BUILD_NUMBER)-$(subst /,-,$(GIT_BRANCH))
 endif
 
-DOCKER_TAGS += $(DOCKER_TAG)
-
-DOCKER_REPO = $(COMPANY_NAME_LOW_ESCAPED)/4testing-$(PRODUCT_NAME_LOW)
-
-COLON := __colon__
-DOCKER_TARGETS := $(foreach TAG,$(DOCKER_TAGS),$(DOCKER_REPO)$(COLON)$(TAG))
-
-DOCKER_ARCH := $(COMPANY_NAME_LOW)-$(PRODUCT_NAME_LOW)_$(PACKAGE_VERSION).tar.gz
-
-DOCKER_ARCH_URI := $(COMPANY_NAME_LOW)/$(RELEASE_BRANCH)/docker/$(notdir $(DOCKER_ARCH))
-
-.PHONY: all clean clean-docker deploy docker publish
-
-$(DOCKER_TARGETS): $(DEB_REPO_DATA)
+DOCKER_ORG ?= $(COMPANY_NAME_LOW)
+DOCKER_IMAGE := $(DOCKER_ORG)/4testing-$(PRODUCT_NAME)$(PRODUCT_EDITION)
+DOCKER_DUMMY := $(COMPANY_NAME_LOW)-$(PRODUCT_NAME)$(PRODUCT_EDITION)__$(DOCKER_TAG).dummy
+DOCKER_ARCH := $(COMPANY_NAME_LOW)-$(PRODUCT_NAME)_$(DOCKER_TAG).tar.gz
+
+.PHONY: all clean clean-docker image deploy docker
+
+$(DOCKER_DUMMY):
+	docker pull ubuntu:22.04
 	docker build \
-		--build-arg PACKAGE_URL=$(PACKAGE_URL) \
 		--build-arg COMPANY_NAME=$(COMPANY_NAME_LOW) \
-		--build-arg PRODUCT_NAME=$(PRODUCT_NAME_LOW) \
+		--build-arg PRODUCT_NAME=$(PRODUCT_NAME) \
+		--build-arg PRODUCT_EDITION=$(PRODUCT_EDITION) \
+		--build-arg PACKAGE_VERSION=$(PACKAGE_VERSION) \
+		--build-arg PACKAGE_BASEURL=$(PACKAGE_BASEURL) \
+		--build-arg TARGETARCH=amd64 \
 		--build-arg ONLYOFFICE_VALUE=$(ONLYOFFICE_VALUE) \
-		-t $(subst $(COLON),:,$@) . &&\
-	mkdir -p $$(dirname $@) &&\
+		-t $(DOCKER_IMAGE):$(DOCKER_TAG) . && \
+	mkdir -p $$(dirname $@) && \
 	echo "Done" > $@
 
-$(DOCKER_ARCH): $(DOCKER_TARGETS)
-	docker save $(DOCKER_REPO):$(DOCKER_TAG) | \
+$(DOCKER_ARCH): $(DOCKER_DUMMY)
+	docker save $(DOCKER_IMAGE):$(DOCKER_TAG) | \
 	gzip > $@
 
-all: $(DOCKER_TARGETS)
+all: image
 
 clean:
-	rm -rfv $(DOCKER_TARGETS) $(DOCKER_ARCH)
+	rm -rfv *.dummy *.tar.gz
 
 clean-docker:
 	docker rmi -f $$(docker images -q $(COMPANY_NAME_LOW)/*) || exit 0
 
-deploy: $(DOCKER_TARGETS)
-	$(foreach TARGET,$(DOCKER_TARGETS), \
-		for i in {1..3}; do \
-			docker push $(subst $(COLON),:,$(TARGET)) && break || sleep 1m; \
-		done;)
+image: $(DOCKER_DUMMY)
 
-publish: $(DOCKER_ARCH)
-	aws s3 cp --no-progress --acl public-read \
-		$(DOCKER_ARCH) s3://$(S3_BUCKET)/$(DOCKER_ARCH_URI)
+deploy: $(DOCKER_DUMMY)
+	for i in {1..3}; do \
+		docker push $(DOCKER_IMAGE):$(DOCKER_TAG) && break || sleep 1m; \
+	done
+ifeq ($(BUILD_CHANNEL),nightly)
+	docker tag $(DOCKER_IMAGE):$(DOCKER_TAG) $(DOCKER_IMAGE):latest
+	for i in {1..3}; do \
+		docker push $(DOCKER_IMAGE):latest && break || sleep 1m; \
+	done
+endif
```
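A sketch of how the reworked targets might be driven locally; the variable values are illustrative, and `deploy` assumes you are already logged in to the target Docker registry:

```sh
# Build a 4testing image tagged 7.5.1.23, then push it (retrying up to three times).
make image  PRODUCT_VERSION=7.5.1 BUILD_NUMBER=23 PRODUCT_EDITION=-ee BUILD_CHANNEL=test
make deploy PRODUCT_VERSION=7.5.1 BUILD_NUMBER=23 PRODUCT_EDITION=-ee BUILD_CHANNEL=test
```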
272
README.md
272
README.md
@ -1,3 +1,190 @@
|
|||||||
|
[![Docker Pulls](https://img.shields.io/docker/pulls/jiriks74/onlyoffice-documentserver.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=pulls&logo=docker)](https://hub.docker.com/r/jiriks74/onlyoffice-documentserver)
|
||||||
|
[![Docker Stars](https://img.shields.io/docker/stars/jiriks74/onlyoffice-documentserver.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=stars&logo=docker)](https://hub.docker.com/r/jiriks74/onlyoffice-documentserver)
|
||||||
|
[![Docker Size](https://img.shields.io/docker/image-size/jiriks74/onlyoffice-documentserver/latest.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Size&logo=docker)](https://hub.docker.com/r/jiriks74/onlyoffice-documentserver)
|
||||||
|
|
||||||
|
#### This repository is based on the official `Dockerfile` and `docker-compose.yml` files with all the needed files as well
|
||||||
|
|
||||||
|
# Table of contents
|
||||||
|
|
||||||
|
- [Usage](#usage)
|
||||||
|
- [Setting up secret key with nextcloud](#setting-up-secret-key-with-nextcloud)
|
||||||
|
- [Larger file limits](#setting-up-larger-file-limits)
|
||||||
|
- [Generating custom presentation themes](#generating-custom-presentation-themes)
|
||||||
|
- [Tags on DockerHub](#tags-used-on-dockerhub)
|
||||||
|
- [Building the image from source](#building-the-image-yourself-not-recommended---may-take-a-lot-of-time)
|
||||||
|
- [Updating the image yourself](#updating-the-image-yourself)
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
#### docker-compose with prebuilt image (recommended)
|
||||||
|
|
||||||
|
- Docker will pull the correct architecture automatically
|
||||||
|
- Below in `Details` you'll find a `docker-compose.yml` templete.
|
||||||
|
To use it, make a directory where data from the container will be saved, create
|
||||||
|
`docker-compose.yml` file in it and put the template from below in it.
|
||||||
|
Then modify the template to your needs.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
|
||||||
|
```yml
|
||||||
|
version: '2'
|
||||||
|
services:
|
||||||
|
onlyoffice-documentserver:
|
||||||
|
image: jiriks74/onlyoffice-documentserver:latest
|
||||||
|
container_name: onlyoffice-documentserver
|
||||||
|
depends_on:
|
||||||
|
- onlyoffice-postgresql
|
||||||
|
- onlyoffice-rabbitmq
|
||||||
|
environment:
|
||||||
|
- DB_TYPE=postgres
|
||||||
|
- DB_HOST=onlyoffice-postgresql
|
||||||
|
- DB_PORT=5432
|
||||||
|
- DB_NAME=onlyoffice
|
||||||
|
- DB_USER=onlyoffice
|
||||||
|
- AMQP_URI=amqp://guest:guest@onlyoffice-rabbitmq
|
||||||
|
# Uncomment strings below to enable the JSON Web Token validation.
|
||||||
|
#- JWT_ENABLED=true
|
||||||
|
#- JWT_SECRET=secret
|
||||||
|
#- JWT_HEADER=AuthorizationJwt
|
||||||
|
#- JWT_IN_BODY=true
|
||||||
|
# Uncomment the line below to set larger file limits (about 1GB)
|
||||||
|
#- LARGER_FILE_LIMITS=true
|
||||||
|
ports:
|
||||||
|
- '80:80'
|
||||||
|
- '443:443'
|
||||||
|
stdin_open: true
|
||||||
|
restart: always
|
||||||
|
stop_grace_period: 120s
|
||||||
|
volumes:
|
||||||
|
# Uncomment the line below to get access to the slide themes directory.
|
||||||
|
# To use the themes, copy them to the slideThemes directory and run `docker exec -it <container-name> /usr/bin/documentserver-generate-allfonts.sh`
|
||||||
|
#- ./slideThemes:/var/www/onlyoffice/documentserver/sdkjs/slide/themes/src
|
||||||
|
- /var/www/onlyoffice/Data
|
||||||
|
- /var/log/onlyoffice
|
||||||
|
- /var/lib/onlyoffice/documentserver/App_Data/cache/files
|
||||||
|
- /var/www/onlyoffice/documentserver-example/public/files
|
||||||
|
- /usr/share/fonts
|
||||||
|
|
||||||
|
onlyoffice-rabbitmq:
|
||||||
|
container_name: onlyoffice-rabbitmq
|
||||||
|
image: rabbitmq
|
||||||
|
restart: always
|
||||||
|
expose:
|
||||||
|
- '5672'
|
||||||
|
|
||||||
|
onlyoffice-postgresql:
|
||||||
|
container_name: onlyoffice-postgresql
|
||||||
|
image: postgres:9.5
|
||||||
|
environment:
|
||||||
|
- POSTGRES_DB=onlyoffice
|
||||||
|
- POSTGRES_USER=onlyoffice
|
||||||
|
- POSTGRES_HOST_AUTH_METHOD=trust
|
||||||
|
restart: always
|
||||||
|
expose:
|
||||||
|
- '5432'
|
||||||
|
volumes:
|
||||||
|
- postgresql_data:/var/lib/postgresql
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgresql_data:
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
### Setting up `Secret key` with Nextcloud
|
||||||
|
|
||||||
|
1. Uncomment four lines starting with `JWT` in `docker-compose`
|
||||||
|
2. Set your secret on line `JWT_SECRET=yourSecret`
|
||||||
|
3. Open Nexcloud's `config.php` (by default `/var/www/nextcloud/config/config.php`)
|
||||||
|
4. Add this to the 2nd last line (before the line with `);`) and paste in your secret (3rd last line)
|
||||||
|
|
||||||
|
```php
|
||||||
|
'onlyoffice' =>
|
||||||
|
array (
|
||||||
|
"jwt_secret" => "yourSecret",
|
||||||
|
"jwt_header" => "AuthorizationJwt"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Go to your Nextcloud settings, navigate to Onlyoffice settings
|
||||||
|
6. Add your server Address and Secret key
|
||||||
|
7. Save
|
||||||
|
|
||||||
|
### Setting up larger file limits
|
||||||
|
|
||||||
|
- Uncomment the `- LARGER_FILE_LIMITS=true` line in `docker-compose.yml`
|
||||||
|
|
||||||
|
### Generating custom presentation themes
|
||||||
|
|
||||||
|
1. Uncomment the
|
||||||
|
`- ./slideThemes:/var/www/onlyoffice/documentserver/sdkjs/slide/themes/src`
|
||||||
|
line in `docker-compose.yml`
|
||||||
|
2. Put your themes into the `slideThemes` directory
|
||||||
|
3. Run `docker exec -it <container-name> /usr/bin/documentserver-generate-allfonts.sh`
|
||||||
|
- (This will take some time. I have totally 35 themes and it took about 30 minutes to generate them on a Raspberry Pi 4 4GB on an external HDD - SSD may be faster)
|
||||||
|
4. If you want to add more themes later, repeat step 2 and 3.
|
||||||
|
|
||||||
|
## Tags used on DockerHub
|
||||||
|
|
||||||
|
- `latest` - the latest version of the Documentserver
|
||||||
|
- Version tags (eg. `7.0.1-37`) - these tags are equal to the Documentserver
|
||||||
|
version of the `onlyoffice-documentserver` debian package used in the image
|
||||||
|
|
||||||
|
## Building the image yourself (not recommended - may take a lot of time)
|
||||||
|
|
||||||
|
#### 1. Clone the repository (for example to your home directory `cd /home/$USER/`)
|
||||||
|
|
||||||
|
`git clone https://github.com/jiriks74/Docker-DocumentServer.git && cd Docker-DocumentServer`
|
||||||
|
|
||||||
|
#### 2. Build the docker image
|
||||||
|
|
||||||
|
##### Building only for the architecture you are building the image on (when building on Raspberry Pi result will be `arm64`, when on pc result will be `amd64`)
|
||||||
|
|
||||||
|
`docker-compose build`
|
||||||
|
|
||||||
|
##### Building for all supported architectures (you have to have your environment setup for emulation of arm64 with `qemu` and build it on `amd64`) - you have to push to DockerHub
|
||||||
|
|
||||||
|
`docker buildx build --push --platform linux/arm64,linux/amd64,linux/386 .`
|
||||||
|
|
||||||
|
#### 3. Create and start the container
|
||||||
|
|
||||||
|
`docker-compose up -d`
|
||||||
|
|
||||||
|
- This will start the server. It is set to be automatically started/restarted so as long you have docker running on startup this will start automatically
|
||||||
|
|
||||||
|
### Updating the image yourself
|
||||||
|
|
||||||
|
#### 1. Stop and delete the old container
|
||||||
|
|
||||||
|
`docker-compose down`
|
||||||
|
|
||||||
|
#### 2. (optional) Clear the docker cache
|
||||||
|
|
||||||
|
#### - ! This will remove all unused cache images ! (good for saving space, bad if you develop with and need cache, but you understand it at that point)
|
||||||
|
|
||||||
|
`docker rmi $(docker images -f "dangling=true" -q)`
|
||||||
|
|
||||||
|
#### 4. Rebuild the image without cache
|
||||||
|
|
||||||
|
##### Building only for the architecture you are building the image on (when building on Raspberry Pi result will be `arm64`, when on pc result will be `amd64`)
|
||||||
|
|
||||||
|
`docker-compose build`
|
||||||
|
|
||||||
|
##### Building for all supported architectures (you have to have your environment setup for emulation of arm64 with `qemu`) - you'll have to push to DockerHub, multiplatform images cannot be saved locally (for whatever reason)
|
||||||
|
|
||||||
|
`docker buildx build --push --platform linux/arm64,linux/amd64,linux/386 .`
|
||||||
|
|
||||||
|
#### 3. Create and start the new container
|
||||||
|
|
||||||
|
`docker-compose up -d`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## The rest of this file is the official [`README.md` from OnlyOffice-Documentserver repository](https://github.com/ONLYOFFICE/Docker-DocumentServer). I will not change anything in it, it may not work, but considering the changes I made, it should be fully compatible (beware that you must change the `docker-compose.yml` from building the image locally to using this repository). If you want to change something, make a issue on my repository and we'll figure it out.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
|
||||||
|
|
||||||
* [Overview](#overview)
|
* [Overview](#overview)
|
||||||
* [Functionality](#functionality)
|
* [Functionality](#functionality)
|
||||||
* [Recommended System Requirements](#recommended-system-requirements)
|
* [Recommended System Requirements](#recommended-system-requirements)
|
||||||
@ -11,6 +198,7 @@
|
|||||||
+ [Installation of the SSL Certificates](#installation-of-the-ssl-certificates)
|
+ [Installation of the SSL Certificates](#installation-of-the-ssl-certificates)
|
||||||
+ [Available Configuration Parameters](#available-configuration-parameters)
|
+ [Available Configuration Parameters](#available-configuration-parameters)
|
||||||
* [Installing ONLYOFFICE Document Server integrated with Community and Mail Servers](#installing-onlyoffice-document-server-integrated-with-community-and-mail-servers)
|
* [Installing ONLYOFFICE Document Server integrated with Community and Mail Servers](#installing-onlyoffice-document-server-integrated-with-community-and-mail-servers)
|
||||||
|
* [ONLYOFFICE Document Server ipv6 setup](#onlyoffice-document-server-ipv6-setup)
|
||||||
* [Issues](#issues)
|
* [Issues](#issues)
|
||||||
- [Docker Issues](#docker-issues)
|
- [Docker Issues](#docker-issues)
|
||||||
- [Document Server usage Issues](#document-server-usage-issues)
|
- [Document Server usage Issues](#document-server-usage-issues)
|
||||||
@ -25,6 +213,8 @@ Starting from version 6.0, Document Server is distributed as ONLYOFFICE Docs. It
|
|||||||
|
|
||||||
ONLYOFFICE Docs can be used as a part of ONLYOFFICE Workspace or with third-party sync&share solutions (e.g. Nextcloud, ownCloud, Seafile) to enable collaborative editing within their interface.
|
ONLYOFFICE Docs can be used as a part of ONLYOFFICE Workspace or with third-party sync&share solutions (e.g. Nextcloud, ownCloud, Seafile) to enable collaborative editing within their interface.
|
||||||
|
|
||||||
|
***Important*** Please update `docker-engine` to latest version (`20.10.21` as of writing this doc) before using it. We use `ubuntu:22.04` as base image and it older versions of docker have compatibility problems with it
|
||||||
|
|
||||||
## Functionality ##
|
## Functionality ##
|
||||||
* ONLYOFFICE Document Editor
|
* ONLYOFFICE Document Editor
|
||||||
* ONLYOFFICE Spreadsheet Editor
|
* ONLYOFFICE Spreadsheet Editor
|
||||||
@ -107,7 +297,7 @@ When using CA certified certificates (e.g [Let's encrypt](https://letsencrypt.or
|
|||||||
|
|
||||||
#### Using the automatically generated Let's Encrypt SSL Certificates
|
#### Using the automatically generated Let's Encrypt SSL Certificates
|
||||||
|
|
||||||
sudo docker run -i -t -d -p 443:443 \
|
sudo docker run -i -t -d -p 80:80 -p 443:443 \
|
||||||
-e LETS_ENCRYPT_DOMAIN=your_domain -e LETS_ENCRYPT_MAIL=your_mail onlyoffice/documentserver
|
-e LETS_ENCRYPT_DOMAIN=your_domain -e LETS_ENCRYPT_MAIL=your_mail onlyoffice/documentserver
|
||||||
|
|
||||||
If you want to get and extend Let's Encrypt SSL Certificates automatically just set LETS_ENCRYPT_DOMAIN and LETS_ENCRYPT_MAIL variables.
|
If you want to get and extend Let's Encrypt SSL Certificates automatically just set LETS_ENCRYPT_DOMAIN and LETS_ENCRYPT_MAIL variables.
|
||||||
@ -178,19 +368,24 @@ Below is the complete list of parameters that can be set using environment varia
|
|||||||
- **DB_TYPE**: The database type. Supported values are `postgres`, `mariadb` or `mysql`. Defaults to `postgres`.
|
- **DB_TYPE**: The database type. Supported values are `postgres`, `mariadb` or `mysql`. Defaults to `postgres`.
|
||||||
- **DB_HOST**: The IP address or the name of the host where the database server is running.
|
- **DB_HOST**: The IP address or the name of the host where the database server is running.
|
||||||
- **DB_PORT**: The database server port number.
|
- **DB_PORT**: The database server port number.
|
||||||
- **DB_NAME**: The name of a database to be created on the image startup.
|
- **DB_NAME**: The name of the database to use. It must already exist when the container starts.
|
||||||
- **DB_USER**: The new user name with superuser permissions for the database account.
|
- **DB_USER**: The new user name with superuser permissions for the database account.
|
||||||
- **DB_PWD**: The password set for the database account.
|
- **DB_PWD**: The password set for the database account.
|
||||||
- **AMQP_URI**: The [AMQP URI](https://www.rabbitmq.com/uri-spec.html "RabbitMQ URI Specification") to connect to message broker server.
|
- **AMQP_URI**: The [AMQP URI](https://www.rabbitmq.com/uri-spec.html "RabbitMQ URI Specification") to connect to message broker server.
|
||||||
- **AMQP_TYPE**: The message broker type. Supported values are `rabbitmq` or `activemq`. Defaults to `rabbitmq`.
|
- **AMQP_TYPE**: The message broker type. Supported values are `rabbitmq` or `activemq`. Defaults to `rabbitmq`.
|
||||||
- **REDIS_SERVER_HOST**: The IP address or the name of the host where the Redis server is running.
|
- **REDIS_SERVER_HOST**: The IP address or the name of the host where the Redis server is running.
|
||||||
- **REDIS_SERVER_PORT**: The Redis server port number.
|
- **REDIS_SERVER_PORT**: The Redis server port number.
|
||||||
|
- **REDIS_SERVER_PASS**: The Redis server password. The password is not set by default.
|
||||||
- **NGINX_WORKER_PROCESSES**: Defines the number of nginx worker processes.
|
- **NGINX_WORKER_PROCESSES**: Defines the number of nginx worker processes.
|
||||||
- **NGINX_WORKER_CONNECTIONS**: Sets the maximum number of simultaneous connections that can be opened by a nginx worker process.
|
- **NGINX_WORKER_CONNECTIONS**: Sets the maximum number of simultaneous connections that can be opened by a nginx worker process.
|
||||||
- **JWT_ENABLED**: Specifies the enabling the JSON Web Token validation by the ONLYOFFICE Document Server. Defaults to `false`.
|
- **SECURE_LINK_SECRET**: Defines secret for the nginx config directive [secure_link_md5](https://nginx.org/en/docs/http/ngx_http_secure_link_module.html#secure_link_md5). Defaults to `random string`.
|
||||||
- **JWT_SECRET**: Defines the secret key to validate the JSON Web Token in the request to the ONLYOFFICE Document Server. Defaults to `secret`.
|
- **JWT_ENABLED**: Specifies whether JSON Web Token validation is enabled by the ONLYOFFICE Document Server. Defaults to `true`.
- **JWT_SECRET**: Defines the secret key to validate the JSON Web Token in the request to the ONLYOFFICE Document Server. Defaults to a random value.
- **JWT_HEADER**: Defines the HTTP header that will be used to send the JSON Web Token. Defaults to `Authorization`.
- **JWT_IN_BODY**: Specifies whether the token is validated in the request body to the ONLYOFFICE Document Server. Defaults to `false`.
- **WOPI_ENABLED**: Specifies whether the WOPI handlers are enabled. Defaults to `false`.
- **ALLOW_META_IP_ADDRESS**: Defines whether connections to the meta IP address are allowed. Defaults to `false`.
- **ALLOW_PRIVATE_IP_ADDRESS**: Defines whether connections to private IP addresses are allowed. Defaults to `false`.
- **USE_UNAUTHORIZED_STORAGE**: Set to `true` if you use self-signed certificates for your storage server, e.g. Nextcloud. Defaults to `false`.
- **GENERATE_FONTS**: When `true`, regenerates the fonts list, font thumbnails, etc. at each start. Defaults to `true`.
- **METRICS_ENABLED**: Specifies whether StatsD is enabled for the ONLYOFFICE Document Server. Defaults to `false`. A combined usage example is shown below.
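Below is a minimal sketch that combines several of the parameters above into a single `docker run` invocation. Host names, credentials and the secret are placeholders, not values defined by this repository.

```bash
sudo docker run -i -t -d -p 80:80 -p 443:443 \
    -e DB_TYPE=postgres -e DB_HOST=my-postgres -e DB_PORT=5432 \
    -e DB_NAME=onlyoffice -e DB_USER=onlyoffice -e DB_PWD=my-db-password \
    -e AMQP_URI=amqp://guest:guest@my-rabbitmq \
    -e JWT_ENABLED=true -e JWT_SECRET=my-jwt-secret \
    onlyoffice/documentserver
```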
@ -215,18 +410,28 @@ Then launch containers on it using the 'docker run --net onlyoffice' option:
|
|||||||
|
|
||||||
Follow [these steps](#installing-mysql) to install MySQL server.
|
Follow [these steps](#installing-mysql) to install MySQL server.
|
||||||
|
|
||||||
**STEP 3**: Install ONLYOFFICE Document Server.
|
**STEP 3**: Generate JWT Secret
|
||||||
|
|
||||||
|
JWT secret defines the secret key to validate the JSON Web Token in the request to the **ONLYOFFICE Document Server**. You can specify it yourself or easily get it using the command:
|
||||||
|
```
|
||||||
|
JWT_SECRET=$(cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 12);
|
||||||
|
```
|
||||||
|
|
||||||
|
**STEP 4**: Install ONLYOFFICE Document Server.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
sudo docker run --net onlyoffice -i -t -d --restart=always --name onlyoffice-document-server \
|
sudo docker run --net onlyoffice -i -t -d --restart=always --name onlyoffice-document-server \
|
||||||
-v /app/onlyoffice/DocumentServer/logs:/var/log/onlyoffice \
|
-e JWT_ENABLED=true \
|
||||||
-v /app/onlyoffice/DocumentServer/data:/var/www/onlyoffice/Data \
|
-e JWT_SECRET=${JWT_SECRET} \
|
||||||
-v /app/onlyoffice/DocumentServer/lib:/var/lib/onlyoffice \
|
-e JWT_HEADER=AuthorizationJwt \
|
||||||
-v /app/onlyoffice/DocumentServer/db:/var/lib/postgresql \
|
-v /app/onlyoffice/DocumentServer/logs:/var/log/onlyoffice \
|
||||||
onlyoffice/documentserver
|
-v /app/onlyoffice/DocumentServer/data:/var/www/onlyoffice/Data \
|
||||||
|
-v /app/onlyoffice/DocumentServer/lib:/var/lib/onlyoffice \
|
||||||
|
-v /app/onlyoffice/DocumentServer/db:/var/lib/postgresql \
|
||||||
|
onlyoffice/documentserver
|
||||||
```
|
```
|
||||||
|
|
||||||
**STEP 4**: Install ONLYOFFICE Mail Server.
|
**STEP 5**: Install ONLYOFFICE Mail Server.
|
||||||
|
|
||||||
For the mail server to work correctly, you need to specify its hostname 'yourdomain.com'.
|
||||||
|
|
||||||
@ -244,14 +449,14 @@ sudo docker run --init --net onlyoffice --privileged -i -t -d --restart=always -
|
|||||||
onlyoffice/mailserver
|
onlyoffice/mailserver
|
||||||
```
|
```
|
||||||
|
|
||||||
The additional parameters for mail server are available [here](https://github.com/ONLYOFFICE/Docker-CommunityServer/blob/master/docker-compose.yml#L75).
|
The additional parameters for mail server are available [here](https://github.com/ONLYOFFICE/Docker-CommunityServer/blob/master/docker-compose.workspace_enterprise.yml#L87).
|
||||||
|
|
||||||
To learn more, refer to the [ONLYOFFICE Mail Server documentation](https://github.com/ONLYOFFICE/Docker-MailServer "ONLYOFFICE Mail Server documentation").
|
To learn more, refer to the [ONLYOFFICE Mail Server documentation](https://github.com/ONLYOFFICE/Docker-MailServer "ONLYOFFICE Mail Server documentation").
|
||||||
|
|
||||||
**STEP 5**: Install ONLYOFFICE Community Server
|
**STEP 6**: Install ONLYOFFICE Community Server
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
sudo docker run --net onlyoffice -i -t -d --restart=always --name onlyoffice-community-server -p 80:80 -p 443:443 -p 5222:5222 \
|
sudo docker run --net onlyoffice -i -t -d --privileged --restart=always --name onlyoffice-community-server -p 80:80 -p 443:443 -p 5222:5222 --cgroupns=host \
|
||||||
-e MYSQL_SERVER_ROOT_PASSWORD=my-secret-pw \
|
-e MYSQL_SERVER_ROOT_PASSWORD=my-secret-pw \
|
||||||
-e MYSQL_SERVER_DB_NAME=onlyoffice \
|
-e MYSQL_SERVER_DB_NAME=onlyoffice \
|
||||||
-e MYSQL_SERVER_HOST=onlyoffice-mysql-server \
|
-e MYSQL_SERVER_HOST=onlyoffice-mysql-server \
|
||||||
@ -259,6 +464,9 @@ sudo docker run --net onlyoffice -i -t -d --restart=always --name onlyoffice-com
|
|||||||
-e MYSQL_SERVER_PASS=onlyoffice_pass \
|
-e MYSQL_SERVER_PASS=onlyoffice_pass \
|
||||||
|
|
||||||
-e DOCUMENT_SERVER_PORT_80_TCP_ADDR=onlyoffice-document-server \
|
-e DOCUMENT_SERVER_PORT_80_TCP_ADDR=onlyoffice-document-server \
|
||||||
|
-e DOCUMENT_SERVER_JWT_ENABLED=true \
|
||||||
|
-e DOCUMENT_SERVER_JWT_SECRET=${JWT_SECRET} \
|
||||||
|
-e DOCUMENT_SERVER_JWT_HEADER=AuthorizationJwt \
|
||||||
|
|
||||||
-e MAIL_SERVER_API_HOST=${MAIL_SERVER_IP} \
|
-e MAIL_SERVER_API_HOST=${MAIL_SERVER_IP} \
|
||||||
-e MAIL_SERVER_DB_HOST=onlyoffice-mysql-server \
|
-e MAIL_SERVER_DB_HOST=onlyoffice-mysql-server \
|
||||||
@ -269,12 +477,14 @@ sudo docker run --net onlyoffice -i -t -d --restart=always --name onlyoffice-com
|
|||||||
|
|
||||||
-v /app/onlyoffice/CommunityServer/data:/var/www/onlyoffice/Data \
|
-v /app/onlyoffice/CommunityServer/data:/var/www/onlyoffice/Data \
|
||||||
-v /app/onlyoffice/CommunityServer/logs:/var/log/onlyoffice \
|
-v /app/onlyoffice/CommunityServer/logs:/var/log/onlyoffice \
|
||||||
|
-v /app/onlyoffice/CommunityServer/letsencrypt:/etc/letsencrypt \
|
||||||
|
-v /sys/fs/cgroup:/sys/fs/cgroup:rw \
|
||||||
onlyoffice/communityserver
|
onlyoffice/communityserver
|
||||||
```
|
```
|
||||||
|
|
||||||
Where `${MAIL_SERVER_IP}` is the IP address for **ONLYOFFICE Mail Server**. You can easily get it using the command:
|
Where `${MAIL_SERVER_IP}` is the IP address for **ONLYOFFICE Mail Server**. You can easily get it using the command:
|
||||||
```
|
```
|
||||||
docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' onlyoffice-mail-server
|
MAIL_SERVER_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' onlyoffice-mail-server)
|
||||||
```
|
```
|
||||||
|
|
||||||
Alternatively, you can use an automatic installation script to install the whole ONLYOFFICE Community Edition at once. For the mail server to work correctly, you need to specify its hostname 'yourdomain.com'.
|
||||||
@ -294,10 +504,34 @@ bash opensource-install.sh -md yourdomain.com
|
|||||||
Or, use [docker-compose](https://docs.docker.com/compose/install "docker-compose"). For the mail server to work correctly, you need to specify its hostname 'yourdomain.com'. Assuming you have docker-compose installed, execute the following command:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
wget https://raw.githubusercontent.com/ONLYOFFICE/Docker-CommunityServer/master/docker-compose.yml
|
wget https://raw.githubusercontent.com/ONLYOFFICE/Docker-CommunityServer/master/docker-compose.groups.yml
|
||||||
docker-compose up -d
|
docker-compose up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## ONLYOFFICE Document Server ipv6 setup
|
||||||
|
|
||||||
|
(Works and is supported only for Linux hosts)
|
||||||
|
|
||||||
|
Docker does not currently provide ipv6 addresses to containers by default; this feature is still experimental.
|
||||||
|
|
||||||
|
To set up interaction via ipv6, you need to enable support for this feature in your Docker. For this you need:
|
||||||
|
- create the `/etc/docker/daemon.json` file with the following content:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"ipv6": true,
|
||||||
|
"fixed-cidr-v6": "2001:db8:abc1::/64"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- restart docker with the following command: `systemctl restart docker`
|
||||||
|
|
||||||
|
After that, all running containers receive an ipv6 address and have an inet6 interface.
|
||||||
|
|
||||||
|
You can check your default bridge network and see the `EnableIPv6=true` field there. A new ipv6 subnet will also be added.
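A quick way to verify this on the host (a minimal sketch using the standard Docker CLI):

```bash
# Should print "true" once the daemon.json change above is applied and Docker is restarted
docker network inspect bridge --format '{{.EnableIPv6}}'
```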
|
||||||
|
|
||||||
|
For more information, visit the official [Docker manual site](https://docs.docker.com/config/daemon/ipv6/)
|
||||||
|
|
||||||
## Issues
|
## Issues
|
||||||
|
|
||||||
### Docker Issues
|
### Docker Issues
|
||||||
@ -332,7 +566,9 @@ SaaS version: [https://www.onlyoffice.com/cloud-office.aspx](https://www.onlyoff
|
|||||||
|
|
||||||
## User Feedback and Support
|
## User Feedback and Support
|
||||||
|
|
||||||
If you have any problems with or questions about this image, please visit our official forum to find answers to your questions: [dev.onlyoffice.org][1] or you can ask and answer ONLYOFFICE development questions on [Stack Overflow][2].
|
If you have any problems with or questions about this image, please visit our official forum to find answers to your questions: [forum.onlyoffice.com][1] or you can ask and answer ONLYOFFICE development questions on [Stack Overflow][2].
|
||||||
|
|
||||||
[1]: https://dev.onlyoffice.org
|
[1]: https://forum.onlyoffice.com
|
||||||
[2]: https://stackoverflow.com/questions/tagged/onlyoffice
|
[2]: https://stackoverflow.com/questions/tagged/onlyoffice
|
||||||
|
|
||||||
|
</details>
|
||||||
|
18  build_and_push_all_images.py  Normal file
@ -0,0 +1,18 @@
import requests
import os

response = requests.get("https://api.github.com/repos/ONLYOFFICE/DocumentServer/releases/latest")

print('docker buildx build --push --platform linux/arm64,linux/amd64 --tag jiriks74/onlyoffice-documentserver:latest .')
os.system(f'docker buildx build --push --platform linux/arm64,linux/amd64 --tag jiriks74/onlyoffice-documentserver:latest .')
print("///////////////////////////////////////////////////////////////////////////")
print('Build and push ":latest" .........................................finished')
print("///////////////////////////////////////////////////////////////////////////")
print()

print(f'docker buildx build --push --platform linux/arm64,linux/amd64 --tag jiriks74/onlyoffice-documentserver:{response.json()["name"].replace("ONLYOFFICE-DocumentServer-", "")} .')
os.system(f'docker buildx build --push --platform linux/arm64,linux/amd64 --tag jiriks74/onlyoffice-documentserver:{response.json()["name"].replace("ONLYOFFICE-DocumentServer-", "")} .')
print("///////////////////////////////////////////////////////////////////////////")
print(f'Build and push ":{response.json()["name"].replace("ONLYOFFICE-DocumentServer-", "")}".........................................finished')
print("///////////////////////////////////////////////////////////////////////////")
|
108
cluster.yml
108
cluster.yml
@ -1,108 +0,0 @@
|
|||||||
version: '2.1'
|
|
||||||
|
|
||||||
x-ds-image:
|
|
||||||
&ds-image
|
|
||||||
${COMPANY_NAME:-onlyoffice}/${PRODUCT_NAME:-documentserver-de}:${PRODUCT_VERSION:-latest}
|
|
||||||
|
|
||||||
services:
|
|
||||||
onlyoffice-documentserver-data:
|
|
||||||
container_name: onlyoffice-documentserver-data
|
|
||||||
image: *ds-image
|
|
||||||
environment:
|
|
||||||
- ONLYOFFICE_DATA_CONTAINER=true
|
|
||||||
- DB_HOST=onlyoffice-postgresql
|
|
||||||
- DB_PORT=5432
|
|
||||||
- DB_NAME=onlyoffice
|
|
||||||
- DB_USER=onlyoffice
|
|
||||||
- AMQP_URI=amqp://guest:guest@onlyoffice-rabbitmq
|
|
||||||
- REDIS_SERVER_HOST=onlyoffice-redis
|
|
||||||
- REDIS_SERVER_PORT=6379
|
|
||||||
# Uncomment strings below to enable the JSON Web Token validation.
|
|
||||||
#- JWT_ENABLED=true
|
|
||||||
#- JWT_SECRET=secret
|
|
||||||
#- JWT_HEADER=Authorization
|
|
||||||
#- JWT_IN_BODY=true
|
|
||||||
stdin_open: true
|
|
||||||
restart: always
|
|
||||||
volumes:
|
|
||||||
- /etc/onlyoffice
|
|
||||||
- /var/www/onlyoffice/Data
|
|
||||||
- /var/log/onlyoffice
|
|
||||||
- /var/lib/onlyoffice/documentserver/App_Data/cache/files
|
|
||||||
- /var/www/onlyoffice/documentserver-example/public/files
|
|
||||||
- /usr/share/fonts
|
|
||||||
|
|
||||||
onlyoffice-documentserver:
|
|
||||||
image: *ds-image
|
|
||||||
depends_on:
|
|
||||||
- onlyoffice-documentserver-data
|
|
||||||
- onlyoffice-postgresql
|
|
||||||
- onlyoffice-redis
|
|
||||||
- onlyoffice-rabbitmq
|
|
||||||
environment:
|
|
||||||
- ONLYOFFICE_DATA_CONTAINER_HOST=onlyoffice-documentserver-data
|
|
||||||
- BALANCE=uri depth 3
|
|
||||||
- EXCLUDE_PORTS=443
|
|
||||||
- HTTP_CHECK=GET /healthcheck
|
|
||||||
- EXTRA_SETTINGS=http-check expect string true
|
|
||||||
# Uncomment the string below to redirect HTTP request to HTTPS request.
|
|
||||||
#- FORCE_SSL=true
|
|
||||||
stdin_open: true
|
|
||||||
restart: always
|
|
||||||
expose:
|
|
||||||
- '80'
|
|
||||||
volumes_from:
|
|
||||||
- onlyoffice-documentserver-data
|
|
||||||
|
|
||||||
onlyoffice-haproxy:
|
|
||||||
container_name: onlyoffice-haproxy
|
|
||||||
image: dockercloud/haproxy:1.5.1
|
|
||||||
depends_on:
|
|
||||||
- onlyoffice-documentserver
|
|
||||||
environment:
|
|
||||||
- MODE=http
|
|
||||||
# Uncomment the string below to specify the path of ssl certificates
|
|
||||||
#- CERT_FOLDER=/certs/
|
|
||||||
stdin_open: true
|
|
||||||
links:
|
|
||||||
- onlyoffice-documentserver
|
|
||||||
volumes:
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
# Uncomment the string below to map a ssl certificate from host
|
|
||||||
# to the proxy container
|
|
||||||
#- /app/onlyoffice/DocumentServer/data/certs/onlyoffice.pem:/certs/cert1.pem
|
|
||||||
restart: always
|
|
||||||
ports:
|
|
||||||
- '80:80'
|
|
||||||
- '443:443'
|
|
||||||
- '1936:1936'
|
|
||||||
|
|
||||||
onlyoffice-redis:
|
|
||||||
container_name: onlyoffice-redis
|
|
||||||
image: redis
|
|
||||||
restart: always
|
|
||||||
expose:
|
|
||||||
- '6379'
|
|
||||||
|
|
||||||
onlyoffice-rabbitmq:
|
|
||||||
container_name: onlyoffice-rabbitmq
|
|
||||||
image: rabbitmq
|
|
||||||
restart: always
|
|
||||||
expose:
|
|
||||||
- '5672'
|
|
||||||
|
|
||||||
onlyoffice-postgresql:
|
|
||||||
container_name: onlyoffice-postgresql
|
|
||||||
image: postgres:9.5
|
|
||||||
environment:
|
|
||||||
- POSTGRES_DB=onlyoffice
|
|
||||||
- POSTGRES_USER=onlyoffice
|
|
||||||
- POSTGRES_HOST_AUTH_METHOD=trust
|
|
||||||
restart: always
|
|
||||||
expose:
|
|
||||||
- '5432'
|
|
||||||
volumes:
|
|
||||||
- postgresql_data:/var/lib/postgresql
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
postgresql_data:
|
|
13
config/supervisor/ds/ds-converter.conf
Normal file
13
config/supervisor/ds/ds-converter.conf
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
[program:converter]
|
||||||
|
command=/var/www/COMPANY_NAME/documentserver/server/FileConverter/converter
|
||||||
|
directory=/var/www/COMPANY_NAME/documentserver/server/FileConverter
|
||||||
|
user=ds
|
||||||
|
environment=NODE_ENV=production-linux,NODE_CONFIG_DIR=/etc/COMPANY_NAME/documentserver,NODE_DISABLE_COLORS=1,APPLICATION_NAME=COMPANY_NAME
|
||||||
|
stdout_logfile=/var/log/COMPANY_NAME/documentserver/converter/out.log
|
||||||
|
stdout_logfile_backups=0
|
||||||
|
stdout_logfile_maxbytes=0
|
||||||
|
stderr_logfile=/var/log/COMPANY_NAME/documentserver/converter/err.log
|
||||||
|
stderr_logfile_backups=0
|
||||||
|
stderr_logfile_maxbytes=0
|
||||||
|
autostart=true
|
||||||
|
autorestart=true
|
13
config/supervisor/ds/ds-docservice.conf
Normal file
13
config/supervisor/ds/ds-docservice.conf
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
[program:docservice]
|
||||||
|
command=/var/www/COMPANY_NAME/documentserver/server/DocService/docservice
|
||||||
|
directory=/var/www/COMPANY_NAME/documentserver/server/DocService
|
||||||
|
user=ds
|
||||||
|
environment=NODE_ENV=production-linux,NODE_CONFIG_DIR=/etc/COMPANY_NAME/documentserver,NODE_DISABLE_COLORS=1
|
||||||
|
stdout_logfile=/var/log/COMPANY_NAME/documentserver/docservice/out.log
|
||||||
|
stdout_logfile_backups=0
|
||||||
|
stdout_logfile_maxbytes=0
|
||||||
|
stderr_logfile=/var/log/COMPANY_NAME/documentserver/docservice/err.log
|
||||||
|
stderr_logfile_backups=0
|
||||||
|
stderr_logfile_maxbytes=0
|
||||||
|
autostart=true
|
||||||
|
autorestart=true
|
14
config/supervisor/ds/ds-example.conf
Normal file
14
config/supervisor/ds/ds-example.conf
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
[program:example]
|
||||||
|
command=/var/www/COMPANY_NAME/documentserver-example/example
|
||||||
|
directory=/var/www/COMPANY_NAME/documentserver-example/
|
||||||
|
user=ds
|
||||||
|
environment=NODE_ENV=production-linux,NODE_CONFIG_DIR=/etc/COMPANY_NAME/documentserver-example,NODE_DISABLE_COLORS=1
|
||||||
|
stdout_logfile=/var/log/COMPANY_NAME/documentserver-example/out.log
|
||||||
|
stdout_logfile_backups=0
|
||||||
|
stdout_logfile_maxbytes=0
|
||||||
|
stderr_logfile=/var/log/COMPANY_NAME/documentserver-example/err.log
|
||||||
|
stderr_logfile_backups=0
|
||||||
|
stderr_logfile_maxbytes=0
|
||||||
|
autostart=false
|
||||||
|
autorestart=true
|
||||||
|
redirect_stderr=true
|
13
config/supervisor/ds/ds-metrics.conf
Normal file
13
config/supervisor/ds/ds-metrics.conf
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
[program:metrics]
|
||||||
|
command=/var/www/COMPANY_NAME/documentserver/server/Metrics/metrics ./config/config.js
|
||||||
|
directory=/var/www/COMPANY_NAME/documentserver/server/Metrics
|
||||||
|
user=ds
|
||||||
|
environment=NODE_DISABLE_COLORS=1
|
||||||
|
stdout_logfile=/var/log/COMPANY_NAME/documentserver/metrics/out.log
|
||||||
|
stdout_logfile_backups=0
|
||||||
|
stdout_logfile_maxbytes=0
|
||||||
|
stderr_logfile=/var/log/COMPANY_NAME/documentserver/metrics/err.log
|
||||||
|
stderr_logfile_backups=0
|
||||||
|
stderr_logfile_maxbytes=0
|
||||||
|
autostart=true
|
||||||
|
autorestart=true
|
2  config/supervisor/ds/ds.conf  Normal file
@ -0,0 +1,2 @@
[group:ds]
programs=docservice,converter,metrics,example
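The `ds` group defined above lets all Document Server services be managed as one unit. A minimal sketch of doing that from the host (the container name is an assumption):

```bash
# Show the status of every program in the ds group, then restart the whole group
docker exec onlyoffice-document-server supervisorctl status 'ds:*'
docker exec onlyoffice-document-server supervisorctl restart 'ds:*'
```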
@ -1,27 +0,0 @@
|
|||||||
; supervisor config file
|
|
||||||
|
|
||||||
[inet_http_server]
|
|
||||||
port = 127.0.0.1:9001
|
|
||||||
|
|
||||||
[supervisord]
|
|
||||||
logfile=/var/log/supervisor/supervisord.log ; (main log file;default $CWD/supervisord.log)
|
|
||||||
pidfile=/var/run/supervisord.pid ; (supervisord pidfile;default supervisord.pid)
|
|
||||||
childlogdir=/var/log/supervisor ; ('AUTO' child log dir, default $TEMP)
|
|
||||||
|
|
||||||
; the below section must remain in the config file for RPC
|
|
||||||
; (supervisorctl/web interface) to work, additional interfaces may be
|
|
||||||
; added by defining them in separate rpcinterface: sections
|
|
||||||
[rpcinterface:supervisor]
|
|
||||||
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
|
|
||||||
|
|
||||||
[supervisorctl]
|
|
||||||
serverurl = http://localhost:9001 ; use a unix:// URL for a unix socket
|
|
||||||
|
|
||||||
; The [include] section can just contain the "files" setting. This
|
|
||||||
; setting can list multiple files (separated by whitespace or
|
|
||||||
; newlines). It can also contain wildcards. The filenames are
|
|
||||||
; interpreted as relative to this file. Included files *cannot*
|
|
||||||
; include files themselves.
|
|
||||||
|
|
||||||
[include]
|
|
||||||
files = /etc/supervisor/conf.d/*.conf
|
|
11  default.nix  Normal file
@ -0,0 +1,11 @@
{ pkgs ? (import <nixpkgs> {
    config.allowUnfree = true;
  }),
}:
pkgs.mkShell {
  pure = true;
  packages = with pkgs; [
    # Choose the build tools that you need
    act
  ];
}
169
docker-bake.hcl
Normal file
169
docker-bake.hcl
Normal file
@ -0,0 +1,169 @@
|
|||||||
|
variable "TAG" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "SHORTER_TAG" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "SHORTEST_TAG" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PULL_TAG" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "COMPANY_NAME" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PREFIX_NAME" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PRODUCT_EDITION" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PRODUCT_NAME" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PACKAGE_VERSION" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "DOCKERFILE" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PLATFORM" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PACKAGE_BASEURL" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PACKAGE_FILE" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "BUILD_CHANNEL" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PUSH_MAJOR" {
|
||||||
|
default = "false"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "LATEST" {
|
||||||
|
default = "false"
|
||||||
|
}
|
||||||
|
|
||||||
|
### ↓ Variables for UCS build ↓
|
||||||
|
|
||||||
|
variable "BASE_IMAGE" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "PG_VERSION" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "UCS_REBUILD" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "UCS_PREFIX" {
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
### ↑ Variables for UCS build ↑
|
||||||
|
|
||||||
|
target "documentserver" {
|
||||||
|
target = "documentserver"
|
||||||
|
dockerfile = "${DOCKERFILE}"
|
||||||
|
tags = [
|
||||||
|
"docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${TAG}",
|
||||||
|
equal("nightly",BUILD_CHANNEL) ? "docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:latest": "",
|
||||||
|
]
|
||||||
|
platforms = ["${PLATFORM}"]
|
||||||
|
args = {
|
||||||
|
"COMPANY_NAME": "${COMPANY_NAME}"
|
||||||
|
"PRODUCT_NAME": "${PRODUCT_NAME}"
|
||||||
|
"PRODUCT_EDITION": "${PRODUCT_EDITION}"
|
||||||
|
"PACKAGE_VERSION": "${PACKAGE_VERSION}"
|
||||||
|
"PACKAGE_BASEURL": "${PACKAGE_BASEURL}"
|
||||||
|
"PLATFORM": "${PLATFORM}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
target "documentserver-stable" {
|
||||||
|
target = "documentserver-stable"
|
||||||
|
dockerfile = "production.dockerfile"
|
||||||
|
tags = ["docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${TAG}",
|
||||||
|
"docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${SHORTER_TAG}",
|
||||||
|
"docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${SHORTEST_TAG}",
|
||||||
|
"docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:latest",
|
||||||
|
equal("-ee",PRODUCT_EDITION) ? "docker.io/${COMPANY_NAME}4enterprise/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${TAG}": "",]
|
||||||
|
platforms = ["linux/amd64", "linux/arm64"]
|
||||||
|
args = {
|
||||||
|
"PULL_TAG": "${PULL_TAG}"
|
||||||
|
"COMPANY_NAME": "${COMPANY_NAME}"
|
||||||
|
"PRODUCT_NAME": "${PRODUCT_NAME}"
|
||||||
|
"PRODUCT_EDITION": "${PRODUCT_EDITION}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
target "documentserver-ucs" {
|
||||||
|
target = "documentserver"
|
||||||
|
dockerfile = "${DOCKERFILE}"
|
||||||
|
tags = [
|
||||||
|
"docker.io/${COMPANY_NAME}/${PRODUCT_NAME}${PRODUCT_EDITION}-ucs:${TAG}"
|
||||||
|
]
|
||||||
|
platforms = ["linux/amd64", "linux/arm64"]
|
||||||
|
args = {
|
||||||
|
"PRODUCT_EDITION": "${PRODUCT_EDITION}"
|
||||||
|
"PRODUCT_NAME": "${PRODUCT_NAME}"
|
||||||
|
"COMPANY_NAME": "${COMPANY_NAME}"
|
||||||
|
"PACKAGE_VERSION": "${PACKAGE_VERSION}"
|
||||||
|
"PACKAGE_BASEURL": "${PACKAGE_BASEURL}"
|
||||||
|
"BASE_IMAGE": "${BASE_IMAGE}"
|
||||||
|
"PG_VERSION": "${PG_VERSION}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
target "documentserver-nonexample" {
|
||||||
|
target = "documentserver-nonexample"
|
||||||
|
dockerfile = "production.dockerfile"
|
||||||
|
tags = [ "docker.io/${COMPANY_NAME}/${PRODUCT_NAME}${PREFIX_NAME}${PRODUCT_EDITION}:${TAG}-nonexample" ]
|
||||||
|
platforms = ["linux/amd64", "linux/arm64"]
|
||||||
|
args = {
|
||||||
|
"PULL_TAG": "${PULL_TAG}"
|
||||||
|
"COMPANY_NAME": "${COMPANY_NAME}"
|
||||||
|
"PRODUCT_NAME": "${PRODUCT_NAME}"
|
||||||
|
"PRODUCT_EDITION": "${PRODUCT_EDITION}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
target "documentserver-stable-rebuild" {
|
||||||
|
target = "documentserver-stable-rebuild"
|
||||||
|
dockerfile = "production.dockerfile"
|
||||||
|
tags = equal("true",UCS_REBUILD) ? ["docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}-ucs:${TAG}",] : [
|
||||||
|
"docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${TAG}",
|
||||||
|
equal("",PREFIX_NAME) ? "docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${SHORTER_TAG}": "",
|
||||||
|
equal("true",PUSH_MAJOR) ? "docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${SHORTEST_TAG}": "",
|
||||||
|
equal("",PREFIX_NAME) && equal("true",LATEST) ? "docker.io/${COMPANY_NAME}/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:latest": "",
|
||||||
|
equal("-ee",PRODUCT_EDITION) && equal("",PREFIX_NAME) ? "docker.io/${COMPANY_NAME}4enterprise/${PREFIX_NAME}${PRODUCT_NAME}${PRODUCT_EDITION}:${TAG}": "",
|
||||||
|
]
|
||||||
|
platforms = ["linux/amd64", "linux/arm64"]
|
||||||
|
args = {
|
||||||
|
"UCS_PREFIX": "${UCS_PREFIX}"
|
||||||
|
"PULL_TAG": "${PULL_TAG}"
|
||||||
|
"COMPANY_NAME": "${COMPANY_NAME}"
|
||||||
|
"PRODUCT_NAME": "${PRODUCT_NAME}"
|
||||||
|
"PRODUCT_EDITION": "${PRODUCT_EDITION}"
|
||||||
|
}
|
||||||
|
}
|
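For reference, a minimal sketch of invoking one of the targets above with `docker buildx bake`; every value shown here is a placeholder assumption, not something defined by this file:

```bash
# Bake variables are read from the environment; tag, platform and Dockerfile are examples only.
TAG=0.0.0 COMPANY_NAME=onlyoffice PRODUCT_NAME=documentserver PRODUCT_EDITION= \
PLATFORM=linux/amd64 DOCKERFILE=Dockerfile \
docker buildx bake -f docker-bake.hcl documentserver
```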
@ -17,15 +17,20 @@ services:
|
|||||||
# Uncomment strings below to enable the JSON Web Token validation.
|
# Uncomment strings below to enable the JSON Web Token validation.
|
||||||
#- JWT_ENABLED=true
|
#- JWT_ENABLED=true
|
||||||
#- JWT_SECRET=secret
|
#- JWT_SECRET=secret
|
||||||
#- JWT_HEADER=Authorization
|
#- JWT_HEADER=AuthorizationJwt
|
||||||
#- JWT_IN_BODY=true
|
#- JWT_IN_BODY=true
|
||||||
|
# Uncomment the line below to set larger file limits (about 1GB)
|
||||||
|
#- LARGER_FILE_LIMITS=true
|
||||||
ports:
|
ports:
|
||||||
- '80:80'
|
- '80:80'
|
||||||
- '443:443'
|
- '443:443'
|
||||||
stdin_open: true
|
stdin_open: true
|
||||||
restart: always
|
restart: always
|
||||||
stop_grace_period: 60s
|
stop_grace_period: 120s
|
||||||
volumes:
|
volumes:
|
||||||
|
# Uncomment the line below to get access to the slide themes directory.
|
||||||
|
# To use the themes, copy them to the slideThemes directory and run `docker exec -it <container-name> /usr/bin/documentserver-generate-allfonts.sh`
|
||||||
|
#- ./slideThemes:/var/www/onlyoffice/documentserver/sdkjs/slide/themes/src
|
||||||
- /var/www/onlyoffice/Data
|
- /var/www/onlyoffice/Data
|
||||||
- /var/log/onlyoffice
|
- /var/log/onlyoffice
|
||||||
- /var/lib/onlyoffice/documentserver/App_Data/cache/files
|
- /var/lib/onlyoffice/documentserver/App_Data/cache/files
|
||||||
@ -41,7 +46,7 @@ services:
|
|||||||
|
|
||||||
onlyoffice-postgresql:
|
onlyoffice-postgresql:
|
||||||
container_name: onlyoffice-postgresql
|
container_name: onlyoffice-postgresql
|
||||||
image: postgres:9.5
|
image: postgres:12
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_DB=onlyoffice
|
- POSTGRES_DB=onlyoffice
|
||||||
- POSTGRES_USER=onlyoffice
|
- POSTGRES_USER=onlyoffice
|
||||||
|
16
hooks/build
16
hooks/build
@ -1,16 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
TAG="${SOURCE_BRANCH:1}"
|
|
||||||
if [[ $TAG =~ ^[0-9]{1,4}\.[0-9]{1,4}\.[0-9]{1,4}\.[0-9]{1,4}$ ]]; then
|
|
||||||
TAG=${TAG%.*}-${TAG##*.}
|
|
||||||
if [ ${TAG:0:8} = "99.99.99" ]; then
|
|
||||||
URL_FOLDER="unstable"
|
|
||||||
else
|
|
||||||
URL_FOLDER="testing"
|
|
||||||
fi
|
|
||||||
URL="https://repo-doc-onlyoffice-com.s3-eu-west-1.amazonaws.com/onlyoffice/${URL_FOLDER}/ubuntu/${COMPANY_NAME}-${PRODUCT_NAME}_${TAG}_amd64.deb"
|
|
||||||
docker build --build-arg PACKAGE_URL=$URL -f $DOCKERFILE_PATH -t $IMAGE_NAME .
|
|
||||||
else
|
|
||||||
echo Tag is not correct, Image will be built with default URL.
|
|
||||||
docker build -f $DOCKERFILE_PATH -t $IMAGE_NAME .
|
|
||||||
fi
|
|
33
production.dockerfile
Normal file
33
production.dockerfile
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
### Arguments available only for the FROM instruction ###
|
||||||
|
ARG PULL_TAG=latest
|
||||||
|
ARG COMPANY_NAME=onlyoffice
|
||||||
|
ARG PRODUCT_EDITION=
|
||||||
|
### Rebuild arguments
|
||||||
|
ARG UCS_PREFIX=
|
||||||
|
ARG IMAGE=${COMPANY_NAME}/documentserver${PRODUCT_EDITION}${UCS_PREFIX}:${PULL_TAG}
|
||||||
|
|
||||||
|
### Build main-release ###
|
||||||
|
|
||||||
|
FROM ${COMPANY_NAME}/4testing-documentserver${PRODUCT_EDITION}:${PULL_TAG} as documentserver-stable
|
||||||
|
|
||||||
|
### Rebuild stable images with secure updates
|
||||||
|
FROM ${IMAGE} as documentserver-stable-rebuild
|
||||||
|
RUN echo "This is rebuild" \
|
||||||
|
&& apt-get update -y \
|
||||||
|
&& apt-get upgrade -y
|
||||||
|
|
||||||
|
### Build nonexample ###
|
||||||
|
|
||||||
|
FROM ${COMPANY_NAME}/documentserver${PRODUCT_EDITION}:${PULL_TAG} as documentserver-nonexample
|
||||||
|
|
||||||
|
ARG COMPANY_NAME=onlyoffice
|
||||||
|
ARG PRODUCT_NAME=documentserver
|
||||||
|
ARG DS_SUPERVISOR_CONF=/etc/supervisor/conf.d/ds.conf
|
||||||
|
|
||||||
|
### Remove all documentserver-example data ###
|
||||||
|
|
||||||
|
RUN rm -rf /var/www/$COMPANY_NAME/$PRODUCT_NAME-example \
|
||||||
|
&& rm -rf /etc/$COMPANY_NAME/$PRODUCT_NAME-example \
|
||||||
|
&& rm -f $DS_SUPERVISOR_CONF \
|
||||||
|
&& rm -f /etc/nginx/includes/ds-example.conf \
|
||||||
|
&& ln -s /etc/$COMPANY_NAME/$PRODUCT_NAME/supervisor/ds.conf $DS_SUPERVISOR_CONF
|
217
run-document-server.sh
Executable file → Normal file
217
run-document-server.sh
Executable file → Normal file
@ -1,7 +1,12 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
umask 0022
|
||||||
|
|
||||||
function clean_exit {
|
function clean_exit {
|
||||||
/usr/bin/documentserver-prepare4shutdown.sh
|
if [ ${ONLYOFFICE_DATA_CONTAINER} == "false" ] && \
|
||||||
|
[ ${ONLYOFFICE_DATA_CONTAINER_HOST} == "localhost" ]; then
|
||||||
|
/usr/bin/documentserver-prepare4shutdown.sh
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
trap clean_exit SIGTERM
|
trap clean_exit SIGTERM
|
||||||
@ -11,24 +16,47 @@ shopt -s globstar
|
|||||||
|
|
||||||
APP_DIR="/var/www/${COMPANY_NAME}/documentserver"
|
APP_DIR="/var/www/${COMPANY_NAME}/documentserver"
|
||||||
DATA_DIR="/var/www/${COMPANY_NAME}/Data"
|
DATA_DIR="/var/www/${COMPANY_NAME}/Data"
|
||||||
|
PRIVATE_DATA_DIR="${DATA_DIR}/.private"
|
||||||
|
DS_RELEASE_DATE="${PRIVATE_DATA_DIR}/ds_release_date"
|
||||||
LOG_DIR="/var/log/${COMPANY_NAME}"
|
LOG_DIR="/var/log/${COMPANY_NAME}"
|
||||||
DS_LOG_DIR="${LOG_DIR}/documentserver"
|
DS_LOG_DIR="${LOG_DIR}/documentserver"
|
||||||
LIB_DIR="/var/lib/${COMPANY_NAME}"
|
LIB_DIR="/var/lib/${COMPANY_NAME}"
|
||||||
DS_LIB_DIR="${LIB_DIR}/documentserver"
|
DS_LIB_DIR="${LIB_DIR}/documentserver"
|
||||||
CONF_DIR="/etc/${COMPANY_NAME}/documentserver"
|
CONF_DIR="/etc/${COMPANY_NAME}/documentserver"
|
||||||
|
IS_UPGRADE="false"
|
||||||
|
|
||||||
ONLYOFFICE_DATA_CONTAINER=${ONLYOFFICE_DATA_CONTAINER:-false}
|
ONLYOFFICE_DATA_CONTAINER=${ONLYOFFICE_DATA_CONTAINER:-false}
|
||||||
ONLYOFFICE_DATA_CONTAINER_HOST=${ONLYOFFICE_DATA_CONTAINER_HOST:-localhost}
|
ONLYOFFICE_DATA_CONTAINER_HOST=${ONLYOFFICE_DATA_CONTAINER_HOST:-localhost}
|
||||||
ONLYOFFICE_DATA_CONTAINER_PORT=80
|
ONLYOFFICE_DATA_CONTAINER_PORT=80
|
||||||
|
|
||||||
SSL_CERTIFICATES_DIR="${DATA_DIR}/certs"
|
RELEASE_DATE="$(stat -c="%y" ${APP_DIR}/server/DocService/docservice | sed -r 's/=([0-9]+)-([0-9]+)-([0-9]+) ([0-9:.+ ]+)/\1-\2-\3/')";
|
||||||
if [[ -z $SSL_CERTIFICATE_PATH ]] && [[ -f ${SSL_CERTIFICATES_DIR}/onlyoffice.crt ]]; then
|
if [ -f ${DS_RELEASE_DATE} ]; then
|
||||||
SSL_CERTIFICATE_PATH=${SSL_CERTIFICATES_DIR}/onlyoffice.crt
|
PREV_RELEASE_DATE=$(head -n 1 ${DS_RELEASE_DATE})
|
||||||
|
else
|
||||||
|
PREV_RELEASE_DATE="0"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${RELEASE_DATE}" != "${PREV_RELEASE_DATE}" ]; then
|
||||||
|
if [ ${ONLYOFFICE_DATA_CONTAINER} != "true" ]; then
|
||||||
|
IS_UPGRADE="true";
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
SSL_CERTIFICATES_DIR="/usr/share/ca-certificates/ds"
|
||||||
|
mkdir -p ${SSL_CERTIFICATES_DIR}
|
||||||
|
if [[ -d ${DATA_DIR}/certs ]] && [ -e ${DATA_DIR}/certs/*.crt ]; then
|
||||||
|
cp -f ${DATA_DIR}/certs/* ${SSL_CERTIFICATES_DIR}
|
||||||
|
chmod 644 ${SSL_CERTIFICATES_DIR}/*.crt ${SSL_CERTIFICATES_DIR}/*.pem
|
||||||
|
chmod 400 ${SSL_CERTIFICATES_DIR}/*.key
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z $SSL_CERTIFICATE_PATH ]] && [[ -f ${SSL_CERTIFICATES_DIR}/${COMPANY_NAME}.crt ]]; then
|
||||||
|
SSL_CERTIFICATE_PATH=${SSL_CERTIFICATES_DIR}/${COMPANY_NAME}.crt
|
||||||
else
|
else
|
||||||
SSL_CERTIFICATE_PATH=${SSL_CERTIFICATE_PATH:-${SSL_CERTIFICATES_DIR}/tls.crt}
|
SSL_CERTIFICATE_PATH=${SSL_CERTIFICATE_PATH:-${SSL_CERTIFICATES_DIR}/tls.crt}
|
||||||
fi
|
fi
|
||||||
if [[ -z $SSL_KEY_PATH ]] && [[ -f ${SSL_CERTIFICATES_DIR}/onlyoffice.key ]]; then
|
if [[ -z $SSL_KEY_PATH ]] && [[ -f ${SSL_CERTIFICATES_DIR}/${COMPANY_NAME}.key ]]; then
|
||||||
SSL_KEY_PATH=${SSL_CERTIFICATES_DIR}/onlyoffice.key
|
SSL_KEY_PATH=${SSL_CERTIFICATES_DIR}/${COMPANY_NAME}.key
|
||||||
else
|
else
|
||||||
SSL_KEY_PATH=${SSL_KEY_PATH:-${SSL_CERTIFICATES_DIR}/tls.key}
|
SSL_KEY_PATH=${SSL_KEY_PATH:-${SSL_CERTIFICATES_DIR}/tls.key}
|
||||||
fi
|
fi
|
||||||
@ -48,16 +76,31 @@ NGINX_ONLYOFFICE_EXAMPLE_CONF="${NGINX_ONLYOFFICE_EXAMPLE_PATH}/includes/ds-exam
|
|||||||
|
|
||||||
NGINX_CONFIG_PATH="/etc/nginx/nginx.conf"
|
NGINX_CONFIG_PATH="/etc/nginx/nginx.conf"
|
||||||
NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1}
|
NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1}
|
||||||
NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-$(ulimit -n)}
|
# Limiting the maximum number of simultaneous connections due to possible memory shortage
|
||||||
|
[ $(ulimit -n) -gt 1048576 ] && NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1048576} || NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-$(ulimit -n)}
|
||||||
|
|
||||||
JWT_ENABLED=${JWT_ENABLED:-false}
|
JWT_ENABLED=${JWT_ENABLED:-true}
|
||||||
JWT_SECRET=${JWT_SECRET:-secret}
|
|
||||||
|
# validate user's vars before using them in json
|
||||||
|
if [ "${JWT_ENABLED}" == "true" ]; then
|
||||||
|
JWT_ENABLED="true"
|
||||||
|
else
|
||||||
|
JWT_ENABLED="false"
|
||||||
|
fi
|
||||||
|
|
||||||
|
[ -z $JWT_SECRET ] && JWT_MESSAGE='JWT is enabled by default. A random secret is generated automatically. Run the command "docker exec $(sudo docker ps -q) sudo documentserver-jwt-status.sh" to get information about JWT.'
|
||||||
|
|
||||||
|
JWT_SECRET=${JWT_SECRET:-$(pwgen -s 32)}
|
||||||
JWT_HEADER=${JWT_HEADER:-Authorization}
|
JWT_HEADER=${JWT_HEADER:-Authorization}
|
||||||
JWT_IN_BODY=${JWT_IN_BODY:-false}
|
JWT_IN_BODY=${JWT_IN_BODY:-false}
|
||||||
|
|
||||||
|
WOPI_ENABLED=${WOPI_ENABLED:-false}
|
||||||
|
ALLOW_META_IP_ADDRESS=${ALLOW_META_IP_ADDRESS:-false}
|
||||||
|
ALLOW_PRIVATE_IP_ADDRESS=${ALLOW_PRIVATE_IP_ADDRESS:-false}
|
||||||
|
|
||||||
GENERATE_FONTS=${GENERATE_FONTS:-true}
|
GENERATE_FONTS=${GENERATE_FONTS:-true}
|
||||||
|
|
||||||
if [[ ${PRODUCT_NAME} == "documentserver" ]]; then
|
if [[ ${PRODUCT_NAME}${PRODUCT_EDITION} == "documentserver" ]]; then
|
||||||
REDIS_ENABLED=false
|
REDIS_ENABLED=false
|
||||||
else
|
else
|
||||||
REDIS_ENABLED=true
|
REDIS_ENABLED=true
|
||||||
@ -103,6 +146,7 @@ read_setting(){
|
|||||||
METRICS_PREFIX="${METRICS_PREFIX:-.ds}"
|
METRICS_PREFIX="${METRICS_PREFIX:-.ds}"
|
||||||
|
|
||||||
DB_HOST=${DB_HOST:-${POSTGRESQL_SERVER_HOST:-$(${JSON} services.CoAuthoring.sql.dbHost)}}
|
DB_HOST=${DB_HOST:-${POSTGRESQL_SERVER_HOST:-$(${JSON} services.CoAuthoring.sql.dbHost)}}
|
||||||
|
DB_TYPE=${DB_TYPE:-$(${JSON} services.CoAuthoring.sql.type)}
|
||||||
case $DB_TYPE in
|
case $DB_TYPE in
|
||||||
"postgres")
|
"postgres")
|
||||||
DB_PORT=${DB_PORT:-"5432"}
|
DB_PORT=${DB_PORT:-"5432"}
|
||||||
@ -121,7 +165,6 @@ read_setting(){
|
|||||||
DB_NAME=${DB_NAME:-${POSTGRESQL_SERVER_DB_NAME:-$(${JSON} services.CoAuthoring.sql.dbName)}}
|
DB_NAME=${DB_NAME:-${POSTGRESQL_SERVER_DB_NAME:-$(${JSON} services.CoAuthoring.sql.dbName)}}
|
||||||
DB_USER=${DB_USER:-${POSTGRESQL_SERVER_USER:-$(${JSON} services.CoAuthoring.sql.dbUser)}}
|
DB_USER=${DB_USER:-${POSTGRESQL_SERVER_USER:-$(${JSON} services.CoAuthoring.sql.dbUser)}}
|
||||||
DB_PWD=${DB_PWD:-${POSTGRESQL_SERVER_PASS:-$(${JSON} services.CoAuthoring.sql.dbPass)}}
|
DB_PWD=${DB_PWD:-${POSTGRESQL_SERVER_PASS:-$(${JSON} services.CoAuthoring.sql.dbPass)}}
|
||||||
DB_TYPE=${DB_TYPE:-$(${JSON} services.CoAuthoring.sql.type)}
|
|
||||||
|
|
||||||
RABBITMQ_SERVER_URL=${RABBITMQ_SERVER_URL:-$(${JSON} rabbitmq.url)}
|
RABBITMQ_SERVER_URL=${RABBITMQ_SERVER_URL:-$(${JSON} rabbitmq.url)}
|
||||||
AMQP_URI=${AMQP_URI:-${AMQP_SERVER_URL:-${RABBITMQ_SERVER_URL}}}
|
AMQP_URI=${AMQP_URI:-${AMQP_SERVER_URL:-${RABBITMQ_SERVER_URL}}}
|
||||||
@ -162,7 +205,7 @@ parse_rabbitmq_url(){
|
|||||||
# extract the host
|
# extract the host
|
||||||
local hostport="$(echo ${url/$userpass@/} | cut -d/ -f1)"
|
local hostport="$(echo ${url/$userpass@/} | cut -d/ -f1)"
|
||||||
# by request - try to extract the port
|
# by request - try to extract the port
|
||||||
local port="$(echo $hostport | sed -e 's,^.*:,:,g' -e 's,.*:\([0-9]*\).*,\1,g' -e 's,[^0-9],,g')"
|
local port="$(echo $hostport | grep : | sed -r 's_^.*:+|/.*$__g')"
|
||||||
|
|
||||||
local host
|
local host
|
||||||
if [ -n "$port" ]; then
|
if [ -n "$port" ]; then
|
||||||
@ -266,37 +309,52 @@ update_rabbitmq_setting(){
|
|||||||
}
|
}
|
||||||
|
|
||||||
update_redis_settings(){
|
update_redis_settings(){
|
||||||
|
${JSON} -I -e "if(this.services.CoAuthoring.redis===undefined)this.services.CoAuthoring.redis={};"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.redis.host = '${REDIS_SERVER_HOST}'"
|
${JSON} -I -e "this.services.CoAuthoring.redis.host = '${REDIS_SERVER_HOST}'"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.redis.port = '${REDIS_SERVER_PORT}'"
|
${JSON} -I -e "this.services.CoAuthoring.redis.port = '${REDIS_SERVER_PORT}'"
|
||||||
|
|
||||||
|
if [ -n "${REDIS_SERVER_PASS}" ]; then
|
||||||
|
${JSON} -I -e "this.services.CoAuthoring.redis.options = {'password':'${REDIS_SERVER_PASS}'}"
|
||||||
|
fi
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
update_ds_settings(){
|
update_ds_settings(){
|
||||||
if [ "${JWT_ENABLED}" == "true" ]; then
|
${JSON} -I -e "this.services.CoAuthoring.token.enable.browser = ${JWT_ENABLED}"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.enable.browser = ${JWT_ENABLED}"
|
${JSON} -I -e "this.services.CoAuthoring.token.enable.request.inbox = ${JWT_ENABLED}"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.enable.request.inbox = ${JWT_ENABLED}"
|
${JSON} -I -e "this.services.CoAuthoring.token.enable.request.outbox = ${JWT_ENABLED}"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.enable.request.outbox = ${JWT_ENABLED}"
|
|
||||||
|
|
||||||
${JSON} -I -e "this.services.CoAuthoring.secret.inbox.string = '${JWT_SECRET}'"
|
${JSON} -I -e "this.services.CoAuthoring.secret.inbox.string = '${JWT_SECRET}'"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.secret.outbox.string = '${JWT_SECRET}'"
|
${JSON} -I -e "this.services.CoAuthoring.secret.outbox.string = '${JWT_SECRET}'"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.secret.session.string = '${JWT_SECRET}'"
|
${JSON} -I -e "this.services.CoAuthoring.secret.session.string = '${JWT_SECRET}'"
|
||||||
|
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.inbox.header = '${JWT_HEADER}'"
|
${JSON} -I -e "this.services.CoAuthoring.token.inbox.header = '${JWT_HEADER}'"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.outbox.header = '${JWT_HEADER}'"
|
${JSON} -I -e "this.services.CoAuthoring.token.outbox.header = '${JWT_HEADER}'"
|
||||||
|
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.inbox.inBody = ${JWT_IN_BODY}"
|
${JSON} -I -e "this.services.CoAuthoring.token.inbox.inBody = ${JWT_IN_BODY}"
|
||||||
${JSON} -I -e "this.services.CoAuthoring.token.outbox.inBody = ${JWT_IN_BODY}"
|
${JSON} -I -e "this.services.CoAuthoring.token.outbox.inBody = ${JWT_IN_BODY}"
|
||||||
|
|
||||||
if [ -f "${ONLYOFFICE_EXAMPLE_CONFIG}" ] && [ "${JWT_ENABLED}" == "true" ]; then
|
if [ -f "${ONLYOFFICE_EXAMPLE_CONFIG}" ]; then
|
||||||
${JSON_EXAMPLE} -I -e "this.server.token.enable = ${JWT_ENABLED}"
|
${JSON_EXAMPLE} -I -e "this.server.token.enable = ${JWT_ENABLED}"
|
||||||
${JSON_EXAMPLE} -I -e "this.server.token.secret = '${JWT_SECRET}'"
|
${JSON_EXAMPLE} -I -e "this.server.token.secret = '${JWT_SECRET}'"
|
||||||
${JSON_EXAMPLE} -I -e "this.server.token.authorizationHeader = '${JWT_HEADER}'"
|
${JSON_EXAMPLE} -I -e "this.server.token.authorizationHeader = '${JWT_HEADER}'"
|
||||||
fi
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "${USE_UNAUTHORIZED_STORAGE}" == "true" ]; then
|
if [ "${USE_UNAUTHORIZED_STORAGE}" == "true" ]; then
|
||||||
${JSON} -I -e "if(this.services.CoAuthoring.requestDefaults===undefined)this.services.CoAuthoring.requestDefaults={}"
|
${JSON} -I -e "if(this.services.CoAuthoring.requestDefaults===undefined)this.services.CoAuthoring.requestDefaults={}"
|
||||||
${JSON} -I -e "if(this.services.CoAuthoring.requestDefaults.rejectUnauthorized===undefined)this.services.CoAuthoring.requestDefaults.rejectUnauthorized=false"
|
${JSON} -I -e "if(this.services.CoAuthoring.requestDefaults.rejectUnauthorized===undefined)this.services.CoAuthoring.requestDefaults.rejectUnauthorized=false"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [ "${WOPI_ENABLED}" == "true" ]; then
|
||||||
|
${JSON} -I -e "if(this.wopi===undefined)this.wopi={}"
|
||||||
|
${JSON} -I -e "this.wopi.enable = true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${ALLOW_META_IP_ADDRESS}" = "true" ] || [ "${ALLOW_PRIVATE_IP_ADDRESS}" = "true" ]; then
|
||||||
|
${JSON} -I -e "if(this.services.CoAuthoring['request-filtering-agent']===undefined)this.services.CoAuthoring['request-filtering-agent']={}"
|
||||||
|
[ "${ALLOW_META_IP_ADDRESS}" = "true" ] && ${JSON} -I -e "this.services.CoAuthoring['request-filtering-agent'].allowMetaIPAddress = true"
|
||||||
|
[ "${ALLOW_PRIVATE_IP_ADDRESS}" = "true" ] && ${JSON} -I -e "this.services.CoAuthoring['request-filtering-agent'].allowPrivateIPAddress = true"
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
create_postgresql_cluster(){
|
create_postgresql_cluster(){
|
||||||
@ -311,9 +369,8 @@ create_postgresql_cluster(){
|
|||||||
}
|
}
|
||||||
|
|
||||||
create_postgresql_db(){
|
create_postgresql_db(){
|
||||||
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME;"
|
|
||||||
sudo -u postgres psql -c "CREATE USER $DB_USER WITH password '"$DB_PWD"';"
|
sudo -u postgres psql -c "CREATE USER $DB_USER WITH password '"$DB_PWD"';"
|
||||||
sudo -u postgres psql -c "GRANT ALL privileges ON DATABASE $DB_NAME TO $DB_USER;"
|
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
|
||||||
}
|
}
|
||||||
|
|
||||||
create_db_tbl() {
|
create_db_tbl() {
|
||||||
@ -327,6 +384,36 @@ create_db_tbl() {
|
|||||||
esac
|
esac
|
||||||
}
|
}
|
||||||
|
|
||||||
|
upgrade_db_tbl() {
|
||||||
|
case $DB_TYPE in
|
||||||
|
"postgres")
|
||||||
|
upgrade_postgresql_tbl
|
||||||
|
;;
|
||||||
|
"mariadb"|"mysql")
|
||||||
|
upgrade_mysql_tbl
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
upgrade_postgresql_tbl() {
|
||||||
|
if [ -n "$DB_PWD" ]; then
|
||||||
|
export PGPASSWORD=$DB_PWD
|
||||||
|
fi
|
||||||
|
|
||||||
|
PSQL="psql -q -h$DB_HOST -p$DB_PORT -d$DB_NAME -U$DB_USER -w"
|
||||||
|
|
||||||
|
$PSQL -f "$APP_DIR/server/schema/postgresql/removetbl.sql"
|
||||||
|
$PSQL -f "$APP_DIR/server/schema/postgresql/createdb.sql"
|
||||||
|
}
|
||||||
|
|
||||||
|
upgrade_mysql_tbl() {
|
||||||
|
CONNECTION_PARAMS="-h$DB_HOST -P$DB_PORT -u$DB_USER -p$DB_PWD -w"
|
||||||
|
MYSQL="mysql -q $CONNECTION_PARAMS"
|
||||||
|
|
||||||
|
$MYSQL $DB_NAME < "$APP_DIR/server/schema/mysql/removetbl.sql" >/dev/null 2>&1
|
||||||
|
$MYSQL $DB_NAME < "$APP_DIR/server/schema/mysql/createdb.sql" >/dev/null 2>&1
|
||||||
|
}
|
||||||
|
|
||||||
create_postgresql_tbl() {
|
create_postgresql_tbl() {
|
||||||
if [ -n "$DB_PWD" ]; then
|
if [ -n "$DB_PWD" ]; then
|
||||||
export PGPASSWORD=$DB_PWD
|
export PGPASSWORD=$DB_PWD
|
||||||
@ -350,11 +437,16 @@ update_welcome_page() {
|
|||||||
WELCOME_PAGE="${APP_DIR}-example/welcome/docker.html"
|
WELCOME_PAGE="${APP_DIR}-example/welcome/docker.html"
|
||||||
if [[ -e $WELCOME_PAGE ]]; then
|
if [[ -e $WELCOME_PAGE ]]; then
|
||||||
DOCKER_CONTAINER_ID=$(basename $(cat /proc/1/cpuset))
|
DOCKER_CONTAINER_ID=$(basename $(cat /proc/1/cpuset))
|
||||||
if [[ -x $(command -v docker) ]]; then
|
(( ${#DOCKER_CONTAINER_ID} < 12 )) && DOCKER_CONTAINER_ID=$(hostname)
|
||||||
DOCKER_CONTAINER_NAME=$(docker inspect --format="{{.Name}}" $DOCKER_CONTAINER_ID)
|
if (( ${#DOCKER_CONTAINER_ID} >= 12 )); then
|
||||||
sed 's/$(sudo docker ps -q)/'"${DOCKER_CONTAINER_NAME#/}"'/' -i $WELCOME_PAGE
|
if [[ -x $(command -v docker) ]]; then
|
||||||
else
|
DOCKER_CONTAINER_NAME=$(docker inspect --format="{{.Name}}" $DOCKER_CONTAINER_ID)
|
||||||
sed 's/$(sudo docker ps -q)/'"${DOCKER_CONTAINER_ID::12}"'/' -i $WELCOME_PAGE
|
sed 's/$(sudo docker ps -q)/'"${DOCKER_CONTAINER_NAME#/}"'/' -i $WELCOME_PAGE
|
||||||
|
JWT_MESSAGE=$(echo $JWT_MESSAGE | sed 's/$(sudo docker ps -q)/'"${DOCKER_CONTAINER_NAME#/}"'/')
|
||||||
|
else
|
||||||
|
sed 's/$(sudo docker ps -q)/'"${DOCKER_CONTAINER_ID::12}"'/' -i $WELCOME_PAGE
|
||||||
|
JWT_MESSAGE=$(echo $JWT_MESSAGE | sed 's/$(sudo docker ps -q)/'"${DOCKER_CONTAINER_ID::12}"'/')
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
@ -406,13 +498,8 @@ update_nginx_settings(){
|
|||||||
if [ -f "${NGINX_ONLYOFFICE_EXAMPLE_CONF}" ]; then
|
if [ -f "${NGINX_ONLYOFFICE_EXAMPLE_CONF}" ]; then
|
||||||
sed 's/linux/docker/' -i ${NGINX_ONLYOFFICE_EXAMPLE_CONF}
|
sed 's/linux/docker/' -i ${NGINX_ONLYOFFICE_EXAMPLE_CONF}
|
||||||
fi
|
fi
|
||||||
}
|
|
||||||
|
|
||||||
update_supervisor_settings(){
|
documentserver-update-securelink.sh -s ${SECURE_LINK_SECRET:-$(pwgen -s 20)} -r false
|
||||||
# Copy modified supervisor start script
|
|
||||||
cp ${SYSCONF_TEMPLATES_DIR}/supervisor/supervisor /etc/init.d/
|
|
||||||
# Copy modified supervisor config
|
|
||||||
cp ${SYSCONF_TEMPLATES_DIR}/supervisor/supervisord.conf /etc/supervisor/supervisord.conf
|
|
||||||
}
|
}
|
||||||
|
|
||||||
update_log_settings(){
|
update_log_settings(){
|
||||||
@ -423,8 +510,13 @@ update_logrotate_settings(){
|
|||||||
sed 's|\(^su\b\).*|\1 root root|' -i /etc/logrotate.conf
|
sed 's|\(^su\b\).*|\1 root root|' -i /etc/logrotate.conf
|
||||||
}
|
}
|
||||||
|
|
||||||
|
update_release_date(){
|
||||||
|
mkdir -p ${PRIVATE_DATA_DIR}
|
||||||
|
echo ${RELEASE_DATE} > ${DS_RELEASE_DATE}
|
||||||
|
}
|
||||||
|
|
||||||
# create base folders
|
# create base folders
|
||||||
for i in converter docservice spellchecker metrics; do
|
for i in converter docservice metrics; do
|
||||||
mkdir -p "${DS_LOG_DIR}/$i"
|
mkdir -p "${DS_LOG_DIR}/$i"
|
||||||
done
|
done
|
||||||
|
|
||||||
@ -436,7 +528,7 @@ for i in ${DS_LIB_DIR}/App_Data/cache/files ${DS_LIB_DIR}/App_Data/docbuilder ${
|
|||||||
done
|
done
|
||||||
|
|
||||||
# change folder rights
|
# change folder rights
|
||||||
for i in ${LOG_DIR} ${LIB_DIR} ${DATA_DIR}; do
|
for i in ${LOG_DIR} ${LIB_DIR}; do
|
||||||
chown -R ds:ds "$i"
|
chown -R ds:ds "$i"
|
||||||
chmod -R 755 "$i"
|
chmod -R 755 "$i"
|
||||||
done
|
done
|
||||||
@ -510,6 +602,8 @@ else
|
|||||||
update_welcome_page
|
update_welcome_page
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
find /etc/${COMPANY_NAME} ! -path '*logrotate*' -exec chown ds:ds {} \;
|
||||||
|
|
||||||
#start needed local services
|
#start needed local services
|
||||||
for i in ${LOCAL_SERVICES[@]}; do
|
for i in ${LOCAL_SERVICES[@]}; do
|
||||||
service $i start
|
service $i start
|
||||||
@ -527,9 +621,13 @@ if [ ${ONLYOFFICE_DATA_CONTAINER} != "true" ]; then
|
|||||||
waiting_for_redis
|
waiting_for_redis
|
||||||
fi
|
fi
|
||||||
|
|
||||||
update_nginx_settings
|
if [ "${IS_UPGRADE}" = "true" ]; then
|
||||||
|
upgrade_db_tbl
|
||||||
|
update_release_date
|
||||||
|
fi
|
||||||
|
|
||||||
update_supervisor_settings
|
update_nginx_settings
|
||||||
|
|
||||||
service supervisor start
|
service supervisor start
|
||||||
|
|
||||||
# start cron to enable log rotating
|
# start cron to enable log rotating
|
||||||
@ -553,5 +651,32 @@ if [ "${GENERATE_FONTS}" == "true" ]; then
|
|||||||
fi
|
fi
|
||||||
documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}
|
documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}
|
||||||
|
|
||||||
|
echo "${JWT_MESSAGE}"
|
||||||
|
|
||||||
|
# Check if larger file limits should be set
|
||||||
|
if [ "$LARGER_FILE_LIMITS" = "true" ]; then
|
||||||
|
if [ -e /app/ds/file_limits_set ]; then
|
||||||
|
echo ""
|
||||||
|
else
|
||||||
|
touch /app/ds/file_limits_set
|
||||||
|
|
||||||
|
sed -i -e 's/104857600/10485760000/g' /etc/onlyoffice/documentserver-example/production-linux.json
|
||||||
|
|
||||||
|
sed -i '9iclient_max_body_size 1000M;' /etc/onlyoffice/documentserver-example/nginx/includes/ds-example.conf
|
||||||
|
sed -i '16iclient_max_body_size 1000M;' /etc/nginx/nginx.conf
|
||||||
|
|
||||||
|
sed -i -e 's/104857600/10485760000/g' /etc/onlyoffice/documentserver/default.json
|
||||||
|
sed -i -e 's/50MB/5000MB/g' /etc/onlyoffice/documentserver/default.json
|
||||||
|
sed -i -e 's/300MB/3000MB/g' /etc/onlyoffice/documentserver/default.json
|
||||||
|
|
||||||
|
sed -i 's/^client_max_body_size 100m;$/client_max_body_size 1000m;/' /etc/onlyoffice/documentserver/nginx/includes/ds-common.conf
|
||||||
|
|
||||||
|
service nginx restart
|
||||||
|
supervisorctl restart all
|
||||||
|
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
tail -f /var/log/${COMPANY_NAME}/**/*.log &
|
tail -f /var/log/${COMPANY_NAME}/**/*.log &
|
||||||
wait $!
|
wait $!
|
||||||
|
|
||||||
|
BIN
slideThemes/01_blank.pptx
Normal file
BIN
slideThemes/01_blank.pptx
Normal file
Binary file not shown.
BIN
slideThemes/02_basic.pptx
Normal file
BIN
slideThemes/02_basic.pptx
Normal file
Binary file not shown.
BIN
slideThemes/03_classic.pptx
Normal file
BIN
slideThemes/03_classic.pptx
Normal file
Binary file not shown.
BIN
slideThemes/04_official.pptx
Normal file
BIN
slideThemes/04_official.pptx
Normal file
Binary file not shown.
BIN
slideThemes/05_green leaf.pptx
Normal file
BIN
slideThemes/05_green leaf.pptx
Normal file
Binary file not shown.
BIN
slideThemes/06_lines.pptx
Normal file
BIN
slideThemes/06_lines.pptx
Normal file
Binary file not shown.
BIN
slideThemes/07_office.pptx
Normal file
BIN
slideThemes/07_office.pptx
Normal file
Binary file not shown.
BIN
slideThemes/08_safari.pptx
Normal file
BIN
slideThemes/08_safari.pptx
Normal file
Binary file not shown.
BIN
slideThemes/09_dotted.pptx
Normal file
BIN
slideThemes/09_dotted.pptx
Normal file
Binary file not shown.
BIN
slideThemes/10_corner.pptx
Normal file
BIN
slideThemes/10_corner.pptx
Normal file
Binary file not shown.
BIN
slideThemes/11_tort.pptx
Normal file
BIN
slideThemes/11_tort.pptx
Normal file
Binary file not shown.
@ -1,32 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    environment:
      - AMQP_TYPE=${AMQP_TYPE:-activemq}
      - AMQP_URI=${AMQP_URI:-amqp://guest:guest@onlyoffice-activemq}
    stdin_open: true
    restart: always
    ports:
      - '80:80'
      - '443:443'
    networks:
      - onlyoffice

  onlyoffice-activemq:
    container_name: onlyoffice-activemq
    image: webcenter/activemq:${ACTIVEMQ_VERSION:-5.14.3}
    environment:
      - ACTIVEMQ_USERS_guest=${ACTIVEMQ_USERS_guest:-guest}
      - ACTIVEMQ_GROUPS_owners=${ACTIVEMQ_GROUPS_owners:-guest}
    restart: always
    networks:
      - onlyoffice
    expose:
      - '5672'

networks:
  onlyoffice:
    driver: 'bridge'
@ -1,18 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    environment:
      - SSL_CERTIFICATE_PATH=${SSL_CERTIFICATE_PATH:-/var/www/onlyoffice/Data/certs/tls.crt}
      - SSL_KEY_PATH=${SSL_KEY_PATH:-/var/www/onlyoffice/Data/certs/tls.key}
      - CA_CERTIFICATES_PATH=${CA_CERTIFICATES_PATH:-/var/www/onlyoffice/Data/certs/ca-certificates.pem}
      - SSL_DHPARAM_PATH=${SSL_DHPARAM_PATH:-/var/www/onlyoffice/Data/certs/dhparam.pem}
    stdin_open: true
    restart: always
    ports:
      - '80:80'
      - '443:443'
    volumes:
      - ./data:/var/www/onlyoffice/Data
@ -1,13 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    stdin_open: true
    restart: always
    ports:
      - '80:80'
      - '443:443'
    volumes:
      - ./data:/var/www/onlyoffice/Data
@ -1,32 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    depends_on:
      - onlyoffice-graphite
    environment:
      - METRICS_ENABLED=${METRICS_ENABLED:-true}
      - METRICS_HOST=${METRICS_HOST:-localhost}
      - METRICS_PORT=${METRICS_PORT:-8125}
      - METRICS_PREFIX=${METRICS_PREFIX:-ds.}
    stdin_open: true
    restart: always
    expose:
      - '2003'
    ports:
      - '80:80'
    volumes:
      - ./graphite/statsd:/var/www/onlyoffice/documentserver/server/Metrics/config

  onlyoffice-graphite:
    container_name: onlyoffice-graphite
    image: graphiteapp/graphite-statsd
    environment:
      - GRAPHITE_STATSD_HOST=${GRAPHITE_STATSD_HOST:-onlyoffice-documentserver}
      - GRAPHITE_TIME_ZONE=${GRAPHITE_TIME_ZONE:-Etc/UTC}
    ports:
      - '8888:80'
    stdin_open: true
    restart: always
@ -1,7 +0,0 @@
{
  "graphiteHost": "onlyoffice-graphite",
  "graphitePort": 2003,
  "port": 8125,
  "flushInterval": 60000,
  "backends": [ "./backends/graphite.js" ]
}
@ -1,36 +0,0 @@
version: '2.1'
services:
  ds:
    container_name: ds
    build:
      context: ../.
    depends_on:
      - onlyoffice-mariadb
    environment:
      - DB_TYPE=${DB_TYPE:-mysql}
      - DB_HOST=${DB_HOST:-onlyoffice-mariadb}
      - DB_PORT=${DB_PORT:-3306}
      - DB_NAME=${DB_NAME:-onlyoffice}
      - DB_USER=${DB_USER:-onlyoffice}
      - DB_PWD=${DB_PWD:-onlyoffice}
    stdin_open: true
    restart: always
    ports:
      - '80:80'

  onlyoffice-mariadb:
    container_name: onlyoffice-mariadb
    image: mariadb:${MARIADB_VERSION:-10.5}
    environment:
      - MYSQL_DATABASE=${MYSQL_DATABASE:-onlyoffice}
      - MYSQL_USER=${MYSQL_USER:-onlyoffice}
      - MYSQL_PASSWORD=${MYSQL_PASSWORD:-onlyoffice}
      - MYSQL_ALLOW_EMPTY_PASSWORD=${MYSQL_ALLOW_EMPTY_PASSWORD:-yes}
    restart: always
    volumes:
      - mysql_data:/var/lib/mysql
    expose:
      - '3306'

volumes:
  mysql_data:
@ -1,37 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    depends_on:
      - onlyoffice-mysql
    environment:
      - DB_TYPE=${DB_TYPE:-mysql}
      - DB_HOST=${DB_HOST:-onlyoffice-mysql}
      - DB_PORT=${DB_PORT:-3306}
      - DB_NAME=${DB_NAME:-onlyoffice}
      - DB_USER=${DB_USER:-onlyoffice}
      - DB_PWD=${DB_PWD:-onlyoffice}
    stdin_open: true
    restart: always
    ports:
      - '80:80'

  onlyoffice-mysql:
    container_name: onlyoffice-mysql
    image: mysql:${MYSQL_VERSION:-5.7}
    command: --default-authentication-plugin=mysql_native_password
    environment:
      - MYSQL_DATABASE=${MYSQL_DATABASE:-onlyoffice}
      - MYSQL_USER=${MYSQL_USER:-onlyoffice}
      - MYSQL_PASSWORD=${MYSQL_PASSWORD:-onlyoffice}
      - MYSQL_ALLOW_EMPTY_PASSWORD=${MYSQL_ALLOW_EMPTY_PASSWORD:-yes}
    restart: always
    volumes:
      - mysql_data:/var/lib/mysql
    expose:
      - '3306'

volumes:
  mysql_data:
@ -1,34 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    depends_on:
      - onlyoffice-postgresql
    environment:
      - POSTGRESQL_SERVER_HOST=${DB_HOST:-onlyoffice-postgresql}
      - POSTGRESQL_SERVER_PORT=${DB_PORT:-5432}
      - POSTGRESQL_SERVER_DB_NAME=${DB_NAME:-onlyoffice}
      - POSTGRESQL_SERVER_USER=${DB_USER:-onlyoffice}
      - POSTGRESQL_SERVER_PASS=${DB_PWD:-onlyoffice}
    stdin_open: true
    restart: always
    ports:
      - '80:80'

  onlyoffice-postgresql:
    container_name: onlyoffice-postgresql
    image: postgres:9.5
    environment:
      - POSTGRES_DB=${POSTGRES_DB:-onlyoffice}
      - POSTGRES_USER=${POSTGRES_USER:-onlyoffice}
      - POSTGRES_HOST_AUTH_METHOD=${POSTGRES_HOST_AUTH_METHOD:-trust}
    restart: always
    expose:
      - '5432'
    volumes:
      - postgresql_data:/var/lib/postgresql

volumes:
  postgresql_data:
@ -20,7 +20,7 @@ services:
  onlyoffice-postgresql:
    container_name: onlyoffice-postgresql
    image: postgres:${POSTGRES_VERSION:-9.5}   (old)
    image: postgres:${POSTGRES_VERSION:-12}    (new)
    environment:
      - POSTGRES_DB=${POSTGRES_DB:-onlyoffice}
      - POSTGRES_USER=${POSTGRES_USER:-onlyoffice}
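The only change in this hunk is the default PostgreSQL tag, bumped from 9.5 to 12. As a sketch of how that default interacts with docker-compose variable substitution (the compose file name below is a placeholder, since the file path is not shown here):

# hypothetical: pin a specific PostgreSQL tag instead of relying on the new default
POSTGRES_VERSION=12 docker-compose -p ds -f postgres.yml up -d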
46  tests/prometheus.yml  (new file)
@ -0,0 +1,46 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    depends_on:
      - onlyoffice-statsd-exporter
    environment:
      - METRICS_ENABLED=${METRICS_ENABLED:-true}
      - METRICS_HOST=${METRICS_HOST:-onlyoffice-statsd-exporter}
      - METRICS_PORT=${METRICS_PORT:-9125}
      - METRICS_PREFIX=${METRICS_PREFIX:-ds.}
    stdin_open: true
    restart: always
    ports:
      - '80:80'

  onlyoffice-statsd-exporter:
    container_name: onlyoffice-statsd-exporter
    image: prom/statsd-exporter
    command: --statsd.event-flush-interval=30000ms
    ports:
      - '9102:9102'
      - '9125:9125/tcp'
      - '9125:9125/udp'

  onlyoffice-prometheus:
    container_name: onlyoffice-prometheus
    image: prom/prometheus
    ports:
      - '9090:9090'
    volumes:
      - ./prometheus/prometheus-scrape/statsd-exporter.yml:/etc/prometheus/prometheus.yml

  grafana:
    container_name: onlyoffice-grafana
    image: bitnami/grafana
    ports:
      - '3000:3000'
    environment:
      - 'GF_SECURITY_ADMIN_PASSWORD=G0pGE4'
    volumes:
      - ./prometheus/grafana/conf/prometheus.yml:/opt/bitnami/grafana/conf/provisioning/datasources/prometheus.yml
      - ./prometheus/grafana/conf/default-provider.yaml:/opt/bitnami/grafana/conf/provisioning/dashboards/default-provider.yaml
      - ./prometheus/grafana/dashboards:/opt/bitnami/grafana/dashboards
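A possible way to exercise the monitoring stack defined above, assuming it is launched from the tests directory (an assumption; the -p ds project name mirrors the test script elsewhere in this changeset):

# bring up Document Server plus statsd-exporter, Prometheus and Grafana;
# Prometheus is then published on :9090 and Grafana on :3000 as mapped above
docker-compose -p ds -f prometheus.yml up -d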
23  tests/prometheus/grafana/conf/default-provider.yaml  (new file)
@ -0,0 +1,23 @@
apiVersion: 1
providers:
  # <string> a unique provider name
  - name: 'default-provider'
    # <int> org id. will default to orgId 1 if not specified
    orgId: 1
    # <string, required> name of the dashboard folder. Required
    folder: dashboards
    # <string> folder UID. will be automatically generated if not specified
    folderUid: ''
    # <string, required> provider type. Required
    type: file
    # <bool> disable dashboard deletion
    disableDeletion: false
    # <bool> enable dashboard editing
    editable: true
    # <int> how often Grafana will scan for changed dashboards
    updateIntervalSeconds: 10
    options:
      # <string, required> path to dashboard files on disk. Required
      path: /opt/bitnami/grafana/dashboards
      # <bool> enable folders creation for dashboards
      #foldersFromFilesStructure: true
6  tests/prometheus/grafana/conf/prometheus.yml  (new file)
@ -0,0 +1,6 @@
apiVersion: 1
datasources:
  - name: Prometheus
    type: prometheus
    url: http://onlyoffice-prometheus:9090
    editable: true
File diff suppressed because it is too large
6  tests/prometheus/prometheus-scrape/statsd-exporter.yml  (new file)
@ -0,0 +1,6 @@
scrape_configs:
  - job_name: 'statsd'
    scrape_interval: 30s
    static_configs:
      - targets:
          - onlyoffice-statsd-exporter:9102
@ -1,29 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    environment:
      - AMQP_SERVER_TYPE=${AMQP_SERVER_TYPE:-rabbitmq}
      - AMQP_SERVER_URL=${AMQP_SERVER_URL:-amqp://guest:guest@onlyoffice-rabbitmq}
    stdin_open: true
    restart: always
    ports:
      - '80:80'
      - '443:443'
    networks:
      - onlyoffice

  onlyoffice-rabbitmq:
    container_name: onlyoffice-rabbitmq
    image: rabbitmq
    restart: always
    networks:
      - onlyoffice
    expose:
      - '5672'

networks:
  onlyoffice:
    driver: 'bridge'
@ -1,29 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
    environment:
      - AMQP_TYPE=${AMQP_TYPE:-rabbitmq}
      - AMQP_URI=${AMQP_URI:-amqp://guest:guest@onlyoffice-rabbitmq}
    stdin_open: true
    restart: always
    ports:
      - '80:80'
      - '443:443'
    networks:
      - onlyoffice

  onlyoffice-rabbitmq:
    container_name: onlyoffice-rabbitmq
    image: rabbitmq:${RABBITMQ_VERSION:-latest}
    restart: always
    networks:
      - onlyoffice
    expose:
      - '5672'

networks:
  onlyoffice:
    driver: 'bridge'
@ -1,31 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
      args:
        - PRODUCT_NAME=${PRODUCT_NAME:-documentserver}
    environment:
      - REDIS_SERVER_HOST=${REDIS_SERVER_HOST:-onlyoffice-redis}
      - REDIS_SERVER_PORT=${REDIS_SERVER_PORT:-6379}
    stdin_open: true
    restart: always
    ports:
      - '80:80'
      - '443:443'
    networks:
      - onlyoffice

  onlyoffice-redis:
    container_name: onlyoffice-redis
    image: redis:${REDIS_VERSION:-latest}
    restart: always
    networks:
      - onlyoffice
    expose:
      - '6379'

networks:
  onlyoffice:
    driver: 'bridge'
@ -1,12 +0,0 @@
version: '2.1'
services:
  onlyoffice-documentserver:
    container_name: onlyoffice-documentserver
    build:
      context: ../.
      args:
        - PRODUCT_NAME=${PRODUCT_NAME:-documentserver}
    stdin_open: true
    restart: always
    ports:
      - '80:80'
@ -1,54 +0,0 @@
#!/bin/bash

ssl=${ssl:-false}
private_key=${private_key:-tls.key}
certificate_request=${certificate_request:-tls.csr}
certificate=${certificate:-tls.crt}

# Generate certificate
if [[ $ssl == "true" ]]; then
  url=${url:-"https://localhost"}

  mkdir -p data/certs
  pushd data/certs

  openssl genrsa -out ${private_key} 2048
  openssl req \
    -new \
    -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" \
    -key ${private_key} \
    -out ${certificate_request}
  openssl x509 -req -days 365 -in ${certificate_request} -signkey ${private_key} -out ${certificate}
  openssl dhparam -out dhparam.pem 2048
  chmod 400 ${private_key}

  popd
else
  url=${url:-"http://localhost"}
fi

# Check if the yml exists
if [[ ! -f $config ]]; then
  echo "File $config doesn't exist!"
  exit 1
fi

# Run test environment
docker-compose -p ds -f $config up -d

wakeup_timeout=90

# Get documentserver healthcheck status
echo "Wait for service wake up"
sleep $wakeup_timeout
healthcheck_res=$(wget --no-check-certificate -qO - ${url}/healthcheck)

# Fail if it isn't true
if [[ $healthcheck_res == "true" ]]; then
  echo "Healthcheck passed."
else
  echo "Healthcheck failed!"
  exit 1
fi

docker-compose -p ds -f $config down