Mirror of https://bitbucket.org/atlassian-docker/docker-atlassian-confluence-server.git
Synced 2024-08-30 18:22:16 +00:00
Merged in switch-to-master (pull request #37)

Switch to master

Approved-by: Steve Smith <ssmith@atlassian.com>
Approved-by: Minh Tran <mtran@atlassian.com>

commit 26d7047500
.gitignore (new file, 1 line)

@@ -0,0 +1 @@
__pycache__/
Dockerfile-alpine (new file, 40 lines)

@@ -0,0 +1,40 @@
FROM adoptopenjdk/openjdk8:alpine
MAINTAINER Atlassian Confluence

ENV RUN_USER daemon
ENV RUN_GROUP daemon

# https://confluence.atlassian.com/doc/confluence-home-and-other-important-directories-590259707.html
ENV CONFLUENCE_HOME /var/atlassian/application-data/confluence
ENV CONFLUENCE_INSTALL_DIR /opt/atlassian/confluence

VOLUME ["${CONFLUENCE_HOME}"]

# Expose HTTP and Synchrony ports
EXPOSE 8090
EXPOSE 8091

WORKDIR $CONFLUENCE_HOME

CMD ["/entrypoint.sh", "-fg"]
ENTRYPOINT ["/sbin/tini", "--"]

RUN apk add --no-cache ca-certificates wget curl openssh bash procps openssl perl ttf-dejavu tini

# Workaround for AdoptOpenJDK Alpine fontconfig bug
RUN ln -s /usr/lib/libfontconfig.so.1 /usr/lib/libfontconfig.so \
    && ln -s /lib/libuuid.so.1 /usr/lib/libuuid.so.1 \
    && ln -s /lib/libc.musl-x86_64.so.1 /usr/lib/libc.musl-x86_64.so.1
ENV LD_LIBRARY_PATH /usr/lib

COPY entrypoint.sh /entrypoint.sh

ARG CONFLUENCE_VERSION
ARG DOWNLOAD_URL=http://www.atlassian.com/software/confluence/downloads/binary/atlassian-confluence-${CONFLUENCE_VERSION}.tar.gz

RUN mkdir -p ${CONFLUENCE_INSTALL_DIR} \
    && curl -L --silent ${DOWNLOAD_URL} | tar -xz --strip-components=1 -C "$CONFLUENCE_INSTALL_DIR" \
    && chown -R ${RUN_USER}:${RUN_GROUP} ${CONFLUENCE_INSTALL_DIR}/ \
    && sed -i -e 's/-Xms\([0-9]\+[kmg]\) -Xmx\([0-9]\+[kmg]\)/-Xms\${JVM_MINIMUM_MEMORY:=\1} -Xmx\${JVM_MAXIMUM_MEMORY:=\2} \${JVM_SUPPORT_RECOMMENDED_ARGS} -Dconfluence.home=\${CONFLUENCE_HOME}/g' ${CONFLUENCE_INSTALL_DIR}/bin/setenv.sh \
    && sed -i -e 's/port="8090"/port="8090" secure="${catalinaConnectorSecure}" scheme="${catalinaConnectorScheme}" proxyName="${catalinaConnectorProxyName}" proxyPort="${catalinaConnectorProxyPort}"/' ${CONFLUENCE_INSTALL_DIR}/conf/server.xml \
    && sed -i -e 's/Context path=""/Context path="${catalinaContextPath}"/' ${CONFLUENCE_INSTALL_DIR}/conf/server.xml
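For a quick local smoke test of the new Alpine image, the build can be driven with the same docker-py client the tests below use. A minimal sketch, assuming a local Docker daemon and the docker Python package; the version string and image tag are illustrative placeholders, not part of this change:

# Sketch: build and start the Alpine image locally with docker-py.
# '6.15.7' and 'confluence-dev:alpine' are example values only.
import docker

cli = docker.from_env()
image, _ = cli.images.build(path='.',
                            dockerfile='Dockerfile-alpine',
                            buildargs={'CONFLUENCE_VERSION': '6.15.7'},
                            tag='confluence-dev:alpine',
                            rm=True)
container = cli.containers.run(image, ports={8090: 8090}, detach=True)
print(container.logs().decode())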
bitbucket-pipelines.yml (modified)

@@ -2,8 +2,23 @@ image: atlassian/docker-release-maker:latest
 
 pipelines:
   custom:
-    create-new-releases:
+    new-releases:
       - step:
+          name: Confluence Alpine
+          services:
+            - docker
+          script:
+            - export BASE_VERSION='6'
+            - export DEFAULT_RELEASE='false'
+            - export DOCKER_REPO='atlassian/confluence-server'
+            - export DOCKERFILE='Dockerfile-alpine'
+            - export DOCKERFILE_VERSION_ARG='CONFLUENCE_VERSION'
+            - export MAC_PRODUCT_KEY='confluence'
+            - export TAG_SUFFIXES='alpine,alpine-adoptopenjdk8'
+            - echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
+            - python /usr/src/app/run.py --create
+      - step:
+          name: Confluence Ubuntu
           services:
             - docker
           script:
@@ -15,7 +30,7 @@ pipelines:
             - export TAG_SUFFIXES='adoptopenjdk8,jdk8,ubuntu,ubuntu-18.04-adoptopenjdk8'
             - echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
             - python /usr/src/app/run.py --create
-    create-custom-release:
+    ubuntu-custom-release:
       - variables:
           - name: CONFLUENCE_VERSION
           - name: DOCKER_TAG
@@ -24,11 +39,42 @@ pipelines:
             - docker
           script:
             - echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
-            - docker build -t atlassian/confluence-server:${DOCKER_TAG} --build-arg CONFLUENCE_VERSION=${CONFLUENCE_VERSION} .
+            - >
+              docker build -t atlassian/confluence-server:${DOCKER_TAG}
+              --build-arg CONFLUENCE_VERSION=${CONFLUENCE_VERSION} .
+            - docker push atlassian/confluence-server:${DOCKER_TAG}
+    alpine-custom-release:
+      - variables:
+          - name: CONFLUENCE_VERSION
+          - name: DOCKER_TAG
+      - step:
+          services:
+            - docker
+          script:
+            - echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
+            - >
+              docker build -t atlassian/confluence-server:${DOCKER_TAG}
+              -f Dockerfile-alpine
+              --build-arg CONFLUENCE_VERSION=${CONFLUENCE_VERSION} .
             - docker push atlassian/confluence-server:${DOCKER_TAG}
   branches:
-    base-6-adoptopenjdk8:
+    master:
       - step:
+          name: Confluence Alpine
+          services:
+            - docker
+          script:
+            - export BASE_VERSION='6'
+            - export DEFAULT_RELEASE='false'
+            - export DOCKER_REPO='atlassian/confluence-server'
+            - export DOCKERFILE='Dockerfile-alpine'
+            - export DOCKERFILE_VERSION_ARG='CONFLUENCE_VERSION'
+            - export MAC_PRODUCT_KEY='confluence'
+            - export TAG_SUFFIXES='alpine,alpine-adoptopenjdk8'
+            - echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
+            - python /usr/src/app/run.py --update
+      - step:
+          name: Confluence Ubuntu
           services:
             - docker
           script:
@@ -39,4 +85,17 @@ pipelines:
             - export MAC_PRODUCT_KEY='confluence'
             - export TAG_SUFFIXES='adoptopenjdk8,jdk8,ubuntu,ubuntu-18.04-adoptopenjdk8'
             - echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
             - python /usr/src/app/run.py --update
+  pull-requests:
+    '**':
+      - step:
+          image: python:3.7-alpine3.9
+          services:
+            - docker
+          script:
+            - pip install -q -r tests/test-requirements.txt
+            - py.test tests/
+definitions:
+  services:
+    docker:
+      memory: 2048
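Of the new pipeline definitions, the pull-requests step is the one most easily reproduced outside Bitbucket. A rough local equivalent, assuming a running Docker daemon and a Python interpreter comparable to the python:3.7-alpine3.9 image used above:

# Sketch of the pull-request step run locally: install the pinned test
# dependencies, then execute the pytest suite (python -m pytest matches py.test).
import subprocess
import sys

subprocess.run([sys.executable, '-m', 'pip', 'install', '-q', '-r',
                'tests/test-requirements.txt'], check=True)
subprocess.run([sys.executable, '-m', 'pytest', 'tests/'], check=True)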
tests/conftest.py (new file, 35 lines)

@@ -0,0 +1,35 @@
import pytest

import docker
import requests


MAC_PRODUCT_KEY = 'confluence'
DOCKER_VERSION_ARG = 'CONFLUENCE_VERSION'
DOCKERFILES = ['Dockerfile', 'Dockerfile-alpine']
IMAGE_NAME = 'confluence-dev'

# This fixture cleans up running containers whose base image matches IMAGE_NAME after each test
@pytest.fixture
def docker_cli():
    docker_cli = docker.from_env()
    yield docker_cli
    for container in docker_cli.containers.list():
        for tag in container.image.tags:
            if tag.startswith(IMAGE_NAME):
                container.remove(force=True)


@pytest.fixture(scope='module', params=DOCKERFILES)
def image(request):
    r = requests.get(f'https://marketplace.atlassian.com/rest/2/products/key/{MAC_PRODUCT_KEY}/versions/latest')
    version = r.json().get('name')
    buildargs = {DOCKER_VERSION_ARG: version}
    docker_cli = docker.from_env()
    image = docker_cli.images.build(path='.',
                                    tag=f'{IMAGE_NAME}:{request.param.lower()}',
                                    buildargs=buildargs,
                                    dockerfile=request.param,
                                    rm=True)[0]
    return image
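A test module only needs to declare these fixture names to get a freshly built image plus a client whose containers are cleaned up afterwards. A minimal sketch of such a test; the module name, test name and assertion are illustrative, not part of this change:

# Hypothetical tests/test_example.py consuming the fixtures above:
# 'image' is built from the parametrised Dockerfile, 'docker_cli' removes
# containers tagged with IMAGE_NAME once the test finishes.
def test_container_starts(docker_cli, image):
    container = docker_cli.containers.run(image, detach=True)
    container.reload()  # refresh container state from the daemon
    assert container.status == 'running'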
tests/test-requirements.txt (new file, 19 lines)

@@ -0,0 +1,19 @@
atomicwrites==1.3.0
attrs==19.1.0
certifi==2019.6.16
chardet==3.0.4
docker==4.0.2
idna==2.8
importlib-metadata==0.19
more-itertools==7.2.0
packaging==19.1
pluggy==0.12.0
py==1.8.0
pyparsing==2.4.2
pytest==5.0.1
requests==2.22.0
six==1.12.0
urllib3==1.25.3
wcwidth==0.1.7
websocket-client==0.56.0
zipp==0.5.2
tests/test_image.py (new file, 134 lines)

@@ -0,0 +1,134 @@
import pytest

import io
import tarfile
import time
import xml.etree.ElementTree as etree

import requests


# Helper function to get a file-like object from an image
def get_fileobj_from_container(container, filepath):
    time.sleep(0.5)  # Give container a moment if just started
    stream, stat = container.get_archive(filepath)
    f = io.BytesIO()
    for chunk in stream:
        f.write(chunk)
    f.seek(0)
    with tarfile.open(fileobj=f, mode='r') as tar:
        filename = tar.getmembers()[0].name
        file = tar.extractfile(filename)
        return file


# def test_server_xml_defaults(docker_cli, image):
#     container = docker_cli.containers.run(image, detach=True)
#     server_xml = get_fileobj_from_container(container, '/opt/atlassian/jira/conf/server.xml')
#     xml = etree.parse(server_xml)
#     connector = xml.find('.//Connector')
#     context = xml.find('.//Context')
#
#     assert connector.get('port') == '8090'
#     assert connector.get('maxThreads') == '200'
#     assert connector.get('minSpareThreads') == '10'
#     assert connector.get('connectionTimeout') == '20000'
#     assert connector.get('enableLookups') == 'false'
#     assert connector.get('protocol') == 'HTTP/1.1'
#     assert connector.get('acceptCount') == '10'
#     assert connector.get('secure') == 'false'
#     assert connector.get('scheme') == 'http'
#     assert connector.get('proxyName') == ''
#     assert connector.get('proxyPort') == ''
#
#
# def test_server_xml_params(docker_cli, image):
#     environment = {
#         'ATL_TOMCAT_MGMT_PORT': '8006',
#         'ATL_TOMCAT_PORT': '9090',
#         'ATL_TOMCAT_MAXTHREADS': '201',
#         'ATL_TOMCAT_MINSPARETHREADS': '11',
#         'ATL_TOMCAT_CONNECTIONTIMEOUT': '20001',
#         'ATL_TOMCAT_ENABLELOOKUPS': 'true',
#         'ATL_TOMCAT_PROTOCOL': 'HTTP/2',
#         'ATL_TOMCAT_ACCEPTCOUNT': '11',
#         'ATL_TOMCAT_SECURE': 'true',
#         'ATL_TOMCAT_SCHEME': 'https',
#         'ATL_PROXY_NAME': 'jira.atlassian.com',
#         'ATL_PROXY_PORT': '443',
#         'ATL_TOMCAT_CONTEXTPATH': '/myjira',
#     }
#     container = docker_cli.containers.run(image, environment=environment, detach=True)
#     server_xml = get_fileobj_from_container(container, '/opt/atlassian/jira/conf/server.xml')
#     xml = etree.parse(server_xml)
#     server = xml.getroot()
#     connector = xml.find('.//Connector')
#     context = xml.find('.//Context')
#
#     assert server.get('port') == environment.get('ATL_TOMCAT_MGMT_PORT')
#
#     assert connector.get('port') == environment.get('ATL_TOMCAT_PORT')
#     assert connector.get('maxThreads') == environment.get('ATL_TOMCAT_MAXTHREADS')
#     assert connector.get('minSpareThreads') == environment.get('ATL_TOMCAT_MINSPARETHREADS')
#     assert connector.get('connectionTimeout') == environment.get('ATL_TOMCAT_CONNECTIONTIMEOUT')
#     assert connector.get('enableLookups') == environment.get('ATL_TOMCAT_ENABLELOOKUPS')
#     assert connector.get('protocol') == environment.get('ATL_TOMCAT_PROTOCOL')
#     assert connector.get('acceptCount') == environment.get('ATL_TOMCAT_ACCEPTCOUNT')
#     assert connector.get('secure') == environment.get('ATL_TOMCAT_SECURE')
#     assert connector.get('scheme') == environment.get('ATL_TOMCAT_SCHEME')
#     assert connector.get('proxyName') == environment.get('ATL_PROXY_NAME')
#     assert connector.get('proxyPort') == environment.get('ATL_PROXY_PORT')
#
#     assert context.get('path') == environment.get('ATL_TOMCAT_CONTEXTPATH')
#
#
# def test_confluence_cfg_xml_defaults(docker_cli, image):
#     environment = {
#
#     }
#     container = docker_cli.containers.run(image, environment=environment, detach=True)
#     confluence_cfg_xml = get_fileobj_from_container(container, '/var/atlassian/application-data/confluence/confluence.cfg.xml')
#     xml = etree.parse(confluence_cfg_xml)
#
#
# def test_confluence_cfg_xml_params(docker_cli, image):
#     environment = {
#
#     }
#     container = docker_cli.containers.run(image, environment=environment, detach=True)
#     confluence_cfg_xml = get_fileobj_from_container(container, '/var/atlassian/application-data/confluence/confluence.cfg.xml')
#     xml = etree.parse(confluence_cfg_xml)


def test_jvm_args(docker_cli, image):
    environment = {
        'JVM_MINIMUM_MEMORY': '383m',
        'JVM_MAXIMUM_MEMORY': '2047m',
        'JVM_SUPPORT_RECOMMENDED_ARGS': '-verbose:gc',
    }
    container = docker_cli.containers.run(image, environment=environment, detach=True)
    time.sleep(0.5)  # JVM doesn't start immediately when container runs
    procs = container.exec_run('ps aux')
    procs_list = procs.output.decode().split('\n')
    jvm = [proc for proc in procs_list if '-Dconfluence.home' in proc][0]
    assert f'-Xms{environment.get("JVM_MINIMUM_MEMORY")}' in jvm
    assert f'-Xmx{environment.get("JVM_MAXIMUM_MEMORY")}' in jvm
    assert environment.get('JVM_SUPPORT_RECOMMENDED_ARGS') in jvm


def test_first_run_state(docker_cli, image):
    PORT = 8090
    container = docker_cli.containers.run(image, ports={PORT: PORT}, detach=True)
    for i in range(20):
        try:
            r = requests.get(f'http://localhost:{PORT}/status')
        except requests.exceptions.ConnectionError:
            pass
        else:
            if r.status_code == 200:
                state = r.json().get('state')
                assert state in ('STARTING', 'FIRST_RUN')
                return
        time.sleep(1)
    raise TimeoutError
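The commented-out blocks above still reference Jira paths and ATL_* variables; once revived for Confluence they would use the helper along these lines. A sketch only: the test name is hypothetical and the path follows CONFLUENCE_INSTALL_DIR from the Dockerfiles:

# Sketch of a revived server.xml check against the Confluence install path.
def test_server_xml_default_port(docker_cli, image):
    container = docker_cli.containers.run(image, detach=True)
    server_xml = get_fileobj_from_container(
        container, '/opt/atlassian/confluence/conf/server.xml')
    connector = etree.parse(server_xml).find('.//Connector')
    assert connector.get('port') == '8090'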